author     Patrick Williams <patrick@stwcx.xyz>  2016-03-30 15:21:19 -0500
committer  Patrick Williams <patrick@stwcx.xyz>  2016-03-30 15:21:19 -0500
commit     b4a027550acf2c1051c34f997b8e7e845017af4b (patch)
tree       9e38d3c17b42cb1e6765620a87e908973a93c821 /yocto-poky/meta/lib/oe
parent     2fe86d90044af218ced8f42fdded6b136f1046d2 (diff)
parent     f1e5d6968976c2341c6d554bfcc8895f1b33c26b (diff)
download   talos-openbmc-b4a027550acf2c1051c34f997b8e7e845017af4b.tar.gz
           talos-openbmc-b4a027550acf2c1051c34f997b8e7e845017af4b.zip
Merge commit 'f1e5d6968976c2341c6d554bfcc8895f1b33c26b' from yocto-2.0.1
Diffstat (limited to 'yocto-poky/meta/lib/oe')
-rw-r--r--  yocto-poky/meta/lib/oe/copy_buildsystem.py |   5
-rw-r--r--  yocto-poky/meta/lib/oe/distro_check.py     | 108
-rw-r--r--  yocto-poky/meta/lib/oe/image.py            |  62
-rw-r--r--  yocto-poky/meta/lib/oe/package_manager.py  | 137
-rw-r--r--  yocto-poky/meta/lib/oe/patch.py            |  12
-rw-r--r--  yocto-poky/meta/lib/oe/recipeutils.py      |  94
-rw-r--r--  yocto-poky/meta/lib/oe/rootfs.py           |   8
-rw-r--r--  yocto-poky/meta/lib/oe/sdk.py              |  76
-rw-r--r--  yocto-poky/meta/lib/oe/sstatesig.py        |  20
9 files changed, 411 insertions(+), 111 deletions(-)
diff --git a/yocto-poky/meta/lib/oe/copy_buildsystem.py b/yocto-poky/meta/lib/oe/copy_buildsystem.py
index 979578c41..c0e7541c0 100644
--- a/yocto-poky/meta/lib/oe/copy_buildsystem.py
+++ b/yocto-poky/meta/lib/oe/copy_buildsystem.py
@@ -14,8 +14,9 @@ def _smart_copy(src, dest):
shutil.copymode(src, dest)
class BuildSystem(object):
- def __init__(self, d):
+ def __init__(self, context, d):
self.d = d
+ self.context = context
self.layerdirs = d.getVar('BBLAYERS', True).split()
def copy_bitbake_and_layers(self, destdir):
@@ -38,7 +39,7 @@ class BuildSystem(object):
if os.path.exists(layerconf):
with open(layerconf, 'r') as f:
if f.readline().startswith("# ### workspace layer auto-generated by devtool ###"):
- bb.warn("Skipping local workspace layer %s" % layer)
+ bb.plain("NOTE: Excluding local workspace layer %s from %s" % (layer, self.context))
continue
# If the layer was already under corebase, leave it there
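The only behavioural change here is threading a human-readable context string through to the log message. A minimal sketch of a caller, assuming a live bitbake datastore d and an output directory destdir (both, like the 'extensible SDK' label, are illustrative assumptions, not taken from this commit):

    # Hedged sketch: the context string only feeds the bb.plain() note above.
    import os
    import oe.copy_buildsystem
    buildsystem = oe.copy_buildsystem.BuildSystem('extensible SDK', d)
    buildsystem.copy_bitbake_and_layers(os.path.join(destdir, 'layers'))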
diff --git a/yocto-poky/meta/lib/oe/distro_check.py b/yocto-poky/meta/lib/oe/distro_check.py
index 8ed5b0ec8..f92cd2e42 100644
--- a/yocto-poky/meta/lib/oe/distro_check.py
+++ b/yocto-poky/meta/lib/oe/distro_check.py
@@ -1,7 +1,23 @@
-def get_links_from_url(url):
+from contextlib import contextmanager
+@contextmanager
+def create_socket(url, d):
+ import urllib
+ socket = urllib.urlopen(url, proxies=get_proxies(d))
+ try:
+ yield socket
+ finally:
+ socket.close()
+
+def get_proxies(d):
+ import os
+ proxykeys = ['http', 'https', 'ftp', 'ftps', 'no', 'all']
+ proxyvalues = map(lambda key: d.getVar(key+'_proxy', True), proxykeys)
+ return dict(zip(proxykeys, proxyvalues))
+
+def get_links_from_url(url, d):
"Return all the href links found on the web location"
- import urllib, sgmllib
+ import sgmllib
class LinksParser(sgmllib.SGMLParser):
def parse(self, s):
@@ -24,19 +40,18 @@ def get_links_from_url(url):
"Return the list of hyperlinks."
return self.hyperlinks
- sock = urllib.urlopen(url)
- webpage = sock.read()
- sock.close()
+ with create_socket(url,d) as sock:
+ webpage = sock.read()
linksparser = LinksParser()
linksparser.parse(webpage)
return linksparser.get_hyperlinks()
-def find_latest_numeric_release(url):
+def find_latest_numeric_release(url, d):
"Find the latest listed numeric release on the given url"
max=0
maxstr=""
- for link in get_links_from_url(url):
+ for link in get_links_from_url(url, d):
try:
release = float(link)
except:
@@ -70,7 +85,7 @@ def clean_package_list(package_list):
return set.keys()
-def get_latest_released_meego_source_package_list():
+def get_latest_released_meego_source_package_list(d):
"Returns list of all the name os packages in the latest meego distro"
package_names = []
@@ -82,11 +97,11 @@ def get_latest_released_meego_source_package_list():
package_list=clean_package_list(package_names)
return "1.0", package_list
-def get_source_package_list_from_url(url, section):
+def get_source_package_list_from_url(url, section, d):
"Return a sectioned list of package names from a URL list"
bb.note("Reading %s: %s" % (url, section))
- links = get_links_from_url(url)
+ links = get_links_from_url(url, d)
srpms = filter(is_src_rpm, links)
names_list = map(package_name_from_srpm, srpms)
@@ -96,44 +111,44 @@ def get_source_package_list_from_url(url, section):
return new_pkgs
-def get_latest_released_fedora_source_package_list():
+def get_latest_released_fedora_source_package_list(d):
"Returns list of all the name os packages in the latest fedora distro"
- latest = find_latest_numeric_release("http://archive.fedoraproject.org/pub/fedora/linux/releases/")
+ latest = find_latest_numeric_release("http://archive.fedoraproject.org/pub/fedora/linux/releases/", d)
- package_names = get_source_package_list_from_url("http://archive.fedoraproject.org/pub/fedora/linux/releases/%s/Fedora/source/SRPMS/" % latest, "main")
+ package_names = get_source_package_list_from_url("http://archive.fedoraproject.org/pub/fedora/linux/releases/%s/Fedora/source/SRPMS/" % latest, "main", d)
# package_names += get_source_package_list_from_url("http://download.fedora.redhat.com/pub/fedora/linux/releases/%s/Everything/source/SPRMS/" % latest, "everything")
- package_names += get_source_package_list_from_url("http://archive.fedoraproject.org/pub/fedora/linux/updates/%s/SRPMS/" % latest, "updates")
+ package_names += get_source_package_list_from_url("http://archive.fedoraproject.org/pub/fedora/linux/updates/%s/SRPMS/" % latest, "updates", d)
package_list=clean_package_list(package_names)
return latest, package_list
-def get_latest_released_opensuse_source_package_list():
+def get_latest_released_opensuse_source_package_list(d):
"Returns list of all the name os packages in the latest opensuse distro"
- latest = find_latest_numeric_release("http://download.opensuse.org/source/distribution/")
+ latest = find_latest_numeric_release("http://download.opensuse.org/source/distribution/",d)
- package_names = get_source_package_list_from_url("http://download.opensuse.org/source/distribution/%s/repo/oss/suse/src/" % latest, "main")
- package_names += get_source_package_list_from_url("http://download.opensuse.org/update/%s/rpm/src/" % latest, "updates")
+ package_names = get_source_package_list_from_url("http://download.opensuse.org/source/distribution/%s/repo/oss/suse/src/" % latest, "main", d)
+ package_names += get_source_package_list_from_url("http://download.opensuse.org/update/%s/rpm/src/" % latest, "updates", d)
package_list=clean_package_list(package_names)
return latest, package_list
-def get_latest_released_mandriva_source_package_list():
+def get_latest_released_mandriva_source_package_list(d):
"Returns list of all the name os packages in the latest mandriva distro"
- latest = find_latest_numeric_release("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/")
- package_names = get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/main/release/" % latest, "main")
+ latest = find_latest_numeric_release("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/", d)
+ package_names = get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/main/release/" % latest, "main", d)
# package_names += get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/contrib/release/" % latest, "contrib")
- package_names += get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/main/updates/" % latest, "updates")
+ package_names += get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/main/updates/" % latest, "updates", d)
package_list=clean_package_list(package_names)
return latest, package_list
-def find_latest_debian_release(url):
+def find_latest_debian_release(url, d):
"Find the latest listed debian release on the given url"
releases = []
- for link in get_links_from_url(url):
+ for link in get_links_from_url(url, d):
if link[:6] == "Debian":
if ';' not in link:
releases.append(link)
@@ -143,16 +158,15 @@ def find_latest_debian_release(url):
except:
return "_NotFound_"
-def get_debian_style_source_package_list(url, section):
+def get_debian_style_source_package_list(url, section, d):
"Return the list of package-names stored in the debian style Sources.gz file"
- import urllib
- sock = urllib.urlopen(url)
- import tempfile
- tmpfile = tempfile.NamedTemporaryFile(mode='wb', prefix='oecore.', suffix='.tmp', delete=False)
- tmpfilename=tmpfile.name
- tmpfile.write(sock.read())
- sock.close()
- tmpfile.close()
+ with create_socket(url,d) as sock:
+ webpage = sock.read()
+ import tempfile
+ tmpfile = tempfile.NamedTemporaryFile(mode='wb', prefix='oecore.', suffix='.tmp', delete=False)
+ tmpfilename=tmpfile.name
+ tmpfile.write(webpage)
+ tmpfile.close()
import gzip
bb.note("Reading %s: %s" % (url, section))
@@ -165,41 +179,41 @@ def get_debian_style_source_package_list(url, section):
return package_names
-def get_latest_released_debian_source_package_list():
+def get_latest_released_debian_source_package_list(d):
"Returns list of all the name os packages in the latest debian distro"
- latest = find_latest_debian_release("http://ftp.debian.org/debian/dists/")
+ latest = find_latest_debian_release("http://ftp.debian.org/debian/dists/", d)
url = "http://ftp.debian.org/debian/dists/stable/main/source/Sources.gz"
- package_names = get_debian_style_source_package_list(url, "main")
+ package_names = get_debian_style_source_package_list(url, "main", d)
# url = "http://ftp.debian.org/debian/dists/stable/contrib/source/Sources.gz"
# package_names += get_debian_style_source_package_list(url, "contrib")
url = "http://ftp.debian.org/debian/dists/stable-proposed-updates/main/source/Sources.gz"
- package_names += get_debian_style_source_package_list(url, "updates")
+ package_names += get_debian_style_source_package_list(url, "updates", d)
package_list=clean_package_list(package_names)
return latest, package_list
-def find_latest_ubuntu_release(url):
+def find_latest_ubuntu_release(url, d):
"Find the latest listed ubuntu release on the given url"
url += "?C=M;O=D" # Descending Sort by Last Modified
- for link in get_links_from_url(url):
+ for link in get_links_from_url(url, d):
if link[-8:] == "-updates":
return link[:-8]
return "_NotFound_"
-def get_latest_released_ubuntu_source_package_list():
+def get_latest_released_ubuntu_source_package_list(d):
"Returns list of all the name os packages in the latest ubuntu distro"
- latest = find_latest_ubuntu_release("http://archive.ubuntu.com/ubuntu/dists/")
+ latest = find_latest_ubuntu_release("http://archive.ubuntu.com/ubuntu/dists/", d)
url = "http://archive.ubuntu.com/ubuntu/dists/%s/main/source/Sources.gz" % latest
- package_names = get_debian_style_source_package_list(url, "main")
+ package_names = get_debian_style_source_package_list(url, "main", d)
# url = "http://archive.ubuntu.com/ubuntu/dists/%s/multiverse/source/Sources.gz" % latest
# package_names += get_debian_style_source_package_list(url, "multiverse")
# url = "http://archive.ubuntu.com/ubuntu/dists/%s/universe/source/Sources.gz" % latest
# package_names += get_debian_style_source_package_list(url, "universe")
url = "http://archive.ubuntu.com/ubuntu/dists/%s-updates/main/source/Sources.gz" % latest
- package_names += get_debian_style_source_package_list(url, "updates")
+ package_names += get_debian_style_source_package_list(url, "updates", d)
package_list=clean_package_list(package_names)
return latest, package_list
-def create_distro_packages_list(distro_check_dir):
+def create_distro_packages_list(distro_check_dir, d):
pkglst_dir = os.path.join(distro_check_dir, "package_lists")
if not os.path.isdir (pkglst_dir):
os.makedirs(pkglst_dir)
@@ -220,7 +234,7 @@ def create_distro_packages_list(distro_check_dir):
begin = datetime.now()
for distro in per_distro_functions:
name = distro[0]
- release, package_list = distro[1]()
+ release, package_list = distro[1](d)
bb.note("Distro: %s, Latest Release: %s, # src packages: %d" % (name, release, len(package_list)))
package_list_file = os.path.join(pkglst_dir, name + "-" + release)
f = open(package_list_file, "w+b")
@@ -231,7 +245,7 @@ def create_distro_packages_list(distro_check_dir):
delta = end - begin
bb.note("package_list generatiosn took this much time: %d seconds" % delta.seconds)
-def update_distro_data(distro_check_dir, datetime):
+def update_distro_data(distro_check_dir, datetime, d):
"""
If distro packages list data is old then rebuild it.
The operations have to be protected by a lock so that
@@ -258,7 +272,7 @@ def update_distro_data(distro_check_dir, datetime):
if saved_datetime[0:8] != datetime[0:8]:
bb.note("The build datetime did not match: saved:%s current:%s" % (saved_datetime, datetime))
bb.note("Regenerating distro package lists")
- create_distro_packages_list(distro_check_dir)
+ create_distro_packages_list(distro_check_dir, d)
f.seek(0)
f.write(datetime)
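The net effect of this refactor is that every network access in distro_check.py now honours the proxy settings held in the datastore. A standalone sketch of the new plumbing (Python 2 to match the module; the stub class below is illustrative, not bitbake API):

    # Minimal stand-in for the bitbake datastore, enough for get_proxies().
    class StubData(object):
        def __init__(self, values):
            self.values = values
        def getVar(self, name, expand=True):
            return self.values.get(name)

    d = StubData({'http_proxy': 'http://proxy.example.com:3128',
                  'no_proxy': 'localhost,127.0.0.1'})
    print(get_proxies(d))
    # -> {'http': 'http://proxy.example.com:3128', 'https': None, 'ftp': None,
    #     'ftps': None, 'no': 'localhost,127.0.0.1', 'all': None}

    # create_socket() then hands exactly this mapping to urllib.urlopen():
    # with create_socket('http://ftp.debian.org/debian/dists/', d) as sock:
    #     page = sock.read()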
diff --git a/yocto-poky/meta/lib/oe/image.py b/yocto-poky/meta/lib/oe/image.py
index f9e9bfd58..b9eb3de5a 100644
--- a/yocto-poky/meta/lib/oe/image.py
+++ b/yocto-poky/meta/lib/oe/image.py
@@ -5,7 +5,7 @@ import multiprocessing
def generate_image(arg):
- (type, subimages, create_img_cmd) = arg
+ (type, subimages, create_img_cmd, sprefix) = arg
bb.note("Running image creation script for %s: %s ..." %
(type, create_img_cmd))
@@ -54,14 +54,16 @@ class ImageDepGraph(object):
base_type = self._image_base_type(node)
deps = (self.d.getVar('IMAGE_TYPEDEP_' + node, True) or "")
base_deps = (self.d.getVar('IMAGE_TYPEDEP_' + base_type, True) or "")
- if deps != "" or base_deps != "":
- graph[node] = deps
- for dep in deps.split() + base_deps.split():
- if not dep in graph:
- add_node(dep)
- else:
- graph[node] = ""
+ graph[node] = ""
+ for dep in deps.split() + base_deps.split():
+ if not dep in graph[node]:
+ if graph[node] != "":
+ graph[node] += " "
+ graph[node] += dep
+
+ if not dep in graph:
+ add_node(dep)
for fstype in image_fstypes:
add_node(fstype)
@@ -264,9 +266,9 @@ class Image(ImageDepGraph):
return (alltypes, filtered_groups, cimages)
- def _write_script(self, type, cmds):
+ def _write_script(self, type, cmds, sprefix=""):
tempdir = self.d.getVar('T', True)
- script_name = os.path.join(tempdir, "create_image." + type)
+ script_name = os.path.join(tempdir, sprefix + "create_image." + type)
rootfs_size = self._get_rootfs_size()
self.d.setVar('img_creation_func', '\n'.join(cmds))
@@ -284,7 +286,7 @@ class Image(ImageDepGraph):
return script_name
- def _get_imagecmds(self):
+ def _get_imagecmds(self, sprefix=""):
old_overrides = self.d.getVar('OVERRIDES', 0)
alltypes, fstype_groups, cimages = self._get_image_types()
@@ -320,9 +322,9 @@ class Image(ImageDepGraph):
else:
subimages.append(type)
- script_name = self._write_script(type, cmds)
+ script_name = self._write_script(type, cmds, sprefix)
- image_cmds.append((type, subimages, script_name))
+ image_cmds.append((type, subimages, script_name, sprefix))
image_cmd_groups.append(image_cmds)
@@ -355,6 +357,27 @@ class Image(ImageDepGraph):
image_cmd_groups = self._get_imagecmds()
+ # Process the debug filesystem...
+ debugfs_d = bb.data.createCopy(self.d)
+ if self.d.getVar('IMAGE_GEN_DEBUGFS', True) == "1":
+ bb.note("Processing debugfs image(s) ...")
+ orig_d = self.d
+ self.d = debugfs_d
+
+ self.d.setVar('IMAGE_ROOTFS', orig_d.getVar('IMAGE_ROOTFS', True) + '-dbg')
+ self.d.setVar('IMAGE_NAME', orig_d.getVar('IMAGE_NAME', True) + '-dbg')
+ self.d.setVar('IMAGE_LINK_NAME', orig_d.getVar('IMAGE_LINK_NAME', True) + '-dbg')
+
+ debugfs_image_fstypes = orig_d.getVar('IMAGE_FSTYPES_DEBUGFS', True)
+ if debugfs_image_fstypes:
+ self.d.setVar('IMAGE_FSTYPES', orig_d.getVar('IMAGE_FSTYPES_DEBUGFS', True))
+
+ self._remove_old_symlinks()
+
+ image_cmd_groups += self._get_imagecmds("debugfs.")
+
+ self.d = orig_d
+
self._write_wic_env()
for image_cmds in image_cmd_groups:
@@ -369,9 +392,16 @@ class Image(ImageDepGraph):
if result is not None:
bb.fatal(result)
- for image_type, subimages, script in image_cmds:
- bb.note("Creating symlinks for %s image ..." % image_type)
- self._create_symlinks(subimages)
+ for image_type, subimages, script, sprefix in image_cmds:
+ if sprefix == 'debugfs.':
+ bb.note("Creating symlinks for %s debugfs image ..." % image_type)
+ orig_d = self.d
+ self.d = debugfs_d
+ self._create_symlinks(subimages)
+ self.d = orig_d
+ else:
+ bb.note("Creating symlinks for %s image ..." % image_type)
+ self._create_symlinks(subimages)
execute_pre_post_process(self.d, post_process_cmds)
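Two things happen in this file: add_node() now records every image type in the graph (merging IMAGE_TYPEDEP_* entries without duplicates), and IMAGE_GEN_DEBUGFS = "1" triggers a second image-command pass with a "debugfs." script prefix and "-dbg"-suffixed artifacts. A simplified standalone rendering of the new graph construction (the dependency data is made up; the real code also folds in the base type's deps from the datastore):

    def build_graph(typedeps, image_fstypes):
        graph = {}
        def add_node(node):
            deps = typedeps.get(node, "")
            graph[node] = ""
            for dep in deps.split():
                if dep not in graph[node]:
                    if graph[node] != "":
                        graph[node] += " "
                    graph[node] += dep
                if dep not in graph:
                    add_node(dep)
        for fstype in image_fstypes:
            add_node(fstype)
        return graph

    print(build_graph({'live': 'ext4 ext4', 'ext4': ''}, ['live', 'tar.gz']))
    # -> {'live': 'ext4', 'ext4': '', 'tar.gz': ''}
    # duplicates collapsed; leaf types now present with empty dep strings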
diff --git a/yocto-poky/meta/lib/oe/package_manager.py b/yocto-poky/meta/lib/oe/package_manager.py
index 292ed4446..b9fa6d879 100644
--- a/yocto-poky/meta/lib/oe/package_manager.py
+++ b/yocto-poky/meta/lib/oe/package_manager.py
@@ -133,8 +133,11 @@ class RpmIndexer(Indexer):
if pkgfeed_gpg_name:
repomd_file = os.path.join(arch_dir, 'repodata', 'repomd.xml')
gpg_cmd = "%s --detach-sign --armor --batch --no-tty --yes " \
- "--passphrase-file '%s' -u '%s' %s" % (gpg_bin,
- pkgfeed_gpg_pass, pkgfeed_gpg_name, repomd_file)
+ "--passphrase-file '%s' -u '%s' " % \
+ (gpg_bin, pkgfeed_gpg_pass, pkgfeed_gpg_name)
+ if self.d.getVar('GPG_PATH', True):
+ gpg_cmd += "--homedir %s " % self.d.getVar('GPG_PATH', True)
+ gpg_cmd += repomd_file
repo_sign_cmds.append(gpg_cmd)
rpm_dirs_found = True
@@ -200,6 +203,8 @@ class OpkgIndexer(Indexer):
result = oe.utils.multiprocess_exec(index_cmds, create_index)
if result:
bb.fatal('%s' % ('\n'.join(result)))
+ if self.d.getVar('PACKAGE_FEED_SIGN', True) == '1':
+ raise NotImplementedError('Package feed signing not implemented for ipk')
@@ -275,6 +280,8 @@ class DpkgIndexer(Indexer):
result = oe.utils.multiprocess_exec(index_cmds, create_index)
if result:
bb.fatal('%s' % ('\n'.join(result)))
+ if self.d.getVar('PACKAGE_FEED_SIGN', True) == '1':
+ raise NotImplementedError('Package feed signing not implemented for dpkg')
@@ -434,24 +441,30 @@ class OpkgPkgsList(PkgsList):
(self.opkg_cmd, self.opkg_args)
try:
- output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).strip()
+ # bb.note(cmd)
+ tmp_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).strip()
+
except subprocess.CalledProcessError as e:
bb.fatal("Cannot get the installed packages list. Command '%s' "
"returned %d:\n%s" % (cmd, e.returncode, e.output))
- if output and format == "file":
- tmp_output = ""
- for line in output.split('\n'):
+ output = list()
+ for line in tmp_output.split('\n'):
+ if len(line.strip()) == 0:
+ continue
+ if format == "file":
pkg, pkg_file, pkg_arch = line.split()
full_path = os.path.join(self.rootfs_dir, pkg_arch, pkg_file)
if os.path.exists(full_path):
- tmp_output += "%s %s %s\n" % (pkg, full_path, pkg_arch)
+ output.append('%s %s %s' % (pkg, full_path, pkg_arch))
else:
- tmp_output += "%s %s %s\n" % (pkg, pkg_file, pkg_arch)
+ output.append('%s %s %s' % (pkg, pkg_file, pkg_arch))
+ else:
+ output.append(line)
- output = tmp_output
+ output.sort()
- return output
+ return '\n'.join(output)
class DpkgPkgsList(PkgsList):
@@ -605,12 +618,12 @@ class PackageManager(object):
cmd.extend(['-x', exclude])
try:
bb.note("Installing complementary packages ...")
+ bb.note('Running %s' % cmd)
complementary_pkgs = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
bb.fatal("Could not compute complementary packages list. Command "
"'%s' returned %d:\n%s" %
(' '.join(cmd), e.returncode, e.output))
-
self.install(complementary_pkgs.split(), attempt_only=True)
def deploy_dir_lock(self):
@@ -1050,6 +1063,35 @@ class RpmPM(PackageManager):
def update(self):
self._invoke_smart('update rpmsys')
+ def get_rdepends_recursively(self, pkgs):
+ # pkgs will be changed during the loop, so use [:] to make a copy.
+ for pkg in pkgs[:]:
+ sub_data = oe.packagedata.read_subpkgdata(pkg, self.d)
+ sub_rdep = sub_data.get("RDEPENDS_" + pkg)
+ if not sub_rdep:
+ continue
+ done = bb.utils.explode_dep_versions2(sub_rdep).keys()
+ next = done
+ # Find all the rdepends on dependency chain
+ while next:
+ new = []
+ for sub_pkg in next:
+ sub_data = oe.packagedata.read_subpkgdata(sub_pkg, self.d)
+ sub_pkg_rdep = sub_data.get("RDEPENDS_" + sub_pkg)
+ if not sub_pkg_rdep:
+ continue
+ for p in bb.utils.explode_dep_versions2(sub_pkg_rdep):
+ # Already handled, skip it.
+ if p in done or p in pkgs:
+ continue
+ # It's a new dep
+ if oe.packagedata.has_subpkgdata(p, self.d):
+ done.append(p)
+ new.append(p)
+ next = new
+ pkgs.extend(done)
+ return pkgs
+
'''
Install pkgs with smart, the pkg name is oe format
'''
@@ -1059,8 +1101,58 @@ class RpmPM(PackageManager):
bb.note("There are no packages to install")
return
bb.note("Installing the following packages: %s" % ' '.join(pkgs))
+ if not attempt_only:
+ # Pull in multilib requires since rpm may not pull them in
+ # correctly. For example,
+ # lib32-packagegroup-core-standalone-sdk-target requires
+ # lib32-libc6, but rpm may pull in libc6 rather than lib32-libc6
+ # since it doesn't know the mlprefix (lib32-). bitbake does know
+ # it and can handle it well, so walking the RDEPENDS chain
+ # fixes the problem. Both do_rootfs and do_populate_sdk have this
+ # issue.
+ # The attempt_only packages don't need this since they are
+ # based on the installed ones.
+ #
+ # Separate pkgs into two lists, one is multilib, the other one
+ # is non-multilib.
+ ml_pkgs = []
+ non_ml_pkgs = pkgs[:]
+ for pkg in pkgs:
+ for mlib in (self.d.getVar("MULTILIB_VARIANTS", True) or "").split():
+ if pkg.startswith(mlib + '-'):
+ ml_pkgs.append(pkg)
+ non_ml_pkgs.remove(pkg)
+
+ if len(ml_pkgs) > 0 and len(non_ml_pkgs) > 0:
+ # Found both foo and lib-foo
+ ml_pkgs = self.get_rdepends_recursively(ml_pkgs)
+ non_ml_pkgs = self.get_rdepends_recursively(non_ml_pkgs)
+ # A longer list makes smart slower, so only keep the pkgs
+ # that have the same BPN; smart can handle the others
+ # correctly.
+ pkgs_new = []
+ for pkg in non_ml_pkgs:
+ for mlib in (self.d.getVar("MULTILIB_VARIANTS", True) or "").split():
+ mlib_pkg = mlib + "-" + pkg
+ if mlib_pkg in ml_pkgs:
+ pkgs_new.append(pkg)
+ pkgs_new.append(mlib_pkg)
+ for pkg in pkgs:
+ if pkg not in pkgs_new:
+ pkgs_new.append(pkg)
+ pkgs = pkgs_new
+ new_depends = {}
+ deps = bb.utils.explode_dep_versions2(" ".join(pkgs))
+ for depend in deps:
+ data = oe.packagedata.read_subpkgdata(depend, self.d)
+ key = "PKG_%s" % depend
+ if key in data:
+ new_depend = data[key]
+ else:
+ new_depend = depend
+ new_depends[new_depend] = deps[depend]
+ pkgs = bb.utils.join_deps(new_depends, commasep=True).split(', ')
pkgs = self._pkg_translate_oe_to_smart(pkgs, attempt_only)
-
if not attempt_only:
bb.note('to be installed: %s' % ' '.join(pkgs))
cmd = "%s %s install -y %s" % \
@@ -1379,6 +1471,16 @@ class OpkgPM(PackageManager):
self.d.getVar('FEED_DEPLOYDIR_BASE_URI', True),
arch))
+ if self.opkg_dir != '/var/lib/opkg':
+ # There is no command line option for this anymore, we need to add
+ # info_dir and status_file to config file, if OPKGLIBDIR doesn't have
+ # the default value of "/var/lib" as defined in opkg:
+ # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_INFO_DIR "/var/lib/opkg/info"
+ # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_STATUS_FILE "/var/lib/opkg/status"
+ cfg_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'info'))
+ cfg_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'status'))
+
+
def _create_config(self):
with open(self.config_file, "w+") as config_file:
priority = 1
@@ -1394,6 +1496,15 @@ class OpkgPM(PackageManager):
config_file.write("src oe-%s file:%s\n" %
(arch, pkgs_dir))
+ if self.opkg_dir != '/var/lib/opkg':
+ # There is no command line option for this anymore, we need to add
+ # info_dir and status_file to config file, if OPKGLIBDIR doesn't have
+ # the default value of "/var/lib" as defined in opkg:
+ # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_INFO_DIR "/var/lib/opkg/info"
+ # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_STATUS_FILE "/var/lib/opkg/status"
+ config_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'info'))
+ config_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'status'))
+
def insert_feeds_uris(self):
if self.feed_uris == "":
return
@@ -1433,7 +1544,7 @@ class OpkgPM(PackageManager):
self.deploy_dir_unlock()
def install(self, pkgs, attempt_only=False):
- if attempt_only and len(pkgs) == 0:
+ if not pkgs:
return
cmd = "%s %s install %s" % (self.opkg_cmd, self.opkg_args, ' '.join(pkgs))
diff --git a/yocto-poky/meta/lib/oe/patch.py b/yocto-poky/meta/lib/oe/patch.py
index 108bf1de5..2bf501e9e 100644
--- a/yocto-poky/meta/lib/oe/patch.py
+++ b/yocto-poky/meta/lib/oe/patch.py
@@ -337,12 +337,15 @@ class GitApplyTree(PatchTree):
return (tmpfile, cmd)
@staticmethod
- def extractPatches(tree, startcommit, outdir):
+ def extractPatches(tree, startcommit, outdir, paths=None):
import tempfile
import shutil
tempdir = tempfile.mkdtemp(prefix='oepatch')
try:
shellcmd = ["git", "format-patch", startcommit, "-o", tempdir]
+ if paths:
+ shellcmd.append('--')
+ shellcmd.extend(paths)
out = runcmd(["sh", "-c", " ".join(shellcmd)], tree)
if out:
for srcfile in out.split():
@@ -407,6 +410,13 @@ class GitApplyTree(PatchTree):
runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
except CmdError:
pass
+ # git am won't always clean up after itself, sadly, so...
+ shellcmd = ["git", "--work-tree=%s" % reporoot, "reset", "--hard", "HEAD"]
+ runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
+ # Also need to take care of any stray untracked files
+ shellcmd = ["git", "--work-tree=%s" % reporoot, "clean", "-f"]
+ runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
+
# Fall back to git apply
shellcmd = ["git", "--git-dir=%s" % reporoot, "apply", "-p%s" % patch['strippath']]
try:
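The new paths argument maps directly onto git's pathspec separator: git format-patch <startcommit> -- <paths>. A hedged usage sketch (the tree, commit and path values here are hypothetical):

    # Export only the commits since HEAD~3 that touch src/main.c.
    GitApplyTree.extractPatches(srctree, 'HEAD~3', outdir, paths=['src/main.c'])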
diff --git a/yocto-poky/meta/lib/oe/recipeutils.py b/yocto-poky/meta/lib/oe/recipeutils.py
index d4fa72651..119a68821 100644
--- a/yocto-poky/meta/lib/oe/recipeutils.py
+++ b/yocto-poky/meta/lib/oe/recipeutils.py
@@ -31,9 +31,13 @@ def pn_to_recipe(cooker, pn):
import bb.providers
if pn in cooker.recipecache.pkg_pn:
- filenames = cooker.recipecache.pkg_pn[pn]
best = bb.providers.findBestProvider(pn, cooker.data, cooker.recipecache, cooker.recipecache.pkg_pn)
return best[3]
+ elif pn in cooker.recipecache.providers:
+ filenames = cooker.recipecache.providers[pn]
+ eligible, foundUnique = bb.providers.filterProviders(filenames, pn, cooker.expanded_data, cooker.recipecache)
+ filename = eligible[0]
+ return filename
else:
return None
@@ -72,6 +76,8 @@ def parse_recipe_simple(cooker, pn, d, appends=True):
raise bb.providers.NoProvider('Unable to find any recipe file matching %s' % pn)
if appends:
appendfiles = cooker.collection.get_file_appends(recipefile)
+ else:
+ appendfiles = None
return parse_recipe(recipefile, appendfiles, d)
@@ -95,6 +101,63 @@ def get_var_files(fn, varlist, d):
return varfiles
+def split_var_value(value, assignment=True):
+ """
+ Split a space-separated variable's value into a list of items,
+ taking into account that some of the items might be made up of
+ expressions containing spaces that should not be split.
+ Parameters:
+ value:
+ The string value to split
+ assignment:
+ True to assume that the value represents an assignment
+ statement, False otherwise. If True, and an assignment
+ statement is passed in the first item in
+ the returned list will be the part of the assignment
+ statement up to and including the opening quote character,
+ and the last item will be the closing quote.
+ """
+ inexpr = 0
+ lastchar = None
+ out = []
+ buf = ''
+ for char in value:
+ if char == '{':
+ if lastchar == '$':
+ inexpr += 1
+ elif char == '}':
+ inexpr -= 1
+ elif assignment and char in '"\'' and inexpr == 0:
+ if buf:
+ out.append(buf)
+ out.append(char)
+ char = ''
+ buf = ''
+ elif char.isspace() and inexpr == 0:
+ char = ''
+ if buf:
+ out.append(buf)
+ buf = ''
+ buf += char
+ lastchar = char
+ if buf:
+ out.append(buf)
+
+ # Join together assignment statement and opening quote
+ outlist = out
+ if assignment:
+ assigfound = False
+ for idx, item in enumerate(out):
+ if '=' in item:
+ assigfound = True
+ if assigfound:
+ if '"' in item or "'" in item:
+ outlist = [' '.join(out[:idx+1])]
+ outlist.extend(out[idx+1:])
+ break
+ return outlist
+
+
def patch_recipe_file(fn, values, patch=False, relpath=''):
"""Update or insert variable values into a recipe file (assuming you
have already identified the exact file you want to update.)
@@ -112,7 +175,7 @@ def patch_recipe_file(fn, values, patch=False, relpath=''):
if name in nowrap_vars:
tf.write(rawtext)
elif name in list_vars:
- splitvalue = values[name].split()
+ splitvalue = split_var_value(values[name], assignment=False)
if len(splitvalue) > 1:
linesplit = ' \\\n' + (' ' * (len(name) + 4))
tf.write('%s = "%s%s"\n' % (name, linesplit.join(splitvalue), linesplit))
@@ -277,6 +340,22 @@ def copy_recipe_files(d, tgt_dir, whole_dir=False, download=True):
return remotes
+def get_recipe_local_files(d, patches=False):
+ """Get a list of local files in SRC_URI within a recipe."""
+ uris = (d.getVar('SRC_URI', True) or "").split()
+ fetch = bb.fetch2.Fetch(uris, d)
+ ret = {}
+ for uri in uris:
+ if fetch.ud[uri].type == 'file':
+ if (not patches and
+ bb.utils.exec_flat_python_func('patch_path', uri, fetch, '')):
+ continue
+ # Skip files that are referenced by absolute path
+ if not os.path.isabs(fetch.ud[uri].basepath):
+ ret[fetch.ud[uri].basepath] = fetch.localpath(uri)
+ return ret
+
+
def get_recipe_patches(d):
"""Get a list of the patches included in SRC_URI within a recipe."""
patchfiles = []
@@ -518,7 +597,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
instfunclines.append(line)
return (instfunclines, None, 4, False)
else:
- splitval = origvalue.split()
+ splitval = split_var_value(origvalue, assignment=False)
changed = False
removevar = varname
if varname in ['SRC_URI', 'SRC_URI_append%s' % appendoverride]:
@@ -673,11 +752,14 @@ def get_recipe_upstream_version(rd):
ru['type'] = 'U'
ru['datetime'] = ''
+ pv = rd.getVar('PV', True)
+
# XXX: Having no SRC_URI means there are no upstream sources, so
- # returns 1.0.
+ # returns the current recipe version, so that the upstream version check
+ # declares a match.
src_uris = rd.getVar('SRC_URI', True)
if not src_uris:
- ru['version'] = '1.0'
+ ru['version'] = pv
ru['type'] = 'M'
ru['datetime'] = datetime.now()
return ru
@@ -686,8 +768,6 @@ def get_recipe_upstream_version(rd):
src_uri = src_uris.split()[0]
uri_type, _, _, _, _, _ = decodeurl(src_uri)
- pv = rd.getVar('PV', True)
-
manual_upstream_version = rd.getVar("RECIPE_UPSTREAM_VERSION", True)
if manual_upstream_version:
# manual tracking of upstream version.
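split_var_value() exists because ${@...} inline-Python expressions may contain spaces, and a plain .split() would shred them; the parser above tracks ${ } nesting so such expressions survive as single items. For example, with assignment=False as used by patch_recipe_file() and bbappend_recipe():

    value = 'file1 ${@bb.utils.contains("FOO", "bar", "a b", "", d)} file2'
    print(split_var_value(value, assignment=False))
    # -> ['file1', '${@bb.utils.contains("FOO", "bar", "a b", "", d)}', 'file2']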
diff --git a/yocto-poky/meta/lib/oe/rootfs.py b/yocto-poky/meta/lib/oe/rootfs.py
index 3b53fce4a..18df22d9a 100644
--- a/yocto-poky/meta/lib/oe/rootfs.py
+++ b/yocto-poky/meta/lib/oe/rootfs.py
@@ -66,6 +66,7 @@ class Rootfs(object):
m = r.search(line)
if m:
found_error = 1
+ bb.warn('[log_check] In line: [%s]' % line)
bb.warn('[log_check] %s: found an error message in the logfile (keyword \'%s\'):\n[log_check] %s'
% (self.d.getVar('PN', True), m.group(), line))
@@ -278,6 +279,7 @@ class Rootfs(object):
bb.note("Running intercept scripts:")
os.environ['D'] = self.image_rootfs
+ os.environ['STAGING_DIR_NATIVE'] = self.d.getVar('STAGING_DIR_NATIVE', True)
for script in os.listdir(intercepts_dir):
script_full = os.path.join(intercepts_dir, script)
@@ -595,7 +597,11 @@ class DpkgOpkgRootfs(Rootfs):
pkg_list = []
- pkgs = self._get_pkgs_postinsts(status_file)
+ pkgs = None
+ if not self.d.getVar('PACKAGE_INSTALL', True).strip():
+ bb.note("Building empty image")
+ else:
+ pkgs = self._get_pkgs_postinsts(status_file)
if pkgs:
root = "__packagegroup_postinst__"
pkgs[root] = pkgs.keys()
diff --git a/yocto-poky/meta/lib/oe/sdk.py b/yocto-poky/meta/lib/oe/sdk.py
index 53da0f01a..3103f4889 100644
--- a/yocto-poky/meta/lib/oe/sdk.py
+++ b/yocto-poky/meta/lib/oe/sdk.py
@@ -5,6 +5,7 @@ from oe.package_manager import *
import os
import shutil
import glob
+import traceback
class Sdk(object):
@@ -25,7 +26,7 @@ class Sdk(object):
else:
self.manifest_dir = manifest_dir
- bb.utils.remove(self.sdk_output, True)
+ self.remove(self.sdk_output, True)
self.install_order = Manifest.INSTALL_ORDER
@@ -34,29 +35,56 @@ class Sdk(object):
pass
def populate(self):
- bb.utils.mkdirhier(self.sdk_output)
+ self.mkdirhier(self.sdk_output)
# call backend dependent implementation
self._populate()
# Don't ship any libGL in the SDK
- bb.utils.remove(os.path.join(self.sdk_output, self.sdk_native_path,
- self.d.getVar('libdir_nativesdk', True).strip('/'),
- "libGL*"))
+ self.remove(os.path.join(self.sdk_output, self.sdk_native_path,
+ self.d.getVar('libdir_nativesdk', True).strip('/'),
+ "libGL*"))
# Fix or remove broken .la files
- bb.utils.remove(os.path.join(self.sdk_output, self.sdk_native_path,
- self.d.getVar('libdir_nativesdk', True).strip('/'),
- "*.la"))
+ self.remove(os.path.join(self.sdk_output, self.sdk_native_path,
+ self.d.getVar('libdir_nativesdk', True).strip('/'),
+ "*.la"))
# Link the ld.so.cache file into the hosts filesystem
link_name = os.path.join(self.sdk_output, self.sdk_native_path,
self.sysconfdir, "ld.so.cache")
- bb.utils.mkdirhier(os.path.dirname(link_name))
+ self.mkdirhier(os.path.dirname(link_name))
os.symlink("/etc/ld.so.cache", link_name)
execute_pre_post_process(self.d, self.d.getVar('SDK_POSTPROCESS_COMMAND', True))
+ def movefile(self, sourcefile, destdir):
+ try:
+ # FIXME: this check of movefile's return code to None should be
+ # fixed within the function to use only exceptions to signal when
+ # something goes wrong
+ if (bb.utils.movefile(sourcefile, destdir) == None):
+ raise OSError("moving %s to %s failed"
+ %(sourcefile, destdir))
+ #FIXME: using umbrella exc catching because bb.utils method raises it
+ except Exception as e:
+ bb.debug(1, "printing the stack trace\n %s" %traceback.format_exc())
+ bb.error("unable to place %s in final SDK location" % sourcefile)
+
+ def mkdirhier(self, dirpath):
+ try:
+ bb.utils.mkdirhier(dirpath)
+ except OSError as e:
+ bb.debug(1, "printing the stack trace\n %s" %traceback.format_exc())
+ bb.fatal("cannot make dir for SDK: %s" % dirpath)
+
+ def remove(self, path, recurse=False):
+ try:
+ bb.utils.remove(path, recurse)
+ #FIXME: using umbrella exc catching because bb.utils method raises it
+ except Exception as e:
+ bb.debug(1, "printing the stack trace\n %s" %traceback.format_exc())
+ bb.warn("cannot remove SDK dir: %s" % path)
class RpmSdk(Sdk):
def __init__(self, d, manifest_dir=None):
@@ -143,15 +171,15 @@ class RpmSdk(Sdk):
"lib",
"rpm"
)
- bb.utils.mkdirhier(native_rpm_state_dir)
+ self.mkdirhier(native_rpm_state_dir)
for f in glob.glob(os.path.join(self.sdk_output,
"var",
"lib",
"rpm",
"*")):
- bb.utils.movefile(f, native_rpm_state_dir)
+ self.movefile(f, native_rpm_state_dir)
- bb.utils.remove(os.path.join(self.sdk_output, "var"), True)
+ self.remove(os.path.join(self.sdk_output, "var"), True)
# Move host sysconfig data
native_sysconf_dir = os.path.join(self.sdk_output,
@@ -159,10 +187,10 @@ class RpmSdk(Sdk):
self.d.getVar('sysconfdir',
True).strip('/'),
)
- bb.utils.mkdirhier(native_sysconf_dir)
+ self.mkdirhier(native_sysconf_dir)
for f in glob.glob(os.path.join(self.sdk_output, "etc", "*")):
- bb.utils.movefile(f, native_sysconf_dir)
- bb.utils.remove(os.path.join(self.sdk_output, "etc"), True)
+ self.movefile(f, native_sysconf_dir)
+ self.remove(os.path.join(self.sdk_output, "etc"), True)
class OpkgSdk(Sdk):
@@ -219,12 +247,12 @@ class OpkgSdk(Sdk):
target_sysconfdir = os.path.join(self.sdk_target_sysroot, self.sysconfdir)
host_sysconfdir = os.path.join(self.sdk_host_sysroot, self.sysconfdir)
- bb.utils.mkdirhier(target_sysconfdir)
+ self.mkdirhier(target_sysconfdir)
shutil.copy(self.target_conf, target_sysconfdir)
os.chmod(os.path.join(target_sysconfdir,
os.path.basename(self.target_conf)), 0644)
- bb.utils.mkdirhier(host_sysconfdir)
+ self.mkdirhier(host_sysconfdir)
shutil.copy(self.host_conf, host_sysconfdir)
os.chmod(os.path.join(host_sysconfdir,
os.path.basename(self.host_conf)), 0644)
@@ -232,11 +260,11 @@ class OpkgSdk(Sdk):
native_opkg_state_dir = os.path.join(self.sdk_output, self.sdk_native_path,
self.d.getVar('localstatedir_nativesdk', True).strip('/'),
"lib", "opkg")
- bb.utils.mkdirhier(native_opkg_state_dir)
+ self.mkdirhier(native_opkg_state_dir)
for f in glob.glob(os.path.join(self.sdk_output, "var", "lib", "opkg", "*")):
- bb.utils.movefile(f, native_opkg_state_dir)
+ self.movefile(f, native_opkg_state_dir)
- bb.utils.remove(os.path.join(self.sdk_output, "var"), True)
+ self.remove(os.path.join(self.sdk_output, "var"), True)
class DpkgSdk(Sdk):
@@ -264,7 +292,7 @@ class DpkgSdk(Sdk):
def _copy_apt_dir_to(self, dst_dir):
staging_etcdir_native = self.d.getVar("STAGING_ETCDIR_NATIVE", True)
- bb.utils.remove(dst_dir, True)
+ self.remove(dst_dir, True)
shutil.copytree(os.path.join(staging_etcdir_native, "apt"), dst_dir)
@@ -306,11 +334,11 @@ class DpkgSdk(Sdk):
native_dpkg_state_dir = os.path.join(self.sdk_output, self.sdk_native_path,
"var", "lib", "dpkg")
- bb.utils.mkdirhier(native_dpkg_state_dir)
+ self.mkdirhier(native_dpkg_state_dir)
for f in glob.glob(os.path.join(self.sdk_output, "var", "lib", "dpkg", "*")):
- bb.utils.movefile(f, native_dpkg_state_dir)
+ self.movefile(f, native_dpkg_state_dir)
+ self.remove(os.path.join(self.sdk_output, "var"), True)
- bb.utils.remove(os.path.join(self.sdk_output, "var"), True)
def sdk_list_installed_packages(d, target, format=None, rootfs_dir=None):
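The motivation for the movefile()/mkdirhier()/remove() wrappers is that bb.utils.movefile() signals failure by returning None rather than raising, while the other helpers raise exceptions that previously escaped as raw tracebacks. A sketch of the check the old call sites never made (paths below are made up):

    result = bb.utils.movefile('/sdk/var/lib/rpm/Packages',
                               '/sdk/native/var/lib/rpm')
    if result is None:
        # Sdk.movefile() now performs this check and reports via bb.error(),
        # after dumping the stack trace at debug level.
        bb.error("unable to place /sdk/var/lib/rpm/Packages in final SDK location")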
diff --git a/yocto-poky/meta/lib/oe/sstatesig.py b/yocto-poky/meta/lib/oe/sstatesig.py
index cb46712ee..6d1be3e37 100644
--- a/yocto-poky/meta/lib/oe/sstatesig.py
+++ b/yocto-poky/meta/lib/oe/sstatesig.py
@@ -94,6 +94,26 @@ class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash):
self.machine = data.getVar("MACHINE", True)
self.mismatch_msgs = []
pass
+
+ def tasks_resolved(self, virtmap, virtpnmap, dataCache):
+ # Translate virtual/xxx entries to PN values
+ newabisafe = []
+ for a in self.abisaferecipes:
+ if a in virtpnmap:
+ newabisafe.append(virtpnmap[a])
+ else:
+ newabisafe.append(a)
+ self.abisaferecipes = newabisafe
+ newsafedeps = []
+ for a in self.saferecipedeps:
+ a1, a2 = a.split("->")
+ if a1 in virtpnmap:
+ a1 = virtpnmap[a1]
+ if a2 in virtpnmap:
+ a2 = virtpnmap[a2]
+ newsafedeps.append(a1 + "->" + a2)
+ self.saferecipedeps = newsafedeps
+
def rundep_check(self, fn, recipename, task, dep, depname, dataCache = None):
return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCache)
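The virtual/* translation in tasks_resolved() is plain string mapping over the "a->b" dependency pairs. A standalone illustration with a made-up virtpnmap (the real map comes from bitbake's provider resolution):

    virtpnmap = {'virtual/kernel': 'linux-yocto'}
    saferecipedeps = ['virtual/kernel->pkg-a', 'glibc->pkg-b']

    newsafedeps = []
    for dep in saferecipedeps:
        a1, a2 = dep.split("->")
        newsafedeps.append(virtpnmap.get(a1, a1) + "->" + virtpnmap.get(a2, a2))

    print(newsafedeps)
    # -> ['linux-yocto->pkg-a', 'glibc->pkg-b']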