path: root/import-layers/yocto-poky/meta/classes/base.bbclass
author     Brad Bishop <bradleyb@fuzziesquirrel.com>  2018-02-01 10:27:11 -0500
committer  Brad Bishop <bradleyb@fuzziesquirrel.com>  2018-03-12 22:51:39 -0400
commit     6e60e8b2b2bab889379b380a28a167a0edd9d1d3 (patch)
tree       f12f54d5ba8e74e67e5fad3651a1e125bb8f4191 /import-layers/yocto-poky/meta/classes/base.bbclass
parent     509842add85b53e13164c1569a1fd43d5b8d91c5 (diff)
download   talos-openbmc-6e60e8b2b2bab889379b380a28a167a0edd9d1d3.tar.gz
download   talos-openbmc-6e60e8b2b2bab889379b380a28a167a0edd9d1d3.zip
Yocto 2.3
Move OpenBMC to Yocto 2.3 (pyro).

Tested: Built and verified Witherspoon and Palmetto images
Change-Id: I50744030e771f4850afc2a93a10d3507e76d36bc
Signed-off-by: Brad Bishop <bradleyb@fuzziesquirrel.com>
Resolves: openbmc/openbmc#2461
Diffstat (limited to 'import-layers/yocto-poky/meta/classes/base.bbclass')
-rw-r--r--  import-layers/yocto-poky/meta/classes/base.bbclass | 155
1 file changed, 89 insertions(+), 66 deletions(-)
diff --git a/import-layers/yocto-poky/meta/classes/base.bbclass b/import-layers/yocto-poky/meta/classes/base.bbclass
index 024fe4331..d95afb7b9 100644
--- a/import-layers/yocto-poky/meta/classes/base.bbclass
+++ b/import-layers/yocto-poky/meta/classes/base.bbclass
@@ -10,13 +10,13 @@ inherit utility-tasks
inherit metadata_scm
inherit logging
-OE_IMPORTS += "os sys time oe.path oe.utils oe.types oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath"
+OE_IMPORTS += "os sys time oe.path oe.utils oe.types oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath oe.license"
OE_IMPORTS[type] = "list"
def oe_import(d):
import sys
- bbpath = d.getVar("BBPATH", True).split(":")
+ bbpath = d.getVar("BBPATH").split(":")
sys.path[0:0] = [os.path.join(dir, "lib") for dir in bbpath]
def inject(name, value):
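The recurring change throughout this diff is the BitBake API update that came with Yocto 2.3: getVar() (and getVarFlag()) now default to expand=True, so the explicit True argument is dropped file-wide. A minimal sketch of the equivalence, using the datastore d as in the function above:

    # Both spellings return the same expanded string under Yocto 2.3's BitBake;
    # the second argument is simply no longer required.
    bbpath_old = d.getVar("BBPATH", True)   # pre-2.3 spelling
    bbpath_new = d.getVar("BBPATH")         # 2.3 spelling, expand=True by default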
@@ -37,7 +37,7 @@ def oe_import(d):
OE_IMPORTED := "${@oe_import(d)}"
def lsb_distro_identifier(d):
- adjust = d.getVar('LSB_DISTRO_ADJUST', True)
+ adjust = d.getVar('LSB_DISTRO_ADJUST')
adjust_func = None
if adjust:
try:
@@ -72,7 +72,7 @@ def base_dep_prepend(d):
# we need that built is the responsibility of the patch function / class, not
# the application.
if not d.getVar('INHIBIT_DEFAULT_DEPS', False):
- if (d.getVar('HOST_SYS', True) != d.getVar('BUILD_SYS', True)):
+ if (d.getVar('HOST_SYS') != d.getVar('BUILD_SYS')):
deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc "
return deps
@@ -83,11 +83,11 @@ DEPENDS_prepend="${BASEDEPENDS} "
FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
# THISDIR only works properly with immediate expansion as it has to run
# in the context of the location it's used (:=)
-THISDIR = "${@os.path.dirname(d.getVar('FILE', True))}"
+THISDIR = "${@os.path.dirname(d.getVar('FILE'))}"
def extra_path_elements(d):
path = ""
- elements = (d.getVar('EXTRANATIVEPATH', True) or "").split()
+ elements = (d.getVar('EXTRANATIVEPATH') or "").split()
for e in elements:
path = path + "${STAGING_BINDIR_NATIVE}/" + e + ":"
return path
@@ -96,8 +96,11 @@ PATH_prepend = "${@extra_path_elements(d)}"
def get_lic_checksum_file_list(d):
filelist = []
- lic_files = d.getVar("LIC_FILES_CHKSUM", True) or ''
- tmpdir = d.getVar("TMPDIR", True)
+ lic_files = d.getVar("LIC_FILES_CHKSUM") or ''
+ tmpdir = d.getVar("TMPDIR")
+ s = d.getVar("S")
+ b = d.getVar("B")
+ workdir = d.getVar("WORKDIR")
urls = lic_files.split()
for url in urls:
@@ -109,13 +112,32 @@ def get_lic_checksum_file_list(d):
raise bb.fetch.MalformedUrl(url)
if path[0] == '/':
- if path.startswith(tmpdir):
+ if path.startswith((tmpdir, s, b, workdir)):
continue
filelist.append(path + ":" + str(os.path.exists(path)))
except bb.fetch.MalformedUrl:
- bb.fatal(d.getVar('PN', True) + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
+ bb.fatal(d.getVar('PN') + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
return " ".join(filelist)
+def setup_hosttools_dir(dest, toolsvar, d, fatal=True):
+ tools = d.getVar(toolsvar).split()
+ origbbenv = d.getVar("BB_ORIGENV", False)
+ path = origbbenv.getVar("PATH")
+ bb.utils.mkdirhier(dest)
+ notfound = []
+ for tool in tools:
+ desttool = os.path.join(dest, tool)
+ if not os.path.exists(desttool):
+ srctool = bb.utils.which(path, tool, executable=True)
+ if "ccache" in srctool:
+ srctool = bb.utils.which(path, tool, executable=True, direction=1)
+ if srctool:
+ os.symlink(srctool, desttool)
+ else:
+ notfound.append(tool)
+ if notfound and fatal:
+ bb.fatal("The following required tools (as specified by HOSTTOOLS) appear to be unavailable in PATH, please install them in order to proceed:\n %s" % " ".join(notfound))
+
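setup_hosttools_dir(), added above, builds a directory of symlinks to host tools found in the original environment's PATH (BB_ORIGENV); when the first match is a ccache wrapper it re-searches PATH from the far end (direction=1) to reach the real binary. A standalone sketch of the same symlink-farm idea in plain Python — link_host_tools is an illustrative name, not part of the class:

    import os
    import shutil

    def link_host_tools(dest, tools, path):
        """Symlink each tool found on 'path' into 'dest'; return any misses."""
        os.makedirs(dest, exist_ok=True)
        missing = []
        for tool in tools:
            target = os.path.join(dest, tool)
            if os.path.exists(target):
                continue  # already linked on a previous run
            src = shutil.which(tool, path=path)
            if src:
                os.symlink(src, target)
            else:
                missing.append(tool)
        return missing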
addtask fetch
do_fetch[dirs] = "${DL_DIR}"
do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
@@ -123,7 +145,7 @@ do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
do_fetch[vardeps] += "SRCREV"
python base_do_fetch() {
- src_uri = (d.getVar('SRC_URI', True) or "").split()
+ src_uri = (d.getVar('SRC_URI') or "").split()
if len(src_uri) == 0:
return
@@ -138,31 +160,31 @@ addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"
python () {
- if d.getVar('S', True) != d.getVar('WORKDIR', True):
+ if d.getVar('S') != d.getVar('WORKDIR'):
d.setVarFlag('do_unpack', 'cleandirs', '${S}')
else:
d.setVarFlag('do_unpack', 'cleandirs', os.path.join('${S}', 'patches'))
}
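The anonymous Python block above picks the do_unpack cleandirs carefully: wiping ${S} is only safe when it is a separate directory, so when S equals WORKDIR only ${S}/patches is cleaned. Hypothetical recipe settings showing the two cases:

    S = "${WORKDIR}/${BP}"   # typical: do_unpack[cleandirs] = ${S}
    S = "${WORKDIR}"         # in-place unpack: only ${S}/patches is cleaned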
python base_do_unpack() {
- src_uri = (d.getVar('SRC_URI', True) or "").split()
+ src_uri = (d.getVar('SRC_URI') or "").split()
if len(src_uri) == 0:
return
try:
fetcher = bb.fetch2.Fetch(src_uri, d)
- fetcher.unpack(d.getVar('WORKDIR', True))
+ fetcher.unpack(d.getVar('WORKDIR'))
except bb.fetch2.BBFetchException as e:
bb.fatal(str(e))
}
def pkgarch_mapping(d):
# Compatibility mappings of TUNE_PKGARCH (opt in)
- if d.getVar("PKGARCHCOMPAT_ARMV7A", True):
- if d.getVar("TUNE_PKGARCH", True) == "armv7a-vfp-neon":
+ if d.getVar("PKGARCHCOMPAT_ARMV7A"):
+ if d.getVar("TUNE_PKGARCH") == "armv7a-vfp-neon":
d.setVar("TUNE_PKGARCH", "armv7a")
def get_layers_branch_rev(d):
- layers = (d.getVar("BBLAYERS", True) or "").split()
+ layers = (d.getVar("BBLAYERS") or "").split()
layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \
base_get_metadata_git_branch(i, None).strip(), \
base_get_metadata_git_revision(i, None)) \
@@ -189,7 +211,7 @@ BUILDCFG_FUNCS[type] = "list"
def buildcfg_vars(d):
statusvars = oe.data.typed_value('BUILDCFG_VARS', d)
for var in statusvars:
- value = d.getVar(var, True)
+ value = d.getVar(var)
if value is not None:
yield '%-17s = "%s"' % (var, value)
@@ -197,7 +219,7 @@ def buildcfg_neededvars(d):
needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
pesteruser = []
for v in needed_vars:
- val = d.getVar(v, True)
+ val = d.getVar(v)
if not val or val == 'INVALID':
pesteruser.append(v)
@@ -216,10 +238,12 @@ python base_eventhandler() {
pkgarch_mapping(e.data)
oe.utils.features_backfill("DISTRO_FEATURES", e.data)
oe.utils.features_backfill("MACHINE_FEATURES", e.data)
+ # Works with the line in layer.conf which changes PATH to point here
+ setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS', d)
+ setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS_NONFATAL', d, fatal=False)
if isinstance(e, bb.event.BuildStarted):
localdata = bb.data.createCopy(e.data)
- bb.data.update_data(localdata)
statuslines = []
for func in oe.data.typed_value('BUILDCFG_FUNCS', localdata):
g = globals()
@@ -230,7 +254,7 @@ python base_eventhandler() {
if flines:
statuslines.extend(flines)
- statusheader = e.data.getVar('BUILDCFG_HEADER', True)
+ statusheader = e.data.getVar('BUILDCFG_HEADER')
if statusheader:
bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))
@@ -238,7 +262,7 @@ python base_eventhandler() {
# target ones and we'd see duplicate key names overwriting each other
# for various PREFERRED_PROVIDERS
if isinstance(e, bb.event.RecipePreFinalise):
- if e.data.getVar("TARGET_PREFIX", True) == e.data.getVar("SDK_PREFIX", True):
+ if e.data.getVar("TARGET_PREFIX") == e.data.getVar("SDK_PREFIX"):
e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils")
e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc-initial")
e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc")
@@ -264,14 +288,14 @@ python base_eventhandler() {
# sysroot since they're now "unreachable". This makes switching virtual/kernel work in
# particular.
#
- pn = d.getVar('PN', True)
+ pn = d.getVar('PN')
source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
if not source_mirror_fetch:
- provs = (d.getVar("PROVIDES", True) or "").split()
- multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST", True) or "").split()
+ provs = (d.getVar("PROVIDES") or "").split()
+ multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST") or "").split()
for p in provs:
if p.startswith("virtual/") and p not in multiwhitelist:
- profprov = d.getVar("PREFERRED_PROVIDER_" + p, True)
+ profprov = d.getVar("PREFERRED_PROVIDER_" + p)
if profprov and pn != profprov:
raise bb.parse.SkipPackage("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn))
}
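The block above skips any recipe that provides a virtual/* target without being its preferred provider; a hypothetical configuration illustrating the effect:

    PREFERRED_PROVIDER_virtual/kernel = "linux-yocto"
    # Any other recipe with PROVIDES += "virtual/kernel" is skipped at parse
    # time unless virtual/kernel is in MULTI_PROVIDER_WHITELIST.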
@@ -281,7 +305,7 @@ CLEANBROKEN = "0"
addtask configure after do_patch
do_configure[dirs] = "${B}"
-do_configure[deptask] = "do_populate_sysroot"
+do_prepare_recipe_sysroot[deptask] = "do_populate_sysroot"
base_do_configure() {
if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then
if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" ]; then
@@ -333,9 +357,9 @@ def set_packagetriplet(d):
tos = []
tvs = []
- archs.append(d.getVar("PACKAGE_ARCHS", True).split())
- tos.append(d.getVar("TARGET_OS", True))
- tvs.append(d.getVar("TARGET_VENDOR", True))
+ archs.append(d.getVar("PACKAGE_ARCHS").split())
+ tos.append(d.getVar("TARGET_OS"))
+ tvs.append(d.getVar("TARGET_VENDOR"))
def settriplet(d, varname, archs, tos, tvs):
triplets = []
@@ -347,16 +371,15 @@ def set_packagetriplet(d):
settriplet(d, "PKGTRIPLETS", archs, tos, tvs)
- variants = d.getVar("MULTILIB_VARIANTS", True) or ""
+ variants = d.getVar("MULTILIB_VARIANTS") or ""
for item in variants.split():
localdata = bb.data.createCopy(d)
overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
localdata.setVar("OVERRIDES", overrides)
- bb.data.update_data(localdata)
- archs.append(localdata.getVar("PACKAGE_ARCHS", True).split())
- tos.append(localdata.getVar("TARGET_OS", True))
- tvs.append(localdata.getVar("TARGET_VENDOR", True))
+ archs.append(localdata.getVar("PACKAGE_ARCHS").split())
+ tos.append(localdata.getVar("TARGET_OS"))
+ tvs.append(localdata.getVar("TARGET_VENDOR"))
settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs)
@@ -371,10 +394,10 @@ python () {
# PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends"
pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
if pkgconfigflags:
- pkgconfig = (d.getVar('PACKAGECONFIG', True) or "").split()
- pn = d.getVar("PN", True)
+ pkgconfig = (d.getVar('PACKAGECONFIG') or "").split()
+ pn = d.getVar("PN")
- mlprefix = d.getVar("MLPREFIX", True)
+ mlprefix = d.getVar("MLPREFIX")
def expandFilter(appends, extension, prefix):
appends = bb.utils.explode_deps(d.expand(" ".join(appends)))
@@ -416,7 +439,7 @@ python () {
num = len(items)
if num > 4:
bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend can be specified!"
- % (d.getVar('PN', True), flag))
+ % (d.getVar('PN'), flag))
if flag in pkgconfig:
if num >= 3 and items[2]:
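The loop above parses up to four comma-separated fields per PACKAGECONFIG flag, in the format documented in the comment at the top of this block. A hypothetical recipe fragment in that format:

    PACKAGECONFIG ??= "foo"
    PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,libfoo,foo-runtime"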
@@ -431,8 +454,8 @@ python () {
appendVar('RDEPENDS_${PN}', extrardeps)
appendVar('PACKAGECONFIG_CONFARGS', extraconf)
- pn = d.getVar('PN', True)
- license = d.getVar('LICENSE', True)
+ pn = d.getVar('PN')
+ license = d.getVar('LICENSE')
if license == "INVALID":
bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)
@@ -462,26 +485,26 @@ python () {
d.setVarFlag('do_devshell', 'fakeroot', '1')
d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
- need_machine = d.getVar('COMPATIBLE_MACHINE', True)
+ need_machine = d.getVar('COMPATIBLE_MACHINE')
if need_machine:
import re
- compat_machines = (d.getVar('MACHINEOVERRIDES', True) or "").split(":")
+ compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":")
for m in compat_machines:
if re.match(need_machine, m):
break
else:
- raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE', True))
+ raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE'))
source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
if not source_mirror_fetch:
- need_host = d.getVar('COMPATIBLE_HOST', True)
+ need_host = d.getVar('COMPATIBLE_HOST')
if need_host:
import re
- this_host = d.getVar('HOST_SYS', True)
+ this_host = d.getVar('HOST_SYS')
if not re.match(need_host, this_host):
raise bb.parse.SkipPackage("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)
- bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE', True) or "").split()
+ bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()
check_license = False if pn.startswith("nativesdk-") else True
for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}",
@@ -500,21 +523,21 @@ python () {
for lic in bad_licenses:
spdx_license = return_spdx(d, lic)
for w in ["LGPLv2_WHITELIST_", "WHITELIST_"]:
- whitelist.extend((d.getVar(w + lic, True) or "").split())
+ whitelist.extend((d.getVar(w + lic) or "").split())
if spdx_license:
- whitelist.extend((d.getVar(w + spdx_license, True) or "").split())
+ whitelist.extend((d.getVar(w + spdx_license) or "").split())
'''
We need to track what we are whitelisting and why. If pn is
incompatible we need to be able to note that the image that
is created may in fact contain incompatible licenses despite
INCOMPATIBLE_LICENSE being set.
'''
- incompatwl.extend((d.getVar(w + lic, True) or "").split())
+ incompatwl.extend((d.getVar(w + lic) or "").split())
if spdx_license:
- incompatwl.extend((d.getVar(w + spdx_license, True) or "").split())
+ incompatwl.extend((d.getVar(w + spdx_license) or "").split())
if not pn in whitelist:
- pkgs = d.getVar('PACKAGES', True).split()
+ pkgs = d.getVar('PACKAGES').split()
skipped_pkgs = []
unskipped_pkgs = []
for pkg in pkgs:
@@ -526,13 +549,13 @@ python () {
if unskipped_pkgs:
for pkg in skipped_pkgs:
bb.debug(1, "SKIPPING the package " + pkg + " at do_rootfs because it's " + license)
- mlprefix = d.getVar('MLPREFIX', True)
+ mlprefix = d.getVar('MLPREFIX')
d.setVar('LICENSE_EXCLUSION-' + mlprefix + pkg, 1)
for pkg in unskipped_pkgs:
bb.debug(1, "INCLUDING the package " + pkg)
elif all_skipped or incompatible_license(d, bad_licenses):
bb.debug(1, "SKIPPING recipe %s because it's %s" % (pn, license))
- raise bb.parse.SkipPackage("incompatible with license %s" % license)
+ raise bb.parse.SkipPackage("it has an incompatible license: %s" % license)
elif pn in whitelist:
if pn in incompatwl:
bb.note("INCLUDING " + pn + " as buildable despite INCOMPATIBLE_LICENSE because it has been whitelisted")
@@ -542,8 +565,8 @@ python () {
# matching of license expressions - just check that all license strings
# in LICENSE_<pkg> are found in LICENSE.
license_set = oe.license.list_licenses(license)
- for pkg in d.getVar('PACKAGES', True).split():
- pkg_license = d.getVar('LICENSE_' + pkg, True)
+ for pkg in d.getVar('PACKAGES').split():
+ pkg_license = d.getVar('LICENSE_' + pkg)
if pkg_license:
unlisted = oe.license.list_licenses(pkg_license) - license_set
if unlisted:
@@ -551,7 +574,7 @@ python () {
"listed in LICENSE" % (pkg, ' '.join(unlisted)))
needsrcrev = False
- srcuri = d.getVar('SRC_URI', True)
+ srcuri = d.getVar('SRC_URI')
for uri in srcuri.split():
(scheme, _ , path) = bb.fetch.decodeurl(uri)[:3]
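bb.fetch.decodeurl() splits a URI into (type, host, path, user, password, params); only the first three fields are used here. Illustrative values for a typical entry:

    scheme, _, path = bb.fetch.decodeurl("git://example.com/repo.git;branch=master")[:3]
    # scheme == "git", path == "/repo.git"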
@@ -611,8 +634,8 @@ python () {
set_packagetriplet(d)
# 'multimachine' handling
- mach_arch = d.getVar('MACHINE_ARCH', True)
- pkg_arch = d.getVar('PACKAGE_ARCH', True)
+ mach_arch = d.getVar('MACHINE_ARCH')
+ pkg_arch = d.getVar('PACKAGE_ARCH')
if (pkg_arch == mach_arch):
# Already machine specific - nothing further to do
@@ -622,11 +645,11 @@ python () {
# We always try to scan SRC_URI for urls with machine overrides
# unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
#
- override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', True)
+ override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH')
if override != '0':
paths = []
- fpaths = (d.getVar('FILESPATH', True) or '').split(':')
- machine = d.getVar('MACHINE', True)
+ fpaths = (d.getVar('FILESPATH') or '').split(':')
+ machine = d.getVar('MACHINE')
for p in fpaths:
if os.path.basename(p) == machine and os.path.isdir(p):
paths.append(p)
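The scan above makes a recipe machine-specific when any FILESPATH directory is named after the current MACHINE; a hypothetical layout that triggers it for MACHINE = "qemux86":

    recipes-example/foo/foo/qemux86/fix.patch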
@@ -643,16 +666,16 @@ python () {
d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
return
- packages = d.getVar('PACKAGES', True).split()
+ packages = d.getVar('PACKAGES').split()
for pkg in packages:
- pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg, True)
+ pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg)
# We could look for != PACKAGE_ARCH here but how to choose
# if multiple differences are present?
# Look through PACKAGE_ARCHS for the priority order?
if pkgarch and pkgarch == mach_arch:
d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
- bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN", True))
+ bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN"))
}
addtask cleansstate after do_clean
@@ -663,7 +686,7 @@ addtask cleanall after do_cleansstate
do_cleansstate[nostamp] = "1"
python do_cleanall() {
- src_uri = (d.getVar('SRC_URI', True) or "").split()
+ src_uri = (d.getVar('SRC_URI') or "").split()
if len(src_uri) == 0:
return