path: root/import-layers/yocto-poky/scripts/lib/devtool
author     Brad Bishop <bradleyb@fuzziesquirrel.com>  2018-02-01 10:27:11 -0500
committer  Brad Bishop <bradleyb@fuzziesquirrel.com>  2018-03-12 22:51:39 -0400
commit     6e60e8b2b2bab889379b380a28a167a0edd9d1d3 (patch)
tree       f12f54d5ba8e74e67e5fad3651a1e125bb8f4191 /import-layers/yocto-poky/scripts/lib/devtool
parent     509842add85b53e13164c1569a1fd43d5b8d91c5 (diff)
download   talos-openbmc-6e60e8b2b2bab889379b380a28a167a0edd9d1d3.tar.gz
           talos-openbmc-6e60e8b2b2bab889379b380a28a167a0edd9d1d3.zip
Yocto 2.3
Move OpenBMC to Yocto 2.3 (pyro).

Tested: Built and verified Witherspoon and Palmetto images

Change-Id: I50744030e771f4850afc2a93a10d3507e76d36bc
Signed-off-by: Brad Bishop <bradleyb@fuzziesquirrel.com>
Resolves: openbmc/openbmc#2461
Diffstat (limited to 'import-layers/yocto-poky/scripts/lib/devtool')
-rw-r--r--  import-layers/yocto-poky/scripts/lib/devtool/__init__.py    |  78
-rw-r--r--  import-layers/yocto-poky/scripts/lib/devtool/build.py       |   2
-rw-r--r--  import-layers/yocto-poky/scripts/lib/devtool/build_image.py |  10
-rw-r--r--  import-layers/yocto-poky/scripts/lib/devtool/deploy.py      |  30
-rw-r--r--  import-layers/yocto-poky/scripts/lib/devtool/package.py     |   8
-rw-r--r--  import-layers/yocto-poky/scripts/lib/devtool/runqemu.py     |   6
-rw-r--r--  import-layers/yocto-poky/scripts/lib/devtool/sdk.py         |  14
-rw-r--r--  import-layers/yocto-poky/scripts/lib/devtool/search.py      |   4
-rw-r--r--  import-layers/yocto-poky/scripts/lib/devtool/standard.py    | 570
-rw-r--r--  import-layers/yocto-poky/scripts/lib/devtool/upgrade.py     |  30
-rw-r--r--  import-layers/yocto-poky/scripts/lib/devtool/utilcmds.py    |  18
11 files changed, 532 insertions, 238 deletions
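Most of the mechanical churn in the hunks below comes from the BitBake datastore API change in this release: d.getVar() now expands values by default, so the explicit True argument is dropped throughout, while lookups that need the raw value keep an explicit False. A minimal before/after illustration (d stands for any BitBake datastore, and WORKDIR/SRC_URI are just example variables):

    # Yocto 2.2 (morty) and earlier: expansion had to be requested explicitly
    workdir = d.getVar('WORKDIR', True)

    # Yocto 2.3 (pyro): expand defaults to True, so the flag is simply dropped
    workdir = d.getVar('WORKDIR')

    # Unexpanded access still passes False explicitly, as several hunks below do
    src_uri_raw = d.getVar('SRC_URI', False)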
diff --git a/import-layers/yocto-poky/scripts/lib/devtool/__init__.py b/import-layers/yocto-poky/scripts/lib/devtool/__init__.py
index e675133f6..d646b0cf6 100644
--- a/import-layers/yocto-poky/scripts/lib/devtool/__init__.py
+++ b/import-layers/yocto-poky/scripts/lib/devtool/__init__.py
@@ -23,6 +23,7 @@ import sys
import subprocess
import logging
import re
+import codecs
logger = logging.getLogger('devtool')
@@ -67,10 +68,10 @@ def exec_watch(cmd, **options):
cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **options
)
+ reader = codecs.getreader('utf-8')(process.stdout)
buf = ''
while True:
- out = process.stdout.read(1)
- out = out.decode('utf-8')
+ out = reader.read(1, 1)
if out:
sys.stdout.write(out)
sys.stdout.flush()
@@ -86,13 +87,13 @@ def exec_watch(cmd, **options):
def exec_fakeroot(d, cmd, **kwargs):
"""Run a command under fakeroot (pseudo, in fact) so that it picks up the appropriate file permissions"""
# Grab the command and check it actually exists
- fakerootcmd = d.getVar('FAKEROOTCMD', True)
+ fakerootcmd = d.getVar('FAKEROOTCMD')
if not os.path.exists(fakerootcmd):
logger.error('pseudo executable %s could not be found - have you run a build yet? pseudo-native should install this and if you have run any build then that should have been built')
return 2
# Set up the appropriate environment
newenv = dict(os.environ)
- fakerootenv = d.getVar('FAKEROOTENV', True)
+ fakerootenv = d.getVar('FAKEROOTENV')
for varvalue in fakerootenv.split():
if '=' in varvalue:
splitval = varvalue.split('=', 1)
@@ -113,40 +114,40 @@ def setup_tinfoil(config_only=False, basepath=None, tracking=False):
import bb.tinfoil
tinfoil = bb.tinfoil.Tinfoil(tracking=tracking)
- tinfoil.prepare(config_only)
- tinfoil.logger.setLevel(logger.getEffectiveLevel())
+ try:
+ tinfoil.prepare(config_only)
+ tinfoil.logger.setLevel(logger.getEffectiveLevel())
+ except bb.tinfoil.TinfoilUIException:
+ tinfoil.shutdown()
+ raise DevtoolError('Failed to start bitbake environment')
+ except:
+ tinfoil.shutdown()
+ raise
finally:
os.chdir(orig_cwd)
return tinfoil
-def get_recipe_file(cooker, pn):
- """Find recipe file corresponding a package name"""
- import oe.recipeutils
- recipefile = oe.recipeutils.pn_to_recipe(cooker, pn)
- if not recipefile:
- skipreasons = oe.recipeutils.get_unavailable_reasons(cooker, pn)
- if skipreasons:
- logger.error('\n'.join(skipreasons))
- else:
- logger.error("Unable to find any recipe file matching %s" % pn)
- return recipefile
-
def parse_recipe(config, tinfoil, pn, appends, filter_workspace=True):
- """Parse recipe of a package"""
- import oe.recipeutils
- recipefile = get_recipe_file(tinfoil.cooker, pn)
- if not recipefile:
- # Error already logged
+ """Parse the specified recipe"""
+ try:
+ recipefile = tinfoil.get_recipe_file(pn)
+ except bb.providers.NoProvider as e:
+ logger.error(str(e))
return None
if appends:
- append_files = tinfoil.cooker.collection.get_file_appends(recipefile)
+ append_files = tinfoil.get_file_appends(recipefile)
if filter_workspace:
# Filter out appends from the workspace
append_files = [path for path in append_files if
not path.startswith(config.workspace_path)]
else:
append_files = None
- return oe.recipeutils.parse_recipe(tinfoil.cooker, recipefile, append_files)
+ try:
+ rd = tinfoil.parse_recipe_file(recipefile, appends, append_files)
+ except Exception as e:
+ logger.error(str(e))
+ return None
+ return rd
def check_workspace_recipe(workspace, pn, checksrc=True, bbclassextend=False):
"""
@@ -190,7 +191,7 @@ def use_external_build(same_dir, no_same_dir, d):
logger.info('Using source tree as build directory since --same-dir specified')
elif bb.data.inherits_class('autotools-brokensep', d):
logger.info('Using source tree as build directory since recipe inherits autotools-brokensep')
- elif d.getVar('B', True) == os.path.abspath(d.getVar('S', True)):
+ elif d.getVar('B') == os.path.abspath(d.getVar('S')):
logger.info('Using source tree as build directory since that would be the default for this recipe')
else:
b_is_s = False
@@ -260,23 +261,28 @@ def get_bbclassextend_targets(recipefile, pn):
targets.append('%s-%s' % (pn, variant))
return targets
-def ensure_npm(config, basepath, fixed_setup=False):
+def ensure_npm(config, basepath, fixed_setup=False, check_exists=True):
"""
Ensure that npm is available and either build it or show a
reasonable error message
"""
- tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
- try:
- nativepath = tinfoil.config_data.getVar('STAGING_BINDIR_NATIVE', True)
- finally:
- tinfoil.shutdown()
+ if check_exists:
+ tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
+ try:
+ rd = tinfoil.parse_recipe('nodejs-native')
+ nativepath = rd.getVar('STAGING_BINDIR_NATIVE')
+ finally:
+ tinfoil.shutdown()
+ npmpath = os.path.join(nativepath, 'npm')
+ build_npm = not os.path.exists(npmpath)
+ else:
+ build_npm = True
- npmpath = os.path.join(nativepath, 'npm')
- if not os.path.exists(npmpath):
+ if build_npm:
logger.info('Building nodejs-native')
try:
exec_build_env_command(config.init_path, basepath,
- 'bitbake -q nodejs-native', watch=True)
+ 'bitbake -q nodejs-native -c addto_recipe_sysroot', watch=True)
except bb.process.ExecutionError as e:
if "Nothing PROVIDES 'nodejs-native'" in e.stdout:
if fixed_setup:
@@ -286,5 +292,3 @@ def ensure_npm(config, basepath, fixed_setup=False):
raise DevtoolError(msg)
else:
raise
- if not os.path.exists(npmpath):
- raise DevtoolError('Built nodejs-native but npm binary still could not be found at %s' % npmpath)
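The __init__.py changes above drop the old cooker-based helpers (oe.recipeutils.pn_to_recipe() and cooker.collection.get_file_appends()) in favour of the public tinfoil methods used in the new code. A minimal sketch of the resulting pattern, assuming it is run from an initialised build environment and that 'zlib' stands in for any recipe name:

    import bb.tinfoil

    pn = 'zlib'                                        # illustrative recipe name
    tinfoil = bb.tinfoil.Tinfoil()
    try:
        tinfoil.prepare()                              # full parse, as setup_tinfoil(config_only=False) does
        recipefile = tinfoil.get_recipe_file(pn)       # may raise bb.providers.NoProvider
        appends = tinfoil.get_file_appends(recipefile) # replaces cooker.collection.get_file_appends()
        rd = tinfoil.parse_recipe_file(recipefile, True, appends)
        print('%s is at version %s' % (pn, rd.getVar('PV')))
    finally:
        tinfoil.shutdown()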
diff --git a/import-layers/yocto-poky/scripts/lib/devtool/build.py b/import-layers/yocto-poky/scripts/lib/devtool/build.py
index 6be549dd5..252379e9b 100644
--- a/import-layers/yocto-poky/scripts/lib/devtool/build.py
+++ b/import-layers/yocto-poky/scripts/lib/devtool/build.py
@@ -80,7 +80,7 @@ def register_commands(subparsers, context):
"""Register devtool subcommands from this plugin"""
parser_build = subparsers.add_parser('build', help='Build a recipe',
description='Builds the specified recipe using bitbake (up to and including %s)' % ', '.join(_get_build_tasks(context.config)),
- group='working')
+ group='working', order=50)
parser_build.add_argument('recipename', help='Recipe to build')
parser_build.add_argument('-s', '--disable-parallel-make', action="store_true", help='Disable make parallelism')
parser_build.set_defaults(func=build)
diff --git a/import-layers/yocto-poky/scripts/lib/devtool/build_image.py b/import-layers/yocto-poky/scripts/lib/devtool/build_image.py
index ae75511dc..e5810389b 100644
--- a/import-layers/yocto-poky/scripts/lib/devtool/build_image.py
+++ b/import-layers/yocto-poky/scripts/lib/devtool/build_image.py
@@ -34,8 +34,8 @@ def _get_packages(tinfoil, workspace, config):
result = []
for recipe in workspace:
data = parse_recipe(config, tinfoil, recipe, True)
- if 'class-target' in data.getVar('OVERRIDES', True).split(':'):
- if recipe in data.getVar('PACKAGES', True).split():
+ if 'class-target' in data.getVar('OVERRIDES').split(':'):
+ if recipe in data.getVar('PACKAGES').split():
result.append(recipe)
else:
logger.warning("Skipping recipe %s as it doesn't produce a "
@@ -95,7 +95,7 @@ def build_image_task(config, basepath, workspace, image, add_packages=None, task
raise TargetNotImageError()
# Get the actual filename used and strip the .bb and full path
- target_basename = rd.getVar('FILE', True)
+ target_basename = rd.getVar('FILE')
target_basename = os.path.splitext(os.path.basename(target_basename))[0]
config.set('SDK', 'target_basename', target_basename)
config.write()
@@ -132,9 +132,9 @@ def build_image_task(config, basepath, workspace, image, add_packages=None, task
afile.write('%s\n' % line)
if task in ['populate_sdk', 'populate_sdk_ext']:
- outputdir = rd.getVar('SDK_DEPLOY', True)
+ outputdir = rd.getVar('SDK_DEPLOY')
else:
- outputdir = rd.getVar('DEPLOY_DIR_IMAGE', True)
+ outputdir = rd.getVar('DEPLOY_DIR_IMAGE')
tmp_tinfoil = tinfoil
tinfoil = None
diff --git a/import-layers/yocto-poky/scripts/lib/devtool/deploy.py b/import-layers/yocto-poky/scripts/lib/devtool/deploy.py
index c4c7bf6c7..b3730ae83 100644
--- a/import-layers/yocto-poky/scripts/lib/devtool/deploy.py
+++ b/import-layers/yocto-poky/scripts/lib/devtool/deploy.py
@@ -156,11 +156,11 @@ def deploy(args, config, basepath, workspace):
tinfoil = setup_tinfoil(basepath=basepath)
try:
try:
- rd = oe.recipeutils.parse_recipe_simple(tinfoil.cooker, args.recipename, tinfoil.config_data)
+ rd = tinfoil.parse_recipe(args.recipename)
except Exception as e:
raise DevtoolError('Exception parsing recipe %s: %s' %
(args.recipename, e))
- recipe_outdir = rd.getVar('D', True)
+ recipe_outdir = rd.getVar('D')
if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir):
raise DevtoolError('No files to deploy - have you built the %s '
'recipe? If so, the install step has not installed '
@@ -192,6 +192,14 @@ def deploy(args, config, basepath, workspace):
if not args.show_status:
extraoptions += ' -q'
+ scp_port = ''
+ ssh_port = ''
+ if not args.port:
+ raise DevtoolError("If you specify -P/--port then you must provide the port to be used to connect to the target")
+ else:
+ scp_port = "-P %s" % args.port
+ ssh_port = "-p %s" % args.port
+
# In order to delete previously deployed files and have the manifest file on
# the target, we write out a shell script and then copy it to the target
# so we can then run it (piping tar output to it).
@@ -213,7 +221,7 @@ def deploy(args, config, basepath, workspace):
for fpath, fsize in filelist:
f.write('%s %d\n' % (fpath, fsize))
# Copy them to the target
- ret = subprocess.call("scp %s %s/* %s:%s" % (extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
+ ret = subprocess.call("scp %s %s %s/* %s:%s" % (scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
if ret != 0:
raise DevtoolError('Failed to copy script to %s - rerun with -s to '
'get a complete error message' % args.target)
@@ -221,7 +229,7 @@ def deploy(args, config, basepath, workspace):
shutil.rmtree(tmpdir)
# Now run the script
- ret = exec_fakeroot(rd, 'tar cf - . | ssh %s %s \'sh %s %s %s %s\'' % (extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True)
+ ret = exec_fakeroot(rd, 'tar cf - . | ssh %s %s %s \'sh %s %s %s %s\'' % (ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True)
if ret != 0:
raise DevtoolError('Deploy failed - rerun with -s to get a complete '
'error message')
@@ -251,6 +259,14 @@ def undeploy(args, config, basepath, workspace):
if not args.show_status:
extraoptions += ' -q'
+ scp_port = ''
+ ssh_port = ''
+ if not args.port:
+ raise DevtoolError("If you specify -P/--port then you must provide the port to be used to connect to the target")
+ else:
+ scp_port = "-P %s" % args.port
+ ssh_port = "-p %s" % args.port
+
args.target = args.target.split(':')[0]
tmpdir = tempfile.mkdtemp(prefix='devtool')
@@ -261,7 +277,7 @@ def undeploy(args, config, basepath, workspace):
with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
f.write(shellscript)
# Copy it to the target
- ret = subprocess.call("scp %s %s/* %s:%s" % (extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
+ ret = subprocess.call("scp %s %s %s/* %s:%s" % (scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
if ret != 0:
raise DevtoolError('Failed to copy script to %s - rerun with -s to '
'get a complete error message' % args.target)
@@ -269,7 +285,7 @@ def undeploy(args, config, basepath, workspace):
shutil.rmtree(tmpdir)
# Now run the script
- ret = subprocess.call('ssh %s %s \'sh %s %s\'' % (extraoptions, args.target, tmpscript, args.recipename), shell=True)
+ ret = subprocess.call('ssh %s %s %s \'sh %s %s\'' % (ssh_port, extraoptions, args.target, tmpscript, args.recipename), shell=True)
if ret != 0:
raise DevtoolError('Undeploy failed - rerun with -s to get a complete '
'error message')
@@ -292,6 +308,7 @@ def register_commands(subparsers, context):
parser_deploy.add_argument('-n', '--dry-run', help='List files to be deployed only', action='store_true')
parser_deploy.add_argument('-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
parser_deploy.add_argument('--no-check-space', help='Do not check for available space before deploying', action='store_true')
+ parser_deploy.add_argument('-P', '--port', default='22', help='Port to use for connection to the target')
parser_deploy.set_defaults(func=deploy)
parser_undeploy = subparsers.add_parser('undeploy-target',
@@ -304,4 +321,5 @@ def register_commands(subparsers, context):
parser_undeploy.add_argument('-s', '--show-status', help='Show progress/status output', action='store_true')
parser_undeploy.add_argument('-a', '--all', help='Undeploy all recipes deployed on the target', action='store_true')
parser_undeploy.add_argument('-n', '--dry-run', help='List files to be undeployed only', action='store_true')
+ parser_undeploy.add_argument('-P', '--port', default='22', help='Port to use for connection to the target')
parser_undeploy.set_defaults(func=undeploy)
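The new -P/--port option added above (default '22') is threaded through both transports used by deploy-target and undeploy-target; two separate strings are built because scp and ssh spell the port flag differently. A small illustration with made-up values (the port, target and paths below are only examples):

    port = '2222'                  # illustrative; the option defaults to '22'
    target = 'root@192.168.7.2'    # illustrative target from the command line
    scp_port = '-P %s' % port      # scp takes the port as upper-case -P
    ssh_port = '-p %s' % port      # ssh takes the port as lower-case -p
    # The copy and run steps above then become roughly:
    #   scp -P 2222 -q <tmpdir>/* root@192.168.7.2:<script dir>
    #   ssh -p 2222 -q root@192.168.7.2 'sh <tmpscript> <recipename> ...'

On the command line this corresponds to something like 'devtool deploy-target -P 2222 <recipe> root@<target>', and the same option applies to undeploy-target.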
diff --git a/import-layers/yocto-poky/scripts/lib/devtool/package.py b/import-layers/yocto-poky/scripts/lib/devtool/package.py
index afb5809a3..af9e8f15f 100644
--- a/import-layers/yocto-poky/scripts/lib/devtool/package.py
+++ b/import-layers/yocto-poky/scripts/lib/devtool/package.py
@@ -28,15 +28,13 @@ def package(args, config, basepath, workspace):
"""Entry point for the devtool 'package' subcommand"""
check_workspace_recipe(workspace, args.recipename)
- tinfoil = setup_tinfoil(basepath=basepath)
+ tinfoil = setup_tinfoil(basepath=basepath, config_only=True)
try:
- tinfoil.prepare(config_only=True)
-
image_pkgtype = config.get('Package', 'image_pkgtype', '')
if not image_pkgtype:
- image_pkgtype = tinfoil.config_data.getVar('IMAGE_PKGTYPE', True)
+ image_pkgtype = tinfoil.config_data.getVar('IMAGE_PKGTYPE')
- deploy_dir_pkg = tinfoil.config_data.getVar('DEPLOY_DIR_%s' % image_pkgtype.upper(), True)
+ deploy_dir_pkg = tinfoil.config_data.getVar('DEPLOY_DIR_%s' % image_pkgtype.upper())
finally:
tinfoil.shutdown()
diff --git a/import-layers/yocto-poky/scripts/lib/devtool/runqemu.py b/import-layers/yocto-poky/scripts/lib/devtool/runqemu.py
index ae25cee08..e26cf28c2 100644
--- a/import-layers/yocto-poky/scripts/lib/devtool/runqemu.py
+++ b/import-layers/yocto-poky/scripts/lib/devtool/runqemu.py
@@ -31,8 +31,10 @@ def runqemu(args, config, basepath, workspace):
tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
try:
- machine = tinfoil.config_data.getVar('MACHINE', True)
- bindir_native = tinfoil.config_data.getVar('STAGING_BINDIR_NATIVE', True)
+ machine = tinfoil.config_data.getVar('MACHINE')
+ bindir_native = os.path.join(tinfoil.config_data.getVar('STAGING_DIR'),
+ tinfoil.config_data.getVar('BUILD_ARCH'),
+ tinfoil.config_data.getVar('bindir_native').lstrip(os.path.sep))
finally:
tinfoil.shutdown()
diff --git a/import-layers/yocto-poky/scripts/lib/devtool/sdk.py b/import-layers/yocto-poky/scripts/lib/devtool/sdk.py
index 922277b79..e8bf0ad98 100644
--- a/import-layers/yocto-poky/scripts/lib/devtool/sdk.py
+++ b/import-layers/yocto-poky/scripts/lib/devtool/sdk.py
@@ -132,9 +132,9 @@ def sdk_update(args, config, basepath, workspace):
# Grab variable values
tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
try:
- stamps_dir = tinfoil.config_data.getVar('STAMPS_DIR', True)
- sstate_mirrors = tinfoil.config_data.getVar('SSTATE_MIRRORS', True)
- site_conf_version = tinfoil.config_data.getVar('SITE_CONF_VERSION', True)
+ stamps_dir = tinfoil.config_data.getVar('STAMPS_DIR')
+ sstate_mirrors = tinfoil.config_data.getVar('SSTATE_MIRRORS')
+ site_conf_version = tinfoil.config_data.getVar('SITE_CONF_VERSION')
finally:
tinfoil.shutdown()
@@ -273,7 +273,7 @@ def sdk_install(args, config, basepath, workspace):
rd = parse_recipe(config, tinfoil, recipe, True)
if not rd:
return 1
- stampprefixes[recipe] = '%s.%s' % (rd.getVar('STAMP', True), tasks[0])
+ stampprefixes[recipe] = '%s.%s' % (rd.getVar('STAMP'), tasks[0])
if checkstamp(recipe):
logger.info('%s is already installed' % recipe)
else:
@@ -306,6 +306,12 @@ def sdk_install(args, config, basepath, workspace):
if failed:
return 2
+ try:
+ exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots', watch=True)
+ except bb.process.ExecutionError as e:
+ raise DevtoolError('Failed to bitbake build-sysroots:\n%s' % (str(e)))
+
+
def register_commands(subparsers, context):
"""Register devtool subcommands from the sdk plugin"""
if context.fixed_setup:
diff --git a/import-layers/yocto-poky/scripts/lib/devtool/search.py b/import-layers/yocto-poky/scripts/lib/devtool/search.py
index b44bed7f6..054985b85 100644
--- a/import-layers/yocto-poky/scripts/lib/devtool/search.py
+++ b/import-layers/yocto-poky/scripts/lib/devtool/search.py
@@ -31,7 +31,7 @@ def search(args, config, basepath, workspace):
tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
try:
- pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR', True)
+ pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
defsummary = tinfoil.config_data.getVar('SUMMARY', False) or ''
keyword_rc = re.compile(args.keyword)
@@ -70,7 +70,7 @@ def search(args, config, basepath, workspace):
if match:
rd = parse_recipe(config, tinfoil, fn, True)
- summary = rd.getVar('SUMMARY', True)
+ summary = rd.getVar('SUMMARY')
if summary == rd.expand(defsummary):
summary = ''
print("%s %s" % (fn.ljust(20), summary))
diff --git a/import-layers/yocto-poky/scripts/lib/devtool/standard.py b/import-layers/yocto-poky/scripts/lib/devtool/standard.py
index 4eff6f878..5ff1e230f 100644
--- a/import-layers/yocto-poky/scripts/lib/devtool/standard.py
+++ b/import-layers/yocto-poky/scripts/lib/devtool/standard.py
@@ -150,25 +150,32 @@ def add(args, config, basepath, workspace):
extracmdopts += ' --src-subdir "%s"' % args.src_subdir
if args.autorev:
extracmdopts += ' -a'
+ if args.fetch_dev:
+ extracmdopts += ' --fetch-dev'
tempdir = tempfile.mkdtemp(prefix='devtool')
try:
+ builtnpm = False
while True:
try:
- stdout, _ = exec_build_env_command(config.init_path, basepath, 'recipetool --color=%s create -o %s "%s" %s' % (color, tempdir, source, extracmdopts))
+ stdout, _ = exec_build_env_command(config.init_path, basepath, 'recipetool --color=%s create --devtool -o %s \'%s\' %s' % (color, tempdir, source, extracmdopts), watch=True)
except bb.process.ExecutionError as e:
if e.exitcode == 14:
+ if builtnpm:
+ raise DevtoolError('Re-running recipetool still failed to find npm')
# FIXME this is a horrible hack that is unfortunately
# necessary due to the fact that we can't run bitbake from
# inside recipetool since recipetool keeps tinfoil active
# with references to it throughout the code, so we have
# to exit out and come back here to do it.
- ensure_npm(config, basepath, args.fixed_setup)
+ ensure_npm(config, basepath, args.fixed_setup, check_exists=False)
+ logger.info('Re-running recipe creation process after building nodejs')
+ builtnpm = True
continue
elif e.exitcode == 15:
raise DevtoolError('Could not auto-determine recipe name, please specify it on the command line')
else:
- raise DevtoolError('Command \'%s\' failed:\n%s' % (e.command, e.stdout))
+ raise DevtoolError('Command \'%s\' failed' % e.command)
break
recipes = glob.glob(os.path.join(tempdir, '*.bb'))
@@ -223,8 +230,17 @@ def add(args, config, basepath, workspace):
tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
try:
- rd = oe.recipeutils.parse_recipe(tinfoil.cooker, recipefile, None)
+ try:
+ rd = tinfoil.parse_recipe_file(recipefile, False)
+ except Exception as e:
+ logger.error(str(e))
+ rd = None
if not rd:
+ # Parsing failed. We just created this recipe and we shouldn't
+ # leave it in the workdir or it'll prevent bitbake from starting
+ movefn = '%s.parsefailed' % recipefile
+ logger.error('Parsing newly created recipe failed, moving recipe to %s for reference. If this looks to be caused by the recipe itself, please report this error.' % movefn)
+ shutil.move(recipefile, movefn)
return 1
if args.fetchuri and not args.no_git:
@@ -302,7 +318,7 @@ def _check_compatible_recipe(pn, d):
raise DevtoolError("The %s recipe is a meta-recipe, and therefore is "
"not supported by this tool" % pn, 4)
- if bb.data.inherits_class('externalsrc', d) and d.getVar('EXTERNALSRC', True):
+ if bb.data.inherits_class('externalsrc', d) and d.getVar('EXTERNALSRC'):
# Not an incompatibility error per se, so we don't pass the error code
raise DevtoolError("externalsrc is currently enabled for the %s "
"recipe. This prevents the normal do_patch task "
@@ -331,10 +347,11 @@ def _git_ls_tree(repodir, treeish='HEAD', recursive=False):
cmd.append('-r')
out, _ = bb.process.run(cmd, cwd=repodir)
ret = {}
- for line in out.split('\0'):
- if line:
- split = line.split(None, 4)
- ret[split[3]] = split[0:3]
+ if out:
+ for line in out.split('\0'):
+ if line:
+ split = line.split(None, 4)
+ ret[split[3]] = split[0:3]
return ret
def _git_exclude_path(srctree, path):
@@ -376,7 +393,7 @@ def extract(args, config, basepath, workspace):
return 1
srctree = os.path.abspath(args.srctree)
- initial_rev = _extract_source(srctree, args.keep_temp, args.branch, False, rd)
+ initial_rev = _extract_source(srctree, args.keep_temp, args.branch, False, rd, tinfoil)
logger.info('Source tree extracted to %s' % srctree)
if initial_rev:
@@ -400,7 +417,7 @@ def sync(args, config, basepath, workspace):
return 1
srctree = os.path.abspath(args.srctree)
- initial_rev = _extract_source(srctree, args.keep_temp, args.branch, True, rd)
+ initial_rev = _extract_source(srctree, args.keep_temp, args.branch, True, rd, tinfoil)
logger.info('Source tree %s synchronized' % srctree)
if initial_rev:
@@ -410,70 +427,6 @@ def sync(args, config, basepath, workspace):
finally:
tinfoil.shutdown()
-class BbTaskExecutor(object):
- """Class for executing bitbake tasks for a recipe
-
- FIXME: This is very awkward. Unfortunately it's not currently easy to
- properly execute tasks outside of bitbake itself, until then this has to
- suffice if we are to handle e.g. linux-yocto's extra tasks
- """
-
- def __init__(self, rdata):
- self.rdata = rdata
- self.executed = []
-
- def exec_func(self, func, report):
- """Run bitbake task function"""
- if not func in self.executed:
- deps = self.rdata.getVarFlag(func, 'deps', False)
- if deps:
- for taskdepfunc in deps:
- self.exec_func(taskdepfunc, True)
- if report:
- logger.info('Executing %s...' % func)
- fn = self.rdata.getVar('FILE', True)
- localdata = bb.build._task_data(fn, func, self.rdata)
- try:
- bb.build.exec_func(func, localdata)
- except bb.build.FuncFailed as e:
- raise DevtoolError(str(e))
- self.executed.append(func)
-
-
-class PatchTaskExecutor(BbTaskExecutor):
- def __init__(self, rdata):
- import oe.patch
- self.check_git = False
- self.useroptions = []
- oe.patch.GitApplyTree.gitCommandUserOptions(self.useroptions, d=rdata)
- super(PatchTaskExecutor, self).__init__(rdata)
-
- def exec_func(self, func, report):
- from oe.patch import GitApplyTree
- srcsubdir = self.rdata.getVar('S', True)
- haspatches = False
- if func == 'do_patch':
- patchdir = os.path.join(srcsubdir, 'patches')
- if os.path.exists(patchdir):
- if os.listdir(patchdir):
- haspatches = True
- else:
- os.rmdir(patchdir)
-
- super(PatchTaskExecutor, self).exec_func(func, report)
- if self.check_git and os.path.exists(srcsubdir):
- if func == 'do_patch':
- if os.path.exists(patchdir):
- shutil.rmtree(patchdir)
- if haspatches:
- stdout, _ = bb.process.run('git status --porcelain patches', cwd=srcsubdir)
- if stdout:
- bb.process.run('git checkout patches', cwd=srcsubdir)
-
- stdout, _ = bb.process.run('git status --porcelain', cwd=srcsubdir)
- if stdout:
- bb.process.run('git add .; git %s commit -a -m "Committing changes from %s\n\n%s"' % (' '.join(self.useroptions), func, GitApplyTree.ignore_commit_prefix + ' - from %s' % func), cwd=srcsubdir)
-
def _prep_extract_operation(config, basepath, recipename, tinfoil=None):
"""HACK: Ugly workaround for making sure that requirements are met when
@@ -497,22 +450,11 @@ def _prep_extract_operation(config, basepath, recipename, tinfoil=None):
return tinfoil
-def _extract_source(srctree, keep_temp, devbranch, sync, d):
+def _extract_source(srctree, keep_temp, devbranch, sync, d, tinfoil):
"""Extract sources of a recipe"""
- import bb.event
import oe.recipeutils
- def eventfilter(name, handler, event, d):
- """Bitbake event filter for devtool extract operation"""
- if name == 'base_eventhandler':
- return True
- else:
- return False
-
- if hasattr(bb.event, 'set_eventfilter'):
- bb.event.set_eventfilter(eventfilter)
-
- pn = d.getVar('PN', True)
+ pn = d.getVar('PN')
_check_compatible_recipe(pn, d)
@@ -537,45 +479,92 @@ def _extract_source(srctree, keep_temp, devbranch, sync, d):
bb.utils.mkdirhier(srctree)
os.rmdir(srctree)
- # We don't want notes to be printed, they are too verbose
- origlevel = bb.logger.getEffectiveLevel()
- if logger.getEffectiveLevel() > logging.DEBUG:
- bb.logger.setLevel(logging.WARNING)
-
initial_rev = None
- tempdir = tempfile.mkdtemp(prefix='devtool')
+ # We need to redirect WORKDIR, STAMPS_DIR etc. under a temporary
+ # directory so that:
+ # (a) we pick up all files that get unpacked to the WORKDIR, and
+ # (b) we don't disturb the existing build
+ # However, with recipe-specific sysroots the sysroots for the recipe
+ # will be prepared under WORKDIR, and if we used the system temporary
+ # directory (i.e. usually /tmp) as used by mkdtemp by default, then
+ # our attempts to hardlink files into the recipe-specific sysroots
+ # will fail on systems where /tmp is a different filesystem, and it
+ # would have to fall back to copying the files which is a waste of
+ # time. Put the temp directory under the WORKDIR to prevent that from
+ # being a problem.
+ tempbasedir = d.getVar('WORKDIR')
+ bb.utils.mkdirhier(tempbasedir)
+ tempdir = tempfile.mkdtemp(prefix='devtooltmp-', dir=tempbasedir)
try:
+ tinfoil.logger.setLevel(logging.WARNING)
+
crd = d.createCopy()
# Make a subdir so we guard against WORKDIR==S
workdir = os.path.join(tempdir, 'workdir')
crd.setVar('WORKDIR', workdir)
- crd.setVar('T', os.path.join(tempdir, 'temp'))
- if not crd.getVar('S', True).startswith(workdir):
+ if not crd.getVar('S').startswith(workdir):
# Usually a shared workdir recipe (kernel, gcc)
# Try to set a reasonable default
if bb.data.inherits_class('kernel', d):
crd.setVar('S', '${WORKDIR}/source')
else:
- crd.setVar('S', '${WORKDIR}/%s' % os.path.basename(d.getVar('S', True)))
+ crd.setVar('S', '${WORKDIR}/%s' % os.path.basename(d.getVar('S')))
if bb.data.inherits_class('kernel', d):
# We don't want to move the source to STAGING_KERNEL_DIR here
crd.setVar('STAGING_KERNEL_DIR', '${S}')
- task_executor = PatchTaskExecutor(crd)
+ is_kernel_yocto = bb.data.inherits_class('kernel-yocto', d)
+ if not is_kernel_yocto:
+ crd.setVar('PATCHTOOL', 'git')
+ crd.setVar('PATCH_COMMIT_FUNCTIONS', '1')
+
+ # Apply our changes to the datastore to the server's datastore
+ for key in crd.localkeys():
+ tinfoil.config_data.setVar('%s_pn-%s' % (key, pn), crd.getVar(key, False))
+
+ tinfoil.config_data.setVar('STAMPS_DIR', os.path.join(tempdir, 'stamps'))
+ tinfoil.config_data.setVar('T', os.path.join(tempdir, 'temp'))
+ tinfoil.config_data.setVar('BUILDCFG_FUNCS', '')
+ tinfoil.config_data.setVar('BUILDCFG_HEADER', '')
+ tinfoil.config_data.setVar('BB_HASH_IGNORE_MISMATCH', '1')
+
+ tinfoil.set_event_mask(['bb.event.BuildStarted',
+ 'bb.event.BuildCompleted',
+ 'logging.LogRecord',
+ 'bb.command.CommandCompleted',
+ 'bb.command.CommandFailed',
+ 'bb.build.TaskStarted',
+ 'bb.build.TaskSucceeded',
+ 'bb.build.TaskFailed',
+ 'bb.build.TaskFailedSilent'])
+
+ def runtask(target, task):
+ if tinfoil.build_file(target, task):
+ while True:
+ event = tinfoil.wait_event(0.25)
+ if event:
+ if isinstance(event, bb.command.CommandCompleted):
+ break
+ elif isinstance(event, bb.command.CommandFailed):
+ raise DevtoolError('Task do_%s failed: %s' % (task, event.error))
+ elif isinstance(event, bb.build.TaskFailed):
+ raise DevtoolError('Task do_%s failed' % task)
+ elif isinstance(event, bb.build.TaskStarted):
+ logger.info('Executing %s...' % event._task)
+ elif isinstance(event, logging.LogRecord):
+ if event.levelno <= logging.INFO:
+ continue
+ logger.handle(event)
- crd.setVar('EXTERNALSRC_forcevariable', '')
+ # we need virtual:native:/path/to/recipe if it's a BBCLASSEXTEND
+ fn = tinfoil.get_recipe_file(pn)
+ runtask(fn, 'unpack')
- logger.info('Fetching %s...' % pn)
- task_executor.exec_func('do_fetch', False)
- logger.info('Unpacking...')
- task_executor.exec_func('do_unpack', False)
if bb.data.inherits_class('kernel-yocto', d):
# Extra step for kernel to populate the source directory
- logger.info('Doing kernel checkout...')
- task_executor.exec_func('do_kernel_checkout', False)
- srcsubdir = crd.getVar('S', True)
+ runtask(fn, 'kernel_checkout')
- task_executor.check_git = True
+ srcsubdir = crd.getVar('S')
# Move local source files into separate subdir
recipe_patches = [os.path.basename(patch) for patch in
@@ -605,7 +594,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, d):
os.path.basename(fname) not in recipe_patches]
# Force separate S so that patch files can be left out from srctree
srcsubdir = tempfile.mkdtemp(dir=workdir)
- crd.setVar('S', srcsubdir)
+ tinfoil.config_data.setVar('S_task-patch', srcsubdir)
# Move source files to S
for path in src_files:
_move_file(os.path.join(workdir, path),
@@ -623,15 +612,13 @@ def _extract_source(srctree, keep_temp, devbranch, sync, d):
"doesn't use any source or the correct source "
"directory could not be determined" % pn)
- setup_git_repo(srcsubdir, crd.getVar('PV', True), devbranch, d=d)
+ setup_git_repo(srcsubdir, crd.getVar('PV'), devbranch, d=d)
(stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srcsubdir)
initial_rev = stdout.rstrip()
- crd.setVar('PATCHTOOL', 'git')
-
logger.info('Patching...')
- task_executor.exec_func('do_patch', False)
+ runtask(fn, 'patch')
bb.process.run('git tag -f devtool-patched', cwd=srcsubdir)
@@ -639,8 +626,8 @@ def _extract_source(srctree, keep_temp, devbranch, sync, d):
if bb.data.inherits_class('kernel-yocto', d):
# Store generate and store kernel config
logger.info('Generating kernel config')
- task_executor.exec_func('do_configure', False)
- kconfig = os.path.join(crd.getVar('B', True), '.config')
+ runtask(fn, 'configure')
+ kconfig = os.path.join(crd.getVar('B'), '.config')
tempdir_localdir = os.path.join(tempdir, 'oe-local-files')
@@ -672,13 +659,34 @@ def _extract_source(srctree, keep_temp, devbranch, sync, d):
shutil.move(srcsubdir, srctree)
+ if os.path.abspath(d.getVar('S')) == os.path.abspath(d.getVar('WORKDIR')):
+ # If recipe extracts to ${WORKDIR}, symlink the files into the srctree
+ # (otherwise the recipe won't build as expected)
+ local_files_dir = os.path.join(srctree, 'oe-local-files')
+ addfiles = []
+ for root, _, files in os.walk(local_files_dir):
+ relpth = os.path.relpath(root, local_files_dir)
+ if relpth != '.':
+ bb.utils.mkdirhier(os.path.join(srctree, relpth))
+ for fn in files:
+ if fn == '.gitignore':
+ continue
+ destpth = os.path.join(srctree, relpth, fn)
+ if os.path.exists(destpth):
+ os.unlink(destpth)
+ os.symlink('oe-local-files/%s' % fn, destpth)
+ addfiles.append(os.path.join(relpth, fn))
+ if addfiles:
+ bb.process.run('git add %s' % ' '.join(addfiles), cwd=srctree)
+ useroptions = []
+ oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=d)
+ bb.process.run('git %s commit -a -m "Committing local file symlinks\n\n%s"' % (' '.join(useroptions), oe.patch.GitApplyTree.ignore_commit_prefix), cwd=srctree)
+
if kconfig:
logger.info('Copying kernel config to srctree')
shutil.copy2(kconfig, srctree)
finally:
- bb.logger.setLevel(origlevel)
-
if keep_temp:
logger.info('Preserving temporary directory %s' % tempdir)
else:
@@ -748,7 +756,7 @@ def modify(args, config, basepath, workspace):
if not rd:
return 1
- pn = rd.getVar('PN', True)
+ pn = rd.getVar('PN')
if pn != args.recipename:
logger.info('Mapping %s to %s' % (args.recipename, pn))
if pn in workspace:
@@ -769,8 +777,10 @@ def modify(args, config, basepath, workspace):
if not tinfoil:
# Error already shown
return 1
+ # We need to re-parse because tinfoil may have been re-initialised
+ rd = parse_recipe(config, tinfoil, args.recipename, True)
- recipefile = rd.getVar('FILE', True)
+ recipefile = rd.getVar('FILE')
appendfile = recipe_to_append(recipefile, config, args.wildcard)
if os.path.exists(appendfile):
raise DevtoolError("Another variant of recipe %s is already in your "
@@ -783,7 +793,7 @@ def modify(args, config, basepath, workspace):
initial_rev = None
commits = []
if not args.no_extract:
- initial_rev = _extract_source(srctree, False, args.branch, False, rd)
+ initial_rev = _extract_source(srctree, args.keep_temp, args.branch, False, rd, tinfoil)
if not initial_rev:
return 1
logger.info('Source tree extracted to %s' % srctree)
@@ -807,8 +817,8 @@ def modify(args, config, basepath, workspace):
initial_rev = stdout.rstrip()
# Check that recipe isn't using a shared workdir
- s = os.path.abspath(rd.getVar('S', True))
- workdir = os.path.abspath(rd.getVar('WORKDIR', True))
+ s = os.path.abspath(rd.getVar('S'))
+ workdir = os.path.abspath(rd.getVar('WORKDIR'))
if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir:
# Handle if S is set to a subdirectory of the source
srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1]
@@ -851,6 +861,199 @@ def modify(args, config, basepath, workspace):
return 0
+
+def rename(args, config, basepath, workspace):
+ """Entry point for the devtool 'rename' subcommand"""
+ import bb
+ import oe.recipeutils
+
+ check_workspace_recipe(workspace, args.recipename)
+
+ if not (args.newname or args.version):
+ raise DevtoolError('You must specify a new name, a version with -V/--version, or both')
+
+ recipefile = workspace[args.recipename]['recipefile']
+ if not recipefile:
+ raise DevtoolError('devtool rename can only be used where the recipe file itself is in the workspace (e.g. after devtool add)')
+
+ if args.newname and args.newname != args.recipename:
+ reason = oe.recipeutils.validate_pn(args.newname)
+ if reason:
+ raise DevtoolError(reason)
+ newname = args.newname
+ else:
+ newname = args.recipename
+
+ append = workspace[args.recipename]['bbappend']
+ appendfn = os.path.splitext(os.path.basename(append))[0]
+ splitfn = appendfn.split('_')
+ if len(splitfn) > 1:
+ origfnver = appendfn.split('_')[1]
+ else:
+ origfnver = ''
+
+ recipefilemd5 = None
+ tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
+ try:
+ rd = parse_recipe(config, tinfoil, args.recipename, True)
+ if not rd:
+ return 1
+
+ bp = rd.getVar('BP')
+ bpn = rd.getVar('BPN')
+ if newname != args.recipename:
+ localdata = rd.createCopy()
+ localdata.setVar('PN', newname)
+ newbpn = localdata.getVar('BPN')
+ else:
+ newbpn = bpn
+ s = rd.getVar('S', False)
+ src_uri = rd.getVar('SRC_URI', False)
+ pv = rd.getVar('PV')
+
+ # Correct variable values that refer to the upstream source - these
+ # values must stay the same, so if the name/version are changing then
+ # we need to fix them up
+ new_s = s
+ new_src_uri = src_uri
+ if newbpn != bpn:
+ # ${PN} here is technically almost always incorrect, but people do use it
+ new_s = new_s.replace('${BPN}', bpn)
+ new_s = new_s.replace('${PN}', bpn)
+ new_s = new_s.replace('${BP}', '%s-${PV}' % bpn)
+ new_src_uri = new_src_uri.replace('${BPN}', bpn)
+ new_src_uri = new_src_uri.replace('${PN}', bpn)
+ new_src_uri = new_src_uri.replace('${BP}', '%s-${PV}' % bpn)
+ if args.version and origfnver == pv:
+ new_s = new_s.replace('${PV}', pv)
+ new_s = new_s.replace('${BP}', '${BPN}-%s' % pv)
+ new_src_uri = new_src_uri.replace('${PV}', pv)
+ new_src_uri = new_src_uri.replace('${BP}', '${BPN}-%s' % pv)
+ patchfields = {}
+ if new_s != s:
+ patchfields['S'] = new_s
+ if new_src_uri != src_uri:
+ patchfields['SRC_URI'] = new_src_uri
+ if patchfields:
+ recipefilemd5 = bb.utils.md5_file(recipefile)
+ oe.recipeutils.patch_recipe(rd, recipefile, patchfields)
+ newrecipefilemd5 = bb.utils.md5_file(recipefile)
+ finally:
+ tinfoil.shutdown()
+
+ if args.version:
+ newver = args.version
+ else:
+ newver = origfnver
+
+ if newver:
+ newappend = '%s_%s.bbappend' % (newname, newver)
+ newfile = '%s_%s.bb' % (newname, newver)
+ else:
+ newappend = '%s.bbappend' % newname
+ newfile = '%s.bb' % newname
+
+ oldrecipedir = os.path.dirname(recipefile)
+ newrecipedir = os.path.join(config.workspace_path, 'recipes', newname)
+ if oldrecipedir != newrecipedir:
+ bb.utils.mkdirhier(newrecipedir)
+
+ newappend = os.path.join(os.path.dirname(append), newappend)
+ newfile = os.path.join(newrecipedir, newfile)
+
+ # Rename bbappend
+ logger.info('Renaming %s to %s' % (append, newappend))
+ os.rename(append, newappend)
+ # Rename recipe file
+ logger.info('Renaming %s to %s' % (recipefile, newfile))
+ os.rename(recipefile, newfile)
+
+ # Rename source tree if it's the default path
+ appendmd5 = None
+ if not args.no_srctree:
+ srctree = workspace[args.recipename]['srctree']
+ if os.path.abspath(srctree) == os.path.join(config.workspace_path, 'sources', args.recipename):
+ newsrctree = os.path.join(config.workspace_path, 'sources', newname)
+ logger.info('Renaming %s to %s' % (srctree, newsrctree))
+ shutil.move(srctree, newsrctree)
+ # Correct any references (basically EXTERNALSRC*) in the .bbappend
+ appendmd5 = bb.utils.md5_file(newappend)
+ appendlines = []
+ with open(newappend, 'r') as f:
+ for line in f:
+ appendlines.append(line)
+ with open(newappend, 'w') as f:
+ for line in appendlines:
+ if srctree in line:
+ line = line.replace(srctree, newsrctree)
+ f.write(line)
+ newappendmd5 = bb.utils.md5_file(newappend)
+
+ bpndir = None
+ newbpndir = None
+ if newbpn != bpn:
+ bpndir = os.path.join(oldrecipedir, bpn)
+ if os.path.exists(bpndir):
+ newbpndir = os.path.join(newrecipedir, newbpn)
+ logger.info('Renaming %s to %s' % (bpndir, newbpndir))
+ shutil.move(bpndir, newbpndir)
+
+ bpdir = None
+ newbpdir = None
+ if newver != origfnver or newbpn != bpn:
+ bpdir = os.path.join(oldrecipedir, bp)
+ if os.path.exists(bpdir):
+ newbpdir = os.path.join(newrecipedir, '%s-%s' % (newbpn, newver))
+ logger.info('Renaming %s to %s' % (bpdir, newbpdir))
+ shutil.move(bpdir, newbpdir)
+
+ if oldrecipedir != newrecipedir:
+ # Move any stray files and delete the old recipe directory
+ for entry in os.listdir(oldrecipedir):
+ oldpath = os.path.join(oldrecipedir, entry)
+ newpath = os.path.join(newrecipedir, entry)
+ logger.info('Renaming %s to %s' % (oldpath, newpath))
+ shutil.move(oldpath, newpath)
+ os.rmdir(oldrecipedir)
+
+ # Now take care of entries in .devtool_md5
+ md5entries = []
+ with open(os.path.join(config.workspace_path, '.devtool_md5'), 'r') as f:
+ for line in f:
+ md5entries.append(line)
+
+ if bpndir and newbpndir:
+ relbpndir = os.path.relpath(bpndir, config.workspace_path) + '/'
+ else:
+ relbpndir = None
+ if bpdir and newbpdir:
+ relbpdir = os.path.relpath(bpdir, config.workspace_path) + '/'
+ else:
+ relbpdir = None
+
+ with open(os.path.join(config.workspace_path, '.devtool_md5'), 'w') as f:
+ for entry in md5entries:
+ splitentry = entry.rstrip().split('|')
+ if len(splitentry) > 2:
+ if splitentry[0] == args.recipename:
+ splitentry[0] = newname
+ if splitentry[1] == os.path.relpath(append, config.workspace_path):
+ splitentry[1] = os.path.relpath(newappend, config.workspace_path)
+ if appendmd5 and splitentry[2] == appendmd5:
+ splitentry[2] = newappendmd5
+ elif splitentry[1] == os.path.relpath(recipefile, config.workspace_path):
+ splitentry[1] = os.path.relpath(newfile, config.workspace_path)
+ if recipefilemd5 and splitentry[2] == recipefilemd5:
+ splitentry[2] = newrecipefilemd5
+ elif relbpndir and splitentry[1].startswith(relbpndir):
+ splitentry[1] = os.path.relpath(os.path.join(newbpndir, splitentry[1][len(relbpndir):]), config.workspace_path)
+ elif relbpdir and splitentry[1].startswith(relbpdir):
+ splitentry[1] = os.path.relpath(os.path.join(newbpdir, splitentry[1][len(relbpdir):]), config.workspace_path)
+ entry = '|'.join(splitentry) + '\n'
+ f.write(entry)
+ return 0
+
+
def _get_patchset_revs(srctree, recipe_path, initial_rev=None):
"""Get initial and update rev of a recipe. These are the start point of the
whole patchset and start point for the patches to be re-generated/updated.
@@ -909,6 +1112,15 @@ def _remove_file_entries(srcuri, filelist):
break
return entries, remaining
+def _replace_srcuri_entry(srcuri, filename, newentry):
+ """Replace entry corresponding to specified file with a new entry"""
+ basename = os.path.basename(filename)
+ for i in range(len(srcuri)):
+ if os.path.basename(srcuri[i].split(';')[0]) == basename:
+ srcuri.pop(i)
+ srcuri.insert(i, newentry)
+ break
+
def _remove_source_files(append, files, destpath):
"""Unlink existing patch files"""
for path in files:
@@ -933,7 +1145,7 @@ def _remove_source_files(append, files, destpath):
raise
-def _export_patches(srctree, rd, start_rev, destdir):
+def _export_patches(srctree, rd, start_rev, destdir, changed_revs=None):
"""Export patches from srctree to given location.
Returns three-tuple of dicts:
1. updated - patches that already exist in SRCURI
@@ -962,18 +1174,44 @@ def _export_patches(srctree, rd, start_rev, destdir):
# revision This does assume that people are using unique shortlog
# values, but they ought to be anyway...
new_basename = seqpatch_re.match(new_patch).group(2)
- found = False
+ match_name = None
for old_patch in existing_patches:
old_basename = seqpatch_re.match(old_patch).group(2)
- if new_basename == old_basename:
- updated[new_patch] = existing_patches.pop(old_patch)
- found = True
- # Rename patch files
- if new_patch != old_patch:
- os.rename(os.path.join(destdir, new_patch),
- os.path.join(destdir, old_patch))
+ old_basename_splitext = os.path.splitext(old_basename)
+ if old_basename.endswith(('.gz', '.bz2', '.Z')) and old_basename_splitext[0] == new_basename:
+ old_patch_noext = os.path.splitext(old_patch)[0]
+ match_name = old_patch_noext
+ break
+ elif new_basename == old_basename:
+ match_name = old_patch
break
- if not found:
+ if match_name:
+ # Rename patch files
+ if new_patch != match_name:
+ os.rename(os.path.join(destdir, new_patch),
+ os.path.join(destdir, match_name))
+ # Need to pop it off the list now before checking changed_revs
+ oldpath = existing_patches.pop(old_patch)
+ if changed_revs is not None:
+ # Avoid updating patches that have not actually changed
+ with open(os.path.join(destdir, match_name), 'r') as f:
+ firstlineitems = f.readline().split()
+ # Looking for "From <hash>" line
+ if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40:
+ if not firstlineitems[1] in changed_revs:
+ continue
+ # Recompress if necessary
+ if oldpath.endswith(('.gz', '.Z')):
+ bb.process.run(['gzip', match_name], cwd=destdir)
+ if oldpath.endswith('.gz'):
+ match_name += '.gz'
+ else:
+ match_name += '.Z'
+ elif oldpath.endswith('.bz2'):
+ bb.process.run(['bzip2', match_name], cwd=destdir)
+ match_name += '.bz2'
+ updated[match_name] = oldpath
+ else:
added[new_patch] = None
return (updated, added, existing_patches)
@@ -991,7 +1229,7 @@ def _create_kconfig_diff(srctree, rd, outfile):
stdout, stderr = pipe.communicate()
if pipe.returncode == 1:
logger.info("Updating config fragment %s" % outfile)
- with open(outfile, 'w') as fobj:
+ with open(outfile, 'wb') as fobj:
fobj.write(stdout)
elif pipe.returncode == 0:
logger.info("Would remove config fragment %s" % outfile)
@@ -1072,8 +1310,8 @@ def _export_local_files(srctree, rd, destdir):
elif fname != '.gitignore':
added[fname] = None
- workdir = rd.getVar('WORKDIR', True)
- s = rd.getVar('S', True)
+ workdir = rd.getVar('WORKDIR')
+ s = rd.getVar('S')
if not s.endswith(os.sep):
s += os.sep
@@ -1095,14 +1333,14 @@ def _export_local_files(srctree, rd, destdir):
def _determine_files_dir(rd):
"""Determine the appropriate files directory for a recipe"""
- recipedir = rd.getVar('FILE_DIRNAME', True)
- for entry in rd.getVar('FILESPATH', True).split(':'):
+ recipedir = rd.getVar('FILE_DIRNAME')
+ for entry in rd.getVar('FILESPATH').split(':'):
relpth = os.path.relpath(entry, recipedir)
if not os.sep in relpth:
# One (or zero) levels below only, so we don't put anything in machine-specific directories
if os.path.isdir(entry):
return entry
- return os.path.join(recipedir, rd.getVar('BPN', True))
+ return os.path.join(recipedir, rd.getVar('BPN'))
def _update_recipe_srcrev(srctree, rd, appendlayerdir, wildcard_version, no_remove):
@@ -1110,7 +1348,7 @@ def _update_recipe_srcrev(srctree, rd, appendlayerdir, wildcard_version, no_remo
import bb
import oe.recipeutils
- recipefile = rd.getVar('FILE', True)
+ recipefile = rd.getVar('FILE')
logger.info('Updating SRCREV in recipe %s' % os.path.basename(recipefile))
# Get HEAD revision
@@ -1192,7 +1430,7 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
import bb
import oe.recipeutils
- recipefile = rd.getVar('FILE', True)
+ recipefile = rd.getVar('FILE')
append = workspace[recipename]['bbappend']
if not os.path.exists(append):
raise DevtoolError('unable to find workspace bbappend for recipe %s' %
@@ -1203,6 +1441,10 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
raise DevtoolError('Unable to find initial revision - please specify '
'it with --initial-rev')
+ dl_dir = rd.getVar('DL_DIR')
+ if not dl_dir.endswith('/'):
+ dl_dir += '/'
+
tempdir = tempfile.mkdtemp(prefix='devtool')
try:
local_files_dir = tempfile.mkdtemp(dir=tempdir)
@@ -1220,7 +1462,7 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
# Get updated patches from source tree
patches_dir = tempfile.mkdtemp(dir=tempdir)
upd_p, new_p, del_p = _export_patches(srctree, rd, update_rev,
- patches_dir)
+ patches_dir, changed_revs)
updatefiles = False
updaterecipe = False
destpath = None
@@ -1247,6 +1489,7 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
logger.info('No patches or local source files needed updating')
else:
# Update existing files
+ files_dir = _determine_files_dir(rd)
for basepath, path in upd_f.items():
logger.info('Updating file %s' % basepath)
if os.path.isabs(basepath):
@@ -1258,18 +1501,19 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
updatefiles = True
for basepath, path in upd_p.items():
patchfn = os.path.join(patches_dir, basepath)
- if changed_revs is not None:
- # Avoid updating patches that have not actually changed
- with open(patchfn, 'r') as f:
- firstlineitems = f.readline().split()
- if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40:
- if not firstlineitems[1] in changed_revs:
- continue
- logger.info('Updating patch %s' % basepath)
+ if os.path.dirname(path) + '/' == dl_dir:
+ # This is a a downloaded patch file - we now need to
+ # replace the entry in SRC_URI with our local version
+ logger.info('Replacing remote patch %s with updated local version' % basepath)
+ path = os.path.join(files_dir, basepath)
+ _replace_srcuri_entry(srcuri, basepath, 'file://%s' % basepath)
+ updaterecipe = True
+ else:
+ logger.info('Updating patch %s' % basepath)
+ logger.debug('Moving new patch %s to %s' % (patchfn, path))
_move_file(patchfn, path)
updatefiles = True
# Add any new files
- files_dir = _determine_files_dir(rd)
for basepath, path in new_f.items():
logger.info('Adding new file %s' % basepath)
_move_file(os.path.join(local_files_dir, basepath),
@@ -1356,7 +1600,7 @@ def update_recipe(args, config, basepath, workspace):
updated = _update_recipe(args.recipename, workspace, rd, args.mode, args.append, args.wildcard_version, args.no_remove, args.initial_rev)
if updated:
- rf = rd.getVar('FILE', True)
+ rf = rd.getVar('FILE')
if rf.startswith(config.workspace_path):
logger.warn('Recipe file %s has been updated but is inside the workspace - you will need to move it (and any associated files next to it) out to the desired layer before using "devtool reset" in order to keep any changes' % rf)
finally:
@@ -1460,7 +1704,7 @@ def reset(args, config, basepath, workspace):
def _get_layer(layername, d):
"""Determine the base layer path for the specified layer name/path"""
- layerdirs = d.getVar('BBLAYERS', True).split()
+ layerdirs = d.getVar('BBLAYERS').split()
layers = {os.path.basename(p): p for p in layerdirs}
# Provide some shortcuts
if layername.lower() in ['oe-core', 'openembedded-core']:
@@ -1478,6 +1722,7 @@ def finish(args, config, basepath, workspace):
check_workspace_recipe(workspace, args.recipename)
+ no_clean = False
tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
try:
rd = parse_recipe(config, tinfoil, args.recipename, True)
@@ -1485,7 +1730,7 @@ def finish(args, config, basepath, workspace):
return 1
destlayerdir = _get_layer(args.destination, tinfoil.config_data)
- origlayerdir = oe.recipeutils.find_layerdir(rd.getVar('FILE', True))
+ origlayerdir = oe.recipeutils.find_layerdir(rd.getVar('FILE'))
if not os.path.isdir(destlayerdir):
raise DevtoolError('Unable to find layer or directory matching "%s"' % args.destination)
@@ -1515,6 +1760,11 @@ def finish(args, config, basepath, workspace):
destpath = oe.recipeutils.get_bbfile_path(rd, destlayerdir, origrelpath)
if not destpath:
raise DevtoolError("Unable to determine destination layer path - check that %s specifies an actual layer and %s/conf/layer.conf specifies BBFILES. You may also need to specify a more complete path." % (args.destination, destlayerdir))
+ # Warn if the layer isn't in bblayers.conf (the code to create a bbappend will do this in other cases)
+ layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS').split()]
+ if not os.path.abspath(destlayerdir) in layerdirs:
+ bb.warn('Specified destination layer is not currently enabled in bblayers.conf, so the %s recipe will now be unavailable in your current configuration until you add the layer there' % args.recipename)
+
elif destlayerdir == origlayerdir:
# Same layer, update the original recipe
appendlayerdir = None
@@ -1539,8 +1789,9 @@ def finish(args, config, basepath, workspace):
if origlayerdir == config.workspace_path and destpath:
# Recipe file itself is in the workspace - need to move it and any
# associated files to the specified layer
+ no_clean = True
logger.info('Moving recipe file to %s' % destpath)
- recipedir = os.path.dirname(rd.getVar('FILE', True))
+ recipedir = os.path.dirname(rd.getVar('FILE'))
for root, _, files in os.walk(recipedir):
for fn in files:
srcpath = os.path.join(root, fn)
@@ -1553,7 +1804,7 @@ def finish(args, config, basepath, workspace):
tinfoil.shutdown()
# Everything else has succeeded, we can now reset
- _reset([args.recipename], no_clean=False, config=config, basepath=basepath, workspace=workspace)
+ _reset([args.recipename], no_clean=no_clean, config=config, basepath=basepath, workspace=workspace)
return 0
@@ -1580,6 +1831,7 @@ def register_commands(subparsers, context):
group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree (deprecated - pass as positional argument instead)', metavar='URI')
+ parser_add.add_argument('--fetch-dev', help='For npm, also fetch devDependencies', action="store_true")
parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)')
parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true")
parser_add.add_argument('--autorev', '-a', help='When fetching from a git repository, set SRCREV in the recipe to a floating revision instead of fixed', action="store_true")
@@ -1601,6 +1853,7 @@ def register_commands(subparsers, context):
group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (when not using -n/--no-extract) (default "%(default)s")')
+ parser_modify.add_argument('--keep-temp', help='Keep temporary directory (for debugging)', action="store_true")
parser_modify.set_defaults(func=modify)
parser_extract = subparsers.add_parser('extract', help='Extract the source for an existing recipe',
@@ -1622,6 +1875,15 @@ def register_commands(subparsers, context):
parser_sync.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
parser_sync.set_defaults(func=sync)
+ parser_rename = subparsers.add_parser('rename', help='Rename a recipe file in the workspace',
+ description='Renames the recipe file for a recipe in the workspace, changing the name or version part or both, ensuring that all references within the workspace are updated at the same time. Only works when the recipe file itself is in the workspace, e.g. after devtool add. Particularly useful when devtool add did not automatically determine the correct name.',
+ group='working', order=10)
+ parser_rename.add_argument('recipename', help='Current name of recipe to rename')
+ parser_rename.add_argument('newname', nargs='?', help='New name for recipe (optional, not needed if you only want to change the version)')
+ parser_rename.add_argument('--version', '-V', help='Change the version (NOTE: this does not change the version fetched by the recipe, just the version in the recipe file name)')
+ parser_rename.add_argument('--no-srctree', '-s', action='store_true', help='Do not rename the source tree directory (if the default source tree path has been used) - keeping the old name may be desirable if there are internal/other external references to this path')
+ parser_rename.set_defaults(func=rename)
+
parser_update_recipe = subparsers.add_parser('update-recipe', help='Apply changes from external source tree to recipe',
description='Applies changes from external source tree to a recipe (updating/adding/removing patches as necessary, or by updating SRCREV). Note that these changes need to have been committed to the git repository in order to be recognised.',
group='working', order=-90)
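The largest structural change in standard.py above is the removal of the BbTaskExecutor/PatchTaskExecutor classes: individual tasks such as do_unpack and do_patch are now driven through the BitBake server with tinfoil.build_file(), and the caller consumes events until the command finishes. A condensed sketch of that loop, modelled on the runtask() helper added above (a prepared tinfoil instance and a recipe file path fn are assumed; DevtoolError and logger are the devtool module's existing helpers):

    import logging
    import bb.build
    import bb.command
    from devtool import DevtoolError, logger   # existing helpers in scripts/lib/devtool/__init__.py

    def run_single_task(tinfoil, fn, task):
        """Run one task (e.g. 'unpack') for recipe file 'fn', relaying its events."""
        tinfoil.set_event_mask(['bb.event.BuildStarted', 'bb.event.BuildCompleted',
                                'logging.LogRecord', 'bb.command.CommandCompleted',
                                'bb.command.CommandFailed', 'bb.build.TaskStarted',
                                'bb.build.TaskSucceeded', 'bb.build.TaskFailed'])
        if tinfoil.build_file(fn, task):
            while True:
                event = tinfoil.wait_event(0.25)
                if not event:
                    continue
                if isinstance(event, bb.command.CommandCompleted):
                    break
                elif isinstance(event, bb.command.CommandFailed):
                    raise DevtoolError('Task do_%s failed: %s' % (task, event.error))
                elif isinstance(event, bb.build.TaskFailed):
                    raise DevtoolError('Task do_%s failed' % task)
                elif isinstance(event, bb.build.TaskStarted):
                    logger.info('Executing %s...' % event._task)
                elif isinstance(event, logging.LogRecord) and event.levelno > logging.INFO:
                    logger.handle(event)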
diff --git a/import-layers/yocto-poky/scripts/lib/devtool/upgrade.py b/import-layers/yocto-poky/scripts/lib/devtool/upgrade.py
index a4239f1cd..05fb9e5ed 100644
--- a/import-layers/yocto-poky/scripts/lib/devtool/upgrade.py
+++ b/import-layers/yocto-poky/scripts/lib/devtool/upgrade.py
@@ -27,6 +27,10 @@ import argparse
import scriptutils
import errno
import bb
+
+devtool_path = os.path.dirname(os.path.realpath(__file__)) + '/../../../meta/lib'
+sys.path = sys.path + [devtool_path]
+
import oe.recipeutils
from devtool import standard
from devtool import exec_build_env_command, setup_tinfoil, DevtoolError, parse_recipe, use_external_build
@@ -68,7 +72,7 @@ def _remove_patch_dirs(recipefolder):
shutil.rmtree(os.path.join(root,d))
def _recipe_contains(rd, var):
- rf = rd.getVar('FILE', True)
+ rf = rd.getVar('FILE')
varfiles = oe.recipeutils.get_var_files(rf, [var], rd)
for var, fn in varfiles.items():
if fn and fn.startswith(os.path.dirname(rf) + os.sep):
@@ -117,7 +121,7 @@ def _write_append(rc, srctree, same_dir, no_same_dir, rev, copied, workspace, d)
brf = os.path.basename(os.path.splitext(rc)[0]) # rc basename
srctree = os.path.abspath(srctree)
- pn = d.getVar('PN',True)
+ pn = d.getVar('PN')
af = os.path.join(appendpath, '%s.bbappend' % brf)
with open(af, 'w') as f:
f.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n\n')
@@ -132,7 +136,7 @@ def _write_append(rc, srctree, same_dir, no_same_dir, rev, copied, workspace, d)
if rev:
f.write('# initial_rev: %s\n' % rev)
if copied:
- f.write('# original_path: %s\n' % os.path.dirname(d.getVar('FILE', True)))
+ f.write('# original_path: %s\n' % os.path.dirname(d.getVar('FILE')))
f.write('# original_files: %s\n' % ' '.join(copied))
return af
@@ -154,7 +158,7 @@ def _upgrade_error(e, rf, srctree):
raise DevtoolError(e)
def _get_uri(rd):
- srcuris = rd.getVar('SRC_URI', True).split()
+ srcuris = rd.getVar('SRC_URI').split()
if not len(srcuris):
raise DevtoolError('SRC_URI not found on recipe')
# Get first non-local entry in SRC_URI - usually by convention it's
@@ -185,7 +189,7 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, branch, keep_temp, tin
crd = rd.createCopy()
- pv = crd.getVar('PV', True)
+ pv = crd.getVar('PV')
crd.setVar('PV', newpv)
tmpsrctree = None
@@ -270,15 +274,15 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, branch, keep_temp, tin
def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, workspace, tinfoil, rd):
"""Creates the new recipe under workspace"""
- bpn = rd.getVar('BPN', True)
+ bpn = rd.getVar('BPN')
path = os.path.join(workspace, 'recipes', bpn)
bb.utils.mkdirhier(path)
copied, _ = oe.recipeutils.copy_recipe_files(rd, path)
- oldpv = rd.getVar('PV', True)
+ oldpv = rd.getVar('PV')
if not newpv:
newpv = oldpv
- origpath = rd.getVar('FILE', True)
+ origpath = rd.getVar('FILE')
fullpath = _rename_recipe_files(origpath, bpn, oldpv, newpv, path)
logger.debug('Upgraded %s => %s' % (origpath, fullpath))
@@ -320,7 +324,7 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, workspace, tinfoil
newvalues['SRC_URI[md5sum]'] = md5
newvalues['SRC_URI[sha256sum]'] = sha256
- rd = oe.recipeutils.parse_recipe(tinfoil.cooker, fullpath, None)
+ rd = tinfoil.parse_recipe_file(fullpath, False)
oe.recipeutils.patch_recipe(rd, fullpath, newvalues)
return fullpath, copied
@@ -341,7 +345,7 @@ def upgrade(args, config, basepath, workspace):
if not rd:
return 1
- pn = rd.getVar('PN', True)
+ pn = rd.getVar('PN')
if pn != args.recipename:
logger.info('Mapping %s to %s' % (args.recipename, pn))
if pn in workspace:
@@ -353,17 +357,17 @@ def upgrade(args, config, basepath, workspace):
srctree = standard.get_default_srctree(config, pn)
standard._check_compatible_recipe(pn, rd)
- old_srcrev = rd.getVar('SRCREV', True)
+ old_srcrev = rd.getVar('SRCREV')
if old_srcrev == 'INVALID':
old_srcrev = None
if old_srcrev and not args.srcrev:
raise DevtoolError("Recipe specifies a SRCREV value; you must specify a new one when upgrading")
- if rd.getVar('PV', True) == args.version and old_srcrev == args.srcrev:
+ if rd.getVar('PV') == args.version and old_srcrev == args.srcrev:
raise DevtoolError("Current and upgrade versions are the same version")
rf = None
try:
- rev1 = standard._extract_source(srctree, False, 'devtool-orig', False, rd)
+ rev1 = standard._extract_source(srctree, False, 'devtool-orig', False, rd, tinfoil)
rev2, md5, sha256 = _extract_new_source(args.version, srctree, args.no_patch,
args.srcrev, args.branch, args.keep_temp,
tinfoil, rd)
diff --git a/import-layers/yocto-poky/scripts/lib/devtool/utilcmds.py b/import-layers/yocto-poky/scripts/lib/devtool/utilcmds.py
index b761a80f8..0437e6417 100644
--- a/import-layers/yocto-poky/scripts/lib/devtool/utilcmds.py
+++ b/import-layers/yocto-poky/scripts/lib/devtool/utilcmds.py
@@ -39,7 +39,7 @@ def edit_recipe(args, config, basepath, workspace):
rd = parse_recipe(config, tinfoil, args.recipename, True)
if not rd:
return 1
- recipefile = rd.getVar('FILE', True)
+ recipefile = rd.getVar('FILE')
finally:
tinfoil.shutdown()
else:
@@ -62,20 +62,20 @@ def configure_help(args, config, basepath, workspace):
rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False)
if not rd:
return 1
- b = rd.getVar('B', True)
- s = rd.getVar('S', True)
+ b = rd.getVar('B')
+ s = rd.getVar('S')
configurescript = os.path.join(s, 'configure')
confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (rd.getVar('__BBTASKS', False) or [])
- configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS', True) or '')
- extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF', True) or '')
- extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE', True) or '')
- do_configure = rd.getVar('do_configure', True) or ''
+ configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS') or '')
+ extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF') or '')
+ extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE') or '')
+ do_configure = rd.getVar('do_configure') or ''
do_configure_noexpand = rd.getVar('do_configure', False) or ''
packageconfig = rd.getVarFlags('PACKAGECONFIG') or []
autotools = bb.data.inherits_class('autotools', rd) and ('oe_runconf' in do_configure or 'autotools_do_configure' in do_configure)
cmake = bb.data.inherits_class('cmake', rd) and ('cmake_do_configure' in do_configure)
- cmake_do_configure = rd.getVar('cmake_do_configure', True)
- pn = rd.getVar('PN', True)
+ cmake_do_configure = rd.getVar('cmake_do_configure')
+ pn = rd.getVar('PN')
finally:
tinfoil.shutdown()