summaryrefslogtreecommitdiffstats
path: root/yocto-poky/scripts/lib/devtool
diff options
context:
space:
mode:
Diffstat (limited to 'yocto-poky/scripts/lib/devtool')
-rw-r--r--yocto-poky/scripts/lib/devtool/__init__.py257
-rw-r--r--yocto-poky/scripts/lib/devtool/build.py86
-rw-r--r--yocto-poky/scripts/lib/devtool/build_image.py154
-rw-r--r--yocto-poky/scripts/lib/devtool/build_sdk.py65
-rw-r--r--yocto-poky/scripts/lib/devtool/deploy.py304
-rw-r--r--yocto-poky/scripts/lib/devtool/package.py62
-rw-r--r--yocto-poky/scripts/lib/devtool/runqemu.py65
-rw-r--r--yocto-poky/scripts/lib/devtool/sdk.py366
-rw-r--r--yocto-poky/scripts/lib/devtool/search.py88
-rw-r--r--yocto-poky/scripts/lib/devtool/standard.py1454
-rw-r--r--yocto-poky/scripts/lib/devtool/upgrade.py382
-rw-r--r--yocto-poky/scripts/lib/devtool/utilcmds.py233
12 files changed, 0 insertions, 3516 deletions
diff --git a/yocto-poky/scripts/lib/devtool/__init__.py b/yocto-poky/scripts/lib/devtool/__init__.py
deleted file mode 100644
index ff97dfc94..000000000
--- a/yocto-poky/scripts/lib/devtool/__init__.py
+++ /dev/null
@@ -1,257 +0,0 @@
-#!/usr/bin/env python
-
-# Development tool - utility functions for plugins
-#
-# Copyright (C) 2014 Intel Corporation
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-"""Devtool plugins module"""
-
-import os
-import sys
-import subprocess
-import logging
-import re
-
-logger = logging.getLogger('devtool')
-
-
-class DevtoolError(Exception):
- """Exception for handling devtool errors"""
- pass
-
-
-def exec_build_env_command(init_path, builddir, cmd, watch=False, **options):
- """Run a program in bitbake build context"""
- import bb
- if not 'cwd' in options:
- options["cwd"] = builddir
- if init_path:
- # As the OE init script makes use of BASH_SOURCE to determine OEROOT,
- # and can't determine it when running under dash, we need to set
- # the executable to bash to correctly set things up
- if not 'executable' in options:
- options['executable'] = 'bash'
- logger.debug('Executing command: "%s" using init path %s' % (cmd, init_path))
- init_prefix = '. %s %s > /dev/null && ' % (init_path, builddir)
- else:
- logger.debug('Executing command "%s"' % cmd)
- init_prefix = ''
- if watch:
- if sys.stdout.isatty():
- # Fool bitbake into thinking it's outputting to a terminal (because it is, indirectly)
- cmd = 'script -e -q -c "%s" /dev/null' % cmd
- return exec_watch('%s%s' % (init_prefix, cmd), **options)
- else:
- return bb.process.run('%s%s' % (init_prefix, cmd), **options)
-
-def exec_watch(cmd, **options):
- """Run program with stdout shown on sys.stdout"""
- import bb
- if isinstance(cmd, basestring) and not "shell" in options:
- options["shell"] = True
-
- process = subprocess.Popen(
- cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **options
- )
-
- buf = ''
- while True:
- out = process.stdout.read(1)
- if out:
- sys.stdout.write(out)
- sys.stdout.flush()
- buf += out
- elif out == '' and process.poll() != None:
- break
-
- if process.returncode != 0:
- raise bb.process.ExecutionError(cmd, process.returncode, buf, None)
-
- return buf, None
-
-def exec_fakeroot(d, cmd, **kwargs):
- """Run a command under fakeroot (pseudo, in fact) so that it picks up the appropriate file permissions"""
- # Grab the command and check it actually exists
- fakerootcmd = d.getVar('FAKEROOTCMD', True)
- if not os.path.exists(fakerootcmd):
- logger.error('pseudo executable %s could not be found - have you run a build yet? pseudo-native should install this and if you have run any build then that should have been built')
- return 2
- # Set up the appropriate environment
- newenv = dict(os.environ)
- fakerootenv = d.getVar('FAKEROOTENV', True)
- for varvalue in fakerootenv.split():
- if '=' in varvalue:
- splitval = varvalue.split('=', 1)
- newenv[splitval[0]] = splitval[1]
- return subprocess.call("%s %s" % (fakerootcmd, cmd), env=newenv, **kwargs)
-
-def setup_tinfoil(config_only=False, basepath=None, tracking=False):
- """Initialize tinfoil api from bitbake"""
- import scriptpath
- orig_cwd = os.path.abspath(os.curdir)
- try:
- if basepath:
- os.chdir(basepath)
- bitbakepath = scriptpath.add_bitbake_lib_path()
- if not bitbakepath:
- logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
- sys.exit(1)
-
- import bb.tinfoil
- tinfoil = bb.tinfoil.Tinfoil(tracking=tracking)
- tinfoil.prepare(config_only)
- tinfoil.logger.setLevel(logger.getEffectiveLevel())
- finally:
- os.chdir(orig_cwd)
- return tinfoil
-
-def get_recipe_file(cooker, pn):
- """Find recipe file corresponding a package name"""
- import oe.recipeutils
- recipefile = oe.recipeutils.pn_to_recipe(cooker, pn)
- if not recipefile:
- skipreasons = oe.recipeutils.get_unavailable_reasons(cooker, pn)
- if skipreasons:
- logger.error('\n'.join(skipreasons))
- else:
- logger.error("Unable to find any recipe file matching %s" % pn)
- return recipefile
-
-def parse_recipe(config, tinfoil, pn, appends, filter_workspace=True):
- """Parse recipe of a package"""
- import oe.recipeutils
- recipefile = get_recipe_file(tinfoil.cooker, pn)
- if not recipefile:
- # Error already logged
- return None
- if appends:
- append_files = tinfoil.cooker.collection.get_file_appends(recipefile)
- if filter_workspace:
- # Filter out appends from the workspace
- append_files = [path for path in append_files if
- not path.startswith(config.workspace_path)]
- else:
- append_files = None
- return oe.recipeutils.parse_recipe(recipefile, append_files,
- tinfoil.config_data)
-
-def check_workspace_recipe(workspace, pn, checksrc=True, bbclassextend=False):
- """
- Check that a recipe is in the workspace and (optionally) that source
- is present.
- """
-
- workspacepn = pn
-
- for recipe, value in workspace.iteritems():
- if recipe == pn:
- break
- if bbclassextend:
- recipefile = value['recipefile']
- if recipefile:
- targets = get_bbclassextend_targets(recipefile, recipe)
- if pn in targets:
- workspacepn = recipe
- break
- else:
- raise DevtoolError("No recipe named '%s' in your workspace" % pn)
-
- if checksrc:
- srctree = workspace[workspacepn]['srctree']
- if not os.path.exists(srctree):
- raise DevtoolError("Source tree %s for recipe %s does not exist" % (srctree, workspacepn))
- if not os.listdir(srctree):
- raise DevtoolError("Source tree %s for recipe %s is empty" % (srctree, workspacepn))
-
- return workspacepn
-
-def use_external_build(same_dir, no_same_dir, d):
- """
- Determine if we should use B!=S (separate build and source directories) or not
- """
- b_is_s = True
- if no_same_dir:
- logger.info('Using separate build directory since --no-same-dir specified')
- b_is_s = False
- elif same_dir:
- logger.info('Using source tree as build directory since --same-dir specified')
- elif bb.data.inherits_class('autotools-brokensep', d):
- logger.info('Using source tree as build directory since recipe inherits autotools-brokensep')
- elif d.getVar('B', True) == os.path.abspath(d.getVar('S', True)):
- logger.info('Using source tree as build directory since that would be the default for this recipe')
- else:
- b_is_s = False
- return b_is_s
-
-def setup_git_repo(repodir, version, devbranch, basetag='devtool-base'):
- """
- Set up the git repository for the source tree
- """
- import bb.process
- if not os.path.exists(os.path.join(repodir, '.git')):
- bb.process.run('git init', cwd=repodir)
- bb.process.run('git add .', cwd=repodir)
- commit_cmd = ['git', 'commit', '-q']
- stdout, _ = bb.process.run('git status --porcelain', cwd=repodir)
- if not stdout:
- commit_cmd.append('--allow-empty')
- commitmsg = "Initial empty commit with no upstream sources"
- elif version:
- commitmsg = "Initial commit from upstream at version %s" % version
- else:
- commitmsg = "Initial commit from upstream"
- commit_cmd += ['-m', commitmsg]
- bb.process.run(commit_cmd, cwd=repodir)
-
- bb.process.run('git checkout -b %s' % devbranch, cwd=repodir)
- bb.process.run('git tag -f %s' % basetag, cwd=repodir)
-
-def recipe_to_append(recipefile, config, wildcard=False):
- """
- Convert a recipe file to a bbappend file path within the workspace.
- NOTE: if the bbappend already exists, you should be using
- workspace[args.recipename]['bbappend'] instead of calling this
- function.
- """
- appendname = os.path.splitext(os.path.basename(recipefile))[0]
- if wildcard:
- appendname = re.sub(r'_.*', '_%', appendname)
- appendpath = os.path.join(config.workspace_path, 'appends')
- appendfile = os.path.join(appendpath, appendname + '.bbappend')
- return appendfile
-
-def get_bbclassextend_targets(recipefile, pn):
- """
- Cheap function to get BBCLASSEXTEND and then convert that to the
- list of targets that would result.
- """
- import bb.utils
-
- values = {}
- def get_bbclassextend_varfunc(varname, origvalue, op, newlines):
- values[varname] = origvalue
- return origvalue, None, 0, True
- with open(recipefile, 'r') as f:
- bb.utils.edit_metadata(f, ['BBCLASSEXTEND'], get_bbclassextend_varfunc)
-
- targets = []
- bbclassextend = values.get('BBCLASSEXTEND', '').split()
- if bbclassextend:
- for variant in bbclassextend:
- if variant == 'nativesdk':
- targets.append('%s-%s' % (variant, pn))
- elif variant in ['native', 'cross', 'crosssdk']:
- targets.append('%s-%s' % (pn, variant))
- return targets
diff --git a/yocto-poky/scripts/lib/devtool/build.py b/yocto-poky/scripts/lib/devtool/build.py
deleted file mode 100644
index 48f6fe1be..000000000
--- a/yocto-poky/scripts/lib/devtool/build.py
+++ /dev/null
@@ -1,86 +0,0 @@
-# Development tool - build command plugin
-#
-# Copyright (C) 2014-2015 Intel Corporation
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-"""Devtool build plugin"""
-
-import os
-import bb
-import logging
-import argparse
-import tempfile
-from devtool import exec_build_env_command, check_workspace_recipe, DevtoolError
-
-logger = logging.getLogger('devtool')
-
-
-def _set_file_values(fn, values):
- remaining = values.keys()
-
- def varfunc(varname, origvalue, op, newlines):
- newvalue = values.get(varname, origvalue)
- remaining.remove(varname)
- return (newvalue, '=', 0, True)
-
- with open(fn, 'r') as f:
- (updated, newlines) = bb.utils.edit_metadata(f, values, varfunc)
-
- for item in remaining:
- updated = True
- newlines.append('%s = "%s"' % (item, values[item]))
-
- if updated:
- with open(fn, 'w') as f:
- f.writelines(newlines)
- return updated
-
-def _get_build_tasks(config):
- tasks = config.get('Build', 'build_task', 'populate_sysroot,packagedata').split(',')
- return ['do_%s' % task.strip() for task in tasks]
-
-def build(args, config, basepath, workspace):
- """Entry point for the devtool 'build' subcommand"""
- workspacepn = check_workspace_recipe(workspace, args.recipename, bbclassextend=True)
-
- build_tasks = _get_build_tasks(config)
-
- bbappend = workspace[workspacepn]['bbappend']
- if args.disable_parallel_make:
- logger.info("Disabling 'make' parallelism")
- _set_file_values(bbappend, {'PARALLEL_MAKE': ''})
- try:
- bbargs = []
- for task in build_tasks:
- if args.recipename.endswith('-native') and 'package' in task:
- continue
- bbargs.append('%s:%s' % (args.recipename, task))
- exec_build_env_command(config.init_path, basepath, 'bitbake %s' % ' '.join(bbargs), watch=True)
- except bb.process.ExecutionError as e:
- # We've already seen the output since watch=True, so just ensure we return something to the user
- return e.exitcode
- finally:
- if args.disable_parallel_make:
- _set_file_values(bbappend, {'PARALLEL_MAKE': None})
-
- return 0
-
-def register_commands(subparsers, context):
- """Register devtool subcommands from this plugin"""
- parser_build = subparsers.add_parser('build', help='Build a recipe',
- description='Builds the specified recipe using bitbake (up to and including %s)' % ', '.join(_get_build_tasks(context.config)),
- group='working')
- parser_build.add_argument('recipename', help='Recipe to build')
- parser_build.add_argument('-s', '--disable-parallel-make', action="store_true", help='Disable make parallelism')
- parser_build.set_defaults(func=build)
diff --git a/yocto-poky/scripts/lib/devtool/build_image.py b/yocto-poky/scripts/lib/devtool/build_image.py
deleted file mode 100644
index e51d76647..000000000
--- a/yocto-poky/scripts/lib/devtool/build_image.py
+++ /dev/null
@@ -1,154 +0,0 @@
-# Development tool - build-image plugin
-#
-# Copyright (C) 2015 Intel Corporation
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-"""Devtool plugin containing the build-image subcommand."""
-
-import os
-import logging
-
-from bb.process import ExecutionError
-from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError
-
-logger = logging.getLogger('devtool')
-
-class TargetNotImageError(Exception):
- pass
-
-def _get_packages(tinfoil, workspace, config):
- """Get list of packages from recipes in the workspace."""
- result = []
- for recipe in workspace:
- data = parse_recipe(config, tinfoil, recipe, True)
- if 'class-target' in data.getVar('OVERRIDES', True).split(':'):
- if recipe in data.getVar('PACKAGES', True):
- result.append(recipe)
- else:
- logger.warning("Skipping recipe %s as it doesn't produce a "
- "package with the same name", recipe)
- return result
-
-def build_image(args, config, basepath, workspace):
- """Entry point for the devtool 'build-image' subcommand."""
-
- image = args.imagename
- auto_image = False
- if not image:
- sdk_targets = config.get('SDK', 'sdk_targets', '').split()
- if sdk_targets:
- image = sdk_targets[0]
- auto_image = True
- if not image:
- raise DevtoolError('Unable to determine image to build, please specify one')
-
- try:
- if args.add_packages:
- add_packages = args.add_packages.split(',')
- else:
- add_packages = None
- result, outputdir = build_image_task(config, basepath, workspace, image, add_packages)
- except TargetNotImageError:
- if auto_image:
- raise DevtoolError('Unable to determine image to build, please specify one')
- else:
- raise DevtoolError('Specified recipe %s is not an image recipe' % image)
-
- if result == 0:
- logger.info('Successfully built %s. You can find output files in %s'
- % (image, outputdir))
- return result
-
-def build_image_task(config, basepath, workspace, image, add_packages=None, task=None, extra_append=None):
- appendfile = os.path.join(config.workspace_path, 'appends',
- '%s.bbappend' % image)
-
- # remove <image>.bbappend to make sure setup_tinfoil doesn't
- # break because of it
- if os.path.isfile(appendfile):
- os.unlink(appendfile)
-
- tinfoil = setup_tinfoil(basepath=basepath)
- rd = parse_recipe(config, tinfoil, image, True)
- if not rd:
- # Error already shown
- return (1, None)
- if not bb.data.inherits_class('image', rd):
- raise TargetNotImageError()
-
- outputdir = None
- try:
- if workspace or add_packages:
- if add_packages:
- packages = add_packages
- else:
- packages = _get_packages(tinfoil, workspace, config)
- else:
- packages = None
- if not task:
- if not packages and not add_packages and workspace:
- logger.warning('No recipes in workspace, building image %s unmodified', image)
- elif not packages:
- logger.warning('No packages to add, building image %s unmodified', image)
-
- if packages or extra_append:
- bb.utils.mkdirhier(os.path.dirname(appendfile))
- with open(appendfile, 'w') as afile:
- if packages:
- # include packages from workspace recipes into the image
- afile.write('IMAGE_INSTALL_append = " %s"\n' % ' '.join(packages))
- if not task:
- logger.info('Building image %s with the following '
- 'additional packages: %s', image, ' '.join(packages))
- if extra_append:
- for line in extra_append:
- afile.write('%s\n' % line)
-
- if task in ['populate_sdk', 'populate_sdk_ext']:
- outputdir = rd.getVar('SDK_DEPLOY', True)
- else:
- outputdir = rd.getVar('DEPLOY_DIR_IMAGE', True)
-
- tinfoil.shutdown()
-
- options = ''
- if task:
- options += '-c %s' % task
-
- # run bitbake to build image (or specified task)
- try:
- exec_build_env_command(config.init_path, basepath,
- 'bitbake %s %s' % (options, image), watch=True)
- except ExecutionError as err:
- return (err.exitcode, None)
- finally:
- if os.path.isfile(appendfile):
- os.unlink(appendfile)
- return (0, outputdir)
-
-
-def register_commands(subparsers, context):
- """Register devtool subcommands from the build-image plugin"""
- parser = subparsers.add_parser('build-image',
- help='Build image including workspace recipe packages',
- description='Builds an image, extending it to include '
- 'packages from recipes in the workspace',
- group='testbuild', order=-10)
- parser.add_argument('imagename', help='Image recipe to build', nargs='?')
- parser.add_argument('-p', '--add-packages', help='Instead of adding packages for the '
- 'entire workspace, specify packages to be added to the image '
- '(separate multiple packages by commas)',
- metavar='PACKAGES')
- parser.set_defaults(func=build_image)
diff --git a/yocto-poky/scripts/lib/devtool/build_sdk.py b/yocto-poky/scripts/lib/devtool/build_sdk.py
deleted file mode 100644
index b89d65b0c..000000000
--- a/yocto-poky/scripts/lib/devtool/build_sdk.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# Development tool - build-sdk command plugin
-#
-# Copyright (C) 2015-2016 Intel Corporation
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os
-import subprocess
-import logging
-import glob
-import shutil
-import errno
-import sys
-import tempfile
-from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError
-from devtool import build_image
-
-logger = logging.getLogger('devtool')
-
-
-def build_sdk(args, config, basepath, workspace):
- """Entry point for the devtool build-sdk command"""
-
- sdk_targets = config.get('SDK', 'sdk_targets', '').split()
- if sdk_targets:
- image = sdk_targets[0]
- else:
- raise DevtoolError('Unable to determine image to build SDK for')
-
- extra_append = ['SDK_DERIVATIVE = "1"']
- try:
- result, outputdir = build_image.build_image_task(config,
- basepath,
- workspace,
- image,
- task='populate_sdk_ext',
- extra_append=extra_append)
- except build_image.TargetNotImageError:
- raise DevtoolError('Unable to determine image to build SDK for')
-
- if result == 0:
- logger.info('Successfully built SDK. You can find output files in %s'
- % outputdir)
- return result
-
-
-def register_commands(subparsers, context):
- """Register devtool subcommands"""
- if context.fixed_setup:
- parser_build_sdk = subparsers.add_parser('build-sdk',
- help='Build a derivative SDK of this one',
- description='Builds an extensible SDK based upon this one and the items in your workspace',
- group='advanced')
- parser_build_sdk.set_defaults(func=build_sdk)
diff --git a/yocto-poky/scripts/lib/devtool/deploy.py b/yocto-poky/scripts/lib/devtool/deploy.py
deleted file mode 100644
index 66644ccb6..000000000
--- a/yocto-poky/scripts/lib/devtool/deploy.py
+++ /dev/null
@@ -1,304 +0,0 @@
-# Development tool - deploy/undeploy command plugin
-#
-# Copyright (C) 2014-2016 Intel Corporation
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-"""Devtool plugin containing the deploy subcommands"""
-
-import os
-import subprocess
-import logging
-import tempfile
-import shutil
-import argparse_oe
-from devtool import exec_fakeroot, setup_tinfoil, check_workspace_recipe, DevtoolError
-
-logger = logging.getLogger('devtool')
-
-deploylist_path = '/.devtool'
-
-def _prepare_remote_script(deploy, verbose=False, dryrun=False, undeployall=False, nopreserve=False, nocheckspace=False):
- """
- Prepare a shell script for running on the target to
- deploy/undeploy files. We have to be careful what we put in this
- script - only commands that are likely to be available on the
- target are suitable (the target might be constrained, e.g. using
- busybox rather than bash with coreutils).
- """
- lines = []
- lines.append('#!/bin/sh')
- lines.append('set -e')
- if undeployall:
- # Yes, I know this is crude - but it does work
- lines.append('for entry in %s/*.list; do' % deploylist_path)
- lines.append('[ ! -f $entry ] && exit')
- lines.append('set `basename $entry | sed "s/.list//"`')
- if dryrun:
- if not deploy:
- lines.append('echo "Previously deployed files for $1:"')
- lines.append('manifest="%s/$1.list"' % deploylist_path)
- lines.append('preservedir="%s/$1.preserve"' % deploylist_path)
- lines.append('if [ -f $manifest ] ; then')
- # Read manifest in reverse and delete files / remove empty dirs
- lines.append(' sed \'1!G;h;$!d\' $manifest | while read file')
- lines.append(' do')
- if dryrun:
- lines.append(' if [ ! -d $file ] ; then')
- lines.append(' echo $file')
- lines.append(' fi')
- else:
- lines.append(' if [ -d $file ] ; then')
- # Avoid deleting a preserved directory in case it has special perms
- lines.append(' if [ ! -d $preservedir/$file ] ; then')
- lines.append(' rmdir $file > /dev/null 2>&1 || true')
- lines.append(' fi')
- lines.append(' else')
- lines.append(' rm $file')
- lines.append(' fi')
- lines.append(' done')
- if not dryrun:
- lines.append(' rm $manifest')
- if not deploy and not dryrun:
- # May as well remove all traces
- lines.append(' rmdir `dirname $manifest` > /dev/null 2>&1 || true')
- lines.append('fi')
-
- if deploy:
- if not nocheckspace:
- # Check for available space
- # FIXME This doesn't take into account files spread across multiple
- # partitions, but doing that is non-trivial
- # Find the part of the destination path that exists
- lines.append('checkpath="$2"')
- lines.append('while [ "$checkpath" != "/" ] && [ ! -e $checkpath ]')
- lines.append('do')
- lines.append(' checkpath=`dirname "$checkpath"`')
- lines.append('done')
- lines.append('freespace=`df -P $checkpath | sed "1d" | awk \'{ print $4 }\'`')
- # First line of the file is the total space
- lines.append('total=`head -n1 $3`')
- lines.append('if [ $total -gt $freespace ] ; then')
- lines.append(' echo "ERROR: insufficient space on target (available ${freespace}, needed ${total})"')
- lines.append(' exit 1')
- lines.append('fi')
- if not nopreserve:
- # Preserve any files that exist. Note that this will add to the
- # preserved list with successive deployments if the list of files
- # deployed changes, but because we've deleted any previously
- # deployed files at this point it will never preserve anything
- # that was deployed, only files that existed prior to any deploying
- # (which makes the most sense)
- lines.append('cat $3 | sed "1d" | while read file fsize')
- lines.append('do')
- lines.append(' if [ -e $file ] ; then')
- lines.append(' dest="$preservedir/$file"')
- lines.append(' mkdir -p `dirname $dest`')
- lines.append(' mv $file $dest')
- lines.append(' fi')
- lines.append('done')
- lines.append('rm $3')
- lines.append('mkdir -p `dirname $manifest`')
- lines.append('mkdir -p $2')
- if verbose:
- lines.append(' tar xv -C $2 -f - | tee $manifest')
- else:
- lines.append(' tar xv -C $2 -f - > $manifest')
- lines.append('sed -i "s!^./!$2!" $manifest')
- elif not dryrun:
- # Put any preserved files back
- lines.append('if [ -d $preservedir ] ; then')
- lines.append(' cd $preservedir')
- lines.append(' find . -type f -exec mv {} /{} \;')
- lines.append(' cd /')
- lines.append(' rm -rf $preservedir')
- lines.append('fi')
-
- if undeployall:
- if not dryrun:
- lines.append('echo "NOTE: Successfully undeployed $1"')
- lines.append('done')
-
- # Delete the script itself
- lines.append('rm $0')
- lines.append('')
-
- return '\n'.join(lines)
-
-
-def deploy(args, config, basepath, workspace):
- """Entry point for the devtool 'deploy' subcommand"""
- import re
- import math
- import oe.recipeutils
-
- check_workspace_recipe(workspace, args.recipename, checksrc=False)
-
- try:
- host, destdir = args.target.split(':')
- except ValueError:
- destdir = '/'
- else:
- args.target = host
- if not destdir.endswith('/'):
- destdir += '/'
-
- tinfoil = setup_tinfoil(basepath=basepath)
- try:
- rd = oe.recipeutils.parse_recipe_simple(tinfoil.cooker, args.recipename, tinfoil.config_data)
- except Exception as e:
- raise DevtoolError('Exception parsing recipe %s: %s' %
- (args.recipename, e))
- recipe_outdir = rd.getVar('D', True)
- if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir):
- raise DevtoolError('No files to deploy - have you built the %s '
- 'recipe? If so, the install step has not installed '
- 'any files.' % args.recipename)
-
- filelist = []
- ftotalsize = 0
- for root, _, files in os.walk(recipe_outdir):
- for fn in files:
- # Get the size in kiB (since we'll be comparing it to the output of du -k)
- # MUST use lstat() here not stat() or getfilesize() since we don't want to
- # dereference symlinks
- fsize = int(math.ceil(float(os.lstat(os.path.join(root, fn)).st_size)/1024))
- ftotalsize += fsize
- # The path as it would appear on the target
- fpath = os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn)
- filelist.append((fpath, fsize))
-
- if args.dry_run:
- print('Files to be deployed for %s on target %s:' % (args.recipename, args.target))
- for item, _ in filelist:
- print(' %s' % item)
- return 0
-
-
- extraoptions = ''
- if args.no_host_check:
- extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
- if not args.show_status:
- extraoptions += ' -q'
-
- # In order to delete previously deployed files and have the manifest file on
- # the target, we write out a shell script and then copy it to the target
- # so we can then run it (piping tar output to it).
- # (We cannot use scp here, because it doesn't preserve symlinks.)
- tmpdir = tempfile.mkdtemp(prefix='devtool')
- try:
- tmpscript = '/tmp/devtool_deploy.sh'
- tmpfilelist = os.path.join(os.path.dirname(tmpscript), 'devtool_deploy.list')
- shellscript = _prepare_remote_script(deploy=True,
- verbose=args.show_status,
- nopreserve=args.no_preserve,
- nocheckspace=args.no_check_space)
- # Write out the script to a file
- with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
- f.write(shellscript)
- # Write out the file list
- with open(os.path.join(tmpdir, os.path.basename(tmpfilelist)), 'w') as f:
- f.write('%d\n' % ftotalsize)
- for fpath, fsize in filelist:
- f.write('%s %d\n' % (fpath, fsize))
- # Copy them to the target
- ret = subprocess.call("scp %s %s/* %s:%s" % (extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
- if ret != 0:
- raise DevtoolError('Failed to copy script to %s - rerun with -s to '
- 'get a complete error message' % args.target)
- finally:
- shutil.rmtree(tmpdir)
-
- # Now run the script
- ret = exec_fakeroot(rd, 'tar cf - . | ssh %s %s \'sh %s %s %s %s\'' % (extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True)
- if ret != 0:
- raise DevtoolError('Deploy failed - rerun with -s to get a complete '
- 'error message')
-
- logger.info('Successfully deployed %s' % recipe_outdir)
-
- files_list = []
- for root, _, files in os.walk(recipe_outdir):
- for filename in files:
- filename = os.path.relpath(os.path.join(root, filename), recipe_outdir)
- files_list.append(os.path.join(destdir, filename))
-
- return 0
-
-def undeploy(args, config, basepath, workspace):
- """Entry point for the devtool 'undeploy' subcommand"""
- if args.all and args.recipename:
- raise argparse_oe.ArgumentUsageError('Cannot specify -a/--all with a recipe name', 'undeploy-target')
- elif not args.recipename and not args.all:
- raise argparse_oe.ArgumentUsageError('If you don\'t specify a recipe, you must specify -a/--all', 'undeploy-target')
-
- extraoptions = ''
- if args.no_host_check:
- extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
- if not args.show_status:
- extraoptions += ' -q'
-
- args.target = args.target.split(':')[0]
-
- tmpdir = tempfile.mkdtemp(prefix='devtool')
- try:
- tmpscript = '/tmp/devtool_undeploy.sh'
- shellscript = _prepare_remote_script(deploy=False, dryrun=args.dry_run, undeployall=args.all)
- # Write out the script to a file
- with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
- f.write(shellscript)
- # Copy it to the target
- ret = subprocess.call("scp %s %s/* %s:%s" % (extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
- if ret != 0:
- raise DevtoolError('Failed to copy script to %s - rerun with -s to '
- 'get a complete error message' % args.target)
- finally:
- shutil.rmtree(tmpdir)
-
- # Now run the script
- ret = subprocess.call('ssh %s %s \'sh %s %s\'' % (extraoptions, args.target, tmpscript, args.recipename), shell=True)
- if ret != 0:
- raise DevtoolError('Undeploy failed - rerun with -s to get a complete '
- 'error message')
-
- if not args.all and not args.dry_run:
- logger.info('Successfully undeployed %s' % args.recipename)
- return 0
-
-
-def register_commands(subparsers, context):
- """Register devtool subcommands from the deploy plugin"""
- parser_deploy = subparsers.add_parser('deploy-target',
- help='Deploy recipe output files to live target machine',
- description='Deploys a recipe\'s build output (i.e. the output of the do_install task) to a live target machine over ssh. By default, any existing files will be preserved instead of being overwritten and will be restored if you run devtool undeploy-target. Note: this only deploys the recipe itself and not any runtime dependencies, so it is assumed that those have been installed on the target beforehand.',
- group='testbuild')
- parser_deploy.add_argument('recipename', help='Recipe to deploy')
- parser_deploy.add_argument('target', help='Live target machine running an ssh server: user@hostname[:destdir]')
- parser_deploy.add_argument('-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
- parser_deploy.add_argument('-s', '--show-status', help='Show progress/status output', action='store_true')
- parser_deploy.add_argument('-n', '--dry-run', help='List files to be deployed only', action='store_true')
- parser_deploy.add_argument('-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
- parser_deploy.add_argument('--no-check-space', help='Do not check for available space before deploying', action='store_true')
- parser_deploy.set_defaults(func=deploy)
-
- parser_undeploy = subparsers.add_parser('undeploy-target',
- help='Undeploy recipe output files in live target machine',
- description='Un-deploys recipe output files previously deployed to a live target machine by devtool deploy-target.',
- group='testbuild')
- parser_undeploy.add_argument('recipename', help='Recipe to undeploy (if not using -a/--all)', nargs='?')
- parser_undeploy.add_argument('target', help='Live target machine running an ssh server: user@hostname')
- parser_undeploy.add_argument('-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
- parser_undeploy.add_argument('-s', '--show-status', help='Show progress/status output', action='store_true')
- parser_undeploy.add_argument('-a', '--all', help='Undeploy all recipes deployed on the target', action='store_true')
- parser_undeploy.add_argument('-n', '--dry-run', help='List files to be undeployed only', action='store_true')
- parser_undeploy.set_defaults(func=undeploy)
diff --git a/yocto-poky/scripts/lib/devtool/package.py b/yocto-poky/scripts/lib/devtool/package.py
deleted file mode 100644
index afb5809a3..000000000
--- a/yocto-poky/scripts/lib/devtool/package.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# Development tool - package command plugin
-#
-# Copyright (C) 2014-2015 Intel Corporation
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-"""Devtool plugin containing the package subcommands"""
-
-import os
-import subprocess
-import logging
-from bb.process import ExecutionError
-from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError
-
-logger = logging.getLogger('devtool')
-
-def package(args, config, basepath, workspace):
- """Entry point for the devtool 'package' subcommand"""
- check_workspace_recipe(workspace, args.recipename)
-
- tinfoil = setup_tinfoil(basepath=basepath)
- try:
- tinfoil.prepare(config_only=True)
-
- image_pkgtype = config.get('Package', 'image_pkgtype', '')
- if not image_pkgtype:
- image_pkgtype = tinfoil.config_data.getVar('IMAGE_PKGTYPE', True)
-
- deploy_dir_pkg = tinfoil.config_data.getVar('DEPLOY_DIR_%s' % image_pkgtype.upper(), True)
- finally:
- tinfoil.shutdown()
-
- package_task = config.get('Package', 'package_task', 'package_write_%s' % image_pkgtype)
- try:
- exec_build_env_command(config.init_path, basepath, 'bitbake -c %s %s' % (package_task, args.recipename), watch=True)
- except bb.process.ExecutionError as e:
- # We've already seen the output since watch=True, so just ensure we return something to the user
- return e.exitcode
-
- logger.info('Your packages are in %s' % deploy_dir_pkg)
-
- return 0
-
-def register_commands(subparsers, context):
- """Register devtool subcommands from the package plugin"""
- if context.fixed_setup:
- parser_package = subparsers.add_parser('package',
- help='Build packages for a recipe',
- description='Builds packages for a recipe\'s output files',
- group='testbuild', order=-5)
- parser_package.add_argument('recipename', help='Recipe to package')
- parser_package.set_defaults(func=package)
diff --git a/yocto-poky/scripts/lib/devtool/runqemu.py b/yocto-poky/scripts/lib/devtool/runqemu.py
deleted file mode 100644
index daee7fbbe..000000000
--- a/yocto-poky/scripts/lib/devtool/runqemu.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# Development tool - runqemu command plugin
-#
-# Copyright (C) 2015 Intel Corporation
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-"""Devtool runqemu plugin"""
-
-import os
-import bb
-import logging
-import argparse
-import glob
-from devtool import exec_build_env_command, setup_tinfoil, DevtoolError
-
-logger = logging.getLogger('devtool')
-
-def runqemu(args, config, basepath, workspace):
- """Entry point for the devtool 'runqemu' subcommand"""
-
- tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
- machine = tinfoil.config_data.getVar('MACHINE', True)
- bindir_native = tinfoil.config_data.getVar('STAGING_BINDIR_NATIVE', True)
- tinfoil.shutdown()
-
- if not glob.glob(os.path.join(bindir_native, 'qemu-system-*')):
- raise DevtoolError('QEMU is not available within this SDK')
-
- imagename = args.imagename
- if not imagename:
- sdk_targets = config.get('SDK', 'sdk_targets', '').split()
- if sdk_targets:
- imagename = sdk_targets[0]
- if not imagename:
- raise DevtoolError('Unable to determine image name to run, please specify one')
-
- try:
- exec_build_env_command(config.init_path, basepath, 'runqemu %s %s %s' % (machine, imagename, " ".join(args.args)), watch=True)
- except bb.process.ExecutionError as e:
- # We've already seen the output since watch=True, so just ensure we return something to the user
- return e.exitcode
-
- return 0
-
-def register_commands(subparsers, context):
- """Register devtool subcommands from this plugin"""
- if context.fixed_setup:
- parser_runqemu = subparsers.add_parser('runqemu', help='Run QEMU on the specified image',
- description='Runs QEMU to boot the specified image',
- group='testbuild', order=-20)
- parser_runqemu.add_argument('imagename', help='Name of built image to boot within QEMU', nargs='?')
- parser_runqemu.add_argument('args', help='Any remaining arguments are passed to the runqemu script (pass --help after imagename to see what these are)',
- nargs=argparse.REMAINDER)
- parser_runqemu.set_defaults(func=runqemu)
diff --git a/yocto-poky/scripts/lib/devtool/sdk.py b/yocto-poky/scripts/lib/devtool/sdk.py
deleted file mode 100644
index 46fd12bdb..000000000
--- a/yocto-poky/scripts/lib/devtool/sdk.py
+++ /dev/null
@@ -1,366 +0,0 @@
-# Development tool - sdk-update command plugin
-#
-# Copyright (C) 2015-2016 Intel Corporation
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os
-import subprocess
-import logging
-import glob
-import shutil
-import errno
-import sys
-import tempfile
-import re
-from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError
-
-logger = logging.getLogger('devtool')
-
-def parse_locked_sigs(sigfile_path):
- """Return <pn:task>:<hash> dictionary"""
- sig_dict = {}
- with open(sigfile_path) as f:
- lines = f.readlines()
- for line in lines:
- if ':' in line:
- taskkey, _, hashval = line.rpartition(':')
- sig_dict[taskkey.strip()] = hashval.split()[0]
- return sig_dict
-
-def generate_update_dict(sigfile_new, sigfile_old):
- """Return a dict containing <pn:task>:<hash> which indicates what need to be updated"""
- update_dict = {}
- sigdict_new = parse_locked_sigs(sigfile_new)
- sigdict_old = parse_locked_sigs(sigfile_old)
- for k in sigdict_new:
- if k not in sigdict_old:
- update_dict[k] = sigdict_new[k]
- continue
- if sigdict_new[k] != sigdict_old[k]:
- update_dict[k] = sigdict_new[k]
- continue
- return update_dict
-
-def get_sstate_objects(update_dict, sstate_dir):
- """Return a list containing sstate objects which are to be installed"""
- sstate_objects = []
- for k in update_dict:
- files = set()
- hashval = update_dict[k]
- p = sstate_dir + '/' + hashval[:2] + '/*' + hashval + '*.tgz'
- files |= set(glob.glob(p))
- p = sstate_dir + '/*/' + hashval[:2] + '/*' + hashval + '*.tgz'
- files |= set(glob.glob(p))
- files = list(files)
- if len(files) == 1:
- sstate_objects.extend(files)
- elif len(files) > 1:
- logger.error("More than one matching sstate object found for %s" % hashval)
-
- return sstate_objects
-
-def mkdir(d):
- try:
- os.makedirs(d)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise e
-
-def install_sstate_objects(sstate_objects, src_sdk, dest_sdk):
- """Install sstate objects into destination SDK"""
- sstate_dir = os.path.join(dest_sdk, 'sstate-cache')
- if not os.path.exists(sstate_dir):
- logger.error("Missing sstate-cache directory in %s, it might not be an extensible SDK." % dest_sdk)
- raise
- for sb in sstate_objects:
- dst = sb.replace(src_sdk, dest_sdk)
- destdir = os.path.dirname(dst)
- mkdir(destdir)
- logger.debug("Copying %s to %s" % (sb, dst))
- shutil.copy(sb, dst)
-
-def check_manifest(fn, basepath):
- import bb.utils
- changedfiles = []
- with open(fn, 'r') as f:
- for line in f:
- splitline = line.split()
- if len(splitline) > 1:
- chksum = splitline[0]
- fpath = splitline[1]
- curr_chksum = bb.utils.sha256_file(os.path.join(basepath, fpath))
- if chksum != curr_chksum:
- logger.debug('File %s changed: old csum = %s, new = %s' % (os.path.join(basepath, fpath), curr_chksum, chksum))
- changedfiles.append(fpath)
- return changedfiles
-
-def sdk_update(args, config, basepath, workspace):
- # Fetch locked-sigs.inc file from remote/local destination
- updateserver = args.updateserver
- if not updateserver:
- updateserver = config.get('SDK', 'updateserver', '')
- logger.debug("updateserver: %s" % updateserver)
-
- # Make sure we are using sdk-update from within SDK
- logger.debug("basepath = %s" % basepath)
- old_locked_sig_file_path = os.path.join(basepath, 'conf/locked-sigs.inc')
- if not os.path.exists(old_locked_sig_file_path):
- logger.error("Not using devtool's sdk-update command from within an extensible SDK. Please specify correct basepath via --basepath option")
- return -1
- else:
- logger.debug("Found conf/locked-sigs.inc in %s" % basepath)
-
- if ':' in updateserver:
- is_remote = True
- else:
- is_remote = False
-
- layers_dir = os.path.join(basepath, 'layers')
- conf_dir = os.path.join(basepath, 'conf')
-
- # Grab variable values
- tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
- try:
- stamps_dir = tinfoil.config_data.getVar('STAMPS_DIR', True)
- sstate_mirrors = tinfoil.config_data.getVar('SSTATE_MIRRORS', True)
- site_conf_version = tinfoil.config_data.getVar('SITE_CONF_VERSION', True)
- finally:
- tinfoil.shutdown()
-
- if not is_remote:
- # devtool sdk-update /local/path/to/latest/sdk
- new_locked_sig_file_path = os.path.join(updateserver, 'conf/locked-sigs.inc')
- if not os.path.exists(new_locked_sig_file_path):
- logger.error("%s doesn't exist or is not an extensible SDK" % updateserver)
- return -1
- else:
- logger.debug("Found conf/locked-sigs.inc in %s" % updateserver)
- update_dict = generate_update_dict(new_locked_sig_file_path, old_locked_sig_file_path)
- logger.debug("update_dict = %s" % update_dict)
- newsdk_path = updateserver
- sstate_dir = os.path.join(newsdk_path, 'sstate-cache')
- if not os.path.exists(sstate_dir):
- logger.error("sstate-cache directory not found under %s" % newsdk_path)
- return 1
- sstate_objects = get_sstate_objects(update_dict, sstate_dir)
- logger.debug("sstate_objects = %s" % sstate_objects)
- if len(sstate_objects) == 0:
- logger.info("No need to update.")
- return 0
- logger.info("Installing sstate objects into %s", basepath)
- install_sstate_objects(sstate_objects, updateserver.rstrip('/'), basepath)
- logger.info("Updating configuration files")
- new_conf_dir = os.path.join(updateserver, 'conf')
- shutil.rmtree(conf_dir)
- shutil.copytree(new_conf_dir, conf_dir)
- logger.info("Updating layers")
- new_layers_dir = os.path.join(updateserver, 'layers')
- shutil.rmtree(layers_dir)
- ret = subprocess.call("cp -a %s %s" % (new_layers_dir, layers_dir), shell=True)
- if ret != 0:
- logger.error("Copying %s to %s failed" % (new_layers_dir, layers_dir))
- return ret
- else:
- # devtool sdk-update http://myhost/sdk
- tmpsdk_dir = tempfile.mkdtemp()
- try:
- os.makedirs(os.path.join(tmpsdk_dir, 'conf'))
- new_locked_sig_file_path = os.path.join(tmpsdk_dir, 'conf', 'locked-sigs.inc')
- # Fetch manifest from server
- tmpmanifest = os.path.join(tmpsdk_dir, 'conf', 'sdk-conf-manifest')
- ret = subprocess.call("wget -q -O %s %s/conf/sdk-conf-manifest" % (tmpmanifest, updateserver), shell=True)
- changedfiles = check_manifest(tmpmanifest, basepath)
- if not changedfiles:
- logger.info("Already up-to-date")
- return 0
- # Update metadata
- logger.debug("Updating metadata via git ...")
- #Check for the status before doing a fetch and reset
- if os.path.exists(os.path.join(basepath, 'layers/.git')):
- out = subprocess.check_output("git status --porcelain", shell=True, cwd=layers_dir)
- if not out:
- ret = subprocess.call("git fetch --all; git reset --hard", shell=True, cwd=layers_dir)
- else:
- logger.error("Failed to update metadata as there have been changes made to it. Aborting.");
- logger.error("Changed files:\n%s" % out);
- return -1
- else:
- ret = -1
- if ret != 0:
- ret = subprocess.call("git clone %s/layers/.git" % updateserver, shell=True, cwd=tmpsdk_dir)
- if ret != 0:
- logger.error("Updating metadata via git failed")
- return ret
- logger.debug("Updating conf files ...")
- for changedfile in changedfiles:
- ret = subprocess.call("wget -q -O %s %s/%s" % (changedfile, updateserver, changedfile), shell=True, cwd=tmpsdk_dir)
- if ret != 0:
- logger.error("Updating %s failed" % changedfile)
- return ret
-
- # Check if UNINATIVE_CHECKSUM changed
- uninative = False
- if 'conf/local.conf' in changedfiles:
- def read_uninative_checksums(fn):
- chksumitems = []
- with open(fn, 'r') as f:
- for line in f:
- if line.startswith('UNINATIVE_CHECKSUM'):
- splitline = re.split(r'[\[\]"\']', line)
- if len(splitline) > 3:
- chksumitems.append((splitline[1], splitline[3]))
- return chksumitems
-
- oldsums = read_uninative_checksums(os.path.join(basepath, 'conf/local.conf'))
- newsums = read_uninative_checksums(os.path.join(tmpsdk_dir, 'conf/local.conf'))
- if oldsums != newsums:
- uninative = True
- for buildarch, chksum in newsums:
- uninative_file = os.path.join('downloads', 'uninative', chksum, '%s-nativesdk-libc.tar.bz2' % buildarch)
- mkdir(os.path.join(tmpsdk_dir, os.path.dirname(uninative_file)))
- ret = subprocess.call("wget -q -O %s %s/%s" % (uninative_file, updateserver, uninative_file), shell=True, cwd=tmpsdk_dir)
-
- # Ok, all is well at this point - move everything over
- tmplayers_dir = os.path.join(tmpsdk_dir, 'layers')
- if os.path.exists(tmplayers_dir):
- shutil.rmtree(layers_dir)
- shutil.move(tmplayers_dir, layers_dir)
- for changedfile in changedfiles:
- destfile = os.path.join(basepath, changedfile)
- os.remove(destfile)
- shutil.move(os.path.join(tmpsdk_dir, changedfile), destfile)
- os.remove(os.path.join(conf_dir, 'sdk-conf-manifest'))
- shutil.move(tmpmanifest, conf_dir)
- if uninative:
- shutil.rmtree(os.path.join(basepath, 'downloads', 'uninative'))
- shutil.move(os.path.join(tmpsdk_dir, 'downloads', 'uninative'), os.path.join(basepath, 'downloads'))
-
- if not sstate_mirrors:
- with open(os.path.join(conf_dir, 'site.conf'), 'a') as f:
- f.write('SCONF_VERSION = "%s"\n' % site_conf_version)
- f.write('SSTATE_MIRRORS_append = " file://.* %s/sstate-cache/PATH \\n "\n' % updateserver)
- finally:
- shutil.rmtree(tmpsdk_dir)
-
- if not args.skip_prepare:
- # Find all potentially updateable tasks
- sdk_update_targets = []
- tasks = ['do_populate_sysroot', 'do_packagedata']
- for root, _, files in os.walk(stamps_dir):
- for fn in files:
- if not '.sigdata.' in fn:
- for task in tasks:
- if '.%s.' % task in fn or '.%s_setscene.' % task in fn:
- sdk_update_targets.append('%s:%s' % (os.path.basename(root), task))
- # Run bitbake command for the whole SDK
- logger.info("Preparing build system... (This may take some time.)")
- try:
- exec_build_env_command(config.init_path, basepath, 'bitbake --setscene-only %s' % ' '.join(sdk_update_targets), stderr=subprocess.STDOUT)
- output, _ = exec_build_env_command(config.init_path, basepath, 'bitbake -n %s' % ' '.join(sdk_update_targets), stderr=subprocess.STDOUT)
- runlines = []
- for line in output.splitlines():
- if 'Running task ' in line:
- runlines.append(line)
- if runlines:
- logger.error('Unexecuted tasks found in preparation log:\n %s' % '\n '.join(runlines))
- return -1
- except bb.process.ExecutionError as e:
- logger.error('Preparation failed:\n%s' % e.stdout)
- return -1
- return 0
-
-def sdk_install(args, config, basepath, workspace):
- """Entry point for the devtool sdk-install command"""
-
- import oe.recipeutils
- import bb.process
-
- for recipe in args.recipename:
- if recipe in workspace:
- raise DevtoolError('recipe %s is a recipe in your workspace' % recipe)
-
- tasks = ['do_populate_sysroot', 'do_packagedata']
- stampprefixes = {}
- def checkstamp(recipe):
- stampprefix = stampprefixes[recipe]
- stamps = glob.glob(stampprefix + '*')
- for stamp in stamps:
- if '.sigdata.' not in stamp and stamp.startswith((stampprefix + '.', stampprefix + '_setscene.')):
- return True
- else:
- return False
-
- install_recipes = []
- tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
- try:
- for recipe in args.recipename:
- rd = parse_recipe(config, tinfoil, recipe, True)
- if not rd:
- return 1
- stampprefixes[recipe] = '%s.%s' % (rd.getVar('STAMP', True), tasks[0])
- if checkstamp(recipe):
- logger.info('%s is already installed' % recipe)
- else:
- install_recipes.append(recipe)
- finally:
- tinfoil.shutdown()
-
- if install_recipes:
- logger.info('Installing %s...' % ', '.join(install_recipes))
- install_tasks = []
- for recipe in install_recipes:
- for task in tasks:
- if recipe.endswith('-native') and 'package' in task:
- continue
- install_tasks.append('%s:%s' % (recipe, task))
- options = ''
- if not args.allow_build:
- options += ' --setscene-only'
- try:
- exec_build_env_command(config.init_path, basepath, 'bitbake %s %s' % (options, ' '.join(install_tasks)), watch=True)
- except bb.process.ExecutionError as e:
- raise DevtoolError('Failed to install %s:\n%s' % (recipe, str(e)))
- failed = False
- for recipe in install_recipes:
- if checkstamp(recipe):
- logger.info('Successfully installed %s' % recipe)
- else:
- raise DevtoolError('Failed to install %s - unavailable' % recipe)
- failed = True
- if failed:
- return 2
-
-def register_commands(subparsers, context):
- """Register devtool subcommands from the sdk plugin"""
- if context.fixed_setup:
- parser_sdk = subparsers.add_parser('sdk-update',
- help='Update SDK components',
- description='Updates installed SDK components from a remote server',
- group='sdk')
- updateserver = context.config.get('SDK', 'updateserver', '')
- if updateserver:
- parser_sdk.add_argument('updateserver', help='The update server to fetch latest SDK components from (default %s)' % updateserver, nargs='?')
- else:
- parser_sdk.add_argument('updateserver', help='The update server to fetch latest SDK components from')
- parser_sdk.add_argument('--skip-prepare', action="store_true", help='Skip re-preparing the build system after updating (for debugging only)')
- parser_sdk.set_defaults(func=sdk_update)
-
- parser_sdk_install = subparsers.add_parser('sdk-install',
- help='Install additional SDK components',
- description='Installs additional recipe development files into the SDK. (You can use "devtool search" to find available recipes.)',
- group='sdk')
- parser_sdk_install.add_argument('recipename', help='Name of the recipe to install the development artifacts for', nargs='+')
- parser_sdk_install.add_argument('-s', '--allow-build', help='Allow building requested item(s) from source', action='store_true')
- parser_sdk_install.set_defaults(func=sdk_install)
diff --git a/yocto-poky/scripts/lib/devtool/search.py b/yocto-poky/scripts/lib/devtool/search.py
deleted file mode 100644
index b44bed7f6..000000000
--- a/yocto-poky/scripts/lib/devtool/search.py
+++ /dev/null
@@ -1,88 +0,0 @@
-# Development tool - search command plugin
-#
-# Copyright (C) 2015 Intel Corporation
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-"""Devtool search plugin"""
-
-import os
-import bb
-import logging
-import argparse
-import re
-from devtool import setup_tinfoil, parse_recipe, DevtoolError
-
-logger = logging.getLogger('devtool')
-
-def search(args, config, basepath, workspace):
- """Entry point for the devtool 'search' subcommand"""
-
- tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
- try:
- pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR', True)
- defsummary = tinfoil.config_data.getVar('SUMMARY', False) or ''
-
- keyword_rc = re.compile(args.keyword)
-
- for fn in os.listdir(pkgdata_dir):
- pfn = os.path.join(pkgdata_dir, fn)
- if not os.path.isfile(pfn):
- continue
-
- packages = []
- match = False
- if keyword_rc.search(fn):
- match = True
-
- if not match:
- with open(pfn, 'r') as f:
- for line in f:
- if line.startswith('PACKAGES:'):
- packages = line.split(':', 1)[1].strip().split()
-
- for pkg in packages:
- if keyword_rc.search(pkg):
- match = True
- break
- if os.path.exists(os.path.join(pkgdata_dir, 'runtime', pkg + '.packaged')):
- with open(os.path.join(pkgdata_dir, 'runtime', pkg), 'r') as f:
- for line in f:
- if ': ' in line:
- splitline = line.split(':', 1)
- key = splitline[0]
- value = splitline[1].strip()
- if key in ['PKG_%s' % pkg, 'DESCRIPTION', 'FILES_INFO'] or key.startswith('FILERPROVIDES_'):
- if keyword_rc.search(value):
- match = True
- break
-
- if match:
- rd = parse_recipe(config, tinfoil, fn, True)
- summary = rd.getVar('SUMMARY', True)
- if summary == rd.expand(defsummary):
- summary = ''
- print("%s %s" % (fn.ljust(20), summary))
- finally:
- tinfoil.shutdown()
-
- return 0
-
-def register_commands(subparsers, context):
- """Register devtool subcommands from this plugin"""
- parser_search = subparsers.add_parser('search', help='Search available recipes',
- description='Searches for available target recipes. Matches on recipe name, package name, description and installed files, and prints the recipe name on match.',
- group='info')
- parser_search.add_argument('keyword', help='Keyword to search for (regular expression syntax allowed)')
- parser_search.set_defaults(func=search, no_workspace=True)
diff --git a/yocto-poky/scripts/lib/devtool/standard.py b/yocto-poky/scripts/lib/devtool/standard.py
deleted file mode 100644
index 77a82d559..000000000
--- a/yocto-poky/scripts/lib/devtool/standard.py
+++ /dev/null
@@ -1,1454 +0,0 @@
-# Development tool - standard commands plugin
-#
-# Copyright (C) 2014-2015 Intel Corporation
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-"""Devtool standard plugins"""
-
-import os
-import sys
-import re
-import shutil
-import subprocess
-import tempfile
-import logging
-import argparse
-import argparse_oe
-import scriptutils
-import errno
-import glob
-from collections import OrderedDict
-from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, use_external_build, setup_git_repo, recipe_to_append, get_bbclassextend_targets, DevtoolError
-from devtool import parse_recipe
-
-logger = logging.getLogger('devtool')
-
-
-def add(args, config, basepath, workspace):
- """Entry point for the devtool 'add' subcommand"""
- import bb
- import oe.recipeutils
-
- if not args.recipename and not args.srctree and not args.fetch and not args.fetchuri:
- raise argparse_oe.ArgumentUsageError('At least one of recipename, srctree, fetchuri or -f/--fetch must be specified', 'add')
-
- # These are positional arguments, but because we're nice, allow
- # specifying e.g. source tree without name, or fetch URI without name or
- # source tree (if we can detect that that is what the user meant)
- if '://' in args.recipename:
- if not args.fetchuri:
- if args.fetch:
- raise DevtoolError('URI specified as positional argument as well as -f/--fetch')
- args.fetchuri = args.recipename
- args.recipename = ''
- elif args.srctree and '://' in args.srctree:
- if not args.fetchuri:
- if args.fetch:
- raise DevtoolError('URI specified as positional argument as well as -f/--fetch')
- args.fetchuri = args.srctree
- args.srctree = ''
- elif args.recipename and not args.srctree:
- if os.sep in args.recipename:
- args.srctree = args.recipename
- args.recipename = None
- elif os.path.isdir(args.recipename):
- logger.warn('Ambiguous argument %s - assuming you mean it to be the recipe name')
-
- if args.fetch:
- if args.fetchuri:
- raise DevtoolError('URI specified as positional argument as well as -f/--fetch')
- else:
- # FIXME should show a warning that -f/--fetch is deprecated here
- args.fetchuri = args.fetch
-
- if args.recipename:
- if args.recipename in workspace:
- raise DevtoolError("recipe %s is already in your workspace" %
- args.recipename)
- reason = oe.recipeutils.validate_pn(args.recipename)
- if reason:
- raise DevtoolError(reason)
-
- # FIXME this ought to be in validate_pn but we're using that in other contexts
- if '/' in args.recipename:
- raise DevtoolError('"/" is not a valid character in recipe names')
-
- if args.srctree:
- srctree = os.path.abspath(args.srctree)
- srctreeparent = None
- tmpsrcdir = None
- else:
- srctree = None
- srctreeparent = get_default_srctree(config)
- bb.utils.mkdirhier(srctreeparent)
- tmpsrcdir = tempfile.mkdtemp(prefix='devtoolsrc', dir=srctreeparent)
-
- if srctree and os.path.exists(srctree):
- if args.fetchuri:
- if not os.path.isdir(srctree):
- raise DevtoolError("Cannot fetch into source tree path %s as "
- "it exists and is not a directory" %
- srctree)
- elif os.listdir(srctree):
- raise DevtoolError("Cannot fetch into source tree path %s as "
- "it already exists and is non-empty" %
- srctree)
- elif not args.fetchuri:
- if args.srctree:
- raise DevtoolError("Specified source tree %s could not be found" %
- args.srctree)
- elif srctree:
- raise DevtoolError("No source tree exists at default path %s - "
- "either create and populate this directory, "
- "or specify a path to a source tree, or a "
- "URI to fetch source from" % srctree)
- else:
- raise DevtoolError("You must either specify a source tree "
- "or a URI to fetch source from")
-
- if args.version:
- if '_' in args.version or ' ' in args.version:
- raise DevtoolError('Invalid version string "%s"' % args.version)
-
- if args.color == 'auto' and sys.stdout.isatty():
- color = 'always'
- else:
- color = args.color
- extracmdopts = ''
- if args.fetchuri:
- source = args.fetchuri
- if srctree:
- extracmdopts += ' -x %s' % srctree
- else:
- extracmdopts += ' -x %s' % tmpsrcdir
- else:
- source = srctree
- if args.recipename:
- extracmdopts += ' -N %s' % args.recipename
- if args.version:
- extracmdopts += ' -V %s' % args.version
- if args.binary:
- extracmdopts += ' -b'
- if args.also_native:
- extracmdopts += ' --also-native'
- if args.src_subdir:
- extracmdopts += ' --src-subdir "%s"' % args.src_subdir
-
- tempdir = tempfile.mkdtemp(prefix='devtool')
- try:
- try:
- stdout, _ = exec_build_env_command(config.init_path, basepath, 'recipetool --color=%s create -o %s "%s" %s' % (color, tempdir, source, extracmdopts))
- except bb.process.ExecutionError as e:
- if e.exitcode == 15:
- raise DevtoolError('Could not auto-determine recipe name, please specify it on the command line')
- else:
- raise DevtoolError('Command \'%s\' failed:\n%s' % (e.command, e.stdout))
-
- recipes = glob.glob(os.path.join(tempdir, '*.bb'))
- if recipes:
- recipename = os.path.splitext(os.path.basename(recipes[0]))[0].split('_')[0]
- if recipename in workspace:
- raise DevtoolError('A recipe with the same name as the one being created (%s) already exists in your workspace' % recipename)
- recipedir = os.path.join(config.workspace_path, 'recipes', recipename)
- bb.utils.mkdirhier(recipedir)
- recipefile = os.path.join(recipedir, os.path.basename(recipes[0]))
- appendfile = recipe_to_append(recipefile, config)
- if os.path.exists(appendfile):
- # This shouldn't be possible, but just in case
- raise DevtoolError('A recipe with the same name as the one being created already exists in your workspace')
- if os.path.exists(recipefile):
- raise DevtoolError('A recipe file %s already exists in your workspace; this shouldn\'t be there - please delete it before continuing' % recipefile)
- if tmpsrcdir:
- srctree = os.path.join(srctreeparent, recipename)
- if os.path.exists(tmpsrcdir):
- if os.path.exists(srctree):
- if os.path.isdir(srctree):
- try:
- os.rmdir(srctree)
- except OSError as e:
- if e.errno == errno.ENOTEMPTY:
- raise DevtoolError('Source tree path %s already exists and is not empty' % srctree)
- else:
- raise
- else:
- raise DevtoolError('Source tree path %s already exists and is not a directory' % srctree)
- logger.info('Using default source tree path %s' % srctree)
- shutil.move(tmpsrcdir, srctree)
- else:
- raise DevtoolError('Couldn\'t find source tree created by recipetool')
- bb.utils.mkdirhier(recipedir)
- shutil.move(recipes[0], recipefile)
- # Move any additional files created by recipetool
- for fn in os.listdir(tempdir):
- shutil.move(os.path.join(tempdir, fn), recipedir)
- else:
- raise DevtoolError('Command \'%s\' did not create any recipe file:\n%s' % (e.command, e.stdout))
- attic_recipe = os.path.join(config.workspace_path, 'attic', recipename, os.path.basename(recipefile))
- if os.path.exists(attic_recipe):
- logger.warn('A modified recipe from a previous invocation exists in %s - you may wish to move this over the top of the new recipe if you had changes in it that you want to continue with' % attic_recipe)
- finally:
- if tmpsrcdir and os.path.exists(tmpsrcdir):
- shutil.rmtree(tmpsrcdir)
- shutil.rmtree(tempdir)
-
- for fn in os.listdir(recipedir):
- _add_md5(config, recipename, os.path.join(recipedir, fn))
-
- if args.fetchuri and not args.no_git:
- setup_git_repo(srctree, args.version, 'devtool')
-
- initial_rev = None
- if os.path.exists(os.path.join(srctree, '.git')):
- (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
- initial_rev = stdout.rstrip()
-
- tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
- rd = oe.recipeutils.parse_recipe(recipefile, None, tinfoil.config_data)
- if not rd:
- return 1
-
- if args.src_subdir:
- srctree = os.path.join(srctree, args.src_subdir)
-
- bb.utils.mkdirhier(os.path.dirname(appendfile))
- with open(appendfile, 'w') as f:
- f.write('inherit externalsrc\n')
- f.write('EXTERNALSRC = "%s"\n' % srctree)
-
- b_is_s = use_external_build(args.same_dir, args.no_same_dir, rd)
- if b_is_s:
- f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree)
- if initial_rev:
- f.write('\n# initial_rev: %s\n' % initial_rev)
-
- if args.binary:
- f.write('do_install_append() {\n')
- f.write(' rm -rf ${D}/.git\n')
- f.write(' rm -f ${D}/singletask.lock\n')
- f.write('}\n')
-
- if bb.data.inherits_class('npm', rd):
- f.write('do_install_append() {\n')
- f.write(' # Remove files added to source dir by devtool/externalsrc\n')
- f.write(' rm -f ${NPM_INSTALLDIR}/singletask.lock\n')
- f.write(' rm -rf ${NPM_INSTALLDIR}/.git\n')
- f.write(' rm -rf ${NPM_INSTALLDIR}/oe-local-files\n')
- f.write(' for symlink in ${EXTERNALSRC_SYMLINKS} ; do\n')
- f.write(' rm -f ${NPM_INSTALLDIR}/${symlink%%:*}\n')
- f.write(' done\n')
- f.write('}\n')
-
- _add_md5(config, recipename, appendfile)
-
- logger.info('Recipe %s has been automatically created; further editing may be required to make it fully functional' % recipefile)
-
- tinfoil.shutdown()
-
- return 0
-
-
def _check_compatible_recipe(pn, d):
    """Raise DevtoolError if the recipe cannot be handled by devtool.

    Checks a handful of special-case recipe names plus several inherited
    classes that devtool is unable to work with.
    """
    if pn == 'perf':
        raise DevtoolError("The perf recipe does not actually check out "
                           "source and thus cannot be supported by this tool")

    if pn in ['kernel-devsrc', 'package-index'] or pn.startswith('gcc-source'):
        raise DevtoolError("The %s recipe is not supported by this tool" % pn)

    # Recipes inheriting any of these classes have no conventional source
    # tree to work on; checked in the same order as before
    unsupported_classes = (
        ('image', "The %s recipe is an image, and therefore is not supported by this tool"),
        ('populate_sdk', "The %s recipe is an SDK, and therefore is not supported by this tool"),
        ('packagegroup', "The %s recipe is a packagegroup, and therefore is not supported by this tool"),
        ('meta', "The %s recipe is a meta-recipe, and therefore is not supported by this tool"),
    )
    for classname, message in unsupported_classes:
        if bb.data.inherits_class(classname, d):
            raise DevtoolError(message % pn)

    if bb.data.inherits_class('externalsrc', d) and d.getVar('EXTERNALSRC', True):
        raise DevtoolError("externalsrc is currently enabled for the %s "
                           "recipe. This prevents the normal do_patch task "
                           "from working. You will need to disable this "
                           "first." % pn)
-
-def _move_file(src, dst):
- """Move a file. Creates all the directory components of destination path."""
- dst_d = os.path.dirname(dst)
- if dst_d:
- bb.utils.mkdirhier(dst_d)
- shutil.move(src, dst)
-
def _git_ls_tree(repodir, treeish='HEAD', recursive=False):
    """List contents of a git treeish.

    Returns a dict mapping each path to its [mode, type, sha] fields as
    reported by 'git ls-tree'.
    """
    import bb
    command = ['git', 'ls-tree', '-z', treeish] + (['-r'] if recursive else [])
    output, _ = bb.process.run(command, cwd=repodir)
    entries = {}
    # -z output: NUL-separated "<mode> <type> <sha>\t<path>" records
    for record in output.split('\0'):
        if not record:
            continue
        fields = record.split(None, 4)
        entries[fields[3]] = fields[0:3]
    return entries
-
def _git_exclude_path(srctree, path):
    """Return pathspec (list of paths) that excludes certain path.

    If *path* is tracked at HEAD, the returned list is every other tracked
    path; otherwise ['.'] (i.e. no exclusion).
    """
    # NOTE: "Filtering out" files/paths in this way is not entirely reliable -
    # we don't catch files that are deleted, for example. A more reliable way
    # to implement this would be to use "negative pathspecs" which were
    # introduced in Git v1.9.0. Revisit this when/if the required Git version
    # becomes greater than that.
    path = os.path.normpath(path)
    # Only need a recursive listing when the path has multiple components
    recurse = len(path.split(os.path.sep)) > 1
    # list() so this works on Python 3 too, where keys() is a view that
    # does not support remove()
    git_files = list(_git_ls_tree(srctree, 'HEAD', recurse))
    if path in git_files:
        git_files.remove(path)
        return git_files
    else:
        return ['.']
-
-def _ls_tree(directory):
- """Recursive listing of files in a directory"""
- ret = []
- for root, dirs, files in os.walk(directory):
- ret.extend([os.path.relpath(os.path.join(root, fname), directory) for
- fname in files])
- return ret
-
-
def extract(args, config, basepath, workspace):
    """Entry point for the devtool 'extract' subcommand.

    Checks out the source for args.recipename into args.srctree.
    Returns 0 on success, 1 on failure.
    """
    import bb

    ti = _prep_extract_operation(config, basepath, args.recipename)
    if not ti:
        # Error already shown
        return 1

    recipe_data = parse_recipe(config, ti, args.recipename, True)
    if not recipe_data:
        return 1

    tree_path = os.path.abspath(args.srctree)
    base_rev = _extract_source(tree_path, args.keep_temp, args.branch, False, recipe_data)
    logger.info('Source tree extracted to %s' % tree_path)

    return 0 if base_rev else 1
-
def sync(args, config, basepath, workspace):
    """Entry point for the devtool 'sync' subcommand.

    Re-extracts the recipe's source and synchronizes it into an existing
    source tree. Returns 0 on success, 1 on failure.
    """
    import bb

    ti = _prep_extract_operation(config, basepath, args.recipename)
    if not ti:
        # Error already shown
        return 1

    recipe_data = parse_recipe(config, ti, args.recipename, True)
    if not recipe_data:
        return 1

    tree_path = os.path.abspath(args.srctree)
    base_rev = _extract_source(tree_path, args.keep_temp, args.branch, True, recipe_data)
    logger.info('Source tree %s synchronized' % tree_path)

    return 0 if base_rev else 1
-
class BbTaskExecutor(object):
    """Class for executing bitbake tasks for a recipe

    FIXME: This is very awkward. Unfortunately it's not currently easy to
    properly execute tasks outside of bitbake itself, until then this has to
    suffice if we are to handle e.g. linux-yocto's extra tasks
    """

    def __init__(self, rdata):
        # rdata: the recipe's datastore; executed: task names already run,
        # so each task is executed at most once
        self.rdata = rdata
        self.executed = []

    def exec_func(self, func, report):
        """Run bitbake task function *func*, executing its 'deps' tasks first.

        When *report* is true, log each task as it is executed.
        """
        if func in self.executed:
            return
        # Recurse into declared task dependencies first
        deps = self.rdata.getVarFlag(func, 'deps', False)
        for taskdepfunc in (deps or []):
            self.exec_func(taskdepfunc, True)
        if report:
            logger.info('Executing %s...' % func)
        fn = self.rdata.getVar('FILE', True)
        localdata = bb.build._task_data(fn, func, self.rdata)
        try:
            bb.build.exec_func(func, localdata)
        except bb.build.FuncFailed as e:
            raise DevtoolError(str(e))
        self.executed.append(func)
-
-
class PatchTaskExecutor(BbTaskExecutor):
    """BbTaskExecutor that snapshots each task's effect on S into git.

    Once check_git is enabled by the caller, any changes left in the source
    tree after a task are committed, giving one commit per task.
    """
    def __init__(self, rdata):
        # Caller enables this once the source tree exists; until then no
        # git operations are attempted
        self.check_git = False
        super(PatchTaskExecutor, self).__init__(rdata)

    def exec_func(self, func, report):
        """Run task *func* via the parent class, then commit resulting changes."""
        from oe.patch import GitApplyTree
        srcsubdir = self.rdata.getVar('S', True)
        haspatches = False
        if func == 'do_patch':
            # Normalise the "patches" subdir before patching: remember a
            # non-empty one, drop an empty one so it isn't committed
            patchdir = os.path.join(srcsubdir, 'patches')
            if os.path.exists(patchdir):
                if os.listdir(patchdir):
                    haspatches = True
                else:
                    os.rmdir(patchdir)

        super(PatchTaskExecutor, self).exec_func(func, report)
        if self.check_git and os.path.exists(srcsubdir):
            if func == 'do_patch':
                # Remove the patches dir left by do_patch
                # (NOTE(review): presumably recreated by the git-based
                # patching - confirm); if one was present beforehand,
                # restore its tracked contents from git
                if os.path.exists(patchdir):
                    shutil.rmtree(patchdir)
                    if haspatches:
                        stdout, _ = bb.process.run('git status --porcelain patches', cwd=srcsubdir)
                        if stdout:
                            bb.process.run('git checkout patches', cwd=srcsubdir)

            # Commit whatever the task changed; the ignore_commit_prefix
            # marks the commit so patch generation can skip it later
            stdout, _ = bb.process.run('git status --porcelain', cwd=srcsubdir)
            if stdout:
                bb.process.run('git add .; git commit -a -m "Committing changes from %s\n\n%s"' % (func, GitApplyTree.ignore_commit_prefix + ' - from %s' % func), cwd=srcsubdir)
-
-
def _prep_extract_operation(config, basepath, recipename, tinfoil=None):
    """HACK: Ugly workaround for making sure that requirements are met when
    trying to extract a package. Returns the tinfoil instance to be used.

    Returns None (after the error has been reported by parse_recipe) if the
    recipe could not be parsed.
    """
    if not tinfoil:
        tinfoil = setup_tinfoil(basepath=basepath)

    rd = parse_recipe(config, tinfoil, recipename, True)
    if not rd:
        # parse_recipe has already reported the error
        return None

    if bb.data.inherits_class('kernel-yocto', rd):
        # kernel-yocto recipes need kern-tools-native built first; shut down
        # tinfoil before invoking bitbake and re-create it afterwards
        # (NOTE(review): presumably bitbake cannot run while tinfoil is
        # active - confirm)
        tinfoil.shutdown()
        try:
            stdout, _ = exec_build_env_command(config.init_path, basepath,
                                               'bitbake kern-tools-native')
            tinfoil = setup_tinfoil(basepath=basepath)
        except bb.process.ExecutionError as err:
            raise DevtoolError("Failed to build kern-tools-native:\n%s" %
                               err.stdout)
    return tinfoil
-
-
def _extract_source(srctree, keep_temp, devbranch, sync, d):
    """Extract sources of a recipe to srctree, as a git tree on *devbranch*.

    Arguments:
        srctree -- output directory for the source
        keep_temp -- if True, preserve the temporary working directory
        devbranch -- name of the git branch to create for the source
        sync -- if True, synchronize into an existing tree instead of
                creating a new one
        d -- datastore of the recipe to extract
    Returns the initial git revision of the extracted source, or None.
    """
    import bb.event
    import oe.recipeutils

    def eventfilter(name, handler, event, d):
        """Bitbake event filter for devtool extract operation"""
        # Suppress all event handlers except the base one
        if name == 'base_eventhandler':
            return True
        else:
            return False

    if hasattr(bb.event, 'set_eventfilter'):
        bb.event.set_eventfilter(eventfilter)

    pn = d.getVar('PN', True)

    _check_compatible_recipe(pn, d)

    # Validate the output path up front
    if sync:
        if not os.path.exists(srctree):
            raise DevtoolError("output path %s does not exist" % srctree)
    else:
        if os.path.exists(srctree):
            if not os.path.isdir(srctree):
                raise DevtoolError("output path %s exists and is not a directory" %
                                   srctree)
            elif os.listdir(srctree):
                raise DevtoolError("output path %s already exists and is "
                                   "non-empty" % srctree)

    if 'noexec' in (d.getVarFlags('do_unpack', False) or []):
        raise DevtoolError("The %s recipe has do_unpack disabled, unable to "
                           "extract source" % pn)

    if not sync:
        # Prepare for shutil.move later on: ensure the parents exist but the
        # directory itself does not
        bb.utils.mkdirhier(srctree)
        os.rmdir(srctree)

    # We don't want notes to be printed, they are too verbose
    origlevel = bb.logger.getEffectiveLevel()
    if logger.getEffectiveLevel() > logging.DEBUG:
        bb.logger.setLevel(logging.WARNING)

    initial_rev = None
    tempdir = tempfile.mkdtemp(prefix='devtool')
    try:
        crd = d.createCopy()
        # Make a subdir so we guard against WORKDIR==S
        workdir = os.path.join(tempdir, 'workdir')
        crd.setVar('WORKDIR', workdir)
        crd.setVar('T', os.path.join(tempdir, 'temp'))
        if not crd.getVar('S', True).startswith(workdir):
            # Usually a shared workdir recipe (kernel, gcc)
            # Try to set a reasonable default
            if bb.data.inherits_class('kernel', d):
                crd.setVar('S', '${WORKDIR}/source')
            else:
                crd.setVar('S', '${WORKDIR}/%s' % os.path.basename(d.getVar('S', True)))
        if bb.data.inherits_class('kernel', d):
            # We don't want to move the source to STAGING_KERNEL_DIR here
            crd.setVar('STAGING_KERNEL_DIR', '${S}')

        task_executor = PatchTaskExecutor(crd)

        crd.setVar('EXTERNALSRC_forcevariable', '')

        logger.info('Fetching %s...' % pn)
        task_executor.exec_func('do_fetch', False)
        logger.info('Unpacking...')
        task_executor.exec_func('do_unpack', False)
        if bb.data.inherits_class('kernel-yocto', d):
            # Extra step for kernel to populate the source directory
            logger.info('Doing kernel checkout...')
            task_executor.exec_func('do_kernel_checkout', False)
        srcsubdir = crd.getVar('S', True)

        # From here on, snapshot every task's changes as a git commit
        task_executor.check_git = True

        # Move local source files into separate subdir
        recipe_patches = [os.path.basename(patch) for patch in
                          oe.recipeutils.get_recipe_patches(crd)]
        local_files = oe.recipeutils.get_recipe_local_files(crd)
        local_files = [fname for fname in local_files if
                       os.path.exists(os.path.join(workdir, fname))]
        if local_files:
            for fname in local_files:
                _move_file(os.path.join(workdir, fname),
                           os.path.join(tempdir, 'oe-local-files', fname))
            with open(os.path.join(tempdir, 'oe-local-files', '.gitignore'),
                      'w') as f:
                f.write('# Ignore local files, by default. Remove this file '
                        'if you want to commit the directory to Git\n*\n')

        if srcsubdir == workdir:
            # Find non-patch non-local sources that were "unpacked" to srctree
            # directory
            src_files = [fname for fname in _ls_tree(workdir) if
                         os.path.basename(fname) not in recipe_patches]
            # Force separate S so that patch files can be left out from srctree
            srcsubdir = tempfile.mkdtemp(dir=workdir)
            crd.setVar('S', srcsubdir)
            # Move source files to S
            for path in src_files:
                _move_file(os.path.join(workdir, path),
                           os.path.join(srcsubdir, path))
        elif os.path.dirname(srcsubdir) != workdir:
            # Handle if S is set to a subdirectory of the source
            srcsubdir = os.path.join(workdir, os.path.relpath(srcsubdir, workdir).split(os.sep)[0])

        scriptutils.git_convert_standalone_clone(srcsubdir)

        # Make sure that srcsubdir exists
        bb.utils.mkdirhier(srcsubdir)
        if not os.path.exists(srcsubdir) or not os.listdir(srcsubdir):
            logger.warning("no source unpacked to S, either the %s recipe "
                           "doesn't use any source or the correct source "
                           "directory could not be determined" % pn)

        setup_git_repo(srcsubdir, crd.getVar('PV', True), devbranch)

        (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srcsubdir)
        initial_rev = stdout.rstrip()

        # Use git-based patching so patches become commits on the branch
        crd.setVar('PATCHTOOL', 'git')

        logger.info('Patching...')
        task_executor.exec_func('do_patch', False)

        bb.process.run('git tag -f devtool-patched', cwd=srcsubdir)

        kconfig = None
        if bb.data.inherits_class('kernel-yocto', d):
            # Generate and store the kernel config
            logger.info('Generating kernel config')
            task_executor.exec_func('do_configure', False)
            kconfig = os.path.join(crd.getVar('B', True), '.config')


        tempdir_localdir = os.path.join(tempdir, 'oe-local-files')
        srctree_localdir = os.path.join(srctree, 'oe-local-files')

        if sync:
            bb.process.run('git fetch file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree)

            # Move the oe-local-files directory to srctree.
            # As oe-local-files is not part of the constructed git tree,
            # removing it directly during the sync might surprise the user.
            # Instead, move it to oe-local-files.bak and remind the user in
            # the log message.
            if os.path.exists(srctree_localdir + '.bak'):
                # Fix: previously this called
                # shutil.rmtree(srctree_localdir, srctree_localdir + '.bak'),
                # which passed the '.bak' path as rmtree's ignore_errors
                # argument and so removed the *current* local files directory
                # instead of the stale backup
                shutil.rmtree(srctree_localdir + '.bak')

            if os.path.exists(srctree_localdir):
                logger.info('Backing up current local file directory %s' % srctree_localdir)
                shutil.move(srctree_localdir, srctree_localdir + '.bak')

            if os.path.exists(tempdir_localdir):
                logger.info('Syncing local source files to srctree...')
                shutil.copytree(tempdir_localdir, srctree_localdir)
        else:
            # Move oe-local-files directory to srctree
            if os.path.exists(tempdir_localdir):
                logger.info('Adding local source files to srctree...')
                shutil.move(tempdir_localdir, srcsubdir)

            shutil.move(srcsubdir, srctree)

            if kconfig:
                logger.info('Copying kernel config to srctree')
                shutil.copy2(kconfig, srctree)

    finally:
        bb.logger.setLevel(origlevel)

        if keep_temp:
            logger.info('Preserving temporary directory %s' % tempdir)
        else:
            shutil.rmtree(tempdir)
    return initial_rev
-
def _add_md5(config, recipename, filename):
    """Record checksum of a file (or recursively for a directory) to the md5-file of the workspace"""
    import bb.utils

    md5_path = os.path.join(config.workspace_path, '.devtool_md5')

    def _record(fn):
        # One "recipe|relative-path|md5" line per tracked file
        checksum = bb.utils.md5_file(fn)
        with open(md5_path, 'a') as handle:
            handle.write('%s|%s|%s\n' % (recipename, os.path.relpath(fn, config.workspace_path), checksum))

    if not os.path.isdir(filename):
        _record(filename)
    else:
        for root, _, files in os.walk(filename):
            for entry in files:
                _record(os.path.join(root, entry))
-
def _check_preserve(config, recipename):
    """Check if a file was manually changed and needs to be saved in 'attic'
    directory.

    Rewrites the workspace .devtool_md5 file, dropping the entries belonging
    to *recipename*: files whose current checksum still matches are simply
    removed; files modified since they were written are moved into
    attic/<recipename> instead.
    """
    import bb.utils
    origfile = os.path.join(config.workspace_path, '.devtool_md5')
    newfile = os.path.join(config.workspace_path, '.devtool_md5_new')
    preservepath = os.path.join(config.workspace_path, 'attic', recipename)
    with open(origfile, 'r') as f:
        with open(newfile, 'w') as tf:
            for line in f:
                splitline = line.rstrip().split('|')
                if splitline[0] == recipename:
                    removefile = os.path.join(config.workspace_path, splitline[1])
                    try:
                        md5 = bb.utils.md5_file(removefile)
                    except IOError as err:
                        if err.errno == errno.ENOENT:
                            # File no longer exists, skip it
                            continue
                        else:
                            raise
                    if splitline[2] != md5:
                        # Modified since recorded - preserve rather than delete
                        bb.utils.mkdirhier(preservepath)
                        preservefile = os.path.basename(removefile)
                        logger.warn('File %s modified since it was written, preserving in %s' % (preservefile, preservepath))
                        shutil.move(removefile, os.path.join(preservepath, preservefile))
                    else:
                        os.remove(removefile)
                else:
                    # Entry for some other recipe - keep it
                    tf.write(line)
    os.rename(newfile, origfile)
-
def modify(args, config, basepath, workspace):
    """Entry point for the devtool 'modify' subcommand.

    Sets up a recipe to build from an external source tree: optionally
    extracts the source, then writes a workspace bbappend pointing
    EXTERNALSRC at it. Returns 0 on success, 1 on failure.
    """
    import bb
    import oe.recipeutils

    if args.recipename in workspace:
        raise DevtoolError("recipe %s is already in your workspace" %
                           args.recipename)

    tinfoil = setup_tinfoil(basepath=basepath)
    rd = parse_recipe(config, tinfoil, args.recipename, True)
    if not rd:
        return 1

    # The given name may have been an alias/provider; use the real PN
    pn = rd.getVar('PN', True)
    if pn != args.recipename:
        logger.info('Mapping %s to %s' % (args.recipename, pn))
    if pn in workspace:
        raise DevtoolError("recipe %s is already in your workspace" %
                           pn)

    if args.srctree:
        srctree = os.path.abspath(args.srctree)
    else:
        srctree = get_default_srctree(config, pn)

    if args.no_extract and not os.path.isdir(srctree):
        raise DevtoolError("--no-extract specified and source path %s does "
                           "not exist or is not a directory" %
                           srctree)
    if not args.no_extract:
        tinfoil = _prep_extract_operation(config, basepath, pn, tinfoil)
        if not tinfoil:
            # Error already shown
            return 1

    recipefile = rd.getVar('FILE', True)
    appendfile = recipe_to_append(recipefile, config, args.wildcard)
    if os.path.exists(appendfile):
        raise DevtoolError("Another variant of recipe %s is already in your "
                           "workspace (only one variant of a recipe can "
                           "currently be worked on at once)"
                           % pn)

    _check_compatible_recipe(pn, rd)

    initial_rev = None
    commits = []
    if not args.no_extract:
        initial_rev = _extract_source(srctree, False, args.branch, False, rd)
        if not initial_rev:
            return 1
        logger.info('Source tree extracted to %s' % srctree)
        # Get list of commits since this revision
        (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_rev, cwd=srctree)
        commits = stdout.split()
    else:
        if os.path.exists(os.path.join(srctree, '.git')):
            # Check if it's a tree previously extracted by us
            try:
                (stdout, _) = bb.process.run('git branch --contains devtool-base', cwd=srctree)
            except bb.process.ExecutionError:
                stdout = ''
            for line in stdout.splitlines():
                # Only use devtool-base if the current branch contains it
                if line.startswith('*'):
                    (stdout, _) = bb.process.run('git rev-parse devtool-base', cwd=srctree)
                    initial_rev = stdout.rstrip()
            if not initial_rev:
                # Otherwise, just grab the head revision
                (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
                initial_rev = stdout.rstrip()

    # Check that recipe isn't using a shared workdir
    s = os.path.abspath(rd.getVar('S', True))
    workdir = os.path.abspath(rd.getVar('WORKDIR', True))
    if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir:
        # Handle if S is set to a subdirectory of the source
        srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1]
        srctree = os.path.join(srctree, srcsubdir)

    # Write the workspace bbappend that redirects the build to srctree
    bb.utils.mkdirhier(os.path.dirname(appendfile))
    with open(appendfile, 'w') as f:
        f.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n')
        # Local files can be modified/tracked in separate subdir under srctree
        # Mostly useful for packages with S != WORKDIR
        f.write('FILESPATH_prepend := "%s:"\n' %
                os.path.join(srctree, 'oe-local-files'))

        f.write('\ninherit externalsrc\n')
        f.write('# NOTE: We use pn- overrides here to avoid affecting multiple variants in the case where the recipe uses BBCLASSEXTEND\n')
        f.write('EXTERNALSRC_pn-%s = "%s"\n' % (pn, srctree))

        b_is_s = use_external_build(args.same_dir, args.no_same_dir, rd)
        if b_is_s:
            f.write('EXTERNALSRC_BUILD_pn-%s = "%s"\n' % (pn, srctree))

        if bb.data.inherits_class('kernel', rd):
            f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout '
                    'do_fetch do_unpack do_patch do_kernel_configme do_kernel_configcheck"\n')
            f.write('\ndo_configure_append() {\n'
                    '    cp ${B}/.config ${S}/.config.baseline\n'
                    '    ln -sfT ${B}/.config ${S}/.config.new\n'
                    '}\n')
        if initial_rev:
            # Recorded as comments so update-recipe (_get_patchset_revs)
            # can recover them later
            f.write('\n# initial_rev: %s\n' % initial_rev)
            for commit in commits:
                f.write('# commit: %s\n' % commit)

    _add_md5(config, pn, appendfile)

    logger.info('Recipe %s now set up to build from %s' % (pn, srctree))

    tinfoil.shutdown()

    return 0
-
-def _get_patchset_revs(args, srctree, recipe_path):
- """Get initial and update rev of a recipe. These are the start point of the
- whole patchset and start point for the patches to be re-generated/updated.
- """
- import bb
-
- if args.initial_rev:
- return args.initial_rev, args.initial_rev
-
- # Parse initial rev from recipe
- commits = []
- initial_rev = None
- with open(recipe_path, 'r') as f:
- for line in f:
- if line.startswith('# initial_rev:'):
- initial_rev = line.split(':')[-1].strip()
- elif line.startswith('# commit:'):
- commits.append(line.split(':')[-1].strip())
-
- update_rev = initial_rev
- changed_revs = None
- if initial_rev:
- # Find first actually changed revision
- stdout, _ = bb.process.run('git rev-list --reverse %s..HEAD' %
- initial_rev, cwd=srctree)
- newcommits = stdout.split()
- for i in xrange(min(len(commits), len(newcommits))):
- if newcommits[i] == commits[i]:
- update_rev = commits[i]
-
- try:
- stdout, _ = bb.process.run('git cherry devtool-patched',
- cwd=srctree)
- except bb.process.ExecutionError as err:
- stdout = None
-
- if stdout is not None:
- changed_revs = []
- for line in stdout.splitlines():
- if line.startswith('+ '):
- rev = line.split()[1]
- if rev in newcommits:
- changed_revs.append(rev)
-
- return initial_rev, update_rev, changed_revs
-
-def _remove_file_entries(srcuri, filelist):
- """Remove file:// entries from SRC_URI"""
- remaining = filelist[:]
- entries = []
- for fname in filelist:
- basename = os.path.basename(fname)
- for i in xrange(len(srcuri)):
- if (srcuri[i].startswith('file://') and
- os.path.basename(srcuri[i].split(';')[0]) == basename):
- entries.append(srcuri[i])
- remaining.remove(fname)
- srcuri.pop(i)
- break
- return entries, remaining
-
-def _remove_source_files(args, files, destpath):
- """Unlink existing patch files"""
- for path in files:
- if args.append:
- if not destpath:
- raise Exception('destpath should be set here')
- path = os.path.join(destpath, os.path.basename(path))
-
- if os.path.exists(path):
- logger.info('Removing file %s' % path)
- # FIXME "git rm" here would be nice if the file in question is
- # tracked
- # FIXME there's a chance that this file is referred to by
- # another recipe, in which case deleting wouldn't be the
- # right thing to do
- os.remove(path)
- # Remove directory if empty
- try:
- os.rmdir(os.path.dirname(path))
- except OSError as ose:
- if ose.errno != errno.ENOTEMPTY:
- raise
-
-
def _export_patches(srctree, rd, start_rev, destdir):
    """Export patches from srctree to given location.
    Returns three-tuple of dicts:
    1. updated - patches that already exist in SRCURI
    2. added - new patches that don't exist in SRCURI
    3. removed - patches that exist in SRCURI but not in exported patches
    In each dict the key is the 'basepath' of the URI and value is the
    absolute path to the existing file in recipe space (if any).
    """
    import oe.recipeutils
    from oe.patch import GitApplyTree
    updated = OrderedDict()
    added = OrderedDict()
    # Optional 'NNNN-' sequence prefix, then the rest of the patch name
    seqpatch_re = re.compile('^([0-9]{4}-)?(.+)')

    existing_patches = dict((os.path.basename(path), path) for path in
                            oe.recipeutils.get_recipe_patches(rd))

    # Generate patches from Git, exclude local files directory
    patch_pathspec = _git_exclude_path(srctree, 'oe-local-files')
    GitApplyTree.extractPatches(srctree, start_rev, destdir, patch_pathspec)

    new_patches = sorted(os.listdir(destdir))
    for new_patch in new_patches:
        # Strip numbering from patch names. If it's a git sequence named patch,
        # the numbers might not match up since we are starting from a different
        # revision. This does assume that people are using unique shortlog
        # values, but they ought to be anyway...
        new_basename = seqpatch_re.match(new_patch).group(2)
        found = False
        for old_patch in existing_patches:
            old_basename = seqpatch_re.match(old_patch).group(2)
            if new_basename == old_basename:
                # Same patch as an existing one (ignoring numbering):
                # mark it updated and take it out of existing_patches
                updated[new_patch] = existing_patches.pop(old_patch)
                found = True
                # Rename patch files
                if new_patch != old_patch:
                    os.rename(os.path.join(destdir, new_patch),
                              os.path.join(destdir, old_patch))
                break
        if not found:
            added[new_patch] = None
    # Whatever is left in existing_patches was not re-exported => removed
    return (updated, added, existing_patches)
-
-
def _create_kconfig_diff(srctree, rd, outfile):
    """Create a kconfig fragment at *outfile* from the difference between
    .config.baseline and .config.new in *srctree*.

    Returns True if both config files existed (whether or not a fragment
    was written), False otherwise.
    """
    # Only update config fragment if both config files exist
    orig_config = os.path.join(srctree, '.config.baseline')
    new_config = os.path.join(srctree, '.config.new')
    if os.path.exists(orig_config) and os.path.exists(new_config):
        # Emit only lines added/changed in new_config, with no diff markup
        cmd = ['diff', '--new-line-format=%L', '--old-line-format=',
               '--unchanged-line-format=', orig_config, new_config]
        pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        stdout, stderr = pipe.communicate()
        # diff exits 1 when the files differ, 0 when identical
        if pipe.returncode == 1:
            logger.info("Updating config fragment %s" % outfile)
            with open(outfile, 'w') as fobj:
                fobj.write(stdout)
        elif pipe.returncode == 0:
            logger.info("Would remove config fragment %s" % outfile)
            if os.path.exists(outfile):
                # Remove fragment file in case of empty diff
                logger.info("Removing config fragment %s" % outfile)
                os.unlink(outfile)
        else:
            # Any other exit status means diff itself failed
            raise bb.process.ExecutionError(cmd, pipe.returncode, stdout, stderr)
        return True
    return False
-
-
def _export_local_files(srctree, rd, destdir):
    """Copy local files from srctree to given location.
    Returns three-tuple of dicts:
    1. updated - files that already exist in SRCURI
    2. added - new files that don't exist in SRCURI
    3. removed - files that exist in SRCURI but not in exported files
    In each dict the key is the 'basepath' of the URI and value is the
    absolute path to the existing file in recipe space (if any).
    """
    import oe.recipeutils

    # Find out local files (SRC_URI files that exist in the "recipe space").
    # Local files that reside in srctree are not included in patch generation.
    # Instead they are directly copied over the original source files (in
    # recipe space).
    existing_files = oe.recipeutils.get_recipe_local_files(rd)
    new_set = None
    updated = OrderedDict()
    added = OrderedDict()
    removed = OrderedDict()
    local_files_dir = os.path.join(srctree, 'oe-local-files')
    git_files = _git_ls_tree(srctree)
    if 'oe-local-files' in git_files:
        # If tracked by Git, take the files from srctree HEAD. First get
        # the tree object of the directory
        tmp_index = os.path.join(srctree, '.git', 'index.tmp.devtool')
        tree = git_files['oe-local-files'][2]
        # Check the tree out into destdir using a throwaway index so the
        # real working tree/index are untouched
        bb.process.run(['git', 'checkout', tree, '--', '.'], cwd=srctree,
                       env=dict(os.environ, GIT_WORK_TREE=destdir,
                                GIT_INDEX_FILE=tmp_index))
        # NOTE(review): .keys() is relied on being a list (it may get
        # .append()/.remove() calls below) - Python 2 only; wrap in list()
        # when porting to Python 3
        new_set = _git_ls_tree(srctree, tree, True).keys()
    elif os.path.isdir(local_files_dir):
        # If not tracked by Git, just copy from working copy
        new_set = _ls_tree(os.path.join(srctree, 'oe-local-files'))
        bb.process.run(['cp', '-ax',
                        os.path.join(srctree, 'oe-local-files', '.'), destdir])
    else:
        new_set = []

    # Special handling for kernel config
    if bb.data.inherits_class('kernel-yocto', rd):
        fragment_fn = 'devtool-fragment.cfg'
        fragment_path = os.path.join(destdir, fragment_fn)
        if _create_kconfig_diff(srctree, rd, fragment_path):
            if os.path.exists(fragment_path):
                if fragment_fn not in new_set:
                    new_set.append(fragment_fn)
                # Copy fragment to local-files
                if os.path.isdir(local_files_dir):
                    shutil.copy2(fragment_path, local_files_dir)
            else:
                if fragment_fn in new_set:
                    new_set.remove(fragment_fn)
                # Remove fragment from local-files
                if os.path.exists(os.path.join(local_files_dir, fragment_fn)):
                    os.unlink(os.path.join(local_files_dir, fragment_fn))

    if new_set is not None:
        for fname in new_set:
            if fname in existing_files:
                updated[fname] = existing_files.pop(fname)
            elif fname != '.gitignore':
                added[fname] = None

        # Anything left in existing_files was not exported => removed
        removed = existing_files
    return (updated, added, removed)
-
-
def _update_recipe_srcrev(args, srctree, rd, config_data):
    """Implement the 'srcrev' mode of update-recipe.

    Points SRCREV at the source tree's HEAD and updates/removes local files
    and patches accordingly, either in the recipe itself or (with
    args.append) in a bbappend. Returns True.

    NOTE(review): uses Python-2-only dict idioms (dict.values() + list
    concatenation, iteritems, items() + items()) - these need list()
    wrapping when porting to Python 3.
    """
    import bb
    import oe.recipeutils

    recipefile = rd.getVar('FILE', True)
    logger.info('Updating SRCREV in recipe %s' % os.path.basename(recipefile))

    # Get HEAD revision
    try:
        stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree)
    except bb.process.ExecutionError as err:
        raise DevtoolError('Failed to get HEAD revision in %s: %s' %
                           (srctree, err))
    srcrev = stdout.strip()
    # Sanity check: a full SHA-1 is 40 hex characters
    if len(srcrev) != 40:
        raise DevtoolError('Invalid hash returned by git: %s' % stdout)

    destpath = None
    remove_files = []
    patchfields = {}
    patchfields['SRCREV'] = srcrev
    orig_src_uri = rd.getVar('SRC_URI', False) or ''
    srcuri = orig_src_uri.split()
    tempdir = tempfile.mkdtemp(prefix='devtool')
    update_srcuri = False
    try:
        local_files_dir = tempfile.mkdtemp(dir=tempdir)
        upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir)
        if not args.no_remove:
            # Find list of existing patches in recipe file
            patches_dir = tempfile.mkdtemp(dir=tempdir)
            old_srcrev = (rd.getVar('SRCREV', False) or '')
            upd_p, new_p, del_p = _export_patches(srctree, rd, old_srcrev,
                                                  patches_dir)

            # Remove deleted local files and "overlapping" patches
            remove_files = del_f.values() + upd_p.values()
            if remove_files:
                removedentries = _remove_file_entries(srcuri, remove_files)[0]
                update_srcuri = True

        if args.append:
            # Write changes into a bbappend rather than the recipe itself
            files = dict((os.path.join(local_files_dir, key), val) for
                         key, val in upd_f.items() + new_f.items())
            removevalues = {}
            if update_srcuri:
                removevalues = {'SRC_URI': removedentries}
                patchfields['SRC_URI'] = '\\\n    '.join(srcuri)
            _, destpath = oe.recipeutils.bbappend_recipe(
                rd, args.append, files, wildcardver=args.wildcard_version,
                extralines=patchfields, removevalues=removevalues)
        else:
            # Modify the recipe in place, moving files into its files dir
            files_dir = os.path.join(os.path.dirname(recipefile),
                                     rd.getVar('BPN', True))
            for basepath, path in upd_f.iteritems():
                logger.info('Updating file %s' % basepath)
                _move_file(os.path.join(local_files_dir, basepath), path)
                update_srcuri= True
            for basepath, path in new_f.iteritems():
                logger.info('Adding new file %s' % basepath)
                _move_file(os.path.join(local_files_dir, basepath),
                           os.path.join(files_dir, basepath))
                srcuri.append('file://%s' % basepath)
                update_srcuri = True
            if update_srcuri:
                patchfields['SRC_URI'] = ' '.join(srcuri)
            oe.recipeutils.patch_recipe(rd, recipefile, patchfields)
    finally:
        shutil.rmtree(tempdir)
    if not 'git://' in orig_src_uri:
        logger.info('You will need to update SRC_URI within the recipe to '
                    'point to a git repository where you have pushed your '
                    'changes')

    _remove_source_files(args, remove_files, destpath)
    return True
-
def _update_recipe_patch(args, config, workspace, srctree, rd, config_data):
    """Implement the 'patch' mode of update-recipe.

    Regenerates patches and local files from the source tree's git history
    and applies the result to the recipe (or, with args.append, to a
    bbappend). Returns True if anything was updated, False otherwise.

    NOTE(review): uses Python-2-only dict idioms (dict.values() + list
    concatenation, iteritems, items() + items()) - these need list()
    wrapping when porting to Python 3.
    """
    import bb
    import oe.recipeutils

    recipefile = rd.getVar('FILE', True)
    append = workspace[args.recipename]['bbappend']
    if not os.path.exists(append):
        raise DevtoolError('unable to find workspace bbappend for recipe %s' %
                           args.recipename)

    initial_rev, update_rev, changed_revs = _get_patchset_revs(args, srctree, append)
    if not initial_rev:
        raise DevtoolError('Unable to find initial revision - please specify '
                           'it with --initial-rev')

    tempdir = tempfile.mkdtemp(prefix='devtool')
    try:
        local_files_dir = tempfile.mkdtemp(dir=tempdir)
        upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir)

        remove_files = []
        if not args.no_remove:
            # Get all patches from source tree and check if any should be removed
            all_patches_dir = tempfile.mkdtemp(dir=tempdir)
            upd_p, new_p, del_p = _export_patches(srctree, rd, initial_rev,
                                                  all_patches_dir)
            # Remove deleted local files and patches
            remove_files = del_f.values() + del_p.values()

        # Get updated patches from source tree
        patches_dir = tempfile.mkdtemp(dir=tempdir)
        upd_p, new_p, del_p = _export_patches(srctree, rd, update_rev,
                                              patches_dir)
        updatefiles = False
        updaterecipe = False
        destpath = None
        srcuri = (rd.getVar('SRC_URI', False) or '').split()
        if args.append:
            # Write everything into a bbappend instead of the recipe
            files = dict((os.path.join(local_files_dir, key), val) for
                         key, val in upd_f.items() + new_f.items())
            files.update(dict((os.path.join(patches_dir, key), val) for
                              key, val in upd_p.items() + new_p.items()))
            if files or remove_files:
                removevalues = None
                if remove_files:
                    removedentries, remaining = _remove_file_entries(
                        srcuri, remove_files)
                    if removedentries or remaining:
                        remaining = ['file://' + os.path.basename(item) for
                                     item in remaining]
                        removevalues = {'SRC_URI': removedentries + remaining}
                _, destpath = oe.recipeutils.bbappend_recipe(
                    rd, args.append, files,
                    removevalues=removevalues)
            else:
                logger.info('No patches or local source files needed updating')
        else:
            # Update existing files
            for basepath, path in upd_f.iteritems():
                logger.info('Updating file %s' % basepath)
                _move_file(os.path.join(local_files_dir, basepath), path)
                updatefiles = True
            for basepath, path in upd_p.iteritems():
                patchfn = os.path.join(patches_dir, basepath)
                if changed_revs is not None:
                    # Avoid updating patches that have not actually changed
                    # (first line of a git format-patch file carries the
                    # 40-char commit hash)
                    with open(patchfn, 'r') as f:
                        firstlineitems = f.readline().split()
                        if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40:
                            if not firstlineitems[1] in changed_revs:
                                continue
                logger.info('Updating patch %s' % basepath)
                _move_file(patchfn, path)
                updatefiles = True
            # Add any new files
            files_dir = os.path.join(os.path.dirname(recipefile),
                                     rd.getVar('BPN', True))
            for basepath, path in new_f.iteritems():
                logger.info('Adding new file %s' % basepath)
                _move_file(os.path.join(local_files_dir, basepath),
                           os.path.join(files_dir, basepath))
                srcuri.append('file://%s' % basepath)
                updaterecipe = True
            for basepath, path in new_p.iteritems():
                logger.info('Adding new patch %s' % basepath)
                _move_file(os.path.join(patches_dir, basepath),
                           os.path.join(files_dir, basepath))
                srcuri.append('file://%s' % basepath)
                updaterecipe = True
            # Update recipe, if needed
            if _remove_file_entries(srcuri, remove_files)[0]:
                updaterecipe = True
            if updaterecipe:
                logger.info('Updating recipe %s' % os.path.basename(recipefile))
                oe.recipeutils.patch_recipe(rd, recipefile,
                                            {'SRC_URI': ' '.join(srcuri)})
            elif not updatefiles:
                # Neither patches nor recipe were updated
                logger.info('No patches or files need updating')
                return False
    finally:
        shutil.rmtree(tempdir)

    _remove_source_files(args, remove_files, destpath)
    return True
-
def _guess_recipe_update_mode(srctree, rdata):
    """Heuristically choose 'srcrev' or 'patch' as the recipe update mode.

    If the recipe fetches from git and the HEAD of the source tree is
    already present on the upstream branch, the recipe can simply be
    updated to point at the new SRCREV; otherwise patches are needed.
    """
    uris = (rdata.getVar('SRC_URI', False) or '').split()
    git_entries = [entry for entry in uris if entry.startswith('git://')]
    if not git_entries:
        return 'patch'
    # Just consider the first git URI for now
    first_uri = git_entries[0]
    uri_params = bb.fetch.decodeurl(first_uri)[5]
    upstream_branch = uri_params.get('branch', 'master')
    # Does the current HEAD of the source tree exist on the upstream branch?
    out, _ = bb.process.run('git rev-parse HEAD', cwd=srctree)
    head = out.rstrip()
    out, _ = bb.process.run('git branch -r --contains %s' % head, cwd=srctree)
    remote_branches = [line.strip() for line in out.splitlines()]
    if ('origin/' + upstream_branch) in remote_branches:
        return 'srcrev'
    return 'patch'
-
def update_recipe(args, config, basepath, workspace):
    """Entry point for the devtool 'update-recipe' subcommand

    Applies changes from the external source tree back to the recipe,
    either by updating SRCREV or by (re)generating patches, optionally
    writing the result to a bbappend in another layer instead.
    """
    check_workspace_recipe(workspace, args.recipename)

    if args.append:
        # Validate the bbappend destination layer up front
        if not os.path.exists(args.append):
            raise DevtoolError('bbappend destination layer directory "%s" '
                               'does not exist' % args.append)
        if not os.path.exists(os.path.join(args.append, 'conf', 'layer.conf')):
            raise DevtoolError('conf/layer.conf not found in bbappend '
                               'destination layer "%s"' % args.append)

    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)

    rd = parse_recipe(config, tinfoil, args.recipename, True)
    if not rd:
        return 1

    srctree = workspace[args.recipename]['srctree']
    mode = args.mode
    if mode == 'auto':
        mode = _guess_recipe_update_mode(srctree, rd)

    if mode == 'patch':
        updated = _update_recipe_patch(args, config, workspace, srctree, rd, tinfoil.config_data)
    elif mode == 'srcrev':
        updated = _update_recipe_srcrev(args, srctree, rd, tinfoil.config_data)
    else:
        raise DevtoolError('update_recipe: invalid mode %s' % mode)

    if updated:
        recipefile = rd.getVar('FILE', True)
        if recipefile.startswith(config.workspace_path):
            logger.warn('Recipe file %s has been updated but is inside the workspace - you will need to move it (and any associated files next to it) out to the desired layer before using "devtool reset" in order to keep any changes' % recipefile)

    return 0
-
-
def status(args, config, basepath, workspace):
    """Entry point for the devtool 'status' subcommand

    Lists each recipe currently in the workspace together with its source
    tree and, where the recipe file itself lives in the workspace, the
    path to the recipe file. Always returns 0.
    """
    if workspace:
        # items() instead of Python-2-only iteritems() - behaviorally
        # identical on Python 2 and also works on Python 3
        for recipe, value in workspace.items():
            recipefile = value['recipefile']
            if recipefile:
                recipestr = ' (%s)' % recipefile
            else:
                recipestr = ''
            print("%s: %s%s" % (recipe, value['srctree'], recipestr))
    else:
        logger.info('No recipes currently in your workspace - you can use "devtool modify" to work on an existing recipe or "devtool add" to add a new one')
    return 0
-
-
def reset(args, config, basepath, workspace):
    """Entry point for the devtool 'reset' subcommand

    Removes one recipe (or all with -a/--all) from the workspace: cleans
    the recipe's sysroot output via bitbake (unless -n/--no-clean), moves
    any files under the workspace recipe/append directories into an
    'attic' directory for safekeeping, and removes those directories.
    The external source tree is left alone unless it is empty.
    """
    import bb
    if args.recipename:
        if args.all:
            raise DevtoolError("Recipe cannot be specified if -a/--all is used")
        else:
            check_workspace_recipe(workspace, args.recipename, checksrc=False)
    elif not args.all:
        raise DevtoolError("Recipe must be specified, or specify -a/--all to "
                           "reset all recipes")
    if args.all:
        recipes = workspace.keys()
    else:
        recipes = [args.recipename]

    if recipes and not args.no_clean:
        if len(recipes) == 1:
            logger.info('Cleaning sysroot for recipe %s...' % recipes[0])
        else:
            logger.info('Cleaning sysroot for recipes %s...' % ', '.join(recipes))
        # If the recipe file itself was created in the workspace, and
        # it uses BBCLASSEXTEND, then we need to also clean the other
        # variants
        targets = []
        for recipe in recipes:
            targets.append(recipe)
            recipefile = workspace[recipe]['recipefile']
            if recipefile:
                targets.extend(get_bbclassextend_targets(recipefile, recipe))
        try:
            exec_build_env_command(config.init_path, basepath, 'bitbake -c clean %s' % ' '.join(targets))
        except bb.process.ExecutionError as e:
            raise DevtoolError('Command \'%s\' failed, output:\n%s\nIf you '
                               'wish, you may specify -n/--no-clean to '
                               'skip running this command when resetting' %
                               (e.command, e.stdout))

    for pn in recipes:
        # Warn about (and preserve) any workspace files the user modified
        _check_preserve(config, pn)

        preservepath = os.path.join(config.workspace_path, 'attic', pn, pn)
        def preservedir(origdir):
            # Recursively move every file under origdir into the attic,
            # warning for each one, then remove the (now empty) directory
            if os.path.exists(origdir):
                for root, dirs, files in os.walk(origdir):
                    for fn in files:
                        logger.warn('Preserving %s in %s' % (fn, preservepath))
                        _move_file(os.path.join(origdir, fn),
                                   os.path.join(preservepath, fn))
                    for dn in dirs:
                        preservedir(os.path.join(root, dn))
                os.rmdir(origdir)

        preservedir(os.path.join(config.workspace_path, 'recipes', pn))
        # We don't automatically create this dir next to appends, but the user can
        preservedir(os.path.join(config.workspace_path, 'appends', pn))

        srctree = workspace[pn]['srctree']
        if os.path.isdir(srctree):
            if os.listdir(srctree):
                # We don't want to risk wiping out any work in progress
                logger.info('Leaving source tree %s as-is; if you no '
                            'longer need it then please delete it manually'
                            % srctree)
            else:
                # This is unlikely, but if it's empty we can just remove it
                os.rmdir(srctree)

    return 0
-
-
def get_default_srctree(config, recipename=''):
    """Return the default source tree location.

    Reads 'default_source_parent_dir' from the [General] section of the
    devtool config, falling back to the workspace path. With a recipe
    name, returns the per-recipe subdirectory under 'sources'; without
    one, returns the 'sources' directory itself.
    """
    parent = config.get('General', 'default_source_parent_dir', config.workspace_path)
    sources = os.path.join(parent, 'sources')
    return os.path.join(sources, recipename) if recipename else sources
-
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin

    Adds the add/modify/extract/sync/update-recipe/status/reset
    subparsers; each parser's func default is the entry point called by
    the devtool driver.
    """

    defsrctree = get_default_srctree(context.config)
    # 'add' - create a new recipe in the workspace
    parser_add = subparsers.add_parser('add', help='Add a new recipe',
                                       description='Adds a new recipe to the workspace to build a specified source tree. Can optionally fetch a remote URI and unpack it to create the source tree.',
                                       group='starting', order=100)
    parser_add.add_argument('recipename', nargs='?', help='Name for new recipe to add (just name - no version, path or extension). If not specified, will attempt to auto-detect it.')
    parser_add.add_argument('srctree', nargs='?', help='Path to external source tree. If not specified, a subdirectory of %s will be used.' % defsrctree)
    parser_add.add_argument('fetchuri', nargs='?', help='Fetch the specified URI and extract it to create the source tree')
    group = parser_add.add_mutually_exclusive_group()
    group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
    group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
    parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree (deprecated - pass as positional argument instead)', metavar='URI')
    parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)')
    parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true")
    parser_add.add_argument('--binary', '-b', help='Treat the source tree as something that should be installed verbatim (no compilation, same directory structure). Useful with binary packages e.g. RPMs.', action='store_true')
    parser_add.add_argument('--also-native', help='Also add native variant (i.e. support building recipe for the build host as well as the target machine)', action='store_true')
    parser_add.add_argument('--src-subdir', help='Specify subdirectory within source tree to use', metavar='SUBDIR')
    parser_add.set_defaults(func=add)

    # 'modify' - work on the source of an existing recipe
    parser_modify = subparsers.add_parser('modify', help='Modify the source for an existing recipe',
                                          description='Sets up the build environment to modify the source for an existing recipe. The default behaviour is to extract the source being fetched by the recipe into a git tree so you can work on it; alternatively if you already have your own pre-prepared source tree you can specify -n/--no-extract.',
                                          group='starting', order=90)
    parser_modify.add_argument('recipename', help='Name of existing recipe to edit (just name - no version, path or extension)')
    parser_modify.add_argument('srctree', nargs='?', help='Path to external source tree. If not specified, a subdirectory of %s will be used.' % defsrctree)
    parser_modify.add_argument('--wildcard', '-w', action="store_true", help='Use wildcard for unversioned bbappend')
    group = parser_modify.add_mutually_exclusive_group()
    group.add_argument('--extract', '-x', action="store_true", help='Extract source for recipe (default)')
    group.add_argument('--no-extract', '-n', action="store_true", help='Do not extract source, expect it to exist')
    group = parser_modify.add_mutually_exclusive_group()
    group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
    group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
    parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (when not using -n/--no-extract) (default "%(default)s")')
    parser_modify.set_defaults(func=modify)

    # 'extract' - just extract a recipe's source (no workspace needed)
    parser_extract = subparsers.add_parser('extract', help='Extract the source for an existing recipe',
                                           description='Extracts the source for an existing recipe',
                                           group='advanced')
    parser_extract.add_argument('recipename', help='Name of recipe to extract the source for')
    parser_extract.add_argument('srctree', help='Path to where to extract the source tree')
    parser_extract.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (default "%(default)s")')
    parser_extract.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
    parser_extract.set_defaults(func=extract, no_workspace=True)

    # 'sync' - refresh a previously extracted source tree
    parser_sync = subparsers.add_parser('sync', help='Synchronize the source tree for an existing recipe',
                                        description='Synchronize the previously extracted source tree for an existing recipe',
                                        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
                                        group='advanced')
    parser_sync.add_argument('recipename', help='Name of recipe to sync the source for')
    parser_sync.add_argument('srctree', help='Path to the source tree')
    parser_sync.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout')
    parser_sync.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
    parser_sync.set_defaults(func=sync)

    # 'update-recipe' - push source tree changes back into the recipe
    parser_update_recipe = subparsers.add_parser('update-recipe', help='Apply changes from external source tree to recipe',
                                                 description='Applies changes from external source tree to a recipe (updating/adding/removing patches as necessary, or by updating SRCREV). Note that these changes need to have been committed to the git repository in order to be recognised.',
                                                 group='working', order=-90)
    parser_update_recipe.add_argument('recipename', help='Name of recipe to update')
    parser_update_recipe.add_argument('--mode', '-m', choices=['patch', 'srcrev', 'auto'], default='auto', help='Update mode (where %(metavar)s is %(choices)s; default is %(default)s)', metavar='MODE')
    parser_update_recipe.add_argument('--initial-rev', help='Override starting revision for patches')
    parser_update_recipe.add_argument('--append', '-a', help='Write changes to a bbappend in the specified layer instead of the recipe', metavar='LAYERDIR')
    parser_update_recipe.add_argument('--wildcard-version', '-w', help='In conjunction with -a/--append, use a wildcard to make the bbappend apply to any recipe version', action='store_true')
    parser_update_recipe.add_argument('--no-remove', '-n', action="store_true", help='Don\'t remove patches, only add or update')
    parser_update_recipe.set_defaults(func=update_recipe)

    # 'status' - list workspace contents
    parser_status = subparsers.add_parser('status', help='Show workspace status',
                                          description='Lists recipes currently in your workspace and the paths to their respective external source trees',
                                          group='info', order=100)
    parser_status.set_defaults(func=status)

    # 'reset' - remove recipe(s) from the workspace
    parser_reset = subparsers.add_parser('reset', help='Remove a recipe from your workspace',
                                         description='Removes the specified recipe from your workspace (resetting its state)',
                                         group='working', order=-100)
    parser_reset.add_argument('recipename', nargs='?', help='Recipe to reset')
    parser_reset.add_argument('--all', '-a', action="store_true", help='Reset all recipes (clear workspace)')
    parser_reset.add_argument('--no-clean', '-n', action="store_true", help='Don\'t clean the sysroot to remove recipe output')
    parser_reset.set_defaults(func=reset)
diff --git a/yocto-poky/scripts/lib/devtool/upgrade.py b/yocto-poky/scripts/lib/devtool/upgrade.py
deleted file mode 100644
index a085f78c4..000000000
--- a/yocto-poky/scripts/lib/devtool/upgrade.py
+++ /dev/null
@@ -1,382 +0,0 @@
-# Development tool - upgrade command plugin
-#
-# Copyright (C) 2014-2015 Intel Corporation
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-"""Devtool upgrade plugin"""
-
-import os
-import sys
-import re
-import shutil
-import tempfile
-import logging
-import argparse
-import scriptutils
-import errno
-import bb
-import oe.recipeutils
-from devtool import standard
-from devtool import exec_build_env_command, setup_tinfoil, DevtoolError, parse_recipe, use_external_build
-
-logger = logging.getLogger('devtool')
-
def _run(cmd, cwd=''):
    """Run *cmd* in directory *cwd* via bb.process, logging it first."""
    logger.debug("Running command %s> %s" % (cwd, cmd))
    result = bb.process.run('%s' % cmd, cwd=cwd)
    return result
-
-def _get_srctree(tmpdir):
- srctree = tmpdir
- dirs = os.listdir(tmpdir)
- if len(dirs) == 1:
- srctree = os.path.join(tmpdir, dirs[0])
- return srctree
-
def _copy_source_code(orig, dest):
    """Move every file tracked under *orig* into *dest*, preserving layout."""
    for relpath in standard._ls_tree(orig):
        target = os.path.join(dest, relpath)
        # Create the destination directory before moving the file into it
        bb.utils.mkdirhier(os.path.dirname(target))
        shutil.move(os.path.join(orig, relpath), target)
-
-def _get_checksums(rf):
- import re
- checksums = {}
- with open(rf) as f:
- for line in f:
- for cs in ['md5sum', 'sha256sum']:
- m = re.match("^SRC_URI\[%s\].*=.*\"(.*)\"" % cs, line)
- if m:
- checksums[cs] = m.group(1)
- return checksums
-
-def _remove_patch_dirs(recipefolder):
- for root, dirs, files in os.walk(recipefolder):
- for d in dirs:
- shutil.rmtree(os.path.join(root,d))
-
def _recipe_contains(rd, var):
    """Return True if *var* is set within the recipe's own directory.

    Looks up which file(s) set *var* for this recipe and checks whether
    any of them live under the directory containing the recipe file
    itself (as opposed to e.g. being set from a conf/include elsewhere).
    """
    rf = rd.getVar('FILE', True)
    varfiles = oe.recipeutils.get_var_files(rf, [var], rd)
    # Only the file paths are needed, so iterate values() directly; the
    # old loop used Python-2-only iteritems() and its loop variable also
    # shadowed the 'var' parameter
    for fn in varfiles.values():
        if fn and fn.startswith(os.path.dirname(rf) + os.sep):
            return True
    return False
-
-def _rename_recipe_dirs(oldpv, newpv, path):
- for root, dirs, files in os.walk(path):
- for olddir in dirs:
- if olddir.find(oldpv) != -1:
- newdir = olddir.replace(oldpv, newpv)
- if olddir != newdir:
- shutil.move(os.path.join(path, olddir), os.path.join(path, newdir))
-
-def _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path):
- oldrecipe = os.path.basename(oldrecipe)
- if oldrecipe.endswith('_%s.bb' % oldpv):
- newrecipe = '%s_%s.bb' % (bpn, newpv)
- if oldrecipe != newrecipe:
- shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe))
- else:
- newrecipe = oldrecipe
- return os.path.join(path, newrecipe)
-
def _rename_recipe_files(oldrecipe, bpn, oldpv, newpv, path):
    """Rename version-named directories and the recipe file under *path*.

    Returns the full path of the (possibly renamed) recipe file.
    """
    _rename_recipe_dirs(oldpv, newpv, path)
    return _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path)
-
def _write_append(rc, srctree, same_dir, no_same_dir, rev, workspace, d):
    """Writes an append file

    rc -- path to the recipe file the append is for
    srctree -- external source tree to point the recipe at via externalsrc
    same_dir/no_same_dir -- user's build-directory preference flags
    rev -- initial source revision to record in the append (or None)
    workspace -- workspace base directory
    d -- datastore for the recipe
    Returns the path of the bbappend written. Raises DevtoolError if the
    recipe file does not exist.
    """
    if not os.path.exists(rc):
        raise DevtoolError("bbappend not created because %s does not exist" % rc)

    appendpath = os.path.join(workspace, 'appends')
    if not os.path.exists(appendpath):
        bb.utils.mkdirhier(appendpath)

    brf = os.path.basename(os.path.splitext(rc)[0]) # rc basename

    srctree = os.path.abspath(srctree)
    pn = d.getVar('PN', True)
    af = os.path.join(appendpath, '%s.bbappend' % brf)
    with open(af, 'w') as f:
        f.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n\n')
        f.write('inherit externalsrc\n')
        # Fix: the implicit string concatenation was missing a space, so
        # the comment written to the bbappend read "...affectingmultiple..."
        f.write(('# NOTE: We use pn- overrides here to avoid affecting '
                 'multiple variants in the case where the recipe uses BBCLASSEXTEND\n'))
        f.write('EXTERNALSRC_pn-%s = "%s"\n' % (pn, srctree))
        b_is_s = use_external_build(same_dir, no_same_dir, d)
        if b_is_s:
            f.write('EXTERNALSRC_BUILD_pn-%s = "%s"\n' % (pn, srctree))
        if rev:
            f.write('\n# initial_rev: %s\n' % rev)
    return af
-
-def _cleanup_on_error(rf, srctree):
- rfp = os.path.split(rf)[0] # recipe folder
- rfpp = os.path.split(rfp)[0] # recipes folder
- if os.path.exists(rfp):
- shutil.rmtree(b)
- if not len(os.listdir(rfpp)):
- os.rmdir(rfpp)
- srctree = os.path.abspath(srctree)
- if os.path.exists(srctree):
- shutil.rmtree(srctree)
-
def _upgrade_error(e, rf, srctree):
    """Clean up a failed upgrade and re-raise *e* as a DevtoolError.

    rf -- new recipe file, or None/empty if it was never created (in
          which case there is nothing to clean up)
    srctree -- extracted source tree
    """
    if rf:
        # Fix: was calling undefined 'cleanup_on_error' (missing the
        # leading underscore), which raised NameError on the error path
        _cleanup_on_error(rf, srctree)
    logger.error(e)
    raise DevtoolError(e)
-
-def _get_uri(rd):
- srcuris = rd.getVar('SRC_URI', True).split()
- if not len(srcuris):
- raise DevtoolError('SRC_URI not found on recipe')
- # Get first non-local entry in SRC_URI - usually by convention it's
- # the first entry, but not always!
- srcuri = None
- for entry in srcuris:
- if not entry.startswith('file://'):
- srcuri = entry
- break
- if not srcuri:
- raise DevtoolError('Unable to find non-local entry in SRC_URI')
- srcrev = '${AUTOREV}'
- if '://' in srcuri:
- # Fetch a URL
- rev_re = re.compile(';rev=([^;]+)')
- res = rev_re.search(srcuri)
- if res:
- srcrev = res.group(1)
- srcuri = rev_re.sub('', srcuri)
- return srcuri, srcrev
-
def _extract_new_source(newpv, srctree, no_patch, srcrev, branch, keep_temp, tinfoil, rd):
    """Extract sources of a recipe with a new version

    For git-fetched recipes, checks out the new revision directly in the
    existing srctree; otherwise fetches the new release archive, replaces
    the srctree contents with it and commits/tags the result. Unless
    no_patch is set, also attempts to rebase the recipe's patches on top.
    Returns (rev, md5, sha256); md5/sha256 are None for git sources.
    """

    def __run(cmd):
        """Simple wrapper which calls _run with srctree as cwd"""
        return _run(cmd, srctree)

    crd = rd.createCopy()

    pv = crd.getVar('PV', True)
    crd.setVar('PV', newpv)

    tmpsrctree = None
    uri, rev = _get_uri(crd)
    if srcrev:
        # An explicit --srcrev overrides any rev embedded in SRC_URI
        rev = srcrev
    if uri.startswith('git://'):
        # Git source: fetch and check out the new revision in place
        __run('git fetch')
        __run('git checkout %s' % rev)
        __run('git tag -f devtool-base-new')
        md5 = None
        sha256 = None
    else:
        # Archive source: fetch into a temp dir and replace srctree contents
        __run('git checkout devtool-base -b devtool-%s' % newpv)

        tmpdir = tempfile.mkdtemp(prefix='devtool')
        try:
            md5, sha256 = scriptutils.fetch_uri(tinfoil.config_data, uri, tmpdir, rev)
        except bb.fetch2.FetchError as e:
            raise DevtoolError(e)

        tmpsrctree = _get_srctree(tmpdir)
        srctree = os.path.abspath(srctree)

        # Delete all sources so we ensure no stray files are left over
        for item in os.listdir(srctree):
            if item in ['.git', 'oe-local-files']:
                continue
            itempath = os.path.join(srctree, item)
            if os.path.isdir(itempath):
                shutil.rmtree(itempath)
            else:
                os.remove(itempath)

        # Copy in new ones
        _copy_source_code(tmpsrctree, srctree)

        (stdout,_) = __run('git ls-files --modified --others --exclude-standard')
        for f in stdout.splitlines():
            __run('git add "%s"' % f)

        __run('git commit -q -m "Commit of upstream changes at version %s" --allow-empty' % newpv)
        __run('git tag -f devtool-base-%s' % newpv)

    (stdout, _) = __run('git rev-parse HEAD')
    rev = stdout.rstrip()

    if no_patch:
        patches = oe.recipeutils.get_recipe_patches(crd)
        if len(patches):
            logger.warn('By user choice, the following patches will NOT be applied')
            for patch in patches:
                logger.warn("%s" % os.path.basename(patch))
    else:
        # Rebase the recipe's patch branch onto the new base; a failed
        # rebase is reported but not fatal (user resolves conflicts)
        try:
            __run('git checkout devtool-patched -b %s' % branch)
            __run('git rebase %s' % rev)
            if uri.startswith('git://'):
                suffix = 'new'
            else:
                suffix = newpv
            __run('git tag -f devtool-patched-%s' % suffix)
        except bb.process.ExecutionError as e:
            logger.warn('Command \'%s\' failed:\n%s' % (e.command, e.stdout))

    if tmpsrctree:
        if keep_temp:
            logger.info('Preserving temporary directory %s' % tmpsrctree)
        else:
            shutil.rmtree(tmpsrctree)

    return (rev, md5, sha256)
-
def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, workspace, tinfoil, rd):
    """Creates the new recipe under workspace

    Copies the recipe and its associated files into the workspace
    'recipes' directory, renames version-named files/directories, then
    patches in the new PV/SRCREV/branch/checksum values. Returns the
    full path of the new recipe file.
    """

    bpn = rd.getVar('BPN', True)
    path = os.path.join(workspace, 'recipes', bpn)
    bb.utils.mkdirhier(path)
    oe.recipeutils.copy_recipe_files(rd, path)

    oldpv = rd.getVar('PV', True)
    if not newpv:
        newpv = oldpv
    origpath = rd.getVar('FILE', True)
    fullpath = _rename_recipe_files(origpath, bpn, oldpv, newpv, path)
    logger.debug('Upgraded %s => %s' % (origpath, fullpath))

    newvalues = {}
    # Only set PV in the recipe if it is set there (and actually changed)
    if _recipe_contains(rd, 'PV') and newpv != oldpv:
        newvalues['PV'] = newpv

    if srcrev:
        newvalues['SRCREV'] = srcrev

    if srcbranch:
        # Rewrite the branch parameter on the first git/gitsm SRC_URI entry
        src_uri = oe.recipeutils.split_var_value(rd.getVar('SRC_URI', False) or '')
        changed = False
        replacing = True
        new_src_uri = []
        for entry in src_uri:
            scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(entry)
            if replacing and scheme in ['git', 'gitsm']:
                branch = params.get('branch', 'master')
                if rd.expand(branch) != srcbranch:
                    # Handle case where branch is set through a variable
                    res = re.match(r'\$\{([^}@]+)\}', branch)
                    if res:
                        newvalues[res.group(1)] = srcbranch
                        # We know we won't change SRC_URI now, so break out
                        break
                    else:
                        params['branch'] = srcbranch
                        entry = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params))
                        changed = True
                replacing = False
            new_src_uri.append(entry)
        if changed:
            newvalues['SRC_URI'] = ' '.join(new_src_uri)

    # Drop any PR on upgrade
    newvalues['PR'] = None

    if md5 and sha256:
        newvalues['SRC_URI[md5sum]'] = md5
        newvalues['SRC_URI[sha256sum]'] = sha256

    # Re-parse the copied recipe so patch_recipe operates on the new file
    rd = oe.recipeutils.parse_recipe(fullpath, None, tinfoil.config_data)
    oe.recipeutils.patch_recipe(rd, fullpath, newvalues)

    return fullpath
-
def upgrade(args, config, basepath, workspace):
    """Entry point for the devtool 'upgrade' subcommand

    Validates the arguments, extracts the original and new source trees,
    creates the upgraded recipe in the workspace and writes a bbappend
    pointing it at the external source tree.
    """

    if args.recipename in workspace:
        raise DevtoolError("recipe %s is already in your workspace" % args.recipename)
    if not args.version and not args.srcrev:
        raise DevtoolError("You must provide a version using the --version/-V option, or for recipes that fetch from an SCM such as git, the --srcrev/-S option")
    if args.srcbranch and not args.srcrev:
        # Fix: the message has no format placeholder, so the old
        # '% args.recipename' raised TypeError instead of this error
        raise DevtoolError("If you specify --srcbranch/-B then you must use --srcrev/-S to specify the revision")

    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
    rd = parse_recipe(config, tinfoil, args.recipename, True)
    if not rd:
        return 1

    pn = rd.getVar('PN', True)
    if pn != args.recipename:
        logger.info('Mapping %s to %s' % (args.recipename, pn))
    if pn in workspace:
        raise DevtoolError("recipe %s is already in your workspace" % pn)

    if args.srctree:
        srctree = os.path.abspath(args.srctree)
    else:
        srctree = standard.get_default_srctree(config, pn)

    standard._check_compatible_recipe(pn, rd)
    old_srcrev = rd.getVar('SRCREV', True)
    if old_srcrev == 'INVALID':
        old_srcrev = None
    if old_srcrev and not args.srcrev:
        raise DevtoolError("Recipe specifies a SRCREV value; you must specify a new one when upgrading")
    if rd.getVar('PV', True) == args.version and old_srcrev == args.srcrev:
        raise DevtoolError("Current and upgrade versions are the same version")

    rf = None
    try:
        rev1 = standard._extract_source(srctree, False, 'devtool-orig', False, rd)
        rev2, md5, sha256 = _extract_new_source(args.version, srctree, args.no_patch,
                                                args.srcrev, args.branch, args.keep_temp,
                                                tinfoil, rd)
        rf = _create_new_recipe(args.version, md5, sha256, args.srcrev, args.srcbranch, config.workspace_path, tinfoil, rd)
    except bb.process.CmdError as e:
        # _upgrade_error cleans up and re-raises as DevtoolError
        _upgrade_error(e, rf, srctree)
    except DevtoolError as e:
        _upgrade_error(e, rf, srctree)
    standard._add_md5(config, pn, os.path.dirname(rf))

    af = _write_append(rf, srctree, args.same_dir, args.no_same_dir, rev2,
                       config.workspace_path, rd)
    standard._add_md5(config, pn, af)
    logger.info('Upgraded source extracted to %s' % srctree)
    logger.info('New recipe is %s' % rf)
    return 0
-
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin

    Adds the 'upgrade' subparser; its func default is the upgrade()
    entry point called by the devtool driver.
    """

    defsrctree = standard.get_default_srctree(context.config)

    parser_upgrade = subparsers.add_parser('upgrade', help='Upgrade an existing recipe',
                                           description='Upgrades an existing recipe to a new upstream version. Puts the upgraded recipe file into the workspace along with any associated files, and extracts the source tree to a specified location (in case patches need rebasing or adding to as a result of the upgrade).',
                                           group='starting')
    parser_upgrade.add_argument('recipename', help='Name of recipe to upgrade (just name - no version, path or extension)')
    parser_upgrade.add_argument('srctree', nargs='?', help='Path to where to extract the source tree. If not specified, a subdirectory of %s will be used.' % defsrctree)
    parser_upgrade.add_argument('--version', '-V', help='Version to upgrade to (PV)')
    parser_upgrade.add_argument('--srcrev', '-S', help='Source revision to upgrade to (if fetching from an SCM such as git)')
    parser_upgrade.add_argument('--srcbranch', '-B', help='Branch in source repository containing the revision to use (if fetching from an SCM such as git)')
    parser_upgrade.add_argument('--branch', '-b', default="devtool", help='Name for new development branch to checkout (default "%(default)s")')
    parser_upgrade.add_argument('--no-patch', action="store_true", help='Do not apply patches from the recipe to the new source code')
    group = parser_upgrade.add_mutually_exclusive_group()
    group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
    group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
    parser_upgrade.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
    parser_upgrade.set_defaults(func=upgrade)
diff --git a/yocto-poky/scripts/lib/devtool/utilcmds.py b/yocto-poky/scripts/lib/devtool/utilcmds.py
deleted file mode 100644
index b761a80f8..000000000
--- a/yocto-poky/scripts/lib/devtool/utilcmds.py
+++ /dev/null
@@ -1,233 +0,0 @@
-# Development tool - utility commands plugin
-#
-# Copyright (C) 2015-2016 Intel Corporation
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-"""Devtool utility plugins"""
-
-import os
-import sys
-import shutil
-import tempfile
-import logging
-import argparse
-import subprocess
-import scriptutils
-from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError
-from devtool import parse_recipe
-
-logger = logging.getLogger('devtool')
-
-
def edit_recipe(args, config, basepath, workspace):
    """Entry point for the devtool 'edit-recipe' subcommand

    Resolves the recipe file for args.recipename and opens it in the
    user's editor (via scriptutils.run_editor). Returns the editor's
    exit code, or 1 if the recipe could not be parsed.
    """
    if not args.any_recipe:
        # Default mode: the recipe file itself must live in the workspace
        check_workspace_recipe(workspace, args.recipename)
        recipefile = workspace[args.recipename]['recipefile']
        if not recipefile:
            raise DevtoolError("Recipe file for %s is not under the workspace" %
                               args.recipename)
        return scriptutils.run_editor(recipefile)

    # -a/--any-recipe: resolve the recipe through the bitbake datastore
    # instead of requiring it to be in the workspace
    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, True)
        if not rd:
            return 1
        recipefile = rd.getVar('FILE', True)
    finally:
        # Always shut tinfoil down, even if parsing failed
        tinfoil.shutdown()
    return scriptutils.run_editor(recipefile)
-
-
def configure_help(args, config, basepath, workspace):
    """Entry point for the devtool 'configure-help' subcommand

    Builds a header describing how the recipe's configure step is
    currently invoked (autotools CONFIGUREOPTS/EXTRA_OECONF, the cmake
    command line / EXTRA_OECMAKE, or the raw do_configure body), then
    pipes that header plus the configure script's own help output
    (./configure --help, or cmake -LH) through a pager.
    Returns the subprocess exit code on failure, 1 if the recipe could
    not be parsed; raises DevtoolError if do_configure is disabled or
    there is nothing to display.
    """
    import oe.utils

    check_workspace_recipe(workspace, args.recipename)
    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    # FIX: this module never imported bb, yet the code below references
    # bb.data.inherits_class() and bb.process.ExecutionError, which would
    # raise NameError when reached. bb only becomes importable after
    # setup_tinfoil() has added the bitbake lib directory to sys.path,
    # hence the function-local import here rather than at module level.
    import bb.data
    import bb.process
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False)
        if not rd:
            return 1
        b = rd.getVar('B', True)
        s = rd.getVar('S', True)
        configurescript = os.path.join(s, 'configure')
        # Disabled if flagged noexec or not registered as a task at all
        confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (rd.getVar('__BBTASKS', False) or [])
        configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS', True) or '')
        extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF', True) or '')
        extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE', True) or '')
        do_configure = rd.getVar('do_configure', True) or ''
        # Unexpanded copy so we can tell whether ${EXTRA_OECONF} is referenced
        do_configure_noexpand = rd.getVar('do_configure', False) or ''
        packageconfig = rd.getVarFlags('PACKAGECONFIG') or []
        # Only treat as autotools/cmake if do_configure actually uses the
        # class's configure helper (the recipe may have overridden it)
        autotools = bb.data.inherits_class('autotools', rd) and ('oe_runconf' in do_configure or 'autotools_do_configure' in do_configure)
        cmake = bb.data.inherits_class('cmake', rd) and ('cmake_do_configure' in do_configure)
        cmake_do_configure = rd.getVar('cmake_do_configure', True)
        pn = rd.getVar('PN', True)
    finally:
        tinfoil.shutdown()

    if 'doc' in packageconfig:
        # 'doc' is the varflag holding documentation, not a real option
        del packageconfig['doc']

    if autotools and not os.path.exists(configurescript):
        # autoconf recipes generate ./configure at do_configure time
        logger.info('Running do_configure to generate configure script')
        try:
            stdout, _ = exec_build_env_command(config.init_path, basepath,
                                               'bitbake -c configure %s' % args.recipename,
                                               stderr=subprocess.STDOUT)
        except bb.process.ExecutionError:
            # Best effort only - fall through and show what we can
            pass

    if confdisabled or do_configure.strip() in ('', ':'):
        raise DevtoolError("do_configure task has been disabled for this recipe")
    elif args.no_pager and not os.path.exists(configurescript):
        raise DevtoolError("No configure script found and no other information to display")
    else:
        configopttext = ''
        if autotools and configureopts:
            configopttext = '''
Arguments currently passed to the configure script:

%s

Some of those are fixed.''' % (configureopts + ' ' + extra_oeconf)
            if extra_oeconf:
                configopttext += ''' The ones that are specified through EXTRA_OECONF (which you can change or add to easily):

%s''' % extra_oeconf

        elif cmake:
            # Extract the (possibly line-continued) 'cmake ...' command
            # from the cmake_do_configure function body
            in_cmake = False
            cmake_cmd = ''
            for line in cmake_do_configure.splitlines():
                if in_cmake:
                    cmake_cmd = cmake_cmd + ' ' + line.strip().rstrip('\\')
                    if not line.endswith('\\'):
                        break
                if line.lstrip().startswith('cmake '):
                    cmake_cmd = line.strip().rstrip('\\')
                    if line.endswith('\\'):
                        in_cmake = True
                    else:
                        break
            if cmake_cmd:
                configopttext = '''
The current cmake command line:

%s

Arguments specified through EXTRA_OECMAKE (which you can change or add to easily)

%s''' % (oe.utils.squashspaces(cmake_cmd), extra_oecmake)
            else:
                configopttext = '''
The current implementation of cmake_do_configure:

cmake_do_configure() {
%s
}

Arguments specified through EXTRA_OECMAKE (which you can change or add to easily)

%s''' % (cmake_do_configure.rstrip(), extra_oecmake)

        elif do_configure:
            configopttext = '''
The current implementation of do_configure:

do_configure() {
%s
}''' % do_configure.rstrip()
            if '${EXTRA_OECONF}' in do_configure_noexpand:
                configopttext += '''

Arguments specified through EXTRA_OECONF (which you can change or add to easily):

%s''' % extra_oeconf

        if packageconfig:
            configopttext += '''

Some of these options may be controlled through PACKAGECONFIG; for more details please see the recipe.'''

        if args.arg:
            # User-supplied arguments replace the default help switch
            helpargs = ' '.join(args.arg)
        elif cmake:
            helpargs = '-LH'
        else:
            helpargs = '--help'

        msg = '''configure information for %s
------------------------------------------
%s''' % (pn, configopttext)

        if cmake:
            msg += '''

The cmake %s output for %s follows. After "-- Cache values" you should see a list of variables you can add to EXTRA_OECMAKE (prefixed with -D and suffixed with = followed by the desired value, without any spaces).
------------------------------------------''' % (helpargs, pn)
        elif os.path.exists(configurescript):
            msg += '''

The ./configure %s output for %s follows.
------------------------------------------''' % (helpargs, pn)

        olddir = os.getcwd()
        # NOTE(review): tmppath is created and removed but never written
        # to anywhere in this function - presumably a leftover; kept to
        # preserve behaviour exactly. TODO confirm and drop.
        tmppath = tempfile.mkdtemp()
        # delete=False so the shell command below can read the file after
        # it has been closed; we remove it ourselves in the finally block
        with tempfile.NamedTemporaryFile('w', delete=False) as tf:
            if not args.no_header:
                tf.write(msg + '\n')
            tf.close()
        try:
            try:
                # Header first, then the live configure/cmake help output
                cmd = 'cat %s' % tf.name
                if cmake:
                    cmd += '; cmake %s %s 2>&1' % (helpargs, s)
                    os.chdir(b)
                elif os.path.exists(configurescript):
                    cmd += '; %s %s' % (configurescript, helpargs)
                if sys.stdout.isatty() and not args.no_pager:
                    pager = os.environ.get('PAGER', 'less')
                    cmd = '(%s) | %s' % (cmd, pager)
                subprocess.check_call(cmd, shell=True)
            except subprocess.CalledProcessError as e:
                return e.returncode
        finally:
            os.chdir(olddir)
            shutil.rmtree(tmppath)
            os.remove(tf.name)
-
-
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""
    parser = subparsers.add_parser('edit-recipe',
                                   help='Edit a recipe file in your workspace',
                                   description='Runs the default editor (as specified by the EDITOR variable) on the specified recipe. Note that the recipe file itself must be in the workspace (i.e. as a result of "devtool add" or "devtool upgrade"); you can override this with the -a/--any-recipe option.',
                                   group='working')
    parser.add_argument('recipename', help='Recipe to edit')
    parser.add_argument('--any-recipe', '-a', action="store_true",
                        help='Edit any recipe, not just where the recipe file itself is in the workspace')
    parser.set_defaults(func=edit_recipe)

    # NOTE: Needed to override the usage string here since the default
    # gets the order wrong - recipename must come before --arg
    parser = subparsers.add_parser('configure-help',
                                   help='Get help on configure script options',
                                   usage='devtool configure-help [options] recipename [--arg ...]',
                                   description='Displays the help for the configure script for the specified recipe (i.e. runs ./configure --help) prefaced by a header describing the current options being specified. Output is piped through less (or whatever PAGER is set to, if set) for easy browsing.',
                                   group='working')
    parser.add_argument('recipename', help='Recipe to show configure help for')
    parser.add_argument('-p', '--no-pager', help='Disable paged output', action="store_true")
    parser.add_argument('-n', '--no-header', help='Disable explanatory header text', action="store_true")
    parser.add_argument('--arg', nargs=argparse.REMAINDER,
                        help='Pass remaining arguments to the configure script instead of --help (useful if the script has additional help options)')
    parser.set_defaults(func=configure_help)
OpenPOWER on IntegriCloud