Diffstat (limited to 'yocto-poky/scripts')
-rwxr-xr-x  yocto-poky/scripts/bitbake-prserv-tool  9
-rwxr-xr-x  yocto-poky/scripts/bitbake-whatchanged  2
-rwxr-xr-x  yocto-poky/scripts/buildhistory-collect-srcrevs  22
-rwxr-xr-x  yocto-poky/scripts/cleanup-workdir  2
-rwxr-xr-x  yocto-poky/scripts/combo-layer  49
-rwxr-xr-x  yocto-poky/scripts/contrib/build-perf-test.sh  45
-rwxr-xr-x  yocto-poky/scripts/contrib/devtool-stress.py  5
-rwxr-xr-x  yocto-poky/scripts/contrib/mkefidisk.sh  7
-rwxr-xr-x  yocto-poky/scripts/contrib/python/generate-manifest-2.7.py  15
-rwxr-xr-x  yocto-poky/scripts/contrib/python/generate-manifest-3.5.py (renamed from yocto-poky/scripts/contrib/python/generate-manifest-3.4.py)  14
-rwxr-xr-x  yocto-poky/scripts/contrib/verify-homepage.py  65
-rwxr-xr-x  yocto-poky/scripts/create-pull-request  22
-rwxr-xr-x  yocto-poky/scripts/devtool  92
-rwxr-xr-x  yocto-poky/scripts/gen-lockedsig-cache  6
-rwxr-xr-x  yocto-poky/scripts/hob  6
-rw-r--r--  yocto-poky/scripts/lib/argparse_oe.py  129
-rw-r--r--  yocto-poky/scripts/lib/bsp/engine.py  6
-rw-r--r--  yocto-poky/scripts/lib/bsp/help.py  10
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/kernel-list.noinstall  4
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend  33
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend  33
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend  33
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend (renamed from yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend)  4
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_4.4.bbappend (renamed from yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_3.14.bbappend)  4
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/conf/machine/machine.conf  4
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/kernel-list.noinstall  4
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend  33
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend  33
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend  33
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend (renamed from yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend)  4
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_3.14.bbappend  32
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_3.19.bbappend  32
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.1.bbappend  4
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.4.bbappend  32
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/layer/README  2
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/kernel-list.noinstall  4
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend  33
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend (renamed from yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend)  4
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_4.4.bbappend (renamed from yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_3.14.bbappend)  4
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/kernel-list.noinstall  4
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend  33
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend (renamed from yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend)  4
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_3.14.bbappend  32
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_4.4.bbappend (renamed from yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_3.19.bbappend)  4
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/conf/machine/machine.conf  2
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/kernel-list.noinstall  4
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend  33
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend  33
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend  33
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend  33
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_3.14.bbappend  32
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_3.19.bbappend  32
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.4.bbappend (renamed from yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_3.19.bbappend)  4
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/kernel-list.noinstall  4
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-dev.bbappend  8
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend  62
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend  62
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend (renamed from yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend)  4
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_3.14.bbappend  61
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.1.bbappend  8
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.4.bbappend (renamed from yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_3.19.bbappend)  12
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/kernel-list.noinstall  4
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend  33
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend  33
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend  33
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend  33
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_3.14.bbappend  32
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_3.19.bbappend  32
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.1.bbappend  4
-rw-r--r--  yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.4.bbappend (renamed from yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_3.19.bbappend)  4
-rw-r--r--  yocto-poky/scripts/lib/devtool/__init__.py  100
-rw-r--r--  yocto-poky/scripts/lib/devtool/build.py  63
-rw-r--r--  yocto-poky/scripts/lib/devtool/build_image.py (renamed from yocto-poky/scripts/lib/devtool/build-image.py)  87
-rw-r--r--  yocto-poky/scripts/lib/devtool/build_sdk.py  65
-rw-r--r--  yocto-poky/scripts/lib/devtool/deploy.py  248
-rw-r--r--  yocto-poky/scripts/lib/devtool/package.py  29
-rw-r--r--  yocto-poky/scripts/lib/devtool/runqemu.py  3
-rw-r--r--  yocto-poky/scripts/lib/devtool/sdk.py  295
-rw-r--r--  yocto-poky/scripts/lib/devtool/search.py  82
-rw-r--r--  yocto-poky/scripts/lib/devtool/standard.py  596
-rw-r--r--  yocto-poky/scripts/lib/devtool/upgrade.py  168
-rw-r--r--  yocto-poky/scripts/lib/devtool/utilcmds.py  233
-rw-r--r--  yocto-poky/scripts/lib/recipetool/append.py  4
-rw-r--r--  yocto-poky/scripts/lib/recipetool/create.py  583
-rw-r--r--  yocto-poky/scripts/lib/recipetool/create_buildsys.py  777
-rw-r--r--  yocto-poky/scripts/lib/recipetool/create_buildsys_python.py  9
-rw-r--r--  yocto-poky/scripts/lib/recipetool/create_kernel.py  99
-rw-r--r--  yocto-poky/scripts/lib/recipetool/create_kmod.py  152
-rw-r--r--  yocto-poky/scripts/lib/recipetool/create_npm.py  156
-rw-r--r--  yocto-poky/scripts/lib/recipetool/newappend.py  17
-rw-r--r--  yocto-poky/scripts/lib/recipetool/setvar.py  75
-rw-r--r--  yocto-poky/scripts/lib/scriptutils.py  16
-rw-r--r--  yocto-poky/scripts/lib/wic/3rdparty/pykickstart/__init__.py  0
-rw-r--r--  yocto-poky/scripts/lib/wic/3rdparty/pykickstart/base.py  466
-rw-r--r--  yocto-poky/scripts/lib/wic/3rdparty/pykickstart/commands/__init__.py  20
-rw-r--r--  yocto-poky/scripts/lib/wic/3rdparty/pykickstart/commands/bootloader.py  216
-rw-r--r--  yocto-poky/scripts/lib/wic/3rdparty/pykickstart/commands/partition.py  314
-rw-r--r--  yocto-poky/scripts/lib/wic/3rdparty/pykickstart/constants.py  57
-rw-r--r--  yocto-poky/scripts/lib/wic/3rdparty/pykickstart/errors.py  103
-rw-r--r--  yocto-poky/scripts/lib/wic/3rdparty/pykickstart/handlers/__init__.py  0
-rw-r--r--  yocto-poky/scripts/lib/wic/3rdparty/pykickstart/handlers/control.py  46
-rw-r--r--  yocto-poky/scripts/lib/wic/3rdparty/pykickstart/handlers/f16.py  24
-rw-r--r--  yocto-poky/scripts/lib/wic/3rdparty/pykickstart/ko.py  37
-rw-r--r--  yocto-poky/scripts/lib/wic/3rdparty/pykickstart/options.py  223
-rw-r--r--  yocto-poky/scripts/lib/wic/3rdparty/pykickstart/parser.py  619
-rw-r--r--  yocto-poky/scripts/lib/wic/3rdparty/pykickstart/sections.py  244
-rw-r--r--  yocto-poky/scripts/lib/wic/3rdparty/pykickstart/version.py  168
-rw-r--r--  yocto-poky/scripts/lib/wic/canned-wks/common.wks.inc  3
-rw-r--r--  yocto-poky/scripts/lib/wic/canned-wks/directdisk-bootloader-config.cfg  11
-rw-r--r--  yocto-poky/scripts/lib/wic/canned-wks/directdisk-bootloader-config.wks  8
-rw-r--r--  yocto-poky/scripts/lib/wic/canned-wks/directdisk.wks  4
-rw-r--r--  yocto-poky/scripts/lib/wic/canned-wks/qemux86-directdisk.wks  4
-rw-r--r--  yocto-poky/scripts/lib/wic/conf.py  7
-rw-r--r--  yocto-poky/scripts/lib/wic/help.py  32
-rw-r--r--  yocto-poky/scripts/lib/wic/imager/baseimager.py  2
-rw-r--r--  yocto-poky/scripts/lib/wic/imager/direct.py  29
-rw-r--r--  yocto-poky/scripts/lib/wic/kickstart/__init__.py  122
-rw-r--r--  yocto-poky/scripts/lib/wic/kickstart/custom_commands/__init__.py  7
-rw-r--r--  yocto-poky/scripts/lib/wic/kickstart/custom_commands/wicboot.py  60
-rw-r--r--  yocto-poky/scripts/lib/wic/ksparser.py  169
-rw-r--r--  yocto-poky/scripts/lib/wic/partition.py (renamed from yocto-poky/scripts/lib/wic/kickstart/custom_commands/partition.py)  178
-rw-r--r--  yocto-poky/scripts/lib/wic/plugins/source/bootimg-efi.py  81
-rw-r--r--  yocto-poky/scripts/lib/wic/plugins/source/bootimg-partition.py  3
-rw-r--r--  yocto-poky/scripts/lib/wic/plugins/source/bootimg-pcbios.py  76
-rw-r--r--  yocto-poky/scripts/lib/wic/plugins/source/isoimage-isohybrid.py  53
-rw-r--r--  yocto-poky/scripts/lib/wic/plugins/source/rawcopy.py  7
-rw-r--r--  yocto-poky/scripts/lib/wic/plugins/source/rootfs.py  14
-rw-r--r--  yocto-poky/scripts/lib/wic/plugins/source/rootfs_pcbios_ext.py  12
-rw-r--r--  yocto-poky/scripts/lib/wic/utils/misc.py  37
-rw-r--r--  yocto-poky/scripts/lib/wic/utils/oe/misc.py  18
-rw-r--r--  yocto-poky/scripts/lib/wic/utils/partitionedfs.py  36
-rwxr-xr-x  yocto-poky/scripts/oe-buildenv-internal  64
-rwxr-xr-x  yocto-poky/scripts/oe-git-proxy  42
-rwxr-xr-x  yocto-poky/scripts/oe-pkgdata-util  33
-rwxr-xr-x  yocto-poky/scripts/oe-publish-sdk  44
-rwxr-xr-x  yocto-poky/scripts/oe-selftest  477
-rwxr-xr-x  yocto-poky/scripts/oe-setup-builddir  15
-rw-r--r--  yocto-poky/scripts/postinst-intercepts/update_font_cache  4
-rw-r--r--  yocto-poky/scripts/postinst-intercepts/update_gio_module_cache  7
-rw-r--r--  yocto-poky/scripts/postinst-intercepts/update_pixbuf_cache  2
-rwxr-xr-x  yocto-poky/scripts/recipetool  19
-rwxr-xr-x  yocto-poky/scripts/relocate_sdk.py  14
-rwxr-xr-x  yocto-poky/scripts/runqemu  117
-rwxr-xr-x  yocto-poky/scripts/runqemu-gen-tapdevs  9
-rwxr-xr-x  yocto-poky/scripts/runqemu-internal  278
-rwxr-xr-x  yocto-poky/scripts/send-error-report  6
-rwxr-xr-x  yocto-poky/scripts/sstate-cache-management.sh  4
-rwxr-xr-x  yocto-poky/scripts/sstate-sysroot-cruft.sh  33
-rwxr-xr-x  yocto-poky/scripts/test-remote-image  5
-rwxr-xr-x  yocto-poky/scripts/wic  4
-rwxr-xr-x  yocto-poky/scripts/wipe-sysroot  2
-rwxr-xr-x  yocto-poky/scripts/yocto-bsp  2
-rwxr-xr-x  yocto-poky/scripts/yocto-kernel  2
-rwxr-xr-x  yocto-poky/scripts/yocto-layer  8
154 files changed, 5085 insertions(+), 5060 deletions(-)
diff --git a/yocto-poky/scripts/bitbake-prserv-tool b/yocto-poky/scripts/bitbake-prserv-tool
index 28c2416bf..fa31b5258 100755
--- a/yocto-poky/scripts/bitbake-prserv-tool
+++ b/yocto-poky/scripts/bitbake-prserv-tool
@@ -86,6 +86,15 @@ do_migrate_localcount ()
[ $# -eq 0 ] && help && exit 1
+case $2 in
+*.conf|*.inc)
+ ;;
+*)
+ echo ERROR: $2 must end with .conf or .inc!
+ exit 1
+ ;;
+esac
+
case $1 in
export)
do_export $2
diff --git a/yocto-poky/scripts/bitbake-whatchanged b/yocto-poky/scripts/bitbake-whatchanged
index 55cfe4b23..af54d16f8 100755
--- a/yocto-poky/scripts/bitbake-whatchanged
+++ b/yocto-poky/scripts/bitbake-whatchanged
@@ -190,7 +190,7 @@ def print_depchanged(d_new = None, d_old = None, verbose = False):
if sigdata_re.match(full_path_old) and sigdata_re.match(full_path_new):
output = bb.siggen.compare_sigfiles(full_path_old, full_path_new)
if output:
- print("\n=== The verbose changes of %s.do_%s:" % (pn, task))
+ print("\n=== The verbose changes of %s.%s:" % (pn, task))
print('\n'.join(output))
else:
# Format the output, the format is:
diff --git a/yocto-poky/scripts/buildhistory-collect-srcrevs b/yocto-poky/scripts/buildhistory-collect-srcrevs
index 58a270803..f3eb76bd0 100755
--- a/yocto-poky/scripts/buildhistory-collect-srcrevs
+++ b/yocto-poky/scripts/buildhistory-collect-srcrevs
@@ -18,7 +18,9 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-import os, sys
+import collections
+import os
+import sys
import optparse
import logging
@@ -65,16 +67,13 @@ def main():
else:
forcevariable = ''
- lastdir = ''
+ all_srcrevs = collections.defaultdict(list)
for root, dirs, files in os.walk(options.buildhistory_dir):
if '.git' in dirs:
dirs.remove('.git')
for fn in files:
if fn == 'latest_srcrev':
curdir = os.path.basename(os.path.dirname(root))
- if lastdir != curdir:
- print('# %s' % curdir)
- lastdir = curdir
fullpath = os.path.join(root, fn)
pn = os.path.basename(root)
srcrev = None
@@ -98,11 +97,20 @@ def main():
name = splitval[0].split('_')[1].strip()
srcrevs[name] = value
if srcrev and (options.reportall or srcrev != orig_srcrev):
- print('SRCREV_pn-%s%s = "%s"' % (pn, forcevariable, srcrev))
+ all_srcrevs[curdir].append((pn, None, srcrev))
for name, value in srcrevs.items():
orig = orig_srcrevs.get(name, orig_srcrev)
if options.reportall or value != orig:
- print('SRCREV_%s_pn-%s%s = "%s"' % (name, pn, forcevariable, value))
+ all_srcrevs[curdir].append((pn, name, srcrev))
+
+ for curdir, srcrevs in sorted(all_srcrevs.iteritems()):
+ if srcrevs:
+ print('# %s' % curdir)
+ for pn, name, srcrev in srcrevs:
+ if name:
+ print('SRCREV_%s_pn-%s%s = "%s"' % (name, pn, forcevariable, srcrev))
+ else:
+ print('SRCREV_pn-%s%s = "%s"' % (pn, forcevariable, srcrev))
if __name__ == "__main__":
diff --git a/yocto-poky/scripts/cleanup-workdir b/yocto-poky/scripts/cleanup-workdir
index a7f5a3a66..01ebd526e 100755
--- a/yocto-poky/scripts/cleanup-workdir
+++ b/yocto-poky/scripts/cleanup-workdir
@@ -194,5 +194,5 @@ if __name__ == '__main__':
except Exception:
ret = 2
import traceback
- traceback.print_exc(3)
+ traceback.print_exc()
sys.exit(ret)
diff --git a/yocto-poky/scripts/combo-layer b/yocto-poky/scripts/combo-layer
index 7435a176b..91270415f 100755
--- a/yocto-poky/scripts/combo-layer
+++ b/yocto-poky/scripts/combo-layer
@@ -73,7 +73,7 @@ class Configuration(object):
else:
# Apply special type transformations for some properties.
# Type matches the RawConfigParser.get*() methods.
- types = {'signoff': 'boolean'}
+ types = {'signoff': 'boolean', 'update': 'boolean'}
if name in types:
value = getattr(parser, 'get' + types[name])(section, name)
self.repos[repo][name] = value
@@ -380,7 +380,7 @@ tail -c +18 $tmpname | head -c -4
if not parent:
parent = '.'
# May run outside of the current directory, so do not assume that .git exists.
- filter_branch.extend(['--tree-filter', 'mkdir -p .git/tmptree && mv $(ls -1 -a | grep -v -e ^.git$ -e ^.$ -e ^..$) .git/tmptree && mkdir -p %s && mv .git/tmptree %s' % (parent, dest_dir)])
+ filter_branch.extend(['--tree-filter', 'mkdir -p .git/tmptree && find . -mindepth 1 -maxdepth 1 ! -name .git -print0 | xargs -0 -I SOURCE mv SOURCE .git/tmptree && mkdir -p %s && mv .git/tmptree %s' % (parent, dest_dir)])
filter_branch.append('HEAD')
runcmd(filter_branch)
runcmd('git update-ref -d refs/original/refs/heads/%s' % name)
@@ -567,7 +567,7 @@ def get_repos(conf, repo_names):
sys.exit(1)
if not repos:
- repos = conf.repos
+ repos = [ repo for repo in conf.repos if conf.repos[repo].get("update", True) ]
return repos
@@ -662,7 +662,14 @@ def action_update(conf, args):
patch_cmd_range = "%s..%s" % (repo['last_revision'], top_revision)
rev_cmd_range = patch_cmd_range
- file_filter = repo.get('file_filter',"")
+ file_filter = repo.get('file_filter',".")
+
+ # Filter out unwanted files
+ exclude = repo.get('file_exclude', '')
+ if exclude:
+ for path in exclude.split():
+ p = "%s/%s" % (dest_dir, path) if dest_dir != '.' else path
+ file_filter += " ':!%s'" % p
patch_cmd = "git format-patch -N %s --output-directory %s %s -- %s" % \
(prefix,repo_patch_dir, patch_cmd_range, file_filter)
@@ -681,38 +688,6 @@ def action_update(conf, args):
runcmd("%s %s %s %s" % (repo['hook'], patch, revlist[count], name))
count=count-1
- # Step 3a: Filter out unwanted files and patches.
- exclude = repo.get('file_exclude', '')
- if exclude:
- filter = ['filterdiff', '-p1']
- for path in exclude.split():
- filter.append('-x')
- filter.append('%s/%s' % (dest_dir, path) if dest_dir != '.' else path)
- for patch in patchlist[:]:
- filtered = patch + '.tmp'
- with open(filtered, 'w') as f:
- runcmd(filter + [patch], out=f)
- # Now check for empty patches.
- if runcmd(['filterdiff', '--list', filtered]):
- # Possibly modified.
- os.unlink(patch)
- os.rename(filtered, patch)
- else:
- # Empty, ignore it. Must also remove from revlist.
- with open(patch, 'r') as f:
- fromline = f.readline()
- if not fromline:
- # Patch must have been empty to start with. No need
- # to remove it.
- continue
- m = re.match(r'''^From ([0-9a-fA-F]+) .*\n''', fromline)
- rev = m.group(1)
- logger.debug('skipping empty patch %s = %s' % (patch, rev))
- os.unlink(patch)
- os.unlink(filtered)
- patchlist.remove(patch)
- revlist.remove(rev)
-
# Step 4: write patch list and revision list to file, for user to edit later
patchlist_file = os.path.join(os.getcwd(), patch_dir, "patchlist-%s" % name)
repo['patchlist'] = patchlist_file
@@ -945,5 +920,5 @@ if __name__ == "__main__":
except Exception:
ret = 1
import traceback
- traceback.print_exc(5)
+ traceback.print_exc()
sys.exit(ret)
diff --git a/yocto-poky/scripts/contrib/build-perf-test.sh b/yocto-poky/scripts/contrib/build-perf-test.sh
index cdd7885dc..7d99228c7 100755
--- a/yocto-poky/scripts/contrib/build-perf-test.sh
+++ b/yocto-poky/scripts/contrib/build-perf-test.sh
@@ -128,7 +128,7 @@ rev=$(git rev-parse --short HEAD) || exit 1
OUTDIR="$clonedir/build-perf-test/results-$rev-`date "+%Y%m%d%H%M%S"`"
BUILDDIR="$OUTDIR/build"
resultsfile="$OUTDIR/results.log"
-bboutput="$OUTDIR/bitbake.log"
+cmdoutput="$OUTDIR/commands.log"
myoutput="$OUTDIR/output.log"
globalres="$clonedir/build-perf-test/globalres.log"
@@ -180,14 +180,13 @@ time_count=0
declare -a SIZES
size_count=0
-bbtime () {
- local arg="$@"
- log " Timing: bitbake ${arg}"
+time_cmd () {
+ log " Timing: $*"
if [ $verbose -eq 0 ]; then
- /usr/bin/time -v -o $resultsfile bitbake ${arg} >> $bboutput
+ /usr/bin/time -v -o $resultsfile "$@" >> $cmdoutput
else
- /usr/bin/time -v -o $resultsfile bitbake ${arg}
+ /usr/bin/time -v -o $resultsfile "$@"
fi
ret=$?
if [ $ret -eq 0 ]; then
@@ -206,12 +205,16 @@ bbtime () {
log "More stats can be found in ${resultsfile}.${i}"
}
+bbtime () {
+ time_cmd bitbake "$@"
+}
+
#we don't time bitbake here
bbnotime () {
local arg="$@"
log " Running: bitbake ${arg}"
if [ $verbose -eq 0 ]; then
- bitbake ${arg} >> $bboutput
+ bitbake ${arg} >> $cmdoutput
else
bitbake ${arg}
fi
@@ -350,6 +353,33 @@ test3 () {
bbtime -p
}
+#
+# Test 4 - eSDK
+# Measure: eSDK size and installation time
+test4 () {
+ log "Running Test 4: eSDK size and installation time"
+ bbnotime $IMAGE -c do_populate_sdk_ext
+
+ esdk_installer=(tmp/deploy/sdk/*-toolchain-ext-*.sh)
+
+ if [ ${#esdk_installer[*]} -eq 1 ]; then
+ s=$((`stat -c %s "$esdk_installer"` / 1024))
+ SIZES[(( size_count++ ))]="$s"
+ log "Download SIZE of eSDK is: $s kB"
+
+ do_sync
+ time_cmd "$esdk_installer" -y -d "tmp/esdk-deploy"
+
+ s=$((`du -sb "tmp/esdk-deploy" | cut -f1` / 1024))
+ SIZES[(( size_count++ ))]="$s"
+ log "Install SIZE of eSDK is: $s kB"
+ else
+ log "ERROR: other than one sdk found (${esdk_installer[*]}), reporting size and time as 0."
+ SIZES[(( size_count++ ))]="0"
+ TIMES[(( time_count++ ))]="0"
+ fi
+
+}
# RUN!
@@ -359,6 +389,7 @@ test1_p2
test1_p3
test2
test3
+test4
# if we got til here write to global results
write_results
diff --git a/yocto-poky/scripts/contrib/devtool-stress.py b/yocto-poky/scripts/contrib/devtool-stress.py
index 4b35fc9d0..8cf92ca2f 100755
--- a/yocto-poky/scripts/contrib/devtool-stress.py
+++ b/yocto-poky/scripts/contrib/devtool-stress.py
@@ -35,6 +35,7 @@ import fnmatch
scripts_lib_path = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'lib'))
sys.path.insert(0, scripts_lib_path)
import scriptutils
+import argparse_oe
logger = scriptutils.logger_create('devtool-stress')
def select_recipes(args):
@@ -204,8 +205,8 @@ def stress_modify(args):
def main():
- parser = argparse.ArgumentParser(description="devtool stress tester",
- epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
+ parser = argparse_oe.ArgumentParser(description="devtool stress tester",
+ epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
parser.add_argument('-r', '--resume-from', help='Resume from specified recipe', metavar='PN')
parser.add_argument('-o', '--only', help='Only test specified recipes (comma-separated without spaces, wildcards allowed)', metavar='PNLIST')
diff --git a/yocto-poky/scripts/contrib/mkefidisk.sh b/yocto-poky/scripts/contrib/mkefidisk.sh
index cd4de0533..333284ff5 100755
--- a/yocto-poky/scripts/contrib/mkefidisk.sh
+++ b/yocto-poky/scripts/contrib/mkefidisk.sh
@@ -402,7 +402,7 @@ if [ -e "$GRUB_CFG" ]; then
sed -i "s/ LABEL=[^ ]*/ /" $GRUB_CFG
sed -i "s@ root=[^ ]*@ @" $GRUB_CFG
- sed -i "s@vmlinuz @vmlinuz root=$TARGET_ROOTFS ro rootwait quiet @" $GRUB_CFG
+ sed -i "s@vmlinuz @vmlinuz root=$TARGET_ROOTFS ro rootwait console=ttyS0 console=tty0 @" $GRUB_CFG
fi
# Look for a gummiboot installation
@@ -419,7 +419,7 @@ if [ -d "$GUMMI_ENTRIES" ]; then
sed -i "/initrd /d" $GUMMI_CFG
sed -i "s@ root=[^ ]*@ @" $GUMMI_CFG
- sed -i "s@options *LABEL=boot @options LABEL=Boot root=$TARGET_ROOTFS ro rootwait quiet @" $GUMMI_CFG
+ sed -i "s@options *LABEL=boot @options LABEL=Boot root=$TARGET_ROOTFS ro rootwait console=ttyS0 console=tty0 @" $GUMMI_CFG
fi
# Ensure we have at least one EFI bootloader configured
@@ -438,6 +438,9 @@ if [ -d $ROOTFS_MNT/etc/udev/ ] ; then
echo "$TARGET_DEVICE" >> $ROOTFS_MNT/etc/udev/mount.blacklist
fi
+# Add startup.nsh script for automated boot
+echo "fs0:\EFI\BOOT\bootx64.efi" > $BOOTFS_MNT/startup.nsh
+
# Call cleanup to unmount devices and images and remove the TMPDIR
cleanup
diff --git a/yocto-poky/scripts/contrib/python/generate-manifest-2.7.py b/yocto-poky/scripts/contrib/python/generate-manifest-2.7.py
index 936522efc..d93c943c6 100755
--- a/yocto-poky/scripts/contrib/python/generate-manifest-2.7.py
+++ b/yocto-poky/scripts/contrib/python/generate-manifest-2.7.py
@@ -164,7 +164,7 @@ if __name__ == "__main__":
#
m.addPackage( "${PN}-core", "Python interpreter and core modules", "${PN}-lang ${PN}-re",
- "__future__.* _abcoll.* abc.* copy.* copy_reg.* ConfigParser.* " +
+ "__future__.* _abcoll.* abc.* ast.* copy.* copy_reg.* ConfigParser.* " +
"genericpath.* getopt.* linecache.* new.* " +
"os.* posixpath.* struct.* " +
"warnings.* site.* stat.* " +
@@ -244,16 +244,12 @@ if __name__ == "__main__":
m.addPackage( "${PN}-distutils-staticdev", "Python distribution utilities (static libraries)", "${PN}-distutils",
"config/lib*.a" ) # package
- m.addPackage( "${PN}-distutils", "Python Distribution Utilities", "${PN}-core",
+ m.addPackage( "${PN}-distutils", "Python Distribution Utilities", "${PN}-core ${PN}-email",
"config distutils" ) # package
m.addPackage( "${PN}-doctest", "Python framework for running examples in docstrings", "${PN}-core ${PN}-lang ${PN}-io ${PN}-re ${PN}-unittest ${PN}-debugger ${PN}-difflib",
"doctest.*" )
- # FIXME consider adding to some higher level package
- m.addPackage( "${PN}-elementtree", "Python elementree", "${PN}-core",
- "lib-dynload/_elementtree.so" )
-
m.addPackage( "${PN}-email", "Python email support", "${PN}-core ${PN}-io ${PN}-re ${PN}-mime ${PN}-audio ${PN}-image ${PN}-netclient",
"imaplib.* email" ) # package
@@ -322,6 +318,9 @@ if __name__ == "__main__":
m.addPackage( "${PN}-pkgutil", "Python package extension utility support", "${PN}-core",
"pkgutil.*")
+ m.addPackage( "${PN}-plistlib", "Generate and parse Mac OS X .plist files", "${PN}-core ${PN}-datetime ${PN}-io",
+ "plistlib.*")
+
m.addPackage( "${PN}-pprint", "Python pretty-print support", "${PN}-core ${PN}-io",
"pprint.*" )
@@ -376,8 +375,8 @@ if __name__ == "__main__":
m.addPackage( "${PN}-unixadmin", "Python Unix administration support", "${PN}-core",
"lib-dynload/nis.so lib-dynload/grp.so lib-dynload/pwd.so getpass.*" )
- m.addPackage( "${PN}-xml", "Python basic XML support", "${PN}-core ${PN}-elementtree ${PN}-re",
- "lib-dynload/pyexpat.so xml xmllib.*" ) # package
+ m.addPackage( "${PN}-xml", "Python basic XML support", "${PN}-core ${PN}-re",
+ "lib-dynload/_elementtree.so lib-dynload/pyexpat.so xml xmllib.*" ) # package
m.addPackage( "${PN}-xmlrpc", "Python XML-RPC support", "${PN}-core ${PN}-xml ${PN}-netserver ${PN}-lang",
"xmlrpclib.* SimpleXMLRPCServer.* DocXMLRPCServer.*" )
diff --git a/yocto-poky/scripts/contrib/python/generate-manifest-3.4.py b/yocto-poky/scripts/contrib/python/generate-manifest-3.5.py
index ca2fa6117..367b4b8b4 100755
--- a/yocto-poky/scripts/contrib/python/generate-manifest-3.4.py
+++ b/yocto-poky/scripts/contrib/python/generate-manifest-3.5.py
@@ -17,7 +17,7 @@ import os
import sys
import time
-VERSION = "3.4.2"
+VERSION = "3.5.0"
__author__ = "Michael 'Mickey' Lauer <mlauer@vanille-media.de>"
__version__ = "20140131"
@@ -167,7 +167,7 @@ if __name__ == "__main__":
#
m.addPackage( "${PN}-core", "Python interpreter and core modules", "${PN}-lang ${PN}-re ${PN}-reprlib ${PN}-codecs ${PN}-io ${PN}-math",
- "__future__.* _abcoll.* abc.* copy.* copyreg.* ConfigParser.* " +
+ "__future__.* _abcoll.* abc.* ast.* copy.* copyreg.* ConfigParser.* " +
"genericpath.* getopt.* linecache.* new.* " +
"os.* posixpath.* struct.* " +
"warnings.* site.* stat.* " +
@@ -247,16 +247,12 @@ if __name__ == "__main__":
m.addPackage( "${PN}-distutils-staticdev", "Python distribution utilities (static libraries)", "${PN}-distutils",
"config/lib*.a" ) # package
- m.addPackage( "${PN}-distutils", "Python Distribution Utilities", "${PN}-core",
+ m.addPackage( "${PN}-distutils", "Python Distribution Utilities", "${PN}-core ${PN}-email",
"config distutils" ) # package
m.addPackage( "${PN}-doctest", "Python framework for running examples in docstrings", "${PN}-core ${PN}-lang ${PN}-io ${PN}-re ${PN}-unittest ${PN}-debugger ${PN}-difflib",
"doctest.*" )
- # FIXME consider adding to some higher level package
- m.addPackage( "${PN}-elementtree", "Python elementree", "${PN}-core",
- "lib-dynload/_elementtree.*.so" )
-
m.addPackage( "${PN}-email", "Python email support", "${PN}-core ${PN}-io ${PN}-re ${PN}-mime ${PN}-audio ${PN}-image ${PN}-netclient",
"imaplib.* email" ) # package
@@ -376,8 +372,8 @@ if __name__ == "__main__":
m.addPackage( "${PN}-unixadmin", "Python Unix administration support", "${PN}-core",
"lib-dynload/nis.*.so lib-dynload/grp.*.so lib-dynload/pwd.*.so getpass.*" )
- m.addPackage( "${PN}-xml", "Python basic XML support", "${PN}-core ${PN}-elementtree ${PN}-re",
- "lib-dynload/pyexpat.*.so xml xmllib.*" ) # package
+ m.addPackage( "${PN}-xml", "Python basic XML support", "${PN}-core ${PN}-re",
+ "lib-dynload/_elementtree.*.so lib-dynload/pyexpat.*.so xml xmllib.*" ) # package
m.addPackage( "${PN}-xmlrpc", "Python XML-RPC support", "${PN}-core ${PN}-xml ${PN}-netserver ${PN}-lang",
"xmlrpclib.* SimpleXMLRPCServer.* DocXMLRPCServer.* xmlrpc" )
diff --git a/yocto-poky/scripts/contrib/verify-homepage.py b/yocto-poky/scripts/contrib/verify-homepage.py
index 86cc82bca..265ff65d3 100755
--- a/yocto-poky/scripts/contrib/verify-homepage.py
+++ b/yocto-poky/scripts/contrib/verify-homepage.py
@@ -1,6 +1,7 @@
#!/usr/bin/env python
-# This script is used for verify HOMEPAGE.
+# This script can be used to verify HOMEPAGE values for all recipes in
+# the current configuration.
# The result is influenced by network environment, since the timeout of connect url is 5 seconds as default.
import sys
@@ -8,32 +9,25 @@ import os
import subprocess
import urllib2
-def search_bitbakepath():
- bitbakepath = ""
- # Search path to bitbake lib dir in order to load bb modules
- if os.path.exists(os.path.join(os.path.dirname(sys.argv[0]), '../../bitbake/lib/bb')):
- bitbakepath = os.path.join(os.path.dirname(sys.argv[0]), '../../bitbake/lib')
- bitbakepath = os.path.abspath(bitbakepath)
- else:
- # Look for bitbake/bin dir in PATH
- for pth in os.environ['PATH'].split(':'):
- if os.path.exists(os.path.join(pth, '../lib/bb')):
- bitbakepath = os.path.abspath(os.path.join(pth, '../lib'))
- break
- if not bitbakepath:
- sys.stderr.write("Unable to find bitbake by searching parent directory of this script or PATH\n")
- sys.exit(1)
- return bitbakepath
-
-# For importing the following modules
-sys.path.insert(0, search_bitbakepath())
+# Allow importing scripts/lib modules
+scripts_path = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + '/..')
+lib_path = scripts_path + '/lib'
+sys.path = sys.path + [lib_path]
+import scriptpath
+import scriptutils
+
+# Allow importing bitbake modules
+bitbakepath = scriptpath.add_bitbake_lib_path()
+
import bb.tinfoil
+logger = scriptutils.logger_create('verify_homepage')
+
def wgetHomepage(pn, homepage):
result = subprocess.call('wget ' + '-q -T 5 -t 1 --spider ' + homepage, shell = True)
if result:
- bb.warn("Failed to verify HOMEPAGE (%s) of %s" % (homepage, pn))
+ logger.warn("%s: failed to verify HOMEPAGE: %s " % (pn, homepage))
return 1
else:
return 0
@@ -42,22 +36,27 @@ def verifyHomepage(bbhandler):
pkg_pn = bbhandler.cooker.recipecache.pkg_pn
pnlist = sorted(pkg_pn)
count = 0
+ checked = []
for pn in pnlist:
- fn = pkg_pn[pn].pop()
- data = bb.cache.Cache.loadDataFull(fn, bbhandler.cooker.collection.get_file_appends(fn), bbhandler.config_data)
- homepage = data.getVar("HOMEPAGE")
- if homepage:
- try:
- urllib2.urlopen(homepage, timeout=5)
- except Exception:
- count = count + wgetHomepage(pn, homepage)
+ for fn in pkg_pn[pn]:
+ # There's no point checking multiple BBCLASSEXTENDed variants of the same recipe
+ realfn, _ = bb.cache.Cache.virtualfn2realfn(fn)
+ if realfn in checked:
+ continue
+ data = bb.cache.Cache.loadDataFull(realfn, bbhandler.cooker.collection.get_file_appends(realfn), bbhandler.config_data)
+ homepage = data.getVar("HOMEPAGE", True)
+ if homepage:
+ try:
+ urllib2.urlopen(homepage, timeout=5)
+ except Exception:
+ count = count + wgetHomepage(os.path.basename(realfn), homepage)
+ checked.append(realfn)
return count
if __name__=='__main__':
- failcount = 0
bbhandler = bb.tinfoil.Tinfoil()
bbhandler.prepare()
- print "Start to verify HOMEPAGE:"
+ logger.info("Start verifying HOMEPAGE:")
failcount = verifyHomepage(bbhandler)
- print "finish to verify HOMEPAGE."
- print "Summary: %s failed" % failcount
+ logger.info("Finished verifying HOMEPAGE.")
+ logger.info("Summary: %s failed" % failcount)
diff --git a/yocto-poky/scripts/create-pull-request b/yocto-poky/scripts/create-pull-request
index 19ba58869..479ad6efc 100755
--- a/yocto-poky/scripts/create-pull-request
+++ b/yocto-poky/scripts/create-pull-request
@@ -36,6 +36,7 @@ CMD=$(basename $0)
cat <<EOM
Usage: $CMD [-h] [-o output_dir] [-m msg_body_file] [-s subject] [-r relative_to] [-i commit_id] [-d relative_dir] -u remote [-b branch]
-b branch Branch name in the specified remote (default: current branch)
+ -l local branch Local branch name (default: HEAD)
-c Create an RFC (Request for Comment) patch series
-h Display this help message
-i commit_id Ending commit (default: HEAD)
@@ -50,6 +51,7 @@ Usage: $CMD [-h] [-o output_dir] [-m msg_body_file] [-s subject] [-r relative_to
Examples:
$CMD -u contrib -b nitin/basic
$CMD -u contrib -r distro/master -i nitin/distro -b nitin/distro
+ $CMD -u contrib -r distro/master -i nitin/distro -b nitin/distro -l distro
$CMD -u contrib -r master -i misc -b nitin/misc -o pull-misc
$CMD -u contrib -p "RFC PATCH" -b nitin/experimental
$CMD -u contrib -i misc -b nitin/misc -d ./bitbake
@@ -57,11 +59,14 @@ EOM
}
# Parse and validate arguments
-while getopts "b:cd:hi:m:o:p:r:s:u:" OPT; do
+while getopts "b:cd:hi:m:o:p:r:s:u:l:" OPT; do
case $OPT in
b)
BRANCH="$OPTARG"
;;
+ l)
+ L_BRANCH="$OPTARG"
+ ;;
c)
RFC=1
;;
@@ -130,6 +135,11 @@ if [ -z "$BRANCH" ]; then
echo "NOTE: Assuming remote branch '$BRANCH', use -b to override."
fi
+if [ -z "$L_BRANCH" ]; then
+ L_BRANCH=HEAD
+ echo "NOTE: Assuming local branch HEAD, use -l to override."
+fi
+
if [ -z "$REMOTE_URL" ]; then
echo "ERROR: Missing parameter -u, no git remote!"
usage
@@ -185,6 +195,14 @@ fi
# Generate the patches and cover letter
git format-patch $extraopts -M40 --subject-prefix="$PREFIX" -n -o $ODIR --thread=shallow --cover-letter $RELATIVE_TO..$COMMIT_ID > /dev/null
+if [ -z "$(ls -A $ODIR 2> /dev/null)" ]; then
+ echo "ERROR: $ODIR is empty, no cover letter and patches was generated!"
+ echo " This is most likely due to that \$RRELATIVE_TO..\$COMMIT_ID"
+ echo " ($RELATIVE_TO..$COMMIT_ID) don't contain any differences."
+ rmdir $ODIR
+ exit 1
+fi
+
[ -n "$RELDIR" ] && cd $pdir
# Customize the cover letter
@@ -195,7 +213,7 @@ NEWER_GIT_VERSION=210
if [ $GIT_VERSION -lt $NEWER_GIT_VERSION ]; then
git request-pull $RELATIVE_TO $REMOTE_URL $COMMIT_ID >> "$PM"
else
- git request-pull $RELATIVE_TO $REMOTE_URL :$BRANCH >> "$PM"
+ git request-pull $RELATIVE_TO $REMOTE_URL $L_BRANCH:$BRANCH >> "$PM"
fi
if [ $? -ne 0 ]; then
echo "ERROR: git request-pull reported an error"
diff --git a/yocto-poky/scripts/devtool b/yocto-poky/scripts/devtool
index e4d9db301..478039065 100755
--- a/yocto-poky/scripts/devtool
+++ b/yocto-poky/scripts/devtool
@@ -37,6 +37,7 @@ lib_path = scripts_path + '/lib'
sys.path = sys.path + [lib_path]
from devtool import DevtoolError, setup_tinfoil
import scriptutils
+import argparse_oe
logger = scriptutils.logger_create('devtool')
plugins = []
@@ -111,8 +112,37 @@ def read_workspace():
res = externalsrc_re.match(line.rstrip())
if res:
pn = res.group(2) or os.path.splitext(os.path.basename(fn))[0].split('_')[0]
+ # Find the recipe file within the workspace, if any
+ bbfile = os.path.basename(fn).replace('.bbappend', '.bb').replace('%', '*')
+ recipefile = glob.glob(os.path.join(config.workspace_path,
+ 'recipes',
+ pn,
+ bbfile))
+ if recipefile:
+ recipefile = recipefile[0]
workspace[pn] = {'srctree': res.group(3),
- 'bbappend': fn}
+ 'bbappend': fn,
+ 'recipefile': recipefile}
+ logger.debug('Found recipe %s' % workspace[pn])
+
+def create_unlockedsigs():
+ """ This function will make unlocked-sigs.inc match the recipes in the
+ workspace. This runs on every run of devtool, but it lets us ensure
+ the unlocked items are in sync with the workspace. """
+
+ confdir = os.path.join(basepath, 'conf')
+ unlockedsigs = os.path.join(confdir, 'unlocked-sigs.inc')
+ bb.utils.mkdirhier(confdir)
+ with open(os.path.join(confdir, 'unlocked-sigs.inc'), 'w') as f:
+ f.write("# DO NOT MODIFY! YOUR CHANGES WILL BE LOST.\n" +
+ "# This layer was created by the OpenEmbedded devtool" +
+ " utility in order to\n" +
+ "# contain recipes that are unlocked.\n")
+
+ f.write('SIGGEN_UNLOCKED_RECIPES += "\\\n')
+ for pn in workspace:
+ f.write(' ' + pn)
+ f.write('"')
def create_workspace(args, config, basepath, workspace):
if args.layerpath:
@@ -151,6 +181,10 @@ def _create_workspace(workspacedir, config, basepath):
f.write('\nIf you no longer need to use devtool you can remove the path to this\n')
f.write('workspace layer from your conf/bblayers.conf file (and then delete the\n')
f.write('layer, if you wish).\n')
+ f.write('\nNote that by default, if devtool fetches and unpacks source code, it\n')
+ f.write('will place it in a subdirectory of a "sources" subdirectory of the\n')
+ f.write('layer. If you prefer it to be elsewhere you can specify the source\n')
+ f.write('tree path on the command line.\n')
def _enable_workspace_layer(workspacedir, config, basepath):
"""Ensure the workspace layer is in bblayers.conf"""
@@ -177,17 +211,10 @@ def main():
# Default basepath
basepath = os.path.dirname(os.path.abspath(__file__))
- pth = basepath
- while pth != '' and pth != os.sep:
- if os.path.exists(os.path.join(pth, '.devtoolbase')):
- context.fixed_setup = True
- basepath = pth
- break
- pth = os.path.dirname(pth)
- parser = argparse.ArgumentParser(description="OpenEmbedded development tool",
- add_help=False,
- epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
+ parser = argparse_oe.ArgumentParser(description="OpenEmbedded development tool",
+ add_help=False,
+ epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
parser.add_argument('--basepath', help='Base directory of SDK / build directory')
parser.add_argument('--bbpath', help='Explicitly specify the BBPATH, rather than getting it from the metadata')
parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
@@ -209,17 +236,29 @@ def main():
if global_args.basepath:
# Override
basepath = global_args.basepath
- elif not context.fixed_setup:
- basepath = os.environ.get('BUILDDIR')
- if not basepath:
- logger.error("This script can only be run after initialising the build environment (e.g. by using oe-init-build-env)")
- sys.exit(1)
+ if os.path.exists(os.path.join(basepath, '.devtoolbase')):
+ context.fixed_setup = True
+ else:
+ pth = basepath
+ while pth != '' and pth != os.sep:
+ if os.path.exists(os.path.join(pth, '.devtoolbase')):
+ context.fixed_setup = True
+ basepath = pth
+ break
+ pth = os.path.dirname(pth)
+
+ if not context.fixed_setup:
+ basepath = os.environ.get('BUILDDIR')
+ if not basepath:
+ logger.error("This script can only be run after initialising the build environment (e.g. by using oe-init-build-env)")
+ sys.exit(1)
logger.debug('Using basepath %s' % basepath)
config = ConfigHandler(os.path.join(basepath, 'conf', 'devtool.conf'))
if not config.read():
return -1
+ context.config = config
bitbake_subdir = config.get('General', 'bitbake_subdir', '')
if bitbake_subdir:
@@ -255,13 +294,21 @@ def main():
subparsers = parser.add_subparsers(dest="subparser_name", title='subcommands', metavar='<subcommand>')
+ subparsers.add_subparser_group('sdk', 'SDK maintenance', -2)
+ subparsers.add_subparser_group('advanced', 'Advanced', -1)
+ subparsers.add_subparser_group('starting', 'Beginning work on a recipe', 100)
+ subparsers.add_subparser_group('info', 'Getting information')
+ subparsers.add_subparser_group('working', 'Working on a recipe in the workspace')
+ subparsers.add_subparser_group('testbuild', 'Testing changes on target')
+
if not context.fixed_setup:
parser_create_workspace = subparsers.add_parser('create-workspace',
- help='Set up a workspace',
- description='Sets up a new workspace. NOTE: other devtool subcommands will create a workspace automatically as needed, so you only need to use %(prog)s if you want to specify where the workspace should be located.')
+ help='Set up workspace in an alternative location',
+ description='Sets up a new workspace. NOTE: other devtool subcommands will create a workspace automatically as needed, so you only need to use %(prog)s if you want to specify where the workspace should be located.',
+ group='advanced')
parser_create_workspace.add_argument('layerpath', nargs='?', help='Path in which the workspace layer should be created')
parser_create_workspace.add_argument('--create-only', action="store_true", help='Only create the workspace layer, do not alter configuration')
- parser_create_workspace.set_defaults(func=create_workspace)
+ parser_create_workspace.set_defaults(func=create_workspace, no_workspace=True)
for plugin in plugins:
if hasattr(plugin, 'register_commands'):
@@ -269,8 +316,9 @@ def main():
args = parser.parse_args(unparsed_args, namespace=global_args)
- if args.subparser_name != 'create-workspace':
+ if not getattr(args, 'no_workspace', False):
read_workspace()
+ create_unlockedsigs()
try:
ret = args.func(args, config, basepath, workspace)
@@ -278,6 +326,8 @@ def main():
if str(err):
logger.error(str(err))
ret = 1
+ except argparse_oe.ArgumentUsageError as ae:
+ parser.error_subcommand(ae.message, ae.subcommand)
return ret
@@ -288,5 +338,5 @@ if __name__ == "__main__":
except Exception:
ret = 1
import traceback
- traceback.print_exc(5)
+ traceback.print_exc()
sys.exit(ret)
diff --git a/yocto-poky/scripts/gen-lockedsig-cache b/yocto-poky/scripts/gen-lockedsig-cache
index 6aa361400..0986a2165 100755
--- a/yocto-poky/scripts/gen-lockedsig-cache
+++ b/yocto-poky/scripts/gen-lockedsig-cache
@@ -13,9 +13,9 @@ def mkdir(d):
if e.errno != errno.EEXIST:
raise e
-if len(sys.argv) < 3:
+if len(sys.argv) < 5:
print("Incorrect number of arguments specified")
- print("syntax: gen-lockedsig-cache <locked-sigs.inc> <input-cachedir> <output-cachedir>")
+ print("syntax: gen-lockedsig-cache <locked-sigs.inc> <input-cachedir> <output-cachedir> <nativelsbstring>")
sys.exit(1)
print('Reading %s' % sys.argv[1])
@@ -30,7 +30,7 @@ files = set()
for s in sigs:
p = sys.argv[2] + "/" + s[:2] + "/*" + s + "*"
files |= set(glob.glob(p))
- p = sys.argv[2] + "/*/" + s[:2] + "/*" + s + "*"
+ p = sys.argv[2] + "/%s/" % sys.argv[4] + s[:2] + "/*" + s + "*"
files |= set(glob.glob(p))
print('Processing files')
diff --git a/yocto-poky/scripts/hob b/yocto-poky/scripts/hob
deleted file mode 100755
index 8d33ab178..000000000
--- a/yocto-poky/scripts/hob
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env bash
-export BB_ENV_EXTRAWHITE="DISABLE_SANITY_CHECKS $BB_ENV_EXTRAWHITE"
-DISABLE_SANITY_CHECKS=1 bitbake -u hob $@
-
-ret=$?
-exit $ret
diff --git a/yocto-poky/scripts/lib/argparse_oe.py b/yocto-poky/scripts/lib/argparse_oe.py
new file mode 100644
index 000000000..bf3ebaddf
--- /dev/null
+++ b/yocto-poky/scripts/lib/argparse_oe.py
@@ -0,0 +1,129 @@
+import sys
+import argparse
+from collections import defaultdict, OrderedDict
+
+class ArgumentUsageError(Exception):
+ """Exception class you can raise (and catch) in order to show the help"""
+ def __init__(self, message, subcommand=None):
+ self.message = message
+ self.subcommand = subcommand
+
+class ArgumentParser(argparse.ArgumentParser):
+ """Our own version of argparse's ArgumentParser"""
+ def __init__(self, *args, **kwargs):
+ kwargs.setdefault('formatter_class', OeHelpFormatter)
+ self._subparser_groups = OrderedDict()
+ super(ArgumentParser, self).__init__(*args, **kwargs)
+
+ def error(self, message):
+ sys.stderr.write('ERROR: %s\n' % message)
+ self.print_help()
+ sys.exit(2)
+
+ def error_subcommand(self, message, subcommand):
+ if subcommand:
+ for action in self._actions:
+ if isinstance(action, argparse._SubParsersAction):
+ for choice, subparser in action.choices.items():
+ if choice == subcommand:
+ subparser.error(message)
+ return
+ self.error(message)
+
+ def add_subparsers(self, *args, **kwargs):
+ ret = super(ArgumentParser, self).add_subparsers(*args, **kwargs)
+ # Need a way of accessing the parent parser
+ ret._parent_parser = self
+ # Ensure our class gets instantiated
+ ret._parser_class = ArgumentSubParser
+ # Hacky way of adding a method to the subparsers object
+ ret.add_subparser_group = self.add_subparser_group
+ return ret
+
+ def add_subparser_group(self, groupname, groupdesc, order=0):
+ self._subparser_groups[groupname] = (groupdesc, order)
+
+
+class ArgumentSubParser(ArgumentParser):
+ def __init__(self, *args, **kwargs):
+ if 'group' in kwargs:
+ self._group = kwargs.pop('group')
+ if 'order' in kwargs:
+ self._order = kwargs.pop('order')
+ super(ArgumentSubParser, self).__init__(*args, **kwargs)
+ for agroup in self._action_groups:
+ if agroup.title == 'optional arguments':
+ agroup.title = 'options'
+ break
+
+ def parse_known_args(self, args=None, namespace=None):
+ # This works around argparse not handling optional positional arguments being
+ # intermixed with other options. A pretty horrible hack, but we're not left
+ # with much choice given that the bug in argparse exists and it's difficult
+ # to subclass.
+ # Borrowed from http://stackoverflow.com/questions/20165843/argparse-how-to-handle-variable-number-of-arguments-nargs
+ # with an extra workaround (in format_help() below) for the positional
+ # arguments disappearing from the --help output, as well as structural tweaks.
+ # Originally simplified from http://bugs.python.org/file30204/test_intermixed.py
+ positionals = self._get_positional_actions()
+ for action in positionals:
+ # deactivate positionals
+ action.save_nargs = action.nargs
+ action.nargs = 0
+
+ namespace, remaining_args = super(ArgumentSubParser, self).parse_known_args(args, namespace)
+ for action in positionals:
+ # remove the empty positional values from namespace
+ if hasattr(namespace, action.dest):
+ delattr(namespace, action.dest)
+ for action in positionals:
+ action.nargs = action.save_nargs
+ # parse positionals
+ namespace, extras = super(ArgumentSubParser, self).parse_known_args(remaining_args, namespace)
+ return namespace, extras
+
+ def format_help(self):
+ # Quick, restore the positionals!
+ positionals = self._get_positional_actions()
+ for action in positionals:
+ if hasattr(action, 'save_nargs'):
+ action.nargs = action.save_nargs
+ return super(ArgumentParser, self).format_help()
+
+
+class OeHelpFormatter(argparse.HelpFormatter):
+ def _format_action(self, action):
+ if hasattr(action, '_get_subactions'):
+ # subcommands list
+ groupmap = defaultdict(list)
+ ordermap = {}
+ subparser_groups = action._parent_parser._subparser_groups
+ groups = sorted(subparser_groups.keys(), key=lambda item: subparser_groups[item][1], reverse=True)
+ for subaction in self._iter_indented_subactions(action):
+ parser = action._name_parser_map[subaction.dest]
+ group = getattr(parser, '_group', None)
+ groupmap[group].append(subaction)
+ if group not in groups:
+ groups.append(group)
+ order = getattr(parser, '_order', 0)
+ ordermap[subaction.dest] = order
+
+ lines = []
+ if len(groupmap) > 1:
+ groupindent = ' '
+ else:
+ groupindent = ''
+ for group in groups:
+ subactions = groupmap[group]
+ if not subactions:
+ continue
+ if groupindent:
+ if not group:
+ group = 'other'
+ groupdesc = subparser_groups.get(group, (group, 0))[0]
+ lines.append(' %s:' % groupdesc)
+ for subaction in sorted(subactions, key=lambda item: ordermap[item.dest], reverse=True):
+ lines.append('%s%s' % (groupindent, self._format_action(subaction).rstrip()))
+ return '\n'.join(lines)
+ else:
+ return super(OeHelpFormatter, self)._format_action(action)
diff --git a/yocto-poky/scripts/lib/bsp/engine.py b/yocto-poky/scripts/lib/bsp/engine.py
index d0d5d726a..66e2162ea 100644
--- a/yocto-poky/scripts/lib/bsp/engine.py
+++ b/yocto-poky/scripts/lib/bsp/engine.py
@@ -438,7 +438,7 @@ class ListInputLine(InputLine):
self.parse_error("No entries available for input list",
self.lineno, self.line)
choicepairs = self.gen_choicepair_list()
-
+
return choicepairs
def gen_choices(self, context = None, checklist = False):
@@ -1598,7 +1598,7 @@ def yocto_layer_create(layer_name, scripts_path, layer_output_dir, codedump, pro
yocto_common_create(layer_name, "layer", scripts_path, layer_output_dir, codedump, properties_file, properties, False)
print "\nNew layer created in %s.\n" % (layer_output_dir)
- print "Don't forget to add it to your BBLAYERS (for details see %s\README)." % (layer_output_dir)
+ print "Don't forget to add it to your BBLAYERS (for details see %s/README)." % (layer_output_dir)
def yocto_bsp_create(machine, arch, scripts_path, bsp_output_dir, codedump, properties_file, properties=None):
@@ -1822,7 +1822,7 @@ def yocto_layer_list_property_values(arch, property, scripts_path, properties_fi
sys.exit(1)
json.dump(values_list, of)
-
+
print_values(type, values_list)
diff --git a/yocto-poky/scripts/lib/bsp/help.py b/yocto-poky/scripts/lib/bsp/help.py
index 4cce100d1..85a09dd29 100644
--- a/yocto-poky/scripts/lib/bsp/help.py
+++ b/yocto-poky/scripts/lib/bsp/help.py
@@ -103,6 +103,7 @@ yocto_bsp_create_usage = """
usage: yocto-bsp create <bsp-name> <karch> [-o <DIRNAME> | --outdir <DIRNAME>]
[-i <JSON PROPERTY FILE> | --infile <JSON PROPERTY_FILE>]
+ [-c | --codedump] [-s | --skip-git-check]
This command creates a Yocto BSP based on the specified parameters.
The new BSP will be a new Yocto BSP layer contained by default within
@@ -113,7 +114,7 @@ yocto_bsp_create_usage = """
The value of the 'karch' parameter determines the set of files that
will be generated for the BSP, along with the specific set of
'properties' that will be used to fill out the BSP-specific portions
- of the BSP. The possible values for the 'karch' paramter can be
+ of the BSP. The possible values for the 'karch' parameter can be
listed via 'yocto-bsp list karch'.
NOTE: Once created, you should add your new layer to your
@@ -131,6 +132,7 @@ NAME
SYNOPSIS
yocto-bsp create <bsp-name> <karch> [-o <DIRNAME> | --outdir <DIRNAME>]
[-i <JSON PROPERTY FILE> | --infile <JSON PROPERTY_FILE>]
+ [-c | --codedump] [-s | --skip-git-check]
DESCRIPTION
This command creates a Yocto BSP based on the specified
@@ -142,7 +144,7 @@ DESCRIPTION
The value of the 'karch' parameter determines the set of files
that will be generated for the BSP, along with the specific set of
'properties' that will be used to fill out the BSP-specific
- portions of the BSP. The possible values for the 'karch' paramter
+ portions of the BSP. The possible values for the 'karch' parameter
can be listed via 'yocto-bsp list karch'.
The BSP-specific properties that define the values that will be
@@ -173,7 +175,7 @@ DESCRIPTION
BBLAYERS ?= " \\
/path/to/poky/meta \\
- /path/to/poky/meta-yocto \\
+ /path/to/poky/meta-poky \\
/path/to/poky/meta-mybsp \\
"
"""
@@ -756,7 +758,7 @@ DESCRIPTION
or config items along with patches. The named feature must end
with .scc and must not contain a feature directory to contain the
feature (this will be determined automatically), and a feature
- decription in double-quotes along with a capabilities string
+ description in double-quotes along with a capabilities string
(which for the time being can be one of: 'all' or 'board').
"""
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/kernel-list.noinstall b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/kernel-list.noinstall
index 7676ca883..00cf36042 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/kernel-list.noinstall
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/kernel-list.noinstall
@@ -1,5 +1,5 @@
{{ if kernel_choice != "custom": }}
-{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.1) kernel? (y/n)" default:"y"}}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.4) kernel? (y/n)" default:"y"}}
{{ if kernel_choice != "custom" and use_default_kernel == "n": }}
-{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.1"}}
+{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.4"}}
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
deleted file mode 100644
index c56e9e56f..000000000
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
+++ /dev/null
@@ -1,33 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-rt_3.14": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-preempt-rt.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.14"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
deleted file mode 100644
index 15b9b1b92..000000000
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
+++ /dev/null
@@ -1,33 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.14": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-tiny.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.14"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
deleted file mode 100644
index caefcfc26..000000000
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
+++ /dev/null
@@ -1,33 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.19": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-tiny.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.19"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
index 7c6bc7fdf..815c77b81 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
@@ -1,4 +1,4 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.14": }} this
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.4": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -30,4 +30,4 @@ SRC_URI += "file://{{=machine}}-tiny.scc \
# the appropriate changes committed to the upstream linux-yocto repo
SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.14" \ No newline at end of file
+#LINUX_VERSION = "4.4"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_4.4.bbappend
index 5af490dc5..b88a06c52 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_3.14.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_4.4.bbappend
@@ -1,4 +1,4 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.14": }} this
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.4": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -29,4 +29,4 @@ SRC_URI += "file://{{=machine}}-standard.scc \
# the appropriate changes committed to the upstream linux-yocto repo
SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.14" \ No newline at end of file
+#LINUX_VERSION = "4.4"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/conf/machine/machine.conf b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/conf/machine/machine.conf
index 1739ab3cf..d5abe4f61 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/conf/machine/machine.conf
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/conf/machine/machine.conf
@@ -33,7 +33,7 @@ MACHINE_FEATURES += "wifi efi pcbios"
{{ input type:"boolean" name:"xserver" prio:"50" msg:"Do you need support for X? (y/n)" default:"y" }}
-{{ if xserver == "y" and (kernel_choice == "linux-yocto_4.1" or kernel_choice == "linux-yocto_3.19" or kernel_choice == "linux-yocto_3.14"): }}
+{{ if xserver == "y" and (kernel_choice == "linux-yocto_4.4" or kernel_choice == "linux-yocto_4.1"): }}
{{ input type:"choicelist" name:"xserver_choice" prio:"50" msg:"Please select an xserver for this machine:" default:"xserver_vesa" }}
{{ input type:"choice" val:"xserver_vesa" msg:"VESA xserver support" }}
{{ input type:"choice" val:"xserver_i915" msg:"i915 xserver support" }}
@@ -49,7 +49,7 @@ MACHINE_FEATURES += "wifi efi pcbios"
{{ input type:"choice" val:"xserver_fbdev" msg:"fbdev xserver support" }}
{{ input type:"choice" val:"xserver_modesetting" msg:"modesetting xserver support" }}
-{{ if xserver == "y" and kernel_choice != "linux-yocto_4.1" and kernel_choice != "linux-yocto_3.19" and kernel_choice != "linux-yocto_3.14" and kernel_choice != "custom": xserver_choice = "xserver_i915" }}
+{{ if xserver == "y" and kernel_choice != "linux-yocto_4.4" and kernel_choice != "linux-yocto_4.1" and kernel_choice != "custom": xserver_choice = "xserver_i915" }}
{{ if xserver == "y": }}
XSERVER ?= "${XSERVER_X86_BASE} \
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/kernel-list.noinstall b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/kernel-list.noinstall
index 7676ca883..00cf36042 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/kernel-list.noinstall
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/kernel-list.noinstall
@@ -1,5 +1,5 @@
{{ if kernel_choice != "custom": }}
-{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.1) kernel? (y/n)" default:"y"}}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.4) kernel? (y/n)" default:"y"}}
{{ if kernel_choice != "custom" and use_default_kernel == "n": }}
-{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.1"}}
+{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.4"}}
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
deleted file mode 100644
index c56e9e56f..000000000
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
+++ /dev/null
@@ -1,33 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-rt_3.14": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-preempt-rt.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.14"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
deleted file mode 100644
index 15b9b1b92..000000000
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
+++ /dev/null
@@ -1,33 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.14": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-tiny.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.14"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
deleted file mode 100644
index caefcfc26..000000000
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
+++ /dev/null
@@ -1,33 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.19": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-tiny.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.19"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
index 7f203794e..815c77b81 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
@@ -1,4 +1,4 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.19": }} this
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.4": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -30,4 +30,4 @@ SRC_URI += "file://{{=machine}}-tiny.scc \
# the appropriate changes committed to the upstream linux-yocto repo
SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.19" \ No newline at end of file
+#LINUX_VERSION = "4.4"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_3.14.bbappend
deleted file mode 100644
index 41325ca80..000000000
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_3.14.bbappend
+++ /dev/null
@@ -1,32 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.14": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-standard.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.14" \ No newline at end of file
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_3.19.bbappend
deleted file mode 100644
index b471742dc..000000000
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_3.19.bbappend
+++ /dev/null
@@ -1,32 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.19": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-standard.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.19" \ No newline at end of file
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.1.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.1.bbappend
index 761b9c66c..aecdff025 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.1.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.1.bbappend
@@ -8,10 +8,10 @@ COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard:standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard:standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
{{ if need_new_kbranch == "n": }}
KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.4.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.4.bbappend
new file mode 100644
index 000000000..dd4de311a
--- /dev/null
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.4.bbappend
@@ -0,0 +1,32 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.4": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard:standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard:standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-standard.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+#LINUX_VERSION = "4.4"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/layer/README b/yocto-poky/scripts/lib/bsp/substrate/target/arch/layer/README
index 943dfc441..ca6527cd8 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/layer/README
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/layer/README
@@ -52,7 +52,7 @@ other layers needed. e.g.:
BBLAYERS ?= " \
/path/to/yocto/meta \
- /path/to/yocto/meta-yocto \
+ /path/to/yocto/meta-poky \
/path/to/yocto/meta-yocto-bsp \
/path/to/yocto/meta-{{=layer_name}} \
"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/kernel-list.noinstall b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/kernel-list.noinstall
index 7676ca883..00cf36042 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/kernel-list.noinstall
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/kernel-list.noinstall
@@ -1,5 +1,5 @@
{{ if kernel_choice != "custom": }}
-{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.1) kernel? (y/n)" default:"y"}}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.4) kernel? (y/n)" default:"y"}}
{{ if kernel_choice != "custom" and use_default_kernel == "n": }}
-{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.1"}}
+{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.4"}}
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
deleted file mode 100644
index c56e9e56f..000000000
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
+++ /dev/null
@@ -1,33 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-rt_3.14": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-preempt-rt.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.14"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
index 7c6bc7fdf..815c77b81 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
@@ -1,4 +1,4 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.14": }} this
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.4": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -30,4 +30,4 @@ SRC_URI += "file://{{=machine}}-tiny.scc \
# the appropriate changes committed to the upstream linux-yocto repo
SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.14" \ No newline at end of file
+#LINUX_VERSION = "4.4"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_4.4.bbappend
index 5af490dc5..b88a06c52 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_3.14.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_4.4.bbappend
@@ -1,4 +1,4 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.14": }} this
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.4": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -29,4 +29,4 @@ SRC_URI += "file://{{=machine}}-standard.scc \
# the appropriate changes committed to the upstream linux-yocto repo
SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.14" \ No newline at end of file
+#LINUX_VERSION = "4.4"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/kernel-list.noinstall b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/kernel-list.noinstall
index 7676ca883..00cf36042 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/kernel-list.noinstall
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/kernel-list.noinstall
@@ -1,5 +1,5 @@
{{ if kernel_choice != "custom": }}
-{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.1) kernel? (y/n)" default:"y"}}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.4) kernel? (y/n)" default:"y"}}
{{ if kernel_choice != "custom" and use_default_kernel == "n": }}
-{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.1"}}
+{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.4"}}
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
deleted file mode 100644
index c56e9e56f..000000000
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
+++ /dev/null
@@ -1,33 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-rt_3.14": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-preempt-rt.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.14"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
index 7f203794e..815c77b81 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
@@ -1,4 +1,4 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.19": }} this
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.4": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -30,4 +30,4 @@ SRC_URI += "file://{{=machine}}-tiny.scc \
# the appropriate changes committed to the upstream linux-yocto repo
SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.19" \ No newline at end of file
+#LINUX_VERSION = "4.4"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_3.14.bbappend
deleted file mode 100644
index 858d5fcaa..000000000
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_3.14.bbappend
+++ /dev/null
@@ -1,32 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.14": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/edgerouter" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/edgerouter" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-standard.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "dbe5b52e93ff114b2c0f5da6f6af91f52c18f2b8"
-SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "6eddbf47875ef48ddc5864957a7b63363100782b"
-#LINUX_VERSION = "3.14"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_4.4.bbappend
index f08453147..57c90fa3a 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_3.19.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_4.4.bbappend
@@ -1,4 +1,4 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.19": }} this
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.4": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -29,4 +29,4 @@ SRC_URI += "file://{{=machine}}-standard.scc \
# the appropriate changes committed to the upstream linux-yocto repo
SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.19" \ No newline at end of file
+#LINUX_VERSION = "4.4"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/conf/machine/machine.conf b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/conf/machine/machine.conf
index 018146fc0..583c5e44f 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/conf/machine/machine.conf
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/conf/machine/machine.conf
@@ -74,7 +74,7 @@ XSERVER ?= "xserver-xorg \
xf86-input-evdev \
xf86-video-fbdev"
-PREFERRED_VERSION_u-boot ?= "v2015.07%"
+PREFERRED_VERSION_u-boot ?= "v2016.01%"
{{ input type:"edit" name:"uboot_entrypoint" prio:"40" msg:"Please specify a value for UBOOT_ENTRYPOINT:" default:"0x00000000" }}
UBOOT_ENTRYPOINT = "{{=uboot_entrypoint}}"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/kernel-list.noinstall b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/kernel-list.noinstall
index 7676ca883..00cf36042 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/kernel-list.noinstall
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/kernel-list.noinstall
@@ -1,5 +1,5 @@
{{ if kernel_choice != "custom": }}
-{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.1) kernel? (y/n)" default:"y"}}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.4) kernel? (y/n)" default:"y"}}
{{ if kernel_choice != "custom" and use_default_kernel == "n": }}
-{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.1"}}
+{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.4"}}
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
deleted file mode 100644
index 4700d9802..000000000
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
+++ /dev/null
@@ -1,33 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-rt_3.14": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-preempt-rt.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.14" \ No newline at end of file
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
deleted file mode 100644
index 15b9b1b92..000000000
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
+++ /dev/null
@@ -1,33 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.14": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-tiny.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.14"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
deleted file mode 100644
index caefcfc26..000000000
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
+++ /dev/null
@@ -1,33 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.19": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-tiny.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.19"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
new file mode 100644
index 000000000..815c77b81
--- /dev/null
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.4": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-tiny.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+#LINUX_VERSION = "4.4"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_3.14.bbappend
deleted file mode 100644
index 6f1e7b0a4..000000000
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_3.14.bbappend
+++ /dev/null
@@ -1,32 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.14": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-standard.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.14"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_3.19.bbappend
deleted file mode 100644
index 44086af80..000000000
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_3.19.bbappend
+++ /dev/null
@@ -1,32 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.19": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-standard.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.19"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.4.bbappend
index c327d16d7..b88a06c52 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_3.19.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.4.bbappend
@@ -1,4 +1,4 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.19": }} this
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.4": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -29,4 +29,4 @@ SRC_URI += "file://{{=machine}}-standard.scc \
# the appropriate changes committed to the upstream linux-yocto repo
SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.19" \ No newline at end of file
+#LINUX_VERSION = "4.4"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/kernel-list.noinstall b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/kernel-list.noinstall
index 7676ca883..00cf36042 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/kernel-list.noinstall
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/kernel-list.noinstall
@@ -1,5 +1,5 @@
{{ if kernel_choice != "custom": }}
-{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.1) kernel? (y/n)" default:"y"}}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.4) kernel? (y/n)" default:"y"}}
{{ if kernel_choice != "custom" and use_default_kernel == "n": }}
-{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.1"}}
+{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.4"}}
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-dev.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-dev.bbappend
index 355528595..7e3ce5ba1 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-dev.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-dev.bbappend
@@ -19,16 +19,16 @@ COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
{{ input type:"choicelist" name:"existing_kbranch" nameappend:"powerpc" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/qemuppc" }}
{{ if need_new_kbranch == "y" and qemuarch == "i386": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc" }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
{{ if need_new_kbranch == "n" and qemuarch == "i386": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc" }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
{{ if need_new_kbranch == "y" and qemuarch == "x86_64": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc-64" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc-64/base" }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
{{ if need_new_kbranch == "n" and qemuarch == "x86_64": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc-64" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc-64/base" }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
{{ if need_new_kbranch == "y" and qemuarch == "mips": }}
{{ input type:"choicelist" name:"new_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
deleted file mode 100644
index 9e5aa976c..000000000
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
+++ /dev/null
@@ -1,62 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-rt_3.14": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "arm": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"arm" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "arm": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"arm" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "powerpc": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"powerpc" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "powerpc": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"powerpc" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/qemuppc" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "i386": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "i386": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "x86_64": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "x86_64": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "mips": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "mips": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "mips64": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "mips64": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-preempt-rt.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.14"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
deleted file mode 100644
index 51fc7d0aa..000000000
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
+++ /dev/null
@@ -1,62 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.19": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "arm": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"arm" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "arm": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"arm" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "powerpc": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"powerpc" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "powerpc": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"powerpc" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "i386": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "i386": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/common-pc" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "x86_64": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "x86_64": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "mips": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "mips": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "mips64": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "mips64": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-tiny.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.19"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
index 1faac4443..e256e08bd 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
@@ -1,4 +1,4 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.14": }} this
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.4": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -59,4 +59,4 @@ SRC_URI += "file://{{=machine}}-tiny.scc \
# the appropriate changes committed to the upstream linux-yocto repo
SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.14"
+#LINUX_VERSION = "4.4"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_3.14.bbappend
deleted file mode 100644
index fbaed33b1..000000000
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_3.14.bbappend
+++ /dev/null
@@ -1,61 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.14": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "arm": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base your new BSP branch on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "arm": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose an existing machine branch to use for this BSP:" default:"standard/arm-versatile-926ejs" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "powerpc": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"powerpc" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "powerpc": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"powerpc" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/qemuppc" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "i386": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "i386": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "x86_64": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc-64" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc-64/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "x86_64": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc-64" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc-64/base" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "mips": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "mips": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/mti-malta32" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "mips64": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "mips64": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/mti-malta64" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Would you like SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-standard.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.14"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.1.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.1.bbappend
index 82bb970cf..fce67b4aa 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.1.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.1.bbappend
@@ -19,16 +19,16 @@ COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
{{ input type:"choicelist" name:"existing_kbranch" nameappend:"powerpc" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/qemuppc" }}
{{ if need_new_kbranch == "y" and qemuarch == "i386": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc" }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
{{ if need_new_kbranch == "n" and qemuarch == "i386": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc" }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
{{ if need_new_kbranch == "y" and qemuarch == "x86_64": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc-64" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc-64/base" }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
{{ if need_new_kbranch == "n" and qemuarch == "x86_64": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc-64" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc-64/base" }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
{{ if need_new_kbranch == "n" and qemuarch == "mips": }}
{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/mti-malta32" }}
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.4.bbappend
index 7b590ad3d..409793228 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_3.19.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.4.bbappend
@@ -1,4 +1,4 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.19": }} this
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.4": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -19,16 +19,16 @@ COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
{{ input type:"choicelist" name:"existing_kbranch" nameappend:"powerpc" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/qemuppc" }}
{{ if need_new_kbranch == "y" and qemuarch == "i386": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc" }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
{{ if need_new_kbranch == "n" and qemuarch == "i386": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc" }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
{{ if need_new_kbranch == "y" and qemuarch == "x86_64": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc-64" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc-64/base" }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
{{ if need_new_kbranch == "n" and qemuarch == "x86_64": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc-64" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc-64/base" }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
{{ if need_new_kbranch == "n" and qemuarch == "mips": }}
{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/mti-malta32" }}
@@ -58,4 +58,4 @@ SRC_URI += "file://{{=machine}}-standard.scc \
# the appropriate changes committed to the upstream linux-yocto repo
SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.19"
+#LINUX_VERSION = "4.4"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/kernel-list.noinstall b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/kernel-list.noinstall
index dbbe9342f..00cf36042 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/kernel-list.noinstall
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/kernel-list.noinstall
@@ -1,5 +1,5 @@
{{ if kernel_choice != "custom": }}
-{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.1) kernel? (y/n)" default:"y"}}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.4) kernel? (y/n)" default:"y"}}
{{ if kernel_choice != "custom" and use_default_kernel == "n": }}
-{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_3.19"}}
+{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.4"}}
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
deleted file mode 100644
index 4700d9802..000000000
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
+++ /dev/null
@@ -1,33 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-rt_3.14": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-preempt-rt.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.14" \ No newline at end of file
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
deleted file mode 100644
index 15b9b1b92..000000000
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
+++ /dev/null
@@ -1,33 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.14": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-tiny.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.14"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
deleted file mode 100644
index caefcfc26..000000000
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
+++ /dev/null
@@ -1,33 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.19": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-tiny.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.19"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
new file mode 100644
index 000000000..815c77b81
--- /dev/null
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.4": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-tiny.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+#LINUX_VERSION = "4.4"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_3.14.bbappend
deleted file mode 100644
index 62a99d54b..000000000
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_3.14.bbappend
+++ /dev/null
@@ -1,32 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.14": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc-64" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc-64/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc-64" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc-64/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-standard.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.14"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_3.19.bbappend
deleted file mode 100644
index a5dd37da9..000000000
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_3.19.bbappend
+++ /dev/null
@@ -1,32 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.19": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc-64" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc-64/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc-64" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc-64/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-standard.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.19"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.1.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.1.bbappend
index 8d0bc9713..1e99a04dc 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.1.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.1.bbappend
@@ -8,10 +8,10 @@ COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc-64" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc-64/base" }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc-64" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc-64/base" }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
{{ if need_new_kbranch == "n": }}
KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.4.bbappend
index c327d16d7..b88a06c52 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_3.19.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.4.bbappend
@@ -1,4 +1,4 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.19": }} this
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.4": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -29,4 +29,4 @@ SRC_URI += "file://{{=machine}}-standard.scc \
# the appropriate changes committed to the upstream linux-yocto repo
SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "3.19" \ No newline at end of file
+#LINUX_VERSION = "4.4"
diff --git a/yocto-poky/scripts/lib/devtool/__init__.py b/yocto-poky/scripts/lib/devtool/__init__.py
index 50604e6e0..ff97dfc94 100644
--- a/yocto-poky/scripts/lib/devtool/__init__.py
+++ b/yocto-poky/scripts/lib/devtool/__init__.py
@@ -22,6 +22,7 @@ import os
import sys
import subprocess
import logging
+import re
logger = logging.getLogger('devtool')
@@ -100,18 +101,20 @@ def setup_tinfoil(config_only=False, basepath=None, tracking=False):
"""Initialize tinfoil api from bitbake"""
import scriptpath
orig_cwd = os.path.abspath(os.curdir)
- if basepath:
- os.chdir(basepath)
- bitbakepath = scriptpath.add_bitbake_lib_path()
- if not bitbakepath:
- logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
- sys.exit(1)
-
- import bb.tinfoil
- tinfoil = bb.tinfoil.Tinfoil(tracking=tracking)
- tinfoil.prepare(config_only)
- tinfoil.logger.setLevel(logger.getEffectiveLevel())
- os.chdir(orig_cwd)
+ try:
+ if basepath:
+ os.chdir(basepath)
+ bitbakepath = scriptpath.add_bitbake_lib_path()
+ if not bitbakepath:
+ logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
+ sys.exit(1)
+
+ import bb.tinfoil
+ tinfoil = bb.tinfoil.Tinfoil(tracking=tracking)
+ tinfoil.prepare(config_only)
+ tinfoil.logger.setLevel(logger.getEffectiveLevel())
+ finally:
+ os.chdir(orig_cwd)
return tinfoil
def get_recipe_file(cooker, pn):
@@ -126,7 +129,7 @@ def get_recipe_file(cooker, pn):
logger.error("Unable to find any recipe file matching %s" % pn)
return recipefile
-def parse_recipe(config, tinfoil, pn, appends):
+def parse_recipe(config, tinfoil, pn, appends, filter_workspace=True):
"""Parse recipe of a package"""
import oe.recipeutils
recipefile = get_recipe_file(tinfoil.cooker, pn)
@@ -135,27 +138,44 @@ def parse_recipe(config, tinfoil, pn, appends):
return None
if appends:
append_files = tinfoil.cooker.collection.get_file_appends(recipefile)
- # Filter out appends from the workspace
- append_files = [path for path in append_files if
- not path.startswith(config.workspace_path)]
+ if filter_workspace:
+ # Filter out appends from the workspace
+ append_files = [path for path in append_files if
+ not path.startswith(config.workspace_path)]
else:
append_files = None
return oe.recipeutils.parse_recipe(recipefile, append_files,
tinfoil.config_data)
-def check_workspace_recipe(workspace, pn, checksrc=True):
+def check_workspace_recipe(workspace, pn, checksrc=True, bbclassextend=False):
"""
Check that a recipe is in the workspace and (optionally) that source
is present.
"""
- if not pn in workspace:
+
+ workspacepn = pn
+
+ for recipe, value in workspace.iteritems():
+ if recipe == pn:
+ break
+ if bbclassextend:
+ recipefile = value['recipefile']
+ if recipefile:
+ targets = get_bbclassextend_targets(recipefile, recipe)
+ if pn in targets:
+ workspacepn = recipe
+ break
+ else:
raise DevtoolError("No recipe named '%s' in your workspace" % pn)
+
if checksrc:
- srctree = workspace[pn]['srctree']
+ srctree = workspace[workspacepn]['srctree']
if not os.path.exists(srctree):
- raise DevtoolError("Source tree %s for recipe %s does not exist" % (srctree, pn))
+ raise DevtoolError("Source tree %s for recipe %s does not exist" % (srctree, workspacepn))
if not os.listdir(srctree):
- raise DevtoolError("Source tree %s for recipe %s is empty" % (srctree, pn))
+ raise DevtoolError("Source tree %s for recipe %s is empty" % (srctree, workspacepn))
+
+ return workspacepn
def use_external_build(same_dir, no_same_dir, d):
"""
@@ -197,3 +217,41 @@ def setup_git_repo(repodir, version, devbranch, basetag='devtool-base'):
bb.process.run('git checkout -b %s' % devbranch, cwd=repodir)
bb.process.run('git tag -f %s' % basetag, cwd=repodir)
+
+def recipe_to_append(recipefile, config, wildcard=False):
+ """
+ Convert a recipe file to a bbappend file path within the workspace.
+ NOTE: if the bbappend already exists, you should be using
+ workspace[args.recipename]['bbappend'] instead of calling this
+ function.
+ """
+ appendname = os.path.splitext(os.path.basename(recipefile))[0]
+ if wildcard:
+ appendname = re.sub(r'_.*', '_%', appendname)
+ appendpath = os.path.join(config.workspace_path, 'appends')
+ appendfile = os.path.join(appendpath, appendname + '.bbappend')
+ return appendfile
+
+def get_bbclassextend_targets(recipefile, pn):
+ """
+ Cheap function to get BBCLASSEXTEND and then convert that to the
+ list of targets that would result.
+ """
+ import bb.utils
+
+ values = {}
+ def get_bbclassextend_varfunc(varname, origvalue, op, newlines):
+ values[varname] = origvalue
+ return origvalue, None, 0, True
+ with open(recipefile, 'r') as f:
+ bb.utils.edit_metadata(f, ['BBCLASSEXTEND'], get_bbclassextend_varfunc)
+
+ targets = []
+ bbclassextend = values.get('BBCLASSEXTEND', '').split()
+ if bbclassextend:
+ for variant in bbclassextend:
+ if variant == 'nativesdk':
+ targets.append('%s-%s' % (variant, pn))
+ elif variant in ['native', 'cross', 'crosssdk']:
+ targets.append('%s-%s' % (pn, variant))
+ return targets
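For reference, the variant-to-target naming applied by the new get_bbclassextend_targets() helper can be exercised on its own; below is a minimal standalone sketch (the recipe name and BBCLASSEXTEND value are made-up examples, and this code is not part of the patch):

    def bbclassextend_to_targets(bbclassextend, pn):
        # Mirror the naming used above: 'nativesdk' prefixes the recipe name,
        # the other class-extension variants are appended as a suffix.
        targets = []
        for variant in bbclassextend.split():
            if variant == 'nativesdk':
                targets.append('%s-%s' % (variant, pn))   # nativesdk-<pn>
            elif variant in ['native', 'cross', 'crosssdk']:
                targets.append('%s-%s' % (pn, variant))   # <pn>-<variant>
        return targets

    print(bbclassextend_to_targets('native nativesdk', 'zlib'))
    # ['zlib-native', 'nativesdk-zlib']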
diff --git a/yocto-poky/scripts/lib/devtool/build.py b/yocto-poky/scripts/lib/devtool/build.py
index 9b58858a6..48f6fe1be 100644
--- a/yocto-poky/scripts/lib/devtool/build.py
+++ b/yocto-poky/scripts/lib/devtool/build.py
@@ -25,51 +25,62 @@ from devtool import exec_build_env_command, check_workspace_recipe, DevtoolError
logger = logging.getLogger('devtool')
-def plugin_init(pluginlist):
- """Plugin initialization"""
- pass
-def _create_conf_file(values, conf_file=None):
- if not conf_file:
- fd, conf_file = tempfile.mkstemp(suffix='.conf')
- elif not os.path.exists(os.path.dirname(conf_file)):
- logger.debug("Creating folder %s" % os.path.dirname(conf_file))
- bb.utils.mkdirhier(os.path.dirname(conf_file))
- with open(conf_file, 'w') as f:
- for key, value in values.iteritems():
- f.write('%s = "%s"\n' % (key, value))
- return conf_file
+def _set_file_values(fn, values):
+ remaining = values.keys()
+
+ def varfunc(varname, origvalue, op, newlines):
+ newvalue = values.get(varname, origvalue)
+ remaining.remove(varname)
+ return (newvalue, '=', 0, True)
+
+ with open(fn, 'r') as f:
+ (updated, newlines) = bb.utils.edit_metadata(f, values, varfunc)
+
+ for item in remaining:
+ updated = True
+ newlines.append('%s = "%s"' % (item, values[item]))
+
+ if updated:
+ with open(fn, 'w') as f:
+ f.writelines(newlines)
+ return updated
+
+def _get_build_tasks(config):
+ tasks = config.get('Build', 'build_task', 'populate_sysroot,packagedata').split(',')
+ return ['do_%s' % task.strip() for task in tasks]
def build(args, config, basepath, workspace):
"""Entry point for the devtool 'build' subcommand"""
- check_workspace_recipe(workspace, args.recipename)
+ workspacepn = check_workspace_recipe(workspace, args.recipename, bbclassextend=True)
- build_task = config.get('Build', 'build_task', 'populate_sysroot')
+ build_tasks = _get_build_tasks(config)
- postfile_param = ""
- postfile = ""
+ bbappend = workspace[workspacepn]['bbappend']
if args.disable_parallel_make:
logger.info("Disabling 'make' parallelism")
- postfile = os.path.join(basepath, 'conf', 'disable_parallelism.conf')
- _create_conf_file({'PARALLEL_MAKE':''}, postfile)
- postfile_param = "-R %s" % postfile
+ _set_file_values(bbappend, {'PARALLEL_MAKE': ''})
try:
- exec_build_env_command(config.init_path, basepath, 'bitbake -c %s %s %s' % (build_task, postfile_param, args.recipename), watch=True)
+ bbargs = []
+ for task in build_tasks:
+ if args.recipename.endswith('-native') and 'package' in task:
+ continue
+ bbargs.append('%s:%s' % (args.recipename, task))
+ exec_build_env_command(config.init_path, basepath, 'bitbake %s' % ' '.join(bbargs), watch=True)
except bb.process.ExecutionError as e:
# We've already seen the output since watch=True, so just ensure we return something to the user
return e.exitcode
finally:
- if postfile:
- logger.debug('Removing postfile')
- os.remove(postfile)
+ if args.disable_parallel_make:
+ _set_file_values(bbappend, {'PARALLEL_MAKE': None})
return 0
def register_commands(subparsers, context):
"""Register devtool subcommands from this plugin"""
parser_build = subparsers.add_parser('build', help='Build a recipe',
- description='Builds the specified recipe using bitbake',
- formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+ description='Builds the specified recipe using bitbake (up to and including %s)' % ', '.join(_get_build_tasks(context.config)),
+ group='working')
parser_build.add_argument('recipename', help='Recipe to build')
parser_build.add_argument('-s', '--disable-parallel-make', action="store_true", help='Disable make parallelism')
parser_build.set_defaults(func=build)
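To make the new build behaviour concrete, here is a small standalone sketch of how the configured build_task list becomes per-recipe "recipe:task" bitbake arguments (the recipe names are hypothetical; the default task list is the one used by _get_build_tasks above, and this code is not part of the patch):

    def build_args(recipename, build_task_setting='populate_sysroot,packagedata'):
        # Turn "populate_sysroot,packagedata" into ['do_populate_sysroot', 'do_packagedata']
        tasks = ['do_%s' % t.strip() for t in build_task_setting.split(',')]
        bbargs = []
        for task in tasks:
            # As in build() above, packaging tasks are skipped for native recipes
            if recipename.endswith('-native') and 'package' in task:
                continue
            bbargs.append('%s:%s' % (recipename, task))
        return 'bitbake %s' % ' '.join(bbargs)

    print(build_args('foo'))         # bitbake foo:do_populate_sysroot foo:do_packagedata
    print(build_args('foo-native'))  # bitbake foo-native:do_populate_sysroot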
diff --git a/yocto-poky/scripts/lib/devtool/build-image.py b/yocto-poky/scripts/lib/devtool/build_image.py
index e53239dd2..e51d76647 100644
--- a/yocto-poky/scripts/lib/devtool/build-image.py
+++ b/yocto-poky/scripts/lib/devtool/build_image.py
@@ -25,6 +25,9 @@ from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, Devtool
logger = logging.getLogger('devtool')
+class TargetNotImageError(Exception):
+ pass
+
def _get_packages(tinfoil, workspace, config):
"""Get list of packages from recipes in the workspace."""
result = []
@@ -51,6 +54,24 @@ def build_image(args, config, basepath, workspace):
if not image:
raise DevtoolError('Unable to determine image to build, please specify one')
+ try:
+ if args.add_packages:
+ add_packages = args.add_packages.split(',')
+ else:
+ add_packages = None
+ result, outputdir = build_image_task(config, basepath, workspace, image, add_packages)
+ except TargetNotImageError:
+ if auto_image:
+ raise DevtoolError('Unable to determine image to build, please specify one')
+ else:
+ raise DevtoolError('Specified recipe %s is not an image recipe' % image)
+
+ if result == 0:
+ logger.info('Successfully built %s. You can find output files in %s'
+ % (image, outputdir))
+ return result
+
+def build_image_task(config, basepath, workspace, image, add_packages=None, task=None, extra_append=None):
appendfile = os.path.join(config.workspace_path, 'appends',
'%s.bbappend' % image)
@@ -63,49 +84,71 @@ def build_image(args, config, basepath, workspace):
rd = parse_recipe(config, tinfoil, image, True)
if not rd:
# Error already shown
- return 1
+ return (1, None)
if not bb.data.inherits_class('image', rd):
- if auto_image:
- raise DevtoolError('Unable to determine image to build, please specify one')
- else:
- raise DevtoolError('Specified recipe %s is not an image recipe' % image)
+ raise TargetNotImageError()
+ outputdir = None
try:
- if workspace:
- packages = _get_packages(tinfoil, workspace, config)
- if packages:
- with open(appendfile, 'w') as afile:
- # include packages from workspace recipes into the image
- afile.write('IMAGE_INSTALL_append = " %s"\n' % ' '.join(packages))
- logger.info('Building image %s with the following '
- 'additional packages: %s', image, ' '.join(packages))
+ if workspace or add_packages:
+ if add_packages:
+ packages = add_packages
else:
- logger.warning('No packages to add, building image %s unmodified', image)
+ packages = _get_packages(tinfoil, workspace, config)
else:
- logger.warning('No recipes in workspace, building image %s unmodified', image)
+ packages = None
+ if not task:
+ if not packages and not add_packages and workspace:
+ logger.warning('No recipes in workspace, building image %s unmodified', image)
+ elif not packages:
+ logger.warning('No packages to add, building image %s unmodified', image)
- deploy_dir_image = tinfoil.config_data.getVar('DEPLOY_DIR_IMAGE', True)
+ if packages or extra_append:
+ bb.utils.mkdirhier(os.path.dirname(appendfile))
+ with open(appendfile, 'w') as afile:
+ if packages:
+ # include packages from workspace recipes into the image
+ afile.write('IMAGE_INSTALL_append = " %s"\n' % ' '.join(packages))
+ if not task:
+ logger.info('Building image %s with the following '
+ 'additional packages: %s', image, ' '.join(packages))
+ if extra_append:
+ for line in extra_append:
+ afile.write('%s\n' % line)
+
+ if task in ['populate_sdk', 'populate_sdk_ext']:
+ outputdir = rd.getVar('SDK_DEPLOY', True)
+ else:
+ outputdir = rd.getVar('DEPLOY_DIR_IMAGE', True)
tinfoil.shutdown()
- # run bitbake to build image
+ options = ''
+ if task:
+ options += '-c %s' % task
+
+ # run bitbake to build image (or specified task)
try:
exec_build_env_command(config.init_path, basepath,
- 'bitbake %s' % image, watch=True)
+ 'bitbake %s %s' % (options, image), watch=True)
except ExecutionError as err:
- return err.exitcode
+ return (err.exitcode, None)
finally:
if os.path.isfile(appendfile):
os.unlink(appendfile)
+ return (0, outputdir)
- logger.info('Successfully built %s. You can find output files in %s'
- % (image, deploy_dir_image))
def register_commands(subparsers, context):
"""Register devtool subcommands from the build-image plugin"""
parser = subparsers.add_parser('build-image',
help='Build image including workspace recipe packages',
description='Builds an image, extending it to include '
- 'packages from recipes in the workspace')
+ 'packages from recipes in the workspace',
+ group='testbuild', order=-10)
parser.add_argument('imagename', help='Image recipe to build', nargs='?')
+ parser.add_argument('-p', '--add-packages', help='Instead of adding packages for the '
+ 'entire workspace, specify packages to be added to the image '
+ '(separate multiple packages by commas)',
+ metavar='PACKAGES')
parser.set_defaults(func=build_image)
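As a rough illustration (the package names are made up; this code is not part of the patch), the temporary image bbappend that build_image_task() writes is composed from the workspace or --add-packages packages plus any extra_append lines, along these lines:

    def compose_append(packages=None, extra_append=None):
        lines = []
        if packages:
            # include packages from workspace recipes into the image
            lines.append('IMAGE_INSTALL_append = " %s"' % ' '.join(packages))
        for line in extra_append or []:
            lines.append(line)
        return '\n'.join(lines) + '\n'

    print(compose_append(['foo', 'bar-utils'], ['SDK_DERIVATIVE = "1"']))
    # IMAGE_INSTALL_append = " foo bar-utils"
    # SDK_DERIVATIVE = "1"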
diff --git a/yocto-poky/scripts/lib/devtool/build_sdk.py b/yocto-poky/scripts/lib/devtool/build_sdk.py
new file mode 100644
index 000000000..b89d65b0c
--- /dev/null
+++ b/yocto-poky/scripts/lib/devtool/build_sdk.py
@@ -0,0 +1,65 @@
+# Development tool - build-sdk command plugin
+#
+# Copyright (C) 2015-2016 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import subprocess
+import logging
+import glob
+import shutil
+import errno
+import sys
+import tempfile
+from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError
+from devtool import build_image
+
+logger = logging.getLogger('devtool')
+
+
+def build_sdk(args, config, basepath, workspace):
+ """Entry point for the devtool build-sdk command"""
+
+ sdk_targets = config.get('SDK', 'sdk_targets', '').split()
+ if sdk_targets:
+ image = sdk_targets[0]
+ else:
+ raise DevtoolError('Unable to determine image to build SDK for')
+
+ extra_append = ['SDK_DERIVATIVE = "1"']
+ try:
+ result, outputdir = build_image.build_image_task(config,
+ basepath,
+ workspace,
+ image,
+ task='populate_sdk_ext',
+ extra_append=extra_append)
+ except build_image.TargetNotImageError:
+ raise DevtoolError('Unable to determine image to build SDK for')
+
+ if result == 0:
+ logger.info('Successfully built SDK. You can find output files in %s'
+ % outputdir)
+ return result
+
+
+def register_commands(subparsers, context):
+ """Register devtool subcommands"""
+ if context.fixed_setup:
+ parser_build_sdk = subparsers.add_parser('build-sdk',
+ help='Build a derivative SDK of this one',
+ description='Builds an extensible SDK based upon this one and the items in your workspace',
+ group='advanced')
+ parser_build_sdk.set_defaults(func=build_sdk)
diff --git a/yocto-poky/scripts/lib/devtool/deploy.py b/yocto-poky/scripts/lib/devtool/deploy.py
index c90c6b1f7..66644ccb6 100644
--- a/yocto-poky/scripts/lib/devtool/deploy.py
+++ b/yocto-poky/scripts/lib/devtool/deploy.py
@@ -1,6 +1,6 @@
# Development tool - deploy/undeploy command plugin
#
-# Copyright (C) 2014-2015 Intel Corporation
+# Copyright (C) 2014-2016 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -19,13 +19,127 @@
import os
import subprocess
import logging
+import tempfile
+import shutil
+import argparse_oe
from devtool import exec_fakeroot, setup_tinfoil, check_workspace_recipe, DevtoolError
logger = logging.getLogger('devtool')
+deploylist_path = '/.devtool'
+
+def _prepare_remote_script(deploy, verbose=False, dryrun=False, undeployall=False, nopreserve=False, nocheckspace=False):
+ """
+ Prepare a shell script for running on the target to
+ deploy/undeploy files. We have to be careful what we put in this
+ script - only commands that are likely to be available on the
+ target are suitable (the target might be constrained, e.g. using
+ busybox rather than bash with coreutils).
+ """
+ lines = []
+ lines.append('#!/bin/sh')
+ lines.append('set -e')
+ if undeployall:
+ # Yes, I know this is crude - but it does work
+ lines.append('for entry in %s/*.list; do' % deploylist_path)
+ lines.append('[ ! -f $entry ] && exit')
+ lines.append('set `basename $entry | sed "s/.list//"`')
+ if dryrun:
+ if not deploy:
+ lines.append('echo "Previously deployed files for $1:"')
+ lines.append('manifest="%s/$1.list"' % deploylist_path)
+ lines.append('preservedir="%s/$1.preserve"' % deploylist_path)
+ lines.append('if [ -f $manifest ] ; then')
+ # Read manifest in reverse and delete files / remove empty dirs
+ lines.append(' sed \'1!G;h;$!d\' $manifest | while read file')
+ lines.append(' do')
+ if dryrun:
+ lines.append(' if [ ! -d $file ] ; then')
+ lines.append(' echo $file')
+ lines.append(' fi')
+ else:
+ lines.append(' if [ -d $file ] ; then')
+ # Avoid deleting a preserved directory in case it has special perms
+ lines.append(' if [ ! -d $preservedir/$file ] ; then')
+ lines.append(' rmdir $file > /dev/null 2>&1 || true')
+ lines.append(' fi')
+ lines.append(' else')
+ lines.append(' rm $file')
+ lines.append(' fi')
+ lines.append(' done')
+ if not dryrun:
+ lines.append(' rm $manifest')
+ if not deploy and not dryrun:
+ # May as well remove all traces
+ lines.append(' rmdir `dirname $manifest` > /dev/null 2>&1 || true')
+ lines.append('fi')
+
+ if deploy:
+ if not nocheckspace:
+ # Check for available space
+ # FIXME This doesn't take into account files spread across multiple
+ # partitions, but doing that is non-trivial
+ # Find the part of the destination path that exists
+ lines.append('checkpath="$2"')
+ lines.append('while [ "$checkpath" != "/" ] && [ ! -e $checkpath ]')
+ lines.append('do')
+ lines.append(' checkpath=`dirname "$checkpath"`')
+ lines.append('done')
+ lines.append('freespace=`df -P $checkpath | sed "1d" | awk \'{ print $4 }\'`')
+ # First line of the file is the total space
+ lines.append('total=`head -n1 $3`')
+ lines.append('if [ $total -gt $freespace ] ; then')
+ lines.append(' echo "ERROR: insufficient space on target (available ${freespace}, needed ${total})"')
+ lines.append(' exit 1')
+ lines.append('fi')
+ if not nopreserve:
+ # Preserve any files that exist. Note that this will add to the
+ # preserved list with successive deployments if the list of files
+ # deployed changes, but because we've deleted any previously
+ # deployed files at this point it will never preserve anything
+ # that was deployed, only files that existed prior to any deploying
+ # (which makes the most sense)
+ lines.append('cat $3 | sed "1d" | while read file fsize')
+ lines.append('do')
+ lines.append(' if [ -e $file ] ; then')
+ lines.append(' dest="$preservedir/$file"')
+ lines.append(' mkdir -p `dirname $dest`')
+ lines.append(' mv $file $dest')
+ lines.append(' fi')
+ lines.append('done')
+ lines.append('rm $3')
+ lines.append('mkdir -p `dirname $manifest`')
+ lines.append('mkdir -p $2')
+ if verbose:
+ lines.append(' tar xv -C $2 -f - | tee $manifest')
+ else:
+ lines.append(' tar xv -C $2 -f - > $manifest')
+ lines.append('sed -i "s!^./!$2!" $manifest')
+ elif not dryrun:
+ # Put any preserved files back
+ lines.append('if [ -d $preservedir ] ; then')
+ lines.append(' cd $preservedir')
+ lines.append(' find . -type f -exec mv {} /{} \;')
+ lines.append(' cd /')
+ lines.append(' rm -rf $preservedir')
+ lines.append('fi')
+
+ if undeployall:
+ if not dryrun:
+ lines.append('echo "NOTE: Successfully undeployed $1"')
+ lines.append('done')
+
+ # Delete the script itself
+ lines.append('rm $0')
+ lines.append('')
+
+ return '\n'.join(lines)
+
+
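
    For orientation (an aside, not part of the patch): the script produced by
    _prepare_remote_script() is written to a temporary directory, copied to the target with
    scp together with the file list, and then executed over ssh with the recipe name,
    destination directory and file-list path as arguments while the tar stream of the files
    to deploy is piped into it. A minimal sketch of that flow, with a hypothetical target
    and hypothetical /tmp paths:

        # Illustration only - 'root@192.168.7.2' and the /tmp paths are hypothetical
        import os, subprocess, tempfile

        script = _prepare_remote_script(deploy=True, verbose=True)
        tmpdir = tempfile.mkdtemp(prefix='devtool')
        with open(os.path.join(tmpdir, 'devtool_deploy.sh'), 'w') as f:
            f.write(script)
        subprocess.call('scp %s/* root@192.168.7.2:/tmp' % tmpdir, shell=True)
        # then, from the recipe output directory:
        #   tar cf - . | ssh root@192.168.7.2 'sh /tmp/devtool_deploy.sh <recipe> <destdir> /tmp/devtool_deploy.list'
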
def deploy(args, config, basepath, workspace):
"""Entry point for the devtool 'deploy' subcommand"""
import re
+ import math
import oe.recipeutils
check_workspace_recipe(workspace, args.recipename, checksrc=False)
@@ -36,9 +150,8 @@ def deploy(args, config, basepath, workspace):
destdir = '/'
else:
args.target = host
-
- deploy_dir = os.path.join(basepath, 'target_deploy', args.target)
- deploy_file = os.path.join(deploy_dir, args.recipename + '.list')
+ if not destdir.endswith('/'):
+ destdir += '/'
tinfoil = setup_tinfoil(basepath=basepath)
try:
@@ -52,60 +165,82 @@ def deploy(args, config, basepath, workspace):
'recipe? If so, the install step has not installed '
'any files.' % args.recipename)
+ filelist = []
+ ftotalsize = 0
+ for root, _, files in os.walk(recipe_outdir):
+ for fn in files:
+ # Get the size in kiB (since we'll be comparing it to the output of du -k)
+ # MUST use lstat() here not stat() or getfilesize() since we don't want to
+ # dereference symlinks
+ fsize = int(math.ceil(float(os.lstat(os.path.join(root, fn)).st_size)/1024))
+ ftotalsize += fsize
+ # The path as it would appear on the target
+ fpath = os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn)
+ filelist.append((fpath, fsize))
+
if args.dry_run:
print('Files to be deployed for %s on target %s:' % (args.recipename, args.target))
- for root, _, files in os.walk(recipe_outdir):
- for fn in files:
- print(' %s' % os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn))
+ for item, _ in filelist:
+ print(' %s' % item)
return 0
- if os.path.exists(deploy_file):
- if undeploy(args, config, basepath, workspace):
- # Error already shown
- return 1
extraoptions = ''
if args.no_host_check:
extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
- if args.show_status:
- tarextractopts = 'xv'
- else:
- tarextractopts = 'x'
+ if not args.show_status:
extraoptions += ' -q'
- # We cannot use scp here, because it doesn't preserve symlinks
- ret = exec_fakeroot(rd, 'tar cf - . | ssh %s %s \'tar %s -C %s -f -\'' % (extraoptions, args.target, tarextractopts, destdir), cwd=recipe_outdir, shell=True)
+
+ # In order to delete previously deployed files and have the manifest file on
+ # the target, we write out a shell script and then copy it to the target
+ # so we can then run it (piping tar output to it).
+ # (We cannot use scp here, because it doesn't preserve symlinks.)
+ tmpdir = tempfile.mkdtemp(prefix='devtool')
+ try:
+ tmpscript = '/tmp/devtool_deploy.sh'
+ tmpfilelist = os.path.join(os.path.dirname(tmpscript), 'devtool_deploy.list')
+ shellscript = _prepare_remote_script(deploy=True,
+ verbose=args.show_status,
+ nopreserve=args.no_preserve,
+ nocheckspace=args.no_check_space)
+ # Write out the script to a file
+ with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
+ f.write(shellscript)
+ # Write out the file list
+ with open(os.path.join(tmpdir, os.path.basename(tmpfilelist)), 'w') as f:
+ f.write('%d\n' % ftotalsize)
+ for fpath, fsize in filelist:
+ f.write('%s %d\n' % (fpath, fsize))
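
    The file list written here has a deliberately simple format so the remote script can
    parse it with head and sed: the first line is the total size in kiB, and every
    following line is '<path on target> <size in kiB>'. A hypothetical example (paths and
    sizes are made up):

        12
        /usr/bin/hello 8
        /usr/share/doc/hello/README 4
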
+ # Copy them to the target
+ ret = subprocess.call("scp %s %s/* %s:%s" % (extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
+ if ret != 0:
+ raise DevtoolError('Failed to copy script to %s - rerun with -s to '
+ 'get a complete error message' % args.target)
+ finally:
+ shutil.rmtree(tmpdir)
+
+ # Now run the script
+ ret = exec_fakeroot(rd, 'tar cf - . | ssh %s %s \'sh %s %s %s %s\'' % (extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True)
if ret != 0:
raise DevtoolError('Deploy failed - rerun with -s to get a complete '
'error message')
logger.info('Successfully deployed %s' % recipe_outdir)
- if not os.path.exists(deploy_dir):
- os.makedirs(deploy_dir)
-
files_list = []
for root, _, files in os.walk(recipe_outdir):
for filename in files:
filename = os.path.relpath(os.path.join(root, filename), recipe_outdir)
files_list.append(os.path.join(destdir, filename))
- with open(deploy_file, 'w') as fobj:
- fobj.write('\n'.join(files_list))
-
return 0
def undeploy(args, config, basepath, workspace):
"""Entry point for the devtool 'undeploy' subcommand"""
- deploy_file = os.path.join(basepath, 'target_deploy', args.target, args.recipename + '.list')
- if not os.path.exists(deploy_file):
- raise DevtoolError('%s has not been deployed' % args.recipename)
-
- if args.dry_run:
- print('Previously deployed files to be un-deployed for %s on target %s:' % (args.recipename, args.target))
- with open(deploy_file, 'r') as f:
- for line in f:
- print(' %s' % line.rstrip())
- return 0
+ if args.all and args.recipename:
+ raise argparse_oe.ArgumentUsageError('Cannot specify -a/--all with a recipe name', 'undeploy-target')
+ elif not args.recipename and not args.all:
+ raise argparse_oe.ArgumentUsageError('If you don\'t specify a recipe, you must specify -a/--all', 'undeploy-target')
extraoptions = ''
if args.no_host_check:
@@ -113,36 +248,57 @@ def undeploy(args, config, basepath, workspace):
if not args.show_status:
extraoptions += ' -q'
- ret = subprocess.call("scp %s %s %s:/tmp" % (extraoptions, deploy_file, args.target), shell=True)
- if ret != 0:
- raise DevtoolError('Failed to copy file list to %s - rerun with -s to '
- 'get a complete error message' % args.target)
+ args.target = args.target.split(':')[0]
- ret = subprocess.call("ssh %s %s 'xargs -n1 rm -f </tmp/%s'" % (extraoptions, args.target, os.path.basename(deploy_file)), shell=True)
- if ret == 0:
- logger.info('Successfully undeployed %s' % args.recipename)
- os.remove(deploy_file)
- else:
+ tmpdir = tempfile.mkdtemp(prefix='devtool')
+ try:
+ tmpscript = '/tmp/devtool_undeploy.sh'
+ shellscript = _prepare_remote_script(deploy=False, dryrun=args.dry_run, undeployall=args.all)
+ # Write out the script to a file
+ with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
+ f.write(shellscript)
+ # Copy it to the target
+ ret = subprocess.call("scp %s %s/* %s:%s" % (extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
+ if ret != 0:
+ raise DevtoolError('Failed to copy script to %s - rerun with -s to '
+ 'get a complete error message' % args.target)
+ finally:
+ shutil.rmtree(tmpdir)
+
+ # Now run the script
+ ret = subprocess.call('ssh %s %s \'sh %s %s\'' % (extraoptions, args.target, tmpscript, args.recipename), shell=True)
+ if ret != 0:
raise DevtoolError('Undeploy failed - rerun with -s to get a complete '
'error message')
- return ret
+ if not args.all and not args.dry_run:
+ logger.info('Successfully undeployed %s' % args.recipename)
+ return 0
def register_commands(subparsers, context):
"""Register devtool subcommands from the deploy plugin"""
- parser_deploy = subparsers.add_parser('deploy-target', help='Deploy recipe output files to live target machine')
+ parser_deploy = subparsers.add_parser('deploy-target',
+ help='Deploy recipe output files to live target machine',
+ description='Deploys a recipe\'s build output (i.e. the output of the do_install task) to a live target machine over ssh. By default, any existing files will be preserved instead of being overwritten and will be restored if you run devtool undeploy-target. Note: this only deploys the recipe itself and not any runtime dependencies, so it is assumed that those have been installed on the target beforehand.',
+ group='testbuild')
parser_deploy.add_argument('recipename', help='Recipe to deploy')
parser_deploy.add_argument('target', help='Live target machine running an ssh server: user@hostname[:destdir]')
parser_deploy.add_argument('-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
parser_deploy.add_argument('-s', '--show-status', help='Show progress/status output', action='store_true')
parser_deploy.add_argument('-n', '--dry-run', help='List files to be deployed only', action='store_true')
+ parser_deploy.add_argument('-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
+ parser_deploy.add_argument('--no-check-space', help='Do not check for available space before deploying', action='store_true')
parser_deploy.set_defaults(func=deploy)
- parser_undeploy = subparsers.add_parser('undeploy-target', help='Undeploy recipe output files in live target machine')
- parser_undeploy.add_argument('recipename', help='Recipe to undeploy')
+ parser_undeploy = subparsers.add_parser('undeploy-target',
+ help='Undeploy recipe output files in live target machine',
+ description='Un-deploys recipe output files previously deployed to a live target machine by devtool deploy-target.',
+ group='testbuild')
+ parser_undeploy.add_argument('recipename', help='Recipe to undeploy (if not using -a/--all)', nargs='?')
parser_undeploy.add_argument('target', help='Live target machine running an ssh server: user@hostname')
parser_undeploy.add_argument('-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
parser_undeploy.add_argument('-s', '--show-status', help='Show progress/status output', action='store_true')
+ parser_undeploy.add_argument('-a', '--all', help='Undeploy all recipes deployed on the target', action='store_true')
parser_undeploy.add_argument('-n', '--dry-run', help='List files to be undeployed only', action='store_true')
parser_undeploy.set_defaults(func=undeploy)
diff --git a/yocto-poky/scripts/lib/devtool/package.py b/yocto-poky/scripts/lib/devtool/package.py
index b8d84235c..afb5809a3 100644
--- a/yocto-poky/scripts/lib/devtool/package.py
+++ b/yocto-poky/scripts/lib/devtool/package.py
@@ -24,22 +24,21 @@ from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recip
logger = logging.getLogger('devtool')
-def plugin_init(pluginlist):
- """Plugin initialization"""
- pass
-
def package(args, config, basepath, workspace):
"""Entry point for the devtool 'package' subcommand"""
check_workspace_recipe(workspace, args.recipename)
- image_pkgtype = config.get('Package', 'image_pkgtype', '')
- if not image_pkgtype:
- tinfoil = setup_tinfoil(basepath=basepath)
- try:
- tinfoil.prepare(config_only=True)
+ tinfoil = setup_tinfoil(basepath=basepath)
+ try:
+ tinfoil.prepare(config_only=True)
+
+ image_pkgtype = config.get('Package', 'image_pkgtype', '')
+ if not image_pkgtype:
image_pkgtype = tinfoil.config_data.getVar('IMAGE_PKGTYPE', True)
- finally:
- tinfoil.shutdown()
+
+ deploy_dir_pkg = tinfoil.config_data.getVar('DEPLOY_DIR_%s' % image_pkgtype.upper(), True)
+ finally:
+ tinfoil.shutdown()
package_task = config.get('Package', 'package_task', 'package_write_%s' % image_pkgtype)
try:
@@ -47,13 +46,17 @@ def package(args, config, basepath, workspace):
except bb.process.ExecutionError as e:
# We've already seen the output since watch=True, so just ensure we return something to the user
return e.exitcode
- logger.info('Your packages are in %s/tmp/deploy/%s' % (basepath, image_pkgtype))
+
+ logger.info('Your packages are in %s' % deploy_dir_pkg)
return 0
def register_commands(subparsers, context):
"""Register devtool subcommands from the package plugin"""
if context.fixed_setup:
- parser_package = subparsers.add_parser('package', help='Build packages for a recipe', description='Builds packages for a recipe\'s output files')
+ parser_package = subparsers.add_parser('package',
+ help='Build packages for a recipe',
+ description='Builds packages for a recipe\'s output files',
+ group='testbuild', order=-5)
parser_package.add_argument('recipename', help='Recipe to package')
parser_package.set_defaults(func=package)
diff --git a/yocto-poky/scripts/lib/devtool/runqemu.py b/yocto-poky/scripts/lib/devtool/runqemu.py
index 5282afba6..daee7fbbe 100644
--- a/yocto-poky/scripts/lib/devtool/runqemu.py
+++ b/yocto-poky/scripts/lib/devtool/runqemu.py
@@ -57,7 +57,8 @@ def register_commands(subparsers, context):
"""Register devtool subcommands from this plugin"""
if context.fixed_setup:
parser_runqemu = subparsers.add_parser('runqemu', help='Run QEMU on the specified image',
- description='Runs QEMU to boot the specified image')
+ description='Runs QEMU to boot the specified image',
+ group='testbuild', order=-20)
parser_runqemu.add_argument('imagename', help='Name of built image to boot within QEMU', nargs='?')
parser_runqemu.add_argument('args', help='Any remaining arguments are passed to the runqemu script (pass --help after imagename to see what these are)',
nargs=argparse.REMAINDER)
diff --git a/yocto-poky/scripts/lib/devtool/sdk.py b/yocto-poky/scripts/lib/devtool/sdk.py
index f15a6a9ed..46fd12bdb 100644
--- a/yocto-poky/scripts/lib/devtool/sdk.py
+++ b/yocto-poky/scripts/lib/devtool/sdk.py
@@ -1,4 +1,19 @@
# Development tool - sdk-update command plugin
+#
+# Copyright (C) 2015-2016 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import os
import subprocess
@@ -7,14 +22,12 @@ import glob
import shutil
import errno
import sys
-from devtool import exec_build_env_command, setup_tinfoil, DevtoolError
+import tempfile
+import re
+from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError
logger = logging.getLogger('devtool')
-def plugin_init(pluginlist):
- """Plugin initialization"""
- pass
-
def parse_locked_sigs(sigfile_path):
"""Return <pn:task>:<hash> dictionary"""
sig_dict = {}
@@ -78,13 +91,26 @@ def install_sstate_objects(sstate_objects, src_sdk, dest_sdk):
logger.debug("Copying %s to %s" % (sb, dst))
shutil.copy(sb, dst)
+def check_manifest(fn, basepath):
+ import bb.utils
+ changedfiles = []
+ with open(fn, 'r') as f:
+ for line in f:
+ splitline = line.split()
+ if len(splitline) > 1:
+ chksum = splitline[0]
+ fpath = splitline[1]
+ curr_chksum = bb.utils.sha256_file(os.path.join(basepath, fpath))
+ if chksum != curr_chksum:
+ logger.debug('File %s changed: old csum = %s, new = %s' % (os.path.join(basepath, fpath), curr_chksum, chksum))
+ changedfiles.append(fpath)
+ return changedfiles
+
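
    check_manifest() above expects each manifest line to be '<sha256> <path relative to the
    SDK base directory>' and returns the relative paths whose on-disk checksum no longer
    matches. A minimal usage sketch (the manifest path is hypothetical):

        # Illustration only - hypothetical manifest path
        changed = check_manifest('/tmp/sdk-conf-manifest', basepath)
        if not changed:
            print('SDK configuration files are up to date')
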
def sdk_update(args, config, basepath, workspace):
# Fetch locked-sigs.inc file from remote/local destination
updateserver = args.updateserver
if not updateserver:
updateserver = config.get('SDK', 'updateserver', '')
- if not updateserver:
- raise DevtoolError("Update server not specified in config file, you must specify it on the command line")
logger.debug("updateserver: %s" % updateserver)
# Make sure we are using sdk-update from within SDK
@@ -101,6 +127,18 @@ def sdk_update(args, config, basepath, workspace):
else:
is_remote = False
+ layers_dir = os.path.join(basepath, 'layers')
+ conf_dir = os.path.join(basepath, 'conf')
+
+ # Grab variable values
+ tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
+ try:
+ stamps_dir = tinfoil.config_data.getVar('STAMPS_DIR', True)
+ sstate_mirrors = tinfoil.config_data.getVar('SSTATE_MIRRORS', True)
+ site_conf_version = tinfoil.config_data.getVar('SITE_CONF_VERSION', True)
+ finally:
+ tinfoil.shutdown()
+
if not is_remote:
# devtool sdk-update /local/path/to/latest/sdk
new_locked_sig_file_path = os.path.join(updateserver, 'conf/locked-sigs.inc')
@@ -111,6 +149,7 @@ def sdk_update(args, config, basepath, workspace):
logger.debug("Found conf/locked-sigs.inc in %s" % updateserver)
update_dict = generate_update_dict(new_locked_sig_file_path, old_locked_sig_file_path)
logger.debug("update_dict = %s" % update_dict)
+ newsdk_path = updateserver
sstate_dir = os.path.join(newsdk_path, 'sstate-cache')
if not os.path.exists(sstate_dir):
logger.error("sstate-cache directory not found under %s" % newsdk_path)
@@ -124,72 +163,204 @@ def sdk_update(args, config, basepath, workspace):
install_sstate_objects(sstate_objects, updateserver.rstrip('/'), basepath)
logger.info("Updating configuration files")
new_conf_dir = os.path.join(updateserver, 'conf')
- old_conf_dir = os.path.join(basepath, 'conf')
- shutil.rmtree(old_conf_dir)
- shutil.copytree(new_conf_dir, old_conf_dir)
+ shutil.rmtree(conf_dir)
+ shutil.copytree(new_conf_dir, conf_dir)
logger.info("Updating layers")
new_layers_dir = os.path.join(updateserver, 'layers')
- old_layers_dir = os.path.join(basepath, 'layers')
- shutil.rmtree(old_layers_dir)
- ret = subprocess.call("cp -a %s %s" % (new_layers_dir, old_layers_dir), shell=True)
+ shutil.rmtree(layers_dir)
+ ret = subprocess.call("cp -a %s %s" % (new_layers_dir, layers_dir), shell=True)
if ret != 0:
- logger.error("Copying %s to %s failed" % (new_layers_dir, old_layers_dir))
+ logger.error("Copying %s to %s failed" % (new_layers_dir, layers_dir))
return ret
else:
# devtool sdk-update http://myhost/sdk
- tmpsdk_dir = '/tmp/sdk-ext'
- if os.path.exists(tmpsdk_dir):
+ tmpsdk_dir = tempfile.mkdtemp()
+ try:
+ os.makedirs(os.path.join(tmpsdk_dir, 'conf'))
+ new_locked_sig_file_path = os.path.join(tmpsdk_dir, 'conf', 'locked-sigs.inc')
+ # Fetch manifest from server
+ tmpmanifest = os.path.join(tmpsdk_dir, 'conf', 'sdk-conf-manifest')
+ ret = subprocess.call("wget -q -O %s %s/conf/sdk-conf-manifest" % (tmpmanifest, updateserver), shell=True)
+ changedfiles = check_manifest(tmpmanifest, basepath)
+ if not changedfiles:
+ logger.info("Already up-to-date")
+ return 0
+ # Update metadata
+ logger.debug("Updating metadata via git ...")
+            # Check the status before doing a fetch and reset
+ if os.path.exists(os.path.join(basepath, 'layers/.git')):
+ out = subprocess.check_output("git status --porcelain", shell=True, cwd=layers_dir)
+ if not out:
+ ret = subprocess.call("git fetch --all; git reset --hard", shell=True, cwd=layers_dir)
+ else:
+                    logger.error("Failed to update metadata as there have been changes made to it. Aborting.")
+                    logger.error("Changed files:\n%s" % out)
+ return -1
+ else:
+ ret = -1
+ if ret != 0:
+ ret = subprocess.call("git clone %s/layers/.git" % updateserver, shell=True, cwd=tmpsdk_dir)
+ if ret != 0:
+ logger.error("Updating metadata via git failed")
+ return ret
+ logger.debug("Updating conf files ...")
+ for changedfile in changedfiles:
+ ret = subprocess.call("wget -q -O %s %s/%s" % (changedfile, updateserver, changedfile), shell=True, cwd=tmpsdk_dir)
+ if ret != 0:
+ logger.error("Updating %s failed" % changedfile)
+ return ret
+
+ # Check if UNINATIVE_CHECKSUM changed
+ uninative = False
+ if 'conf/local.conf' in changedfiles:
+ def read_uninative_checksums(fn):
+ chksumitems = []
+ with open(fn, 'r') as f:
+ for line in f:
+ if line.startswith('UNINATIVE_CHECKSUM'):
+ splitline = re.split(r'[\[\]"\']', line)
+ if len(splitline) > 3:
+ chksumitems.append((splitline[1], splitline[3]))
+ return chksumitems
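
    For reference (an aside): splitting on the character class above means a local.conf
    line such as the hypothetical one below yields the (build arch, checksum) pair that is
    appended to chksumitems.

        # Hypothetical line - illustration of the split only
        line = 'UNINATIVE_CHECKSUM[x86_64] = "0123456789abcdef"'
        re.split(r'[\[\]"\']', line)
        # -> ['UNINATIVE_CHECKSUM', 'x86_64', ' = ', '0123456789abcdef', '']
        #    splitline[1] is the build arch, splitline[3] is the checksum
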
+
+ oldsums = read_uninative_checksums(os.path.join(basepath, 'conf/local.conf'))
+ newsums = read_uninative_checksums(os.path.join(tmpsdk_dir, 'conf/local.conf'))
+ if oldsums != newsums:
+ uninative = True
+ for buildarch, chksum in newsums:
+ uninative_file = os.path.join('downloads', 'uninative', chksum, '%s-nativesdk-libc.tar.bz2' % buildarch)
+ mkdir(os.path.join(tmpsdk_dir, os.path.dirname(uninative_file)))
+ ret = subprocess.call("wget -q -O %s %s/%s" % (uninative_file, updateserver, uninative_file), shell=True, cwd=tmpsdk_dir)
+
+ # Ok, all is well at this point - move everything over
+ tmplayers_dir = os.path.join(tmpsdk_dir, 'layers')
+ if os.path.exists(tmplayers_dir):
+ shutil.rmtree(layers_dir)
+ shutil.move(tmplayers_dir, layers_dir)
+ for changedfile in changedfiles:
+ destfile = os.path.join(basepath, changedfile)
+ os.remove(destfile)
+ shutil.move(os.path.join(tmpsdk_dir, changedfile), destfile)
+ os.remove(os.path.join(conf_dir, 'sdk-conf-manifest'))
+ shutil.move(tmpmanifest, conf_dir)
+ if uninative:
+ shutil.rmtree(os.path.join(basepath, 'downloads', 'uninative'))
+ shutil.move(os.path.join(tmpsdk_dir, 'downloads', 'uninative'), os.path.join(basepath, 'downloads'))
+
+ if not sstate_mirrors:
+ with open(os.path.join(conf_dir, 'site.conf'), 'a') as f:
+ f.write('SCONF_VERSION = "%s"\n' % site_conf_version)
+ f.write('SSTATE_MIRRORS_append = " file://.* %s/sstate-cache/PATH \\n "\n' % updateserver)
+ finally:
shutil.rmtree(tmpsdk_dir)
- os.makedirs(tmpsdk_dir)
- os.makedirs(os.path.join(tmpsdk_dir, 'conf'))
- # Fetch locked-sigs.inc from update server
- ret = subprocess.call("wget -q -O - %s/conf/locked-sigs.inc > %s/locked-sigs.inc" % (updateserver, os.path.join(tmpsdk_dir, 'conf')), shell=True)
- if ret != 0:
- logger.error("Fetching conf/locked-sigs.inc from %s to %s/locked-sigs.inc failed" % (updateserver, os.path.join(tmpsdk_dir, 'conf')))
- return ret
- else:
- logger.info("Fetching conf/locked-sigs.inc from %s to %s/locked-sigs.inc succeeded" % (updateserver, os.path.join(tmpsdk_dir, 'conf')))
- new_locked_sig_file_path = os.path.join(tmpsdk_dir, 'conf/locked-sigs.inc')
- update_dict = generate_update_dict(new_locked_sig_file_path, old_locked_sig_file_path)
- logger.debug("update_dict = %s" % update_dict)
- if len(update_dict) == 0:
- logger.info("No need to update.")
- return 0
- # Update metadata
- logger.debug("Updating meta data via git ...")
- # Try using 'git pull', if failed, use 'git clone'
- if os.path.exists(os.path.join(basepath, 'layers/.git')):
- ret = subprocess.call("cd layers && git pull %s/layers/.git" % updateserver, shell=True)
+
+ if not args.skip_prepare:
+ # Find all potentially updateable tasks
+ sdk_update_targets = []
+ tasks = ['do_populate_sysroot', 'do_packagedata']
+ for root, _, files in os.walk(stamps_dir):
+ for fn in files:
+ if not '.sigdata.' in fn:
+ for task in tasks:
+ if '.%s.' % task in fn or '.%s_setscene.' % task in fn:
+ sdk_update_targets.append('%s:%s' % (os.path.basename(root), task))
+ # Run bitbake command for the whole SDK
+ logger.info("Preparing build system... (This may take some time.)")
+ try:
+ exec_build_env_command(config.init_path, basepath, 'bitbake --setscene-only %s' % ' '.join(sdk_update_targets), stderr=subprocess.STDOUT)
+ output, _ = exec_build_env_command(config.init_path, basepath, 'bitbake -n %s' % ' '.join(sdk_update_targets), stderr=subprocess.STDOUT)
+ runlines = []
+ for line in output.splitlines():
+ if 'Running task ' in line:
+ runlines.append(line)
+ if runlines:
+ logger.error('Unexecuted tasks found in preparation log:\n %s' % '\n '.join(runlines))
+ return -1
+ except bb.process.ExecutionError as e:
+ logger.error('Preparation failed:\n%s' % e.stdout)
+ return -1
+ return 0
+
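
    The preparation step above derives its bitbake targets purely from existing stamp
    files: for every non-sigdata stamp matching do_populate_sysroot or do_packagedata, the
    recipe name is taken from the stamp's parent directory. A hypothetical example of that
    mapping (the stamp path below is made up):

        # Illustration only
        root = '/path/to/sdk/tmp/stamps/i586-poky-linux/zlib'
        fn = '1.2.8-r0.do_populate_sysroot.deadbeef'
        # '.do_populate_sysroot.' is in fn and '.sigdata.' is not, so:
        target = '%s:%s' % (os.path.basename(root), 'do_populate_sysroot')  # 'zlib:do_populate_sysroot'
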
+def sdk_install(args, config, basepath, workspace):
+ """Entry point for the devtool sdk-install command"""
+
+ import oe.recipeutils
+ import bb.process
+
+ for recipe in args.recipename:
+ if recipe in workspace:
+ raise DevtoolError('recipe %s is a recipe in your workspace' % recipe)
+
+ tasks = ['do_populate_sysroot', 'do_packagedata']
+ stampprefixes = {}
+ def checkstamp(recipe):
+ stampprefix = stampprefixes[recipe]
+ stamps = glob.glob(stampprefix + '*')
+ for stamp in stamps:
+ if '.sigdata.' not in stamp and stamp.startswith((stampprefix + '.', stampprefix + '_setscene.')):
+ return True
else:
- ret = -1
- if ret != 0:
- ret = subprocess.call("rm -rf layers && git clone %s/layers/.git" % updateserver, shell=True)
- if ret != 0:
- logger.error("Updating meta data via git failed")
- return ret
- logger.debug("Updating conf files ...")
- conf_files = ['local.conf', 'bblayers.conf', 'devtool.conf', 'locked-sigs.inc']
- for conf in conf_files:
- ret = subprocess.call("wget -q -O - %s/conf/%s > conf/%s" % (updateserver, conf, conf), shell=True)
- if ret != 0:
- logger.error("Update %s failed" % conf)
- return ret
- with open(os.path.join(basepath, 'conf/local.conf'), 'a') as f:
- f.write('SSTATE_MIRRORS_append = " file://.* %s/sstate-cache/PATH \\n "\n' % updateserver)
-
- # Run bitbake command for the whole SDK
- sdk_targets = config.get('SDK', 'sdk_targets')
- logger.info("Executing 'bitbake %s' ... (This may take some time.)" % sdk_targets)
+ return False
+
+ install_recipes = []
+ tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
try:
- exec_build_env_command(config.init_path, basepath, 'bitbake %s' % sdk_targets)
- except:
- logger.error('bitbake %s failed' % sdk_targets)
- return -1
- return 0
+ for recipe in args.recipename:
+ rd = parse_recipe(config, tinfoil, recipe, True)
+ if not rd:
+ return 1
+ stampprefixes[recipe] = '%s.%s' % (rd.getVar('STAMP', True), tasks[0])
+ if checkstamp(recipe):
+ logger.info('%s is already installed' % recipe)
+ else:
+ install_recipes.append(recipe)
+ finally:
+ tinfoil.shutdown()
+
+ if install_recipes:
+ logger.info('Installing %s...' % ', '.join(install_recipes))
+ install_tasks = []
+ for recipe in install_recipes:
+ for task in tasks:
+ if recipe.endswith('-native') and 'package' in task:
+ continue
+ install_tasks.append('%s:%s' % (recipe, task))
+ options = ''
+ if not args.allow_build:
+ options += ' --setscene-only'
+ try:
+ exec_build_env_command(config.init_path, basepath, 'bitbake %s %s' % (options, ' '.join(install_tasks)), watch=True)
+ except bb.process.ExecutionError as e:
+ raise DevtoolError('Failed to install %s:\n%s' % (recipe, str(e)))
+ failed = False
+ for recipe in install_recipes:
+ if checkstamp(recipe):
+ logger.info('Successfully installed %s' % recipe)
+ else:
+ raise DevtoolError('Failed to install %s - unavailable' % recipe)
+ failed = True
+ if failed:
+ return 2
def register_commands(subparsers, context):
"""Register devtool subcommands from the sdk plugin"""
if context.fixed_setup:
- parser_sdk = subparsers.add_parser('sdk-update', help='Update SDK components from a nominated location')
- parser_sdk.add_argument('updateserver', help='The update server to fetch latest SDK components from', nargs='?')
+ parser_sdk = subparsers.add_parser('sdk-update',
+ help='Update SDK components',
+ description='Updates installed SDK components from a remote server',
+ group='sdk')
+ updateserver = context.config.get('SDK', 'updateserver', '')
+ if updateserver:
+ parser_sdk.add_argument('updateserver', help='The update server to fetch latest SDK components from (default %s)' % updateserver, nargs='?')
+ else:
+ parser_sdk.add_argument('updateserver', help='The update server to fetch latest SDK components from')
+ parser_sdk.add_argument('--skip-prepare', action="store_true", help='Skip re-preparing the build system after updating (for debugging only)')
parser_sdk.set_defaults(func=sdk_update)
+
+ parser_sdk_install = subparsers.add_parser('sdk-install',
+ help='Install additional SDK components',
+ description='Installs additional recipe development files into the SDK. (You can use "devtool search" to find available recipes.)',
+ group='sdk')
+ parser_sdk_install.add_argument('recipename', help='Name of the recipe to install the development artifacts for', nargs='+')
+ parser_sdk_install.add_argument('-s', '--allow-build', help='Allow building requested item(s) from source', action='store_true')
+ parser_sdk_install.set_defaults(func=sdk_install)
diff --git a/yocto-poky/scripts/lib/devtool/search.py b/yocto-poky/scripts/lib/devtool/search.py
index c2f420c33..b44bed7f6 100644
--- a/yocto-poky/scripts/lib/devtool/search.py
+++ b/yocto-poky/scripts/lib/devtool/search.py
@@ -22,59 +22,67 @@ import bb
import logging
import argparse
import re
-from devtool import setup_tinfoil, DevtoolError
+from devtool import setup_tinfoil, parse_recipe, DevtoolError
logger = logging.getLogger('devtool')
def search(args, config, basepath, workspace):
"""Entry point for the devtool 'search' subcommand"""
- tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
- pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR', True)
- tinfoil.shutdown()
+ tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
+ try:
+ pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR', True)
+ defsummary = tinfoil.config_data.getVar('SUMMARY', False) or ''
- keyword_rc = re.compile(args.keyword)
+ keyword_rc = re.compile(args.keyword)
- for fn in os.listdir(pkgdata_dir):
- pfn = os.path.join(pkgdata_dir, fn)
- if not os.path.isfile(pfn):
- continue
+ for fn in os.listdir(pkgdata_dir):
+ pfn = os.path.join(pkgdata_dir, fn)
+ if not os.path.isfile(pfn):
+ continue
- packages = []
- match = False
- if keyword_rc.search(fn):
- match = True
+ packages = []
+ match = False
+ if keyword_rc.search(fn):
+ match = True
- if not match:
- with open(pfn, 'r') as f:
- for line in f:
- if line.startswith('PACKAGES:'):
- packages = line.split(':', 1)[1].strip().split()
+ if not match:
+ with open(pfn, 'r') as f:
+ for line in f:
+ if line.startswith('PACKAGES:'):
+ packages = line.split(':', 1)[1].strip().split()
- for pkg in packages:
- if keyword_rc.search(pkg):
- match = True
- break
- if os.path.exists(os.path.join(pkgdata_dir, 'runtime', pkg + '.packaged')):
- with open(os.path.join(pkgdata_dir, 'runtime', pkg), 'r') as f:
- for line in f:
- if ': ' in line:
- splitline = line.split(':', 1)
- key = splitline[0]
- value = splitline[1].strip()
- if key in ['PKG_%s' % pkg, 'DESCRIPTION', 'FILES_INFO'] or key.startswith('FILERPROVIDES_'):
- if keyword_rc.search(value):
- match = True
- break
+ for pkg in packages:
+ if keyword_rc.search(pkg):
+ match = True
+ break
+ if os.path.exists(os.path.join(pkgdata_dir, 'runtime', pkg + '.packaged')):
+ with open(os.path.join(pkgdata_dir, 'runtime', pkg), 'r') as f:
+ for line in f:
+ if ': ' in line:
+ splitline = line.split(':', 1)
+ key = splitline[0]
+ value = splitline[1].strip()
+ if key in ['PKG_%s' % pkg, 'DESCRIPTION', 'FILES_INFO'] or key.startswith('FILERPROVIDES_'):
+ if keyword_rc.search(value):
+ match = True
+ break
- if match:
- print(fn)
+ if match:
+ rd = parse_recipe(config, tinfoil, fn, True)
+ summary = rd.getVar('SUMMARY', True)
+ if summary == rd.expand(defsummary):
+ summary = ''
+ print("%s %s" % (fn.ljust(20), summary))
+ finally:
+ tinfoil.shutdown()
return 0
def register_commands(subparsers, context):
"""Register devtool subcommands from this plugin"""
parser_search = subparsers.add_parser('search', help='Search available recipes',
- description='Searches for available target recipes. Matches on recipe name, package name, description and installed files, and prints the recipe name on match.')
+ description='Searches for available target recipes. Matches on recipe name, package name, description and installed files, and prints the recipe name on match.',
+ group='info')
parser_search.add_argument('keyword', help='Keyword to search for (regular expression syntax allowed)')
- parser_search.set_defaults(func=search)
+ parser_search.set_defaults(func=search, no_workspace=True)
diff --git a/yocto-poky/scripts/lib/devtool/standard.py b/yocto-poky/scripts/lib/devtool/standard.py
index 5464d7b1f..77a82d559 100644
--- a/yocto-poky/scripts/lib/devtool/standard.py
+++ b/yocto-poky/scripts/lib/devtool/standard.py
@@ -20,13 +20,16 @@ import os
import sys
import re
import shutil
+import subprocess
import tempfile
import logging
import argparse
+import argparse_oe
import scriptutils
import errno
+import glob
from collections import OrderedDict
-from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, use_external_build, setup_git_repo, DevtoolError
+from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, use_external_build, setup_git_repo, recipe_to_append, get_bbclassextend_targets, DevtoolError
from devtool import parse_recipe
logger = logging.getLogger('devtool')
@@ -37,21 +40,62 @@ def add(args, config, basepath, workspace):
import bb
import oe.recipeutils
- if args.recipename in workspace:
- raise DevtoolError("recipe %s is already in your workspace" %
- args.recipename)
+ if not args.recipename and not args.srctree and not args.fetch and not args.fetchuri:
+ raise argparse_oe.ArgumentUsageError('At least one of recipename, srctree, fetchuri or -f/--fetch must be specified', 'add')
+
+ # These are positional arguments, but because we're nice, allow
+ # specifying e.g. source tree without name, or fetch URI without name or
+ # source tree (if we can detect that that is what the user meant)
+ if '://' in args.recipename:
+ if not args.fetchuri:
+ if args.fetch:
+ raise DevtoolError('URI specified as positional argument as well as -f/--fetch')
+ args.fetchuri = args.recipename
+ args.recipename = ''
+ elif args.srctree and '://' in args.srctree:
+ if not args.fetchuri:
+ if args.fetch:
+ raise DevtoolError('URI specified as positional argument as well as -f/--fetch')
+ args.fetchuri = args.srctree
+ args.srctree = ''
+ elif args.recipename and not args.srctree:
+ if os.sep in args.recipename:
+ args.srctree = args.recipename
+ args.recipename = None
+ elif os.path.isdir(args.recipename):
+            logger.warn('Ambiguous argument "%s" - assuming you mean it to be the recipe name' % args.recipename)
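
    In other words, the heuristic above lets the user drop positional arguments when the
    intent is unambiguous; a couple of hypothetical invocations and how they are
    reinterpreted:

        # Illustration only - hypothetical command lines
        #   devtool add https://example.com/foo-1.0.tar.gz
        #       -> args.fetchuri = 'https://example.com/foo-1.0.tar.gz', recipe name determined later
        #   devtool add ./foo-src
        #       -> args.srctree = './foo-src' (os.sep in the argument), recipe name determined later
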
- reason = oe.recipeutils.validate_pn(args.recipename)
- if reason:
- raise DevtoolError(reason)
+ if args.fetch:
+ if args.fetchuri:
+ raise DevtoolError('URI specified as positional argument as well as -f/--fetch')
+ else:
+ # FIXME should show a warning that -f/--fetch is deprecated here
+ args.fetchuri = args.fetch
- # FIXME this ought to be in validate_pn but we're using that in other contexts
- if '/' in args.recipename:
- raise DevtoolError('"/" is not a valid character in recipe names')
+ if args.recipename:
+ if args.recipename in workspace:
+ raise DevtoolError("recipe %s is already in your workspace" %
+ args.recipename)
+ reason = oe.recipeutils.validate_pn(args.recipename)
+ if reason:
+ raise DevtoolError(reason)
+
+ # FIXME this ought to be in validate_pn but we're using that in other contexts
+ if '/' in args.recipename:
+ raise DevtoolError('"/" is not a valid character in recipe names')
+
+ if args.srctree:
+ srctree = os.path.abspath(args.srctree)
+ srctreeparent = None
+ tmpsrcdir = None
+ else:
+ srctree = None
+ srctreeparent = get_default_srctree(config)
+ bb.utils.mkdirhier(srctreeparent)
+ tmpsrcdir = tempfile.mkdtemp(prefix='devtoolsrc', dir=srctreeparent)
- srctree = os.path.abspath(args.srctree)
- if os.path.exists(srctree):
- if args.fetch:
+ if srctree and os.path.exists(srctree):
+ if args.fetchuri:
if not os.path.isdir(srctree):
raise DevtoolError("Cannot fetch into source tree path %s as "
"it exists and is not a directory" %
@@ -60,55 +104,108 @@ def add(args, config, basepath, workspace):
raise DevtoolError("Cannot fetch into source tree path %s as "
"it already exists and is non-empty" %
srctree)
- elif not args.fetch:
- raise DevtoolError("Specified source tree %s could not be found" %
- srctree)
-
- appendpath = os.path.join(config.workspace_path, 'appends')
- if not os.path.exists(appendpath):
- os.makedirs(appendpath)
+ elif not args.fetchuri:
+ if args.srctree:
+ raise DevtoolError("Specified source tree %s could not be found" %
+ args.srctree)
+ elif srctree:
+ raise DevtoolError("No source tree exists at default path %s - "
+ "either create and populate this directory, "
+ "or specify a path to a source tree, or a "
+ "URI to fetch source from" % srctree)
+ else:
+ raise DevtoolError("You must either specify a source tree "
+ "or a URI to fetch source from")
- recipedir = os.path.join(config.workspace_path, 'recipes', args.recipename)
- bb.utils.mkdirhier(recipedir)
- rfv = None
if args.version:
if '_' in args.version or ' ' in args.version:
raise DevtoolError('Invalid version string "%s"' % args.version)
- rfv = args.version
- if args.fetch:
- if args.fetch.startswith('git://'):
- rfv = 'git'
- elif args.fetch.startswith('svn://'):
- rfv = 'svn'
- elif args.fetch.startswith('hg://'):
- rfv = 'hg'
- if rfv:
- bp = "%s_%s" % (args.recipename, rfv)
- else:
- bp = args.recipename
- recipefile = os.path.join(recipedir, "%s.bb" % bp)
+
if args.color == 'auto' and sys.stdout.isatty():
color = 'always'
else:
color = args.color
extracmdopts = ''
- if args.fetch:
- source = args.fetch
- extracmdopts = '-x %s' % srctree
+ if args.fetchuri:
+ source = args.fetchuri
+ if srctree:
+ extracmdopts += ' -x %s' % srctree
+ else:
+ extracmdopts += ' -x %s' % tmpsrcdir
else:
source = srctree
+ if args.recipename:
+ extracmdopts += ' -N %s' % args.recipename
if args.version:
extracmdopts += ' -V %s' % args.version
if args.binary:
extracmdopts += ' -b'
+ if args.also_native:
+ extracmdopts += ' --also-native'
+ if args.src_subdir:
+ extracmdopts += ' --src-subdir "%s"' % args.src_subdir
+
+ tempdir = tempfile.mkdtemp(prefix='devtool')
try:
- stdout, _ = exec_build_env_command(config.init_path, basepath, 'recipetool --color=%s create -o %s "%s" %s' % (color, recipefile, source, extracmdopts))
- except bb.process.ExecutionError as e:
- raise DevtoolError('Command \'%s\' failed:\n%s' % (e.command, e.stdout))
+ try:
+ stdout, _ = exec_build_env_command(config.init_path, basepath, 'recipetool --color=%s create -o %s "%s" %s' % (color, tempdir, source, extracmdopts))
+ except bb.process.ExecutionError as e:
+ if e.exitcode == 15:
+ raise DevtoolError('Could not auto-determine recipe name, please specify it on the command line')
+ else:
+ raise DevtoolError('Command \'%s\' failed:\n%s' % (e.command, e.stdout))
+
+ recipes = glob.glob(os.path.join(tempdir, '*.bb'))
+ if recipes:
+ recipename = os.path.splitext(os.path.basename(recipes[0]))[0].split('_')[0]
+ if recipename in workspace:
+ raise DevtoolError('A recipe with the same name as the one being created (%s) already exists in your workspace' % recipename)
+ recipedir = os.path.join(config.workspace_path, 'recipes', recipename)
+ bb.utils.mkdirhier(recipedir)
+ recipefile = os.path.join(recipedir, os.path.basename(recipes[0]))
+ appendfile = recipe_to_append(recipefile, config)
+ if os.path.exists(appendfile):
+ # This shouldn't be possible, but just in case
+ raise DevtoolError('A recipe with the same name as the one being created already exists in your workspace')
+ if os.path.exists(recipefile):
+ raise DevtoolError('A recipe file %s already exists in your workspace; this shouldn\'t be there - please delete it before continuing' % recipefile)
+ if tmpsrcdir:
+ srctree = os.path.join(srctreeparent, recipename)
+ if os.path.exists(tmpsrcdir):
+ if os.path.exists(srctree):
+ if os.path.isdir(srctree):
+ try:
+ os.rmdir(srctree)
+ except OSError as e:
+ if e.errno == errno.ENOTEMPTY:
+ raise DevtoolError('Source tree path %s already exists and is not empty' % srctree)
+ else:
+ raise
+ else:
+ raise DevtoolError('Source tree path %s already exists and is not a directory' % srctree)
+ logger.info('Using default source tree path %s' % srctree)
+ shutil.move(tmpsrcdir, srctree)
+ else:
+ raise DevtoolError('Couldn\'t find source tree created by recipetool')
+ bb.utils.mkdirhier(recipedir)
+ shutil.move(recipes[0], recipefile)
+ # Move any additional files created by recipetool
+ for fn in os.listdir(tempdir):
+ shutil.move(os.path.join(tempdir, fn), recipedir)
+ else:
+ raise DevtoolError('Command \'%s\' did not create any recipe file:\n%s' % (e.command, e.stdout))
+ attic_recipe = os.path.join(config.workspace_path, 'attic', recipename, os.path.basename(recipefile))
+ if os.path.exists(attic_recipe):
+ logger.warn('A modified recipe from a previous invocation exists in %s - you may wish to move this over the top of the new recipe if you had changes in it that you want to continue with' % attic_recipe)
+ finally:
+ if tmpsrcdir and os.path.exists(tmpsrcdir):
+ shutil.rmtree(tmpsrcdir)
+ shutil.rmtree(tempdir)
- _add_md5(config, args.recipename, recipefile)
+ for fn in os.listdir(recipedir):
+ _add_md5(config, recipename, os.path.join(recipedir, fn))
- if args.fetch and not args.no_git:
+ if args.fetchuri and not args.no_git:
setup_git_repo(srctree, args.version, 'devtool')
initial_rev = None
@@ -121,7 +218,10 @@ def add(args, config, basepath, workspace):
if not rd:
return 1
- appendfile = os.path.join(appendpath, '%s.bbappend' % bp)
+ if args.src_subdir:
+ srctree = os.path.join(srctree, args.src_subdir)
+
+ bb.utils.mkdirhier(os.path.dirname(appendfile))
with open(appendfile, 'w') as f:
f.write('inherit externalsrc\n')
f.write('EXTERNALSRC = "%s"\n' % srctree)
@@ -138,7 +238,18 @@ def add(args, config, basepath, workspace):
f.write(' rm -f ${D}/singletask.lock\n')
f.write('}\n')
- _add_md5(config, args.recipename, appendfile)
+ if bb.data.inherits_class('npm', rd):
+ f.write('do_install_append() {\n')
+ f.write(' # Remove files added to source dir by devtool/externalsrc\n')
+ f.write(' rm -f ${NPM_INSTALLDIR}/singletask.lock\n')
+ f.write(' rm -rf ${NPM_INSTALLDIR}/.git\n')
+ f.write(' rm -rf ${NPM_INSTALLDIR}/oe-local-files\n')
+ f.write(' for symlink in ${EXTERNALSRC_SYMLINKS} ; do\n')
+ f.write(' rm -f ${NPM_INSTALLDIR}/${symlink%%:*}\n')
+ f.write(' done\n')
+ f.write('}\n')
+
+ _add_md5(config, recipename, appendfile)
logger.info('Recipe %s has been automatically created; further editing may be required to make it fully functional' % recipefile)
@@ -238,7 +349,7 @@ def extract(args, config, basepath, workspace):
return 1
srctree = os.path.abspath(args.srctree)
- initial_rev = _extract_source(srctree, args.keep_temp, args.branch, rd)
+ initial_rev = _extract_source(srctree, args.keep_temp, args.branch, False, rd)
logger.info('Source tree extracted to %s' % srctree)
if initial_rev:
@@ -246,6 +357,28 @@ def extract(args, config, basepath, workspace):
else:
return 1
+def sync(args, config, basepath, workspace):
+ """Entry point for the devtool 'sync' subcommand"""
+ import bb
+
+ tinfoil = _prep_extract_operation(config, basepath, args.recipename)
+ if not tinfoil:
+ # Error already shown
+ return 1
+
+ rd = parse_recipe(config, tinfoil, args.recipename, True)
+ if not rd:
+ return 1
+
+ srctree = os.path.abspath(args.srctree)
+ initial_rev = _extract_source(srctree, args.keep_temp, args.branch, True, rd)
+ logger.info('Source tree %s synchronized' % srctree)
+
+ if initial_rev:
+ return 0
+ else:
+ return 1
+
class BbTaskExecutor(object):
"""Class for executing bitbake tasks for a recipe
@@ -261,7 +394,7 @@ class BbTaskExecutor(object):
def exec_func(self, func, report):
"""Run bitbake task function"""
if not func in self.executed:
- deps = self.rdata.getVarFlag(func, 'deps')
+ deps = self.rdata.getVarFlag(func, 'deps', False)
if deps:
for taskdepfunc in deps:
self.exec_func(taskdepfunc, True)
@@ -269,14 +402,51 @@ class BbTaskExecutor(object):
logger.info('Executing %s...' % func)
fn = self.rdata.getVar('FILE', True)
localdata = bb.build._task_data(fn, func, self.rdata)
- bb.build.exec_func(func, localdata)
+ try:
+ bb.build.exec_func(func, localdata)
+ except bb.build.FuncFailed as e:
+ raise DevtoolError(str(e))
self.executed.append(func)
-def _prep_extract_operation(config, basepath, recipename):
+class PatchTaskExecutor(BbTaskExecutor):
+ def __init__(self, rdata):
+ self.check_git = False
+ super(PatchTaskExecutor, self).__init__(rdata)
+
+ def exec_func(self, func, report):
+ from oe.patch import GitApplyTree
+ srcsubdir = self.rdata.getVar('S', True)
+ haspatches = False
+ if func == 'do_patch':
+ patchdir = os.path.join(srcsubdir, 'patches')
+ if os.path.exists(patchdir):
+ if os.listdir(patchdir):
+ haspatches = True
+ else:
+ os.rmdir(patchdir)
+
+ super(PatchTaskExecutor, self).exec_func(func, report)
+ if self.check_git and os.path.exists(srcsubdir):
+ if func == 'do_patch':
+ if os.path.exists(patchdir):
+ shutil.rmtree(patchdir)
+ if haspatches:
+ stdout, _ = bb.process.run('git status --porcelain patches', cwd=srcsubdir)
+ if stdout:
+ bb.process.run('git checkout patches', cwd=srcsubdir)
+
+ stdout, _ = bb.process.run('git status --porcelain', cwd=srcsubdir)
+ if stdout:
+ bb.process.run('git add .; git commit -a -m "Committing changes from %s\n\n%s"' % (func, GitApplyTree.ignore_commit_prefix + ' - from %s' % func), cwd=srcsubdir)
+
+
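
    As an aside: once check_git is set, every task that touches the source tree ends up as
    its own commit, with the task name in the subject and a line starting with
    GitApplyTree.ignore_commit_prefix in the body so that later patch-export logic can skip
    these automatically generated commits. Roughly, for a hypothetical do_patch run:

        # Illustration of the commit message produced above for func == 'do_patch'
        # (the exact prefix comes from oe.patch.GitApplyTree.ignore_commit_prefix)
        #
        #   Committing changes from do_patch
        #
        #   <ignore_commit_prefix> - from do_patch
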
+def _prep_extract_operation(config, basepath, recipename, tinfoil=None):
"""HACK: Ugly workaround for making sure that requirements are met when
trying to extract a package. Returns the tinfoil instance to be used."""
- tinfoil = setup_tinfoil(basepath=basepath)
+ if not tinfoil:
+ tinfoil = setup_tinfoil(basepath=basepath)
+
rd = parse_recipe(config, tinfoil, recipename, True)
if not rd:
return None
@@ -293,7 +463,7 @@ def _prep_extract_operation(config, basepath, recipename):
return tinfoil
-def _extract_source(srctree, keep_temp, devbranch, d):
+def _extract_source(srctree, keep_temp, devbranch, sync, d):
"""Extract sources of a recipe"""
import bb.event
import oe.recipeutils
@@ -312,21 +482,26 @@ def _extract_source(srctree, keep_temp, devbranch, d):
_check_compatible_recipe(pn, d)
- if os.path.exists(srctree):
- if not os.path.isdir(srctree):
- raise DevtoolError("output path %s exists and is not a directory" %
- srctree)
- elif os.listdir(srctree):
- raise DevtoolError("output path %s already exists and is "
- "non-empty" % srctree)
+ if sync:
+ if not os.path.exists(srctree):
+ raise DevtoolError("output path %s does not exist" % srctree)
+ else:
+ if os.path.exists(srctree):
+ if not os.path.isdir(srctree):
+ raise DevtoolError("output path %s exists and is not a directory" %
+ srctree)
+ elif os.listdir(srctree):
+ raise DevtoolError("output path %s already exists and is "
+ "non-empty" % srctree)
- if 'noexec' in (d.getVarFlags('do_unpack', False) or []):
- raise DevtoolError("The %s recipe has do_unpack disabled, unable to "
- "extract source" % pn)
+ if 'noexec' in (d.getVarFlags('do_unpack', False) or []):
+ raise DevtoolError("The %s recipe has do_unpack disabled, unable to "
+ "extract source" % pn)
- # Prepare for shutil.move later on
- bb.utils.mkdirhier(srctree)
- os.rmdir(srctree)
+ if not sync:
+ # Prepare for shutil.move later on
+ bb.utils.mkdirhier(srctree)
+ os.rmdir(srctree)
# We don't want notes to be printed, they are too verbose
origlevel = bb.logger.getEffectiveLevel()
@@ -352,7 +527,7 @@ def _extract_source(srctree, keep_temp, devbranch, d):
# We don't want to move the source to STAGING_KERNEL_DIR here
crd.setVar('STAGING_KERNEL_DIR', '${S}')
- task_executor = BbTaskExecutor(crd)
+ task_executor = PatchTaskExecutor(crd)
crd.setVar('EXTERNALSRC_forcevariable', '')
@@ -366,6 +541,8 @@ def _extract_source(srctree, keep_temp, devbranch, d):
task_executor.exec_func('do_kernel_checkout', False)
srcsubdir = crd.getVar('S', True)
+ task_executor.check_git = True
+
# Move local source files into separate subdir
recipe_patches = [os.path.basename(patch) for patch in
oe.recipeutils.get_recipe_patches(crd)]
@@ -399,13 +576,6 @@ def _extract_source(srctree, keep_temp, devbranch, d):
scriptutils.git_convert_standalone_clone(srcsubdir)
- patchdir = os.path.join(srcsubdir, 'patches')
- haspatches = False
- if os.path.exists(patchdir):
- if os.listdir(patchdir):
- haspatches = True
- else:
- os.rmdir(patchdir)
# Make sure that srcsubdir exists
bb.utils.mkdirhier(srcsubdir)
if not os.path.exists(srcsubdir) or not os.listdir(srcsubdir):
@@ -425,18 +595,47 @@ def _extract_source(srctree, keep_temp, devbranch, d):
bb.process.run('git tag -f devtool-patched', cwd=srcsubdir)
- if os.path.exists(patchdir):
- shutil.rmtree(patchdir)
- if haspatches:
- bb.process.run('git checkout patches', cwd=srcsubdir)
+ kconfig = None
+ if bb.data.inherits_class('kernel-yocto', d):
+            # Generate and store the kernel config
+ logger.info('Generating kernel config')
+ task_executor.exec_func('do_configure', False)
+ kconfig = os.path.join(crd.getVar('B', True), '.config')
+
+
+ tempdir_localdir = os.path.join(tempdir, 'oe-local-files')
+ srctree_localdir = os.path.join(srctree, 'oe-local-files')
+
+ if sync:
+ bb.process.run('git fetch file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree)
- # Move oe-local-files directory to srctree
- if os.path.exists(os.path.join(tempdir, 'oe-local-files')):
- logger.info('Adding local source files to srctree...')
- shutil.move(os.path.join(tempdir, 'oe-local-files'), srcsubdir)
+            # Move the oe-local-files directory to srctree.
+            # Since oe-local-files is not part of the constructed git tree,
+            # removing it outright during synchronization might surprise the
+            # user. Instead, move it to oe-local-files.bak and remind the
+            # user in the log message.
+            if os.path.exists(srctree_localdir + '.bak'):
+                shutil.rmtree(srctree_localdir + '.bak')
+
+ if os.path.exists(srctree_localdir):
+ logger.info('Backing up current local file directory %s' % srctree_localdir)
+ shutil.move(srctree_localdir, srctree_localdir + '.bak')
+
+ if os.path.exists(tempdir_localdir):
+ logger.info('Syncing local source files to srctree...')
+ shutil.copytree(tempdir_localdir, srctree_localdir)
+ else:
+ # Move oe-local-files directory to srctree
+ if os.path.exists(tempdir_localdir):
+ logger.info('Adding local source files to srctree...')
+ shutil.move(tempdir_localdir, srcsubdir)
+ shutil.move(srcsubdir, srctree)
+
+ if kconfig:
+ logger.info('Copying kernel config to srctree')
+ shutil.copy2(kconfig, srctree)
- shutil.move(srcsubdir, srctree)
finally:
bb.logger.setLevel(origlevel)
@@ -468,7 +667,7 @@ def _check_preserve(config, recipename):
import bb.utils
origfile = os.path.join(config.workspace_path, '.devtool_md5')
newfile = os.path.join(config.workspace_path, '.devtool_md5_new')
- preservepath = os.path.join(config.workspace_path, 'attic')
+ preservepath = os.path.join(config.workspace_path, 'attic', recipename)
with open(origfile, 'r') as f:
with open(newfile, 'w') as tf:
for line in f.readlines():
@@ -503,18 +702,7 @@ def modify(args, config, basepath, workspace):
raise DevtoolError("recipe %s is already in your workspace" %
args.recipename)
- if not args.extract and not os.path.isdir(args.srctree):
- raise DevtoolError("directory %s does not exist or not a directory "
- "(specify -x to extract source from recipe)" %
- args.srctree)
- if args.extract:
- tinfoil = _prep_extract_operation(config, basepath, args.recipename)
- if not tinfoil:
- # Error already shown
- return 1
- else:
- tinfoil = setup_tinfoil(basepath=basepath)
-
+ tinfoil = setup_tinfoil(basepath=basepath)
rd = parse_recipe(config, tinfoil, args.recipename, True)
if not rd:
return 1
@@ -526,12 +714,23 @@ def modify(args, config, basepath, workspace):
raise DevtoolError("recipe %s is already in your workspace" %
pn)
+ if args.srctree:
+ srctree = os.path.abspath(args.srctree)
+ else:
+ srctree = get_default_srctree(config, pn)
+
+ if args.no_extract and not os.path.isdir(srctree):
+ raise DevtoolError("--no-extract specified and source path %s does "
+ "not exist or is not a directory" %
+ srctree)
+ if not args.no_extract:
+ tinfoil = _prep_extract_operation(config, basepath, pn, tinfoil)
+ if not tinfoil:
+ # Error already shown
+ return 1
+
recipefile = rd.getVar('FILE', True)
- appendname = os.path.splitext(os.path.basename(recipefile))[0]
- if args.wildcard:
- appendname = re.sub(r'_.*', '_%', appendname)
- appendpath = os.path.join(config.workspace_path, 'appends')
- appendfile = os.path.join(appendpath, appendname + '.bbappend')
+ appendfile = recipe_to_append(recipefile, config, args.wildcard)
if os.path.exists(appendfile):
raise DevtoolError("Another variant of recipe %s is already in your "
"workspace (only one variant of a recipe can "
@@ -542,29 +741,28 @@ def modify(args, config, basepath, workspace):
initial_rev = None
commits = []
- srctree = os.path.abspath(args.srctree)
- if args.extract:
- initial_rev = _extract_source(args.srctree, False, args.branch, rd)
+ if not args.no_extract:
+ initial_rev = _extract_source(srctree, False, args.branch, False, rd)
if not initial_rev:
return 1
logger.info('Source tree extracted to %s' % srctree)
# Get list of commits since this revision
- (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_rev, cwd=args.srctree)
+ (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_rev, cwd=srctree)
commits = stdout.split()
else:
- if os.path.exists(os.path.join(args.srctree, '.git')):
+ if os.path.exists(os.path.join(srctree, '.git')):
# Check if it's a tree previously extracted by us
try:
- (stdout, _) = bb.process.run('git branch --contains devtool-base', cwd=args.srctree)
+ (stdout, _) = bb.process.run('git branch --contains devtool-base', cwd=srctree)
except bb.process.ExecutionError:
stdout = ''
for line in stdout.splitlines():
if line.startswith('*'):
- (stdout, _) = bb.process.run('git rev-parse devtool-base', cwd=args.srctree)
+ (stdout, _) = bb.process.run('git rev-parse devtool-base', cwd=srctree)
initial_rev = stdout.rstrip()
if not initial_rev:
# Otherwise, just grab the head revision
- (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=args.srctree)
+ (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
initial_rev = stdout.rstrip()
# Check that recipe isn't using a shared workdir
@@ -575,8 +773,7 @@ def modify(args, config, basepath, workspace):
srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1]
srctree = os.path.join(srctree, srcsubdir)
- if not os.path.exists(appendpath):
- os.makedirs(appendpath)
+ bb.utils.mkdirhier(os.path.dirname(appendfile))
with open(appendfile, 'w') as f:
f.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n')
# Local files can be modified/tracked in separate subdir under srctree
@@ -593,7 +790,12 @@ def modify(args, config, basepath, workspace):
f.write('EXTERNALSRC_BUILD_pn-%s = "%s"\n' % (pn, srctree))
if bb.data.inherits_class('kernel', rd):
- f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout do_fetch do_unpack do_patch"\n')
+ f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout '
+ 'do_fetch do_unpack do_patch do_kernel_configme do_kernel_configcheck"\n')
+ f.write('\ndo_configure_append() {\n'
+ ' cp ${B}/.config ${S}/.config.baseline\n'
+ ' ln -sfT ${B}/.config ${S}/.config.new\n'
+ '}\n')
if initial_rev:
f.write('\n# initial_rev: %s\n' % initial_rev)
for commit in commits:
@@ -603,6 +805,8 @@ def modify(args, config, basepath, workspace):
logger.info('Recipe %s now set up to build from %s' % (pn, srctree))
+ tinfoil.shutdown()
+
return 0
def _get_patchset_revs(args, srctree, recipe_path):
@@ -735,6 +939,33 @@ def _export_patches(srctree, rd, start_rev, destdir):
return (updated, added, existing_patches)
+def _create_kconfig_diff(srctree, rd, outfile):
+ """Create a kconfig fragment"""
+ # Only update config fragment if both config files exist
+ orig_config = os.path.join(srctree, '.config.baseline')
+ new_config = os.path.join(srctree, '.config.new')
+ if os.path.exists(orig_config) and os.path.exists(new_config):
+ cmd = ['diff', '--new-line-format=%L', '--old-line-format=',
+ '--unchanged-line-format=', orig_config, new_config]
+ pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ stdout, stderr = pipe.communicate()
+ if pipe.returncode == 1:
+ logger.info("Updating config fragment %s" % outfile)
+ with open(outfile, 'w') as fobj:
+ fobj.write(stdout)
+ elif pipe.returncode == 0:
+ logger.info("Would remove config fragment %s" % outfile)
+ if os.path.exists(outfile):
+ # Remove fragment file in case of empty diff
+ logger.info("Removing config fragment %s" % outfile)
+ os.unlink(outfile)
+ else:
+ raise bb.process.ExecutionError(cmd, pipe.returncode, stdout, stderr)
+ return True
+ return False
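
    The helper above leans on diff's exit status: 0 means the two configs are identical
    (any stale fragment file is removed), 1 means they differ (the fragment is rewritten),
    and anything else is raised as an error. A minimal usage sketch with a hypothetical
    output path:

        # Illustration only - hypothetical destination path
        fragment = os.path.join('/tmp/devtool-local-files', 'devtool-fragment.cfg')
        handled = _create_kconfig_diff(srctree, rd, fragment)
        # handled is True only if both .config.baseline and .config.new existed in srctree
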
+
+
def _export_local_files(srctree, rd, destdir):
"""Copy local files from srctree to given location.
Returns three-tuple of dicts:
@@ -755,6 +986,7 @@ def _export_local_files(srctree, rd, destdir):
updated = OrderedDict()
added = OrderedDict()
removed = OrderedDict()
+ local_files_dir = os.path.join(srctree, 'oe-local-files')
git_files = _git_ls_tree(srctree)
if 'oe-local-files' in git_files:
# If tracked by Git, take the files from srctree HEAD. First get
@@ -765,11 +997,32 @@ def _export_local_files(srctree, rd, destdir):
env=dict(os.environ, GIT_WORK_TREE=destdir,
GIT_INDEX_FILE=tmp_index))
new_set = _git_ls_tree(srctree, tree, True).keys()
- elif os.path.isdir(os.path.join(srctree, 'oe-local-files')):
+ elif os.path.isdir(local_files_dir):
# If not tracked by Git, just copy from working copy
new_set = _ls_tree(os.path.join(srctree, 'oe-local-files'))
bb.process.run(['cp', '-ax',
os.path.join(srctree, 'oe-local-files', '.'), destdir])
+ else:
+ new_set = []
+
+ # Special handling for kernel config
+ if bb.data.inherits_class('kernel-yocto', rd):
+ fragment_fn = 'devtool-fragment.cfg'
+ fragment_path = os.path.join(destdir, fragment_fn)
+ if _create_kconfig_diff(srctree, rd, fragment_path):
+ if os.path.exists(fragment_path):
+ if fragment_fn not in new_set:
+ new_set.append(fragment_fn)
+ # Copy fragment to local-files
+ if os.path.isdir(local_files_dir):
+ shutil.copy2(fragment_path, local_files_dir)
+ else:
+ if fragment_fn in new_set:
+ new_set.remove(fragment_fn)
+ # Remove fragment from local-files
+ if os.path.exists(os.path.join(local_files_dir, fragment_fn)):
+ os.unlink(os.path.join(local_files_dir, fragment_fn))
+
if new_set is not None:
for fname in new_set:
if fname in existing_files:
@@ -857,15 +1110,15 @@ def _update_recipe_srcrev(args, srctree, rd, config_data):
'changes')
_remove_source_files(args, remove_files, destpath)
+ return True
-def _update_recipe_patch(args, config, srctree, rd, config_data):
+def _update_recipe_patch(args, config, workspace, srctree, rd, config_data):
"""Implement the 'patch' mode of update-recipe"""
import bb
import oe.recipeutils
recipefile = rd.getVar('FILE', True)
- append = os.path.join(config.workspace_path, 'appends', '%s.bbappend' %
- os.path.splitext(os.path.basename(recipefile))[0])
+ append = workspace[args.recipename]['bbappend']
if not os.path.exists(append):
raise DevtoolError('unable to find workspace bbappend for recipe %s' %
args.recipename)
@@ -959,10 +1212,12 @@ def _update_recipe_patch(args, config, srctree, rd, config_data):
elif not updatefiles:
# Neither patches nor recipe were updated
logger.info('No patches or files need updating')
+ return False
finally:
shutil.rmtree(tempdir)
_remove_source_files(args, remove_files, destpath)
+ return True
def _guess_recipe_update_mode(srctree, rdata):
"""Guess the recipe update mode to use"""
@@ -1011,15 +1266,16 @@ def update_recipe(args, config, basepath, workspace):
mode = args.mode
if mode == 'srcrev':
- _update_recipe_srcrev(args, srctree, rd, tinfoil.config_data)
+ updated = _update_recipe_srcrev(args, srctree, rd, tinfoil.config_data)
elif mode == 'patch':
- _update_recipe_patch(args, config, srctree, rd, tinfoil.config_data)
+ updated = _update_recipe_patch(args, config, workspace, srctree, rd, tinfoil.config_data)
else:
raise DevtoolError('update_recipe: invalid mode %s' % mode)
- rf = rd.getVar('FILE', True)
- if rf.startswith(config.workspace_path):
- logger.warn('Recipe file %s has been updated but is inside the workspace - you will need to move it (and any associated files next to it) out to the desired layer before using "devtool reset" in order to keep any changes' % rf)
+ if updated:
+ rf = rd.getVar('FILE', True)
+ if rf.startswith(config.workspace_path):
+ logger.warn('Recipe file %s has been updated but is inside the workspace - you will need to move it (and any associated files next to it) out to the desired layer before using "devtool reset" in order to keep any changes' % rf)
return 0
@@ -1028,7 +1284,12 @@ def status(args, config, basepath, workspace):
"""Entry point for the devtool 'status' subcommand"""
if workspace:
for recipe, value in workspace.iteritems():
- print("%s: %s" % (recipe, value['srctree']))
+ recipefile = value['recipefile']
+ if recipefile:
+ recipestr = ' (%s)' % recipefile
+ else:
+ recipestr = ''
+ print("%s: %s%s" % (recipe, value['srctree'], recipestr))
else:
logger.info('No recipes currently in your workspace - you can use "devtool modify" to work on an existing recipe or "devtool add" to add a new one')
return 0
@@ -1055,8 +1316,17 @@ def reset(args, config, basepath, workspace):
logger.info('Cleaning sysroot for recipe %s...' % recipes[0])
else:
logger.info('Cleaning sysroot for recipes %s...' % ', '.join(recipes))
+ # If the recipe file itself was created in the workspace, and
+ # it uses BBCLASSEXTEND, then we need to also clean the other
+ # variants
+ targets = []
+ for recipe in recipes:
+ targets.append(recipe)
+ recipefile = workspace[recipe]['recipefile']
+ if recipefile:
+ targets.extend(get_bbclassextend_targets(recipefile, recipe))
try:
- exec_build_env_command(config.init_path, basepath, 'bitbake -c clean %s' % ' '.join(recipes))
+ exec_build_env_command(config.init_path, basepath, 'bitbake -c clean %s' % ' '.join(targets))
except bb.process.ExecutionError as e:
raise DevtoolError('Command \'%s\' failed, output:\n%s\nIf you '
'wish, you may specify -n/--no-clean to '
@@ -1066,7 +1336,7 @@ def reset(args, config, basepath, workspace):
for pn in recipes:
_check_preserve(config, pn)
- preservepath = os.path.join(config.workspace_path, 'attic', pn)
+ preservepath = os.path.join(config.workspace_path, 'attic', pn, pn)
def preservedir(origdir):
if os.path.exists(origdir):
for root, dirs, files in os.walk(origdir):
@@ -1075,58 +1345,96 @@ def reset(args, config, basepath, workspace):
_move_file(os.path.join(origdir, fn),
os.path.join(preservepath, fn))
for dn in dirs:
- os.rmdir(os.path.join(root, dn))
+ preservedir(os.path.join(root, dn))
os.rmdir(origdir)
preservedir(os.path.join(config.workspace_path, 'recipes', pn))
# We don't automatically create this dir next to appends, but the user can
preservedir(os.path.join(config.workspace_path, 'appends', pn))
+ srctree = workspace[pn]['srctree']
+ if os.path.isdir(srctree):
+ if os.listdir(srctree):
+ # We don't want to risk wiping out any work in progress
+ logger.info('Leaving source tree %s as-is; if you no '
+ 'longer need it then please delete it manually'
+ % srctree)
+ else:
+ # This is unlikely, but if it's empty we can just remove it
+ os.rmdir(srctree)
+
return 0
+def get_default_srctree(config, recipename=''):
+ """Get the default srctree path"""
+ srctreeparent = config.get('General', 'default_source_parent_dir', config.workspace_path)
+ if recipename:
+ return os.path.join(srctreeparent, 'sources', recipename)
+ else:
+ return os.path.join(srctreeparent, 'sources')
+
def register_commands(subparsers, context):
"""Register devtool subcommands from this plugin"""
+
+ defsrctree = get_default_srctree(context.config)
parser_add = subparsers.add_parser('add', help='Add a new recipe',
- description='Adds a new recipe')
- parser_add.add_argument('recipename', help='Name for new recipe to add')
- parser_add.add_argument('srctree', help='Path to external source tree')
+ description='Adds a new recipe to the workspace to build a specified source tree. Can optionally fetch a remote URI and unpack it to create the source tree.',
+ group='starting', order=100)
+ parser_add.add_argument('recipename', nargs='?', help='Name for new recipe to add (just name - no version, path or extension). If not specified, will attempt to auto-detect it.')
+ parser_add.add_argument('srctree', nargs='?', help='Path to external source tree. If not specified, a subdirectory of %s will be used.' % defsrctree)
+ parser_add.add_argument('fetchuri', nargs='?', help='Fetch the specified URI and extract it to create the source tree')
group = parser_add.add_mutually_exclusive_group()
group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
- parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree', metavar='URI')
+ parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree (deprecated - pass as positional argument instead)', metavar='URI')
parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)')
- parser_add.add_argument('--no-git', '-g', help='If -f/--fetch is specified, do not set up source tree as a git repository', action="store_true")
- parser_add.add_argument('--binary', '-b', help='Treat the source tree as something that should be installed verbatim (no compilation, same directory structure)', action='store_true')
+ parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true")
+ parser_add.add_argument('--binary', '-b', help='Treat the source tree as something that should be installed verbatim (no compilation, same directory structure). Useful with binary packages e.g. RPMs.', action='store_true')
+ parser_add.add_argument('--also-native', help='Also add native variant (i.e. support building recipe for the build host as well as the target machine)', action='store_true')
+ parser_add.add_argument('--src-subdir', help='Specify subdirectory within source tree to use', metavar='SUBDIR')
parser_add.set_defaults(func=add)
parser_modify = subparsers.add_parser('modify', help='Modify the source for an existing recipe',
- description='Enables modifying the source for an existing recipe',
- formatter_class=argparse.ArgumentDefaultsHelpFormatter)
- parser_modify.add_argument('recipename', help='Name for recipe to edit')
- parser_modify.add_argument('srctree', help='Path to external source tree')
+ description='Sets up the build environment to modify the source for an existing recipe. The default behaviour is to extract the source being fetched by the recipe into a git tree so you can work on it; alternatively if you already have your own pre-prepared source tree you can specify -n/--no-extract.',
+ group='starting', order=90)
+ parser_modify.add_argument('recipename', help='Name of existing recipe to edit (just name - no version, path or extension)')
+ parser_modify.add_argument('srctree', nargs='?', help='Path to external source tree. If not specified, a subdirectory of %s will be used.' % defsrctree)
parser_modify.add_argument('--wildcard', '-w', action="store_true", help='Use wildcard for unversioned bbappend')
- parser_modify.add_argument('--extract', '-x', action="store_true", help='Extract source as well')
+ group = parser_modify.add_mutually_exclusive_group()
+ group.add_argument('--extract', '-x', action="store_true", help='Extract source for recipe (default)')
+ group.add_argument('--no-extract', '-n', action="store_true", help='Do not extract source, expect it to exist')
group = parser_modify.add_mutually_exclusive_group()
group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
- parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (only when using -x)')
+ parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (when not using -n/--no-extract) (default "%(default)s")')
parser_modify.set_defaults(func=modify)
parser_extract = subparsers.add_parser('extract', help='Extract the source for an existing recipe',
description='Extracts the source for an existing recipe',
- formatter_class=argparse.ArgumentDefaultsHelpFormatter)
- parser_extract.add_argument('recipename', help='Name for recipe to extract the source for')
+ group='advanced')
+ parser_extract.add_argument('recipename', help='Name of recipe to extract the source for')
parser_extract.add_argument('srctree', help='Path to where to extract the source tree')
- parser_extract.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout')
+ parser_extract.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (default "%(default)s")')
parser_extract.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
- parser_extract.set_defaults(func=extract)
+ parser_extract.set_defaults(func=extract, no_workspace=True)
+
+ parser_sync = subparsers.add_parser('sync', help='Synchronize the source tree for an existing recipe',
+ description='Synchronize the previously extracted source tree for an existing recipe',
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+ group='advanced')
+ parser_sync.add_argument('recipename', help='Name of recipe to sync the source for')
+ parser_sync.add_argument('srctree', help='Path to the source tree')
+ parser_sync.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout')
+ parser_sync.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
+ parser_sync.set_defaults(func=sync)
parser_update_recipe = subparsers.add_parser('update-recipe', help='Apply changes from external source tree to recipe',
- description='Applies changes from external source tree to a recipe (updating/adding/removing patches as necessary, or by updating SRCREV)')
+ description='Applies changes from external source tree to a recipe (updating/adding/removing patches as necessary, or by updating SRCREV). Note that these changes need to have been committed to the git repository in order to be recognised.',
+ group='working', order=-90)
parser_update_recipe.add_argument('recipename', help='Name of recipe to update')
parser_update_recipe.add_argument('--mode', '-m', choices=['patch', 'srcrev', 'auto'], default='auto', help='Update mode (where %(metavar)s is %(choices)s; default is %(default)s)', metavar='MODE')
- parser_update_recipe.add_argument('--initial-rev', help='Starting revision for patches')
+ parser_update_recipe.add_argument('--initial-rev', help='Override starting revision for patches')
parser_update_recipe.add_argument('--append', '-a', help='Write changes to a bbappend in the specified layer instead of the recipe', metavar='LAYERDIR')
parser_update_recipe.add_argument('--wildcard-version', '-w', help='In conjunction with -a/--append, use a wildcard to make the bbappend apply to any recipe version', action='store_true')
parser_update_recipe.add_argument('--no-remove', '-n', action="store_true", help='Don\'t remove patches, only add or update')
@@ -1134,12 +1442,12 @@ def register_commands(subparsers, context):
parser_status = subparsers.add_parser('status', help='Show workspace status',
description='Lists recipes currently in your workspace and the paths to their respective external source trees',
- formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+ group='info', order=100)
parser_status.set_defaults(func=status)
parser_reset = subparsers.add_parser('reset', help='Remove a recipe from your workspace',
description='Removes the specified recipe from your workspace (resetting its state)',
- formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+ group='working', order=-100)
parser_reset.add_argument('recipename', nargs='?', help='Recipe to reset')
parser_reset.add_argument('--all', '-a', action="store_true", help='Reset all recipes (clear workspace)')
parser_reset.add_argument('--no-clean', '-n', action="store_true", help='Don\'t clean the sysroot to remove recipe output')
diff --git a/yocto-poky/scripts/lib/devtool/upgrade.py b/yocto-poky/scripts/lib/devtool/upgrade.py
index 6bac44bb5..a085f78c4 100644
--- a/yocto-poky/scripts/lib/devtool/upgrade.py
+++ b/yocto-poky/scripts/lib/devtool/upgrade.py
@@ -33,10 +33,6 @@ from devtool import exec_build_env_command, setup_tinfoil, DevtoolError, parse_r
logger = logging.getLogger('devtool')
-def plugin_init(pluginlist):
- """Plugin initialization"""
- pass
-
def _run(cmd, cwd=''):
logger.debug("Running command %s> %s" % (cwd,cmd))
return bb.process.run('%s' % cmd, cwd=cwd)
@@ -66,26 +62,6 @@ def _get_checksums(rf):
checksums[cs] = m.group(1)
return checksums
-def _replace_checksums(rf, md5, sha256):
- if not md5 and not sha256:
- return
- checksums = {'md5sum':md5, 'sha256sum':sha256}
- with open(rf + ".tmp", "w+") as tmprf:
- with open(rf) as f:
- for line in f:
- m = None
- for cs in checksums.keys():
- m = re.match("^SRC_URI\[%s\].*=.*\"(.*)\"" % cs, line)
- if m:
- if checksums[cs]:
- oldcheck = m.group(1)
- newcheck = checksums[cs]
- line = line.replace(oldcheck, newcheck)
- break
- tmprf.write(line)
- os.rename(rf + ".tmp", rf)
-
-
def _remove_patch_dirs(recipefolder):
for root, dirs, files in os.walk(recipefolder):
for d in dirs:
@@ -105,23 +81,21 @@ def _rename_recipe_dirs(oldpv, newpv, path):
if olddir.find(oldpv) != -1:
newdir = olddir.replace(oldpv, newpv)
if olddir != newdir:
- _run('mv %s %s' % (olddir, newdir))
+ shutil.move(os.path.join(path, olddir), os.path.join(path, newdir))
-def _rename_recipe_file(bpn, oldpv, newpv, path):
- oldrecipe = "%s_%s.bb" % (bpn, oldpv)
- newrecipe = "%s_%s.bb" % (bpn, newpv)
- if os.path.isfile(os.path.join(path, oldrecipe)):
+def _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path):
+ oldrecipe = os.path.basename(oldrecipe)
+ if oldrecipe.endswith('_%s.bb' % oldpv):
+ newrecipe = '%s_%s.bb' % (bpn, newpv)
if oldrecipe != newrecipe:
- _run('mv %s %s' % (oldrecipe, newrecipe), cwd=path)
+ shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe))
else:
- recipe = "%s_git.bb" % bpn
- if os.path.isfile(os.path.join(path, recipe)):
- newrecipe = recipe
+ newrecipe = oldrecipe
return os.path.join(path, newrecipe)
-def _rename_recipe_files(bpn, oldpv, newpv, path):
+def _rename_recipe_files(oldrecipe, bpn, oldpv, newpv, path):
_rename_recipe_dirs(oldpv, newpv, path)
- return _rename_recipe_file(bpn, oldpv, newpv, path)
+ return _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path)
def _write_append(rc, srctree, same_dir, no_same_dir, rev, workspace, d):
"""Writes an append file"""
@@ -171,7 +145,15 @@ def _get_uri(rd):
srcuris = rd.getVar('SRC_URI', True).split()
if not len(srcuris):
raise DevtoolError('SRC_URI not found on recipe')
- srcuri = srcuris[0] # it is assumed, URI is at first position
+ # Get first non-local entry in SRC_URI - usually by convention it's
+ # the first entry, but not always!
+ srcuri = None
+ for entry in srcuris:
+ if not entry.startswith('file://'):
+ srcuri = entry
+ break
+ if not srcuri:
+ raise DevtoolError('Unable to find non-local entry in SRC_URI')
srcrev = '${AUTOREV}'
if '://' in srcuri:
# Fetch a URL
@@ -205,7 +187,7 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, branch, keep_temp, tin
md5 = None
sha256 = None
else:
- __run('git checkout -b devtool-%s' % newpv)
+ __run('git checkout devtool-base -b devtool-%s' % newpv)
tmpdir = tempfile.mkdtemp(prefix='devtool')
try:
@@ -214,9 +196,19 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, branch, keep_temp, tin
raise DevtoolError(e)
tmpsrctree = _get_srctree(tmpdir)
+ srctree = os.path.abspath(srctree)
+
+ # Delete all sources so we ensure no stray files are left over
+ for item in os.listdir(srctree):
+ if item in ['.git', 'oe-local-files']:
+ continue
+ itempath = os.path.join(srctree, item)
+ if os.path.isdir(itempath):
+ shutil.rmtree(itempath)
+ else:
+ os.remove(itempath)
- scrtree = os.path.abspath(srctree)
-
+ # Copy in new ones
_copy_source_code(tmpsrctree, srctree)
(stdout,_) = __run('git ls-files --modified --others --exclude-standard')
@@ -255,19 +247,20 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, branch, keep_temp, tin
return (rev, md5, sha256)
-def _create_new_recipe(newpv, md5, sha256, srcrev, workspace, tinfoil, rd):
+def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, workspace, tinfoil, rd):
"""Creates the new recipe under workspace"""
- crd = rd.createCopy()
- bpn = crd.getVar('BPN', True)
+ bpn = rd.getVar('BPN', True)
path = os.path.join(workspace, 'recipes', bpn)
bb.utils.mkdirhier(path)
- oe.recipeutils.copy_recipe_files(crd, path)
+ oe.recipeutils.copy_recipe_files(rd, path)
- oldpv = crd.getVar('PV', True)
+ oldpv = rd.getVar('PV', True)
if not newpv:
newpv = oldpv
- fullpath = _rename_recipe_files(bpn, oldpv, newpv, path)
+ origpath = rd.getVar('FILE', True)
+ fullpath = _rename_recipe_files(origpath, bpn, oldpv, newpv, path)
+ logger.debug('Upgraded %s => %s' % (origpath, fullpath))
newvalues = {}
if _recipe_contains(rd, 'PV') and newpv != oldpv:
@@ -276,15 +269,39 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, workspace, tinfoil, rd):
if srcrev:
newvalues['SRCREV'] = srcrev
- if newvalues:
- rd = oe.recipeutils.parse_recipe(fullpath, None, tinfoil.config_data)
- oe.recipeutils.patch_recipe(rd, fullpath, newvalues)
+ if srcbranch:
+ src_uri = oe.recipeutils.split_var_value(rd.getVar('SRC_URI', False) or '')
+ changed = False
+ replacing = True
+ new_src_uri = []
+ for entry in src_uri:
+ scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(entry)
+ if replacing and scheme in ['git', 'gitsm']:
+ branch = params.get('branch', 'master')
+ if rd.expand(branch) != srcbranch:
+ # Handle case where branch is set through a variable
+ res = re.match(r'\$\{([^}@]+)\}', branch)
+ if res:
+ newvalues[res.group(1)] = srcbranch
+ # We know we won't change SRC_URI now, so break out
+ break
+ else:
+ params['branch'] = srcbranch
+ entry = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params))
+ changed = True
+ replacing = False
+ new_src_uri.append(entry)
+ if changed:
+ newvalues['SRC_URI'] = ' '.join(new_src_uri)
+
+ newvalues['PR'] = None
if md5 and sha256:
- # Unfortunately, oe.recipeutils.patch_recipe cannot update flags.
- # once the latter feature is implemented, we should call patch_recipe
- # instead of the following function
- _replace_checksums(fullpath, md5, sha256)
+ newvalues['SRC_URI[md5sum]'] = md5
+ newvalues['SRC_URI[sha256sum]'] = sha256
+
+ rd = oe.recipeutils.parse_recipe(fullpath, None, tinfoil.config_data)
+ oe.recipeutils.patch_recipe(rd, fullpath, newvalues)
return fullpath
@@ -295,13 +312,10 @@ def upgrade(args, config, basepath, workspace):
raise DevtoolError("recipe %s is already in your workspace" % args.recipename)
if not args.version and not args.srcrev:
raise DevtoolError("You must provide a version using the --version/-V option, or for recipes that fetch from an SCM such as git, the --srcrev/-S option")
-
- reason = oe.recipeutils.validate_pn(args.recipename)
- if reason:
- raise DevtoolError(reason)
+ if args.srcbranch and not args.srcrev:
+ raise DevtoolError("If you specify --srcbranch/-B then you must use --srcrev/-S to specify the revision" % args.recipename)
tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
-
rd = parse_recipe(config, tinfoil, args.recipename, True)
if not rd:
return 1
@@ -312,37 +326,53 @@ def upgrade(args, config, basepath, workspace):
if pn in workspace:
raise DevtoolError("recipe %s is already in your workspace" % pn)
+ if args.srctree:
+ srctree = os.path.abspath(args.srctree)
+ else:
+ srctree = standard.get_default_srctree(config, pn)
+
standard._check_compatible_recipe(pn, rd)
- if rd.getVar('PV', True) == args.version and rd.getVar('SRCREV', True) == args.srcrev:
- raise DevtoolError("Current and upgrade versions are the same version" % version)
+ old_srcrev = rd.getVar('SRCREV', True)
+ if old_srcrev == 'INVALID':
+ old_srcrev = None
+ if old_srcrev and not args.srcrev:
+ raise DevtoolError("Recipe specifies a SRCREV value; you must specify a new one when upgrading")
+ if rd.getVar('PV', True) == args.version and old_srcrev == args.srcrev:
+ raise DevtoolError("Current and upgrade versions are the same version")
rf = None
try:
- rev1 = standard._extract_source(args.srctree, False, 'devtool-orig', rd)
- rev2, md5, sha256 = _extract_new_source(args.version, args.srctree, args.no_patch,
+ rev1 = standard._extract_source(srctree, False, 'devtool-orig', False, rd)
+ rev2, md5, sha256 = _extract_new_source(args.version, srctree, args.no_patch,
args.srcrev, args.branch, args.keep_temp,
tinfoil, rd)
- rf = _create_new_recipe(args.version, md5, sha256, args.srcrev, config.workspace_path, tinfoil, rd)
+ rf = _create_new_recipe(args.version, md5, sha256, args.srcrev, args.srcbranch, config.workspace_path, tinfoil, rd)
except bb.process.CmdError as e:
- _upgrade_error(e, rf, args.srctree)
+ _upgrade_error(e, rf, srctree)
except DevtoolError as e:
- _upgrade_error(e, rf, args.srctree)
+ _upgrade_error(e, rf, srctree)
standard._add_md5(config, pn, os.path.dirname(rf))
- af = _write_append(rf, args.srctree, args.same_dir, args.no_same_dir, rev2,
+ af = _write_append(rf, srctree, args.same_dir, args.no_same_dir, rev2,
config.workspace_path, rd)
standard._add_md5(config, pn, af)
- logger.info('Upgraded source extracted to %s' % args.srctree)
+ logger.info('Upgraded source extracted to %s' % srctree)
+ logger.info('New recipe is %s' % rf)
return 0
def register_commands(subparsers, context):
"""Register devtool subcommands from this plugin"""
+
+ defsrctree = standard.get_default_srctree(context.config)
+
parser_upgrade = subparsers.add_parser('upgrade', help='Upgrade an existing recipe',
- description='Upgrades an existing recipe to a new upstream version')
- parser_upgrade.add_argument('recipename', help='Name for recipe to extract the source for')
- parser_upgrade.add_argument('srctree', help='Path to where to extract the source tree')
+ description='Upgrades an existing recipe to a new upstream version. Puts the upgraded recipe file into the workspace along with any associated files, and extracts the source tree to a specified location (in case patches need rebasing or adding to as a result of the upgrade).',
+ group='starting')
+ parser_upgrade.add_argument('recipename', help='Name of recipe to upgrade (just name - no version, path or extension)')
+ parser_upgrade.add_argument('srctree', nargs='?', help='Path to where to extract the source tree. If not specified, a subdirectory of %s will be used.' % defsrctree)
parser_upgrade.add_argument('--version', '-V', help='Version to upgrade to (PV)')
parser_upgrade.add_argument('--srcrev', '-S', help='Source revision to upgrade to (if fetching from an SCM such as git)')
+ parser_upgrade.add_argument('--srcbranch', '-B', help='Branch in source repository containing the revision to use (if fetching from an SCM such as git)')
parser_upgrade.add_argument('--branch', '-b', default="devtool", help='Name for new development branch to checkout (default "%(default)s")')
parser_upgrade.add_argument('--no-patch', action="store_true", help='Do not apply patches from the recipe to the new source code')
group = parser_upgrade.add_mutually_exclusive_group()
diff --git a/yocto-poky/scripts/lib/devtool/utilcmds.py b/yocto-poky/scripts/lib/devtool/utilcmds.py
new file mode 100644
index 000000000..b761a80f8
--- /dev/null
+++ b/yocto-poky/scripts/lib/devtool/utilcmds.py
@@ -0,0 +1,233 @@
+# Development tool - utility commands plugin
+#
+# Copyright (C) 2015-2016 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+"""Devtool utility plugins"""
+
+import os
+import sys
+import shutil
+import tempfile
+import logging
+import argparse
+import subprocess
+import scriptutils
+from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError
+from devtool import parse_recipe
+
+logger = logging.getLogger('devtool')
+
+
+def edit_recipe(args, config, basepath, workspace):
+ """Entry point for the devtool 'edit-recipe' subcommand"""
+ if args.any_recipe:
+ tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
+ try:
+ rd = parse_recipe(config, tinfoil, args.recipename, True)
+ if not rd:
+ return 1
+ recipefile = rd.getVar('FILE', True)
+ finally:
+ tinfoil.shutdown()
+ else:
+ check_workspace_recipe(workspace, args.recipename)
+ recipefile = workspace[args.recipename]['recipefile']
+ if not recipefile:
+ raise DevtoolError("Recipe file for %s is not under the workspace" %
+ args.recipename)
+
+ return scriptutils.run_editor(recipefile)
+
+
+def configure_help(args, config, basepath, workspace):
+ """Entry point for the devtool 'configure-help' subcommand"""
+ import oe.utils
+
+ check_workspace_recipe(workspace, args.recipename)
+ tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
+ try:
+ rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False)
+ if not rd:
+ return 1
+ b = rd.getVar('B', True)
+ s = rd.getVar('S', True)
+ configurescript = os.path.join(s, 'configure')
+ confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (rd.getVar('__BBTASKS', False) or [])
+ configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS', True) or '')
+ extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF', True) or '')
+ extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE', True) or '')
+ do_configure = rd.getVar('do_configure', True) or ''
+ do_configure_noexpand = rd.getVar('do_configure', False) or ''
+ packageconfig = rd.getVarFlags('PACKAGECONFIG') or []
+ autotools = bb.data.inherits_class('autotools', rd) and ('oe_runconf' in do_configure or 'autotools_do_configure' in do_configure)
+ cmake = bb.data.inherits_class('cmake', rd) and ('cmake_do_configure' in do_configure)
+ cmake_do_configure = rd.getVar('cmake_do_configure', True)
+ pn = rd.getVar('PN', True)
+ finally:
+ tinfoil.shutdown()
+
+ if 'doc' in packageconfig:
+ del packageconfig['doc']
+
+ if autotools and not os.path.exists(configurescript):
+ logger.info('Running do_configure to generate configure script')
+ try:
+ stdout, _ = exec_build_env_command(config.init_path, basepath,
+ 'bitbake -c configure %s' % args.recipename,
+ stderr=subprocess.STDOUT)
+ except bb.process.ExecutionError:
+ pass
+
+ if confdisabled or do_configure.strip() in ('', ':'):
+ raise DevtoolError("do_configure task has been disabled for this recipe")
+ elif args.no_pager and not os.path.exists(configurescript):
+ raise DevtoolError("No configure script found and no other information to display")
+ else:
+ configopttext = ''
+ if autotools and configureopts:
+ configopttext = '''
+Arguments currently passed to the configure script:
+
+%s
+
+Some of those are fixed.''' % (configureopts + ' ' + extra_oeconf)
+ if extra_oeconf:
+ configopttext += ''' The ones that are specified through EXTRA_OECONF (which you can change or add to easily):
+
+%s''' % extra_oeconf
+
+ elif cmake:
+ in_cmake = False
+ cmake_cmd = ''
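+ # Reassemble the full cmake command line from the (possibly backslash-continued)
+ # lines of cmake_do_configure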
+ for line in cmake_do_configure.splitlines():
+ if in_cmake:
+ cmake_cmd = cmake_cmd + ' ' + line.strip().rstrip('\\')
+ if not line.endswith('\\'):
+ break
+ if line.lstrip().startswith('cmake '):
+ cmake_cmd = line.strip().rstrip('\\')
+ if line.endswith('\\'):
+ in_cmake = True
+ else:
+ break
+ if cmake_cmd:
+ configopttext = '''
+The current cmake command line:
+
+%s
+
+Arguments specified through EXTRA_OECMAKE (which you can change or add to easily)
+
+%s''' % (oe.utils.squashspaces(cmake_cmd), extra_oecmake)
+ else:
+ configopttext = '''
+The current implementation of cmake_do_configure:
+
+cmake_do_configure() {
+%s
+}
+
+Arguments specified through EXTRA_OECMAKE (which you can change or add to easily)
+
+%s''' % (cmake_do_configure.rstrip(), extra_oecmake)
+
+ elif do_configure:
+ configopttext = '''
+The current implementation of do_configure:
+
+do_configure() {
+%s
+}''' % do_configure.rstrip()
+ if '${EXTRA_OECONF}' in do_configure_noexpand:
+ configopttext += '''
+
+Arguments specified through EXTRA_OECONF (which you can change or add to easily):
+
+%s''' % extra_oeconf
+
+ if packageconfig:
+ configopttext += '''
+
+Some of these options may be controlled through PACKAGECONFIG; for more details please see the recipe.'''
+
+ if args.arg:
+ helpargs = ' '.join(args.arg)
+ elif cmake:
+ helpargs = '-LH'
+ else:
+ helpargs = '--help'
+
+ msg = '''configure information for %s
+------------------------------------------
+%s''' % (pn, configopttext)
+
+ if cmake:
+ msg += '''
+
+The cmake %s output for %s follows. After "-- Cache values" you should see a list of variables you can add to EXTRA_OECMAKE (prefixed with -D and suffixed with = followed by the desired value, without any spaces).
+------------------------------------------''' % (helpargs, pn)
+ elif os.path.exists(configurescript):
+ msg += '''
+
+The ./configure %s output for %s follows.
+------------------------------------------''' % (helpargs, pn)
+
+ olddir = os.getcwd()
+ tmppath = tempfile.mkdtemp()
+ with tempfile.NamedTemporaryFile('w', delete=False) as tf:
+ if not args.no_header:
+ tf.write(msg + '\n')
+ tf.close()
+ try:
+ try:
+ cmd = 'cat %s' % tf.name
+ if cmake:
+ cmd += '; cmake %s %s 2>&1' % (helpargs, s)
+ os.chdir(b)
+ elif os.path.exists(configurescript):
+ cmd += '; %s %s' % (configurescript, helpargs)
+ if sys.stdout.isatty() and not args.no_pager:
+ pager = os.environ.get('PAGER', 'less')
+ cmd = '(%s) | %s' % (cmd, pager)
+ subprocess.check_call(cmd, shell=True)
+ except subprocess.CalledProcessError as e:
+ return e.returncode
+ finally:
+ os.chdir(olddir)
+ shutil.rmtree(tmppath)
+ os.remove(tf.name)
+
+
+def register_commands(subparsers, context):
+ """Register devtool subcommands from this plugin"""
+ parser_edit_recipe = subparsers.add_parser('edit-recipe', help='Edit a recipe file in your workspace',
+ description='Runs the default editor (as specified by the EDITOR variable) on the specified recipe. Note that the recipe file itself must be in the workspace (i.e. as a result of "devtool add" or "devtool upgrade"); you can override this with the -a/--any-recipe option.',
+ group='working')
+ parser_edit_recipe.add_argument('recipename', help='Recipe to edit')
+ parser_edit_recipe.add_argument('--any-recipe', '-a', action="store_true", help='Edit any recipe, not just where the recipe file itself is in the workspace')
+ parser_edit_recipe.set_defaults(func=edit_recipe)
+
+ # NOTE: Needed to override the usage string here since the default
+ # gets the order wrong - recipename must come before --arg
+ parser_configure_help = subparsers.add_parser('configure-help', help='Get help on configure script options',
+ usage='devtool configure-help [options] recipename [--arg ...]',
+ description='Displays the help for the configure script for the specified recipe (i.e. runs ./configure --help) prefaced by a header describing the current options being specified. Output is piped through less (or whatever PAGER is set to, if set) for easy browsing.',
+ group='working')
+ parser_configure_help.add_argument('recipename', help='Recipe to show configure help for')
+ parser_configure_help.add_argument('-p', '--no-pager', help='Disable paged output', action="store_true")
+ parser_configure_help.add_argument('-n', '--no-header', help='Disable explanatory header text', action="store_true")
+ parser_configure_help.add_argument('--arg', help='Pass remaining arguments to the configure script instead of --help (useful if the script has additional help options)', nargs=argparse.REMAINDER)
+ parser_configure_help.set_defaults(func=configure_help)
diff --git a/yocto-poky/scripts/lib/recipetool/append.py b/yocto-poky/scripts/lib/recipetool/append.py
index 7fe411520..558fd25ac 100644
--- a/yocto-poky/scripts/lib/recipetool/append.py
+++ b/yocto-poky/scripts/lib/recipetool/append.py
@@ -343,6 +343,8 @@ def appendsrc(args, files, rd, extralines=None):
simplified = {}
src_uri = rd.getVar('SRC_URI', True).split()
for uri in src_uri:
+ if uri.endswith(';'):
+ uri = uri[:-1]
simple_uri = bb.fetch.URI(uri)
simple_uri.params = {}
simplified[str(simple_uri)] = uri
@@ -433,7 +435,7 @@ def target_path(targetpath):
return targetpath
-def register_command(subparsers):
+def register_commands(subparsers):
common = argparse.ArgumentParser(add_help=False)
common.add_argument('-m', '--machine', help='Make bbappend changes specific to a machine only', metavar='MACHINE')
common.add_argument('-w', '--wildcard-version', help='Use wildcard to make the bbappend apply to any recipe version', action='store_true')
diff --git a/yocto-poky/scripts/lib/recipetool/create.py b/yocto-poky/scripts/lib/recipetool/create.py
index 8305e4364..bb9fb9b04 100644
--- a/yocto-poky/scripts/lib/recipetool/create.py
+++ b/yocto-poky/scripts/lib/recipetool/create.py
@@ -1,6 +1,6 @@
# Recipe creation tool - create command plugin
#
-# Copyright (C) 2014-2015 Intel Corporation
+# Copyright (C) 2014-2016 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -21,9 +21,11 @@ import argparse
import glob
import fnmatch
import re
+import json
import logging
import scriptutils
import urlparse
+import hashlib
logger = logging.getLogger('recipetool')
@@ -39,14 +41,187 @@ def tinfoil_init(instance):
global tinfoil
tinfoil = instance
-class RecipeHandler():
+class RecipeHandler(object):
+ recipelibmap = {}
+ recipeheadermap = {}
+ recipecmakefilemap = {}
+ recipebinmap = {}
+
+ @staticmethod
+ def load_libmap(d):
+ '''Load library->recipe mapping'''
+ import oe.package
+
+ if RecipeHandler.recipelibmap:
+ return
+ # First build up library->package mapping
+ shlib_providers = oe.package.read_shlib_providers(d)
+ libdir = d.getVar('libdir', True)
+ base_libdir = d.getVar('base_libdir', True)
+ libpaths = list(set([base_libdir, libdir]))
+ libname_re = re.compile('^lib(.+)\.so.*$')
+ pkglibmap = {}
+ for lib, item in shlib_providers.iteritems():
+ for path, pkg in item.iteritems():
+ if path in libpaths:
+ res = libname_re.match(lib)
+ if res:
+ libname = res.group(1)
+ if not libname in pkglibmap:
+ pkglibmap[libname] = pkg[0]
+ else:
+ logger.debug('unable to extract library name from %s' % lib)
+
+ # Now turn it into a library->recipe mapping
+ pkgdata_dir = d.getVar('PKGDATA_DIR', True)
+ for libname, pkg in pkglibmap.iteritems():
+ try:
+ with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f:
+ for line in f:
+ if line.startswith('PN:'):
+ RecipeHandler.recipelibmap[libname] = line.split(':', 1)[-1].strip()
+ break
+ except IOError as ioe:
+ if ioe.errno == 2:
+ logger.warn('unable to find a pkgdata file for package %s' % pkg)
+ else:
+ raise
+
+ # Some overrides - these should be mapped to the virtual
+ RecipeHandler.recipelibmap['GL'] = 'virtual/libgl'
+ RecipeHandler.recipelibmap['EGL'] = 'virtual/egl'
+ RecipeHandler.recipelibmap['GLESv2'] = 'virtual/libgles2'
+
+ @staticmethod
+ def load_devel_filemap(d):
+ '''Build up development file->recipe mapping'''
+ if RecipeHandler.recipeheadermap:
+ return
+ pkgdata_dir = d.getVar('PKGDATA_DIR', True)
+ includedir = d.getVar('includedir', True)
+ cmakedir = os.path.join(d.getVar('libdir', True), 'cmake')
+ for pkg in glob.glob(os.path.join(pkgdata_dir, 'runtime', '*-dev')):
+ with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f:
+ pn = None
+ headers = []
+ cmakefiles = []
+ for line in f:
+ if line.startswith('PN:'):
+ pn = line.split(':', 1)[-1].strip()
+ elif line.startswith('FILES_INFO:'):
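+ # FILES_INFO is a JSON map of packaged file paths; pick out headers and cmake modules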
+ val = line.split(':', 1)[1].strip()
+ dictval = json.loads(val)
+ for fullpth in sorted(dictval):
+ if fullpth.startswith(includedir) and fullpth.endswith('.h'):
+ headers.append(os.path.relpath(fullpth, includedir))
+ elif fullpth.startswith(cmakedir) and fullpth.endswith('.cmake'):
+ cmakefiles.append(os.path.relpath(fullpth, cmakedir))
+ if pn and headers:
+ for header in headers:
+ RecipeHandler.recipeheadermap[header] = pn
+ if pn and cmakefiles:
+ for fn in cmakefiles:
+ RecipeHandler.recipecmakefilemap[fn] = pn
+
@staticmethod
- def checkfiles(path, speclist):
+ def load_binmap(d):
+ '''Build up native binary->recipe mapping'''
+ if RecipeHandler.recipebinmap:
+ return
+ sstate_manifests = d.getVar('SSTATE_MANIFESTS', True)
+ staging_bindir_native = d.getVar('STAGING_BINDIR_NATIVE', True)
+ build_arch = d.getVar('BUILD_ARCH', True)
+ fileprefix = 'manifest-%s-' % build_arch
+ for fn in glob.glob(os.path.join(sstate_manifests, '%s*-native.populate_sysroot' % fileprefix)):
+ with open(fn, 'r') as f:
+ pn = os.path.basename(fn).rsplit('.', 1)[0][len(fileprefix):]
+ for line in f:
+ if line.startswith(staging_bindir_native):
+ prog = os.path.basename(line.rstrip())
+ RecipeHandler.recipebinmap[prog] = pn
+
+ @staticmethod
+ def checkfiles(path, speclist, recursive=False):
results = []
- for spec in speclist:
- results.extend(glob.glob(os.path.join(path, spec)))
+ if recursive:
+ for root, _, files in os.walk(path):
+ for fn in files:
+ for spec in speclist:
+ if fnmatch.fnmatch(fn, spec):
+ results.append(os.path.join(root, fn))
+ else:
+ for spec in speclist:
+ results.extend(glob.glob(os.path.join(path, spec)))
return results
+ @staticmethod
+ def handle_depends(libdeps, pcdeps, deps, outlines, values, d):
+ if pcdeps:
+ recipemap = read_pkgconfig_provides(d)
+ if libdeps:
+ RecipeHandler.load_libmap(d)
+
+ ignorelibs = ['socket']
+ ignoredeps = ['gcc-runtime', 'glibc', 'uclibc', 'musl', 'tar-native', 'binutils-native', 'coreutils-native']
+
+ unmappedpc = []
+ pcdeps = list(set(pcdeps))
+ for pcdep in pcdeps:
+ if isinstance(pcdep, basestring):
+ recipe = recipemap.get(pcdep, None)
+ if recipe:
+ deps.append(recipe)
+ else:
+ if not pcdep.startswith('$'):
+ unmappedpc.append(pcdep)
+ else:
+ for item in pcdep:
+ recipe = recipemap.get(item, None)
+ if recipe:
+ deps.append(recipe)
+ break
+ else:
+ unmappedpc.append('(%s)' % ' or '.join(pcdep))
+
+ unmappedlibs = []
+ for libdep in libdeps:
+ if isinstance(libdep, tuple):
+ lib, header = libdep
+ else:
+ lib = libdep
+ header = None
+
+ if lib in ignorelibs:
+ logger.debug('Ignoring library dependency %s' % lib)
+ continue
+
+ recipe = RecipeHandler.recipelibmap.get(lib, None)
+ if recipe:
+ deps.append(recipe)
+ elif recipe is None:
+ if header:
+ RecipeHandler.load_devel_filemap(d)
+ recipe = RecipeHandler.recipeheadermap.get(header, None)
+ if recipe:
+ deps.append(recipe)
+ elif recipe is None:
+ unmappedlibs.append(lib)
+ else:
+ unmappedlibs.append(lib)
+
+ deps = set(deps).difference(set(ignoredeps))
+
+ if unmappedpc:
+ outlines.append('# NOTE: unable to map the following pkg-config dependencies: %s' % ' '.join(unmappedpc))
+ outlines.append('# (this is based on recipes that have previously been built and packaged)')
+
+ if unmappedlibs:
+ outlines.append('# NOTE: the following library dependencies are unknown, ignoring: %s' % ' '.join(list(set(unmappedlibs))))
+ outlines.append('# (this is based on recipes that have previously been built and packaged)')
+
+ if deps:
+ values['DEPENDS'] = ' '.join(deps)
+
def genfunction(self, outlines, funcname, content, python=False, forcespace=False):
if python:
prefix = 'python '
@@ -70,10 +245,64 @@ class RecipeHandler():
outlines.append('}')
outlines.append('')
- def process(self, srctree, classes, lines_before, lines_after, handled):
+ def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
return False
+def validate_pv(pv):
+ if not pv or '_version' in pv.lower() or pv[0] not in '0123456789':
+ return False
+ return True
+
+def determine_from_filename(srcfile):
+ """Determine name and version from a filename"""
+ part = ''
+ if '.tar.' in srcfile:
+ namepart = srcfile.split('.tar.')[0].lower()
+ else:
+ namepart = os.path.splitext(srcfile)[0].lower()
+ splitval = namepart.rsplit('_', 1)
+ if len(splitval) == 1:
+ splitval = namepart.rsplit('-', 1)
+ pn = splitval[0].replace('_', '-')
+ if len(splitval) > 1:
+ if splitval[1][0] in '0123456789':
+ pv = splitval[1]
+ else:
+ pn = '-'.join(splitval).replace('_', '-')
+ pv = None
+ else:
+ pv = None
+ return (pn, pv)
+
+def determine_from_url(srcuri):
+ """Determine name and version from a URL"""
+ pn = None
+ pv = None
+ parseres = urlparse.urlparse(srcuri.lower().split(';', 1)[0])
+ if parseres.path:
+ if 'github.com' in parseres.netloc:
+ res = re.search(r'.*/(.*?)/archive/(.*)-final\.(tar|zip)', parseres.path)
+ if res:
+ pn = res.group(1).strip().replace('_', '-')
+ pv = res.group(2).strip().replace('_', '.')
+ else:
+ res = re.search(r'.*/(.*?)/archive/v?(.*)\.(tar|zip)', parseres.path)
+ if res:
+ pn = res.group(1).strip().replace('_', '-')
+ pv = res.group(2).strip().replace('_', '.')
+ elif 'bitbucket.org' in parseres.netloc:
+ res = re.search(r'.*/(.*?)/get/[a-zA-Z_-]*([0-9][0-9a-zA-Z_.]*)\.(tar|zip)', parseres.path)
+ if res:
+ pn = res.group(1).strip().replace('_', '-')
+ pv = res.group(2).strip().replace('_', '.')
+
+ if not pn and not pv:
+ srcfile = os.path.basename(parseres.path.rstrip('/'))
+ pn, pv = determine_from_filename(srcfile)
+
+ logger.debug('Determined from source URL: name = "%s", version = "%s"' % (pn, pv))
+ return (pn, pv)
def supports_srcrev(uri):
localdata = bb.data.createCopy(tinfoil.config_data)
@@ -88,6 +317,16 @@ def supports_srcrev(uri):
return True
return False
+def reformat_git_uri(uri):
+ '''Convert any http[s]://....git URI into git://...;protocol=http[s]'''
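+ # e.g. 'https://host/repo.git;a=b' -> 'git://host/repo.git;protocol=https;a=b'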
+ checkuri = uri.split(';', 1)[0]
+ if checkuri.endswith('.git') or '/git/' in checkuri:
+ res = re.match('(https?)://([^;]+(\.git)?)(;.*)?$', uri)
+ if res:
+ # Need to switch the URI around so that the git fetcher is used
+ return 'git://%s;protocol=%s%s' % (res.group(2), res.group(1), res.group(4) or '')
+ return uri
+
def create_recipe(args):
import bb.process
import tempfile
@@ -103,16 +342,11 @@ def create_recipe(args):
srcrev = '${AUTOREV}'
if '://' in args.source:
# Fetch a URL
- fetchuri = urlparse.urldefrag(args.source)[0]
+ fetchuri = reformat_git_uri(urlparse.urldefrag(args.source)[0])
if args.binary:
# Assume the archive contains the directory structure verbatim
# so we need to extract to a subdirectory
fetchuri += ';subdir=%s' % os.path.splitext(os.path.basename(urlparse.urlsplit(fetchuri).path))[0]
- git_re = re.compile('(https?)://([^;]+\.git)(;.*)?')
- res = git_re.match(fetchuri)
- if res:
- # Need to switch the URI around so that the git fetcher is used
- fetchuri = 'git://%s;protocol=%s%s' % (res.group(2), res.group(1), res.group(3) or '')
srcuri = fetchuri
rev_re = re.compile(';rev=([^;]+)')
res = rev_re.search(srcuri)
@@ -121,11 +355,17 @@ def create_recipe(args):
srcuri = rev_re.sub('', srcuri)
tempsrc = tempfile.mkdtemp(prefix='recipetool-')
srctree = tempsrc
+ if fetchuri.startswith('npm://'):
+ # Check if npm is available
+ npm = bb.utils.which(tinfoil.config_data.getVar('PATH', True), 'npm')
+ if not npm:
+ logger.error('npm:// URL requested but npm is not available - you need to either build nodejs-native or install npm using your package manager')
+ sys.exit(1)
logger.info('Fetching %s...' % srcuri)
try:
checksums = scriptutils.fetch_uri(tinfoil.config_data, fetchuri, srctree, srcrev)
- except bb.fetch2.FetchError:
- # Error already printed
+ except bb.fetch2.BBFetchException as e:
+ logger.error(str(e).rstrip())
sys.exit(1)
dirlist = os.listdir(srctree)
if 'git.indirectionsymlink' in dirlist:
@@ -149,10 +389,35 @@ def create_recipe(args):
if not os.path.isdir(args.source):
logger.error('Invalid source directory %s' % args.source)
sys.exit(1)
- srcuri = ''
srctree = args.source
+ srcuri = ''
+ if os.path.exists(os.path.join(srctree, '.git')):
+ # Try to get upstream repo location from origin remote
+ try:
+ stdout, _ = bb.process.run('git remote -v', cwd=srctree, shell=True)
+ except bb.process.ExecutionError as e:
+ stdout = None
+ if stdout:
+ for line in stdout.splitlines():
+ splitline = line.split()
+ if len(splitline) > 1:
+ if splitline[0] == 'origin' and '://' in splitline[1]:
+ srcuri = reformat_git_uri(splitline[1])
+ srcsubdir = 'git'
+ break
+
+ if args.src_subdir:
+ srcsubdir = os.path.join(srcsubdir, args.src_subdir)
+ srctree_use = os.path.join(srctree, args.src_subdir)
+ else:
+ srctree_use = srctree
- outfile = args.outfile
+ if args.outfile and os.path.isdir(args.outfile):
+ outfile = None
+ outdir = args.outfile
+ else:
+ outfile = args.outfile
+ outdir = None
if outfile and outfile != '-':
if os.path.exists(outfile):
logger.error('Output file %s already exists' % outfile)
@@ -166,7 +431,7 @@ def create_recipe(args):
lines_before.append('# (Feel free to remove these comments when editing.)')
lines_before.append('#')
- licvalues = guess_license(srctree)
+ licvalues = guess_license(srctree_use)
lic_files_chksum = []
if licvalues:
licenses = []
@@ -195,29 +460,50 @@ def create_recipe(args):
lines_before.append('LIC_FILES_CHKSUM = "%s"' % ' \\\n '.join(lic_files_chksum))
lines_before.append('')
+ classes = []
+
# FIXME This is kind of a hack, we probably ought to be using bitbake to do this
- # we'd also want a way to automatically set outfile based upon auto-detecting these values from the source if possible
- recipefn = os.path.splitext(os.path.basename(outfile))[0]
- fnsplit = recipefn.split('_')
- if len(fnsplit) > 1:
- pn = fnsplit[0]
- pv = fnsplit[1]
- else:
- pn = recipefn
- pv = None
+ pn = None
+ pv = None
+ if outfile:
+ recipefn = os.path.splitext(os.path.basename(outfile))[0]
+ fnsplit = recipefn.split('_')
+ if len(fnsplit) > 1:
+ pn = fnsplit[0]
+ pv = fnsplit[1]
+ else:
+ pn = recipefn
if args.version:
pv = args.version
+ if args.name:
+ pn = args.name
+ if args.name.endswith('-native'):
+ if args.also_native:
+ logger.error('--also-native cannot be specified for a recipe named *-native (*-native denotes a recipe that is already only for native) - either remove the -native suffix from the name or drop --also-native')
+ sys.exit(1)
+ classes.append('native')
+ elif args.name.startswith('nativesdk-'):
+ if args.also_native:
+ logger.error('--also-native cannot be specified for a recipe named nativesdk-* (nativesdk-* denotes a recipe that is already only for nativesdk)')
+ sys.exit(1)
+ classes.append('nativesdk')
+
if pv and pv not in 'git svn hg'.split():
realpv = pv
else:
realpv = None
- if srcuri:
- if realpv:
- srcuri = srcuri.replace(realpv, '${PV}')
- else:
+ if srcuri and not realpv or not pn:
+ name_pn, name_pv = determine_from_url(srcuri)
+ if name_pn and not pn:
+ pn = name_pn
+ if name_pv and not realpv:
+ realpv = name_pv
+
+
+ if not srcuri:
lines_before.append('# No information for SRC_URI yet (only an external source tree was specified)')
lines_before.append('SRC_URI = "%s"' % srcuri)
(md5value, sha256value) = checksums
@@ -232,13 +518,7 @@ def create_recipe(args):
lines_before.append('SRCREV = "%s"' % srcrev)
lines_before.append('')
- if srcsubdir and pv:
- if srcsubdir == "%s-%s" % (pn, pv):
- # This would be the default, so we don't need to set S in the recipe
- srcsubdir = ''
if srcsubdir:
- if pv and pv not in 'git svn hg'.split():
- srcsubdir = srcsubdir.replace(pv, '${PV}')
lines_before.append('S = "${WORKDIR}/%s"' % srcsubdir)
lines_before.append('')
@@ -251,25 +531,133 @@ def create_recipe(args):
lines_after.append('')
# Find all plugins that want to register handlers
- handlers = []
+ logger.debug('Loading recipe handlers')
+ raw_handlers = []
for plugin in plugins:
if hasattr(plugin, 'register_recipe_handlers'):
- plugin.register_recipe_handlers(handlers)
+ plugin.register_recipe_handlers(raw_handlers)
+ # Sort handlers by priority
+ handlers = []
+ for i, handler in enumerate(raw_handlers):
+ if isinstance(handler, tuple):
+ handlers.append((handler[0], handler[1], i))
+ else:
+ handlers.append((handler, 0, i))
+ handlers.sort(key=lambda item: (item[1], -item[2]), reverse=True)
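+ # Highest priority first; for equal priorities, handlers registered earlier take precedence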
+ for handler, priority, _ in handlers:
+ logger.debug('Handler: %s (priority %d)' % (handler.__class__.__name__, priority))
+ handlers = [item[0] for item in handlers]
# Apply the handlers
- classes = []
handled = []
+ handled.append(('license', licvalues))
if args.binary:
classes.append('bin_package')
handled.append('buildsystem')
+ extravalues = {}
for handler in handlers:
- handler.process(srctree, classes, lines_before, lines_after, handled)
+ handler.process(srctree_use, classes, lines_before, lines_after, handled, extravalues)
+
+ extrafiles = extravalues.pop('extrafiles', {})
+
+ if not realpv:
+ realpv = extravalues.get('PV', None)
+ if realpv:
+ if not validate_pv(realpv):
+ realpv = None
+ else:
+ realpv = realpv.lower().split()[0]
+ if '_' in realpv:
+ realpv = realpv.replace('_', '-')
+ if not pn:
+ pn = extravalues.get('PN', None)
+ if pn:
+ if pn.startswith('GNU '):
+ pn = pn[4:]
+ if ' ' in pn:
+ # Probably a descriptive identifier rather than a proper name
+ pn = None
+ else:
+ pn = pn.lower()
+ if '_' in pn:
+ pn = pn.replace('_', '-')
+
+ if not outfile:
+ if not pn:
+ logger.error('Unable to determine short program name from source tree - please specify name with -N/--name or output file name with -o/--outfile')
+ # devtool looks for this specific exit code, so don't change it
+ sys.exit(15)
+ else:
+ if srcuri and srcuri.startswith(('git://', 'hg://', 'svn://')):
+ outfile = '%s_%s.bb' % (pn, srcuri.split(':', 1)[0])
+ elif realpv:
+ outfile = '%s_%s.bb' % (pn, realpv)
+ else:
+ outfile = '%s.bb' % pn
+ if outdir:
+ outfile = os.path.join(outdir, outfile)
+ # We need to check this again
+ if os.path.exists(outfile):
+ logger.error('Output file %s already exists' % outfile)
+ sys.exit(1)
+
+ # Move any extra files the plugins created to a directory next to the recipe
+ if extrafiles:
+ if outfile == '-':
+ extraoutdir = pn
+ else:
+ extraoutdir = os.path.join(os.path.dirname(outfile), pn)
+ bb.utils.mkdirhier(extraoutdir)
+ for destfn, extrafile in extrafiles.iteritems():
+ shutil.move(extrafile, os.path.join(extraoutdir, destfn))
+
+ lines = lines_before
+ lines_before = []
+ skipblank = True
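+ # Post-process the header lines: substitute ${PV} / ${BPN} into S, SRC_URI and PV
+ # now that name/version are known, and drop S if it matches the default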
+ for line in lines:
+ if skipblank:
+ skipblank = False
+ if not line:
+ continue
+ if line.startswith('S = '):
+ if realpv and pv not in 'git svn hg'.split():
+ line = line.replace(realpv, '${PV}')
+ if pn:
+ line = line.replace(pn, '${BPN}')
+ if line == 'S = "${WORKDIR}/${BPN}-${PV}"':
+ skipblank = True
+ continue
+ elif line.startswith('SRC_URI = '):
+ if realpv:
+ line = line.replace(realpv, '${PV}')
+ elif line.startswith('PV = '):
+ if realpv:
+ line = re.sub('"[^+]*\+', '"%s+' % realpv, line)
+ lines_before.append(line)
+
+ if args.also_native:
+ lines = lines_after
+ lines_after = []
+ bbclassextend = None
+ for line in lines:
+ if line.startswith('BBCLASSEXTEND ='):
+ splitval = line.split('"')
+ if len(splitval) > 1:
+ bbclassextend = splitval[1].split()
+ if not 'native' in bbclassextend:
+ bbclassextend.insert(0, 'native')
+ line = 'BBCLASSEXTEND = "%s"' % ' '.join(bbclassextend)
+ lines_after.append(line)
+ if not bbclassextend:
+ lines_after.append('BBCLASSEXTEND = "native"')
outlines = []
outlines.extend(lines_before)
if classes:
+ if outlines[-1] and not outlines[-1].startswith('#'):
+ outlines.append('')
outlines.append('inherit %s' % ' '.join(classes))
outlines.append('')
outlines.extend(lines_after)
@@ -339,19 +727,92 @@ def get_license_md5sums(d, static_only=False):
md5sums['5f30f0716dfdd0d91eb439ebec522ec2'] = 'LGPLv2'
md5sums['55ca817ccb7d5b5b66355690e9abc605'] = 'LGPLv2'
md5sums['252890d9eee26aab7b432e8b8a616475'] = 'LGPLv2'
+ md5sums['3214f080875748938ba060314b4f727d'] = 'LGPLv2'
+ md5sums['db979804f025cf55aabec7129cb671ed'] = 'LGPLv2'
md5sums['d32239bcb673463ab874e80d47fae504'] = 'GPLv3'
md5sums['f27defe1e96c2e1ecd4e0c9be8967949'] = 'GPLv3'
md5sums['6a6a8e020838b23406c81b19c1d46df6'] = 'LGPLv3'
md5sums['3b83ef96387f14655fc854ddc3c6bd57'] = 'Apache-2.0'
md5sums['385c55653886acac3821999a3ccd17b3'] = 'Artistic-1.0 | GPL-2.0' # some perl modules
+ md5sums['54c7042be62e169199200bc6477f04d1'] = 'BSD-3-Clause'
return md5sums
+def crunch_license(licfile):
+ '''
+ Remove non-material text from a license file and then check
+ its md5sum against a known list. This works well for licenses
+ which contain a copyright statement, but is also a useful way
+ to handle people's insistence upon reformatting the license text
+ slightly (with no material difference to the text of the
+ license).
+ '''
+
+ import oe.utils
+
+ # Note: these are carefully constructed!
+ license_title_re = re.compile('^\(?(#+ *)?(The )?.{1,10} [Ll]icen[sc]e( \(.{1,10}\))?\)?:?$')
+ license_statement_re = re.compile('^This (project|software) is( free software)? released under the .{1,10} [Ll]icen[sc]e:?$')
+ copyright_re = re.compile('^(#+)? *Copyright .*$')
+
+ crunched_md5sums = {}
+ # The following two were gleaned from the "forever" npm package
+ crunched_md5sums['0a97f8e4cbaf889d6fa51f84b89a79f6'] = 'ISC'
+ crunched_md5sums['eecf6429523cbc9693547cf2db790b5c'] = 'MIT'
+ # https://github.com/vasi/pixz/blob/master/LICENSE
+ crunched_md5sums['2f03392b40bbe663597b5bd3cc5ebdb9'] = 'BSD-2-Clause'
+ # https://github.com/waffle-gl/waffle/blob/master/LICENSE.txt
+ crunched_md5sums['e72e5dfef0b1a4ca8a3d26a60587db66'] = 'BSD-2-Clause'
+ # https://github.com/spigwitmer/fakeds1963s/blob/master/LICENSE
+ crunched_md5sums['8be76ac6d191671f347ee4916baa637e'] = 'GPLv2'
+ # https://github.com/datto/dattobd/blob/master/COPYING
+ # http://git.savannah.gnu.org/cgit/freetype/freetype2.git/tree/docs/GPLv2.TXT
+ crunched_md5sums['1d65c5ad4bf6489f85f4812bf08ae73d'] = 'GPLv2'
+ # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
+ # http://git.neil.brown.name/?p=mdadm.git;a=blob;f=COPYING;h=d159169d1050894d3ea3b98e1c965c4058208fe1;hb=HEAD
+ crunched_md5sums['fb530f66a7a89ce920f0e912b5b66d4b'] = 'GPLv2'
+ # https://github.com/gkos/nrf24/blob/master/COPYING
+ crunched_md5sums['7b6aaa4daeafdfa6ed5443fd2684581b'] = 'GPLv2'
+ # https://github.com/josch09/resetusb/blob/master/COPYING
+ crunched_md5sums['8b8ac1d631a4d220342e83bcf1a1fbc3'] = 'GPLv3'
+ # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv2.1
+ crunched_md5sums['2ea316ed973ae176e502e2297b574bb3'] = 'LGPLv2.1'
+ # unixODBC-2.3.4 COPYING
+ crunched_md5sums['1daebd9491d1e8426900b4fa5a422814'] = 'LGPLv2.1'
+ # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv3
+ crunched_md5sums['2ebfb3bb49b9a48a075cc1425e7f4129'] = 'LGPLv3'
+ lictext = []
+ with open(licfile, 'r') as f:
+ for line in f:
+ # Drop opening statements
+ if copyright_re.match(line):
+ continue
+ elif license_title_re.match(line):
+ continue
+ elif license_statement_re.match(line):
+ continue
+ # Squash spaces, and replace smart quotes, double quotes
+ # and backticks with single quotes
+ line = oe.utils.squashspaces(line.strip()).decode("utf-8")
+ line = line.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u201c","'").replace(u"\u201d", "'").replace('"', '\'').replace('`', '\'')
+ if line:
+ lictext.append(line)
+
+ m = hashlib.md5()
+ try:
+ m.update(' '.join(lictext))
+ md5val = m.hexdigest()
+ except UnicodeEncodeError:
+ md5val = None
+ lictext = ''
+ license = crunched_md5sums.get(md5val, None)
+ return license, md5val, lictext
+
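As an illustration only (simplified relative to the regexes and byte handling above), the normalise-then-hash idea behind crunch_license() can be sketched standalone like this:

import hashlib
import re

def crunch_text(text):
    # Drop copyright lines, squash whitespace, normalise quotes, then hash
    out = []
    for line in text.splitlines():
        if re.match(r'^(#+)? *Copyright .*$', line):
            continue
        line = re.sub(r'\s+', ' ', line.strip())
        line = line.replace(u'\u2018', "'").replace(u'\u2019', "'").replace('"', "'").replace('`', "'")
        if line:
            out.append(line)
    return hashlib.md5(' '.join(out).encode('utf-8')).hexdigest()

print(crunch_text(u'Copyright (C) 2016 Example Author\nPermission  is hereby granted, free of charge, ...'))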
def guess_license(srctree):
import bb
md5sums = get_license_md5sums(tinfoil.config_data)
licenses = []
- licspecs = ['LICENSE*', 'COPYING*', '*[Ll]icense*', 'LICENCE*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*']
+ licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*']
licfiles = []
for root, dirs, files in os.walk(srctree):
for fn in files:
@@ -362,13 +823,44 @@ def guess_license(srctree):
licfiles.append(fullpath)
for licfile in licfiles:
md5value = bb.utils.md5_file(licfile)
- license = md5sums.get(md5value, 'Unknown')
+ license = md5sums.get(md5value, None)
+ if not license:
+ license, crunched_md5, lictext = crunch_license(licfile)
+ if not license:
+ license = 'Unknown'
licenses.append((license, os.path.relpath(licfile, srctree), md5value))
# FIXME should we grab at least one source file with a license header and add that too?
return licenses
+def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn='${PN}'):
+ """
+ Given a list of (license, path, md5sum) as returned by guess_license()
+ and a dict of package name to path mappings, write out a set of
+ package-specific LICENSE values.
+ """
+ pkglicenses = {pn: []}
+ for license, licpath, _ in licvalues:
+ for pkgname, pkgpath in packages.iteritems():
+ if licpath.startswith(pkgpath + '/'):
+ if pkgname in pkglicenses:
+ pkglicenses[pkgname].append(license)
+ else:
+ pkglicenses[pkgname] = [license]
+ break
+ else:
+ # Accumulate on the main package
+ pkglicenses[pn].append(license)
+ outlicenses = {}
+ for pkgname in packages:
+ license = ' '.join(list(set(pkglicenses.get(pkgname, ['Unknown']))))
+ if license == 'Unknown' and fallback_licenses and pkgname in fallback_licenses:
+ license = fallback_licenses[pkgname]
+ outlines.append('LICENSE_%s = "%s"' % (pkgname, license))
+ outlicenses[pkgname] = license.split()
+ return outlicenses
+
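A usage sketch with made-up inputs (package names, paths and licenses are hypothetical, and it assumes scripts/lib is on sys.path so recipetool.create is importable), showing the LICENSE_<pkg> lines the function appends:

from recipetool.create import split_pkg_licenses

# licvalues as returned by guess_license(); packages maps package name -> subdirectory
licvalues = [('MIT', 'node_modules/foo/LICENSE', 'd41d8cd98f00b204e9800998ecf8427e'),
             ('ISC', 'LICENSE', '00112233445566778899aabbccddeeff')]
packages = {'${PN}': '', 'npm-foo': 'node_modules/foo'}
outlines = []
split_pkg_licenses(licvalues, packages, outlines, fallback_licenses={})
# outlines now holds e.g. 'LICENSE_npm-foo = "MIT"' and 'LICENSE_${PN} = "ISC"'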
def read_pkgconfig_provides(d):
pkgdatadir = d.getVar('PKGDATA_DIR', True)
pkgmap = {}
@@ -454,15 +946,18 @@ def convert_debian(debpath):
return values
-def register_command(subparsers):
+def register_commands(subparsers):
parser_create = subparsers.add_parser('create',
help='Create a new recipe',
description='Creates a new recipe from a source tree')
parser_create.add_argument('source', help='Path or URL to source')
- parser_create.add_argument('-o', '--outfile', help='Specify filename for recipe to create', required=True)
+ parser_create.add_argument('-o', '--outfile', help='Specify filename for recipe to create')
parser_create.add_argument('-m', '--machine', help='Make recipe machine-specific as opposed to architecture-specific', action='store_true')
parser_create.add_argument('-x', '--extract-to', metavar='EXTRACTPATH', help='Assuming source is a URL, fetch it and extract it to the directory specified as %(metavar)s')
+ parser_create.add_argument('-N', '--name', help='Name to use within recipe (PN)')
parser_create.add_argument('-V', '--version', help='Version to use within recipe (PV)')
parser_create.add_argument('-b', '--binary', help='Treat the source tree as something that should be installed verbatim (no compilation, same directory structure)', action='store_true')
+ parser_create.add_argument('--also-native', help='Also add native variant (i.e. support building recipe for the build host as well as the target machine)', action='store_true')
+ parser_create.add_argument('--src-subdir', help='Specify subdirectory within source tree to use', metavar='SUBDIR')
parser_create.set_defaults(func=create_recipe)
diff --git a/yocto-poky/scripts/lib/recipetool/create_buildsys.py b/yocto-poky/scripts/lib/recipetool/create_buildsys.py
index 931ef3b33..f84ec3dc6 100644
--- a/yocto-poky/scripts/lib/recipetool/create_buildsys.py
+++ b/yocto-poky/scripts/lib/recipetool/create_buildsys.py
@@ -1,6 +1,6 @@
# Recipe creation tool - create command build system handlers
#
-# Copyright (C) 2014 Intel Corporation
+# Copyright (C) 2014-2016 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -17,23 +17,35 @@
import re
import logging
-from recipetool.create import RecipeHandler, read_pkgconfig_provides
+import glob
+from recipetool.create import RecipeHandler, validate_pv
logger = logging.getLogger('recipetool')
tinfoil = None
+plugins = None
+
+def plugin_init(pluginlist):
+ # Take a reference to the list so we can use it later
+ global plugins
+ plugins = pluginlist
def tinfoil_init(instance):
global tinfoil
tinfoil = instance
+
class CmakeRecipeHandler(RecipeHandler):
- def process(self, srctree, classes, lines_before, lines_after, handled):
+ def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
if 'buildsystem' in handled:
return False
if RecipeHandler.checkfiles(srctree, ['CMakeLists.txt']):
classes.append('cmake')
+ values = CmakeRecipeHandler.extract_cmake_deps(lines_before, srctree, extravalues)
+ classes.extend(values.pop('inherit', '').split())
+ for var, value in values.iteritems():
+ lines_before.append('%s = "%s"' % (var, value))
lines_after.append('# Specify any options you want to pass to cmake using EXTRA_OECMAKE:')
lines_after.append('EXTRA_OECMAKE = ""')
lines_after.append('')
@@ -41,8 +53,266 @@ class CmakeRecipeHandler(RecipeHandler):
return True
return False
+ @staticmethod
+ def extract_cmake_deps(outlines, srctree, extravalues, cmakelistsfile=None):
+ # Find all plugins that want to register handlers
+ logger.debug('Loading cmake handlers')
+ handlers = []
+ for plugin in plugins:
+ if hasattr(plugin, 'register_cmake_handlers'):
+ plugin.register_cmake_handlers(handlers)
+
+ values = {}
+ inherits = []
+
+ if cmakelistsfile:
+ srcfiles = [cmakelistsfile]
+ else:
+ srcfiles = RecipeHandler.checkfiles(srctree, ['CMakeLists.txt'])
+
+ # Note that some of these are non-standard, but probably better to
+ # be able to map them anyway if we see them
+ cmake_pkgmap = {'alsa': 'alsa-lib',
+ 'aspell': 'aspell',
+ 'atk': 'atk',
+ 'bison': 'bison-native',
+ 'boost': 'boost',
+ 'bzip2': 'bzip2',
+ 'cairo': 'cairo',
+ 'cups': 'cups',
+ 'curl': 'curl',
+ 'curses': 'ncurses',
+ 'cvs': 'cvs',
+ 'drm': 'libdrm',
+ 'dbus': 'dbus',
+ 'dbusglib': 'dbus-glib',
+ 'egl': 'virtual/egl',
+ 'expat': 'expat',
+ 'flex': 'flex-native',
+ 'fontconfig': 'fontconfig',
+ 'freetype': 'freetype',
+ 'gettext': '',
+ 'git': '',
+ 'gio': 'glib-2.0',
+ 'giounix': 'glib-2.0',
+ 'glew': 'glew',
+ 'glib': 'glib-2.0',
+ 'glib2': 'glib-2.0',
+ 'glu': 'libglu',
+ 'glut': 'freeglut',
+ 'gobject': 'glib-2.0',
+ 'gperf': 'gperf-native',
+ 'gnutls': 'gnutls',
+ 'gtk2': 'gtk+',
+ 'gtk3': 'gtk+3',
+ 'gtk': 'gtk+3',
+ 'harfbuzz': 'harfbuzz',
+ 'icu': 'icu',
+ 'intl': 'virtual/libintl',
+ 'jpeg': 'jpeg',
+ 'libarchive': 'libarchive',
+ 'libiconv': 'virtual/libiconv',
+ 'liblzma': 'xz',
+ 'libxml2': 'libxml2',
+ 'libxslt': 'libxslt',
+ 'opengl': 'virtual/libgl',
+ 'openmp': '',
+ 'openssl': 'openssl',
+ 'pango': 'pango',
+ 'perl': '',
+ 'perllibs': '',
+ 'pkgconfig': '',
+ 'png': 'libpng',
+ 'pthread': '',
+ 'pythoninterp': '',
+ 'pythonlibs': '',
+ 'ruby': 'ruby-native',
+ 'sdl': 'libsdl',
+ 'sdl2': 'libsdl2',
+ 'subversion': 'subversion-native',
+ 'swig': 'swig-native',
+ 'tcl': 'tcl-native',
+ 'threads': '',
+ 'tiff': 'tiff',
+ 'wget': 'wget',
+ 'x11': 'libx11',
+ 'xcb': 'libxcb',
+ 'xext': 'libxext',
+ 'xfixes': 'libxfixes',
+ 'zlib': 'zlib',
+ }
+
+ pcdeps = []
+ libdeps = []
+ deps = []
+ unmappedpkgs = []
+
+ proj_re = re.compile('project\s*\(([^)]*)\)', re.IGNORECASE)
+ pkgcm_re = re.compile('pkg_check_modules\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?\s+([^)\s]+)\s*\)', re.IGNORECASE)
+ pkgsm_re = re.compile('pkg_search_module\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?((\s+[^)\s]+)+)\s*\)', re.IGNORECASE)
+ findpackage_re = re.compile('find_package\s*\(\s*([a-zA-Z0-9-_]+)\s*.*', re.IGNORECASE)
+ findlibrary_re = re.compile('find_library\s*\(\s*[a-zA-Z0-9-_]+\s*(NAMES\s+)?([a-zA-Z0-9-_ ]+)\s*.*')
+ checklib_re = re.compile('check_library_exists\s*\(\s*([^\s)]+)\s*.*', re.IGNORECASE)
+ include_re = re.compile('include\s*\(\s*([^)\s]*)\s*\)', re.IGNORECASE)
+ subdir_re = re.compile('add_subdirectory\s*\(\s*([^)\s]*)\s*([^)\s]*)\s*\)', re.IGNORECASE)
+ dep_re = re.compile('([^ ><=]+)( *[<>=]+ *[^ ><=]+)?')
+
+ def find_cmake_package(pkg):
+ RecipeHandler.load_devel_filemap(tinfoil.config_data)
+ for fn, pn in RecipeHandler.recipecmakefilemap.iteritems():
+ splitname = fn.split('/')
+ if len(splitname) > 1:
+ if splitname[0].lower().startswith(pkg.lower()):
+ if splitname[1] == '%s-config.cmake' % pkg.lower() or splitname[1] == '%sConfig.cmake' % pkg or splitname[1] == 'Find%s.cmake' % pkg:
+ return pn
+ return None
+
+ def interpret_value(value):
+ return value.strip('"')
+
+ def parse_cmake_file(fn, paths=None):
+ searchpaths = (paths or []) + [os.path.dirname(fn)]
+ logger.debug('Parsing file %s' % fn)
+ with open(fn, 'r') as f:
+ for line in f:
+ line = line.strip()
+ for handler in handlers:
+ if handler.process_line(srctree, fn, line, libdeps, pcdeps, deps, outlines, inherits, values):
+ continue
+ res = include_re.match(line)
+ if res:
+ includefn = bb.utils.which(':'.join(searchpaths), res.group(1))
+ if includefn:
+ parse_cmake_file(includefn, searchpaths)
+ else:
+ logger.debug('Unable to recurse into include file %s' % res.group(1))
+ continue
+ res = subdir_re.match(line)
+ if res:
+ subdirfn = os.path.join(os.path.dirname(fn), res.group(1), 'CMakeLists.txt')
+ if os.path.exists(subdirfn):
+ parse_cmake_file(subdirfn, searchpaths)
+ else:
+ logger.debug('Unable to recurse into subdirectory file %s' % subdirfn)
+ continue
+ res = proj_re.match(line)
+ if res:
+ extravalues['PN'] = interpret_value(res.group(1).split()[0])
+ continue
+ res = pkgcm_re.match(line)
+ if res:
+ res = dep_re.findall(res.group(2))
+ if res:
+ pcdeps.extend([interpret_value(x[0]) for x in res])
+ inherits.append('pkgconfig')
+ continue
+ res = pkgsm_re.match(line)
+ if res:
+ res = dep_re.findall(res.group(2))
+ if res:
+ # Note: appending a tuple here!
+ item = tuple((interpret_value(x[0]) for x in res))
+ if len(item) == 1:
+ item = item[0]
+ pcdeps.append(item)
+ inherits.append('pkgconfig')
+ continue
+ res = findpackage_re.match(line)
+ if res:
+ origpkg = res.group(1)
+ pkg = interpret_value(origpkg)
+ found = False
+ for handler in handlers:
+ if handler.process_findpackage(srctree, fn, pkg, deps, outlines, inherits, values):
+ logger.debug('Mapped CMake package %s via handler %s' % (pkg, handler.__class__.__name__))
+ found = True
+ break
+ if found:
+ continue
+ elif pkg == 'Gettext':
+ inherits.append('gettext')
+ elif pkg == 'Perl':
+ inherits.append('perlnative')
+ elif pkg == 'PkgConfig':
+ inherits.append('pkgconfig')
+ elif pkg == 'PythonInterp':
+ inherits.append('pythonnative')
+ elif pkg == 'PythonLibs':
+ inherits.append('python-dir')
+ else:
+ # Try to map via looking at installed CMake packages in pkgdata
+ dep = find_cmake_package(pkg)
+ if dep:
+ logger.debug('Mapped CMake package %s to recipe %s via pkgdata' % (pkg, dep))
+ deps.append(dep)
+ else:
+ dep = cmake_pkgmap.get(pkg.lower(), None)
+ if dep:
+ logger.debug('Mapped CMake package %s to recipe %s via internal list' % (pkg, dep))
+ deps.append(dep)
+ elif dep is None:
+ unmappedpkgs.append(origpkg)
+ continue
+ res = checklib_re.match(line)
+ if res:
+ lib = interpret_value(res.group(1))
+ if not lib.startswith('$'):
+ libdeps.append(lib)
+ res = findlibrary_re.match(line)
+ if res:
+ libs = res.group(2).split()
+ for lib in libs:
+ if lib in ['HINTS', 'PATHS', 'PATH_SUFFIXES', 'DOC', 'NAMES_PER_DIR'] or lib.startswith(('NO_', 'CMAKE_', 'ONLY_CMAKE_')):
+ break
+ lib = interpret_value(lib)
+ if not lib.startswith('$'):
+ libdeps.append(lib)
+ if line.lower().startswith('useswig'):
+ deps.append('swig-native')
+ continue
+
+ parse_cmake_file(srcfiles[0])
+
+ if unmappedpkgs:
+ outlines.append('# NOTE: unable to map the following CMake package dependencies: %s' % ' '.join(list(set(unmappedpkgs))))
+
+ RecipeHandler.handle_depends(libdeps, pcdeps, deps, outlines, values, tinfoil.config_data)
+
+ for handler in handlers:
+ handler.post_process(srctree, libdeps, pcdeps, deps, outlines, inherits, values)
+
+ if inherits:
+ values['inherit'] = ' '.join(list(set(inherits)))
+
+ return values
+
+
+class CmakeExtensionHandler(object):
+ '''Base class for CMake extension handlers'''
+ def process_line(self, srctree, fn, line, libdeps, pcdeps, deps, outlines, inherits, values):
+ '''
+ Handle a line parsed out of a CMake file.
+ Return True if you've completely handled the passed in line, otherwise return False.
+ '''
+ return False
+
+ def process_findpackage(self, srctree, fn, pkg, deps, outlines, inherits, values):
+ '''
+ Handle a find_package package parsed out of a CMake file.
+ Return True if you've completely handled the passed in package, otherwise return False.
+ '''
+ return False
+
+ def post_process(self, srctree, fn, pkg, deps, outlines, inherits, values):
+ '''
+ Apply any desired post-processing on the output
+ '''
+ return
+
+
+
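A minimal, hypothetical plugin built on this base class (the handler, recipe and CMake package names are invented; register_cmake_handlers() is the hook probed in extract_cmake_deps() above):

from recipetool.create_buildsys import CmakeExtensionHandler

class FoobarCmakeHandler(CmakeExtensionHandler):
    '''Map the hypothetical Foobar CMake package to a hypothetical foobar recipe'''
    def process_findpackage(self, srctree, fn, pkg, deps, outlines, inherits, values):
        if pkg == 'Foobar':
            deps.append('foobar')
            return True
        return False

def register_cmake_handlers(handlers):
    handlers.append(FoobarCmakeHandler())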
class SconsRecipeHandler(RecipeHandler):
- def process(self, srctree, classes, lines_before, lines_after, handled):
+ def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
if 'buildsystem' in handled:
return False
@@ -55,8 +325,9 @@ class SconsRecipeHandler(RecipeHandler):
return True
return False
+
class QmakeRecipeHandler(RecipeHandler):
- def process(self, srctree, classes, lines_before, lines_after, handled):
+ def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
if 'buildsystem' in handled:
return False
@@ -66,15 +337,16 @@ class QmakeRecipeHandler(RecipeHandler):
return True
return False
+
class AutotoolsRecipeHandler(RecipeHandler):
- def process(self, srctree, classes, lines_before, lines_after, handled):
+ def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
if 'buildsystem' in handled:
return False
autoconf = False
if RecipeHandler.checkfiles(srctree, ['configure.ac', 'configure.in']):
autoconf = True
- values = AutotoolsRecipeHandler.extract_autotools_deps(lines_before, srctree)
+ values = AutotoolsRecipeHandler.extract_autotools_deps(lines_before, srctree, extravalues)
classes.extend(values.pop('inherit', '').split())
for var, value in values.iteritems():
lines_before.append('%s = "%s"' % (var, value))
@@ -88,7 +360,24 @@ class AutotoolsRecipeHandler(RecipeHandler):
autoconf = True
break
+ if autoconf and not ('PV' in extravalues and 'PN' in extravalues):
+ # Last resort
+ conffile = RecipeHandler.checkfiles(srctree, ['configure'])
+ if conffile:
+ with open(conffile[0], 'r') as f:
+ for line in f:
+ line = line.strip()
+ if line.startswith('VERSION=') or line.startswith('PACKAGE_VERSION='):
+ pv = line.split('=')[1].strip('"\'')
+ if pv and not 'PV' in extravalues and validate_pv(pv):
+ extravalues['PV'] = pv
+ elif line.startswith('PACKAGE_NAME=') or line.startswith('PACKAGE='):
+ pn = line.split('=')[1].strip('"\'')
+ if pn and not 'PN' in extravalues:
+ extravalues['PN'] = pn
+
if autoconf:
+ lines_before.append('')
lines_before.append('# NOTE: if this software is not capable of being built in a separate build directory')
lines_before.append('# from the source, you should replace autotools with autotools-brokensep in the')
lines_before.append('# inherit line')
@@ -102,136 +391,311 @@ class AutotoolsRecipeHandler(RecipeHandler):
return False
@staticmethod
- def extract_autotools_deps(outlines, srctree, acfile=None):
+ def extract_autotools_deps(outlines, srctree, extravalues=None, acfile=None):
import shlex
- import oe.package
+
+ # Find all plugins that want to register handlers
+ logger.debug('Loading autotools handlers')
+ handlers = []
+ for plugin in plugins:
+ if hasattr(plugin, 'register_autotools_handlers'):
+ plugin.register_autotools_handlers(handlers)
values = {}
inherits = []
- # FIXME this mapping is very thin
+ # Hardcoded map; we also use a dynamic one based on what's in the sysroot
progmap = {'flex': 'flex-native',
'bison': 'bison-native',
- 'm4': 'm4-native'}
+ 'm4': 'm4-native',
+ 'tar': 'tar-native',
+ 'ar': 'binutils-native',
+ 'ranlib': 'binutils-native',
+ 'ld': 'binutils-native',
+ 'strip': 'binutils-native',
+ 'libtool': '',
+ 'autoconf': '',
+ 'autoheader': '',
+ 'automake': '',
+ 'uname': '',
+ 'rm': '',
+ 'cp': '',
+ 'mv': '',
+ 'find': '',
+ 'awk': '',
+ 'sed': '',
+ }
progclassmap = {'gconftool-2': 'gconf',
- 'pkg-config': 'pkgconfig'}
-
- ignoredeps = ['gcc-runtime', 'glibc', 'uclibc']
-
- pkg_re = re.compile('PKG_CHECK_MODULES\(\[?[a-zA-Z0-9]*\]?, \[?([^,\]]*)[),].*')
- lib_re = re.compile('AC_CHECK_LIB\(\[?([a-zA-Z0-9]*)\]?, .*')
- progs_re = re.compile('_PROGS?\(\[?[a-zA-Z0-9]*\]?, \[?([^,\]]*)\]?[),].*')
+ 'pkg-config': 'pkgconfig',
+ 'python': 'pythonnative',
+ 'python3': 'python3native',
+ 'perl': 'perlnative',
+ 'makeinfo': 'texinfo',
+ }
+
+ pkg_re = re.compile('PKG_CHECK_MODULES\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
+ pkgce_re = re.compile('PKG_CHECK_EXISTS\(\s*\[?([^,\]]*)\]?[),].*')
+ lib_re = re.compile('AC_CHECK_LIB\(\s*\[?([^,\]]*)\]?,.*')
+ libx_re = re.compile('AX_CHECK_LIBRARY\(\s*\[?[^,\]]*\]?,\s*\[?([^,\]]*)\]?,\s*\[?([a-zA-Z0-9-]*)\]?,.*')
+ progs_re = re.compile('_PROGS?\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
dep_re = re.compile('([^ ><=]+)( [<>=]+ [^ ><=]+)?')
-
- # Build up lib library->package mapping
- shlib_providers = oe.package.read_shlib_providers(tinfoil.config_data)
- libdir = tinfoil.config_data.getVar('libdir', True)
- base_libdir = tinfoil.config_data.getVar('base_libdir', True)
- libpaths = list(set([base_libdir, libdir]))
- libname_re = re.compile('^lib(.+)\.so.*$')
- pkglibmap = {}
- for lib, item in shlib_providers.iteritems():
- for path, pkg in item.iteritems():
- if path in libpaths:
- res = libname_re.match(lib)
- if res:
- libname = res.group(1)
- if not libname in pkglibmap:
- pkglibmap[libname] = pkg[0]
- else:
- logger.debug('unable to extract library name from %s' % lib)
-
- # Now turn it into a library->recipe mapping
- recipelibmap = {}
- pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR', True)
- for libname, pkg in pkglibmap.iteritems():
- try:
- with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f:
- for line in f:
- if line.startswith('PN:'):
- recipelibmap[libname] = line.split(':', 1)[-1].strip()
- break
- except IOError as ioe:
- if ioe.errno == 2:
- logger.warn('unable to find a pkgdata file for package %s' % pkg)
- else:
- raise
+ ac_init_re = re.compile('AC_INIT\(\s*([^,]+),\s*([^,]+)[,)].*')
+ am_init_re = re.compile('AM_INIT_AUTOMAKE\(\s*([^,]+),\s*([^,]+)[,)].*')
+ define_re = re.compile('\s*(m4_)?define\(\s*([^,]+),\s*([^,]+)\)')
+
+ defines = {}
+ def subst_defines(value):
+ newvalue = value
+ for define, defval in defines.iteritems():
+ newvalue = newvalue.replace(define, defval)
+ if newvalue != value:
+ return subst_defines(newvalue)
+ return value
+
+ def process_value(value):
+ value = value.replace('[', '').replace(']', '')
+ if value.startswith('m4_esyscmd(') or value.startswith('m4_esyscmd_s('):
+ cmd = subst_defines(value[value.index('(')+1:-1])
+ try:
+ if '|' in cmd:
+ cmd = 'set -o pipefail; ' + cmd
+ stdout, _ = bb.process.run(cmd, cwd=srctree, shell=True)
+ ret = stdout.rstrip()
+ except bb.process.ExecutionError as e:
+ ret = ''
+ elif value.startswith('m4_'):
+ return None
+ ret = subst_defines(value)
+ if ret:
+ ret = ret.strip('"\'')
+ return ret
# Since a configure.ac file is essentially a program, this is only ever going to be
# a hack unfortunately; but it ought to be enough of an approximation
if acfile:
srcfiles = [acfile]
else:
- srcfiles = RecipeHandler.checkfiles(srctree, ['configure.ac', 'configure.in'])
+ srcfiles = RecipeHandler.checkfiles(srctree, ['acinclude.m4', 'configure.ac', 'configure.in'])
+
pcdeps = []
+ libdeps = []
deps = []
unmapped = []
- unmappedlibs = []
- with open(srcfiles[0], 'r') as f:
- for line in f:
- if 'PKG_CHECK_MODULES' in line:
- res = pkg_re.search(line)
+
+ RecipeHandler.load_binmap(tinfoil.config_data)
+
+ def process_macro(keyword, value):
+ for handler in handlers:
+ if handler.process_macro(srctree, keyword, value, process_value, libdeps, pcdeps, deps, outlines, inherits, values):
+ return
+ if keyword == 'PKG_CHECK_MODULES':
+ res = pkg_re.search(value)
+ if res:
+ res = dep_re.findall(res.group(1))
if res:
- res = dep_re.findall(res.group(1))
- if res:
- pcdeps.extend([x[0] for x in res])
- inherits.append('pkgconfig')
- if line.lstrip().startswith('AM_GNU_GETTEXT'):
- inherits.append('gettext')
- elif 'AC_CHECK_PROG' in line or 'AC_PATH_PROG' in line:
- res = progs_re.search(line)
+ pcdeps.extend([x[0] for x in res])
+ inherits.append('pkgconfig')
+ elif keyword == 'PKG_CHECK_EXISTS':
+ res = pkgce_re.search(value)
+ if res:
+ res = dep_re.findall(res.group(1))
if res:
- for prog in shlex.split(res.group(1)):
- prog = prog.split()[0]
- progclass = progclassmap.get(prog, None)
- if progclass:
- inherits.append(progclass)
- else:
+ pcdeps.extend([x[0] for x in res])
+ inherits.append('pkgconfig')
+ elif keyword in ('AM_GNU_GETTEXT', 'AM_GLIB_GNU_GETTEXT', 'GETTEXT_PACKAGE'):
+ inherits.append('gettext')
+ elif keyword in ('AC_PROG_INTLTOOL', 'IT_PROG_INTLTOOL'):
+ deps.append('intltool-native')
+ elif keyword == 'AM_PATH_GLIB_2_0':
+ deps.append('glib-2.0')
+ elif keyword in ('AC_CHECK_PROG', 'AC_PATH_PROG', 'AX_WITH_PROG'):
+ res = progs_re.search(value)
+ if res:
+ for prog in shlex.split(res.group(1)):
+ prog = prog.split()[0]
+ for handler in handlers:
+ if handler.process_prog(srctree, keyword, value, prog, deps, outlines, inherits, values):
+ return
+ progclass = progclassmap.get(prog, None)
+ if progclass:
+ inherits.append(progclass)
+ else:
+ progdep = RecipeHandler.recipebinmap.get(prog, None)
+ if not progdep:
progdep = progmap.get(prog, None)
- if progdep:
- deps.append(progdep)
- else:
- if not prog.startswith('$'):
- unmapped.append(prog)
- elif 'AC_CHECK_LIB' in line:
- res = lib_re.search(line)
+ if progdep:
+ deps.append(progdep)
+ elif progdep is None:
+ if not prog.startswith('$'):
+ unmapped.append(prog)
+ elif keyword == 'AC_CHECK_LIB':
+ res = lib_re.search(value)
+ if res:
+ lib = res.group(1)
+ if not lib.startswith('$'):
+ libdeps.append(lib)
+ elif keyword == 'AX_CHECK_LIBRARY':
+ res = libx_re.search(value)
+ if res:
+ lib = res.group(2)
+ if not lib.startswith('$'):
+ header = res.group(1)
+ libdeps.append((lib, header))
+ elif keyword == 'AC_PATH_X':
+ deps.append('libx11')
+ elif keyword in ('AX_BOOST', 'BOOST_REQUIRE'):
+ deps.append('boost')
+ elif keyword in ('AC_PROG_LEX', 'AM_PROG_LEX', 'AX_PROG_FLEX'):
+ deps.append('flex-native')
+ elif keyword in ('AC_PROG_YACC', 'AX_PROG_BISON'):
+ deps.append('bison-native')
+ elif keyword == 'AX_CHECK_ZLIB':
+ deps.append('zlib')
+ elif keyword in ('AX_CHECK_OPENSSL', 'AX_LIB_CRYPTO'):
+ deps.append('openssl')
+ elif keyword == 'AX_LIB_CURL':
+ deps.append('curl')
+ elif keyword == 'AX_LIB_BEECRYPT':
+ deps.append('beecrypt')
+ elif keyword == 'AX_LIB_EXPAT':
+ deps.append('expat')
+ elif keyword == 'AX_LIB_GCRYPT':
+ deps.append('libgcrypt')
+ elif keyword == 'AX_LIB_NETTLE':
+ deps.append('nettle')
+ elif keyword == 'AX_LIB_READLINE':
+ deps.append('readline')
+ elif keyword == 'AX_LIB_SQLITE3':
+ deps.append('sqlite3')
+ elif keyword == 'AX_LIB_TAGLIB':
+ deps.append('taglib')
+ elif keyword == 'AX_PKG_SWIG':
+ deps.append('swig')
+ elif keyword == 'AX_PROG_XSLTPROC':
+ deps.append('libxslt-native')
+ elif keyword == 'AX_WITH_CURSES':
+ deps.append('ncurses')
+ elif keyword == 'AX_PATH_BDB':
+ deps.append('db')
+ elif keyword == 'AX_PATH_LIB_PCRE':
+ deps.append('libpcre')
+ elif keyword == 'AC_INIT':
+ if extravalues is not None:
+ res = ac_init_re.match(value)
if res:
- lib = res.group(1)
- libdep = recipelibmap.get(lib, None)
- if libdep:
- deps.append(libdep)
- else:
- if libdep is None:
- if not lib.startswith('$'):
- unmappedlibs.append(lib)
- elif 'AC_PATH_X' in line:
- deps.append('libx11')
-
- if unmapped:
- outlines.append('# NOTE: the following prog dependencies are unknown, ignoring: %s' % ' '.join(unmapped))
-
- if unmappedlibs:
- outlines.append('# NOTE: the following library dependencies are unknown, ignoring: %s' % ' '.join(unmappedlibs))
- outlines.append('# (this is based on recipes that have previously been built and packaged)')
+ extravalues['PN'] = process_value(res.group(1))
+ pv = process_value(res.group(2))
+ if validate_pv(pv):
+ extravalues['PV'] = pv
+ elif keyword == 'AM_INIT_AUTOMAKE':
+ if extravalues is not None:
+ if 'PN' not in extravalues:
+ res = am_init_re.match(value)
+ if res:
+ if res.group(1) != 'AC_PACKAGE_NAME':
+ extravalues['PN'] = process_value(res.group(1))
+ pv = process_value(res.group(2))
+ if validate_pv(pv):
+ extravalues['PV'] = pv
+ elif keyword == 'define(':
+ res = define_re.match(value)
+ if res:
+ key = res.group(2).strip('[]')
+ value = process_value(res.group(3))
+ if value is not None:
+ defines[key] = value
+
+ keywords = ['PKG_CHECK_MODULES',
+ 'PKG_CHECK_EXISTS',
+ 'AM_GNU_GETTEXT',
+ 'AM_GLIB_GNU_GETTEXT',
+ 'GETTEXT_PACKAGE',
+ 'AC_PROG_INTLTOOL',
+ 'IT_PROG_INTLTOOL',
+ 'AM_PATH_GLIB_2_0',
+ 'AC_CHECK_PROG',
+ 'AC_PATH_PROG',
+ 'AX_WITH_PROG',
+ 'AC_CHECK_LIB',
+ 'AX_CHECK_LIBRARY',
+ 'AC_PATH_X',
+ 'AX_BOOST',
+ 'BOOST_REQUIRE',
+ 'AC_PROG_LEX',
+ 'AM_PROG_LEX',
+ 'AX_PROG_FLEX',
+ 'AC_PROG_YACC',
+ 'AX_PROG_BISON',
+ 'AX_CHECK_ZLIB',
+ 'AX_CHECK_OPENSSL',
+ 'AX_LIB_CRYPTO',
+ 'AX_LIB_CURL',
+ 'AX_LIB_BEECRYPT',
+ 'AX_LIB_EXPAT',
+ 'AX_LIB_GCRYPT',
+ 'AX_LIB_NETTLE',
+ 'AX_LIB_READLINE',
+ 'AX_LIB_SQLITE3',
+ 'AX_LIB_TAGLIB',
+ 'AX_PKG_SWIG',
+ 'AX_PROG_XSLTPROC',
+ 'AX_WITH_CURSES',
+ 'AX_PATH_BDB',
+ 'AX_PATH_LIB_PCRE',
+ 'AC_INIT',
+ 'AM_INIT_AUTOMAKE',
+ 'define(',
+ ]
+
+ for handler in handlers:
+ handler.extend_keywords(keywords)
+
+ for srcfile in srcfiles:
+ nesting = 0
+ in_keyword = ''
+ partial = ''
+ with open(srcfile, 'r') as f:
+ for line in f:
+ if in_keyword:
+ partial += ' ' + line.strip()
+ if partial.endswith('\\'):
+ partial = partial[:-1]
+ nesting = nesting + line.count('(') - line.count(')')
+ if nesting == 0:
+ process_macro(in_keyword, partial)
+ partial = ''
+ in_keyword = ''
+ else:
+ for keyword in keywords:
+ if keyword in line:
+ nesting = line.count('(') - line.count(')')
+ if nesting > 0:
+ partial = line.strip()
+ if partial.endswith('\\'):
+ partial = partial[:-1]
+ in_keyword = keyword
+ else:
+ process_macro(keyword, line.strip())
+ break
- recipemap = read_pkgconfig_provides(tinfoil.config_data)
- unmapped = []
- for pcdep in pcdeps:
- recipe = recipemap.get(pcdep, None)
- if recipe:
- deps.append(recipe)
- else:
- if not pcdep.startswith('$'):
- unmapped.append(pcdep)
+ if in_keyword:
+ process_macro(in_keyword, partial)
- deps = set(deps).difference(set(ignoredeps))
+ if extravalues:
+ for k,v in extravalues.items():
+ if v:
+ if v.startswith('$') or v.startswith('@') or v.startswith('%'):
+ del extravalues[k]
+ else:
+ extravalues[k] = v.strip('"\'').rstrip('()')
if unmapped:
- outlines.append('# NOTE: unable to map the following pkg-config dependencies: %s' % ' '.join(unmapped))
- outlines.append('# (this is based on recipes that have previously been built and packaged)')
+ outlines.append('# NOTE: the following prog dependencies are unknown, ignoring: %s' % ' '.join(list(set(unmapped))))
- if deps:
- values['DEPENDS'] = ' '.join(deps)
+ RecipeHandler.handle_depends(libdeps, pcdeps, deps, outlines, values, tinfoil.config_data)
+
+ for handler in handlers:
+ handler.post_process(srctree, libdeps, pcdeps, deps, outlines, inherits, values)
if inherits:
values['inherit'] = ' '.join(list(set(inherits)))
@@ -239,8 +703,37 @@ class AutotoolsRecipeHandler(RecipeHandler):
return values
+class AutotoolsExtensionHandler(object):
+ '''Base class for Autotools extension handlers'''
+ def process_macro(self, srctree, keyword, value, process_value, libdeps, pcdeps, deps, outlines, inherits, values):
+ '''
+ Handle a macro parsed out of an autotools file. Note that if you want this to be called
+ for any macro other than the ones AutotoolsRecipeHandler already looks for, you'll need
+ to add it to the keywords list in extend_keywords().
+ Return True if you've completely handled the passed in macro, otherwise return False.
+ '''
+ return False
+
+ def extend_keywords(self, keywords):
+ '''Adds keywords to be recognised by the parser (so that you get a call to process_macro)'''
+ return
+
+ def process_prog(self, srctree, keyword, value, prog, deps, outlines, inherits, values):
+ '''
+ Handle an AC_PATH_PROG, AC_CHECK_PROG etc. line
+ Return True if you've completely handled the passed in macro, otherwise return False.
+ '''
+ return False
+
+ def post_process(self, srctree, fn, pkg, deps, outlines, inherits, values):
+ '''
+ Apply any desired post-processing on the output
+ '''
+ return
+
+
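Analogous to the CMake case, a minimal hypothetical autotools extension plugin might look like this (the macro and recipe names are invented; register_autotools_handlers() is the hook probed in extract_autotools_deps() above):

from recipetool.create_buildsys import AutotoolsExtensionHandler

class FoobarAutotoolsHandler(AutotoolsExtensionHandler):
    '''Recognise a hypothetical project-specific FOOBAR_CHECK macro'''
    def extend_keywords(self, keywords):
        keywords.append('FOOBAR_CHECK')

    def process_macro(self, srctree, keyword, value, process_value, libdeps, pcdeps, deps, outlines, inherits, values):
        if keyword == 'FOOBAR_CHECK':
            deps.append('foobar')
            return True
        return False

def register_autotools_handlers(handlers):
    handlers.append(FoobarAutotoolsHandler())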
class MakefileRecipeHandler(RecipeHandler):
- def process(self, srctree, classes, lines_before, lines_after, handled):
+ def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
if 'buildsystem' in handled:
return False
@@ -307,10 +800,60 @@ class MakefileRecipeHandler(RecipeHandler):
self.genfunction(lines_after, 'do_install', ['# Specify install commands here'])
+class VersionFileRecipeHandler(RecipeHandler):
+ def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
+ if 'PV' not in extravalues:
+ # Look for a VERSION or version file containing a single line consisting
+ # only of a version number
+ filelist = RecipeHandler.checkfiles(srctree, ['VERSION', 'version'])
+ version = None
+ for fileitem in filelist:
+ linecount = 0
+ with open(fileitem, 'r') as f:
+ for line in f:
+ line = line.rstrip().strip('"\'')
+ linecount += 1
+ if line:
+ if linecount > 1:
+ version = None
+ break
+ else:
+ if validate_pv(line):
+ version = line
+ if version:
+ extravalues['PV'] = version
+ break
+
+
+class SpecFileRecipeHandler(RecipeHandler):
+ def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
+ if 'PV' in extravalues and 'PN' in extravalues:
+ return
+ filelist = RecipeHandler.checkfiles(srctree, ['*.spec'], recursive=True)
+ pn = None
+ pv = None
+ for fileitem in filelist:
+ linecount = 0
+ with open(fileitem, 'r') as f:
+ for line in f:
+ if line.startswith('Name:') and not pn:
+ pn = line.split(':')[1].strip()
+ if line.startswith('Version:') and not pv:
+ pv = line.split(':')[1].strip()
+ if pv or pn:
+ if pv and not 'PV' in extravalues and validate_pv(pv):
+ extravalues['PV'] = pv
+ if pn and not 'PN' in extravalues:
+ extravalues['PN'] = pn
+ break
+
def register_recipe_handlers(handlers):
- # These are in a specific order so that the right one is detected first
- handlers.append(CmakeRecipeHandler())
- handlers.append(AutotoolsRecipeHandler())
- handlers.append(SconsRecipeHandler())
- handlers.append(QmakeRecipeHandler())
- handlers.append(MakefileRecipeHandler())
+ # Set priorities with some gaps so that other plugins can insert
+ # their own handlers (so avoid changing these numbers)
+ handlers.append((CmakeRecipeHandler(), 50))
+ handlers.append((AutotoolsRecipeHandler(), 40))
+ handlers.append((SconsRecipeHandler(), 30))
+ handlers.append((QmakeRecipeHandler(), 20))
+ handlers.append((MakefileRecipeHandler(), 10))
+ handlers.append((VersionFileRecipeHandler(), -1))
+ handlers.append((SpecFileRecipeHandler(), -1))
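This hunk does not show how the (handler, priority) tuples are consumed; assuming the caller simply runs handlers in descending priority order (consistent with the comment above and with the Python handler being registered at 70 to stay ahead of the Makefile fallback), the ordering amounts to:

def sort_handlers(handlers):
    # handlers is a list of (handler, priority) tuples; higher priority runs first
    return [h for h, _prio in sorted(handlers, key=lambda item: item[1], reverse=True)]

# sort_handlers([('makefile', 10), ('kernel', 100), ('cmake', 50)])
# -> ['kernel', 'cmake', 'makefile']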
diff --git a/yocto-poky/scripts/lib/recipetool/create_buildsys_python.py b/yocto-poky/scripts/lib/recipetool/create_buildsys_python.py
index e0af2a0f5..c3823307a 100644
--- a/yocto-poky/scripts/lib/recipetool/create_buildsys_python.py
+++ b/yocto-poky/scripts/lib/recipetool/create_buildsys_python.py
@@ -159,7 +159,7 @@ class PythonRecipeHandler(RecipeHandler):
def __init__(self):
pass
- def process(self, srctree, classes, lines_before, lines_after, handled):
+ def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
if 'buildsystem' in handled:
return False
@@ -278,7 +278,10 @@ class PythonRecipeHandler(RecipeHandler):
for k in sorted(bbinfo):
v = bbinfo[k]
mdinfo.append('{} = "{}"'.format(k, v))
- lines_before[src_uri_line-1:src_uri_line-1] = mdinfo
+ if src_uri_line:
+ lines_before[src_uri_line-1:src_uri_line-1] = mdinfo
+ else:
+ lines_before.extend(mdinfo)
mapped_deps, unmapped_deps = self.scan_setup_python_deps(srctree, setup_info, setup_non_literals)
@@ -713,4 +716,4 @@ def has_non_literals(value):
def register_recipe_handlers(handlers):
# We need to make sure this is ahead of the makefile fallback handler
- handlers.insert(0, PythonRecipeHandler())
+ handlers.append((PythonRecipeHandler(), 70))
diff --git a/yocto-poky/scripts/lib/recipetool/create_kernel.py b/yocto-poky/scripts/lib/recipetool/create_kernel.py
new file mode 100644
index 000000000..c6e86bd2b
--- /dev/null
+++ b/yocto-poky/scripts/lib/recipetool/create_kernel.py
@@ -0,0 +1,99 @@
+# Recipe creation tool - kernel support plugin
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import re
+import logging
+from recipetool.create import RecipeHandler, read_pkgconfig_provides, validate_pv
+
+logger = logging.getLogger('recipetool')
+
+tinfoil = None
+
+def tinfoil_init(instance):
+ global tinfoil
+ tinfoil = instance
+
+
+class KernelRecipeHandler(RecipeHandler):
+ def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
+ import bb.process
+ if 'buildsystem' in handled:
+ return False
+
+ for tell in ['arch', 'firmware', 'Kbuild', 'Kconfig']:
+ if not os.path.exists(os.path.join(srctree, tell)):
+ return False
+
+ handled.append('buildsystem')
+ del lines_after[:]
+ del classes[:]
+ template = os.path.join(tinfoil.config_data.getVar('COREBASE', True), 'meta-skeleton', 'recipes-kernel', 'linux', 'linux-yocto-custom.bb')
+ def handle_var(varname, origvalue, op, newlines):
+ if varname in ['SRCREV', 'SRCREV_machine']:
+ while newlines[-1].startswith('#'):
+ del newlines[-1]
+ try:
+ stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree, shell=True)
+ except bb.process.ExecutionError as e:
+ stdout = None
+ if stdout:
+ return stdout.strip(), op, 0, True
+ elif varname == 'LINUX_VERSION':
+ makefile = os.path.join(srctree, 'Makefile')
+ if os.path.exists(makefile):
+ kversion = -1
+ kpatchlevel = -1
+ ksublevel = -1
+ kextraversion = ''
+ with open(makefile, 'r') as f:
+ for i, line in enumerate(f):
+ if i > 10:
+ break
+ if line.startswith('VERSION ='):
+ kversion = int(line.split('=')[1].strip())
+ elif line.startswith('PATCHLEVEL ='):
+ kpatchlevel = int(line.split('=')[1].strip())
+ elif line.startswith('SUBLEVEL ='):
+ ksublevel = int(line.split('=')[1].strip())
+ elif line.startswith('EXTRAVERSION ='):
+ kextraversion = line.split('=')[1].strip()
+ version = ''
+ if kversion > -1 and kpatchlevel > -1:
+ version = '%d.%d' % (kversion, kpatchlevel)
+ if ksublevel > -1:
+ version += '.%d' % ksublevel
+ version += kextraversion
+ if version:
+ return version, op, 0, True
+ elif varname == 'SRC_URI':
+ while newlines[-1].startswith('#'):
+ del newlines[-1]
+ elif varname == 'COMPATIBLE_MACHINE':
+ while newlines[-1].startswith('#'):
+ del newlines[-1]
+ machine = tinfoil.config_data.getVar('MACHINE', True)
+ return machine, op, 0, True
+ return origvalue, op, 0, True
+ with open(template, 'r') as f:
+ varlist = ['SRCREV', 'SRCREV_machine', 'SRC_URI', 'LINUX_VERSION', 'COMPATIBLE_MACHINE']
+ (_, newlines) = bb.utils.edit_metadata(f, varlist, handle_var)
+ lines_before[:] = [line.rstrip('\n') for line in newlines]
+
+ return True
+
+def register_recipe_handlers(handlers):
+ handlers.append((KernelRecipeHandler(), 100))
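The LINUX_VERSION handling above reassembles VERSION/PATCHLEVEL/SUBLEVEL/EXTRAVERSION from the top of the kernel Makefile; the same idea as a standalone sketch (pass it whichever Makefile you have to hand):

def kernel_version_from_makefile(makefile):
    # Read the first few lines of a kernel Makefile and rebuild e.g. '4.4.3-rc1'
    fields = {'VERSION': -1, 'PATCHLEVEL': -1, 'SUBLEVEL': -1}
    extra = ''
    with open(makefile, 'r') as f:
        for i, line in enumerate(f):
            if i > 10:
                break
            for key in fields:
                if line.startswith(key + ' ='):
                    fields[key] = int(line.split('=')[1].strip())
            if line.startswith('EXTRAVERSION ='):
                extra = line.split('=')[1].strip()
    if fields['VERSION'] < 0 or fields['PATCHLEVEL'] < 0:
        return None
    version = '%d.%d' % (fields['VERSION'], fields['PATCHLEVEL'])
    if fields['SUBLEVEL'] > -1:
        version += '.%d' % fields['SUBLEVEL']
    return version + extra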
diff --git a/yocto-poky/scripts/lib/recipetool/create_kmod.py b/yocto-poky/scripts/lib/recipetool/create_kmod.py
new file mode 100644
index 000000000..fe39edb28
--- /dev/null
+++ b/yocto-poky/scripts/lib/recipetool/create_kmod.py
@@ -0,0 +1,152 @@
+# Recipe creation tool - kernel module support plugin
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import re
+import logging
+from recipetool.create import RecipeHandler, read_pkgconfig_provides, validate_pv
+
+logger = logging.getLogger('recipetool')
+
+tinfoil = None
+
+def tinfoil_init(instance):
+ global tinfoil
+ tinfoil = instance
+
+
+class KernelModuleRecipeHandler(RecipeHandler):
+ def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
+ import bb.process
+ if 'buildsystem' in handled:
+ return False
+
+ module_inc_re = re.compile(r'^#include\s+<linux/module.h>$')
+ makefiles = []
+ is_module = False
+
+ makefiles = []
+
+ files = RecipeHandler.checkfiles(srctree, ['*.c', '*.h'], recursive=True)
+ if files:
+ for cfile in files:
+ # Look in same dir or parent for Makefile
+ for makefile in [os.path.join(os.path.dirname(cfile), 'Makefile'), os.path.join(os.path.dirname(os.path.dirname(cfile)), 'Makefile')]:
+ if makefile in makefiles:
+ break
+ else:
+ if os.path.exists(makefile):
+ makefiles.append(makefile)
+ break
+ else:
+ continue
+ with open(cfile, 'r') as f:
+ for line in f:
+ if module_inc_re.match(line.strip()):
+ is_module = True
+ break
+ if is_module:
+ break
+
+ if is_module:
+ classes.append('module')
+ handled.append('buildsystem')
+ # module.bbclass and the classes it inherits do most of the hard
+ # work, but we need to tweak it slightly depending on what the
+ # Makefile does (and there is a range of those)
+ # Check the makefile for the appropriate install target
+ install_lines = []
+ compile_lines = []
+ in_install = False
+ in_compile = False
+ install_target = None
+ with open(makefile, 'r') as f:
+ for line in f:
+ if line.startswith('install:'):
+ if not install_lines:
+ in_install = True
+ install_target = 'install'
+ elif line.startswith('modules_install:'):
+ install_lines = []
+ in_install = True
+ install_target = 'modules_install'
+ elif line.startswith('modules:'):
+ compile_lines = []
+ in_compile = True
+ elif line.startswith(('all:', 'default:')):
+ if not compile_lines:
+ in_compile = True
+ elif line:
+ if line[0] == '\t':
+ if in_install:
+ install_lines.append(line)
+ elif in_compile:
+ compile_lines.append(line)
+ elif ':' in line:
+ in_install = False
+ in_compile = False
+
+ def check_target(lines, install):
+ kdirpath = ''
+ manual_install = False
+ for line in lines:
+ splitline = line.split()
+ if splitline[0] in ['make', 'gmake', '$(MAKE)']:
+ if '-C' in splitline:
+ idx = splitline.index('-C') + 1
+ if idx < len(splitline):
+ kdirpath = splitline[idx]
+ break
+ elif install and splitline[0] == 'install':
+ if '.ko' in line:
+ manual_install = True
+ return kdirpath, manual_install
+
+ kdirpath = None
+ manual_install = False
+ if install_lines:
+ kdirpath, manual_install = check_target(install_lines, install=True)
+ if compile_lines and not kdirpath:
+ kdirpath, _ = check_target(compile_lines, install=False)
+
+ if manual_install or not install_lines:
+ lines_after.append('EXTRA_OEMAKE_append_task-install = " -C ${STAGING_KERNEL_DIR} M=${S}"')
+ elif install_target and install_target != 'modules_install':
+ lines_after.append('MODULES_INSTALL_TARGET = "install"')
+
+ warnmsg = None
+ kdirvar = None
+ if kdirpath:
+ res = re.match(r'\$\(([^$)]+)\)', kdirpath)
+ if res:
+ kdirvar = res.group(1)
+ if kdirvar != 'KERNEL_SRC':
+ lines_after.append('EXTRA_OEMAKE += "%s=${STAGING_KERNEL_DIR}"' % kdirvar)
+ elif kdirpath.startswith('/lib/'):
+ warnmsg = 'Kernel path in install makefile is hardcoded - you will need to patch the makefile'
+ if not kdirvar and not warnmsg:
+ warnmsg = 'Unable to find means of passing kernel path into install makefile - if kernel path is hardcoded you will need to patch the makefile'
+ if warnmsg:
+ warnmsg += '. Note that the variable KERNEL_SRC will be passed in as the kernel source path.'
+ logger.warn(warnmsg)
+ lines_after.append('# %s' % warnmsg)
+
+ return True
+
+ return False
+
+def register_recipe_handlers(handlers):
+ handlers.append((KernelModuleRecipeHandler(), 15))
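In isolation, the '-C' detection that drives the KERNEL_SRC decision above looks roughly like this (the sample makefile line is invented):

import re

def find_kdir(rule_lines):
    # Return the path passed to 'make -C' in a makefile rule body, if any
    for line in rule_lines:
        words = line.split()
        if words and words[0] in ('make', 'gmake', '$(MAKE)') and '-C' in words:
            idx = words.index('-C') + 1
            if idx < len(words):
                return words[idx]
    return None

kdir = find_kdir(['\t$(MAKE) -C $(KERNEL_SRC) M=$(PWD) modules'])
res = re.match(r'\$\(([^$)]+)\)', kdir or '')
# res.group(1) == 'KERNEL_SRC' here, so no extra EXTRA_OEMAKE override would be needed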
diff --git a/yocto-poky/scripts/lib/recipetool/create_npm.py b/yocto-poky/scripts/lib/recipetool/create_npm.py
new file mode 100644
index 000000000..b3ffcdbc5
--- /dev/null
+++ b/yocto-poky/scripts/lib/recipetool/create_npm.py
@@ -0,0 +1,156 @@
+# Recipe creation tool - node.js NPM module support plugin
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import logging
+import subprocess
+import tempfile
+import shutil
+import json
+from recipetool.create import RecipeHandler, split_pkg_licenses
+
+logger = logging.getLogger('recipetool')
+
+
+tinfoil = None
+
+def tinfoil_init(instance):
+ global tinfoil
+ tinfoil = instance
+
+
+class NpmRecipeHandler(RecipeHandler):
+ lockdownpath = None
+
+ def _handle_license(self, data):
+ '''
+ Handle the license value from an npm package.json file
+ '''
+ license = None
+ if 'license' in data:
+ license = data['license']
+ if isinstance(license, dict):
+ license = license.get('type', None)
+ return license
+
+ def _shrinkwrap(self, srctree, localfilesdir, extravalues, lines_before):
+ try:
+ runenv = dict(os.environ, PATH=tinfoil.config_data.getVar('PATH', True))
+ bb.process.run('npm shrinkwrap', cwd=srctree, stderr=subprocess.STDOUT, env=runenv, shell=True)
+ except bb.process.ExecutionError as e:
+ logger.warn('npm shrinkwrap failed:\n%s' % e.stdout)
+ return
+
+ tmpfile = os.path.join(localfilesdir, 'npm-shrinkwrap.json')
+ shutil.move(os.path.join(srctree, 'npm-shrinkwrap.json'), tmpfile)
+ extravalues.setdefault('extrafiles', {})
+ extravalues['extrafiles']['npm-shrinkwrap.json'] = tmpfile
+ lines_before.append('NPM_SHRINKWRAP := "${THISDIR}/${PN}/npm-shrinkwrap.json"')
+
+ def _lockdown(self, srctree, localfilesdir, extravalues, lines_before):
+ runenv = dict(os.environ, PATH=tinfoil.config_data.getVar('PATH', True))
+ if not NpmRecipeHandler.lockdownpath:
+ NpmRecipeHandler.lockdownpath = tempfile.mkdtemp('recipetool-npm-lockdown')
+ bb.process.run('npm install lockdown --prefix %s' % NpmRecipeHandler.lockdownpath,
+ cwd=srctree, stderr=subprocess.STDOUT, env=runenv, shell=True)
+ relockbin = os.path.join(NpmRecipeHandler.lockdownpath, 'node_modules', 'lockdown', 'relock.js')
+ if not os.path.exists(relockbin):
+ logger.warn('Could not find relock.js within lockdown directory; skipping lockdown')
+ return
+ try:
+ bb.process.run('node %s' % relockbin, cwd=srctree, stderr=subprocess.STDOUT, env=runenv, shell=True)
+ except bb.process.ExecutionError as e:
+ logger.warn('lockdown-relock failed:\n%s' % e.stdout)
+ return
+
+ tmpfile = os.path.join(localfilesdir, 'lockdown.json')
+ shutil.move(os.path.join(srctree, 'lockdown.json'), tmpfile)
+ extravalues.setdefault('extrafiles', {})
+ extravalues['extrafiles']['lockdown.json'] = tmpfile
+ lines_before.append('NPM_LOCKDOWN := "${THISDIR}/${PN}/lockdown.json"')
+
+ def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
+ import bb.utils
+ import oe
+ from collections import OrderedDict
+
+ if 'buildsystem' in handled:
+ return False
+
+ def read_package_json(fn):
+ with open(fn, 'r') as f:
+ return json.loads(f.read())
+
+ files = RecipeHandler.checkfiles(srctree, ['package.json'])
+ if files:
+ data = read_package_json(files[0])
+ if 'name' in data and 'version' in data:
+ extravalues['PN'] = data['name']
+ extravalues['PV'] = data['version']
+ classes.append('npm')
+ handled.append('buildsystem')
+ if 'description' in data:
+ lines_before.append('SUMMARY = "%s"' % data['description'])
+ if 'homepage' in data:
+ lines_before.append('HOMEPAGE = "%s"' % data['homepage'])
+
+ # Shrinkwrap
+ localfilesdir = tempfile.mkdtemp(prefix='recipetool-npm')
+ self._shrinkwrap(srctree, localfilesdir, extravalues, lines_before)
+
+ # Lockdown
+ self._lockdown(srctree, localfilesdir, extravalues, lines_before)
+
+ # Split each npm module out into its own package
+ npmpackages = oe.package.npm_split_package_dirs(srctree)
+ for item in handled:
+ if isinstance(item, tuple):
+ if item[0] == 'license':
+ licvalues = item[1]
+ break
+ if licvalues:
+ # Augment the license list with information we have in the packages
+ licenses = {}
+ license = self._handle_license(data)
+ if license:
+ licenses['${PN}'] = license
+ for pkgname, pkgitem in npmpackages.iteritems():
+ _, pdata = pkgitem
+ license = self._handle_license(pdata)
+ if license:
+ licenses[pkgname] = license
+ # Now write out the package-specific license values
+ # We need to strip out the json data dicts for this since split_pkg_licenses
+ # isn't expecting it
+ packages = OrderedDict((x,y[0]) for x,y in npmpackages.iteritems())
+ packages['${PN}'] = ''
+ pkglicenses = split_pkg_licenses(licvalues, packages, lines_after, licenses)
+ all_licenses = list(set([item for pkglicense in pkglicenses.values() for item in pkglicense]))
+ # Go back and update the LICENSE value since we have a bit more
+ # information than when that was written out (and we know all apply
+ # vs. there being a choice, so we can join them with &)
+ for i, line in enumerate(lines_before):
+ if line.startswith('LICENSE = '):
+ lines_before[i] = 'LICENSE = "%s"' % ' & '.join(all_licenses)
+ break
+
+ return True
+
+ return False
+
+def register_recipe_handlers(handlers):
+ handlers.append((NpmRecipeHandler(), 60))
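A standalone sketch of the package.json probing the handler starts from (field mapping only; shrinkwrap/lockdown and per-module license splitting are handled above):

import json

def npm_basic_metadata(package_json_path):
    # Pull out the fields the handler maps onto PN/PV/SUMMARY/HOMEPAGE
    with open(package_json_path, 'r') as f:
        data = json.load(f)
    meta = {}
    if 'name' in data and 'version' in data:
        meta['PN'] = data['name']
        meta['PV'] = data['version']
    if 'description' in data:
        meta['SUMMARY'] = data['description']
    if 'homepage' in data:
        meta['HOMEPAGE'] = data['homepage']
    return meta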
diff --git a/yocto-poky/scripts/lib/recipetool/newappend.py b/yocto-poky/scripts/lib/recipetool/newappend.py
index 77b74cb73..bdf0693ec 100644
--- a/yocto-poky/scripts/lib/recipetool/newappend.py
+++ b/yocto-poky/scripts/lib/recipetool/newappend.py
@@ -25,18 +25,15 @@ import errno
import logging
import os
import re
+import subprocess
import sys
+import scriptutils
logger = logging.getLogger('recipetool')
tinfoil = None
-def plugin_init(pluginlist):
- # Don't need to do anything here right now, but plugins must have this function defined
- pass
-
-
def tinfoil_init(instance):
global tinfoil
tinfoil = instance
@@ -94,17 +91,21 @@ def newappend(args):
bb.utils.mkdirhier(os.path.dirname(append_path))
try:
- open(append_path, 'a')
+ open(append_path, 'a').close()
except (OSError, IOError) as exc:
logger.critical(str(exc))
return 1
- print(append_path)
+ if args.edit:
+ return scriptutils.run_editor([append_path, recipe_path])
+ else:
+ print(append_path)
-def register_command(subparsers):
+def register_commands(subparsers):
parser = subparsers.add_parser('newappend',
help='Create a bbappend for the specified target in the specified layer')
+ parser.add_argument('-e', '--edit', help='Edit the new append. This obeys $VISUAL if set, otherwise $EDITOR, otherwise vi.', action='store_true')
parser.add_argument('-w', '--wildcard-version', help='Use wildcard to make the bbappend apply to any recipe version', action='store_true')
parser.add_argument('destlayer', help='Base directory of the destination layer to write the bbappend to', type=layer)
parser.add_argument('target', help='Target recipe/provide to append')
diff --git a/yocto-poky/scripts/lib/recipetool/setvar.py b/yocto-poky/scripts/lib/recipetool/setvar.py
new file mode 100644
index 000000000..657d2b6a7
--- /dev/null
+++ b/yocto-poky/scripts/lib/recipetool/setvar.py
@@ -0,0 +1,75 @@
+# Recipe creation tool - set variable plugin
+#
+# Copyright (C) 2015 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import sys
+import os
+import argparse
+import glob
+import fnmatch
+import re
+import logging
+import scriptutils
+
+logger = logging.getLogger('recipetool')
+
+tinfoil = None
+plugins = None
+
+def tinfoil_init(instance):
+ global tinfoil
+ tinfoil = instance
+
+def setvar(args):
+ import oe.recipeutils
+
+ if args.delete:
+ if args.value:
+ logger.error('-D/--delete and specifying a value are mutually exclusive')
+ return 1
+ value = None
+ else:
+ if args.value is None:
+ logger.error('You must specify a value if not using -D/--delete')
+ return 1
+ value = args.value
+ varvalues = {args.varname: value}
+
+ if args.recipe_only:
+ patches = [oe.recipeutils.patch_recipe_file(args.recipefile, varvalues, patch=args.patch)]
+ else:
+ rd = oe.recipeutils.parse_recipe(args.recipefile, None, tinfoil.config_data)
+ if not rd:
+ return 1
+ patches = oe.recipeutils.patch_recipe(rd, args.recipefile, varvalues, patch=args.patch)
+ if args.patch:
+ for patch in patches:
+ for line in patch:
+ sys.stdout.write(line)
+ return 0
+
+
+def register_commands(subparsers):
+ parser_setvar = subparsers.add_parser('setvar',
+ help='Set a variable within a recipe',
+ description='Adds/updates the value a variable is set to in a recipe')
+ parser_setvar.add_argument('recipefile', help='Recipe file to update')
+ parser_setvar.add_argument('varname', help='Variable name to set')
+ parser_setvar.add_argument('value', nargs='?', help='New value to set the variable to')
+ parser_setvar.add_argument('--recipe-only', '-r', help='Do not set variable in any include file if present', action='store_true')
+ parser_setvar.add_argument('--patch', '-p', help='Create a patch to make the change instead of modifying the recipe', action='store_true')
+ parser_setvar.add_argument('--delete', '-D', help='Delete the specified value instead of setting it', action='store_true')
+ parser_setvar.set_defaults(func=setvar)
diff --git a/yocto-poky/scripts/lib/scriptutils.py b/yocto-poky/scripts/lib/scriptutils.py
index e79a195b1..aef19d3d7 100644
--- a/yocto-poky/scripts/lib/scriptutils.py
+++ b/yocto-poky/scripts/lib/scriptutils.py
@@ -19,6 +19,8 @@ import sys
import os
import logging
import glob
+import argparse
+import subprocess
def logger_create(name):
logger = logging.getLogger(name)
@@ -100,3 +102,17 @@ def fetch_uri(d, uri, destdir, srcrev=None):
os.chdir(olddir)
return ret
+def run_editor(fn):
+ if isinstance(fn, basestring):
+ params = '"%s"' % fn
+ else:
+ params = ''
+ for fnitem in fn:
+ params += ' "%s"' % fnitem
+
+ editor = os.getenv('VISUAL', os.getenv('EDITOR', 'vi'))
+ try:
+ return subprocess.check_call('%s %s' % (editor, params), shell=True)
+ except OSError as exc:
+ logger.error("Execution of editor '%s' failed: %s", editor, exc)
+ return 1
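Usage is straightforward; a sketch of how a caller such as the newappend plugin above drives the helper (file paths are placeholders, and scripts/lib is assumed to be on sys.path):

import scriptutils

# Opens both files in $VISUAL, else $EDITOR, else vi; returns a shell-style exit code
ret = scriptutils.run_editor(['/path/to/example.bbappend', '/path/to/example.bb'])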
diff --git a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/__init__.py b/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/__init__.py
+++ /dev/null
diff --git a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/base.py b/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/base.py
deleted file mode 100644
index e6c8f56f9..000000000
--- a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/base.py
+++ /dev/null
@@ -1,466 +0,0 @@
-#
-# Chris Lumens <clumens@redhat.com>
-#
-# Copyright 2006, 2007, 2008 Red Hat, Inc.
-#
-# This copyrighted material is made available to anyone wishing to use, modify,
-# copy, or redistribute it subject to the terms and conditions of the GNU
-# General Public License v.2. This program is distributed in the hope that it
-# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
-# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-# See the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc., 51
-# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
-# trademarks that are incorporated in the source code or documentation are not
-# subject to the GNU General Public License and may only be used or replicated
-# with the express permission of Red Hat, Inc.
-#
-"""
-Base classes for creating commands and syntax version object.
-
-This module exports several important base classes:
-
- BaseData - The base abstract class for all data objects. Data objects
- are contained within a BaseHandler object.
-
- BaseHandler - The base abstract class from which versioned kickstart
- handler are derived. Subclasses of BaseHandler hold
- BaseData and KickstartCommand objects.
-
- DeprecatedCommand - An abstract subclass of KickstartCommand that should
- be further subclassed by users of this module. When
- a subclass is used, a warning message will be
- printed.
-
- KickstartCommand - The base abstract class for all kickstart commands.
- Command objects are contained within a BaseHandler
- object.
-"""
-import gettext
-gettext.textdomain("pykickstart")
-_ = lambda x: gettext.ldgettext("pykickstart", x)
-
-import types
-import warnings
-from pykickstart.errors import *
-from pykickstart.ko import *
-from pykickstart.parser import Packages
-from pykickstart.version import versionToString
-
-###
-### COMMANDS
-###
-class KickstartCommand(KickstartObject):
- """The base class for all kickstart commands. This is an abstract class."""
- removedKeywords = []
- removedAttrs = []
-
- def __init__(self, writePriority=0, *args, **kwargs):
- """Create a new KickstartCommand instance. This method must be
- provided by all subclasses, but subclasses must call
- KickstartCommand.__init__ first. Instance attributes:
-
- currentCmd -- The name of the command in the input file that
- caused this handler to be run.
- currentLine -- The current unprocessed line from the input file
- that caused this handler to be run.
- handler -- A reference to the BaseHandler subclass this
- command is contained withing. This is needed to
- allow referencing of Data objects.
- lineno -- The current line number in the input file.
- writePriority -- An integer specifying when this command should be
- printed when iterating over all commands' __str__
- methods. The higher the number, the later this
- command will be written. All commands with the
- same priority will be written alphabetically.
- """
-
- # We don't want people using this class by itself.
- if self.__class__ is KickstartCommand:
- raise TypeError, "KickstartCommand is an abstract class."
-
- KickstartObject.__init__(self, *args, **kwargs)
-
- self.writePriority = writePriority
-
- # These will be set by the dispatcher.
- self.currentCmd = ""
- self.currentLine = ""
- self.handler = None
- self.lineno = 0
-
- # If a subclass provides a removedKeywords list, remove all the
- # members from the kwargs list before we start processing it. This
- # ensures that subclasses don't continue to recognize arguments that
- # were removed.
- for arg in filter(kwargs.has_key, self.removedKeywords):
- kwargs.pop(arg)
-
- def __call__(self, *args, **kwargs):
- """Set multiple attributes on a subclass of KickstartCommand at once
- via keyword arguments. Valid attributes are anything specified in
- a subclass, but unknown attributes will be ignored.
- """
- for (key, val) in kwargs.items():
- # Ignore setting attributes that were removed in a subclass, as
- # if they were unknown attributes.
- if key in self.removedAttrs:
- continue
-
- if hasattr(self, key):
- setattr(self, key, val)
-
- def __str__(self):
- """Return a string formatted for output to a kickstart file. This
- method must be provided by all subclasses.
- """
- return KickstartObject.__str__(self)
-
- def parse(self, args):
- """Parse the list of args and set data on the KickstartCommand object.
- This method must be provided by all subclasses.
- """
- raise TypeError, "parse() not implemented for KickstartCommand"
-
- def apply(self, instroot="/"):
- """Write out the configuration related to the KickstartCommand object.
- Subclasses which do not provide this method will not have their
- configuration written out.
- """
- return
-
- def dataList(self):
- """For commands that can occur multiple times in a single kickstart
- file (like network, part, etc.), return the list that we should
- append more data objects to.
- """
- return None
-
- def deleteRemovedAttrs(self):
- """Remove all attributes from self that are given in the removedAttrs
- list. This method should be called from __init__ in a subclass,
- but only after the superclass's __init__ method has been called.
- """
- for attr in filter(lambda k: hasattr(self, k), self.removedAttrs):
- delattr(self, attr)
-
- # Set the contents of the opts object (an instance of optparse.Values
- # returned by parse_args) as attributes on the KickstartCommand object.
- # It's useful to call this from KickstartCommand subclasses after parsing
- # the arguments.
- def _setToSelf(self, optParser, opts):
- self._setToObj(optParser, opts, self)
-
- # Sets the contents of the opts object (an instance of optparse.Values
- # returned by parse_args) as attributes on the provided object obj. It's
- # useful to call this from KickstartCommand subclasses that handle lists
- # of objects (like partitions, network devices, etc.) and need to populate
- # a Data object.
- def _setToObj(self, optParser, opts, obj):
- for key in filter (lambda k: getattr(opts, k) != None, optParser.keys()):
- setattr(obj, key, getattr(opts, key))
-
-class DeprecatedCommand(KickstartCommand):
- """Specify that a command is deprecated and no longer has any function.
- Any command that is deprecated should be subclassed from this class,
- only specifying an __init__ method that calls the superclass's __init__.
- This is an abstract class.
- """
- def __init__(self, writePriority=None, *args, **kwargs):
- # We don't want people using this class by itself.
- if self.__class__ is KickstartCommand:
- raise TypeError, "DeprecatedCommand is an abstract class."
-
- # Create a new DeprecatedCommand instance.
- KickstartCommand.__init__(self, writePriority, *args, **kwargs)
-
- def __str__(self):
- """Placeholder since DeprecatedCommands don't work anymore."""
- return ""
-
- def parse(self, args):
- """Print a warning message if the command is seen in the input file."""
- mapping = {"lineno": self.lineno, "cmd": self.currentCmd}
- warnings.warn(_("Ignoring deprecated command on line %(lineno)s: The %(cmd)s command has been deprecated and no longer has any effect. It may be removed from future releases, which will result in a fatal error from kickstart. Please modify your kickstart file to remove this command.") % mapping, DeprecationWarning)
-
-
-###
-### HANDLERS
-###
-class BaseHandler(KickstartObject):
- """Each version of kickstart syntax is provided by a subclass of this
- class. These subclasses are what users will interact with for parsing,
- extracting data, and writing out kickstart files. This is an abstract
- class.
-
- version -- The version this syntax handler supports. This is set by
- a class attribute of a BaseHandler subclass and is used to
- set up the command dict. It is for read-only use.
- """
- version = None
-
- def __init__(self, mapping=None, dataMapping=None, commandUpdates=None,
- dataUpdates=None, *args, **kwargs):
- """Create a new BaseHandler instance. This method must be provided by
- all subclasses, but subclasses must call BaseHandler.__init__ first.
-
- mapping -- A custom map from command strings to classes,
- useful when creating your own handler with
- special command objects. It is otherwise unused
- and rarely needed. If you give this argument,
- the mapping takes the place of the default one
- and so must include all commands you want
- recognized.
- dataMapping -- This is the same as mapping, but for data
- objects. All the same comments apply.
- commandUpdates -- This is similar to mapping, but does not take
- the place of the defaults entirely. Instead,
- this mapping is applied after the defaults and
- updates it with just the commands you want to
- modify.
- dataUpdates -- This is the same as commandUpdates, but for
- data objects.
-
-
- Instance attributes:
-
- commands -- A mapping from a string command to a KickstartCommand
- subclass object that handles it. Multiple strings can
- map to the same object, but only one instance of the
- command object should ever exist. Most users should
- never have to deal with this directly, as it is
- manipulated internally and called through dispatcher.
- currentLine -- The current unprocessed line from the input file
- that caused this handler to be run.
- packages -- An instance of pykickstart.parser.Packages which
- describes the packages section of the input file.
- platform -- A string describing the hardware platform, which is
- needed only by system-config-kickstart.
- scripts -- A list of pykickstart.parser.Script instances, which is
- populated by KickstartParser.addScript and describes the
- %pre/%post/%traceback script section of the input file.
- """
-
- # We don't want people using this class by itself.
- if self.__class__ is BaseHandler:
- raise TypeError, "BaseHandler is an abstract class."
-
- KickstartObject.__init__(self, *args, **kwargs)
-
- # This isn't really a good place for these, but it's better than
- # everything else I can think of.
- self.scripts = []
- self.packages = Packages()
- self.platform = ""
-
- # These will be set by the dispatcher.
- self.commands = {}
- self.currentLine = 0
-
- # A dict keyed by an integer priority number, with each value being a
- # list of KickstartCommand subclasses. This dict is maintained by
- # registerCommand and used in __str__. No one else should be touching
- # it.
- self._writeOrder = {}
-
- self._registerCommands(mapping, dataMapping, commandUpdates, dataUpdates)
-
- def __str__(self):
- """Return a string formatted for output to a kickstart file."""
- retval = ""
-
- if self.platform != "":
- retval += "#platform=%s\n" % self.platform
-
- retval += "#version=%s\n" % versionToString(self.version)
-
- lst = self._writeOrder.keys()
- lst.sort()
-
- for prio in lst:
- for obj in self._writeOrder[prio]:
- retval += obj.__str__()
-
- for script in self.scripts:
- retval += script.__str__()
-
- retval += self.packages.__str__()
-
- return retval
-
- def _insertSorted(self, lst, obj):
- length = len(lst)
- i = 0
-
- while i < length:
- # If the two classes have the same name, it's because we are
- # overriding an existing class with one from a later kickstart
- # version, so remove the old one in favor of the new one.
- if obj.__class__.__name__ > lst[i].__class__.__name__:
- i += 1
- elif obj.__class__.__name__ == lst[i].__class__.__name__:
- lst[i] = obj
- return
- elif obj.__class__.__name__ < lst[i].__class__.__name__:
- break
-
- if i >= length:
- lst.append(obj)
- else:
- lst.insert(i, obj)
-
- def _setCommand(self, cmdObj):
- # Add an attribute on this version object. We need this to provide a
- # way for clients to access the command objects. We also need to strip
- # off the version part from the front of the name.
- if cmdObj.__class__.__name__.find("_") != -1:
- name = unicode(cmdObj.__class__.__name__.split("_", 1)[1])
- else:
- name = unicode(cmdObj.__class__.__name__).lower()
-
- setattr(self, name.lower(), cmdObj)
-
- # Also, add the object into the _writeOrder dict in the right place.
- if cmdObj.writePriority is not None:
- if self._writeOrder.has_key(cmdObj.writePriority):
- self._insertSorted(self._writeOrder[cmdObj.writePriority], cmdObj)
- else:
- self._writeOrder[cmdObj.writePriority] = [cmdObj]
-
- def _registerCommands(self, mapping=None, dataMapping=None, commandUpdates=None,
- dataUpdates=None):
- if mapping == {} or mapping == None:
- from pykickstart.handlers.control import commandMap
- cMap = commandMap[self.version]
- else:
- cMap = mapping
-
- if dataMapping == {} or dataMapping == None:
- from pykickstart.handlers.control import dataMap
- dMap = dataMap[self.version]
- else:
- dMap = dataMapping
-
- if type(commandUpdates) == types.DictType:
- cMap.update(commandUpdates)
-
- if type(dataUpdates) == types.DictType:
- dMap.update(dataUpdates)
-
- for (cmdName, cmdClass) in cMap.iteritems():
- # First make sure we haven't instantiated this command handler
- # already. If we have, we just need to make another mapping to
- # it in self.commands.
- cmdObj = None
-
- for (key, val) in self.commands.iteritems():
- if val.__class__.__name__ == cmdClass.__name__:
- cmdObj = val
- break
-
- # If we didn't find an instance in self.commands, create one now.
- if cmdObj == None:
- cmdObj = cmdClass()
- self._setCommand(cmdObj)
-
- # Finally, add the mapping to the commands dict.
- self.commands[cmdName] = cmdObj
- self.commands[cmdName].handler = self
-
- # We also need to create attributes for the various data objects.
- # No checks here because dMap is a bijection. At least, that's what
- # the comment says. Hope no one screws that up.
- for (dataName, dataClass) in dMap.iteritems():
- setattr(self, dataName, dataClass)
-
- def dispatcher(self, args, lineno):
- """Call the appropriate KickstartCommand handler for the current line
- in the kickstart file. A handler for the current command should
- be registered, though a handler of None is not an error. Returns
- the data object returned by KickstartCommand.parse.
-
- args -- A list of arguments to the current command
- lineno -- The line number in the file, for error reporting
- """
- cmd = args[0]
-
- if not self.commands.has_key(cmd):
- raise KickstartParseError, formatErrorMsg(lineno, msg=_("Unknown command: %s" % cmd))
- elif self.commands[cmd] != None:
- self.commands[cmd].currentCmd = cmd
- self.commands[cmd].currentLine = self.currentLine
- self.commands[cmd].lineno = lineno
-
- # The parser returns the data object that was modified. This could
- # be a BaseData subclass that should be put into a list, or it
- # could be the command handler object itself.
- obj = self.commands[cmd].parse(args[1:])
- lst = self.commands[cmd].dataList()
- if lst is not None:
- lst.append(obj)
-
- return obj
-
- def maskAllExcept(self, lst):
- """Set all entries in the commands dict to None, except the ones in
- the lst. All other commands will not be processed.
- """
- self._writeOrder = {}
-
- for (key, val) in self.commands.iteritems():
- if not key in lst:
- self.commands[key] = None
-
- def hasCommand(self, cmd):
- """Return true if there is a handler for the string cmd."""
- return hasattr(self, cmd)
-
-
-###
-### DATA
-###
-class BaseData(KickstartObject):
- """The base class for all data objects. This is an abstract class."""
- removedKeywords = []
- removedAttrs = []
-
- def __init__(self, *args, **kwargs):
- """Create a new BaseData instance.
-
- lineno -- Line number in the ks-file where this object was defined
- """
-
- # We don't want people using this class by itself.
- if self.__class__ is BaseData:
- raise TypeError, "BaseData is an abstract class."
-
- KickstartObject.__init__(self, *args, **kwargs)
- self.lineno = 0
-
- def __str__(self):
- """Return a string formatted for output to a kickstart file."""
- return ""
-
- def __call__(self, *args, **kwargs):
- """Set multiple attributes on a subclass of BaseData at once via
- keyword arguments. Valid attributes are anything specified in a
- subclass, but unknown attributes will be ignored.
- """
- for (key, val) in kwargs.items():
- # Ignore setting attributes that were removed in a subclass, as
- # if they were unknown attributes.
- if key in self.removedAttrs:
- continue
-
- if hasattr(self, key):
- setattr(self, key, val)
-
- def deleteRemovedAttrs(self):
- """Remove all attributes from self that are given in the removedAttrs
- list. This method should be called from __init__ in a subclass,
- but only after the superclass's __init__ method has been called.
- """
- for attr in filter(lambda k: hasattr(self, k), self.removedAttrs):
- delattr(self, attr)
diff --git a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/commands/__init__.py b/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/commands/__init__.py
deleted file mode 100644
index 2d9455093..000000000
--- a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/commands/__init__.py
+++ /dev/null
@@ -1,20 +0,0 @@
-#
-# Chris Lumens <clumens@redhat.com>
-#
-# Copyright 2009 Red Hat, Inc.
-#
-# This copyrighted material is made available to anyone wishing to use, modify,
-# copy, or redistribute it subject to the terms and conditions of the GNU
-# General Public License v.2. This program is distributed in the hope that it
-# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
-# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-# See the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc., 51
-# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
-# trademarks that are incorporated in the source code or documentation are not
-# subject to the GNU General Public License and may only be used or replicated
-# with the express permission of Red Hat, Inc.
-#
-import bootloader, partition
diff --git a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/commands/bootloader.py b/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/commands/bootloader.py
deleted file mode 100644
index c2b552f68..000000000
--- a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/commands/bootloader.py
+++ /dev/null
@@ -1,216 +0,0 @@
-#
-# Chris Lumens <clumens@redhat.com>
-#
-# Copyright 2007 Red Hat, Inc.
-#
-# This copyrighted material is made available to anyone wishing to use, modify,
-# copy, or redistribute it subject to the terms and conditions of the GNU
-# General Public License v.2. This program is distributed in the hope that it
-# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
-# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-# See the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc., 51
-# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
-# trademarks that are incorporated in the source code or documentation are not
-# subject to the GNU General Public License and may only be used or replicated
-# with the express permission of Red Hat, Inc.
-#
-from pykickstart.base import *
-from pykickstart.options import *
-
-class FC3_Bootloader(KickstartCommand):
- removedKeywords = KickstartCommand.removedKeywords
- removedAttrs = KickstartCommand.removedAttrs
-
- def __init__(self, writePriority=10, *args, **kwargs):
- KickstartCommand.__init__(self, writePriority, *args, **kwargs)
- self.op = self._getParser()
-
- self.driveorder = kwargs.get("driveorder", [])
- self.appendLine = kwargs.get("appendLine", "")
- self.forceLBA = kwargs.get("forceLBA", False)
- self.linear = kwargs.get("linear", True)
- self.location = kwargs.get("location", "")
- self.md5pass = kwargs.get("md5pass", "")
- self.password = kwargs.get("password", "")
- self.upgrade = kwargs.get("upgrade", False)
- self.useLilo = kwargs.get("useLilo", False)
-
- self.deleteRemovedAttrs()
-
- def _getArgsAsStr(self):
- retval = ""
-
- if self.appendLine != "":
- retval += " --append=\"%s\"" % self.appendLine
- if self.linear:
- retval += " --linear"
- if self.location:
- retval += " --location=%s" % self.location
- if hasattr(self, "forceLBA") and self.forceLBA:
- retval += " --lba32"
- if self.password != "":
- retval += " --password=\"%s\"" % self.password
- if self.md5pass != "":
- retval += " --md5pass=\"%s\"" % self.md5pass
- if self.upgrade:
- retval += " --upgrade"
- if self.useLilo:
- retval += " --useLilo"
- if len(self.driveorder) > 0:
- retval += " --driveorder=\"%s\"" % ",".join(self.driveorder)
-
- return retval
-
- def __str__(self):
- retval = KickstartCommand.__str__(self)
-
- if self.location != "":
- retval += "# System bootloader configuration\nbootloader"
- retval += self._getArgsAsStr() + "\n"
-
- return retval
-
- def _getParser(self):
- def driveorder_cb (option, opt_str, value, parser):
- for d in value.split(','):
- parser.values.ensure_value(option.dest, []).append(d)
-
- op = KSOptionParser()
- op.add_option("--append", dest="appendLine")
- op.add_option("--linear", dest="linear", action="store_true",
- default=True)
- op.add_option("--nolinear", dest="linear", action="store_false")
- op.add_option("--location", dest="location", type="choice",
- default="mbr",
- choices=["mbr", "partition", "none", "boot"])
- op.add_option("--lba32", dest="forceLBA", action="store_true",
- default=False)
- op.add_option("--password", dest="password", default="")
- op.add_option("--md5pass", dest="md5pass", default="")
- op.add_option("--upgrade", dest="upgrade", action="store_true",
- default=False)
- op.add_option("--useLilo", dest="useLilo", action="store_true",
- default=False)
- op.add_option("--driveorder", dest="driveorder", action="callback",
- callback=driveorder_cb, nargs=1, type="string")
- return op
-
- def parse(self, args):
- (opts, extra) = self.op.parse_args(args=args, lineno=self.lineno)
- self._setToSelf(self.op, opts)
-
- if self.currentCmd == "lilo":
- self.useLilo = True
-
- return self
-
-class FC4_Bootloader(FC3_Bootloader):
- removedKeywords = FC3_Bootloader.removedKeywords + ["linear", "useLilo"]
- removedAttrs = FC3_Bootloader.removedAttrs + ["linear", "useLilo"]
-
- def __init__(self, writePriority=10, *args, **kwargs):
- FC3_Bootloader.__init__(self, writePriority, *args, **kwargs)
-
- def _getArgsAsStr(self):
- retval = ""
- if self.appendLine != "":
- retval += " --append=\"%s\"" % self.appendLine
- if self.location:
- retval += " --location=%s" % self.location
- if hasattr(self, "forceLBA") and self.forceLBA:
- retval += " --lba32"
- if self.password != "":
- retval += " --password=\"%s\"" % self.password
- if self.md5pass != "":
- retval += " --md5pass=\"%s\"" % self.md5pass
- if self.upgrade:
- retval += " --upgrade"
- if len(self.driveorder) > 0:
- retval += " --driveorder=\"%s\"" % ",".join(self.driveorder)
- return retval
-
- def _getParser(self):
- op = FC3_Bootloader._getParser(self)
- op.remove_option("--linear")
- op.remove_option("--nolinear")
- op.remove_option("--useLilo")
- return op
-
- def parse(self, args):
- (opts, extra) = self.op.parse_args(args=args, lineno=self.lineno)
- self._setToSelf(self.op, opts)
- return self
-
-class F8_Bootloader(FC4_Bootloader):
- removedKeywords = FC4_Bootloader.removedKeywords
- removedAttrs = FC4_Bootloader.removedAttrs
-
- def __init__(self, writePriority=10, *args, **kwargs):
- FC4_Bootloader.__init__(self, writePriority, *args, **kwargs)
-
- self.timeout = kwargs.get("timeout", None)
- self.default = kwargs.get("default", "")
-
- def _getArgsAsStr(self):
- ret = FC4_Bootloader._getArgsAsStr(self)
-
- if self.timeout is not None:
- ret += " --timeout=%d" %(self.timeout,)
- if self.default:
- ret += " --default=%s" %(self.default,)
-
- return ret
-
- def _getParser(self):
- op = FC4_Bootloader._getParser(self)
- op.add_option("--timeout", dest="timeout", type="int")
- op.add_option("--default", dest="default")
- return op
-
-class F12_Bootloader(F8_Bootloader):
- removedKeywords = F8_Bootloader.removedKeywords
- removedAttrs = F8_Bootloader.removedAttrs
-
- def _getParser(self):
- op = F8_Bootloader._getParser(self)
- op.add_option("--lba32", dest="forceLBA", deprecated=1, action="store_true")
- return op
-
-class F14_Bootloader(F12_Bootloader):
- removedKeywords = F12_Bootloader.removedKeywords + ["forceLBA"]
- removedAttrs = F12_Bootloader.removedKeywords + ["forceLBA"]
-
- def _getParser(self):
- op = F12_Bootloader._getParser(self)
- op.remove_option("--lba32")
- return op
-
-class F15_Bootloader(F14_Bootloader):
- removedKeywords = F14_Bootloader.removedKeywords
- removedAttrs = F14_Bootloader.removedAttrs
-
- def __init__(self, writePriority=10, *args, **kwargs):
- F14_Bootloader.__init__(self, writePriority, *args, **kwargs)
-
- self.isCrypted = kwargs.get("isCrypted", False)
-
- def _getArgsAsStr(self):
- ret = F14_Bootloader._getArgsAsStr(self)
-
- if self.isCrypted:
- ret += " --iscrypted"
-
- return ret
-
- def _getParser(self):
- def password_cb(option, opt_str, value, parser):
- parser.values.isCrypted = True
- parser.values.password = value
-
- op = F14_Bootloader._getParser(self)
- op.add_option("--iscrypted", dest="isCrypted", action="store_true", default=False)
- op.add_option("--md5pass", action="callback", callback=password_cb, nargs=1, type="string")
- return op
diff --git a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/commands/partition.py b/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/commands/partition.py
deleted file mode 100644
index b564b1a7a..000000000
--- a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/commands/partition.py
+++ /dev/null
@@ -1,314 +0,0 @@
-#
-# Chris Lumens <clumens@redhat.com>
-#
-# Copyright 2005, 2006, 2007, 2008 Red Hat, Inc.
-#
-# This copyrighted material is made available to anyone wishing to use, modify,
-# copy, or redistribute it subject to the terms and conditions of the GNU
-# General Public License v.2. This program is distributed in the hope that it
-# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
-# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-# See the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc., 51
-# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
-# trademarks that are incorporated in the source code or documentation are not
-# subject to the GNU General Public License and may only be used or replicated
-# with the express permission of Red Hat, Inc.
-#
-from pykickstart.base import *
-from pykickstart.errors import *
-from pykickstart.options import *
-
-import gettext
-import warnings
-_ = lambda x: gettext.ldgettext("pykickstart", x)
-
-class FC3_PartData(BaseData):
- removedKeywords = BaseData.removedKeywords
- removedAttrs = BaseData.removedAttrs
-
- def __init__(self, *args, **kwargs):
- BaseData.__init__(self, *args, **kwargs)
- self.active = kwargs.get("active", False)
- self.primOnly = kwargs.get("primOnly", False)
- self.end = kwargs.get("end", 0)
- self.fstype = kwargs.get("fstype", "")
- self.grow = kwargs.get("grow", False)
- self.maxSizeMB = kwargs.get("maxSizeMB", 0)
- self.format = kwargs.get("format", True)
- self.onbiosdisk = kwargs.get("onbiosdisk", "")
- self.disk = kwargs.get("disk", "")
- self.onPart = kwargs.get("onPart", "")
- self.recommended = kwargs.get("recommended", False)
- self.size = kwargs.get("size", None)
- self.start = kwargs.get("start", 0)
- self.mountpoint = kwargs.get("mountpoint", "")
-
- def __eq__(self, y):
- if self.mountpoint:
- return self.mountpoint == y.mountpoint
- else:
- return False
-
- def _getArgsAsStr(self):
- retval = ""
-
- if self.active:
- retval += " --active"
- if self.primOnly:
- retval += " --asprimary"
- if hasattr(self, "end") and self.end != 0:
- retval += " --end=%s" % self.end
- if self.fstype != "":
- retval += " --fstype=\"%s\"" % self.fstype
- if self.grow:
- retval += " --grow"
- if self.maxSizeMB > 0:
- retval += " --maxsize=%d" % self.maxSizeMB
- if not self.format:
- retval += " --noformat"
- if self.onbiosdisk != "":
- retval += " --onbiosdisk=%s" % self.onbiosdisk
- if self.disk != "":
- retval += " --ondisk=%s" % self.disk
- if self.onPart != "":
- retval += " --onpart=%s" % self.onPart
- if self.recommended:
- retval += " --recommended"
- if self.size and self.size != 0:
- retval += " --size=%sk" % self.size
- if hasattr(self, "start") and self.start != 0:
- retval += " --start=%s" % self.start
-
- return retval
-
- def __str__(self):
- retval = BaseData.__str__(self)
- if self.mountpoint:
- mountpoint_str = "%s" % self.mountpoint
- else:
- mountpoint_str = "(No mount point)"
- retval += "part %s%s\n" % (mountpoint_str, self._getArgsAsStr())
- return retval
-
-class FC4_PartData(FC3_PartData):
- removedKeywords = FC3_PartData.removedKeywords
- removedAttrs = FC3_PartData.removedAttrs
-
- def __init__(self, *args, **kwargs):
- FC3_PartData.__init__(self, *args, **kwargs)
- self.bytesPerInode = kwargs.get("bytesPerInode", 4096)
- self.fsopts = kwargs.get("fsopts", "")
- self.label = kwargs.get("label", "")
-
- def _getArgsAsStr(self):
- retval = FC3_PartData._getArgsAsStr(self)
-
- if hasattr(self, "bytesPerInode") and self.bytesPerInode != 0:
- retval += " --bytes-per-inode=%d" % self.bytesPerInode
- if self.fsopts != "":
- retval += " --fsoptions=\"%s\"" % self.fsopts
- if self.label != "":
- retval += " --label=%s" % self.label
-
- return retval
-
-class F9_PartData(FC4_PartData):
- removedKeywords = FC4_PartData.removedKeywords + ["bytesPerInode"]
- removedAttrs = FC4_PartData.removedAttrs + ["bytesPerInode"]
-
- def __init__(self, *args, **kwargs):
- FC4_PartData.__init__(self, *args, **kwargs)
- self.deleteRemovedAttrs()
-
- self.fsopts = kwargs.get("fsopts", "")
- self.label = kwargs.get("label", "")
- self.fsprofile = kwargs.get("fsprofile", "")
- self.encrypted = kwargs.get("encrypted", False)
- self.passphrase = kwargs.get("passphrase", "")
-
- def _getArgsAsStr(self):
- retval = FC4_PartData._getArgsAsStr(self)
-
- if self.fsprofile != "":
- retval += " --fsprofile=\"%s\"" % self.fsprofile
- if self.encrypted:
- retval += " --encrypted"
-
- if self.passphrase != "":
- retval += " --passphrase=\"%s\"" % self.passphrase
-
- return retval
-
-class F11_PartData(F9_PartData):
- removedKeywords = F9_PartData.removedKeywords + ["start", "end"]
- removedAttrs = F9_PartData.removedAttrs + ["start", "end"]
-
-class F12_PartData(F11_PartData):
- removedKeywords = F11_PartData.removedKeywords
- removedAttrs = F11_PartData.removedAttrs
-
- def __init__(self, *args, **kwargs):
- F11_PartData.__init__(self, *args, **kwargs)
-
- self.escrowcert = kwargs.get("escrowcert", "")
- self.backuppassphrase = kwargs.get("backuppassphrase", False)
-
- def _getArgsAsStr(self):
- retval = F11_PartData._getArgsAsStr(self)
-
- if self.encrypted and self.escrowcert != "":
- retval += " --escrowcert=\"%s\"" % self.escrowcert
-
- if self.backuppassphrase:
- retval += " --backuppassphrase"
-
- return retval
-
-F14_PartData = F12_PartData
-
-class FC3_Partition(KickstartCommand):
- removedKeywords = KickstartCommand.removedKeywords
- removedAttrs = KickstartCommand.removedAttrs
-
- def __init__(self, writePriority=130, *args, **kwargs):
- KickstartCommand.__init__(self, writePriority, *args, **kwargs)
- self.op = self._getParser()
-
- self.partitions = kwargs.get("partitions", [])
-
- def __str__(self):
- retval = ""
-
- for part in self.partitions:
- retval += part.__str__()
-
- if retval != "":
- return "# Disk partitioning information\n" + retval
- else:
- return ""
-
- def _getParser(self):
- def part_cb (option, opt_str, value, parser):
- if value.startswith("/dev/"):
- parser.values.ensure_value(option.dest, value[5:])
- else:
- parser.values.ensure_value(option.dest, value)
-
- op = KSOptionParser()
- op.add_option("--active", dest="active", action="store_true",
- default=False)
- op.add_option("--asprimary", dest="primOnly", action="store_true",
- default=False)
- op.add_option("--end", dest="end", action="store", type="int",
- nargs=1)
- op.add_option("--fstype", "--type", dest="fstype")
- op.add_option("--grow", dest="grow", action="store_true", default=False)
- op.add_option("--maxsize", dest="maxSizeMB", action="store", type="int",
- nargs=1)
- op.add_option("--noformat", dest="format", action="store_false",
- default=True)
- op.add_option("--onbiosdisk", dest="onbiosdisk")
- op.add_option("--ondisk", "--ondrive", dest="disk")
- op.add_option("--onpart", "--usepart", dest="onPart", action="callback",
- callback=part_cb, nargs=1, type="string")
- op.add_option("--recommended", dest="recommended", action="store_true",
- default=False)
- op.add_option("--size", dest="size", action="store", type="size",
- nargs=1)
- op.add_option("--start", dest="start", action="store", type="int",
- nargs=1)
- return op
-
- def parse(self, args):
- (opts, extra) = self.op.parse_args(args=args, lineno=self.lineno)
-
- pd = self.handler.PartData()
- self._setToObj(self.op, opts, pd)
- pd.lineno = self.lineno
- if extra:
- pd.mountpoint = extra[0]
- if pd in self.dataList():
- warnings.warn(_("A partition with the mountpoint %s has already been defined.") % pd.mountpoint)
- else:
- pd.mountpoint = None
-
- return pd
-
- def dataList(self):
- return self.partitions
-
-class FC4_Partition(FC3_Partition):
- removedKeywords = FC3_Partition.removedKeywords
- removedAttrs = FC3_Partition.removedAttrs
-
- def __init__(self, writePriority=130, *args, **kwargs):
- FC3_Partition.__init__(self, writePriority, *args, **kwargs)
-
- def part_cb (option, opt_str, value, parser):
- if value.startswith("/dev/"):
- parser.values.ensure_value(option.dest, value[5:])
- else:
- parser.values.ensure_value(option.dest, value)
-
- def _getParser(self):
- op = FC3_Partition._getParser(self)
- op.add_option("--bytes-per-inode", dest="bytesPerInode", action="store",
- type="int", nargs=1)
- op.add_option("--fsoptions", dest="fsopts")
- op.add_option("--label", dest="label")
- return op
-
-class F9_Partition(FC4_Partition):
- removedKeywords = FC4_Partition.removedKeywords
- removedAttrs = FC4_Partition.removedAttrs
-
- def __init__(self, writePriority=130, *args, **kwargs):
- FC4_Partition.__init__(self, writePriority, *args, **kwargs)
-
- def part_cb (option, opt_str, value, parser):
- if value.startswith("/dev/"):
- parser.values.ensure_value(option.dest, value[5:])
- else:
- parser.values.ensure_value(option.dest, value)
-
- def _getParser(self):
- op = FC4_Partition._getParser(self)
- op.add_option("--bytes-per-inode", deprecated=1)
- op.add_option("--fsprofile")
- op.add_option("--encrypted", action="store_true", default=False)
- op.add_option("--passphrase")
- return op
-
-class F11_Partition(F9_Partition):
- removedKeywords = F9_Partition.removedKeywords
- removedAttrs = F9_Partition.removedAttrs
-
- def _getParser(self):
- op = F9_Partition._getParser(self)
- op.add_option("--start", deprecated=1)
- op.add_option("--end", deprecated=1)
- return op
-
-class F12_Partition(F11_Partition):
- removedKeywords = F11_Partition.removedKeywords
- removedAttrs = F11_Partition.removedAttrs
-
- def _getParser(self):
- op = F11_Partition._getParser(self)
- op.add_option("--escrowcert")
- op.add_option("--backuppassphrase", action="store_true", default=False)
- return op
-
-class F14_Partition(F12_Partition):
- removedKeywords = F12_Partition.removedKeywords
- removedAttrs = F12_Partition.removedAttrs
-
- def _getParser(self):
- op = F12_Partition._getParser(self)
- op.remove_option("--bytes-per-inode")
- op.remove_option("--start")
- op.remove_option("--end")
- return op
diff --git a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/constants.py b/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/constants.py
deleted file mode 100644
index 5e12fc80e..000000000
--- a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/constants.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#
-# Chris Lumens <clumens@redhat.com>
-#
-# Copyright 2005-2007 Red Hat, Inc.
-#
-# This copyrighted material is made available to anyone wishing to use, modify,
-# copy, or redistribute it subject to the terms and conditions of the GNU
-# General Public License v.2. This program is distributed in the hope that it
-# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
-# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-# See the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc., 51
-# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
-# trademarks that are incorporated in the source code or documentation are not
-# subject to the GNU General Public License and may only be used or replicated
-# with the express permission of Red Hat, Inc.
-#
-CLEARPART_TYPE_LINUX = 0
-CLEARPART_TYPE_ALL = 1
-CLEARPART_TYPE_NONE = 2
-
-DISPLAY_MODE_CMDLINE = 0
-DISPLAY_MODE_GRAPHICAL = 1
-DISPLAY_MODE_TEXT = 2
-
-FIRSTBOOT_DEFAULT = 0
-FIRSTBOOT_SKIP = 1
-FIRSTBOOT_RECONFIG = 2
-
-KS_MISSING_PROMPT = 0
-KS_MISSING_IGNORE = 1
-
-SELINUX_DISABLED = 0
-SELINUX_ENFORCING = 1
-SELINUX_PERMISSIVE = 2
-
-KS_SCRIPT_PRE = 0
-KS_SCRIPT_POST = 1
-KS_SCRIPT_TRACEBACK = 2
-
-KS_WAIT = 0
-KS_REBOOT = 1
-KS_SHUTDOWN = 2
-
-KS_INSTKEY_SKIP = -99
-
-BOOTPROTO_DHCP = "dhcp"
-BOOTPROTO_BOOTP = "bootp"
-BOOTPROTO_STATIC = "static"
-BOOTPROTO_QUERY = "query"
-BOOTPROTO_IBFT = "ibft"
-
-GROUP_REQUIRED = 0
-GROUP_DEFAULT = 1
-GROUP_ALL = 2
diff --git a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/errors.py b/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/errors.py
deleted file mode 100644
index a234d99d4..000000000
--- a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/errors.py
+++ /dev/null
@@ -1,103 +0,0 @@
-#
-# errors.py: Kickstart error handling.
-#
-# Chris Lumens <clumens@redhat.com>
-#
-# This copyrighted material is made available to anyone wishing to use, modify,
-# copy, or redistribute it subject to the terms and conditions of the GNU
-# General Public License v.2. This program is distributed in the hope that it
-# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
-# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-# See the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc., 51
-# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
-# trademarks that are incorporated in the source code or documentation are not
-# subject to the GNU General Public License and may only be used or replicated
-# with the express permission of Red Hat, Inc.
-#
-"""
-Error handling classes and functions.
-
-This module exports a single function:
-
- formatErrorMsg - Properly formats an error message.
-
-It also exports several exception classes:
-
- KickstartError - A generic exception class.
-
- KickstartParseError - An exception for errors relating to parsing.
-
- KickstartValueError - An exception for errors relating to option
- processing.
-
- KickstartVersionError - An exception for errors relating to unsupported
- syntax versions.
-"""
-import gettext
-_ = lambda x: gettext.ldgettext("pykickstart", x)
-
-def formatErrorMsg(lineno, msg=""):
- """Properly format the error message msg for inclusion in an exception."""
- if msg != "":
- mapping = {"lineno": lineno, "msg": msg}
- return _("The following problem occurred on line %(lineno)s of the kickstart file:\n\n%(msg)s\n") % mapping
- else:
- return _("There was a problem reading from line %s of the kickstart file") % lineno
-
-class KickstartError(Exception):
- """A generic exception class for unspecific error conditions."""
- def __init__(self, val = ""):
- """Create a new KickstartError exception instance with the descriptive
- message val. val should be the return value of formatErrorMsg.
- """
- Exception.__init__(self)
- self.value = val
-
- def __str__ (self):
- return self.value
-
-class KickstartParseError(KickstartError):
- """An exception class for errors when processing the input file, such as
- unknown options, commands, or sections.
- """
- def __init__(self, msg):
- """Create a new KickstartParseError exception instance with the
- descriptive message val. val should be the return value of
- formatErrorMsg.
- """
- KickstartError.__init__(self, msg)
-
- def __str__(self):
- return self.value
-
-class KickstartValueError(KickstartError):
- """An exception class for errors when processing arguments to commands,
- such as too many arguments, too few arguments, or missing required
- arguments.
- """
- def __init__(self, msg):
- """Create a new KickstartValueError exception instance with the
- descriptive message val. val should be the return value of
- formatErrorMsg.
- """
- KickstartError.__init__(self, msg)
-
- def __str__ (self):
- return self.value
-
-class KickstartVersionError(KickstartError):
- """An exception class for errors related to using an incorrect version of
- kickstart syntax.
- """
- def __init__(self, msg):
- """Create a new KickstartVersionError exception instance with the
- descriptive message val. val should be the return value of
- formatErrorMsg.
- """
- KickstartError.__init__(self, msg)
-
- def __str__ (self):
- return self.value
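The errors module removed above wrapped a formatted, line-number-aware message in a small exception hierarchy. A short sketch of how callers used it, against the bundled copy this patch deletes:

from pykickstart.errors import KickstartParseError, formatErrorMsg

# formatErrorMsg() folds the line number into a translated message; the
# exception's __str__ simply returns that string.
try:
    raise KickstartParseError(formatErrorMsg(12, msg="Unknown command: foo"))
except KickstartParseError as exc:
    print(exc)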
diff --git a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/handlers/__init__.py b/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/handlers/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/handlers/__init__.py
+++ /dev/null
diff --git a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/handlers/control.py b/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/handlers/control.py
deleted file mode 100644
index 8dc80d1eb..000000000
--- a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/handlers/control.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#
-# Chris Lumens <clumens@redhat.com>
-#
-# Copyright 2007, 2008, 2009, 2010 Red Hat, Inc.
-#
-# This copyrighted material is made available to anyone wishing to use, modify,
-# copy, or redistribute it subject to the terms and conditions of the GNU
-# General Public License v.2. This program is distributed in the hope that it
-# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
-# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-# See the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc., 51
-# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
-# trademarks that are incorporated in the source code or documentation are not
-# subject to the GNU General Public License and may only be used or replicated
-# with the express permission of Red Hat, Inc.
-#
-from pykickstart.version import *
-from pykickstart.commands import *
-
-# This map is keyed on kickstart syntax version as provided by
-# pykickstart.version. Within each sub-dict is a mapping from command name
-# to the class that handles it. This is an onto mapping - that is, multiple
-# command names can map to the same class. However, the Handler will ensure
-# that only one instance of each class ever exists.
-commandMap = {
- # based on f15
- F16: {
- "bootloader": bootloader.F15_Bootloader,
- "part": partition.F14_Partition,
- "partition": partition.F14_Partition,
- },
-}
-
-# This map is keyed on kickstart syntax version as provided by
-# pykickstart.version. Within each sub-dict is a mapping from a data object
-# name to the class that provides it. This is a bijective mapping - that is,
-# each name maps to exactly one data class and all data classes have a name.
-# More than one instance of each class is allowed to exist, however.
-dataMap = {
- F16: {
- "PartData": partition.F14_PartData,
- },
-}
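commandMap and dataMap above keyed kickstart command names to the handler and data classes for each syntax version; BaseHandler._registerCommands used them to build handler.commands and the per-command attributes. A brief sketch of how that dispatch was exercised, again against the bundled Python 2 copy being deleted here:

from pykickstart.handlers.f16 import F16Handler

handler = F16Handler()
# dispatcher() looks up "part" in handler.commands and calls parse() on the
# registered F14_Partition instance, which appends an F14_PartData object.
handler.dispatcher(["part", "/boot", "--size=1024", "--fstype=ext3"], lineno=1)
for pd in handler.partition.partitions:
    print(pd)    # each data object formats itself as a kickstart "part" line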
diff --git a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/handlers/f16.py b/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/handlers/f16.py
deleted file mode 100644
index 3c52f8d75..000000000
--- a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/handlers/f16.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#
-# Chris Lumens <clumens@redhat.com>
-#
-# Copyright 2011 Red Hat, Inc.
-#
-# This copyrighted material is made available to anyone wishing to use, modify,
-# copy, or redistribute it subject to the terms and conditions of the GNU
-# General Public License v.2. This program is distributed in the hope that it
-# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
-# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-# See the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc., 51
-# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
-# trademarks that are incorporated in the source code or documentation are not
-# subject to the GNU General Public License and may only be used or replicated
-# with the express permission of Red Hat, Inc.
-#
-from pykickstart.base import *
-from pykickstart.version import *
-
-class F16Handler(BaseHandler):
- version = F16
diff --git a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/ko.py b/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/ko.py
deleted file mode 100644
index 1350d19c7..000000000
--- a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/ko.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#
-# Chris Lumens <clumens@redhat.com>
-#
-# Copyright 2009 Red Hat, Inc.
-#
-# This copyrighted material is made available to anyone wishing to use, modify,
-# copy, or redistribute it subject to the terms and conditions of the GNU
-# General Public License v.2. This program is distributed in the hope that it
-# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
-# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-# See the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc., 51
-# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
-# trademarks that are incorporated in the source code or documentation are not
-# subject to the GNU General Public License and may only be used or replicated
-# with the express permission of Red Hat, Inc.
-#
-"""
-Base classes for internal pykickstart use.
-
-The module exports the following important classes:
-
- KickstartObject - The base class for all classes in pykickstart
-"""
-
-class KickstartObject(object):
- """The base class for all other classes in pykickstart."""
- def __init__(self, *args, **kwargs):
- """Create a new KickstartObject instance. All other classes in
- pykickstart should be derived from this one. Instance attributes:
- """
- pass
-
- def __str__(self):
- return ""
diff --git a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/options.py b/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/options.py
deleted file mode 100644
index ebc23eda6..000000000
--- a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/options.py
+++ /dev/null
@@ -1,223 +0,0 @@
-#
-# Chris Lumens <clumens@redhat.com>
-#
-# Copyright 2005, 2006, 2007 Red Hat, Inc.
-#
-# This copyrighted material is made available to anyone wishing to use, modify,
-# copy, or redistribute it subject to the terms and conditions of the GNU
-# General Public License v.2. This program is distributed in the hope that it
-# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
-# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-# See the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc., 51
-# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
-# trademarks that are incorporated in the source code or documentation are not
-# subject to the GNU General Public License and may only be used or replicated
-# with the express permission of Red Hat, Inc.
-#
-"""
-Specialized option handling.
-
-This module exports two classes:
-
- KSOptionParser - A specialized subclass of OptionParser to be used
- in BaseHandler subclasses.
-
- KSOption - A specialized subclass of Option.
-"""
-import warnings
-from copy import copy
-from optparse import *
-
-from constants import *
-from errors import *
-from version import *
-
-import gettext
-_ = lambda x: gettext.ldgettext("pykickstart", x)
-
-class KSOptionParser(OptionParser):
- """A specialized subclass of optparse.OptionParser to handle extra option
- attribute checking, work error reporting into the KickstartParseError
- framework, and to turn off the default help.
- """
- def exit(self, status=0, msg=None):
- pass
-
- def error(self, msg):
- if self.lineno != None:
- raise KickstartParseError, formatErrorMsg(self.lineno, msg=msg)
- else:
- raise KickstartParseError, msg
-
- def keys(self):
- retval = []
-
- for opt in self.option_list:
- if opt not in retval:
- retval.append(opt.dest)
-
- return retval
-
- def _init_parsing_state (self):
- OptionParser._init_parsing_state(self)
- self.option_seen = {}
-
- def check_values (self, values, args):
- def seen(self, option):
- return self.option_seen.has_key(option)
-
- def usedTooNew(self, option):
- return option.introduced and option.introduced > self.version
-
- def usedDeprecated(self, option):
- return option.deprecated
-
- def usedRemoved(self, option):
- return option.removed and option.removed <= self.version
-
- for option in filter(lambda o: isinstance(o, Option), self.option_list):
- if option.required and not seen(self, option):
- raise KickstartValueError, formatErrorMsg(self.lineno, _("Option %s is required") % option)
- elif seen(self, option) and usedTooNew(self, option):
- mapping = {"option": option, "intro": versionToString(option.introduced),
- "version": versionToString(self.version)}
- self.error(_("The %(option)s option was introduced in version %(intro)s, but you are using kickstart syntax version %(version)s.") % mapping)
- elif seen(self, option) and usedRemoved(self, option):
- mapping = {"option": option, "removed": versionToString(option.removed),
- "version": versionToString(self.version)}
-
- if option.removed == self.version:
- self.error(_("The %(option)s option is no longer supported.") % mapping)
- else:
- self.error(_("The %(option)s option was removed in version %(removed)s, but you are using kickstart syntax version %(version)s.") % mapping)
- elif seen(self, option) and usedDeprecated(self, option):
- mapping = {"lineno": self.lineno, "option": option}
- warnings.warn(_("Ignoring deprecated option on line %(lineno)s: The %(option)s option has been deprecated and no longer has any effect. It may be removed from future releases, which will result in a fatal error from kickstart. Please modify your kickstart file to remove this option.") % mapping, DeprecationWarning)
-
- return (values, args)
-
- def parse_args(self, *args, **kwargs):
- if kwargs.has_key("lineno"):
- self.lineno = kwargs.pop("lineno")
-
- return OptionParser.parse_args(self, **kwargs)
-
- def __init__(self, mapping=None, version=None):
- """Create a new KSOptionParser instance. Each KickstartCommand
- subclass should create one instance of KSOptionParser, providing
- at least the lineno attribute. mapping and version are not required.
- Instance attributes:
-
- mapping -- A mapping from option strings to different values.
- version -- The version of the kickstart syntax we are checking
- against.
- """
- OptionParser.__init__(self, option_class=KSOption,
- add_help_option=False,
- conflict_handler="resolve")
- if mapping is None:
- self.map = {}
- else:
- self.map = mapping
-
- self.lineno = None
- self.option_seen = {}
- self.version = version
-
-def _check_ksboolean(option, opt, value):
- if value.lower() in ("on", "yes", "true", "1"):
- return True
- elif value.lower() in ("off", "no", "false", "0"):
- return False
- else:
- mapping = {"opt": opt, "value": value}
- raise OptionValueError(_("Option %(opt)s: invalid boolean value: %(value)r") % mapping)
-
-def _check_string(option, opt, value):
- if len(value) > 2 and value.startswith("--"):
- mapping = {"opt": opt, "value": value}
- raise OptionValueError(_("Option %(opt)s: invalid string value: %(value)r") % mapping)
- else:
- return value
-
-def _check_size(option, opt, value):
- # Former default was MB
- if value.isdigit():
- return int(value) * 1024L
-
- mapping = {"opt": opt, "value": value}
- if not value[:-1].isdigit():
- raise OptionValueError(_("Option %(opt)s: invalid size value: %(value)r") % mapping)
-
- size = int(value[:-1])
- if value.endswith("k") or value.endswith("K"):
- return size
- if value.endswith("M"):
- return size * 1024L
- if value.endswith("G"):
- return size * 1024L * 1024L
- raise OptionValueError(_("Option %(opt)s: invalid size value: %(value)r") % mapping)
-
-# Creates a new Option class that supports several new attributes:
-# - required: any option with this attribute must be supplied or an exception
-# is thrown
-# - introduced: the kickstart syntax version that this option first appeared
-# in - an exception will be raised if the option is used and
-# the specified syntax version is less than the value of this
-# attribute
-# - deprecated: the kickstart syntax version that this option was deprecated
-# in - a DeprecationWarning will be thrown if the option is
-# used and the specified syntax version is greater than the
-# value of this attribute
-# - removed: the kickstart syntax version that this option was removed in - an
-# exception will be raised if the option is used and the specified
-# syntax version is greated than the value of this attribute
-# Also creates a new type:
-# - ksboolean: support various kinds of boolean values on an option
-# And two new actions:
-# - map : allows you to define an opt -> val mapping such that dest gets val
-# when opt is seen
-# - map_extend: allows you to define an opt -> [val1, ... valn] mapping such
-# that dest gets a list of vals built up when opt is seen
-class KSOption (Option):
- ATTRS = Option.ATTRS + ['introduced', 'deprecated', 'removed', 'required']
- ACTIONS = Option.ACTIONS + ("map", "map_extend",)
- STORE_ACTIONS = Option.STORE_ACTIONS + ("map", "map_extend",)
-
- TYPES = Option.TYPES + ("ksboolean", "string", "size")
- TYPE_CHECKER = copy(Option.TYPE_CHECKER)
- TYPE_CHECKER["ksboolean"] = _check_ksboolean
- TYPE_CHECKER["string"] = _check_string
- TYPE_CHECKER["size"] = _check_size
-
- def _check_required(self):
- if self.required and not self.takes_value():
- raise OptionError(_("Required flag set for option that doesn't take a value"), self)
-
- # Make sure _check_required() is called from the constructor!
- CHECK_METHODS = Option.CHECK_METHODS + [_check_required]
-
- def process (self, opt, value, values, parser):
- Option.process(self, opt, value, values, parser)
- parser.option_seen[self] = 1
-
- # Override default take_action method to handle our custom actions.
- def take_action(self, action, dest, opt, value, values, parser):
- if action == "map":
- values.ensure_value(dest, parser.map[opt.lstrip('-')])
- elif action == "map_extend":
- values.ensure_value(dest, []).extend(parser.map[opt.lstrip('-')])
- else:
- Option.take_action(self, action, dest, opt, value, values, parser)
-
- def takes_value(self):
- # Deprecated options don't take a value.
- return Option.takes_value(self) and not self.deprecated
-
- def __init__(self, *args, **kwargs):
- self.deprecated = False
- self.required = False
- Option.__init__(self, *args, **kwargs)
diff --git a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/parser.py b/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/parser.py
deleted file mode 100644
index 9c9674bf7..000000000
--- a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/parser.py
+++ /dev/null
@@ -1,619 +0,0 @@
-#
-# parser.py: Kickstart file parser.
-#
-# Chris Lumens <clumens@redhat.com>
-#
-# Copyright 2005, 2006, 2007, 2008, 2011 Red Hat, Inc.
-#
-# This copyrighted material is made available to anyone wishing to use, modify,
-# copy, or redistribute it subject to the terms and conditions of the GNU
-# General Public License v.2. This program is distributed in the hope that it
-# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
-# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-# See the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc., 51
-# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
-# trademarks that are incorporated in the source code or documentation are not
-# subject to the GNU General Public License and may only be used or replicated
-# with the express permission of Red Hat, Inc.
-#
-"""
-Main kickstart file processing module.
-
-This module exports several important classes:
-
- Script - Representation of a single %pre, %post, or %traceback script.
-
- Packages - Representation of the %packages section.
-
- KickstartParser - The kickstart file parser state machine.
-"""
-
-from collections import Iterator
-import os
-import shlex
-import sys
-import tempfile
-from copy import copy
-from optparse import *
-
-import constants
-from errors import KickstartError, KickstartParseError, KickstartValueError, formatErrorMsg
-from ko import KickstartObject
-from sections import *
-import version
-
-import gettext
-_ = lambda x: gettext.ldgettext("pykickstart", x)
-
-STATE_END = "end"
-STATE_COMMANDS = "commands"
-
-ver = version.DEVEL
-
-
-class PutBackIterator(Iterator):
- def __init__(self, iterable):
- self._iterable = iter(iterable)
- self._buf = None
-
- def __iter__(self):
- return self
-
- def put(self, s):
- self._buf = s
-
- def next(self):
- if self._buf:
- retval = self._buf
- self._buf = None
- return retval
- else:
- return self._iterable.next()
-
-###
-### SCRIPT HANDLING
-###
-class Script(KickstartObject):
- """A class representing a single kickstart script. If functionality beyond
- just a data representation is needed (for example, a run method in
- anaconda), Script may be subclassed. Although a run method is not
- provided, most of the attributes of Script have to do with running the
- script. Instances of Script are held in a list by the Version object.
- """
- def __init__(self, script, *args , **kwargs):
- """Create a new Script instance. Instance attributes:
-
- errorOnFail -- If execution of the script fails, should anaconda
- stop, display an error, and then reboot without
- running any other scripts?
- inChroot -- Does the script execute in anaconda's chroot
- environment or not?
- interp -- The program that should be used to interpret this
- script.
- lineno -- The line number this script starts on.
- logfile -- Where all messages from the script should be logged.
- script -- A string containing all the lines of the script.
- type -- The type of the script, which can be KS_SCRIPT_* from
- pykickstart.constants.
- """
- KickstartObject.__init__(self, *args, **kwargs)
- self.script = "".join(script)
-
- self.interp = kwargs.get("interp", "/bin/sh")
- self.inChroot = kwargs.get("inChroot", False)
- self.lineno = kwargs.get("lineno", None)
- self.logfile = kwargs.get("logfile", None)
- self.errorOnFail = kwargs.get("errorOnFail", False)
- self.type = kwargs.get("type", constants.KS_SCRIPT_PRE)
-
- def __str__(self):
- """Return a string formatted for output to a kickstart file."""
- retval = ""
-
- if self.type == constants.KS_SCRIPT_PRE:
- retval += '\n%pre'
- elif self.type == constants.KS_SCRIPT_POST:
- retval += '\n%post'
- elif self.type == constants.KS_SCRIPT_TRACEBACK:
- retval += '\n%traceback'
-
- if self.interp != "/bin/sh" and self.interp != "":
- retval += " --interpreter=%s" % self.interp
- if self.type == constants.KS_SCRIPT_POST and not self.inChroot:
- retval += " --nochroot"
- if self.logfile != None:
- retval += " --logfile %s" % self.logfile
- if self.errorOnFail:
- retval += " --erroronfail"
-
- if self.script.endswith("\n"):
- if ver >= version.F8:
- return retval + "\n%s%%end\n" % self.script
- else:
- return retval + "\n%s\n" % self.script
- else:
- if ver >= version.F8:
- return retval + "\n%s\n%%end\n" % self.script
- else:
- return retval + "\n%s\n" % self.script
-
-
-##
-## PACKAGE HANDLING
-##
-class Group:
- """A class representing a single group in the %packages section."""
- def __init__(self, name="", include=constants.GROUP_DEFAULT):
- """Create a new Group instance. Instance attributes:
-
- name -- The group's identifier
- include -- The level of how much of the group should be included.
- Values can be GROUP_* from pykickstart.constants.
- """
- self.name = name
- self.include = include
-
- def __str__(self):
- """Return a string formatted for output to a kickstart file."""
- if self.include == constants.GROUP_REQUIRED:
- return "@%s --nodefaults" % self.name
- elif self.include == constants.GROUP_ALL:
- return "@%s --optional" % self.name
- else:
- return "@%s" % self.name
-
- def __cmp__(self, other):
- if self.name < other.name:
- return -1
- elif self.name > other.name:
- return 1
- return 0
-
-class Packages(KickstartObject):
- """A class representing the %packages section of the kickstart file."""
- def __init__(self, *args, **kwargs):
- """Create a new Packages instance. Instance attributes:
-
- addBase -- Should the Base group be installed even if it is
- not specified?
- default -- Should the default package set be selected?
- excludedList -- A list of all the packages marked for exclusion in
- the %packages section, without the leading minus
- symbol.
- excludeDocs -- Should documentation in each package be excluded?
- groupList -- A list of Group objects representing all the groups
- specified in the %packages section. Names will be
- stripped of the leading @ symbol.
- excludedGroupList -- A list of Group objects representing all the
- groups specified for removal in the %packages
- section. Names will be stripped of the leading
- -@ symbols.
- handleMissing -- If unknown packages are specified in the %packages
- section, should it be ignored or not? Values can
- be KS_MISSING_* from pykickstart.constants.
- packageList -- A list of all the packages specified in the
- %packages section.
- instLangs -- A list of languages to install.
- """
- KickstartObject.__init__(self, *args, **kwargs)
-
- self.addBase = True
- self.default = False
- self.excludedList = []
- self.excludedGroupList = []
- self.excludeDocs = False
- self.groupList = []
- self.handleMissing = constants.KS_MISSING_PROMPT
- self.packageList = []
- self.instLangs = None
-
- def __str__(self):
- """Return a string formatted for output to a kickstart file."""
- pkgs = ""
-
- if not self.default:
- grps = self.groupList
- grps.sort()
- for grp in grps:
- pkgs += "%s\n" % grp.__str__()
-
- p = self.packageList
- p.sort()
- for pkg in p:
- pkgs += "%s\n" % pkg
-
- grps = self.excludedGroupList
- grps.sort()
- for grp in grps:
- pkgs += "-%s\n" % grp.__str__()
-
- p = self.excludedList
- p.sort()
- for pkg in p:
- pkgs += "-%s\n" % pkg
-
- if pkgs == "":
- return ""
-
- retval = "\n%packages"
-
- if self.default:
- retval += " --default"
- if self.excludeDocs:
- retval += " --excludedocs"
- if not self.addBase:
- retval += " --nobase"
- if self.handleMissing == constants.KS_MISSING_IGNORE:
- retval += " --ignoremissing"
- if self.instLangs:
- retval += " --instLangs=%s" % self.instLangs
-
- if ver >= version.F8:
- return retval + "\n" + pkgs + "\n%end\n"
- else:
- return retval + "\n" + pkgs + "\n"
-
- def _processGroup (self, line):
- op = OptionParser()
- op.add_option("--nodefaults", action="store_true", default=False)
- op.add_option("--optional", action="store_true", default=False)
-
- (opts, extra) = op.parse_args(args=line.split())
-
- if opts.nodefaults and opts.optional:
- raise KickstartValueError, _("Group cannot specify both --nodefaults and --optional")
-
- # If the group name has spaces in it, we have to put it back together
- # now.
- grp = " ".join(extra)
-
- if opts.nodefaults:
- self.groupList.append(Group(name=grp, include=constants.GROUP_REQUIRED))
- elif opts.optional:
- self.groupList.append(Group(name=grp, include=constants.GROUP_ALL))
- else:
- self.groupList.append(Group(name=grp, include=constants.GROUP_DEFAULT))
-
- def add (self, pkgList):
- """Given a list of lines from the input file, strip off any leading
- symbols and add the result to the appropriate list.
- """
- existingExcludedSet = set(self.excludedList)
- existingPackageSet = set(self.packageList)
- newExcludedSet = set()
- newPackageSet = set()
-
- excludedGroupList = []
-
- for pkg in pkgList:
- stripped = pkg.strip()
-
- if stripped[0] == "@":
- self._processGroup(stripped[1:])
- elif stripped[0] == "-":
- if stripped[1] == "@":
- excludedGroupList.append(Group(name=stripped[2:]))
- else:
- newExcludedSet.add(stripped[1:])
- else:
- newPackageSet.add(stripped)
-
- # Groups have to be excluded in two different ways (note: can't use
- # sets here because we have to store objects):
- excludedGroupNames = map(lambda g: g.name, excludedGroupList)
-
- # First, an excluded group may be cancelling out a previously given
- # one. This is often the case when using %include. So there we should
- # just remove the group from the list.
- self.groupList = filter(lambda g: g.name not in excludedGroupNames, self.groupList)
-
- # Second, the package list could have included globs which are not
- # processed by pykickstart. In that case we need to preserve a list of
- # excluded groups so whatever tool doing package/group installation can
- # take appropriate action.
- self.excludedGroupList.extend(excludedGroupList)
-
- existingPackageSet = (existingPackageSet - newExcludedSet) | newPackageSet
- existingExcludedSet = (existingExcludedSet - existingPackageSet) | newExcludedSet
-
- self.packageList = list(existingPackageSet)
- self.excludedList = list(existingExcludedSet)
-
-
-###
-### PARSER
-###
-class KickstartParser:
- """The kickstart file parser class as represented by a basic state
- machine. To create a specialized parser, make a subclass and override
- any of the methods you care about. Methods that don't need to do
- anything may just pass. However, _stateMachine should never be
- overridden.
- """
- def __init__ (self, handler, followIncludes=True, errorsAreFatal=True,
- missingIncludeIsFatal=True):
- """Create a new KickstartParser instance. Instance attributes:
-
- errorsAreFatal -- Should errors cause processing to halt, or
- just print a message to the screen? This
- is most useful for writing syntax checkers
- that may want to continue after an error is
- encountered.
- followIncludes -- If %include is seen, should the included
- file be checked as well or skipped?
- handler -- An instance of a BaseHandler subclass. If
- None, the input file will still be parsed
- but no data will be saved and no commands
- will be executed.
- missingIncludeIsFatal -- Should missing include files be fatal, even
- if errorsAreFatal is False?
- """
- self.errorsAreFatal = errorsAreFatal
- self.followIncludes = followIncludes
- self.handler = handler
- self.currentdir = {}
- self.missingIncludeIsFatal = missingIncludeIsFatal
-
- self._state = STATE_COMMANDS
- self._includeDepth = 0
- self._line = ""
-
- self.version = self.handler.version
-
- global ver
- ver = self.version
-
- self._sections = {}
- self.setupSections()
-
- def _reset(self):
- """Reset the internal variables of the state machine for a new kickstart file."""
- self._state = STATE_COMMANDS
- self._includeDepth = 0
-
- def getSection(self, s):
- """Return a reference to the requested section (s must start with '%'s),
- or raise KeyError if not found.
- """
- return self._sections[s]
-
- def handleCommand (self, lineno, args):
- """Given the list of command and arguments, call the Version's
- dispatcher method to handle the command. Returns the command or
- data object returned by the dispatcher. This method may be
- overridden in a subclass if necessary.
- """
- if self.handler:
- self.handler.currentCmd = args[0]
- self.handler.currentLine = self._line
- retval = self.handler.dispatcher(args, lineno)
-
- return retval
-
- def registerSection(self, obj):
- """Given an instance of a Section subclass, register the new section
- with the parser. Calling this method means the parser will
- recognize your new section and dispatch into the given object to
- handle it.
- """
- if not obj.sectionOpen:
- raise TypeError, "no sectionOpen given for section %s" % obj
-
- if not obj.sectionOpen.startswith("%"):
- raise TypeError, "section %s tag does not start with a %%" % obj.sectionOpen
-
- self._sections[obj.sectionOpen] = obj
-
- def _finalize(self, obj):
- """Called at the close of a kickstart section to take any required
- actions. Internally, this is used to add scripts once we have the
- whole body read.
- """
- obj.finalize()
- self._state = STATE_COMMANDS
-
- def _handleSpecialComments(self, line):
- """Kickstart recognizes a couple special comments."""
- if self._state != STATE_COMMANDS:
- return
-
- # Save the platform for s-c-kickstart.
- if line[:10] == "#platform=":
- self.handler.platform = self._line[11:]
-
- def _readSection(self, lineIter, lineno):
- obj = self._sections[self._state]
-
- while True:
- try:
- line = lineIter.next()
- if line == "":
- # This section ends at the end of the file.
- if self.version >= version.F8:
- raise KickstartParseError, formatErrorMsg(lineno, msg=_("Section does not end with %%end."))
-
- self._finalize(obj)
- except StopIteration:
- break
-
- lineno += 1
-
- # Throw away blank lines and comments, unless the section wants all
- # lines.
- if self._isBlankOrComment(line) and not obj.allLines:
- continue
-
- if line.startswith("%"):
- args = shlex.split(line)
-
- if args and args[0] == "%end":
- # This is a properly terminated section.
- self._finalize(obj)
- break
- elif args and args[0] == "%ksappend":
- continue
- elif args and (self._validState(args[0]) or args[0] in ["%include", "%ksappend"]):
- # This is an unterminated section.
- if self.version >= version.F8:
- raise KickstartParseError, formatErrorMsg(lineno, msg=_("Section does not end with %%end."))
-
- # Finish up. We do not process the header here because
- # kicking back out to STATE_COMMANDS will ensure that happens.
- lineIter.put(line)
- lineno -= 1
- self._finalize(obj)
- break
- else:
- # This is just a line within a section. Pass it off to whatever
- # section handles it.
- obj.handleLine(line)
-
- return lineno
-
- def _validState(self, st):
- """Is the given section tag one that has been registered with the parser?"""
- return st in self._sections.keys()
-
- def _tryFunc(self, fn):
- """Call the provided function (which doesn't take any arguments) and
- do the appropriate error handling. If errorsAreFatal is False, this
- function will just print the exception and keep going.
- """
- try:
- fn()
- except Exception, msg:
- if self.errorsAreFatal:
- raise
- else:
- print msg
-
- def _isBlankOrComment(self, line):
- return line.isspace() or line == "" or line.lstrip()[0] == '#'
-
- def _stateMachine(self, lineIter):
- # For error reporting.
- lineno = 0
-
- while True:
- # Get the next line out of the file, quitting if this is the last line.
- try:
- self._line = lineIter.next()
- if self._line == "":
- break
- except StopIteration:
- break
-
- lineno += 1
-
- # Eliminate blank lines, whitespace-only lines, and comments.
- if self._isBlankOrComment(self._line):
- self._handleSpecialComments(self._line)
- continue
-
- # Remove any end-of-line comments.
- sanitized = self._line.split("#")[0]
-
- # Then split the line.
- args = shlex.split(sanitized.rstrip())
-
- if args[0] == "%include":
- # This case comes up primarily in ksvalidator.
- if not self.followIncludes:
- continue
-
- if len(args) == 1 or not args[1]:
- raise KickstartParseError, formatErrorMsg(lineno)
-
- self._includeDepth += 1
-
- try:
- self.readKickstart(args[1], reset=False)
- except KickstartError:
- # Handle the include file being provided over the
- # network in a %pre script. This case comes up in the
- # early parsing in anaconda.
- if self.missingIncludeIsFatal:
- raise
-
- self._includeDepth -= 1
- continue
-
- # Now on to the main event.
- if self._state == STATE_COMMANDS:
- if args[0] == "%ksappend":
- # This is handled by the preprocess* functions, so continue.
- continue
- elif args[0][0] == '%':
- # This is the beginning of a new section. Handle its header
- # here.
- newSection = args[0]
- if not self._validState(newSection):
- raise KickstartParseError, formatErrorMsg(lineno, msg=_("Unknown kickstart section: %s" % newSection))
-
- self._state = newSection
- obj = self._sections[self._state]
- self._tryFunc(lambda: obj.handleHeader(lineno, args))
-
- # This will handle all section processing, kicking us back
- # out to STATE_COMMANDS at the end with the current line
- # being the next section header, etc.
- lineno = self._readSection(lineIter, lineno)
- else:
- # This is a command in the command section. Dispatch to it.
- self._tryFunc(lambda: self.handleCommand(lineno, args))
- elif self._state == STATE_END:
- break
-
- def readKickstartFromString (self, s, reset=True):
- """Process a kickstart file, provided as the string str."""
- if reset:
- self._reset()
-
- # Add a "" to the end of the list so the string reader acts like the
- # file reader and we only get StopIteration when we're after the final
- # line of input.
- i = PutBackIterator(s.splitlines(True) + [""])
- self._stateMachine (i)
-
- def readKickstart(self, f, reset=True):
- """Process a kickstart file, given by the filename f."""
- if reset:
- self._reset()
-
- # an %include might not specify a full path. if we don't try to figure
- # out what the path should have been, then we're unable to find it
- # requiring full path specification, though, sucks. so let's make
- # the reading "smart" by keeping track of what the path is at each
- # include depth.
- if not os.path.exists(f):
- if self.currentdir.has_key(self._includeDepth - 1):
- if os.path.exists(os.path.join(self.currentdir[self._includeDepth - 1], f)):
- f = os.path.join(self.currentdir[self._includeDepth - 1], f)
-
- cd = os.path.dirname(f)
- if not cd.startswith("/"):
- cd = os.path.abspath(cd)
- self.currentdir[self._includeDepth] = cd
-
- try:
- s = file(f).read()
- except IOError, e:
- raise KickstartError, formatErrorMsg(0, msg=_("Unable to open input kickstart file: %s") % e.strerror)
-
- self.readKickstartFromString(s, reset=False)
-
- def setupSections(self):
- """Install the sections all kickstart files support. You may override
- this method in a subclass, but should avoid doing so unless you know
- what you're doing.
- """
- self._sections = {}
-
- # Install the sections all kickstart files support.
- self.registerSection(PreScriptSection(self.handler, dataObj=Script))
- self.registerSection(PostScriptSection(self.handler, dataObj=Script))
- self.registerSection(TracebackScriptSection(self.handler, dataObj=Script))
- self.registerSection(PackageSection(self.handler))
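The parser removed above was normally driven by building a handler for a given syntax version and feeding it a kickstart file, which is how the wic.kickstart.read_kickstart helper (also deleted further down in this change) used it. A minimal sketch of that older usage (Python 2, as in the surrounding code; the input file name is illustrative):

    import pykickstart.parser as ksparser
    import pykickstart.version as ksversion

    # Build a handler for the newest supported syntax and parse a file with it.
    handler = ksversion.makeVersion(ksversion.DEVEL)
    parser = ksparser.KickstartParser(handler, errorsAreFatal=True)
    parser.readKickstart("image.ks")              # illustrative input file

    # %pre/%post/%traceback bodies are collected on the handler as Script objects.
    for script in handler.scripts:
        print script.lineno, script.interp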
diff --git a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/sections.py b/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/sections.py
deleted file mode 100644
index 44df856b8..000000000
--- a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/sections.py
+++ /dev/null
@@ -1,244 +0,0 @@
-#
-# sections.py: Kickstart file sections.
-#
-# Chris Lumens <clumens@redhat.com>
-#
-# Copyright 2011 Red Hat, Inc.
-#
-# This copyrighted material is made available to anyone wishing to use, modify,
-# copy, or redistribute it subject to the terms and conditions of the GNU
-# General Public License v.2. This program is distributed in the hope that it
-# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
-# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-# See the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc., 51
-# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
-# trademarks that are incorporated in the source code or documentation are not
-# subject to the GNU General Public License and may only be used or replicated
-# with the express permission of Red Hat, Inc.
-#
-"""
-This module exports the classes that define a section of a kickstart file. A
-section is a chunk of the file starting with a %tag and ending with a %end.
-Examples of sections include %packages, %pre, and %post.
-
-You may use this module to define your own custom sections which will be
-treated just the same as a predefined one by the kickstart parser. All that
-is necessary is to create a new subclass of Section and call
-parser.registerSection with an instance of your new class.
-"""
-from constants import *
-from options import KSOptionParser
-from version import *
-
-class Section(object):
- """The base class for defining kickstart sections. You are free to
- subclass this as appropriate.
-
- Class attributes:
-
- allLines -- Does this section require the parser to call handleLine
- for every line in the section, even blanks and comments?
- sectionOpen -- The string that denotes the start of this section. You
- must start your tag with a percent sign.
- timesSeen -- This attribute is for informational purposes only. It is
- incremented every time handleHeader is called to keep
- track of the number of times a section of this type is
- seen.
- """
- allLines = False
- sectionOpen = ""
- timesSeen = 0
-
- def __init__(self, handler, **kwargs):
- """Create a new Script instance. At the least, you must pass in an
- instance of a baseHandler subclass.
-
- Valid kwargs:
-
- dataObj --
- """
- self.handler = handler
-
- self.version = self.handler.version
-
- self.dataObj = kwargs.get("dataObj", None)
-
- def finalize(self):
- """This method is called when the %end tag for a section is seen. It
- is not required to be provided.
- """
- pass
-
- def handleLine(self, line):
- """This method is called for every line of a section. Take whatever
- action is appropriate. While this method is not required to be
- provided, not providing it does not make a whole lot of sense.
-
- Arguments:
-
- line -- The complete line, with any trailing newline.
- """
- pass
-
- def handleHeader(self, lineno, args):
- """This method is called when the opening tag for a section is seen.
- Not all sections will need this method, though all provided with
- kickstart include one.
-
- Arguments:
-
- args -- A list of all strings passed as arguments to the section
- opening tag.
- """
- self.timesSeen += 1
-
-class NullSection(Section):
- """This defines a section that pykickstart will recognize but do nothing
- with. If the parser runs across a %section that has no object registered,
- it will raise an error. Sometimes, you may want to simply ignore those
- sections instead. This class is useful for that purpose.
- """
- def __init__(self, *args, **kwargs):
- """Create a new NullSection instance. You must pass a sectionOpen
- parameter (including a leading '%') for the section you wish to
- ignore.
- """
- Section.__init__(self, *args, **kwargs)
- self.sectionOpen = kwargs.get("sectionOpen")
-
-class ScriptSection(Section):
- allLines = True
-
- def __init__(self, *args, **kwargs):
- Section.__init__(self, *args, **kwargs)
- self._script = {}
- self._resetScript()
-
- def _getParser(self):
- op = KSOptionParser(self.version)
- op.add_option("--erroronfail", dest="errorOnFail", action="store_true",
- default=False)
- op.add_option("--interpreter", dest="interpreter", default="/bin/sh")
- op.add_option("--log", "--logfile", dest="log")
- return op
-
- def _resetScript(self):
- self._script = {"interp": "/bin/sh", "log": None, "errorOnFail": False,
- "lineno": None, "chroot": False, "body": []}
-
- def handleLine(self, line):
- self._script["body"].append(line)
-
- def finalize(self):
- if " ".join(self._script["body"]).strip() == "":
- return
-
- kwargs = {"interp": self._script["interp"],
- "inChroot": self._script["chroot"],
- "lineno": self._script["lineno"],
- "logfile": self._script["log"],
- "errorOnFail": self._script["errorOnFail"],
- "type": self._script["type"]}
-
- s = self.dataObj (self._script["body"], **kwargs)
- self._resetScript()
-
- if self.handler:
- self.handler.scripts.append(s)
-
- def handleHeader(self, lineno, args):
- """Process the arguments to a %pre/%post/%traceback header for later
- setting on a Script instance once the end of the script is found.
- This method may be overridden in a subclass if necessary.
- """
- Section.handleHeader(self, lineno, args)
- op = self._getParser()
-
- (opts, extra) = op.parse_args(args=args[1:], lineno=lineno)
-
- self._script["interp"] = opts.interpreter
- self._script["lineno"] = lineno
- self._script["log"] = opts.log
- self._script["errorOnFail"] = opts.errorOnFail
- if hasattr(opts, "nochroot"):
- self._script["chroot"] = not opts.nochroot
-
-class PreScriptSection(ScriptSection):
- sectionOpen = "%pre"
-
- def _resetScript(self):
- ScriptSection._resetScript(self)
- self._script["type"] = KS_SCRIPT_PRE
-
-class PostScriptSection(ScriptSection):
- sectionOpen = "%post"
-
- def _getParser(self):
- op = ScriptSection._getParser(self)
- op.add_option("--nochroot", dest="nochroot", action="store_true",
- default=False)
- return op
-
- def _resetScript(self):
- ScriptSection._resetScript(self)
- self._script["chroot"] = True
- self._script["type"] = KS_SCRIPT_POST
-
-class TracebackScriptSection(ScriptSection):
- sectionOpen = "%traceback"
-
- def _resetScript(self):
- ScriptSection._resetScript(self)
- self._script["type"] = KS_SCRIPT_TRACEBACK
-
-class PackageSection(Section):
- sectionOpen = "%packages"
-
- def handleLine(self, line):
- if not self.handler:
- return
-
- (h, s, t) = line.partition('#')
- line = h.rstrip()
-
- self.handler.packages.add([line])
-
- def handleHeader(self, lineno, args):
- """Process the arguments to the %packages header and set attributes
- on the Version's Packages instance appropriate. This method may be
- overridden in a subclass if necessary.
- """
- Section.handleHeader(self, lineno, args)
- op = KSOptionParser(version=self.version)
- op.add_option("--excludedocs", dest="excludedocs", action="store_true",
- default=False)
- op.add_option("--ignoremissing", dest="ignoremissing",
- action="store_true", default=False)
- op.add_option("--nobase", dest="nobase", action="store_true",
- default=False)
- op.add_option("--ignoredeps", dest="resolveDeps", action="store_false",
- deprecated=FC4, removed=F9)
- op.add_option("--resolvedeps", dest="resolveDeps", action="store_true",
- deprecated=FC4, removed=F9)
- op.add_option("--default", dest="defaultPackages", action="store_true",
- default=False, introduced=F7)
- op.add_option("--instLangs", dest="instLangs", type="string",
- default="", introduced=F9)
-
- (opts, extra) = op.parse_args(args=args[1:], lineno=lineno)
-
- self.handler.packages.excludeDocs = opts.excludedocs
- self.handler.packages.addBase = not opts.nobase
- if opts.ignoremissing:
- self.handler.packages.handleMissing = KS_MISSING_IGNORE
- else:
- self.handler.packages.handleMissing = KS_MISSING_PROMPT
-
- if opts.defaultPackages:
- self.handler.packages.default = True
-
- if opts.instLangs:
- self.handler.packages.instLangs = opts.instLangs
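The module docstring above describes extending the parser with custom sections by subclassing Section and registering an instance through parser.registerSection. A minimal sketch of that pattern (Python 2; the %addon tag, the attribute name and the parser variable are illustrative only):

    from pykickstart.sections import Section

    class AddonSection(Section):
        sectionOpen = "%addon"                 # the tag must start with '%'

        def handleLine(self, line):
            # Collect the raw lines seen between %addon and %end.
            if self.handler:
                self.handler.addons = getattr(self.handler, "addons", []) + [line]

    # 'parser' is a KickstartParser instance, as in the earlier sketch.
    parser.registerSection(AddonSection(parser.handler))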
diff --git a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/version.py b/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/version.py
deleted file mode 100644
index 8a8e6aad2..000000000
--- a/yocto-poky/scripts/lib/wic/3rdparty/pykickstart/version.py
+++ /dev/null
@@ -1,168 +0,0 @@
-#
-# Chris Lumens <clumens@redhat.com>
-#
-# Copyright 2006, 2007, 2008, 2009, 2010 Red Hat, Inc.
-#
-# This copyrighted material is made available to anyone wishing to use, modify,
-# copy, or redistribute it subject to the terms and conditions of the GNU
-# General Public License v.2. This program is distributed in the hope that it
-# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
-# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-# See the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc., 51
-# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
-# trademarks that are incorporated in the source code or documentation are not
-# subject to the GNU General Public License and may only be used or replicated
-# with the express permission of Red Hat, Inc.
-#
-"""
-Methods for working with kickstart versions.
-
-This module defines several symbolic constants that specify kickstart syntax
-versions. Each version corresponds roughly to one release of Red Hat Linux,
-Red Hat Enterprise Linux, or Fedora Core as these are where most syntax
-changes take place.
-
-This module also exports several functions:
-
- makeVersion - Given a version number, return an instance of the
- matching handler class.
-
- returnClassForVersion - Given a version number, return the matching
- handler class. This does not return an
- instance of that class, however.
-
- stringToVersion - Convert a string representation of a version number
- into the symbolic constant.
-
- versionToString - Perform the reverse mapping.
-
- versionFromFile - Read a kickstart file and determine the version of
- syntax it uses. This requires the kickstart file to
- have a version= comment in it.
-"""
-import imputil, re, sys
-
-import gettext
-_ = lambda x: gettext.ldgettext("pykickstart", x)
-
-from pykickstart.errors import KickstartVersionError
-
-# Symbolic names for internal version numbers.
-RHEL3 = 900
-FC3 = 1000
-RHEL4 = 1100
-FC4 = 2000
-FC5 = 3000
-FC6 = 4000
-RHEL5 = 4100
-F7 = 5000
-F8 = 6000
-F9 = 7000
-F10 = 8000
-F11 = 9000
-F12 = 10000
-F13 = 11000
-RHEL6 = 11100
-F14 = 12000
-F15 = 13000
-F16 = 14000
-
-# This always points at the latest version and is the default.
-DEVEL = F16
-
-# A one-to-one mapping from string representations to version numbers.
-versionMap = {
- "DEVEL": DEVEL,
- "FC3": FC3, "FC4": FC4, "FC5": FC5, "FC6": FC6, "F7": F7, "F8": F8,
- "F9": F9, "F10": F10, "F11": F11, "F12": F12, "F13": F13,
- "F14": F14, "F15": F15, "F16": F16,
- "RHEL3": RHEL3, "RHEL4": RHEL4, "RHEL5": RHEL5, "RHEL6": RHEL6
-}
-
-def stringToVersion(s):
- """Convert string into one of the provided version constants. Raises
- KickstartVersionError if string does not match anything.
- """
- # First try these short forms.
- try:
- return versionMap[s.upper()]
- except KeyError:
- pass
-
- # Now try the Fedora versions.
- m = re.match("^fedora.* (\d+)$", s, re.I)
-
- if m and m.group(1):
- if versionMap.has_key("FC" + m.group(1)):
- return versionMap["FC" + m.group(1)]
- elif versionMap.has_key("F" + m.group(1)):
- return versionMap["F" + m.group(1)]
- else:
- raise KickstartVersionError(_("Unsupported version specified: %s") % s)
-
- # Now try the RHEL versions.
- m = re.match("^red hat enterprise linux.* (\d+)([\.\d]*)$", s, re.I)
-
- if m and m.group(1):
- if versionMap.has_key("RHEL" + m.group(1)):
- return versionMap["RHEL" + m.group(1)]
- else:
- raise KickstartVersionError(_("Unsupported version specified: %s") % s)
-
- # If nothing else worked, we're out of options.
- raise KickstartVersionError(_("Unsupported version specified: %s") % s)
-
-def versionToString(version, skipDevel=False):
- """Convert version into a string representation of the version number.
- This is the reverse operation of stringToVersion. Raises
- KickstartVersionError if version does not match anything.
- """
- if not skipDevel and version == versionMap["DEVEL"]:
- return "DEVEL"
-
- for (key, val) in versionMap.iteritems():
- if key == "DEVEL":
- continue
- elif val == version:
- return key
-
- raise KickstartVersionError(_("Unsupported version specified: %s") % version)
-
-def returnClassForVersion(version=DEVEL):
- """Return the class of the syntax handler for version. version can be
- either a string or the matching constant. Raises KickstartValueError
- if version does not match anything.
- """
- try:
- version = int(version)
- module = "%s" % versionToString(version, skipDevel=True)
- except ValueError:
- module = "%s" % version
- version = stringToVersion(version)
-
- module = module.lower()
-
- try:
- import pykickstart.handlers
- sys.path.extend(pykickstart.handlers.__path__)
- found = imputil.imp.find_module(module)
- loaded = imputil.imp.load_module(module, found[0], found[1], found[2])
-
- for (k, v) in loaded.__dict__.iteritems():
- if k.lower().endswith("%shandler" % module):
- return v
- except:
- raise KickstartVersionError(_("Unsupported version specified: %s") % version)
-
-def makeVersion(version=DEVEL):
- """Return a new instance of the syntax handler for version. version can be
- either a string or the matching constant. This function is useful for
- standalone programs which just need to handle a specific version of
- kickstart syntax (as provided by a command line argument, for example)
- and need to instantiate the correct object.
- """
- cl = returnClassForVersion(version)
- return cl()
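The docstring above lists the version helpers exported by this module; a short sketch of the mapping they implement, based on the constants defined in the file (Python 2, assuming the module and its handlers are importable):

    from pykickstart import version

    assert version.stringToVersion("F15") == version.F15
    assert version.stringToVersion("Fedora 15") == version.F15    # long form, matched by regex
    assert version.versionToString(version.F15) == "F15"
    assert version.versionToString(version.DEVEL) == "DEVEL"      # DEVEL aliases the newest release

    handler = version.makeVersion()    # handler instance for the default (DEVEL) syntax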
diff --git a/yocto-poky/scripts/lib/wic/canned-wks/common.wks.inc b/yocto-poky/scripts/lib/wic/canned-wks/common.wks.inc
new file mode 100644
index 000000000..5cf2fd1f3
--- /dev/null
+++ b/yocto-poky/scripts/lib/wic/canned-wks/common.wks.inc
@@ -0,0 +1,3 @@
+# This file is included into 3 canned wks files from this directory
+part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024
+part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024
diff --git a/yocto-poky/scripts/lib/wic/canned-wks/directdisk-bootloader-config.cfg b/yocto-poky/scripts/lib/wic/canned-wks/directdisk-bootloader-config.cfg
new file mode 100644
index 000000000..a16bd6ac6
--- /dev/null
+++ b/yocto-poky/scripts/lib/wic/canned-wks/directdisk-bootloader-config.cfg
@@ -0,0 +1,11 @@
+# This is an example configuration file for syslinux.
+PROMPT 0
+TIMEOUT 10
+
+ALLOWOPTIONS 1
+SERIAL 0 115200
+
+DEFAULT boot
+LABEL boot
+KERNEL /vmlinuz
+APPEND label=boot root=/dev/sda2 rootwait rootfstype=ext4 video=vesafb vga=0x318 console=tty0
diff --git a/yocto-poky/scripts/lib/wic/canned-wks/directdisk-bootloader-config.wks b/yocto-poky/scripts/lib/wic/canned-wks/directdisk-bootloader-config.wks
new file mode 100644
index 000000000..3529e05c8
--- /dev/null
+++ b/yocto-poky/scripts/lib/wic/canned-wks/directdisk-bootloader-config.wks
@@ -0,0 +1,8 @@
+# short-description: Create a 'pcbios' direct disk image with custom bootloader config
+# long-description: Creates a partitioned legacy BIOS disk image that the user
+# can directly dd to boot media. The bootloader configuration source is a user file.
+
+include common.wks.inc
+
+bootloader --configfile="directdisk-bootloader-config.cfg"
+
diff --git a/yocto-poky/scripts/lib/wic/canned-wks/directdisk.wks b/yocto-poky/scripts/lib/wic/canned-wks/directdisk.wks
index af4c9eada..6db74a78b 100644
--- a/yocto-poky/scripts/lib/wic/canned-wks/directdisk.wks
+++ b/yocto-poky/scripts/lib/wic/canned-wks/directdisk.wks
@@ -2,9 +2,7 @@
# long-description: Creates a partitioned legacy BIOS disk image that the user
# can directly dd to boot media.
-
-part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024
-part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024
+include common.wks.inc
bootloader --timeout=0 --append="rootwait rootfstype=ext4 video=vesafb vga=0x318 console=tty0"
diff --git a/yocto-poky/scripts/lib/wic/canned-wks/qemux86-directdisk.wks b/yocto-poky/scripts/lib/wic/canned-wks/qemux86-directdisk.wks
index 8fc38b54d..a6518a0f4 100644
--- a/yocto-poky/scripts/lib/wic/canned-wks/qemux86-directdisk.wks
+++ b/yocto-poky/scripts/lib/wic/canned-wks/qemux86-directdisk.wks
@@ -2,9 +2,7 @@
# long-description: Creates a partitioned legacy BIOS disk image that the user
# can directly use to boot a qemu machine.
-
-part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024
-part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024
+include common.wks.inc
bootloader --timeout=0 --append="vga=0 uvesafb.mode_option=640x480-32 root=/dev/vda2 rw mem=256M ip=192.168.7.2::192.168.7.1:255.255.255.0 oprofile.timer=1 rootfstype=ext4 "
diff --git a/yocto-poky/scripts/lib/wic/conf.py b/yocto-poky/scripts/lib/wic/conf.py
index 1d4363a52..f7d56d046 100644
--- a/yocto-poky/scripts/lib/wic/conf.py
+++ b/yocto-poky/scripts/lib/wic/conf.py
@@ -17,8 +17,8 @@
import os
+from wic.ksparser import KickStart, KickStartError
from wic import msger
-from wic import kickstart
from wic.utils import misc
@@ -87,7 +87,10 @@ class ConfigMgr(object):
if not ksconf:
return
- ksobj = kickstart.read_kickstart(ksconf)
+ try:
+ ksobj = KickStart(ksconf)
+ except KickStartError as err:
+ msger.error(str(err))
self.create['ks'] = ksobj
self.create['name'] = os.path.splitext(os.path.basename(ksconf))[0]
diff --git a/yocto-poky/scripts/lib/wic/help.py b/yocto-poky/scripts/lib/wic/help.py
index 9a778b69d..405d25a87 100644
--- a/yocto-poky/scripts/lib/wic/help.py
+++ b/yocto-poky/scripts/lib/wic/help.py
@@ -617,7 +617,7 @@ DESCRIPTION
This command creates a partition on the system and uses the
following syntax:
- part <mountpoint>
+ part [<mountpoint>]
The <mountpoint> is where the partition will be mounted and
         must be one of the following forms:
@@ -626,6 +626,16 @@ DESCRIPTION
swap: The partition will be used as swap space.
+ If a <mountpoint> is not specified the partition will be created
+ but will not be mounted.
+
+ Partitions with a <mountpoint> specified will be automatically mounted.
+ This is achieved by wic adding entries to the fstab during image
+ generation. In order for a valid fstab to be generated one of the
+ --ondrive, --ondisk or --use-uuid partition options must be used for
+ each partition that specifies a mountpoint.
+
+
The following are supported 'part' options:
--size: The minimum partition size. Specify an integer value
@@ -740,8 +750,28 @@ DESCRIPTION
bootloader command-line - for example, the syslinux
APPEND or grub kernel command line.
+ --configfile: Specifies a user defined configuration file for
+                  the bootloader. The file must either be located in the
+                  canned-wks folder or be given as a full path to the
+                  file. Using this option will override any other
+ bootloader option.
+
Note that bootloader functionality and boot partitions are
implemented by the various --source plugins that implement
bootloader functionality; the bootloader command essentially
provides a means of modifying bootloader configuration.
+
+ * include
+
+    This command allows the user to include the content of another .wks
+    file in the original .wks file.
+
+    The command uses the following syntax:
+
+ include <file>
+
+    The <file> is either a path to the file or its name. If only a name
+    is specified, wic will try to find the file in the directories that
+    hold the canned .wks files.
+
"""
diff --git a/yocto-poky/scripts/lib/wic/imager/baseimager.py b/yocto-poky/scripts/lib/wic/imager/baseimager.py
index acbe94858..760cf8a58 100644
--- a/yocto-poky/scripts/lib/wic/imager/baseimager.py
+++ b/yocto-poky/scripts/lib/wic/imager/baseimager.py
@@ -85,7 +85,7 @@ class BaseImageCreator(object):
# No ks provided when called by convertor, so skip the dependency check
if self.ks:
# If we have btrfs partition we need to check necessary tools
- for part in self.ks.handler.partition.partitions:
+ for part in self.ks.partitions:
if part.fstype and part.fstype == "btrfs":
self._dep_checks.append("mkfs.btrfs")
break
diff --git a/yocto-poky/scripts/lib/wic/imager/direct.py b/yocto-poky/scripts/lib/wic/imager/direct.py
index d5603fa91..a1b424965 100644
--- a/yocto-poky/scripts/lib/wic/imager/direct.py
+++ b/yocto-poky/scripts/lib/wic/imager/direct.py
@@ -27,7 +27,7 @@
import os
import shutil
-from wic import kickstart, msger
+from wic import msger
from wic.utils import fs_related
from wic.utils.oe.misc import get_bitbake_var
from wic.utils.partitionedfs import Image
@@ -64,7 +64,7 @@ class DirectImageCreator(BaseImageCreator):
self.__disks = {}
self.__disk_format = "direct"
self._disk_names = []
- self.ptable_format = self.ks.handler.bootloader.ptable
+ self.ptable_format = self.ks.bootloader.ptable
self.oe_builddir = oe_builddir
if image_output_dir:
@@ -151,15 +151,15 @@ class DirectImageCreator(BaseImageCreator):
"please check your kickstart setting.")
# Set a default partition if no partition is given out
- if not self.ks.handler.partition.partitions:
+ if not self.ks.partitions:
partstr = "part / --size 1900 --ondisk sda --fstype=ext3"
args = partstr.split()
- part = self.ks.handler.partition.parse(args[1:])
- if part not in self.ks.handler.partition.partitions:
- self.ks.handler.partition.partitions.append(part)
+ part = self.ks.parse(args[1:])
+ if part not in self.ks.partitions:
+ self.ks.partitions.append(part)
# partitions list from kickstart file
- return kickstart.get_partitions(self.ks)
+ return self.ks.partitions
def get_disk_names(self):
""" Returns a list of physical target disk names (e.g., 'sdb') which
@@ -206,7 +206,7 @@ class DirectImageCreator(BaseImageCreator):
bootloader object, the default can be explicitly set using the
--source bootloader param.
"""
- return self.ks.handler.bootloader.source
+ return self.ks.bootloader.source
#
# Actual implemention
@@ -224,16 +224,19 @@ class DirectImageCreator(BaseImageCreator):
for part in parts:
# as a convenience, set source to the boot partition source
# instead of forcing it to be set via bootloader --source
- if not self.ks.handler.bootloader.source and part.mountpoint == "/boot":
- self.ks.handler.bootloader.source = part.source
+ if not self.ks.bootloader.source and part.mountpoint == "/boot":
+ self.ks.bootloader.source = part.source
fstab_path = self._write_fstab(self.rootfs_dir.get("ROOTFS_DIR"))
+ shutil.rmtree(self.workdir)
+ os.mkdir(self.workdir)
+
for part in parts:
# get rootfs size from bitbake variable if it's not set in .ks file
if not part.size:
# and if rootfs name is specified for the partition
- image_name = part.get_rootfs()
+ image_name = part.rootfs_dir
if image_name:
# Bitbake variable ROOTFS_SIZE is calculated in
# Image._get_rootfs_size method from meta/lib/oe/image.py
@@ -336,13 +339,13 @@ class DirectImageCreator(BaseImageCreator):
msg += 'The following build artifacts were used to create the image(s):\n'
for part in parts:
- if part.get_rootfs() is None:
+ if part.rootfs_dir is None:
continue
if part.mountpoint == '/':
suffix = ':'
else:
suffix = '["%s"]:' % (part.mountpoint or part.label)
- msg += ' ROOTFS_DIR%s%s\n' % (suffix.ljust(20), part.get_rootfs())
+ msg += ' ROOTFS_DIR%s%s\n' % (suffix.ljust(20), part.rootfs_dir)
msg += ' BOOTIMG_DIR: %s\n' % self.bootimg_dir
msg += ' KERNEL_DIR: %s\n' % self.kernel_dir
diff --git a/yocto-poky/scripts/lib/wic/kickstart/__init__.py b/yocto-poky/scripts/lib/wic/kickstart/__init__.py
deleted file mode 100644
index c9b0e51f3..000000000
--- a/yocto-poky/scripts/lib/wic/kickstart/__init__.py
+++ /dev/null
@@ -1,122 +0,0 @@
-#!/usr/bin/env python -tt
-#
-# Copyright (c) 2007 Red Hat, Inc.
-# Copyright (c) 2009, 2010, 2011 Intel, Inc.
-#
-# This program is free software; you can redistribute it and/or modify it
-# under the terms of the GNU General Public License as published by the Free
-# Software Foundation; version 2 of the License
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
-# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
-# for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc., 59
-# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-
-import os, sys, re
-import shutil
-import subprocess
-import string
-
-import pykickstart.sections as kssections
-import pykickstart.commands as kscommands
-import pykickstart.constants as ksconstants
-import pykickstart.errors as kserrors
-import pykickstart.parser as ksparser
-import pykickstart.version as ksversion
-from pykickstart.handlers.control import commandMap
-from pykickstart.handlers.control import dataMap
-
-from wic import msger
-from wic.utils import errors, misc, runner, fs_related as fs
-from custom_commands import wicboot, partition
-
-def read_kickstart(path):
- """Parse a kickstart file and return a KickstartParser instance.
-
- This is a simple utility function which takes a path to a kickstart file,
- parses it and returns a pykickstart KickstartParser instance which can
- be then passed to an ImageCreator constructor.
-
- If an error occurs, a CreatorError exception is thrown.
- """
-
- #version = ksversion.makeVersion()
- #ks = ksparser.KickstartParser(version)
-
- using_version = ksversion.DEVEL
- commandMap[using_version]["bootloader"] = wicboot.Wic_Bootloader
- commandMap[using_version]["part"] = partition.Wic_Partition
- commandMap[using_version]["partition"] = partition.Wic_Partition
- dataMap[using_version]["PartData"] = partition.Wic_PartData
- superclass = ksversion.returnClassForVersion(version=using_version)
-
- class KSHandlers(superclass):
- def __init__(self):
- superclass.__init__(self, mapping=commandMap[using_version])
-
- kickstart = ksparser.KickstartParser(KSHandlers(), errorsAreFatal=True)
-
- try:
- kickstart.readKickstart(path)
- except (kserrors.KickstartParseError, kserrors.KickstartError), err:
- msger.warning("Errors occurred when parsing kickstart file: %s\n" % path)
- msger.error("%s" % err)
-
- return kickstart
-
-def get_image_size(kickstart, default=None):
- __size = 0
- for part in kickstart.handler.partition.partitions:
- if part.mountpoint == "/" and part.size:
- __size = part.size
- if __size > 0:
- return int(__size) * 1024L
- else:
- return default
-
-def get_image_fstype(kickstart, default=None):
- for part in kickstart.handler.partition.partitions:
- if part.mountpoint == "/" and part.fstype:
- return part.fstype
- return default
-
-def get_image_fsopts(kickstart, default=None):
- for part in kickstart.handler.partition.partitions:
- if part.mountpoint == "/" and part.fsopts:
- return part.fsopts
- return default
-
-def get_timeout(kickstart, default=None):
- if not hasattr(kickstart.handler.bootloader, "timeout"):
- return default
- if kickstart.handler.bootloader.timeout is None:
- return default
- return int(kickstart.handler.bootloader.timeout)
-
-def get_kernel_args(kickstart, default="ro rd.live.image"):
- if not hasattr(kickstart.handler.bootloader, "appendLine"):
- return default
- if kickstart.handler.bootloader.appendLine is None:
- return default
- return "%s %s" %(default, kickstart.handler.bootloader.appendLine)
-
-def get_menu_args(kickstart, default=""):
- if not hasattr(kickstart.handler.bootloader, "menus"):
- return default
- if kickstart.handler.bootloader.menus in (None, ""):
- return default
- return "%s" % kickstart.handler.bootloader.menus
-
-def get_default_kernel(kickstart, default=None):
- if not hasattr(kickstart.handler.bootloader, "default"):
- return default
- if not kickstart.handler.bootloader.default:
- return default
- return kickstart.handler.bootloader.default
-
-def get_partitions(kickstart):
- return kickstart.handler.partition.partitions
diff --git a/yocto-poky/scripts/lib/wic/kickstart/custom_commands/__init__.py b/yocto-poky/scripts/lib/wic/kickstart/custom_commands/__init__.py
deleted file mode 100644
index e4ae40622..000000000
--- a/yocto-poky/scripts/lib/wic/kickstart/custom_commands/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from partition import Wic_Partition
-from partition import Wic_PartData
-
-__all__ = (
- "Wic_Partition",
- "Wic_PartData",
-)
diff --git a/yocto-poky/scripts/lib/wic/kickstart/custom_commands/wicboot.py b/yocto-poky/scripts/lib/wic/kickstart/custom_commands/wicboot.py
deleted file mode 100644
index a3e1852be..000000000
--- a/yocto-poky/scripts/lib/wic/kickstart/custom_commands/wicboot.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# Copyright (c) 2014, Intel Corporation.
-# All rights reserved.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# DESCRIPTION
-# This module provides the OpenEmbedded bootloader object definitions.
-#
-# AUTHORS
-# Tom Zanussi <tom.zanussi (at] linux.intel.com>
-#
-from pykickstart.commands.bootloader import F8_Bootloader
-
-class Wic_Bootloader(F8_Bootloader):
- def __init__(self, writePriority=10, appendLine="", driveorder=None,
- forceLBA=False, location="", md5pass="", password="",
- upgrade=False, menus=""):
- F8_Bootloader.__init__(self, writePriority, appendLine, driveorder,
- forceLBA, location, md5pass, password, upgrade)
-
- self.menus = ""
- self.ptable = "msdos"
- self.source = ""
-
- def _getArgsAsStr(self):
- retval = F8_Bootloader._getArgsAsStr(self)
-
- if self.menus == "":
- retval += " --menus=%s" %(self.menus,)
- if self.ptable:
- retval += " --ptable=\"%s\"" %(self.ptable,)
- if self.source:
- retval += " --source=%s" % self.source
-
- return retval
-
- def _getParser(self):
- parser = F8_Bootloader._getParser(self)
- parser.add_option("--menus", dest="menus")
- parser.add_option("--ptable", dest="ptable", choices=("msdos", "gpt"),
- default="msdos")
- # use specified source plugin to implement bootloader-specific methods
- parser.add_option("--source", type="string", action="store",
- dest="source", default=None)
- return parser
-
diff --git a/yocto-poky/scripts/lib/wic/ksparser.py b/yocto-poky/scripts/lib/wic/ksparser.py
new file mode 100644
index 000000000..8c3f80882
--- /dev/null
+++ b/yocto-poky/scripts/lib/wic/ksparser.py
@@ -0,0 +1,169 @@
+#!/usr/bin/env python -tt
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Copyright (c) 2016 Intel, Inc.
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the Free
+# Software Foundation; version 2 of the License
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+# for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc., 59
+# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+# DESCRIPTION
+# This module provides parser for kickstart format
+#
+# AUTHORS
+# Ed Bartosh <ed.bartosh> (at] linux.intel.com>
+
+"""Kickstart parser module."""
+
+import os
+import shlex
+from argparse import ArgumentParser, ArgumentError, ArgumentTypeError
+
+from wic import msger
+from wic.partition import Partition
+from wic.utils.misc import find_canned
+
+class KickStartError(Exception):
+ """Custom exception."""
+ pass
+
+class KickStartParser(ArgumentParser):
+ """
+    This class overrides the error method to raise an exception
+    instead of printing a usage message (the default argparse behavior).
+ """
+ def error(self, message):
+ raise ArgumentError(None, message)
+
+def sizetype(arg):
+ """
+ Custom type for ArgumentParser
+ Converts size string in <num>[K|k|M|G] format into the integer value
+ """
+ if arg.isdigit():
+ return int(arg) * 1024L
+
+ if not arg[:-1].isdigit():
+ raise ArgumentTypeError("Invalid size: %r" % arg)
+
+ size = int(arg[:-1])
+ if arg.endswith("k") or arg.endswith("K"):
+ return size
+ if arg.endswith("M"):
+ return size * 1024L
+ if arg.endswith("G"):
+ return size * 1024L * 1024L
+
+ raise ArgumentTypeError("Invalid size: %r" % arg)
+
+def overheadtype(arg):
+ """
+ Custom type for ArgumentParser
+ Converts overhead string to float and checks if it's bigger than 1.0
+ """
+ try:
+ result = float(arg)
+ except ValueError:
+ raise ArgumentTypeError("Invalid value: %r" % arg)
+
+ if result < 1.0:
+        raise ArgumentTypeError("Overhead factor should be > 1.0: %r" % arg)
+
+ return result
+
+def cannedpathtype(arg):
+ """
+ Custom type for ArgumentParser
+ Tries to find file in the list of canned wks paths
+ """
+ scripts_path = os.path.abspath(os.path.dirname(__file__) + '../../..')
+ result = find_canned(scripts_path, arg)
+ if not result:
+ raise ArgumentTypeError("file not found: %s" % arg)
+ return result
+
+class KickStart(object):
+ """"Kickstart parser implementation."""
+
+ def __init__(self, confpath):
+
+ self.partitions = []
+ self.bootloader = None
+ self.lineno = 0
+ self.partnum = 0
+
+ parser = KickStartParser()
+ subparsers = parser.add_subparsers()
+
+ part = subparsers.add_parser('part')
+ part.add_argument('mountpoint')
+ part.add_argument('--active', action='store_true')
+ part.add_argument('--align', type=int)
+ part.add_argument("--extra-space", type=sizetype, default=10*1024L)
+ part.add_argument('--fsoptions', dest='fsopts')
+ part.add_argument('--fstype')
+ part.add_argument('--label')
+ part.add_argument('--no-table', action='store_true')
+ part.add_argument('--ondisk', '--ondrive', dest='disk')
+ part.add_argument("--overhead-factor", type=overheadtype, default=1.3)
+ part.add_argument('--part-type')
+ part.add_argument('--rootfs-dir')
+ part.add_argument('--size', type=sizetype, default=0)
+ part.add_argument('--source')
+ part.add_argument('--sourceparams')
+ part.add_argument('--use-uuid', action='store_true')
+ part.add_argument('--uuid')
+
+ bootloader = subparsers.add_parser('bootloader')
+ bootloader.add_argument('--append')
+ bootloader.add_argument('--configfile')
+ bootloader.add_argument('--ptable', choices=('msdos', 'gpt'),
+ default='msdos')
+ bootloader.add_argument('--timeout', type=int)
+ bootloader.add_argument('--source')
+
+ include = subparsers.add_parser('include')
+ include.add_argument('path', type=cannedpathtype)
+
+ self._parse(parser, confpath)
+ if not self.bootloader:
+ msger.warning('bootloader config not specified, using defaults')
+ self.bootloader = bootloader.parse_args([])
+
+ def _parse(self, parser, confpath):
+ """
+ Parse file in .wks format using provided parser.
+ """
+ with open(confpath) as conf:
+ lineno = 0
+ for line in conf:
+ line = line.strip()
+ lineno += 1
+ if line and line[0] != '#':
+ try:
+ parsed = parser.parse_args(shlex.split(line))
+ except ArgumentError as err:
+ raise KickStartError('%s:%d: %s' % \
+ (confpath, lineno, err))
+ if line.startswith('part'):
+ self.partnum += 1
+ self.partitions.append(Partition(parsed, self.partnum))
+ elif line.startswith('include'):
+ self._parse(parser, parsed.path)
+ elif line.startswith('bootloader'):
+ if not self.bootloader:
+ self.bootloader = parsed
+ else:
+ err = "%s:%d: more than one bootloader specified" \
+ % (confpath, lineno)
+ raise KickStartError(err)
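As a quick reading of the converters above: sizes are returned in kilobyte units, with a bare number treated as megabytes, and overhead factors below 1.0 are rejected. A small sketch of the expected values (Python 2):

    from wic.ksparser import sizetype, overheadtype

    assert sizetype("16") == 16 * 1024      # bare number: megabytes, stored as KB
    assert sizetype("512k") == 512          # kilobytes pass through unchanged
    assert sizetype("16M") == 16 * 1024     # megabytes
    assert sizetype("1G") == 1024 * 1024    # gigabytes
    assert overheadtype("1.3") == 1.3       # values below 1.0 raise ArgumentTypeError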
diff --git a/yocto-poky/scripts/lib/wic/kickstart/custom_commands/partition.py b/yocto-poky/scripts/lib/wic/partition.py
index eee25a493..f40d1bc8b 100644
--- a/yocto-poky/scripts/lib/wic/kickstart/custom_commands/partition.py
+++ b/yocto-poky/scripts/lib/wic/partition.py
@@ -1,7 +1,7 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
-# Copyright (c) 2013, Intel Corporation.
+# Copyright (c) 2013-2016 Intel Corporation.
# All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
@@ -22,14 +22,12 @@
#
# AUTHORS
# Tom Zanussi <tom.zanussi (at] linux.intel.com>
-#
+# Ed Bartosh <ed.bartosh> (at] linux.intel.com>
import os
import tempfile
import uuid
-from optparse import OptionValueError
-from pykickstart.commands.partition import FC4_PartData, FC4_Partition
from wic.utils.oe.misc import msger, parse_sourceparams
from wic.utils.oe.misc import exec_cmd, exec_native_cmd
from wic.plugin import pluginmgr
@@ -40,96 +38,33 @@ partition_methods = {
"do_configure_partition":None,
}
-class Wic_PartData(FC4_PartData):
- removedKeywords = FC4_PartData.removedKeywords
- removedAttrs = FC4_PartData.removedAttrs
-
- def __init__(self, *args, **kwargs):
- FC4_PartData.__init__(self, *args, **kwargs)
- self.deleteRemovedAttrs()
- self.align = kwargs.get("align", None)
- self.extopts = kwargs.get("extopts", None)
- self.part_type = kwargs.get("part_type", None)
- self.source = kwargs.get("source", None)
- self.sourceparams = kwargs.get("sourceparams", None)
- self.rootfs = kwargs.get("rootfs-dir", None)
- self.no_table = kwargs.get("no-table", False)
- self.extra_space = kwargs.get("extra-space", "10M")
- self.overhead_factor = kwargs.get("overhead-factor", 1.3)
- self._use_uuid = False
- self.uuid = kwargs.get("uuid", None)
- self.use_uuid = kwargs.get("use-uuid", False)
- self.source_file = ""
- self.size = 0
-
- def _getArgsAsStr(self):
- retval = FC4_PartData._getArgsAsStr(self)
-
- if self.align:
- retval += " --align=%d" % self.align
- if self.extopts:
- retval += " --extoptions=%s" % self.extopts
- if self.part_type:
- retval += " --part-type=%s" % self.part_type
- if self.source:
- retval += " --source=%s" % self.source
- if self.sourceparams:
- retval += " --sourceparams=%s" % self.sourceparams
- if self.rootfs:
- retval += " --rootfs-dir=%s" % self.rootfs
- if self.no_table:
- retval += " --no-table"
- if self.use_uuid:
- retval += " --use-uuid"
- if self.uuid:
- retval += " --uuid=%s" % self.uuid
- retval += " --extra-space=%s" % self.extra_space
- retval += " --overhead-factor=%f" % self.overhead_factor
-
- return retval
-
- @property
- def use_uuid(self):
- return self._use_uuid
-
- @use_uuid.setter
- def use_uuid(self, value):
- self._use_uuid = value
- if value and not self.uuid:
+class Partition(object):
+
+ def __init__(self, args, lineno):
+ self.args = args
+ self.active = args.active
+ self.align = args.align
+ self.disk = args.disk
+ self.extra_space = args.extra_space
+ self.fsopts = args.fsopts
+ self.fstype = args.fstype
+ self.label = args.label
+ self.mountpoint = args.mountpoint
+ self.no_table = args.no_table
+ self.overhead_factor = args.overhead_factor
+ self.part_type = args.part_type
+ self.rootfs_dir = args.rootfs_dir
+ self.size = args.size
+ self.source = args.source
+ self.sourceparams = args.sourceparams
+ self.use_uuid = args.use_uuid
+ self.uuid = args.uuid
+ if args.use_uuid and not self.uuid:
self.uuid = str(uuid.uuid4())
- def get_rootfs(self):
- """
- Acessor for rootfs dir
- """
- return self.rootfs
-
- def set_rootfs(self, rootfs):
- """
- Acessor for actual rootfs dir, which must be set by source
- plugins.
- """
- self.rootfs = rootfs
-
- def get_size(self):
- """
- Accessor for partition size, 0 or --size before set_size().
- """
- return self.size
-
- def set_size(self, size):
- """
- Accessor for actual partition size, which must be set by source
- plugins.
- """
- self.size = size
-
- def set_source_file(self, source_file):
- """
- Accessor for source_file, the location of the generated partition
- image, which must be set by source plugins.
- """
- self.source_file = source_file
+ self.lineno = lineno
+ self.source_file = ""
+ self.sourceparams_dict = {}
def get_extra_block_count(self, current_blocks):
"""
@@ -154,14 +89,12 @@ class Wic_PartData(FC4_PartData):
else:
return 0
- def prepare(self, creator, cr_workdir, oe_builddir, rootfs_dir, bootimg_dir,
- kernel_dir, native_sysroot):
+ def prepare(self, creator, cr_workdir, oe_builddir, rootfs_dir,
+ bootimg_dir, kernel_dir, native_sysroot):
"""
Prepare content for individual partitions, depending on
partition command parameters.
"""
- self.sourceparams_dict = {}
-
if self.sourceparams:
self.sourceparams_dict = parse_sourceparams(self.sourceparams)
@@ -173,6 +106,7 @@ class Wic_PartData(FC4_PartData):
if self.fstype and self.fstype == "swap":
self.prepare_swap_partition(cr_workdir, oe_builddir,
native_sysroot)
+ self.source_file = "%s/fs.%s" % (cr_workdir, self.fstype)
elif self.fstype:
rootfs = "%s/fs_%s.%s.%s" % (cr_workdir, self.label,
self.lineno, self.fstype)
@@ -297,7 +231,7 @@ class Wic_PartData(FC4_PartData):
mkfs_cmd = "mkfs.%s -F %s %s %s -d %s" % \
(self.fstype, extra_imagecmd, rootfs, label_str, rootfs_dir)
- exec_native_cmd(pseudo + mkfs_cmd, native_sysroot)
+ exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo)
def prepare_rootfs_btrfs(self, rootfs, oe_builddir, rootfs_dir,
native_sysroot, pseudo):
@@ -330,7 +264,7 @@ class Wic_PartData(FC4_PartData):
mkfs_cmd = "mkfs.%s -b %d -r %s %s %s" % \
(self.fstype, rootfs_size * 1024, rootfs_dir, label_str, rootfs)
- exec_native_cmd(pseudo + mkfs_cmd, native_sysroot)
+ exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo)
def prepare_rootfs_vfat(self, rootfs, oe_builddir, rootfs_dir,
native_sysroot, pseudo):
@@ -378,7 +312,7 @@ class Wic_PartData(FC4_PartData):
"""
squashfs_cmd = "mksquashfs %s %s -noappend" % \
(rootfs_dir, rootfs)
- exec_native_cmd(pseudo + squashfs_cmd, native_sysroot)
+ exec_native_cmd(squashfs_cmd, native_sysroot, pseudo=pseudo)
def prepare_empty_partition_ext(self, rootfs, oe_builddir,
native_sysroot):
@@ -478,49 +412,3 @@ class Wic_PartData(FC4_PartData):
mkswap_cmd = "mkswap %s -U %s %s" % (label_str, str(uuid.uuid1()), path)
exec_native_cmd(mkswap_cmd, native_sysroot)
-
-class Wic_Partition(FC4_Partition):
- removedKeywords = FC4_Partition.removedKeywords
- removedAttrs = FC4_Partition.removedAttrs
-
- def _getParser(self):
- def overhead_cb(option, opt_str, value, parser):
- if value < 1:
- raise OptionValueError("Option %s: invalid value: %r" % \
- (option, value))
- setattr(parser.values, option.dest, value)
-
- parser = FC4_Partition._getParser(self)
-
- # The alignment value is given in kBytes. e.g., value 8 means that
- # the partition is aligned to start from 8096 byte boundary.
- parser.add_option("--align", type="int", action="store", dest="align",
- default=None)
- parser.add_option("--extoptions", type="string", action="store", dest="extopts",
- default=None)
- parser.add_option("--part-type", type="string", action="store", dest="part_type",
- default=None)
- # use specified source file to fill the partition
- # and calculate partition size
- parser.add_option("--source", type="string", action="store",
- dest="source", default=None)
- # comma-separated list of param=value pairs
- parser.add_option("--sourceparams", type="string", action="store",
- dest="sourceparams", default=None)
- # use specified rootfs path to fill the partition
- parser.add_option("--rootfs-dir", type="string", action="store",
- dest="rootfs", default=None)
- # wether to add the partition in the partition table
- parser.add_option("--no-table", dest="no_table", action="store_true",
- default=False)
- # extra space beyond the partition size
- parser.add_option("--extra-space", dest="extra_space", action="store",
- type="size", nargs=1, default="10M")
- parser.add_option("--overhead-factor", dest="overhead_factor",
- action="callback", callback=overhead_cb, type="float",
- nargs=1, default=1.3)
- parser.add_option("--use-uuid", dest="use_uuid", action="store_true",
- default=False)
- parser.add_option("--uuid")
-
- return parser
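
After this rewrite a partition is just a plain object filled in from the argparse namespace produced for each 'part' line, with the UUID generated on the spot when --use-uuid is given but --uuid is not. A stripped-down sketch of that constructor (only a handful of the attributes above are kept; the Namespace stands in for the real parsed arguments):

import uuid
from argparse import Namespace

class Partition(object):
    def __init__(self, args, lineno):
        self.args = args
        self.mountpoint = args.mountpoint
        self.fstype = args.fstype
        self.size = args.size
        self.use_uuid = args.use_uuid
        self.uuid = args.uuid
        if args.use_uuid and not self.uuid:
            # Same fallback as above: synthesize a UUID when none was supplied.
            self.uuid = str(uuid.uuid4())
        self.lineno = lineno
        self.source_file = ""
        self.sourceparams_dict = {}

ns = Namespace(mountpoint="/", fstype="ext4", size=1024, use_uuid=True, uuid=None)
part = Partition(ns, lineno=3)
print(part.uuid)
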
diff --git a/yocto-poky/scripts/lib/wic/plugins/source/bootimg-efi.py b/yocto-poky/scripts/lib/wic/plugins/source/bootimg-efi.py
index fa63c6abd..a4734c9b3 100644
--- a/yocto-poky/scripts/lib/wic/plugins/source/bootimg-efi.py
+++ b/yocto-poky/scripts/lib/wic/plugins/source/bootimg-efi.py
@@ -27,8 +27,9 @@
import os
import shutil
-from wic import kickstart, msger
+from wic import msger
from wic.pluginbase import SourcePlugin
+from wic.utils.misc import get_custom_config
from wic.utils.oe.misc import exec_cmd, exec_native_cmd, get_bitbake_var, \
BOOTDD_EXTRA_SPACE
@@ -45,22 +46,34 @@ class BootimgEFIPlugin(SourcePlugin):
"""
Create loader-specific (grub-efi) config
"""
- options = creator.ks.handler.bootloader.appendLine
+ configfile = creator.ks.bootloader.configfile
+ custom_cfg = None
+ if configfile:
+ custom_cfg = get_custom_config(configfile)
+ if custom_cfg:
+ # Use a custom configuration for grub
+ grubefi_conf = custom_cfg
+ msger.debug("Using custom configuration file "
+ "%s for grub.cfg" % configfile)
+ else:
+ msger.error("configfile is specified but failed to "
+ "get it from %s." % configfile)
+
+ if not custom_cfg:
+ # Create grub configuration using parameters from wks file
+ bootloader = creator.ks.bootloader
- grubefi_conf = ""
- grubefi_conf += "serial --unit=0 --speed=115200 --word=8 --parity=no --stop=1\n"
- grubefi_conf += "default=boot\n"
- timeout = kickstart.get_timeout(creator.ks)
- if not timeout:
- timeout = 0
- grubefi_conf += "timeout=%s\n" % timeout
- grubefi_conf += "menuentry 'boot'{\n"
+ grubefi_conf = ""
+ grubefi_conf += "serial --unit=0 --speed=115200 --word=8 --parity=no --stop=1\n"
+ grubefi_conf += "default=boot\n"
+ grubefi_conf += "timeout=%s\n" % bootloader.timeout
+ grubefi_conf += "menuentry 'boot'{\n"
- kernel = "/bzImage"
+ kernel = "/bzImage"
- grubefi_conf += "linux %s root=%s rootwait %s\n" \
- % (kernel, creator.rootdev, options)
- grubefi_conf += "}\n"
+ grubefi_conf += "linux %s root=%s rootwait %s\n" \
+ % (kernel, creator.rootdev, bootloader.append)
+ grubefi_conf += "}\n"
msger.debug("Writing grubefi config %s/hdd/boot/EFI/BOOT/grub.cfg" \
% cr_workdir)
@@ -79,15 +92,11 @@ class BootimgEFIPlugin(SourcePlugin):
install_cmd = "install -d %s/loader/entries" % hdddir
exec_cmd(install_cmd)
- options = creator.ks.handler.bootloader.appendLine
-
- timeout = kickstart.get_timeout(creator.ks)
- if not timeout:
- timeout = 0
+ bootloader = creator.ks.bootloader
loader_conf = ""
loader_conf += "default boot\n"
- loader_conf += "timeout %d\n" % timeout
+ loader_conf += "timeout %d\n" % bootloader.timeout
msger.debug("Writing gummiboot config %s/hdd/boot/loader/loader.conf" \
% cr_workdir)
@@ -95,12 +104,28 @@ class BootimgEFIPlugin(SourcePlugin):
cfg.write(loader_conf)
cfg.close()
- kernel = "/bzImage"
+ configfile = creator.ks.bootloader.configfile
+ custom_cfg = None
+ if configfile:
+ custom_cfg = get_custom_config(configfile)
+ if custom_cfg:
+ # Use a custom configuration for gummiboot
+ boot_conf = custom_cfg
+ msger.debug("Using custom configuration file "
+ "%s for gummiboot's boot.conf" % configfile)
+ else:
+ msger.error("configfile is specified but failed to "
+ "get it from %s." % configfile)
+
+ if not custom_cfg:
+ # Create gummiboot configuration using parameters from wks file
+ kernel = "/bzImage"
- boot_conf = ""
- boot_conf += "title boot\n"
- boot_conf += "linux %s\n" % kernel
- boot_conf += "options LABEL=Boot root=%s %s\n" % (creator.rootdev, options)
+ boot_conf = ""
+ boot_conf += "title boot\n"
+ boot_conf += "linux %s\n" % kernel
+ boot_conf += "options LABEL=Boot root=%s %s\n" % \
+ (creator.rootdev, bootloader.append)
msger.debug("Writing gummiboot config %s/hdd/boot/loader/entries/boot.conf" \
% cr_workdir)
@@ -117,8 +142,6 @@ class BootimgEFIPlugin(SourcePlugin):
Called before do_prepare_partition(), creates loader-specific config
"""
hdddir = "%s/hdd/boot" % cr_workdir
- rm_cmd = "rm -rf %s" % cr_workdir
- exec_cmd(rm_cmd)
install_cmd = "install -d %s/EFI/BOOT" % hdddir
exec_cmd(install_cmd)
@@ -210,5 +233,5 @@ class BootimgEFIPlugin(SourcePlugin):
out = exec_cmd(du_cmd)
bootimg_size = out.split()[0]
- part.set_size(bootimg_size)
- part.set_source_file(bootimg)
+ part.size = bootimg_size
+ part.source_file = bootimg
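
Both the grub-efi and gummiboot paths now share one pattern: if the bootloader line in the .wks file named a --configfile, its contents (looked up through get_custom_config) are used verbatim, otherwise a default config is assembled from bootloader.timeout and bootloader.append. A compact sketch of that fallback, with a simple file loader standing in for get_custom_config:

from argparse import Namespace

def load_custom_config(configfile):
    # Stand-in for wic.utils.misc.get_custom_config(): file text, or None.
    try:
        with open(configfile) as cfg:
            return cfg.read()
    except (IOError, OSError):
        return None

def build_grubefi_conf(bootloader, rootdev, kernel="/bzImage"):
    custom_cfg = load_custom_config(bootloader.configfile) if bootloader.configfile else None
    if custom_cfg:
        return custom_cfg  # the user-supplied grub.cfg wins outright
    conf = "serial --unit=0 --speed=115200 --word=8 --parity=no --stop=1\n"
    conf += "default=boot\n"
    conf += "timeout=%s\n" % bootloader.timeout
    conf += "menuentry 'boot'{\n"
    conf += "linux %s root=%s rootwait %s\n" % (kernel, rootdev, bootloader.append)
    conf += "}\n"
    return conf

bl = Namespace(configfile=None, timeout=0, append="console=ttyS0,115200")
print(build_grubefi_conf(bl, "/dev/sda2"))
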
diff --git a/yocto-poky/scripts/lib/wic/plugins/source/bootimg-partition.py b/yocto-poky/scripts/lib/wic/plugins/source/bootimg-partition.py
index bc2ca0f6f..b76c1211a 100644
--- a/yocto-poky/scripts/lib/wic/plugins/source/bootimg-partition.py
+++ b/yocto-poky/scripts/lib/wic/plugins/source/bootimg-partition.py
@@ -71,9 +71,6 @@ class BootimgPartitionPlugin(SourcePlugin):
- copies all files listed in IMAGE_BOOT_FILES variable
"""
hdddir = "%s/boot" % cr_workdir
- rm_cmd = "rm -rf %s/boot" % cr_workdir
- exec_cmd(rm_cmd)
-
install_cmd = "install -d %s" % hdddir
exec_cmd(install_cmd)
diff --git a/yocto-poky/scripts/lib/wic/plugins/source/bootimg-pcbios.py b/yocto-poky/scripts/lib/wic/plugins/source/bootimg-pcbios.py
index 96ed54dba..5b719bf3b 100644
--- a/yocto-poky/scripts/lib/wic/plugins/source/bootimg-pcbios.py
+++ b/yocto-poky/scripts/lib/wic/plugins/source/bootimg-pcbios.py
@@ -27,8 +27,9 @@
import os
from wic.utils.errors import ImageError
-from wic import kickstart, msger
+from wic import msger
from wic.utils import runner
+from wic.utils.misc import get_custom_config
from wic.pluginbase import SourcePlugin
from wic.utils.oe.misc import exec_cmd, exec_native_cmd, \
get_bitbake_var, BOOTDD_EXTRA_SPACE
@@ -77,40 +78,49 @@ class BootimgPcbiosPlugin(SourcePlugin):
Called before do_prepare_partition(), creates syslinux config
"""
hdddir = "%s/hdd/boot" % cr_workdir
- rm_cmd = "rm -rf " + cr_workdir
- exec_cmd(rm_cmd)
install_cmd = "install -d %s" % hdddir
exec_cmd(install_cmd)
- splash = os.path.join(cr_workdir, "/hdd/boot/splash.jpg")
- if os.path.exists(splash):
- splashline = "menu background splash.jpg"
- else:
- splashline = ""
-
- options = creator.ks.handler.bootloader.appendLine
-
- syslinux_conf = ""
- syslinux_conf += "PROMPT 0\n"
- timeout = kickstart.get_timeout(creator.ks)
- if not timeout:
- timeout = 0
- syslinux_conf += "TIMEOUT " + str(timeout) + "\n"
- syslinux_conf += "\n"
- syslinux_conf += "ALLOWOPTIONS 1\n"
- syslinux_conf += "SERIAL 0 115200\n"
- syslinux_conf += "\n"
- if splashline:
- syslinux_conf += "%s\n" % splashline
- syslinux_conf += "DEFAULT boot\n"
- syslinux_conf += "LABEL boot\n"
-
- kernel = "/vmlinuz"
- syslinux_conf += "KERNEL " + kernel + "\n"
-
- syslinux_conf += "APPEND label=boot root=%s %s\n" % \
- (creator.rootdev, options)
+ bootloader = creator.ks.bootloader
+
+ custom_cfg = None
+ if bootloader.configfile:
+ custom_cfg = get_custom_config(bootloader.configfile)
+ if custom_cfg:
+ # Use a custom configuration for syslinux
+ syslinux_conf = custom_cfg
+ msger.debug("Using custom configuration file "
+ "%s for syslinux.cfg" % bootloader.configfile)
+ else:
+ msger.error("configfile is specified but failed to "
+ "get it from %s." % bootloader.configfile)
+
+ if not custom_cfg:
+ # Create syslinux configuration using parameters from wks file
+ splash = os.path.join(cr_workdir, "/hdd/boot/splash.jpg")
+ if os.path.exists(splash):
+ splashline = "menu background splash.jpg"
+ else:
+ splashline = ""
+
+ syslinux_conf = ""
+ syslinux_conf += "PROMPT 0\n"
+ syslinux_conf += "TIMEOUT " + str(bootloader.timeout) + "\n"
+ syslinux_conf += "\n"
+ syslinux_conf += "ALLOWOPTIONS 1\n"
+ syslinux_conf += "SERIAL 0 115200\n"
+ syslinux_conf += "\n"
+ if splashline:
+ syslinux_conf += "%s\n" % splashline
+ syslinux_conf += "DEFAULT boot\n"
+ syslinux_conf += "LABEL boot\n"
+
+ kernel = "/vmlinuz"
+ syslinux_conf += "KERNEL " + kernel + "\n"
+
+ syslinux_conf += "APPEND label=boot root=%s %s\n" % \
+ (creator.rootdev, bootloader.append)
msger.debug("Writing syslinux config %s/hdd/boot/syslinux.cfg" \
% cr_workdir)
@@ -194,7 +204,7 @@ class BootimgPcbiosPlugin(SourcePlugin):
out = exec_cmd(du_cmd)
bootimg_size = out.split()[0]
- part.set_size(bootimg_size)
- part.set_source_file(bootimg)
+ part.size = int(out.split()[0])
+ part.source_file = bootimg
diff --git a/yocto-poky/scripts/lib/wic/plugins/source/isoimage-isohybrid.py b/yocto-poky/scripts/lib/wic/plugins/source/isoimage-isohybrid.py
index 9472d8abb..bc9928314 100644
--- a/yocto-poky/scripts/lib/wic/plugins/source/isoimage-isohybrid.py
+++ b/yocto-poky/scripts/lib/wic/plugins/source/isoimage-isohybrid.py
@@ -23,8 +23,9 @@
import os
import re
import shutil
+import glob
-from wic import kickstart, msger
+from wic import msger
from wic.pluginbase import SourcePlugin
from wic.utils.oe.misc import exec_cmd, exec_native_cmd, get_bitbake_var
@@ -64,13 +65,11 @@ class IsoImagePlugin(SourcePlugin):
else:
splashline = ""
- options = creator.ks.handler.bootloader.appendLine
-
- timeout = kickstart.get_timeout(creator.ks, 10)
+ bootloader = creator.ks.bootloader
syslinux_conf = ""
syslinux_conf += "PROMPT 0\n"
- syslinux_conf += "TIMEOUT %s \n" % timeout
+ syslinux_conf += "TIMEOUT %s \n" % (bootloader.timeout or 10)
syslinux_conf += "\n"
syslinux_conf += "ALLOWOPTIONS 1\n"
syslinux_conf += "SERIAL 0 115200\n"
@@ -82,7 +81,8 @@ class IsoImagePlugin(SourcePlugin):
kernel = "/bzImage"
syslinux_conf += "KERNEL " + kernel + "\n"
- syslinux_conf += "APPEND initrd=/initrd LABEL=boot %s\n" % options
+ syslinux_conf += "APPEND initrd=/initrd LABEL=boot %s\n" \
+ % bootloader.append
msger.debug("Writing syslinux config %s/ISO/isolinux/isolinux.cfg" \
% cr_workdir)
@@ -100,14 +100,13 @@ class IsoImagePlugin(SourcePlugin):
else:
splashline = ""
- options = creator.ks.handler.bootloader.appendLine
+ bootloader = creator.ks.bootloader
grubefi_conf = ""
grubefi_conf += "serial --unit=0 --speed=115200 --word=8 "
grubefi_conf += "--parity=no --stop=1\n"
grubefi_conf += "default=boot\n"
- timeout = kickstart.get_timeout(creator.ks, 10)
- grubefi_conf += "timeout=%s\n" % timeout
+ grubefi_conf += "timeout=%s\n" % (bootloader.timeout or 10)
grubefi_conf += "\n"
grubefi_conf += "search --set=root --label %s " % part.label
grubefi_conf += "\n"
@@ -116,7 +115,7 @@ class IsoImagePlugin(SourcePlugin):
kernel = "/bzImage"
grubefi_conf += "linux %s rootwait %s\n" \
- % (kernel, options)
+ % (kernel, bootloader.append)
grubefi_conf += "initrd /initrd \n"
grubefi_conf += "}\n"
@@ -152,8 +151,7 @@ class IsoImagePlugin(SourcePlugin):
if not machine_arch:
msger.error("Couldn't find MACHINE_ARCH, exiting.\n")
- initrd = "%s/%s-initramfs-%s.%s" \
- % (initrd_dir, image_name, machine_arch, image_type)
+ initrd = glob.glob('%s/%s*%s.%s' % (initrd_dir, image_name, machine_arch, image_type))[0]
if not os.path.exists(initrd):
# Create initrd from rootfs directory
@@ -176,8 +174,8 @@ class IsoImagePlugin(SourcePlugin):
else:
msger.error("Couldn't find or build initrd, exiting.\n")
- exec_cmd("cd %s && find . | cpio -o -H newc >%s/initrd.cpio " \
- % (initrd_dir, cr_workdir), as_shell=True)
+ exec_cmd("cd %s && find . | cpio -o -H newc -R +0:+0 >./initrd.cpio " \
+ % initrd_dir, as_shell=True)
exec_cmd("gzip -f -9 -c %s/initrd.cpio > %s" \
% (cr_workdir, initrd), as_shell=True)
shutil.rmtree(initrd_dir)
@@ -210,11 +208,14 @@ class IsoImagePlugin(SourcePlugin):
if not os.path.exists("%s/syslinux" % syslinux_dir):
msger.info("Building syslinux...\n")
exec_cmd("bitbake syslinux")
- msger.info("Building syslinux-native...\n")
- exec_cmd("bitbake syslinux-native")
if not os.path.exists("%s/syslinux" % syslinux_dir):
msger.error("Please build syslinux first\n")
+ # Make sure syslinux is available in native sysroot
+ if not os.path.exists("%s/usr/bin/syslinux" % native_sysroot):
+ msger.info("Building syslinux-native...\n")
+ exec_cmd("bitbake syslinux-native")
+
#Make sure mkisofs is available in native sysroot
if not os.path.isfile("%s/usr/bin/mkisofs" % native_sysroot):
msger.info("Building cdrtools-native...\n")
@@ -264,26 +265,26 @@ class IsoImagePlugin(SourcePlugin):
isodir = "%s/ISO" % cr_workdir
- if part.rootfs is None:
+ if part.rootfs_dir is None:
if not 'ROOTFS_DIR' in rootfs_dir:
msger.error("Couldn't find --rootfs-dir, exiting.\n")
rootfs_dir = rootfs_dir['ROOTFS_DIR']
else:
- if part.rootfs in rootfs_dir:
- rootfs_dir = rootfs_dir[part.rootfs]
- elif part.rootfs:
- rootfs_dir = part.rootfs
+ if part.rootfs_dir in rootfs_dir:
+ rootfs_dir = rootfs_dir[part.rootfs_dir]
+ elif part.rootfs_dir:
+ rootfs_dir = part.rootfs_dir
else:
msg = "Couldn't find --rootfs-dir=%s connection "
msg += "or it is not a valid path, exiting.\n"
- msger.error(msg % part.rootfs)
+ msger.error(msg % part.rootfs_dir)
if not os.path.isdir(rootfs_dir):
rootfs_dir = get_bitbake_var("IMAGE_ROOTFS")
if not os.path.isdir(rootfs_dir):
msger.error("Couldn't find IMAGE_ROOTFS, exiting.\n")
- part.set_rootfs(rootfs_dir)
+ part.rootfs_dir = rootfs_dir
# Prepare rootfs.img
hdd_dir = get_bitbake_var("HDDDIR")
@@ -304,7 +305,7 @@ class IsoImagePlugin(SourcePlugin):
# which contains rootfs
du_cmd = "du -bks %s" % rootfs_dir
out = exec_cmd(du_cmd)
- part.set_size(int(out.split()[0]))
+ part.size = int(out.split()[0])
part.extra_space = 0
part.overhead_factor = 1.2
part.prepare_rootfs(cr_workdir, oe_builddir, rootfs_dir, \
@@ -504,8 +505,8 @@ class IsoImagePlugin(SourcePlugin):
out = exec_cmd(du_cmd)
isoimg_size = int(out.split()[0])
- part.set_size(isoimg_size)
- part.set_source_file(iso_img)
+ part.size = isoimg_size
+ part.source_file = iso_img
@classmethod
def do_install_disk(cls, disk, disk_name, creator, workdir, oe_builddir,
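
The initrd lookup above now globs for '<image_name>*<machine_arch>.<image_type>' and takes the first match instead of assuming one fixed file name. A small sketch of that lookup with an explicit guard for the no-match case (the directory and names are placeholders):

import glob

def find_initrd(initrd_dir, image_name, machine_arch, image_type):
    pattern = '%s/%s*%s.%s' % (initrd_dir, image_name, machine_arch, image_type)
    matches = glob.glob(pattern)
    if not matches:
        # Caller falls back to building an initrd from the rootfs directory.
        return None
    return matches[0]

initrd = find_initrd("/tmp/deploy/images/qemux86",
                     "core-image-minimal-initramfs", "qemux86", "cpio.gz")
print(initrd or "no prebuilt initrd found")
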
diff --git a/yocto-poky/scripts/lib/wic/plugins/source/rawcopy.py b/yocto-poky/scripts/lib/wic/plugins/source/rawcopy.py
index f0691baa9..0472f536b 100644
--- a/yocto-poky/scripts/lib/wic/plugins/source/rawcopy.py
+++ b/yocto-poky/scripts/lib/wic/plugins/source/rawcopy.py
@@ -67,13 +67,14 @@ class RawCopyPlugin(SourcePlugin):
return
src = os.path.join(bootimg_dir, source_params['file'])
- dst = src
+ dst = os.path.join(cr_workdir, source_params['file'])
if 'skip' in source_params:
- dst = os.path.join(cr_workdir, source_params['file'])
dd_cmd = "dd if=%s of=%s ibs=%s skip=1 conv=notrunc" % \
(src, dst, source_params['skip'])
- exec_cmd(dd_cmd)
+ else:
+ dd_cmd = "cp %s %s" % (src, dst)
+ exec_cmd(dd_cmd)
# get the size in the right units for kickstart (kB)
du_cmd = "du -Lbks %s" % dst
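
rawcopy now always writes its output into the image work directory: with a skip=N source parameter the leading blocks are dropped via dd, otherwise the file is copied as-is, and the size is then measured from the destination. A sketch of that branch using subprocess in place of wic's exec_cmd:

import os
import subprocess

def copy_raw_source(bootimg_dir, cr_workdir, source_params):
    src = os.path.join(bootimg_dir, source_params['file'])
    dst = os.path.join(cr_workdir, source_params['file'])
    if 'skip' in source_params:
        # Drop the leading blocks (e.g. an unwanted header) while copying.
        cmd = "dd if=%s of=%s ibs=%s skip=1 conv=notrunc" % (src, dst, source_params['skip'])
    else:
        cmd = "cp %s %s" % (src, dst)
    subprocess.check_call(cmd, shell=True)
    return dst
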
diff --git a/yocto-poky/scripts/lib/wic/plugins/source/rootfs.py b/yocto-poky/scripts/lib/wic/plugins/source/rootfs.py
index a90712b24..425da8b22 100644
--- a/yocto-poky/scripts/lib/wic/plugins/source/rootfs.py
+++ b/yocto-poky/scripts/lib/wic/plugins/source/rootfs.py
@@ -61,23 +61,23 @@ class RootfsPlugin(SourcePlugin):
'prepares' the partition to be incorporated into the image.
In this case, prepare content for legacy bios boot partition.
"""
- if part.rootfs is None:
+ if part.rootfs_dir is None:
if not 'ROOTFS_DIR' in krootfs_dir:
msg = "Couldn't find --rootfs-dir, exiting"
msger.error(msg)
rootfs_dir = krootfs_dir['ROOTFS_DIR']
else:
- if part.rootfs in krootfs_dir:
- rootfs_dir = krootfs_dir[part.rootfs]
- elif part.rootfs:
- rootfs_dir = part.rootfs
+ if part.rootfs_dir in krootfs_dir:
+ rootfs_dir = krootfs_dir[part.rootfs_dir]
+ elif part.rootfs_dir:
+ rootfs_dir = part.rootfs_dir
else:
msg = "Couldn't find --rootfs-dir=%s connection"
msg += " or it is not a valid path, exiting"
- msger.error(msg % part.rootfs)
+ msger.error(msg % part.rootfs_dir)
real_rootfs_dir = cls.__get_rootfs_dir(rootfs_dir)
- part.set_rootfs(real_rootfs_dir)
+ part.rootfs_dir = real_rootfs_dir
part.prepare_rootfs(cr_workdir, oe_builddir, real_rootfs_dir, native_sysroot)
diff --git a/yocto-poky/scripts/lib/wic/plugins/source/rootfs_pcbios_ext.py b/yocto-poky/scripts/lib/wic/plugins/source/rootfs_pcbios_ext.py
index 76e7b033f..3d60e6f0f 100644
--- a/yocto-poky/scripts/lib/wic/plugins/source/rootfs_pcbios_ext.py
+++ b/yocto-poky/scripts/lib/wic/plugins/source/rootfs_pcbios_ext.py
@@ -19,7 +19,6 @@
#
import os
-from wic import kickstart
from wic import msger
from wic.utils import syslinux
from wic.utils import runner
@@ -78,15 +77,12 @@ class RootfsPlugin(SourcePlugin):
Called before do_prepare_partition()
"""
- options = image_creator.ks.handler.bootloader.appendLine
+ bootloader = image_creator.ks.bootloader
syslinux_conf = ""
syslinux_conf += "PROMPT 0\n"
- timeout = kickstart.get_timeout(image_creator.ks)
- if not timeout:
- timeout = 0
- syslinux_conf += "TIMEOUT " + str(timeout) + "\n"
+ syslinux_conf += "TIMEOUT " + str(bootloader.timeout) + "\n"
syslinux_conf += "ALLOWOPTIONS 1\n"
# Derive SERIAL... line from from kernel boot parameters
@@ -97,7 +93,7 @@ class RootfsPlugin(SourcePlugin):
syslinux_conf += " KERNEL /boot/bzImage\n"
syslinux_conf += " APPEND label=boot root=%s %s\n" % \
- (image_creator.rootdev, options)
+ (image_creator.rootdev, bootloader.append)
syslinux_cfg = os.path.join(image_creator.rootfs_dir['ROOTFS_DIR'], "boot", "syslinux.cfg")
msger.debug("Writing syslinux config %s" % syslinux_cfg)
@@ -144,7 +140,7 @@ class RootfsPlugin(SourcePlugin):
real_rootfs_dir = cls._get_rootfs_dir(rootfs_dir)
- part.set_rootfs(real_rootfs_dir)
+ part.rootfs_dir = real_rootfs_dir
part.prepare_rootfs(image_creator_workdir, oe_builddir, real_rootfs_dir, native_sysroot)
# install syslinux into rootfs partition
diff --git a/yocto-poky/scripts/lib/wic/utils/misc.py b/yocto-poky/scripts/lib/wic/utils/misc.py
index 9d750694d..1415ae906 100644
--- a/yocto-poky/scripts/lib/wic/utils/misc.py
+++ b/yocto-poky/scripts/lib/wic/utils/misc.py
@@ -17,6 +17,7 @@
import os
import time
+import wic.engine
def build_name(kscfg, release=None, prefix=None, suffix=None):
"""Construct and return an image name string.
@@ -56,3 +57,39 @@ def build_name(kscfg, release=None, prefix=None, suffix=None):
ret = prefix + name + suffix
return ret
+
+def find_canned(scripts_path, file_name):
+ """
+ Find a file either by its path or by name in the canned files dir.
+
+ Return None if not found
+ """
+ if os.path.exists(file_name):
+ return file_name
+
+ layers_canned_wks_dir = wic.engine.build_canned_image_list(scripts_path)
+ for canned_wks_dir in layers_canned_wks_dir:
+ for root, dirs, files in os.walk(canned_wks_dir):
+ for fname in files:
+ if fname == file_name:
+ fullpath = os.path.join(canned_wks_dir, fname)
+ return fullpath
+
+def get_custom_config(boot_file):
+ """
+ Get the custom configuration to be used for the bootloader.
+
+ Return None if the file can't be found.
+ """
+ scripts_path = os.path.abspath(os.path.dirname(__file__))
+ # Get the scripts path of poky
+ for x in range(0, 3):
+ scripts_path = os.path.dirname(scripts_path)
+
+ cfg_file = find_canned(scripts_path, boot_file)
+ if cfg_file:
+ with open(cfg_file, "r") as f:
+ config = f.read()
+ return config
+
+ return None
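
find_canned() first accepts any path that already exists and only then searches the canned files directories by basename, while get_custom_config() walks up from this module to the scripts directory before calling it. A sketch of that search order over an assumed list of search directories (the directory-list argument replaces the call into wic.engine):

import os

def find_canned(search_dirs, file_name):
    # A path that already exists wins outright.
    if os.path.exists(file_name):
        return file_name
    # Otherwise look for a file with the same basename under the canned dirs.
    for canned_dir in search_dirs:
        for root, dirs, files in os.walk(canned_dir):
            if file_name in files:
                return os.path.join(root, file_name)
    return None

def get_custom_config(search_dirs, boot_file):
    cfg_file = find_canned(search_dirs, boot_file)
    if cfg_file:
        with open(cfg_file) as cfg:
            return cfg.read()
    return None
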
diff --git a/yocto-poky/scripts/lib/wic/utils/oe/misc.py b/yocto-poky/scripts/lib/wic/utils/oe/misc.py
index c6d2e5f20..81239ac35 100644
--- a/yocto-poky/scripts/lib/wic/utils/oe/misc.py
+++ b/yocto-poky/scripts/lib/wic/utils/oe/misc.py
@@ -89,7 +89,7 @@ def cmd_in_path(cmd, path):
return bb.utils.which(path, cmd) != "" or False
-def exec_native_cmd(cmd_and_args, native_sysroot, catch=3):
+def exec_native_cmd(cmd_and_args, native_sysroot, catch=3, pseudo=""):
"""
Execute native command, catching stderr, stdout
@@ -97,6 +97,12 @@ def exec_native_cmd(cmd_and_args, native_sysroot, catch=3):
Always need to execute native commands as_shell
"""
+ # The reason -1 is used is because there may be "export" commands.
+ args = cmd_and_args.split(';')[-1].split()
+ msger.debug(args)
+
+ if pseudo:
+ cmd_and_args = pseudo + cmd_and_args
native_paths = \
"%s/sbin:%s/usr/sbin:%s/usr/bin" % \
(native_sysroot, native_sysroot, native_sysroot)
@@ -104,18 +110,16 @@ def exec_native_cmd(cmd_and_args, native_sysroot, catch=3):
(native_paths, cmd_and_args)
msger.debug("exec_native_cmd: %s" % cmd_and_args)
- # The reason -1 is used is because there may be "export" commands.
- args = cmd_and_args.split(';')[-1].split()
- msger.debug(args)
-
# If the command isn't in the native sysroot say we failed.
if cmd_in_path(args[0], native_paths):
ret, out = _exec_cmd(native_cmd_and_args, True, catch)
else:
ret = 127
- if ret == 127: # shell command-not-found
- prog = args[0]
+ prog = args[0]
+ # shell command-not-found
+ if ret == 127 \
+ or (pseudo and ret == 1 and out == "Can't find '%s' in $PATH." % prog):
msg = "A native program %s required to build the image "\
"was not found (see details above).\n\n" % prog
recipe = NATIVE_RECIPES.get(prog)
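
exec_native_cmd() now receives the pseudo wrapper as a keyword argument and prepends it once, instead of every mkfs/mksquashfs call site concatenating 'pseudo + cmd'; the command is still split on ';' so a leading 'export ...;' does not confuse the missing-native-tool check. A trimmed sketch of that argument handling (the actual PATH-wrapping and error reporting are left out):

def exec_native_cmd(cmd_and_args, native_sysroot, pseudo=""):
    # The last ';'-separated component is the real command; earlier ones may be exports.
    args = cmd_and_args.split(';')[-1].split()
    prog = args[0]
    if pseudo:
        # Prepend the pseudo wrapper here, once, rather than at every call site.
        cmd_and_args = pseudo + cmd_and_args
    native_paths = "%s/sbin:%s/usr/sbin:%s/usr/bin" % (
        native_sysroot, native_sysroot, native_sysroot)
    return prog, native_paths, cmd_and_args

print(exec_native_cmd("mkfs.ext4 -F wic.img ./rootfs", "/tmp/native-sysroot",
                      pseudo="pseudo -P /tmp/pseudo "))
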
diff --git a/yocto-poky/scripts/lib/wic/utils/partitionedfs.py b/yocto-poky/scripts/lib/wic/utils/partitionedfs.py
index 5a103bbc7..ad596d26f 100644
--- a/yocto-poky/scripts/lib/wic/utils/partitionedfs.py
+++ b/yocto-poky/scripts/lib/wic/utils/partitionedfs.py
@@ -96,25 +96,23 @@ class Image(object):
# Converting kB to sectors for parted
size = size * 1024 / self.sector_size
- # We still need partition for "/" or non-subvolume
- if mountpoint == "/" or not fsopts:
- part = {'ks_pnum': ks_pnum, # Partition number in the KS file
- 'size': size, # In sectors
- 'mountpoint': mountpoint, # Mount relative to chroot
- 'source_file': source_file, # partition contents
- 'fstype': fstype, # Filesystem type
- 'fsopts': fsopts, # Filesystem mount options
- 'label': label, # Partition label
- 'disk_name': disk_name, # physical disk name holding partition
- 'device': None, # kpartx device node for partition
- 'num': None, # Partition number
- 'boot': boot, # Bootable flag
- 'align': align, # Partition alignment
- 'no_table' : no_table, # Partition does not appear in partition table
- 'part_type' : part_type, # Partition type
- 'uuid': uuid} # Partition UUID
-
- self.__add_partition(part)
+ part = {'ks_pnum': ks_pnum, # Partition number in the KS file
+ 'size': size, # In sectors
+ 'mountpoint': mountpoint, # Mount relative to chroot
+ 'source_file': source_file, # partition contents
+ 'fstype': fstype, # Filesystem type
+ 'fsopts': fsopts, # Filesystem mount options
+ 'label': label, # Partition label
+ 'disk_name': disk_name, # physical disk name holding partition
+ 'device': None, # kpartx device node for partition
+ 'num': None, # Partition number
+ 'boot': boot, # Bootable flag
+ 'align': align, # Partition alignment
+ 'no_table' : no_table, # Partition does not appear in partition table
+ 'part_type' : part_type, # Partition type
+ 'uuid': uuid} # Partition UUID
+
+ self.__add_partition(part)
def layout_partitions(self, ptable_format="msdos"):
""" Layout the partitions, meaning calculate the position of every
diff --git a/yocto-poky/scripts/oe-buildenv-internal b/yocto-poky/scripts/oe-buildenv-internal
index 9ed272153..e04db0398 100755
--- a/yocto-poky/scripts/oe-buildenv-internal
+++ b/yocto-poky/scripts/oe-buildenv-internal
@@ -24,7 +24,7 @@ if [ -z "$OEROOT" ]; then
return 1
fi
-if [ -z "$OE_SKIP_SDK_CHECK" -a ! -z "$OECORE_SDK_VERSION" ]; then
+if [ -z "$OE_SKIP_SDK_CHECK" ] && [ -n "$OECORE_SDK_VERSION" ]; then
echo >&2 "Error: The OE SDK/ADT was detected as already being present in this shell environment. Please use a clean shell when sourcing this environment script."
return 1
fi
@@ -33,24 +33,26 @@ fi
# sanity.bbclass because bitbake's source code doesn't even pass
# parsing stage when used with python v3, so we catch it here so we
# can offer a meaningful error message.
-py_v3_check=`/usr/bin/env python --version 2>&1 | grep "Python 3"`
-if [ "$py_v3_check" != "" ]; then
- echo >&2 "Bitbake is not compatible with python v3"
- echo >&2 "Please set up python v2 as your default python interpreter"
- return 1
+py_v3_check=$(/usr/bin/env python --version 2>&1 | grep "Python 3")
+if [ -n "$py_v3_check" ]; then
+ echo >&2 "Bitbake is not compatible with python v3"
+ echo >&2 "Please set up python v2 as your default python interpreter"
+ return 1
fi
+unset py_v3_check
# Similarly, we now have code that doesn't parse correctly with older
# versions of Python, and rather than fixing that and being eternally
# vigilant for any other new feature use, just check the version here.
-py_v26_check=`python -c 'import sys; print sys.version_info >= (2,7,3)'`
+py_v26_check=$(python -c 'import sys; print sys.version_info >= (2,7,3)')
if [ "$py_v26_check" != "True" ]; then
- echo >&2 "BitBake requires Python 2.7.3 or later"
- return 1
+ echo >&2 "BitBake requires Python 2.7.3 or later"
+ return 1
fi
+unset py_v26_check
-if [ "x$BDIR" = "x" ]; then
- if [ "x$1" = "x" ]; then
+if [ -z "$BDIR" ]; then
+ if [ -z "$1" ]; then
BDIR="build"
else
BDIR="$1"
@@ -62,48 +64,58 @@ if [ "x$BDIR" = "x" ]; then
# Remove any possible trailing slashes. This is used to work around
# buggy readlink in Ubuntu 10.04 that doesn't ignore trailing slashes
# and hence "readlink -f new_dir_to_be_created/" returns empty.
- BDIR=`echo $BDIR | sed -re 's|/+$||'`
+ BDIR=$(echo $BDIR | sed -re 's|/+$||')
- BDIR=`readlink -f "$BDIR"`
+ BDIR=$(readlink -f "$BDIR")
if [ -z "$BDIR" ]; then
- PARENTDIR=`dirname "$1"`
+ PARENTDIR=$(dirname "$1")
echo >&2 "Error: the directory $PARENTDIR does not exist?"
return 1
fi
fi
- if [ "x$2" != "x" ]; then
+ if [ -n "$2" ]; then
BITBAKEDIR="$2"
fi
fi
-if expr "$BDIR" : '/.*' > /dev/null ; then
+if [ "${BDIR#/}" != "$BDIR" ]; then
BUILDDIR="$BDIR"
else
- BUILDDIR="`pwd`/$BDIR"
+ BUILDDIR="$(pwd)/$BDIR"
fi
unset BDIR
-if [ "x$BITBAKEDIR" = "x" ]; then
- BITBAKEDIR="$OEROOT/bitbake$BBEXTRA/"
+if [ -z "$BITBAKEDIR" ]; then
+ BITBAKEDIR="$OEROOT/bitbake$BBEXTRA"
fi
-BITBAKEDIR=`readlink -f "$BITBAKEDIR"`
-BUILDDIR=`readlink -f "$BUILDDIR"`
+BITBAKEDIR=$(readlink -f "$BITBAKEDIR")
+BUILDDIR=$(readlink -f "$BUILDDIR")
-if ! (test -d "$BITBAKEDIR"); then
+if [ ! -d "$BITBAKEDIR" ]; then
echo >&2 "Error: The bitbake directory ($BITBAKEDIR) does not exist! Please ensure a copy of bitbake exists at this location"
return 1
fi
# Make sure our paths are at the beginning of $PATH
-NEWPATHS="${OEROOT}/scripts:$BITBAKEDIR/bin:"
-PATH=$NEWPATHS$(echo $PATH | sed -e "s|:$NEWPATHS|:|g" -e "s|^$NEWPATHS||")
-unset BITBAKEDIR NEWPATHS
+for newpath in "$BITBAKEDIR/bin" "$OEROOT/scripts"; do
+ # Remove any existences of $newpath from $PATH
+ PATH=$(echo $PATH | sed -re "s#(^|:)$newpath(:|$)#\2#g;s#^:##")
+
+ # Add $newpath to $PATH
+ PATH="$newpath:$PATH"
+done
+unset BITBAKEDIR newpath
# Used by the runqemu script
export BUILDDIR
export PATH
-export BB_ENV_EXTRAWHITE="MACHINE DISTRO TCMODE TCLIBC HTTP_PROXY http_proxy \
+
+BB_ENV_EXTRAWHITE_OE="MACHINE DISTRO TCMODE TCLIBC HTTP_PROXY http_proxy \
HTTPS_PROXY https_proxy FTP_PROXY ftp_proxy FTPS_PROXY ftps_proxy ALL_PROXY \
all_proxy NO_PROXY no_proxy SSH_AGENT_PID SSH_AUTH_SOCK BB_SRCREV_POLICY \
SDKMACHINE BB_NUMBER_THREADS BB_NO_NETWORK PARALLEL_MAKE GIT_PROXY_COMMAND \
SOCKS5_PASSWD SOCKS5_USER SCREENDIR STAMPS_DIR"
+
+BB_ENV_EXTRAWHITE="$(echo $BB_ENV_EXTRAWHITE $BB_ENV_EXTRAWHITE_OE | tr ' ' '\n' | LC_ALL=C sort --unique | tr '\n' ' ')"
+
+export BB_ENV_EXTRAWHITE
diff --git a/yocto-poky/scripts/oe-git-proxy b/yocto-poky/scripts/oe-git-proxy
index d2e9f925b..124790240 100755
--- a/yocto-poky/scripts/oe-git-proxy
+++ b/yocto-poky/scripts/oe-git-proxy
@@ -1,10 +1,12 @@
#!/bin/bash
# oe-git-proxy is a simple tool to be used via GIT_PROXY_COMMAND. It uses socat
-# to make SOCKS5 or HTTPS proxy connections. It uses ALL_PROXY to determine the
-# proxy server, protocol, and port. It uses NO_PROXY to skip using the proxy for
-# a comma delimited list of hosts, host globs (*.example.com), IPs, or CIDR
-# masks (192.168.1.0/24). It is known to work with both bash and dash shells.
+# to make SOCKS5 or HTTPS proxy connections.
+# It uses ALL_PROXY or all_proxy or http_proxy to determine the proxy server,
+# protocol, and port.
+# It uses NO_PROXY to skip using the proxy for a comma delimited list of
+# hosts, host globs (*.example.com), IPs, or CIDR masks (192.168.1.0/24). It
+# is known to work with both bash and dash shells.
#
# Example ALL_PROXY values:
# ALL_PROXY=socks://socks.example.com:1080
@@ -99,6 +101,9 @@ match_host() {
# If no proxy is set or needed, just connect directly
METHOD="TCP:$1:$2"
+[ -z "${ALL_PROXY}" ] && ALL_PROXY=$all_proxy
+[ -z "${ALL_PROXY}" ] && ALL_PROXY=$http_proxy
+
if [ -z "$ALL_PROXY" ]; then
exec $SOCAT STDIO $METHOD
fi
@@ -111,14 +116,27 @@ for H in ${NO_PROXY//,/ }; do
done
# Proxy is necessary, determine protocol, server, and port
-PROTO=$(echo $ALL_PROXY | sed -e 's/\([^:]*\):\/\/.*/\1/')
-PROXY=$(echo $ALL_PROXY | sed -e 's/.*:\/\/\([^:]*\).*/\1/')
-# For backwards compatibility, this allows the port number to be followed by /?
-# in addition to the customary optional /
-PORT=$(echo $ALL_PROXY | sed -e 's/.*:\([0-9]*\)\(\/?\?\)\?$/\1/')
-if [ "$PORT" = "$ALL_PROXY" ]; then
+# extract protocol
+PROTO=${ALL_PROXY%://*}
+# strip protocol:// from string
+ALL_PROXY=${ALL_PROXY#*://}
+# extract host & port parts:
+# 1) drop username/password
+PROXY=${ALL_PROXY##*@}
+# 2) remove optional trailing /?
+PROXY=${PROXY%%/*}
+# 3) extract optional port
+PORT=${PROXY##*:}
+if [ "$PORT" = "$PROXY" ]; then
PORT=""
fi
+# 4) remove port
+PROXY=${PROXY%%:*}
+
+# extract username & password
+PROXYAUTH="${ALL_PROXY%@*}"
+[ "$PROXYAUTH" = "$ALL_PROXY" ] && PROXYAUTH=
+[ -n "${PROXYAUTH}" ] && PROXYAUTH=",proxyauth=${PROXYAUTH}"
if [ "$PROTO" = "socks" ] || [ "$PROTO" = "socks4a" ]; then
if [ -z "$PORT" ]; then
@@ -135,7 +153,7 @@ else
if [ -z "$PORT" ]; then
PORT="8080"
fi
- METHOD="PROXY:$PROXY:$1:$2,proxyport=$PORT"
+ METHOD="PROXY:$PROXY:$1:$2,proxyport=${PORT}${PROXYAUTH}"
fi
-exec $SOCAT STDIO $METHOD
+exec $SOCAT STDIO "$METHOD"
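
The shell parameter expansions above break ALL_PROXY into protocol, optional user:password, host and optional port, and pass any credentials to socat as proxyauth=. The same decomposition re-expressed in Python purely as an illustration (the script itself remains plain POSIX shell):

def split_proxy(all_proxy):
    # e.g. "https://user:secret@proxy.example.com:8080/"
    proto, _, rest = all_proxy.partition("://")
    auth, _, hostport = rest.rpartition("@")   # auth is empty if no credentials
    hostport = hostport.split("/")[0]          # drop a trailing / or /?
    host, _, port = hostport.partition(":")
    return proto, auth, host, port or None

print(split_proxy("https://user:secret@proxy.example.com:8080/"))
print(split_proxy("socks://socks.example.com:1080"))
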
diff --git a/yocto-poky/scripts/oe-pkgdata-util b/yocto-poky/scripts/oe-pkgdata-util
index cb19cc4ae..a04e44d35 100755
--- a/yocto-poky/scripts/oe-pkgdata-util
+++ b/yocto-poky/scripts/oe-pkgdata-util
@@ -33,6 +33,7 @@ scripts_path = os.path.dirname(os.path.realpath(__file__))
lib_path = scripts_path + '/lib'
sys.path = sys.path + [lib_path]
import scriptutils
+import argparse_oe
logger = scriptutils.logger_create('pkgdatautil')
def tinfoil_init():
@@ -160,8 +161,18 @@ def glob(args):
def read_value(args):
# Handle both multiple arguments and multiple values within an arg (old syntax)
packages = []
- for pkgitem in args.pkg:
- packages.extend(pkgitem.split())
+ if args.file:
+ with open(args.file, 'r') as f:
+ for line in f:
+ splitline = line.split()
+ if splitline:
+ packages.append(splitline[0])
+ else:
+ for pkgitem in args.pkg:
+ packages.extend(pkgitem.split())
+ if not packages:
+ logger.error("No packages specified")
+ sys.exit(1)
def readvar(pkgdata_file, valuename):
val = ""
@@ -186,9 +197,13 @@ def read_value(args):
qvar = "%s_%s" % (args.valuename, mappedpkg)
# PKGSIZE is now in bytes, but we want it in KB
pkgsize = (int(readvar(revlink, qvar)) + 1024 // 2) // 1024
- print("%d" % pkgsize)
+ value = "%d" % pkgsize
+ else:
+ value = readvar(revlink, qvar)
+ if args.prefix_name:
+ print('%s %s' % (pkg_name, value))
else:
- print(readvar(revlink, qvar))
+ print(value)
def lookup_pkglist(pkgs, pkgdata_dir, reverse):
if reverse:
@@ -365,7 +380,7 @@ def list_pkg_files(args):
sys.exit(1)
pkglist = args.pkg
- for pkg in pkglist:
+ for pkg in sorted(pkglist):
print("%s:" % pkg)
if args.runtime:
pkgdatafile = os.path.join(args.pkgdata_dir, "runtime-reverse", pkg)
@@ -417,8 +432,8 @@ def find_path(args):
def main():
- parser = argparse.ArgumentParser(description="OpenEmbedded pkgdata tool - queries the pkgdata files written out during do_package",
- epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
+ parser = argparse_oe.ArgumentParser(description="OpenEmbedded pkgdata tool - queries the pkgdata files written out during do_package",
+ epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
parser.add_argument('-p', '--pkgdata-dir', help='Path to pkgdata directory (determined automatically if not specified)')
subparsers = parser.add_subparsers(title='subcommands', metavar='<subcommand>')
@@ -464,7 +479,9 @@ def main():
help='Read any pkgdata value for one or more packages',
description='Reads the named value from the pkgdata files for the specified packages')
parser_read_value.add_argument('valuename', help='Name of the value to look up')
- parser_read_value.add_argument('pkg', nargs='+', help='Runtime package name to look up')
+ parser_read_value.add_argument('pkg', nargs='*', help='Runtime package name to look up')
+ parser_read_value.add_argument('-f', '--file', help='Read package names from the specified file (one per line, first field only)')
+ parser_read_value.add_argument('-n', '--prefix-name', help='Prefix output with package name', action='store_true')
parser_read_value.set_defaults(func=read_value)
parser_glob = subparsers.add_parser('glob',
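
read-value now accepts -f/--file as an alternative to positional package names: the file is read one package per line and only the first whitespace-separated field is kept, so output from other tools can be fed in unmodified. A sketch of that selection logic (collect_packages is an illustrative name, not the script's own function):

def collect_packages(pkg_args, pkg_file=None):
    packages = []
    if pkg_file:
        with open(pkg_file) as listing:
            for line in listing:
                fields = line.split()
                if fields:
                    packages.append(fields[0])   # first field only
    else:
        for item in pkg_args:
            packages.extend(item.split())        # old syntax: several names in one argument
    if not packages:
        raise SystemExit("No packages specified")
    return packages

print(collect_packages(["base-files busybox"]))
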
diff --git a/yocto-poky/scripts/oe-publish-sdk b/yocto-poky/scripts/oe-publish-sdk
index ee4169a00..55872f2f9 100755
--- a/yocto-poky/scripts/oe-publish-sdk
+++ b/yocto-poky/scripts/oe-publish-sdk
@@ -2,13 +2,20 @@
# OpenEmbedded SDK publishing tool
-# oe-publish-sdk publish <ext-sdk> <destination>
-# <ext-sdk>: extensible SDK to publish (path to the installer shell script)
-# <destination>: local or remote location which servers as an SDK update server
-# e.g.
-# oe-publish-sdk /path/to/sdk-ext.sh /mnt/poky/sdk-ext
-# oe-publish-sdk /path/to/sdk-ext.sh user@host:/opt/poky/sdk-ext
+# Copyright (C) 2015-2016 Intel Corporation
#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import sys
import os
@@ -24,6 +31,7 @@ scripts_path = os.path.dirname(os.path.realpath(__file__))
lib_path = scripts_path + '/lib'
sys.path = sys.path + [lib_path]
import scriptutils
+import argparse_oe
logger = scriptutils.logger_create('sdktool')
def mkdir(d):
@@ -42,7 +50,10 @@ def publish(args):
# Ensure the SDK exists
if not os.path.exists(target_sdk):
- logger.error("%s doesn't exist" % target_sdk)
+ logger.error("Specified SDK %s doesn't exist" % target_sdk)
+ return -1
+ if os.path.isdir(target_sdk):
+ logger.error("%s is a directory - expected path to SDK installer file" % target_sdk)
return -1
if ':' in destination:
@@ -52,6 +63,7 @@ def publish(args):
else:
is_remote = False
dest_sdk = os.path.join(destination, sdk_basename)
+ destdir = destination
# Making sure the directory exists
logger.debug("Making sure the destination directory exists")
@@ -83,15 +95,16 @@ def publish(args):
# Unpack the SDK
logger.info("Unpacking SDK")
if not is_remote:
- cmd = "sh %s -n -y -d %s" % (dest_sdk, destination)
+ cmd = "sh %s -p -y -d %s" % (dest_sdk, destination)
ret = subprocess.call(cmd, shell=True)
if ret == 0:
logger.info('Successfully unpacked %s to %s' % (dest_sdk, destination))
+ os.remove(dest_sdk)
else:
logger.error('Failed to unpack %s to %s' % (dest_sdk, destination))
return ret
else:
- cmd = "ssh %s 'sh %s -n -y -d %s'" % (host, dest_sdk, destdir)
+ cmd = "ssh %s 'sh %s -p -y -d %s && rm -f %s'" % (host, dest_sdk, destdir, dest_sdk)
ret = subprocess.call(cmd, shell=True)
if ret == 0:
logger.info('Successfully unpacked %s to %s' % (dest_sdk, destdir))
@@ -101,9 +114,9 @@ def publish(args):
# Setting up the git repo
if not is_remote:
- cmd = 'set -e; mkdir -p %s/layers; cd %s/layers; if [ ! -e .git ]; then git init .; mv .git/hooks/post-update.sample .git/hooks/post-update; fi; git add -A .; git commit -q -m "init repo" || true; git update-server-info' % (destination, destination)
+ cmd = 'set -e; mkdir -p %s/layers; cd %s/layers; if [ ! -e .git ]; then git init .; mv .git/hooks/post-update.sample .git/hooks/post-update; echo "*.pyc\n*.pyo" > .gitignore; fi; git add -A .; git config user.email "oe@oe.oe" && git config user.name "OE" && git commit -q -m "init repo" || true; git update-server-info' % (destination, destination)
else:
- cmd = "ssh %s 'set -e; mkdir-p %s/layers; cd %s/layers; if [ ! -e .git ]; then git init .; mv .git/hooks/post-update.sample .git/hooks/post-update; fi; git add -A .; git commit -q -m \"init repo\" || true; git update-server-info'" % (host, destdir, destdir)
+ cmd = "ssh %s 'set -e; mkdir -p %s/layers; cd %s/layers; if [ ! -e .git ]; then git init .; mv .git/hooks/post-update.sample .git/hooks/post-update; echo '*.pyc\n*.pyo' > .gitignore; fi; git add -A .; git config user.email 'oe@oe.oe' && git config user.name 'OE' && git commit -q -m \"init repo\" || true; git update-server-info'" % (host, destdir, destdir)
ret = subprocess.call(cmd, shell=True)
if ret == 0:
logger.info('SDK published successfully')
@@ -113,13 +126,12 @@ def publish(args):
def main():
- parser = argparse.ArgumentParser(description="OpenEmbedded development tool",
- epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
+ parser = argparse_oe.ArgumentParser(description="OpenEmbedded extensible SDK publishing tool - writes server-side data to support the extensible SDK update process to a specified location")
parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true')
- parser.add_argument('sdk', help='Extensible SDK to publish')
- parser.add_argument('dest', help='Destination to publish SDK to')
+ parser.add_argument('sdk', help='Extensible SDK to publish (path to .sh installer file)')
+ parser.add_argument('dest', help='Destination to publish SDK to; can be local path or remote in the form of user@host:/path (in the latter case ssh/scp will be used).')
parser.set_defaults(func=publish)
@@ -139,5 +151,5 @@ if __name__ == "__main__":
except Exception:
ret = 1
import traceback
- traceback.print_exc(5)
+ traceback.print_exc()
sys.exit(ret)
diff --git a/yocto-poky/scripts/oe-selftest b/yocto-poky/scripts/oe-selftest
index 91e2dd282..5e23ef003 100755
--- a/yocto-poky/scripts/oe-selftest
+++ b/yocto-poky/scripts/oe-selftest
@@ -16,13 +16,13 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# DESCRIPTION
-# This script runs tests defined in meta/lib/selftest/
+# This script runs tests defined in meta/lib/oeqa/selftest/
# Its purpose is to automate the testing of different bitbake tools.
# To use it you just need to source your build environment setup script and
# add the meta-selftest layer to your BBLAYERS.
-# Call the script as: "oe-selftest" to run all the tests in in meta/lib/selftest/
-# Call the script as: "oe-selftest <module>.<Class>.<method>" to run just a single test
-# E.g: "oe-selftest bboutput.BitbakeLayers" will run just the BitbakeLayers class from meta/lib/selftest/bboutput.py
+# Call the script as: "oe-selftest -a" to run all the tests in meta/lib/oeqa/selftest/
+# Call the script as: "oe-selftest -r <module>.<Class>.<method>" to run just a single test
+# E.g: "oe-selftest -r bblayers.BitbakeLayers" will run just the BitbakeLayers class from meta/lib/oeqa/selftest/bblayers.py
import os
@@ -30,22 +30,31 @@ import sys
import unittest
import logging
import argparse
+import subprocess
+import time as t
+import re
+import fnmatch
sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)) + '/lib')
import scriptpath
scriptpath.add_bitbake_lib_path()
scriptpath.add_oe_lib_path()
+import argparse_oe
import oeqa.selftest
import oeqa.utils.ftools as ftools
from oeqa.utils.commands import runCmd, get_bb_var, get_test_layer
-from oeqa.selftest.base import oeSelfTest
+from oeqa.selftest.base import oeSelfTest, get_available_machines
def logger_create():
+ log_file = "oe-selftest-" + t.strftime("%Y-%m-%d_%H:%M:%S") + ".log"
+ if os.path.exists("oe-selftest.log"): os.remove("oe-selftest.log")
+ os.symlink(log_file, "oe-selftest.log")
+
log = logging.getLogger("selftest")
log.setLevel(logging.DEBUG)
- fh = logging.FileHandler(filename='oe-selftest.log', mode='w')
+ fh = logging.FileHandler(filename=log_file, mode='w')
fh.setLevel(logging.DEBUG)
ch = logging.StreamHandler(sys.stdout)
@@ -64,12 +73,26 @@ log = logger_create()
def get_args_parser():
description = "Script that runs unit tests against bitbake and other Yocto related tools. The goal is to validate tools functionality and metadata integrity. Refer to https://wiki.yoctoproject.org/wiki/Oe-selftest for more information."
- parser = argparse.ArgumentParser(description=description)
+ parser = argparse_oe.ArgumentParser(description=description)
group = parser.add_mutually_exclusive_group(required=True)
- group.add_argument('--run-tests', required=False, action='store', nargs='*', dest="run_tests", default=None, help='Select what tests to run (modules, classes or test methods). Format should be: <module>.<class>.<test_method>')
- group.add_argument('--run-all-tests', required=False, action="store_true", dest="run_all_tests", default=False, help='Run all (unhidden) tests')
- group.add_argument('--list-modules', required=False, action="store_true", dest="list_modules", default=False, help='List all available test modules.')
+ group.add_argument('-r', '--run-tests', required=False, action='store', nargs='*', dest="run_tests", default=None, help='Select what tests to run (modules, classes or test methods). Format should be: <module>.<class>.<test_method>')
+ group.add_argument('-a', '--run-all-tests', required=False, action="store_true", dest="run_all_tests", default=False, help='Run all (unhidden) tests')
+ group.add_argument('-m', '--list-modules', required=False, action="store_true", dest="list_modules", default=False, help='List all available test modules.')
group.add_argument('--list-classes', required=False, action="store_true", dest="list_allclasses", default=False, help='List all available test classes.')
+ parser.add_argument('--coverage', action="store_true", help="Run code coverage when testing")
+ parser.add_argument('--coverage-source', dest="coverage_source", nargs="+", help="Specify the directories to take coverage from")
+ parser.add_argument('--coverage-include', dest="coverage_include", nargs="+", help="Specify extra patterns to include into the coverage measurement")
+ parser.add_argument('--coverage-omit', dest="coverage_omit", nargs="+", help="Specify extra patterns to exclude from the coverage measurement")
+ group.add_argument('--run-tests-by', required=False, dest='run_tests_by', default=False, nargs='*',
+ help='run-tests-by <name|class|module|id|tag> <list of tests|classes|modules|ids|tags>')
+ group.add_argument('--list-tests-by', required=False, dest='list_tests_by', default=False, nargs='*',
+ help='list-tests-by <name|class|module|id|tag> <list of tests|classes|modules|ids|tags>')
+ group.add_argument('-l', '--list-tests', required=False, action="store_true", dest="list_tests", default=False,
+ help='List all available tests.')
+ group.add_argument('--list-tags', required=False, dest='list_tags', default=False, action="store_true",
+ help='List all tags that have been set to test cases.')
+ parser.add_argument('--machine', required=False, dest='machine', choices=['random', 'all'], default=None,
+ help='Run tests on different machines (random/all).')
return parser
@@ -101,7 +124,7 @@ def add_include():
not in ftools.read_file(os.path.join(builddir, "conf/local.conf")):
log.info("Adding: \"include selftest.inc\" in local.conf")
ftools.append_file(os.path.join(builddir, "conf/local.conf"), \
- "\n#include added by oe-selftest.py\ninclude selftest.inc")
+ "\n#include added by oe-selftest.py\ninclude machine.inc\ninclude selftest.inc")
if "#include added by oe-selftest.py" \
not in ftools.read_file(os.path.join(builddir, "conf/bblayers.conf")):
@@ -117,13 +140,13 @@ def remove_include():
in ftools.read_file(os.path.join(builddir, "conf/local.conf")):
log.info("Removing the include from local.conf")
ftools.remove_from_file(os.path.join(builddir, "conf/local.conf"), \
- "#include added by oe-selftest.py\ninclude selftest.inc")
+ "\n#include added by oe-selftest.py\ninclude machine.inc\ninclude selftest.inc")
if "#include added by oe-selftest.py" \
in ftools.read_file(os.path.join(builddir, "conf/bblayers.conf")):
log.info("Removing the include from bblayers.conf")
ftools.remove_from_file(os.path.join(builddir, "conf/bblayers.conf"), \
- "#include added by oe-selftest.py\ninclude bblayers.inc")
+ "\n#include added by oe-selftest.py\ninclude bblayers.inc")
def remove_inc_files():
try:
@@ -135,24 +158,281 @@ def remove_inc_files():
except (AttributeError, OSError,) as e: # AttributeError may happen if BUILDDIR is not set
pass
+ for incl_file in ['conf/bblayers.inc', 'conf/machine.inc']:
+ try:
+ os.remove(os.path.join(os.environ.get("BUILDDIR"), incl_file))
+ except:
+ pass
+
+
+def get_tests_modules(include_hidden=False):
+ modules_list = list()
+ for modules_path in oeqa.selftest.__path__:
+ for (p, d, f) in os.walk(modules_path):
+ files = sorted([f for f in os.listdir(p) if f.endswith('.py') and not (f.startswith('_') and not include_hidden) and not f.startswith('__') and f != 'base.py'])
+ for f in files:
+ submodules = p.split("selftest")[-1]
+ module = ""
+ if submodules:
+ module = 'oeqa.selftest' + submodules.replace("/",".") + "." + f.split('.py')[0]
+ else:
+ module = 'oeqa.selftest.' + f.split('.py')[0]
+ if module not in modules_list:
+ modules_list.append(module)
+ return modules_list
+
+
+def get_tests(exclusive_modules=[], include_hidden=False):
+ test_modules = list()
+ for x in exclusive_modules:
+ test_modules.append('oeqa.selftest.' + x)
+ if not test_modules:
+ inc_hidden = include_hidden
+ test_modules = get_tests_modules(inc_hidden)
+
+ return test_modules
+
+
+class Tc:
+ def __init__(self, tcname, tcclass, tcmodule, tcid=None, tctag=None):
+ self.tcname = tcname
+ self.tcclass = tcclass
+ self.tcmodule = tcmodule
+ self.tcid = tcid
+ # A test case can have multiple tags (as a tuple); otherwise a plain str will suffice
+ self.tctag = tctag
+ self.fullpath = '.'.join(['oeqa', 'selftest', tcmodule, tcclass, tcname])
+
+
+def get_tests_from_module(tmod):
+ tlist = []
+ prefix = 'oeqa.selftest.'
+
try:
- os.remove(os.path.join(os.environ.get("BUILDDIR"), "conf/bblayers.inc"))
+ import importlib
+ modlib = importlib.import_module(tmod)
+ for mod in vars(modlib).values():
+ if isinstance(mod, type(oeSelfTest)) and issubclass(mod, oeSelfTest) and mod is not oeSelfTest:
+ for test in dir(mod):
+ if test.startswith('test_') and hasattr(vars(mod)[test], '__call__'):
+ # Get test case id and feature tag
+ # NOTE: if testcase decorator or feature tag not set will throw error
+ try:
+ tid = vars(mod)[test].test_case
+ except:
+ print 'DEBUG: tc id missing for ' + str(test)
+ tid = None
+ try:
+ ttag = vars(mod)[test].tag__feature
+ except:
+ # print 'DEBUG: feature tag missing for ' + str(test)
+ ttag = None
+
+ # NOTE: for some reason lstrip() doesn't work for mod.__module__
+ tlist.append(Tc(test, mod.__name__, mod.__module__.replace(prefix, ''), tid, ttag))
except:
pass
-def get_tests(exclusive_modules=[], include_hidden=False):
- testslist = []
- for x in exclusive_modules:
- testslist.append('oeqa.selftest.' + x)
- if not testslist:
- for testpath in oeqa.selftest.__path__:
- files = sorted([f for f in os.listdir(testpath) if f.endswith('.py') and not (f.startswith('_') and not include_hidden) and not f.startswith('__') and f != 'base.py'])
- for f in files:
- module = 'oeqa.selftest.' + f[:-3]
- if module not in testslist:
- testslist.append(module)
+ return tlist
+
+
+def get_all_tests():
+ # Get all the test modules (except the hidden ones)
+ testlist = []
+ tests_modules = get_tests_modules()
+ # Get all the tests from modules
+ for tmod in sorted(tests_modules):
+ testlist += get_tests_from_module(tmod)
+ return testlist
+
+
+def get_testsuite_by(criteria, keyword):
+ # Get a testsuite based on 'keyword'
+ # criteria: name, class, module, id, tag
+ # keyword: a list of tests, classes, modules, ids, tags
+
+ ts = []
+ all_tests = get_all_tests()
+
+ def get_matches(values):
+ # Get an item and return the ones that match with keyword(s)
+ # values: the list of items (names, modules, classes...)
+ result = []
+ remaining = values[:]
+ for key in keyword:
+ if key in remaining:
+ # Regular matching of exact item
+ result.append(key)
+ remaining.remove(key)
+ else:
+ # Wildcard matching
+ pattern = re.compile(fnmatch.translate(r"%s" % key))
+ added = [x for x in remaining if pattern.match(x)]
+ result.extend(added)
+ remaining = [x for x in remaining if x not in added]
+
+ return result
+
+ if criteria == 'name':
+ names = get_matches([ tc.tcname for tc in all_tests ])
+ ts = [ tc for tc in all_tests if tc.tcname in names ]
+
+ elif criteria == 'class':
+ classes = get_matches([ tc.tcclass for tc in all_tests ])
+ ts = [ tc for tc in all_tests if tc.tcclass in classes ]
+
+ elif criteria == 'module':
+ modules = get_matches([ tc.tcmodule for tc in all_tests ])
+ ts = [ tc for tc in all_tests if tc.tcmodule in modules ]
+
+ elif criteria == 'id':
+ ids = get_matches([ str(tc.tcid) for tc in all_tests ])
+ ts = [ tc for tc in all_tests if str(tc.tcid) in ids ]
+
+ elif criteria == 'tag':
+ values = set()
+ for tc in all_tests:
+ # tc can have multiple tags (as a tuple); otherwise a plain str will suffice
+ if isinstance(tc.tctag, tuple):
+ values |= { str(tag) for tag in tc.tctag }
+ else:
+ values.add(str(tc.tctag))
+
+ tags = get_matches(list(values))
+
+ for tc in all_tests:
+ for tag in tags:
+ if isinstance(tc.tctag, tuple) and tag in tc.tctag:
+ ts.append(tc)
+ elif tag == tc.tctag:
+ ts.append(tc)
+
+ # Remove duplicates from the list
+ ts = list(set(ts))
+
+ return ts
+
+
+def list_testsuite_by(criteria, keyword):
+ # Get a testsuite based on 'keyword'
+ # criteria: name, class, module, id, tag
+ # keyword: a list of tests, classes, modules, ids, tags
+
+ ts = sorted([ (tc.tcid, tc.tctag, tc.tcname, tc.tcclass, tc.tcmodule) for tc in get_testsuite_by(criteria, keyword) ])
+
+ print '%-4s\t%-20s\t%-60s\t%-25s\t%-20s' % ('id', 'tag', 'name', 'class', 'module')
+ print '_' * 150
+ for t in ts:
+ if isinstance(t[1], (tuple, list)):
+ print '%-4s\t%-20s\t%-60s\t%-25s\t%-20s' % (t[0], ', '.join(t[1]), t[2], t[3], t[4])
+ else:
+ print '%-4s\t%-20s\t%-60s\t%-25s\t%-20s' % t
+ print '_' * 150
+ print 'Filtering by:\t %s' % criteria
+ print 'Looking for:\t %s' % ', '.join(str(x) for x in keyword)
+ print 'Total found:\t %s' % len(ts)
+
+
+def list_tests():
+ # List all available oe-selftest tests
+
+ ts = get_all_tests()
+
+ print '%-4s\t%-20s\t%-60s\t%-25s\t%-20s' % ('id', 'tag', 'name', 'class', 'module')
+ print '_' * 150
+ for t in ts:
+ if isinstance(t.tctag, (tuple, list)):
+ print '%-4s\t%-20s\t%-60s\t%-25s\t%-20s' % (t.tcid, ', '.join(t.tctag), t.tcname, t.tcclass, t.tcmodule)
+ else:
+ print '%-4s\t%-20s\t%-60s\t%-25s\t%-20s' % (t.tcid, t.tctag, t.tcname, t.tcclass, t.tcmodule)
+ print '_' * 150
+ print 'Total found:\t %s' % len(ts)
+
+
+def list_tags():
+ # Get all tags set to test cases
+ # This is useful when setting tags to test cases
+ # The list of tags should be kept as minimal as possible
+ tags = set()
+ all_tests = get_all_tests()
+
+ for tc in all_tests:
+ if isinstance(tc.tctag, (tuple, list)):
+ tags.update(set(tc.tctag))
+ else:
+ tags.add(tc.tctag)
+
+ print 'Tags:\t%s' % ', '.join(str(x) for x in tags)
+
+def coverage_setup(coverage_source, coverage_include, coverage_omit):
+ """ Set up the coverage measurement for the testcases to be run """
+ import datetime
+ import subprocess
+ builddir = os.environ.get("BUILDDIR")
+ pokydir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+    curcommit = subprocess.check_output(["git", "--git-dir", os.path.join(pokydir, ".git"), "rev-parse", "HEAD"])
+ coveragerc = "%s/.coveragerc" % builddir
+ data_file = "%s/.coverage." % builddir
+ data_file += datetime.datetime.now().strftime('%Y%m%dT%H%M%S')
+ if os.path.isfile(data_file):
+ os.remove(data_file)
+ with open(coveragerc, 'w') as cps:
+ cps.write("# Generated with command '%s'\n" % " ".join(sys.argv))
+ cps.write("# HEAD commit %s\n" % curcommit.strip())
+ cps.write("[run]\n")
+ cps.write("data_file = %s\n" % data_file)
+ cps.write("branch = True\n")
+ # Measure just BBLAYERS, scripts and bitbake folders
+ cps.write("source = \n")
+ if coverage_source:
+ for directory in coverage_source:
+ if not os.path.isdir(directory):
+ log.warn("Directory %s is not valid.", directory)
+ cps.write(" %s\n" % directory)
+ else:
+ for layer in get_bb_var('BBLAYERS').split():
+ cps.write(" %s\n" % layer)
+ cps.write(" %s\n" % os.path.dirname(os.path.realpath(__file__)))
+ cps.write(" %s\n" % os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))),'bitbake'))
+
+ if coverage_include:
+ cps.write("include = \n")
+ for pattern in coverage_include:
+ cps.write(" %s\n" % pattern)
+ if coverage_omit:
+ cps.write("omit = \n")
+ for pattern in coverage_omit:
+ cps.write(" %s\n" % pattern)
+
+ return coveragerc
+
+def coverage_report():
+ """ Loads the coverage data gathered and reports it back """
+ try:
+ # Coverage4 uses coverage.Coverage
+ from coverage import Coverage
+ except:
+        # Coverage versions older than 4 use coverage.coverage
+ from coverage import coverage as Coverage
+
+ import cStringIO as StringIO
+ from coverage.misc import CoverageException
+
+ cov_output = StringIO.StringIO()
+ # Creating the coverage data with the setting from the configuration file
+ cov = Coverage(config_file = os.environ.get('COVERAGE_PROCESS_START'))
+ try:
+ # Load data from the data file specified in the configuration
+ cov.load()
+ # Store report data in a StringIO variable
+ cov.report(file = cov_output, show_missing=False)
+ log.info("\n%s" % cov_output.getvalue())
+ except CoverageException as e:
+        # Show problems with the reporting. In Coverage 4, finding no data to report raises an exception
+ log.warn("%s" % str(e))
+ finally:
+ cov_output.close()
- return testslist
def main():
parser = get_args_parser()
@@ -165,6 +445,32 @@ def main():
sys.path.extend(layer_libdirs)
reload(oeqa.selftest)
+ if args.run_tests_by and len(args.run_tests_by) >= 2:
+ valid_options = ['name', 'class', 'module', 'id', 'tag']
+ if args.run_tests_by[0] not in valid_options:
+            print '--run-tests-by %s is not a valid option. Choose one of <name|class|module|id|tag>.' % args.run_tests_by[0]
+ return 1
+ else:
+ criteria = args.run_tests_by[0]
+ keyword = args.run_tests_by[1:]
+ ts = sorted([ tc.fullpath for tc in get_testsuite_by(criteria, keyword) ])
+
+ if args.list_tests_by and len(args.list_tests_by) >= 2:
+ valid_options = ['name', 'class', 'module', 'id', 'tag']
+ if args.list_tests_by[0] not in valid_options:
+            print '--list-tests-by %s is not a valid option. Choose one of <name|class|module|id|tag>.' % args.list_tests_by[0]
+ return 1
+ else:
+ criteria = args.list_tests_by[0]
+ keyword = args.list_tests_by[1:]
+ list_testsuite_by(criteria, keyword)
+
+ if args.list_tests:
+ list_tests()
+
+ if args.list_tags:
+ list_tags()
+
if args.list_allclasses:
args.list_modules = True
@@ -172,7 +478,7 @@ def main():
log.info('Listing all available test modules:')
testslist = get_tests(include_hidden=True)
for test in testslist:
- module = test.split('.')[-1]
+ module = test.split('oeqa.selftest.')[-1]
info = ''
if module.startswith('_'):
info = ' (hidden)'
@@ -186,22 +492,26 @@ def main():
if isinstance(t, type(oeSelfTest)) and issubclass(t, oeSelfTest) and t!=oeSelfTest:
print " --", v
for method in dir(t):
- if method.startswith("test_"):
+ if method.startswith("test_") and callable(vars(t)[method]):
print " -- --", method
except (AttributeError, ImportError) as e:
print e
pass
- if args.run_tests or args.run_all_tests:
+ if args.run_tests or args.run_all_tests or args.run_tests_by:
if not preflight_check():
return 1
- testslist = get_tests(exclusive_modules=(args.run_tests or []), include_hidden=False)
+ if args.run_tests_by:
+ testslist = ts
+ else:
+ testslist = get_tests(exclusive_modules=(args.run_tests or []), include_hidden=False)
+
suite = unittest.TestSuite()
loader = unittest.TestLoader()
loader.sortTestMethodsUsing = None
- runner = unittest.TextTestRunner(verbosity=2, resultclass=StampedResult)
+ runner = unittest.TextTestRunner(verbosity=2, resultclass=buildResultClass(args))
# we need to do this here, otherwise just loading the tests
# will take 2 minutes (bitbake -e calls)
oeSelfTest.testlayer_path = get_test_layer()
@@ -214,23 +524,104 @@ def main():
log.error(e)
return 1
add_include()
- result = runner.run(suite)
+
+ if args.machine:
+ # Custom machine sets only weak default values (??=) for MACHINE in machine.inc
+            # This lets test cases that require a specific MACHINE override it, using ?= or =
+ log.info('Custom machine mode enabled. MACHINE set to %s' % args.machine)
+ if args.machine == 'random':
+ os.environ['CUSTOMMACHINE'] = 'random'
+ result = runner.run(suite)
+ else: # all
+ machines = get_available_machines()
+ for m in machines:
+ log.info('Run tests with custom MACHINE set to: %s' % m)
+ os.environ['CUSTOMMACHINE'] = m
+ result = runner.run(suite)
+ else:
+ result = runner.run(suite)
+
log.info("Finished")
+
if result.wasSuccessful():
return 0
else:
return 1
-class StampedResult(unittest.TextTestResult):
- """
- Custom TestResult that prints the time when a test starts. As oe-selftest
- can take a long time (ie a few hours) to run, timestamps help us understand
- what tests are taking a long time to execute.
- """
- def startTest(self, test):
- import time
- self.stream.write(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + " - ")
- super(StampedResult, self).startTest(test)
+def buildResultClass(args):
+ """Build a Result Class to use in the testcase execution"""
+ import site
+
+ class StampedResult(unittest.TextTestResult):
+ """
+ Custom TestResult that prints the time when a test starts. As oe-selftest
+ can take a long time (ie a few hours) to run, timestamps help us understand
+ what tests are taking a long time to execute.
+ If coverage is required, this class executes the coverage setup and reporting.
+ """
+ def startTest(self, test):
+ import time
+ self.stream.write(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + " - ")
+ super(StampedResult, self).startTest(test)
+
+ def startTestRun(self):
+ """ Setup coverage before running any testcase """
+
+            # variable holding the coverage configuration file, allowing subprocesses to be measured
+ self.coveragepth = None
+
+            # indicates whether coverage is currently installed
+ self.coverage_installed = True
+
+ if args.coverage or args.coverage_source or args.coverage_include or args.coverage_omit:
+ try:
+ # check if user can do coverage
+ import coverage
+ except:
+ log.warn("python coverage is not installed. More info on https://pypi.python.org/pypi/coverage")
+ self.coverage_installed = False
+
+ if self.coverage_installed:
+ log.info("Coverage is enabled")
+
+ # In case the user has not set the variable COVERAGE_PROCESS_START,
+ # create a default one and export it. The COVERAGE_PROCESS_START
+ # value indicates where the coverage configuration file resides
+ # More info on https://pypi.python.org/pypi/coverage
+ if not os.environ.get('COVERAGE_PROCESS_START'):
+ os.environ['COVERAGE_PROCESS_START'] = coverage_setup(args.coverage_source, args.coverage_include, args.coverage_omit)
+
+ # Use default site.USER_SITE and write corresponding config file
+ site.ENABLE_USER_SITE = True
+ if not os.path.exists(site.USER_SITE):
+ os.makedirs(site.USER_SITE)
+ self.coveragepth = os.path.join(site.USER_SITE, "coverage.pth")
+ with open(self.coveragepth, 'w') as cps:
+ cps.write('import sys,site; sys.path.extend(site.getsitepackages()); import coverage; coverage.process_startup();')
+
+ def stopTestRun(self):
+ """ Report coverage data after the testcases are run """
+
+ if args.coverage or args.coverage_source or args.coverage_include or args.coverage_omit:
+ if self.coverage_installed:
+ with open(os.environ['COVERAGE_PROCESS_START']) as ccf:
+ log.info("Coverage configuration file (%s)" % os.environ.get('COVERAGE_PROCESS_START'))
+ log.info("===========================")
+ log.info("\n%s" % "".join(ccf.readlines()))
+
+ log.info("Coverage Report")
+ log.info("===============")
+ try:
+ coverage_report()
+ finally:
+ # remove the pth file
+ try:
+ os.remove(self.coveragepth)
+ except OSError:
+                            log.warn("Expected temporary file from coverage is missing, ignoring removal.")
+
+ return StampedResult
+
if __name__ == "__main__":
try:
@@ -238,7 +629,7 @@ if __name__ == "__main__":
except Exception:
ret = 1
import traceback
- traceback.print_exc(5)
+ traceback.print_exc()
finally:
remove_include()
remove_inc_files()
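
The coverage support added above relies on coverage.py's documented subprocess-measurement mechanism: the COVERAGE_PROCESS_START environment variable points at a configuration file, and a .pth file dropped into the user site directory calls coverage.process_startup() in every new Python interpreter. A minimal sketch of that mechanism in isolation, with illustrative paths rather than the ones oe-selftest generates:

    import os
    import site
    import subprocess

    # Illustrative config; oe-selftest writes its own under BUILDDIR.
    rcfile = '/tmp/example-coveragerc'
    with open(rcfile, 'w') as f:
        f.write('[run]\nbranch = True\ndata_file = /tmp/.coverage-example\n')
    os.environ['COVERAGE_PROCESS_START'] = rcfile

    # The .pth hook makes every new interpreter start measuring itself.
    if not os.path.isdir(site.USER_SITE):
        os.makedirs(site.USER_SITE)
    pth = os.path.join(site.USER_SITE, 'example-coverage.pth')
    with open(pth, 'w') as f:
        f.write('import coverage; coverage.process_startup()\n')

    subprocess.call(['python', '-c', 'print("this child process is measured")'])
    os.remove(pth)  # remove the hook afterwards, as stopTestRun() does

This assumes the coverage package is installed; the code above checks for it and warns otherwise.
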
diff --git a/yocto-poky/scripts/oe-setup-builddir b/yocto-poky/scripts/oe-setup-builddir
index 2598ba852..93722e08a 100755
--- a/yocto-poky/scripts/oe-setup-builddir
+++ b/yocto-poky/scripts/oe-setup-builddir
@@ -75,12 +75,11 @@ if [ -z "$OECORELOCALCONF" ]; then
OECORELOCALCONF="$OEROOT/meta/conf/local.conf.sample"
fi
if [ ! -r "$BUILDDIR/conf/local.conf" ]; then
-cat <<EOM
+ cat <<EOM
You had no conf/local.conf file. This configuration file has therefore been
-created for you with some default values. You may wish to edit it to use a
-different MACHINE (target hardware) or enable parallel build options to take
-advantage of multiple cores for example. See the file for more information as
-common configuration options are commented.
+created for you with some default values. You may wish to edit it to, for
+example, select a different MACHINE (target hardware). See conf/local.conf
+for more information as common configuration options are commented.
EOM
cp -f $OECORELOCALCONF "$BUILDDIR/conf/local.conf"
@@ -92,9 +91,9 @@ if [ -z "$OECORELAYERCONF" ]; then
fi
if [ ! -r "$BUILDDIR/conf/bblayers.conf" ]; then
cat <<EOM
-You had no conf/bblayers.conf file. The configuration file has been created for
-you with some default values. To add additional metadata layers into your
-configuration please add entries to this file.
+You had no conf/bblayers.conf file. This configuration file has therefore been
+created for you with some default values. To add additional metadata layers
+into your configuration please add entries to conf/bblayers.conf.
EOM
diff --git a/yocto-poky/scripts/postinst-intercepts/update_font_cache b/yocto-poky/scripts/postinst-intercepts/update_font_cache
index 0deab3c11..bf65e19a4 100644
--- a/yocto-poky/scripts/postinst-intercepts/update_font_cache
+++ b/yocto-poky/scripts/postinst-intercepts/update_font_cache
@@ -1,5 +1,7 @@
#!/bin/sh
+set -e
+
PSEUDO_UNLOAD=1 qemuwrapper -L $D -E LD_LIBRARY_PATH=$D/${libdir}:$D/${base_libdir} \
- -E ${fontconfigcacheenv} $D${bindir}/fc-cache --sysroot=$D ${fontconfigcacheparams}
+ -E ${fontconfigcacheenv} $D${bindir}/fc-cache --sysroot=$D --system-only ${fontconfigcacheparams}
chown -R root:root $D${fontconfigcachedir}
diff --git a/yocto-poky/scripts/postinst-intercepts/update_gio_module_cache b/yocto-poky/scripts/postinst-intercepts/update_gio_module_cache
new file mode 100644
index 000000000..fe468092c
--- /dev/null
+++ b/yocto-poky/scripts/postinst-intercepts/update_gio_module_cache
@@ -0,0 +1,7 @@
+#!/bin/sh
+
+set -e
+
+PSEUDO_UNLOAD=1 qemuwrapper -L $D -E LD_LIBRARY_PATH=$D${libdir}:$D${base_libdir} \
+ $D${libexecdir}/${binprefix}gio-querymodules $D${libdir}/gio/modules/
+
diff --git a/yocto-poky/scripts/postinst-intercepts/update_pixbuf_cache b/yocto-poky/scripts/postinst-intercepts/update_pixbuf_cache
index a8b400106..5d44075fb 100644
--- a/yocto-poky/scripts/postinst-intercepts/update_pixbuf_cache
+++ b/yocto-poky/scripts/postinst-intercepts/update_pixbuf_cache
@@ -1,5 +1,7 @@
#!/bin/sh
+set -e
+
export GDK_PIXBUF_MODULEDIR=$D${libdir}/gdk-pixbuf-2.0/2.10.0/loaders
export GDK_PIXBUF_FATAL_LOADER=1
diff --git a/yocto-poky/scripts/recipetool b/yocto-poky/scripts/recipetool
index 87fb35ed7..6c6648756 100755
--- a/yocto-poky/scripts/recipetool
+++ b/yocto-poky/scripts/recipetool
@@ -27,6 +27,7 @@ scripts_path = os.path.dirname(os.path.realpath(__file__))
lib_path = scripts_path + '/lib'
sys.path = sys.path + [lib_path]
import scriptutils
+import argparse_oe
logger = scriptutils.logger_create('recipetool')
plugins = []
@@ -34,7 +35,7 @@ plugins = []
def tinfoil_init(parserecipes):
import bb.tinfoil
import logging
- tinfoil = bb.tinfoil.Tinfoil()
+ tinfoil = bb.tinfoil.Tinfoil(tracking=True)
tinfoil.prepare(not parserecipes)
tinfoil.logger.setLevel(logger.getEffectiveLevel())
return tinfoil
@@ -45,9 +46,9 @@ def main():
logger.error("This script can only be run after initialising the build environment (e.g. by using oe-init-build-env)")
sys.exit(1)
- parser = argparse.ArgumentParser(description="OpenEmbedded recipe tool",
- add_help=False,
- epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
+ parser = argparse_oe.ArgumentParser(description="OpenEmbedded recipe tool",
+ add_help=False,
+ epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true')
parser.add_argument('--color', choices=['auto', 'always', 'never'], default='auto', help='Colorize output (where %(metavar)s is %(choices)s)', metavar='COLOR')
@@ -82,7 +83,11 @@ def main():
registered = False
for plugin in plugins:
- if hasattr(plugin, 'register_command'):
+ if hasattr(plugin, 'register_commands'):
+ registered = True
+ plugin.register_commands(subparsers)
+ elif hasattr(plugin, 'register_command'):
+ # Legacy function name
registered = True
plugin.register_command(subparsers)
if hasattr(plugin, 'tinfoil_init'):
@@ -96,7 +101,9 @@ def main():
try:
if getattr(args, 'parserecipes', False):
+ tinfoil.config_data.disableTracking()
tinfoil.parseRecipes()
+ tinfoil.config_data.enableTracking()
ret = args.func(args)
except bb.BBHandledException:
ret = 1
@@ -110,5 +117,5 @@ if __name__ == "__main__":
except Exception:
ret = 1
import traceback
- traceback.print_exc(5)
+ traceback.print_exc()
sys.exit(ret)
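
The plugin loop above now prefers a register_commands(subparsers) hook and only falls back to the legacy register_command(). A hypothetical plugin written against the new hook would look roughly like this; the 'example' subcommand and its handler are made-up names, not real recipetool commands:

    # Sketch of a recipetool plugin using the newer registration hook.
    def do_example(args):
        # recipetool's main() dispatches to args.func after parsing
        print('example subcommand called for %s' % args.recipe)
        return 0

    def register_commands(subparsers):
        parser = subparsers.add_parser('example', help='Illustrative subcommand')
        parser.add_argument('recipe', help='Recipe to operate on')
        parser.set_defaults(func=do_example)
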
diff --git a/yocto-poky/scripts/relocate_sdk.py b/yocto-poky/scripts/relocate_sdk.py
index 992db5c47..99fca86a1 100755
--- a/yocto-poky/scripts/relocate_sdk.py
+++ b/yocto-poky/scripts/relocate_sdk.py
@@ -112,7 +112,7 @@ def change_interpreter(elf_file_name):
f.write(dl_path)
break
-def change_dl_sysdirs():
+def change_dl_sysdirs(elf_file_name):
if arch == 32:
sh_fmt = "<IIIIIIIIII"
else:
@@ -156,6 +156,11 @@ def change_dl_sysdirs():
elif name == b(".ldsocache"):
ldsocache_path = f.read(sh_size)
new_ldsocache_path = old_prefix.sub(new_prefix, ldsocache_path)
+ new_ldsocache_path = new_ldsocache_path.rstrip(b("\0"))
+ if (len(new_ldsocache_path) >= sh_size):
+ print("ERROR: could not relocate %s, .ldsocache section size = %i and %i is needed." \
+ % (elf_file_name, sh_size, len(new_ldsocache_path)))
+ sys.exit(-1)
# pad with zeros
new_ldsocache_path += b("\0") * (sh_size - len(new_ldsocache_path))
# write it back
@@ -166,6 +171,11 @@ def change_dl_sysdirs():
while (offset + 4096) <= sh_size:
path = f.read(4096)
new_path = old_prefix.sub(new_prefix, path)
+ new_path = new_path.rstrip(b("\0"))
+ if (len(new_path) >= 4096):
+ print("ERROR: could not relocate %s, max path size = 4096 and %i is needed." \
+ % (elf_file_name, len(new_path)))
+ sys.exit(-1)
# pad with zeros
new_path += b("\0") * (4096 - len(new_path))
#print "Changing %s to %s at %s" % (str(path), str(new_path), str(offset))
@@ -240,7 +250,7 @@ for e in executables_list:
if arch:
parse_elf_header()
change_interpreter(e)
- change_dl_sysdirs()
+ change_dl_sysdirs(e)
""" change permissions back """
if perms:
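
The new checks above make relocate_sdk.py refuse to rewrite a binary when the relocated path no longer fits in the fixed-size .ldsocache section (or a 4096-byte path slot), instead of silently overflowing it. The pad-or-fail logic can be sketched on a plain byte string; the prefixes and section size below are illustrative only:

    import re
    import sys

    def rewrite_section(data, old_prefix, new_prefix, size):
        # Substitute the prefix, strip trailing NULs, then either fail or
        # pad the result back to the section's original size.
        new = re.sub(old_prefix, new_prefix, data).rstrip(b'\0')
        if len(new) >= size:
            sys.exit('cannot relocate: need %d bytes, section holds %d' % (len(new), size))
        return new + b'\0' * (size - len(new))

    section = b'/opt/poky/2.0/sysroots' + b'\0' * 10
    print(rewrite_section(section, b'/opt/poky/2.0', b'/home/user/sdk', len(section)))
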
diff --git a/yocto-poky/scripts/runqemu b/yocto-poky/scripts/runqemu
index e01d276f7..d7fa941a6 100755
--- a/yocto-poky/scripts/runqemu
+++ b/yocto-poky/scripts/runqemu
@@ -19,34 +19,33 @@
usage() {
MYNAME=`basename $0`
- echo ""
- echo "Usage: you can run this script with any valid combination"
- echo "of the following environment variables (in any order):"
- echo " QEMUARCH - the qemu machine architecture to use"
- echo " KERNEL - the kernel image file to use"
- echo " ROOTFS - the rootfs image file or nfsroot directory to use"
- echo " MACHINE - the machine name (optional, autodetected from KERNEL filename if unspecified)"
- echo " RAMFS - boot a ramfs-based image"
- echo " ISO - boot an ISO image"
- echo " VM - boot a virtual machine image (= a file representing a full disk with boot loader)"
- echo " Simplified QEMU command-line options can be passed with:"
- echo " nographic - disables video console"
- echo " serial - enables a serial console on /dev/ttyS0"
- echo " kvm - enables KVM when running qemux86/qemux86-64 (VT-capable CPU required)"
- echo " kvm-vhost - enables KVM with vhost support when running qemux86/qemux86-64 (VT-capable CPU required)"
- echo " publicvnc - enable a VNC server open to all hosts"
- echo " qemuparams=\"xyz\" - specify custom parameters to QEMU"
- echo " bootparams=\"xyz\" - specify custom kernel parameters during boot"
- echo ""
- echo "Examples:"
- echo " $MYNAME qemuarm"
- echo " $MYNAME qemux86-64 core-image-sato ext4"
- echo " $MYNAME path/to/bzImage-qemux86.bin path/to/nfsrootdir/ serial"
- echo " $MYNAME qemux86 ramfs"
- echo " $MYNAME qemux86 iso"
- echo " $MYNAME qemux86 qemuparams=\"-m 256\""
- echo " $MYNAME qemux86 bootparams=\"psplash=false\""
- echo " $MYNAME path/to/<image>-<machine>.vmdk"
+cat <<_EOF
+
+Usage: you can run this script with any valid combination
+of the following environment variables (in any order):
+ KERNEL - the kernel image file to use
+ ROOTFS - the rootfs image file or nfsroot directory to use
+ MACHINE - the machine name (optional, autodetected from KERNEL filename if unspecified)
+ Simplified QEMU command-line options can be passed with:
+ nographic - disables video console
+ serial - enables a serial console on /dev/ttyS0
+ kvm - enables KVM when running qemux86/qemux86-64 (VT-capable CPU required)
+ kvm-vhost - enables KVM with vhost support when running qemux86/qemux86-64 (VT-capable CPU required)
+ publicvnc - enable a VNC server open to all hosts
+ qemuparams="xyz" - specify custom parameters to QEMU
+ bootparams="xyz" - specify custom kernel parameters during boot
+
+Examples:
+ $MYNAME qemuarm
+ $MYNAME qemux86-64 core-image-sato ext4
+ $MYNAME qemux86-64 wic-image-minimal wic
+ $MYNAME path/to/bzImage-qemux86.bin path/to/nfsrootdir/ serial
+ $MYNAME qemux86 iso/hddimg/vmdk/qcow2/vdi/ramfs/cpio.gz...
+ $MYNAME qemux86 qemuparams="-m 256"
+ $MYNAME qemux86 bootparams="psplash=false"
+ $MYNAME path/to/<image>-<machine>.vmdk
+ $MYNAME path/to/<image>-<machine>.wic
+_EOF
exit 1
}
@@ -62,7 +61,6 @@ error() {
MACHINE=${MACHINE:=""}
KERNEL=${KERNEL:=""}
ROOTFS=${ROOTFS:=""}
-VM=${VM:=""}
FSTYPE=${FSTYPE:=""}
LAZY_ROOTFS=""
SCRIPT_QEMU_OPT=""
@@ -74,6 +72,7 @@ KVM_ENABLED="no"
KVM_ACTIVE="no"
VHOST_ENABLED="no"
VHOST_ACTIVE="no"
+IS_VM="false"
# Determine whether the file is a kernel or QEMU image, and set the
# appropriate variables
@@ -97,9 +96,11 @@ process_filename() {
error "conflicting FSTYPE types [$FSTYPE] and [$EXT]"
fi
;;
- /hddimg/|/hdddirect/|/vmdk/)
+ /hddimg/|/hdddirect/|/vmdk/|/wic/|/qcow2/|/vdi/)
FSTYPE=$EXT
VM=$filename
+ ROOTFS=$filename
+ IS_VM="true"
;;
*)
error "unknown file arg [$filename]"
@@ -107,6 +108,13 @@ process_filename() {
esac
}
+check_fstype_conflicts() {
+ if [ -z "$FSTYPE" -o "$FSTYPE" = "$1" ]; then
+ FSTYPE=$1
+ else
+ error "conflicting FSTYPE types [$FSTYPE] and [$1]"
+ fi
+}
# Parse command line args without requiring specific ordering. It's a
# bit more complex, but offers a great user experience.
while true; do
@@ -117,18 +125,16 @@ while true; do
[ -z "$MACHINE" -o "$MACHINE" = "$arg" ] && MACHINE=$arg || \
error "conflicting MACHINE types [$MACHINE] and [$arg]"
;;
- "ext2" | "ext3" | "ext4" | "jffs2" | "nfs" | "btrfs" | "hddimg" | "hdddirect" )
- [ -z "$FSTYPE" -o "$FSTYPE" = "$arg" ] && FSTYPE=$arg || \
- error "conflicting FSTYPE types [$FSTYPE] and [$arg]"
+ "ext"[234] | "jffs2" | "nfs" | "btrfs")
+ check_fstype_conflicts $arg
+ ;;
+ "hddimg" | "hdddirect" | "wic" | "vmdk" | "qcow2" | "vdi" | "iso")
+ check_fstype_conflicts $arg
+ IS_VM="true"
;;
- "ramfs")
+ "ramfs" | "cpio.gz")
FSTYPE=cpio.gz
- RAMFS=true
;;
- "iso")
- FSTYPE=iso
- ISOFS=true
- ;;
"nographic")
SCRIPT_QEMU_OPT="$SCRIPT_QEMU_OPT -nographic"
SCRIPT_KERNEL_OPT="$SCRIPT_KERNEL_OPT console=ttyS0"
@@ -141,7 +147,7 @@ while true; do
"tcpserial="*)
TCPSERIAL_PORTNUM=${arg##tcpserial=}
;;
- "biosdir="*)
+ "biosdir="*)
CUSTOMBIOSDIR="${arg##biosdir=}"
;;
"biosfilename="*)
@@ -182,11 +188,11 @@ while true; do
SLIRP_ENABLED="yes"
;;
"publicvnc")
- SCRIPT_QEMU_OPT="$SCRIPT_QEMU_OPT -vnc 0.0.0.0:0"
+ SCRIPT_QEMU_OPT="$SCRIPT_QEMU_OPT -vnc :0"
;;
*-image*)
[ -z "$ROOTFS" ] || \
- error "conflicting ROOTFS args [$ROOTFS] and [$arg]"
+ error "conflicting ROOTFS args [$ROOTFS] and [$arg]"
if [ -f "$arg" ]; then
process_filename $arg
elif [ -d "$arg" ]; then
@@ -235,16 +241,17 @@ elif [ ! -w /dev/net/tun ] ; then
fi
# Report errors for missing combinations of options
-if [ -z "$MACHINE" -a -z "$KERNEL" -a -z "$VM" ]; then
- error "you must specify at least a MACHINE, VM, or KERNEL argument"
+if [ -z "$MACHINE" -a -z "$KERNEL" -a -z "$VM" -a "$FSTYPE" != "wic" ]; then
+ error "you must specify at least a MACHINE or KERNEL argument"
fi
if [ "$FSTYPE" = "nfs" -a -z "$ROOTFS" ]; then
error "NFS booting without an explicit ROOTFS path is not yet supported"
fi
if [ -z "$MACHINE" ]; then
- if [ "x$FSTYPE" = "xvmdk" ] || [ "x$FSTYPE" = "xhddimg" ] || [ "x$FSTYPE" = "xhdddirect" ]; then
- MACHINE=`basename $VM | sed -n 's/.*\(qemux86-64\|qemux86\|qemuarm64\|qemuarm\|qemumips64\|qemumips\|qemuppc\|qemush4\).*/\1/p'`
+ if [ "$IS_VM" = "true" ]; then
+ [ "x$FSTYPE" = "xwic" ] && filename=$ROOTFS || filename=$VM
+ MACHINE=`basename $filename | sed -n 's/.*\(qemux86-64\|qemux86\|qemuarm64\|qemuarm\|qemumips64\|qemumips\|qemuppc\|qemush4\).*/\1/p'`
if [ -z "$MACHINE" ]; then
error "Unable to set MACHINE from image filename [$VM]"
fi
@@ -343,12 +350,6 @@ QEMUMICROBLAZE_DEFAULT_FSTYPE=cpio
QEMUZYNQ_DEFAULT_KERNEL=uImage
QEMUZYNQ_DEFAULT_FSTYPE=cpio
-AKITA_DEFAULT_KERNEL=zImage-akita.bin
-AKITA_DEFAULT_FSTYPE=jffs2
-
-SPITZ_DEFAULT_KERNEL=zImage-spitz.bin
-SPITZ_DEFAULT_FSTYPE=ext3
-
setup_path_vars() {
if [ -z "$OE_TMPDIR" ] ; then
PATHS_REQUIRED=true
@@ -454,7 +455,7 @@ if [ -e "$ROOTFS" -a -z "$FSTYPE" ]; then
fi
fi
-if [ -z "$KERNEL" -a "x$FSTYPE" != "xvmdk" -a "x$FSTYPE" != "xhddimg" -a "x$FSTYPE" != "xhdddirect" ]; then
+if [ -z "$KERNEL" -a "$IS_VM" = "false" ]; then \
setup_path_vars 1
eval kernel_file=\$${machine2}_DEFAULT_KERNEL
KERNEL=$DEPLOY_DIR_IMAGE/$kernel_file
@@ -480,14 +481,14 @@ fi
if [ "$LAZY_ROOTFS" = "true" ]; then
setup_path_vars 1
echo "Assuming $ROOTFS really means $DEPLOY_DIR_IMAGE/$ROOTFS-$MACHINE.$FSTYPE"
- if [ "$FSTYPE" = "hddimg" -o "x$FSTYPE" = "xhdddirect" ]; then
+ if [ "$IS_VM" = "true" ]; then
VM=$DEPLOY_DIR_IMAGE/$ROOTFS-$MACHINE.$FSTYPE
else
ROOTFS=$DEPLOY_DIR_IMAGE/$ROOTFS-$MACHINE.$FSTYPE
fi
fi
-if [ -z "$ROOTFS" -a "x$FSTYPE" != "xvmdk" -a "x$FSTYPE" != "xhddimg" -a "x$FSTYPE" != "xhdddirect" ]; then
+if [ -z "$ROOTFS" ]; then
setup_path_vars 1
T=$DEPLOY_DIR_IMAGE
eval rootfs_list=\$${machine2}_DEFAULT_ROOTFS
@@ -495,6 +496,8 @@ if [ -z "$ROOTFS" -a "x$FSTYPE" != "xvmdk" -a "x$FSTYPE" != "xhddimg" -a "x$FSTY
if [ -z "$ROOTFS" ]; then
error "Unable to determine default rootfs for MACHINE [$MACHINE]"
+ elif [ "$IS_VM" = "true" ]; then
+ VM=$ROOTFS
fi
fi
# ROOTFS is now set for all cases, now expand it to be an absolute path, it should exist at this point
@@ -503,11 +506,11 @@ ROOTFS=`readlink -f $ROOTFS`
echo ""
echo "Continuing with the following parameters:"
-if [ "x$FSTYPE" != "xvmdk" -a "x$FSTYPE" != "xhddimg" -a "x$FSTYPE" != "xhdddirect" ]; then
+if [ "$IS_VM" = "false" ]; then
echo "KERNEL: [$KERNEL]"
echo "ROOTFS: [$ROOTFS]"
else
- echo "VMDK: [$VM]"
+ echo "VM: [$VM]"
fi
echo "FSTYPE: [$FSTYPE]"
diff --git a/yocto-poky/scripts/runqemu-gen-tapdevs b/yocto-poky/scripts/runqemu-gen-tapdevs
index d3b27be29..624deacb7 100755
--- a/yocto-poky/scripts/runqemu-gen-tapdevs
+++ b/yocto-poky/scripts/runqemu-gen-tapdevs
@@ -85,6 +85,15 @@ for ((index=0; index < $COUNT; index++)); do
fi
done
+if [ $COUNT -gt 0 ]; then
+ echo "Note: For systems running NetworkManager, it's recommended"
+ echo "Note: that the tap devices be set as unmanaged in the"
+ echo "Note: NetworkManager.conf file. Add the following lines to"
+ echo "Note: /etc/NetworkManager/NetworkManager.conf"
+ echo "[keyfile]"
+ echo "unmanaged-devices=interface-name:tap*"
+fi
+
# The runqemu script will check for this file, and if it exists,
# will use the existing bank of tap devices without creating
# additional ones via sudo.
diff --git a/yocto-poky/scripts/runqemu-internal b/yocto-poky/scripts/runqemu-internal
index 3b0e54c86..ac1c703c5 100755
--- a/yocto-poky/scripts/runqemu-internal
+++ b/yocto-poky/scripts/runqemu-internal
@@ -28,7 +28,6 @@
# ROOTFS - the disk image file to use
#
-
mem_size=-1
#Get rid of <> and get the contents of extra qemu running params
@@ -39,62 +38,8 @@ mem_set=`expr "$SCRIPT_QEMU_EXTRA_OPT" : '.*\(-m[[:space:]] *[0-9]*\)'`
if [ ! -z "$mem_set" ] ; then
#Get memory setting size from user input
mem_size=`echo $mem_set | sed 's/-m[[:space:]] *//'`
-else
- case "$MACHINE" in
- "qemux86")
- mem_size=256
- ;;
- "qemux86-64")
- mem_size=256
- ;;
- "qemuarm")
- mem_size=128
- ;;
- "qemuarm64")
- mem_size=512
- ;;
- "qemumicroblaze")
- mem_size=256
- ;;
- "qemumips"|"qemumips64")
- mem_size=256
- ;;
- "qemuppc")
- mem_size=256
- ;;
- "qemush4")
- mem_size=1024
- ;;
- "qemuzynq")
- mem_size=1024
- ;;
- *)
- mem_size=64
- ;;
- esac
-
fi
-# QEMU_MEMORY has 'M' appended to mem_size
-QEMU_MEMORY="$mem_size"M
-
-# Bug 433: qemuarm cannot use > 256 MB RAM
-if [ "$MACHINE" = "qemuarm" ]; then
- if [ -z "$mem_size" -o $mem_size -gt 256 ]; then
- echo "WARNING: qemuarm does not support > 256M of RAM."
- echo "Changing QEMU_MEMORY to default of 256M."
- QEMU_MEMORY="256M"
- mem_size="256"
- SCRIPT_QEMU_EXTRA_OPT=`echo $SCRIPT_QEMU_EXTRA_OPT | sed -e "s/$mem_set/-m 256/" `
- fi
-fi
-
-# We need to specify -m <mem_size> to overcome a bug in qemu 0.14.0
-# https://bugs.launchpad.net/ubuntu/+source/qemu-kvm/+bug/584480
-
-if [ -z "$mem_set" ] ; then
- SCRIPT_QEMU_EXTRA_OPT="$SCRIPT_QEMU_EXTRA_OPT -m $mem_size"
-fi
# This file is created when runqemu-gen-tapdevs creates a bank of tap
# devices, indicating that the user should not bring up new ones using
# sudo.
@@ -119,7 +64,7 @@ ORIG_STTY=$(stty -g)
if [ "$SLIRP_ENABLED" = "yes" ]; then
KERNEL_NETWORK_CMD="ip=dhcp"
QEMU_TAP_CMD=""
- QEMU_UI_OPTIONS="-show-cursor -usb -usbdevice wacom-tablet"
+ QEMU_UI_OPTIONS="-show-cursor -usb -usbdevice tablet"
QEMU_NETWORK_CMD=""
DROOT="/dev/vda"
ROOTFS_OPTIONS="-drive file=$ROOTFS,if=virtio,format=raw"
@@ -268,7 +213,7 @@ else
ROOTFS_OPTIONS="-drive file=$ROOTFS,if=virtio,format=raw"
KERNCMDLINE="mem=$QEMU_MEMORY"
- QEMU_UI_OPTIONS="-show-cursor -usb -usbdevice wacom-tablet"
+ QEMU_UI_OPTIONS="-show-cursor -usb -usbdevice tablet"
NFS_INSTANCE=`echo $TAP | sed 's/tap//'`
export NFS_INSTANCE
@@ -279,35 +224,13 @@ else
fi
fi
-case "$MACHINE" in
- "qemuarm") ;;
- "qemuarm64") ;;
- "qemumicroblaze") ;;
- "qemumips") ;;
- "qemumipsel") ;;
- "qemumips64") ;;
- "qemush4") ;;
- "qemuppc") ;;
- "qemuarmv6") ;;
- "qemuarmv7") ;;
- "qemux86") ;;
- "qemux86-64") ;;
- "qemuzynq") ;;
- "akita") ;;
- "spitz") ;;
- *)
- echo "Error: Unsupported machine type $MACHINE"
- return 1
- ;;
-esac
-
-if [ ! -f "$KERNEL" -a "x$FSTYPE" != "xvmdk" -a "x$FSTYPE" != "xhddimg" -a "x$FSTYPE" != "xhdddirect" ]; then
+if [ ! -f "$KERNEL" -a "$IS_VM" = "false" ]; then
echo "Error: Kernel image file $KERNEL doesn't exist"
cleanup
return 1
fi
-if [ "$FSTYPE" != "nfs" -a "$FSTYPE" != "vmdk" -a "$FSTYPE" != "hddimg" -a "$FSTYPE" != "hdddirect" -a ! -f "$ROOTFS" ]; then
+if [ "$FSTYPE" != "nfs" -a "$IS_VM" = "false" -a ! -f "$ROOTFS" ]; then
echo "Error: Image file $ROOTFS doesn't exist"
cleanup
return 1
@@ -343,13 +266,26 @@ if [ "$FSTYPE" = "nfs" ]; then
NFSRUNNING="true"
fi
-if [ "$MACHINE" = "qemuarm" -o "$MACHINE" = "qemuarmv6" -o "$MACHINE" = "qemuarmv7" ]; then
+
+set_mem_size() {
+ if [ ! -z "$mem_set" ] ; then
+ #Get memory setting size from user input
+ mem_size=`echo $mem_set | sed 's/-m[[:space:]] *//'`
+ else
+ mem_size=$1
+ fi
+ # QEMU_MEMORY has 'M' appended to mem_size
+ QEMU_MEMORY="$mem_size"M
+
+}
+
+config_qemuarm() {
+ set_mem_size 128
QEMU=qemu-system-arm
MACHINE_SUBTYPE=versatilepb
export QEMU_AUDIO_DRV="none"
QEMU_UI_OPTIONS="$QEMU_UI_OPTIONS"
- # QEMU_UI_OPTIONS="$QEMU_UI_OPTIONS -force-pointer"
- if [ "${FSTYPE:0:3}" = "ext" -o "$FSTYPE" = "btrfs" ]; then
+ if [ "${FSTYPE:0:3}" = "ext" -o "$FSTYPE" = "btrfs" -o "$FSTYPE" = "wic" ]; then
KERNCMDLINE="root=$DROOT rw console=ttyAMA0,115200 console=tty $KERNEL_NETWORK_CMD mem=$QEMU_MEMORY highres=off"
QEMUOPTIONS="$QEMU_NETWORK_CMD -M ${MACHINE_SUBTYPE} $ROOTFS_OPTIONS -no-reboot $QEMU_UI_OPTIONS"
fi
@@ -368,9 +304,10 @@ if [ "$MACHINE" = "qemuarm" -o "$MACHINE" = "qemuarmv6" -o "$MACHINE" = "qemuarm
if [ "$MACHINE" = "qemuarmv7" ]; then
QEMUOPTIONS="$QEMUOPTIONS -cpu cortex-a8"
fi
-fi
+}
-if [ "$MACHINE" = "qemuarm64" ]; then
+config_qemuarm64() {
+ set_mem_size 512
QEMU=qemu-system-aarch64
QEMU_NETWORK_CMD="-netdev tap,id=net0,ifname=$TAP,script=no,downscript=no -device virtio-net-device,netdev=net0 "
@@ -383,7 +320,7 @@ if [ "$MACHINE" = "qemuarm64" ]; then
else
QEMU_UI_OPTIONS=""
fi
- if [ "${FSTYPE:0:3}" = "ext" -o "$FSTYPE" = "btrfs" ]; then
+ if [ "${FSTYPE:0:3}" = "ext" -o "$FSTYPE" = "btrfs" -o "$FSTYPE" = "wic" ]; then
KERNCMDLINE="root=$DROOT rw console=ttyAMA0,38400 mem=$QEMU_MEMORY highres=off $KERNEL_NETWORK_CMD"
# qemu-system-aarch64 only support '-machine virt -cpu cortex-a57' for now
QEMUOPTIONS="$QEMU_NETWORK_CMD -machine virt -cpu cortex-a57 $ROOTFS_OPTIONS $QEMU_UI_OPTIONS"
@@ -397,9 +334,10 @@ if [ "$MACHINE" = "qemuarm64" ]; then
KERNCMDLINE="root=/dev/nfs nfsroot=$NFS_SERVER:$NFS_DIR,$UNFS_OPTS rw console=ttyAMA0,38400 mem=$QEMU_MEMORY highres=off $KERNEL_NETWORK_CMD"
QEMUOPTIONS="$QEMU_NETWORK_CMD -machine virt -cpu cortex-a57 $QEMU_UI_OPTIONS"
fi
-fi
+}
-if [ "$MACHINE" = "qemux86" ]; then
+config_qemux86() {
+ set_mem_size 256
QEMU=qemu-system-i386
if [ "$KVM_ACTIVE" = "yes" ]; then
CPU_SUBTYPE=kvm32
@@ -411,7 +349,7 @@ if [ "$MACHINE" = "qemux86" ]; then
else
QEMU_UI_OPTIONS="$QEMU_UI_OPTIONS -vga vmware"
fi
- if [ "${FSTYPE:0:3}" = "ext" -o "$FSTYPE" = "btrfs" ]; then
+ if [ "${FSTYPE:0:3}" = "ext" -o "$FSTYPE" = "btrfs" -o "$FSTYPE" = "wic" ]; then
KERNCMDLINE="vga=0 uvesafb.mode_option=640x480-32 root=$DROOT rw mem=$QEMU_MEMORY $KERNEL_NETWORK_CMD"
QEMUOPTIONS="$QEMU_NETWORK_CMD -cpu $CPU_SUBTYPE $ROOTFS_OPTIONS $QEMU_UI_OPTIONS"
fi
@@ -429,15 +367,16 @@ if [ "$MACHINE" = "qemux86" ]; then
KERNCMDLINE="root=/dev/nfs nfsroot=$NFS_SERVER:$NFS_DIR,$UNFS_OPTS rw $KERNEL_NETWORK_CMD mem=$QEMU_MEMORY"
QEMUOPTIONS="$QEMU_NETWORK_CMD $QEMU_UI_OPTIONS"
fi
- if [ "$FSTYPE" = "vmdk" -o "$FSTYPE" = "hddimg" -o "$FSTYPE" = "hdddirect" ]; then
+ if [ "$IS_VM" = "true" ]; then
QEMUOPTIONS="$QEMU_NETWORK_CMD $QEMU_UI_OPTIONS"
fi
# Currently oprofile's event based interrupt mode doesn't work(Bug #828) in
# qemux86 and qemux86-64. We can use timer interrupt mode for now.
KERNCMDLINE="$KERNCMDLINE oprofile.timer=1"
-fi
+}
-if [ "$MACHINE" = "qemux86-64" ]; then
+config_qemux86_64() {
+ set_mem_size 256
QEMU=qemu-system-x86_64
if [ "$KVM_ACTIVE" = "yes" ]; then
CPU_SUBTYPE=kvm64
@@ -449,7 +388,7 @@ if [ "$MACHINE" = "qemux86-64" ]; then
else
QEMU_UI_OPTIONS="$QEMU_UI_OPTIONS -vga vmware"
fi
- if [ "${FSTYPE:0:3}" = "ext" -o "$FSTYPE" = "btrfs" ]; then
+ if [ "${FSTYPE:0:3}" = "ext" -o "$FSTYPE" = "btrfs" -o "$FSTYPE" = "wic" ]; then
KERNCMDLINE="vga=0 uvesafb.mode_option=640x480-32 root=$DROOT rw mem=$QEMU_MEMORY $KERNEL_NETWORK_CMD"
QEMUOPTIONS="$QEMU_NETWORK_CMD -cpu $CPU_SUBTYPE $ROOTFS_OPTIONS $QEMU_UI_OPTIONS"
fi
@@ -465,29 +404,16 @@ if [ "$MACHINE" = "qemux86-64" ]; then
KERNCMDLINE="root=/dev/nfs nfsroot=$NFS_SERVER:$NFS_DIR,$UNFS_OPTS rw $KERNEL_NETWORK_CMD mem=$QEMU_MEMORY"
QEMUOPTIONS="$QEMU_NETWORK_CMD -cpu $CPU_SUBTYPE $QEMU_UI_OPTIONS"
fi
- if [ "$FSTYPE" = "vmdk" -o "$FSTYPE" = "hddimg" -o "$FSTYPE" = "hdddirect" ]; then
+ if [ "$IS_VM" = "true" ]; then
QEMUOPTIONS="$QEMU_NETWORK_CMD -cpu $CPU_SUBTYPE $QEMU_UI_OPTIONS"
fi
# Currently oprofile's event based interrupt mode doesn't work(Bug #828) in
# qemux86 and qemux86-64. We can use timer interrupt mode for now.
KERNCMDLINE="$KERNCMDLINE oprofile.timer=1"
-fi
-
-if [ "$MACHINE" = "spitz" ]; then
- QEMU=qemu-system-arm
- if [ "${FSTYPE:0:3}" = "ext" -o "$FSTYPE" = "btrfs" ]; then
- echo $ROOTFS
- ROOTFS=`readlink -f $ROOTFS`
- echo $ROOTFS
- if [ ! -e "$ROOTFS.qemudisk" ]; then
- echo "Adding a partition table to the ext3 image for use by QEMU, please wait..."
- runqemu-addptable2image $ROOTFS $ROOTFS.qemudisk
- fi
- QEMUOPTIONS="$QEMU_NETWORK_CMD -M spitz -hda $ROOTFS.qemudisk -portrait"
- fi
-fi
+}
-if [ "$MACHINE" = "qemumips" -o "$MACHINE" = "qemumipsel" -o "$MACHINE" = "qemumips64" ]; then
+config_qemumips() {
+ set_mem_size 256
case "$MACHINE" in
qemumips) QEMU=qemu-system-mips ;;
qemumipsel) QEMU=qemu-system-mipsel ;;
@@ -495,7 +421,7 @@ if [ "$MACHINE" = "qemumips" -o "$MACHINE" = "qemumipsel" -o "$MACHINE" = "qemum
esac
MACHINE_SUBTYPE=malta
QEMU_UI_OPTIONS="-vga cirrus $QEMU_UI_OPTIONS"
- if [ "${FSTYPE:0:3}" = "ext" -o "$FSTYPE" = "btrfs" ]; then
+ if [ "${FSTYPE:0:3}" = "ext" -o "$FSTYPE" = "btrfs" -o "$FSTYPE" = "wic" ]; then
#KERNCMDLINE="root=/dev/hda console=ttyS0 console=tty0 $KERNEL_NETWORK_CMD mem=$QEMU_MEMORY"
KERNCMDLINE="root=$DROOT rw console=ttyS0 console=tty $KERNEL_NETWORK_CMD mem=$QEMU_MEMORY"
QEMUOPTIONS="$QEMU_NETWORK_CMD -M $MACHINE_SUBTYPE $ROOTFS_OPTIONS -no-reboot $QEMU_UI_OPTIONS"
@@ -509,9 +435,10 @@ if [ "$MACHINE" = "qemumips" -o "$MACHINE" = "qemumipsel" -o "$MACHINE" = "qemum
KERNCMDLINE="root=/dev/nfs console=ttyS0 console=tty nfsroot=$NFS_SERVER:$NFS_DIR,$UNFS_OPTS rw $KERNEL_NETWORK_CMD mem=$QEMU_MEMORY"
QEMUOPTIONS="$QEMU_NETWORK_CMD -M $MACHINE_SUBTYPE -no-reboot $QEMU_UI_OPTIONS"
fi
-fi
+}
-if [ "$MACHINE" = "qemuppc" ]; then
+config_qemuppc() {
+ set_mem_size 256
QEMU=qemu-system-ppc
MACHINE_SUBTYPE=mac99
CPU_SUBTYPE=G4
@@ -521,7 +448,7 @@ if [ "$MACHINE" = "qemuppc" ]; then
else
QEMU_NETWORK_CMD="-net nic,model=pcnet $QEMU_TAP_CMD"
fi
- if [ "${FSTYPE:0:3}" = "ext" -o "$FSTYPE" = "btrfs" ]; then
+ if [ "${FSTYPE:0:3}" = "ext" -o "$FSTYPE" = "btrfs" -o "$FSTYPE" = "wic" ]; then
KERNCMDLINE="root=$DROOT rw console=ttyS0 console=tty $KERNEL_NETWORK_CMD mem=$QEMU_MEMORY"
QEMUOPTIONS="$QEMU_NETWORK_CMD -cpu $CPU_SUBTYPE -M $MACHINE_SUBTYPE $ROOTFS_OPTIONS -no-reboot $QEMU_UI_OPTIONS"
fi
@@ -534,13 +461,14 @@ if [ "$MACHINE" = "qemuppc" ]; then
KERNCMDLINE="root=/dev/nfs console=ttyS0 console=tty nfsroot=$NFS_SERVER:$NFS_DIR,$UNFS_OPTS rw $KERNEL_NETWORK_CMD mem=$QEMU_MEMORY"
QEMUOPTIONS="$QEMU_NETWORK_CMD -cpu $CPU_SUBTYPE -M $MACHINE_SUBTYPE -no-reboot $QEMU_UI_OPTIONS"
fi
-fi
+}
-if [ "$MACHINE" = "qemush4" ]; then
+config_qemush4() {
+ set_mem_size 1024
QEMU=qemu-system-sh4
MACHINE_SUBTYPE=r2d
QEMU_UI_OPTIONS="$QEMU_UI_OPTIONS"
- if [ "${FSTYPE:0:3}" = "ext" -o "$FSTYPE" = "btrfs" ]; then
+ if [ "${FSTYPE:0:3}" = "ext" -o "$FSTYPE" = "btrfs" -o "$FSTYPE" = "wic" ]; then
#KERNCMDLINE="root=/dev/hda console=ttyS0 console=tty0 $KERNEL_NETWORK_CMD mem=$QEMU_MEMORY"
KERNCMDLINE="root=/dev/hda rw console=ttySC1 noiotrap earlyprintk=sh-sci.1 console=tty $KERNEL_NETWORK_CMD mem=$QEMU_MEMORY"
QEMUOPTIONS="$QEMU_NETWORK_CMD -M $MACHINE_SUBTYPE -hda $ROOTFS -no-reboot $QEMU_UI_OPTIONS -monitor null -serial vc -serial stdio"
@@ -556,50 +484,80 @@ if [ "$MACHINE" = "qemush4" ]; then
QEMUOPTIONS="$QEMU_NETWORK_CMD -M $MACHINE_SUBTYPE -no-reboot $QEMU_UI_OPTIONS -monitor null -serial vc -serial stdio"
SERIALSTDIO="1"
fi
-fi
-
-if [ "${FSTYPE:0:3}" = "ext" ]; then
- KERNCMDLINE="$KERNCMDLINE rootfstype=$FSTYPE"
-fi
+}
-if [ "$MACHINE" = "akita" ]; then
+config_qemuzynq() {
+ set_mem_size 1024
QEMU=qemu-system-arm
- if [ "$FSTYPE" = "jffs2" ]; then
- ROOTFS=`readlink -f $ROOTFS`
- if [ ! -e "$ROOTFS.qemuflash" ]; then
- echo "Converting raw image into flash image format for use by QEMU, please wait..."
- raw2flash.akita < $ROOTFS > $ROOTFS.qemuflash
- fi
- QEMUOPTIONS="$QEMU_NETWORK_CMD -M akita -mtdblock $ROOTFS.qemuflash -portrait"
+ QEMU_SYSTEM_OPTIONS="$QEMU_NETWORK_CMD -M xilinx-zynq-a9 -serial null -serial mon:stdio -dtb $KERNEL-$MACHINE.dtb"
+ # zynq serial ports are named 'ttyPS0' and 'ttyPS1', fixup the default values
+ SCRIPT_KERNEL_OPT=$(echo "$SCRIPT_KERNEL_OPT" | sed 's/console=ttyS/console=ttyPS/g')
+ if [ "${FSTYPE:0:3}" = "ext" -o "${FSTYPE:0:4}" = "cpio" ]; then
+ KERNCMDLINE="earlyprintk root=/dev/ram rw $KERNEL_NETWORK_CMD mem=$QEMU_MEMORY"
+ QEMUOPTIONS="$QEMU_SYSTEM_OPTIONS -initrd $ROOTFS"
fi
-fi
+}
-if [ "$MACHINE" = "qemumicroblaze" ]; then
+config_qemumicroblaze() {
+ set_mem_size 256
QEMU=qemu-system-microblazeel
QEMU_SYSTEM_OPTIONS="$QEMU_NETWORK_CMD -M petalogix-ml605 -serial mon:stdio"
if [ "${FSTYPE:0:3}" = "ext" -o "${FSTYPE:0:4}" = "cpio" ]; then
KERNCMDLINE="earlyprintk root=/dev/ram rw $KERNEL_NETWORK_CMD mem=$QEMU_MEMORY"
QEMUOPTIONS="$QEMU_SYSTEM_OPTIONS -initrd $ROOTFS"
fi
+}
+
+case "$MACHINE" in
+ "qemuarm" | "qemuarmv6" | "qemuarmv7")
+ config_qemuarm
+ ;;
+ "qemuarm64")
+ config_qemuarm64
+ ;;
+ "qemux86")
+ config_qemux86
+ ;;
+ "qemux86-64")
+ config_qemux86_64
+ ;;
+ "qemumips" | "qemumipsel" | "qemumips64")
+ config_qemumips
+ ;;
+ "qemuppc")
+ config_qemuppc
+ ;;
+ "qemush4")
+ config_qemush4
+ ;;
+ "qemuzynq")
+ config_qemuzynq
+ ;;
+ "qemumicroblaze")
+ config_qemumicroblaze
+ ;;
+ *)
+ echo "Error: Unsupported machine type $MACHINE"
+ return 1
+ ;;
+esac
+
+# We need to specify -m <mem_size> to overcome a bug in qemu 0.14.0
+# https://bugs.launchpad.net/ubuntu/+source/qemu-kvm/+bug/584480
+if [ -z "$mem_set" ] ; then
+ SCRIPT_QEMU_EXTRA_OPT="$SCRIPT_QEMU_EXTRA_OPT -m $mem_size"
fi
-if [ "$MACHINE" = "qemuzynq" ]; then
- QEMU=qemu-system-arm
- QEMU_SYSTEM_OPTIONS="$QEMU_NETWORK_CMD -M xilinx-zynq-a9 -serial null -serial mon:stdio -dtb $KERNEL-$MACHINE.dtb"
- # zynq serial ports are named 'ttyPS0' and 'ttyPS1', fixup the default values
- SCRIPT_KERNEL_OPT=$(echo "$SCRIPT_KERNEL_OPT" | sed 's/console=ttyS/console=ttyPS/g')
- if [ "${FSTYPE:0:3}" = "ext" -o "${FSTYPE:0:4}" = "cpio" ]; then
- KERNCMDLINE="earlyprintk root=/dev/ram rw $KERNEL_NETWORK_CMD mem=$QEMU_MEMORY"
- QEMUOPTIONS="$QEMU_SYSTEM_OPTIONS -initrd $ROOTFS"
- fi
+if [ "${FSTYPE:0:3}" = "ext" ]; then
+ KERNCMDLINE="$KERNCMDLINE rootfstype=$FSTYPE"
fi
-if [ "x$RAMFS" = "xtrue" ]; then
+if [ "$FSTYPE" = "cpio.gz" ]; then
QEMUOPTIONS="-initrd $ROOTFS -nographic"
KERNCMDLINE="root=/dev/ram0 console=ttyS0 debugshell"
fi
-if [ "x$ISOFS" = "xtrue" ]; then
+if [ "$FSTYPE" = "iso" ]; then
QEMUOPTIONS="$QEMU_NETWORK_CMD -cdrom $ROOTFS $QEMU_UI_OPTIONS"
fi
@@ -720,18 +678,32 @@ elif [ "$NUM_SERIAL_OPTS" = "1" ]; then
SCRIPT_QEMU_EXTRA_OPT="$SCRIPT_QEMU_EXTRA_OPT $SECOND_SERIAL_OPT"
fi
-
echo "Running $QEMU..."
# -no-reboot is a mandatory option - see bug #100
-if [ "$FSTYPE" = "vmdk" -o "$FSTYPE" = "hddimg" -o "$FSTYPE" = "hdddirect" ]; then
- echo $QEMUBIN $VM $QEMUOPTIONS $SERIALOPTS -no-reboot $SCRIPT_QEMU_OPT $SCRIPT_QEMU_EXTRA_OPT
- LD_PRELOAD="$GL_LD_PRELOAD" $QEMUBIN $VM $QEMUOPTIONS $SERIALOPTS -no-reboot $SCRIPT_QEMU_OPT $SCRIPT_QEMU_EXTRA_OPT
-elif [ "$FSTYPE" = "iso" ]; then
- echo $QEMUBIN $QEMUOPTIONS $SERIALOPTS -no-reboot $SCRIPT_QEMU_OPT $SCRIPT_QEMU_EXTRA_OPT
- LD_PRELOAD="$GL_LD_PRELOAD" $QEMUBIN $QEMUOPTIONS $SERIALOPTS -no-reboot $SCRIPT_QEMU_OPT $SCRIPT_QEMU_EXTRA_OPT
+if [ "$IS_VM" = "true" ]; then
+ # Check root=/dev/sdX or root=/dev/vdX
+ [ ! -e "$VM" ] && error "VM image is not found!"
+ if grep -q 'root=/dev/sd' $VM; then
+ echo "Using scsi drive"
+ VM_DRIVE="-drive if=none,id=hd,file=$VM -device virtio-scsi-pci,id=scsi -device scsi-hd,drive=hd"
+ elif grep -q 'root=/dev/hd' $VM; then
+ echo "Using ide drive"
+ VM_DRIVE="$VM"
+ else
+ echo "Using virtio block drive"
+ VM_DRIVE="-drive if=virtio,file=$VM"
+ fi
+ QEMU_FIRE="$QEMUBIN $VM_DRIVE $QEMUOPTIONS $SERIALOPTS -no-reboot $SCRIPT_QEMU_OPT $SCRIPT_QEMU_EXTRA_OPT"
+ echo $QEMU_FIRE
+ LD_PRELOAD="$GL_LD_PRELOAD" $QEMU_FIRE
+elif [ "$FSTYPE" = "iso" -o "$FSTYPE" = "wic" ]; then
+ QEMU_FIRE="$QEMUBIN $QEMUOPTIONS $SERIALOPTS -no-reboot $SCRIPT_QEMU_OPT $SCRIPT_QEMU_EXTRA_OPT"
+ echo $QEMU_FIRE
+ LD_PRELOAD="$GL_LD_PRELOAD" $QEMU_FIRE
else
- echo $QEMUBIN -kernel $KERNEL $QEMUOPTIONS $SLIRP_CMD $SERIALOPTS -no-reboot $SCRIPT_QEMU_OPT $SCRIPT_QEMU_EXTRA_OPT --append '"'$KERNCMDLINE $SCRIPT_KERNEL_OPT'"'
- LD_PRELOAD="$GL_LD_PRELOAD" $QEMUBIN -kernel $KERNEL $QEMUOPTIONS $SERIALOPTS -no-reboot $SCRIPT_QEMU_OPT $SCRIPT_QEMU_EXTRA_OPT --append "$KERNCMDLINE $SCRIPT_KERNEL_OPT"
+ QEMU_FIRE="$QEMUBIN -kernel $KERNEL $QEMUOPTIONS $SLIRP_CMD $SERIALOPTS -no-reboot $SCRIPT_QEMU_OPT $SCRIPT_QEMU_EXTRA_OPT"
+ echo $QEMU_FIRE -append '"'$KERNCMDLINE $SCRIPT_KERNEL_OPT'"'
+ LD_PRELOAD="$GL_LD_PRELOAD" $QEMU_FIRE -append "$KERNCMDLINE $SCRIPT_KERNEL_OPT"
fi
ret=$?
if [ "$SLIRP_ENABLED" != "yes" ]; then
diff --git a/yocto-poky/scripts/send-error-report b/yocto-poky/scripts/send-error-report
index 1a1b96580..a29feff32 100755
--- a/yocto-poky/scripts/send-error-report
+++ b/yocto-poky/scripts/send-error-report
@@ -15,6 +15,10 @@ import subprocess
import argparse
import logging
+scripts_lib_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'lib')
+sys.path.insert(0, scripts_lib_path)
+import argparse_oe
+
version = "0.3"
log = logging.getLogger("send-error-report")
@@ -143,7 +147,7 @@ def send_data(data, args):
if __name__ == '__main__':
- arg_parse = argparse.ArgumentParser(description="This scripts will send an error report to your specified error-report-web server.")
+ arg_parse = argparse_oe.ArgumentParser(description="This scripts will send an error report to your specified error-report-web server.")
arg_parse.add_argument("error_file",
help="Generated error report file location",
diff --git a/yocto-poky/scripts/sstate-cache-management.sh b/yocto-poky/scripts/sstate-cache-management.sh
index 2286765eb..2ab450ab5 100755
--- a/yocto-poky/scripts/sstate-cache-management.sh
+++ b/yocto-poky/scripts/sstate-cache-management.sh
@@ -101,7 +101,7 @@ do_nothing () {
# Read the input "y"
read_confirm () {
- echo "$total_deleted from $total_files files will be removed! "
+ echo "$total_deleted out of $total_files files will be removed! "
if [ "$confirm" != "y" ]; then
echo "Do you want to continue (y/n)? "
while read confirm; do
@@ -282,7 +282,7 @@ remove_duplicated () {
deleted_tgz=`cat $rm_list.* 2>/dev/null | grep ".tgz$" | wc -l`
deleted_files=`cat $rm_list.* 2>/dev/null | wc -l`
[ "$deleted_files" -gt 0 -a $debug -gt 0 ] && cat $rm_list.*
- echo "($deleted_tgz from $total_tgz_suffix .tgz files for $suffix suffix will be removed or $deleted_files from $total_files_suffix when counting also .siginfo and .done files)"
+ echo "($deleted_tgz out of $total_tgz_suffix .tgz files for $suffix suffix will be removed or $deleted_files out of $total_files_suffix when counting also .siginfo and .done files)"
let total_deleted=$total_deleted+$deleted_files
done
deleted_tgz=0
diff --git a/yocto-poky/scripts/sstate-sysroot-cruft.sh b/yocto-poky/scripts/sstate-sysroot-cruft.sh
index f62485eaa..b7ed8ea84 100755
--- a/yocto-poky/scripts/sstate-sysroot-cruft.sh
+++ b/yocto-poky/scripts/sstate-sysroot-cruft.sh
@@ -90,6 +90,12 @@ WHITELIST="${WHITELIST} \
WHITELIST="${WHITELIST} \
.*\.pyc \
.*\.pyo \
+ .*/__pycache__ \
+"
+
+# generated by lua
+WHITELIST="${WHITELIST} \
+ .*\.luac \
"
# generated by sgml-common-native
@@ -97,6 +103,16 @@ WHITELIST="${WHITELIST} \
.*/etc/sgml/sgml-docbook.bak \
"
+# generated by php
+WHITELIST="${WHITELIST} \
+ .*/usr/lib/php5/php/.channels/.* \
+ .*/usr/lib/php5/php/.registry/.* \
+ .*/usr/lib/php5/php/.depdb \
+ .*/usr/lib/php5/php/.depdblock \
+ .*/usr/lib/php5/php/.filemap \
+ .*/usr/lib/php5/php/.lock \
+"
+
# generated by toolchain
WHITELIST="${WHITELIST} \
[^/]*-tcbootstrap/lib \
@@ -106,6 +122,23 @@ WHITELIST="${WHITELIST} \
WHITELIST="${WHITELIST} \
[^/]*/home \
[^/]*/home/xuser \
+ [^/]*/home/xuser/.bashrc \
+ [^/]*/home/xuser/.profile \
+ [^/]*/home/builder \
+ [^/]*/home/builder/.bashrc \
+ [^/]*/home/builder/.profile \
+"
+
+# generated by image.py for WIC
+# introduced in oe-core commit 861ce6c5d4836df1a783be3b01d2de56117c9863
+WHITELIST="${WHITELIST} \
+ [^/]*/imgdata \
+ [^/]*/imgdata/[^/]*\.env \
+"
+
+# generated by fontcache.bbclass
+WHITELIST="${WHITELIST} \
+ .*/var/cache/fontconfig/ \
"
SYSROOTS="`readlink -f ${tmpdir}`/sysroots/"
diff --git a/yocto-poky/scripts/test-remote-image b/yocto-poky/scripts/test-remote-image
index f3a44ebe5..9c5b0158d 100755
--- a/yocto-poky/scripts/test-remote-image
+++ b/yocto-poky/scripts/test-remote-image
@@ -38,6 +38,7 @@ lib_path = scripts_path + '/lib'
sys.path = sys.path + [lib_path]
import scriptpath
+import argparse_oe
# Add meta/lib to sys.path
scriptpath.add_oe_lib_path()
@@ -82,7 +83,7 @@ log = logger_create()
# Define and return the arguments parser for the script
def get_args_parser():
description = "This script is used to run automated runtime tests using remotely published image files. You should prepare the build environment just like building local images and running the tests."
- parser = argparse.ArgumentParser(description=description)
+ parser = argparse_oe.ArgumentParser(description=description)
parser.add_argument('--image-types', required=True, action="store", nargs='*', dest="image_types", default=None, help='The image types to test(ex: core-image-minimal).')
parser.add_argument('--repo-link', required=True, action="store", type=str, dest="repo_link", default=None, help='The link to the remote images repository.')
parser.add_argument('--required-packages', required=False, action="store", nargs='*', dest="required_packages", default=None, help='Required packages for the tests. They will be built before the testing begins.')
@@ -356,5 +357,5 @@ if __name__ == "__main__":
except Exception:
ret = 1
import traceback
- traceback.print_exc(5)
+ traceback.print_exc()
sys.exit(ret)
diff --git a/yocto-poky/scripts/wic b/yocto-poky/scripts/wic
index 7ad2b191c..2286f20a9 100755
--- a/yocto-poky/scripts/wic
+++ b/yocto-poky/scripts/wic
@@ -41,12 +41,12 @@ from distutils import spawn
# External modules
scripts_path = os.path.abspath(os.path.dirname(__file__))
lib_path = scripts_path + '/lib'
-sys.path.append(lib_path)
+sys.path.insert(0, lib_path)
bitbake_exe = spawn.find_executable('bitbake')
if bitbake_exe:
bitbake_path = os.path.join(os.path.dirname(bitbake_exe), '../lib')
- sys.path.append(bitbake_path)
+ sys.path.insert(0, bitbake_path)
from bb import cookerdata
from bb.main import bitbake_main, BitBakeConfigParameters
else:
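
Changing sys.path.append() to sys.path.insert(0, ...) above makes the bundled scripts/lib and bitbake lib directories take precedence over any same-named modules already installed on the host, since imports are resolved in sys.path order. A small illustration with a hypothetical path:

    import sys

    lib_path = '/path/to/poky/scripts/lib'   # illustrative location
    sys.path.insert(0, lib_path)             # searched before site-packages
    # sys.path.append(lib_path) would be searched last, so an older module of
    # the same name installed on the host would win instead.
    print(sys.path[0])
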
diff --git a/yocto-poky/scripts/wipe-sysroot b/yocto-poky/scripts/wipe-sysroot
index c22d39a40..9e067e8df 100755
--- a/yocto-poky/scripts/wipe-sysroot
+++ b/yocto-poky/scripts/wipe-sysroot
@@ -42,7 +42,7 @@ fi
echo "Deleting the sysroots in $STAGING_DIR, and selected stamps in $SSTATE_MANIFESTS and $STAMPS_DIR."
# The sysroots themselves
-rm -rf $STAGING_DIR
+rm -rf $STAGING_DIR ${STAGING_DIR}-uninative
# The stamps that said the sysroot was populated
rm -rf $STAMPS_DIR/*/*/*.do_populate_sysroot.*
diff --git a/yocto-poky/scripts/yocto-bsp b/yocto-poky/scripts/yocto-bsp
index 2d9453fbe..82a050ebd 100755
--- a/yocto-poky/scripts/yocto-bsp
+++ b/yocto-poky/scripts/yocto-bsp
@@ -151,6 +151,6 @@ if __name__ == "__main__":
except Exception:
ret = 1
import traceback
- traceback.print_exc(5)
+ traceback.print_exc()
sys.exit(ret)
diff --git a/yocto-poky/scripts/yocto-kernel b/yocto-poky/scripts/yocto-kernel
index c9b2821e0..daaad0752 100755
--- a/yocto-poky/scripts/yocto-kernel
+++ b/yocto-poky/scripts/yocto-kernel
@@ -395,5 +395,5 @@ if __name__ == "__main__":
except Exception:
ret = 1
import traceback
- traceback.print_exc(5)
+ traceback.print_exc()
sys.exit(ret)
diff --git a/yocto-poky/scripts/yocto-layer b/yocto-poky/scripts/yocto-layer
index 53d2aabd3..356972ece 100755
--- a/yocto-poky/scripts/yocto-layer
+++ b/yocto-poky/scripts/yocto-layer
@@ -75,7 +75,11 @@ def yocto_layer_create_subcommand(args, usage_str):
if options.outdir:
layer_output_dir = options.outdir
else:
- layer_output_dir = "meta-" + layer_name
+        prefix = "meta-"
+        if not layer_name.startswith(prefix):
+            layer_output_dir = "%s%s" % (prefix, layer_name)
+        else:
+            layer_output_dir = layer_name
yocto_layer_create(layer_name, scripts_path, layer_output_dir, options.codedump, options.properties_file, properties)
@@ -142,6 +146,6 @@ if __name__ == "__main__":
except Exception:
ret = 1
import traceback
- traceback.print_exc(5)
+ traceback.print_exc()
sys.exit(ret)