Diffstat (limited to 'import-layers/yocto-poky/scripts')
-rwxr-xr-x  import-layers/yocto-poky/scripts/buildhistory-diff  125
-rwxr-xr-x  import-layers/yocto-poky/scripts/buildstats-diff  324
-rwxr-xr-x  import-layers/yocto-poky/scripts/contrib/build-perf-test-wrapper.sh  30
-rwxr-xr-x  import-layers/yocto-poky/scripts/contrib/oe-build-perf-report-email.py  55
-rwxr-xr-x  import-layers/yocto-poky/scripts/contrib/patchreview.py  211
-rwxr-xr-x  import-layers/yocto-poky/scripts/contrib/patchtest.sh  118
-rwxr-xr-x  import-layers/yocto-poky/scripts/contrib/python/generate-manifest-2.7.py  19
-rwxr-xr-x  import-layers/yocto-poky/scripts/contrib/python/generate-manifest-3.5.py  25
-rwxr-xr-x  import-layers/yocto-poky/scripts/create-pull-request  14
-rwxr-xr-x  import-layers/yocto-poky/scripts/devtool  20
-rw-r--r--  import-layers/yocto-poky/scripts/lib/argparse_oe.py  7
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/help.py  8
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine.scc  2
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/kernel-list.noinstall  4
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-dev.bbappend  5
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend  4
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_4.12.bbappend (renamed from import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend)  8
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend  4
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_4.10.bbappend  5
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_4.12.bbappend (renamed from import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_4.1.bbappend)  9
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_4.4.bbappend  5
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom.bb  1
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom/machine-user-features.scc  1
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom/machine.scc  2
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine.scc  2
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/kernel-list.noinstall  4
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-dev.bbappend  5
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend  4
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_4.12.bbappend (renamed from import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_4.8.bbappend)  8
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend  4
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.10.bbappend  5
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.12.bbappend (renamed from import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.1.bbappend)  9
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.4.bbappend  5
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.8.bbappend  34
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine.scc  2
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/kernel-list.noinstall  4
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-dev.bbappend  5
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend  35
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend  4
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.12.bbappend (renamed from import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend)  8
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend  4
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.8.bbappend  35
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_4.10.bbappend  5
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_4.12.bbappend (renamed from import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_4.8.bbappend)  9
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_4.4.bbappend  5
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine.scc  2
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/kernel-list.noinstall  4
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-dev.bbappend  5
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend  35
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend  4
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.12.bbappend (renamed from import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_4.8.bbappend)  8
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend  4
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.8.bbappend  35
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_4.10.bbappend  5
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_4.12.bbappend (renamed from import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_4.1.bbappend)  9
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_4.4.bbappend  5
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_4.8.bbappend  34
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine.scc  2
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/kernel-list.noinstall  4
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-dev.bbappend  5
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend  35
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend  4
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.12.bbappend  37
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend  4
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.8.bbappend  35
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.1.bbappend  34
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.10.bbappend  5
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.12.bbappend (renamed from import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_4.1.bbappend)  9
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.4.bbappend  5
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.8.bbappend  34
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine.scc  2
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/kernel-list.noinstall  4
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-dev.bbappend  6
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend  5
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_4.12.bbappend (renamed from import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend)  9
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend  5
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_4.8.bbappend  64
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.10.bbappend  6
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.12.bbappend (renamed from import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.1.bbappend)  10
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.4.bbappend  6
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.8.bbappend  63
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine.scc  2
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/kernel-list.noinstall  4
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-dev.bbappend  5
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend  35
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend  4
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.12.bbappend  37
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend  4
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.8.bbappend  35
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.1.bbappend  34
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.10.bbappend  5
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.12.bbappend (renamed from import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_4.8.bbappend)  9
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.4.bbappend  5
-rw-r--r--  import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.8.bbappend  34
-rw-r--r--  import-layers/yocto-poky/scripts/lib/build_perf/html/report.html  116
-rw-r--r--  import-layers/yocto-poky/scripts/lib/build_perf/report.py  5
-rw-r--r--  import-layers/yocto-poky/scripts/lib/buildstats.py  349
-rw-r--r--  import-layers/yocto-poky/scripts/lib/checklayer/__init__.py (renamed from import-layers/yocto-poky/scripts/lib/compatlayer/__init__.py)  10
-rw-r--r--  import-layers/yocto-poky/scripts/lib/checklayer/case.py (renamed from import-layers/yocto-poky/scripts/lib/compatlayer/case.py)  2
-rw-r--r--  import-layers/yocto-poky/scripts/lib/checklayer/cases/__init__.py (renamed from import-layers/yocto-poky/scripts/lib/compatlayer/cases/__init__.py)  0
-rw-r--r--  import-layers/yocto-poky/scripts/lib/checklayer/cases/bsp.py (renamed from import-layers/yocto-poky/scripts/lib/compatlayer/cases/bsp.py)  10
-rw-r--r--  import-layers/yocto-poky/scripts/lib/checklayer/cases/common.py (renamed from import-layers/yocto-poky/scripts/lib/compatlayer/cases/common.py)  6
-rw-r--r--  import-layers/yocto-poky/scripts/lib/checklayer/cases/distro.py (renamed from import-layers/yocto-poky/scripts/lib/compatlayer/cases/distro.py)  8
-rw-r--r--  import-layers/yocto-poky/scripts/lib/checklayer/context.py (renamed from import-layers/yocto-poky/scripts/lib/compatlayer/context.py)  4
-rw-r--r--  import-layers/yocto-poky/scripts/lib/devtool/__init__.py  107
-rw-r--r--  import-layers/yocto-poky/scripts/lib/devtool/deploy.py  41
-rw-r--r--  import-layers/yocto-poky/scripts/lib/devtool/export.py  119
-rw-r--r--  import-layers/yocto-poky/scripts/lib/devtool/import.py  144
-rw-r--r--  import-layers/yocto-poky/scripts/lib/devtool/sdk.py  2
-rw-r--r--  import-layers/yocto-poky/scripts/lib/devtool/standard.py  350
-rw-r--r--  import-layers/yocto-poky/scripts/lib/devtool/upgrade.py  78
-rw-r--r--  import-layers/yocto-poky/scripts/lib/devtool/utilcmds.py  27
-rw-r--r--  import-layers/yocto-poky/scripts/lib/recipetool/create.py  372
-rw-r--r--  import-layers/yocto-poky/scripts/lib/recipetool/create_buildsys.py  4
-rw-r--r--  import-layers/yocto-poky/scripts/lib/recipetool/create_buildsys_python.py  2
-rw-r--r--  import-layers/yocto-poky/scripts/lib/recipetool/create_kmod.py  2
-rw-r--r--  import-layers/yocto-poky/scripts/lib/recipetool/create_npm.py  86
-rw-r--r--  import-layers/yocto-poky/scripts/lib/recipetool/newappend.py  2
-rw-r--r--  import-layers/yocto-poky/scripts/lib/scriptutils.py  171
-rw-r--r--  import-layers/yocto-poky/scripts/lib/wic/canned-wks/common.wks.inc  2
-rw-r--r--  import-layers/yocto-poky/scripts/lib/wic/canned-wks/directdisk-bootloader-config.cfg  8
-rw-r--r--  import-layers/yocto-poky/scripts/lib/wic/canned-wks/qemux86-directdisk.wks  2
-rw-r--r--  import-layers/yocto-poky/scripts/lib/wic/canned-wks/systemd-bootdisk.wks  4
-rw-r--r--  import-layers/yocto-poky/scripts/lib/wic/engine.py  320
-rw-r--r--  import-layers/yocto-poky/scripts/lib/wic/filemap.py  58
-rw-r--r--  import-layers/yocto-poky/scripts/lib/wic/help.py  264
-rw-r--r--  import-layers/yocto-poky/scripts/lib/wic/ksparser.py  4
-rw-r--r--  import-layers/yocto-poky/scripts/lib/wic/misc.py (renamed from import-layers/yocto-poky/scripts/lib/wic/utils/misc.py)  57
-rw-r--r--  import-layers/yocto-poky/scripts/lib/wic/partition.py  45
-rw-r--r--  import-layers/yocto-poky/scripts/lib/wic/pluginbase.py  3
-rw-r--r--  import-layers/yocto-poky/scripts/lib/wic/plugins/imager/direct.py  62
-rw-r--r--  import-layers/yocto-poky/scripts/lib/wic/plugins/source/bootimg-efi.py  4
-rw-r--r--  import-layers/yocto-poky/scripts/lib/wic/plugins/source/bootimg-partition.py  19
-rw-r--r--  import-layers/yocto-poky/scripts/lib/wic/plugins/source/bootimg-pcbios.py  12
-rw-r--r--  import-layers/yocto-poky/scripts/lib/wic/plugins/source/isoimage-isohybrid.py  94
-rw-r--r--  import-layers/yocto-poky/scripts/lib/wic/plugins/source/rawcopy.py  24
-rw-r--r--  import-layers/yocto-poky/scripts/lib/wic/plugins/source/rootfs.py  25
-rw-r--r--  import-layers/yocto-poky/scripts/lib/wic/utils/__init__.py  0
-rw-r--r--  import-layers/yocto-poky/scripts/lib/wic/utils/runner.py  52
-rwxr-xr-x  import-layers/yocto-poky/scripts/oe-build-perf-report  190
-rwxr-xr-x  import-layers/yocto-poky/scripts/oe-buildenv-internal  6
-rwxr-xr-x  import-layers/yocto-poky/scripts/oe-find-native-sysroot  2
-rwxr-xr-x  import-layers/yocto-poky/scripts/oe-pkgdata-util  8
-rwxr-xr-x  import-layers/yocto-poky/scripts/oe-publish-sdk  4
-rwxr-xr-x  import-layers/yocto-poky/scripts/oe-selftest  791
-rwxr-xr-x  import-layers/yocto-poky/scripts/oe-setup-builddir  7
-rwxr-xr-x  import-layers/yocto-poky/scripts/oe-test  36
-rwxr-xr-x  import-layers/yocto-poky/scripts/recipetool  2
-rwxr-xr-x  import-layers/yocto-poky/scripts/runqemu  245
-rwxr-xr-x  import-layers/yocto-poky/scripts/runqemu-export-rootfs  8
-rw-r--r--  import-layers/yocto-poky/scripts/runqemu.README  2
-rwxr-xr-x  import-layers/yocto-poky/scripts/sstate-diff-machines.sh  2
-rwxr-xr-x  import-layers/yocto-poky/scripts/sstate-sysroot-cruft.sh  2
-rwxr-xr-x  import-layers/yocto-poky/scripts/test-reexec  6
-rwxr-xr-x  import-layers/yocto-poky/scripts/wic  375
-rwxr-xr-x  import-layers/yocto-poky/scripts/yocto-check-layer (renamed from import-layers/yocto-poky/scripts/yocto-compat-layer.py)  25
-rwxr-xr-x  import-layers/yocto-poky/scripts/yocto-check-layer-wrapper  43
-rwxr-xr-x  import-layers/yocto-poky/scripts/yocto-compat-layer-wrapper  27
158 files changed, 3893 insertions, 2922 deletions
diff --git a/import-layers/yocto-poky/scripts/buildhistory-diff b/import-layers/yocto-poky/scripts/buildhistory-diff
index dd9745e80..e79cb7ac8 100755
--- a/import-layers/yocto-poky/scripts/buildhistory-diff
+++ b/import-layers/yocto-poky/scripts/buildhistory-diff
@@ -7,7 +7,7 @@
import sys
import os
-import optparse
+import argparse
from distutils.version import LooseVersion
# Ensure PythonGit is installed (buildhistory_analysis needs it)
@@ -17,47 +17,70 @@ except ImportError:
print("Please install GitPython (python3-git) 0.3.4 or later in order to use this script")
sys.exit(1)
+def get_args_parser():
+ description = "Reports significant differences in the buildhistory repository."
+
+ parser = argparse.ArgumentParser(description=description,
+ usage="""
+ %(prog)s [options] [from-revision [to-revision]]
+ (if not specified, from-revision defaults to build-minus-1, and to-revision defaults to HEAD)""")
+
+ parser.add_argument('-p', '--buildhistory-dir',
+ action='store',
+ dest='buildhistory_dir',
+ default='buildhistory/',
+ help="Specify path to buildhistory directory (defaults to buildhistory/ under cwd)")
+ parser.add_argument('-v', '--report-version',
+ action='store_true',
+ dest='report_ver',
+ default=False,
+ help="Report changes in PKGE/PKGV/PKGR even when the values are still the default (PE/PV/PR)")
+ parser.add_argument('-a', '--report-all',
+ action='store_true',
+ dest='report_all',
+                        default=False,
+ help="Report all changes, not just the default significant ones")
+    parser.add_argument('-s', '--signatures',
+ action='store_true',
+ dest='sigs',
+ default=False,
+ help="Report list of signatures differing instead of output")
+ parser.add_argument('-S', '--signatures-with-diff',
+ action='store_true',
+ dest='sigsdiff',
+ default=False,
+ help="Report on actual signature differences instead of output (requires signature data to have been generated, either by running the actual tasks or using bitbake -S)")
+ parser.add_argument('-e', '--exclude-path',
+ action='append',
+ help="Exclude path from the output")
+ parser.add_argument('revisions',
+ default = ['build-minus-1', 'HEAD'],
+ nargs='*',
+ help=argparse.SUPPRESS)
+ return parser
+
def main():
- parser = optparse.OptionParser(
- description = "Reports significant differences in the buildhistory repository.",
- usage = """
- %prog [options] [from-revision [to-revision]]
-(if not specified, from-revision defaults to build-minus-1, and to-revision defaults to HEAD)""")
-
- parser.add_option("-p", "--buildhistory-dir",
- help = "Specify path to buildhistory directory (defaults to buildhistory/ under cwd)",
- action="store", dest="buildhistory_dir", default='buildhistory/')
- parser.add_option("-v", "--report-version",
- help = "Report changes in PKGE/PKGV/PKGR even when the values are still the default (PE/PV/PR)",
- action="store_true", dest="report_ver", default=False)
- parser.add_option("-a", "--report-all",
- help = "Report all changes, not just the default significant ones",
- action="store_true", dest="report_all", default=False)
- parser.add_option("-s", "--signatures",
- help = "Report list of signatures differing instead of output",
- action="store_true", dest="sigs", default=False)
- parser.add_option("-S", "--signatures-with-diff",
- help = "Report on actual signature differences instead of output (requires signature data to have been generated, either by running the actual tasks or using bitbake -S)",
- action="store_true", dest="sigsdiff", default=False)
-
- options, args = parser.parse_args(sys.argv)
-
- if len(args) > 3:
- sys.stderr.write('Invalid argument(s) specified: %s\n\n' % ' '.join(args[3:]))
- parser.print_help()
- sys.exit(1)
+
+ parser = get_args_parser()
+ args = parser.parse_args()
if LooseVersion(git.__version__) < '0.3.1':
sys.stderr.write("Version of GitPython is too old, please install GitPython (python-git) 0.3.1 or later in order to use this script\n")
sys.exit(1)
- if not os.path.exists(options.buildhistory_dir):
- if options.buildhistory_dir == 'buildhistory/':
+ if len(args.revisions) > 2:
+ sys.stderr.write('Invalid argument(s) specified: %s\n\n' % ' '.join(args.revisions[2:]))
+ parser.print_help()
+
+ sys.exit(1)
+ if not os.path.exists(args.buildhistory_dir):
+ if args.buildhistory_dir == 'buildhistory/':
cwd = os.getcwd()
if os.path.basename(cwd) == 'buildhistory':
- options.buildhistory_dir = cwd
- if not os.path.exists(options.buildhistory_dir):
- sys.stderr.write('Buildhistory directory "%s" does not exist\n\n' % options.buildhistory_dir)
+ args.buildhistory_dir = cwd
+
+ if not os.path.exists(args.buildhistory_dir):
+ sys.stderr.write('Buildhistory directory "%s" does not exist\n\n' % args.buildhistory_dir)
parser.print_help()
sys.exit(1)
@@ -71,30 +94,29 @@ def main():
scriptpath.add_oe_lib_path()
# Set path to bitbake lib dir so the buildhistory_analysis module can load bb.utils
bitbakepath = scriptpath.add_bitbake_lib_path()
+
if not bitbakepath:
sys.stderr.write("Unable to find bitbake by searching parent directory of this script or PATH\n")
sys.exit(1)
- import oe.buildhistory_analysis
-
- fromrev = 'build-minus-1'
- torev = 'HEAD'
- if len(args) > 1:
- if len(args) == 2 and '..' in args[1]:
- revs = args[1].split('..')
- fromrev = revs[0]
- if revs[1]:
- torev = revs[1]
+ if len(args.revisions) == 1:
+ if '..' in args.revisions[0]:
+ fromrev, torev = args.revisions[0].split('..')
else:
- fromrev = args[1]
- if len(args) > 2:
- torev = args[2]
+ fromrev, torev = args.revisions[0], 'HEAD'
+ elif len(args.revisions) == 2:
+ fromrev, torev = args.revisions
+
+ from oe.buildhistory_analysis import process_changes
import gitdb
+
try:
- changes = oe.buildhistory_analysis.process_changes(options.buildhistory_dir, fromrev, torev, options.report_all, options.report_ver, options.sigs, options.sigsdiff)
+ changes = process_changes(args.buildhistory_dir, fromrev, torev,
+ args.report_all, args.report_ver, args.sigs,
+ args.sigsdiff, args.exclude_path)
except gitdb.exc.BadObject as e:
- if len(args) == 1:
+ if not args.revisions:
sys.stderr.write("Unable to find previous build revision in buildhistory repository\n\n")
parser.print_help()
else:
@@ -102,10 +124,11 @@ def main():
sys.exit(1)
for chg in changes:
- print('%s' % chg)
+ out = str(chg)
+ if out:
+ print(out)
sys.exit(0)
-
if __name__ == "__main__":
main()
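
To illustrate the effect of the optparse-to-argparse conversion above, here is a minimal, self-contained sketch (not part of the patch) of how the new positional revisions argument resolves into a from/to revision pair; the sample revision names are arbitrary:

    import argparse

    # Stand-in for get_args_parser(): only the 'revisions' positional matters here.
    parser = argparse.ArgumentParser()
    parser.add_argument('revisions', default=['build-minus-1', 'HEAD'], nargs='*')

    for argv in ([], ['HEAD~2..HEAD'], ['mybranch'], ['rev1', 'rev2']):
        args = parser.parse_args(argv)
        # Mirrors the rewritten main(): "a..b" in one argument is split, a lone
        # revision is compared against HEAD, and two arguments are (from, to).
        if len(args.revisions) == 1:
            if '..' in args.revisions[0]:
                fromrev, torev = args.revisions[0].split('..')
            else:
                fromrev, torev = args.revisions[0], 'HEAD'
        else:
            fromrev, torev = args.revisions[0], args.revisions[1]
        print(argv, '->', fromrev, torev)
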
diff --git a/import-layers/yocto-poky/scripts/buildstats-diff b/import-layers/yocto-poky/scripts/buildstats-diff
index adeba4498..a128dd324 100755
--- a/import-layers/yocto-poky/scripts/buildstats-diff
+++ b/import-layers/yocto-poky/scripts/buildstats-diff
@@ -15,15 +15,18 @@
#
import argparse
import glob
-import json
import logging
import math
import os
-import re
import sys
-from collections import namedtuple
from operator import attrgetter
+# Import oe libs
+scripts_path = os.path.dirname(os.path.realpath(__file__))
+sys.path.append(os.path.join(scripts_path, 'lib'))
+from buildstats import BuildStats, diff_buildstats, taskdiff_fields, BSVerDiff
+
+
# Setup logging
logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
log = logging.getLogger()
@@ -34,180 +37,16 @@ class ScriptError(Exception):
pass
-taskdiff_fields = ('pkg', 'pkg_op', 'task', 'task_op', 'value1', 'value2',
- 'absdiff', 'reldiff')
-TaskDiff = namedtuple('TaskDiff', ' '.join(taskdiff_fields))
-
-
-class BSTask(dict):
- def __init__(self, *args, **kwargs):
- self['start_time'] = None
- self['elapsed_time'] = None
- self['status'] = None
- self['iostat'] = {}
- self['rusage'] = {}
- self['child_rusage'] = {}
- super(BSTask, self).__init__(*args, **kwargs)
-
- @property
- def cputime(self):
- """Sum of user and system time taken by the task"""
- return self['rusage']['ru_stime'] + self['rusage']['ru_utime'] + \
- self['child_rusage']['ru_stime'] + self['child_rusage']['ru_utime']
-
- @property
- def walltime(self):
- """Elapsed wall clock time"""
- return self['elapsed_time']
-
- @property
- def read_bytes(self):
- """Bytes read from the block layer"""
- return self['iostat']['read_bytes']
-
- @property
- def write_bytes(self):
- """Bytes written to the block layer"""
- return self['iostat']['write_bytes']
-
- @property
- def read_ops(self):
- """Number of read operations on the block layer"""
- return self['rusage']['ru_inblock'] + self['child_rusage']['ru_inblock']
-
- @property
- def write_ops(self):
- """Number of write operations on the block layer"""
- return self['rusage']['ru_oublock'] + self['child_rusage']['ru_oublock']
-
-
-def read_buildstats_file(buildstat_file):
- """Convert buildstat text file into dict/json"""
- bs_task = BSTask()
- log.debug("Reading task buildstats from %s", buildstat_file)
- with open(buildstat_file) as fobj:
- for line in fobj.readlines():
- key, val = line.split(':', 1)
- val = val.strip()
- if key == 'Started':
- start_time = float(val)
- bs_task['start_time'] = start_time
- elif key == 'Ended':
- end_time = float(val)
- elif key.startswith('IO '):
- split = key.split()
- bs_task['iostat'][split[1]] = int(val)
- elif key.find('rusage') >= 0:
- split = key.split()
- ru_key = split[-1]
- if ru_key in ('ru_stime', 'ru_utime'):
- val = float(val)
- else:
- val = int(val)
- ru_type = 'rusage' if split[0] == 'rusage' else \
- 'child_rusage'
- bs_task[ru_type][ru_key] = val
- elif key == 'Status':
- bs_task['status'] = val
- bs_task['elapsed_time'] = end_time - start_time
- return bs_task
-
-
-def read_buildstats_dir(bs_dir):
- """Read buildstats directory"""
- def split_nevr(nevr):
- """Split name and version information from recipe "nevr" string"""
- n_e_v, revision = nevr.rsplit('-', 1)
- match = re.match(r'^(?P<name>\S+)-((?P<epoch>[0-9]{1,5})_)?(?P<version>[0-9]\S*)$',
- n_e_v)
- if not match:
- # If we're not able to parse a version starting with a number, just
- # take the part after last dash
- match = re.match(r'^(?P<name>\S+)-((?P<epoch>[0-9]{1,5})_)?(?P<version>[^-]+)$',
- n_e_v)
- name = match.group('name')
- version = match.group('version')
- epoch = match.group('epoch')
- return name, epoch, version, revision
-
- if not os.path.isfile(os.path.join(bs_dir, 'build_stats')):
- raise ScriptError("{} does not look like a buildstats directory".format(bs_dir))
-
- log.debug("Reading buildstats directory %s", bs_dir)
-
- buildstats = {}
- subdirs = os.listdir(bs_dir)
- for dirname in subdirs:
- recipe_dir = os.path.join(bs_dir, dirname)
- if not os.path.isdir(recipe_dir):
- continue
- name, epoch, version, revision = split_nevr(dirname)
- recipe_bs = {'nevr': dirname,
- 'name': name,
- 'epoch': epoch,
- 'version': version,
- 'revision': revision,
- 'tasks': {}}
- for task in os.listdir(recipe_dir):
- recipe_bs['tasks'][task] = [read_buildstats_file(
- os.path.join(recipe_dir, task))]
- if name in buildstats:
- raise ScriptError("Cannot handle multiple versions of the same "
- "package ({})".format(name))
- buildstats[name] = recipe_bs
-
- return buildstats
-
-
-def bs_append(dst, src):
- """Append data from another buildstats"""
- if set(dst.keys()) != set(src.keys()):
- raise ScriptError("Refusing to join buildstats, set of packages is "
- "different")
- for pkg, data in dst.items():
- if data['nevr'] != src[pkg]['nevr']:
- raise ScriptError("Refusing to join buildstats, package version "
- "differs: {} vs. {}".format(data['nevr'], src[pkg]['nevr']))
- if set(data['tasks'].keys()) != set(src[pkg]['tasks'].keys()):
- raise ScriptError("Refusing to join buildstats, set of tasks "
- "in {} differ".format(pkg))
- for taskname, taskdata in data['tasks'].items():
- taskdata.extend(src[pkg]['tasks'][taskname])
-
-
-def read_buildstats_json(path):
- """Read buildstats from JSON file"""
- buildstats = {}
- with open(path) as fobj:
- bs_json = json.load(fobj)
- for recipe_bs in bs_json:
- if recipe_bs['name'] in buildstats:
- raise ScriptError("Cannot handle multiple versions of the same "
- "package ({})".format(recipe_bs['name']))
-
- if recipe_bs['epoch'] is None:
- recipe_bs['nevr'] = "{}-{}-{}".format(recipe_bs['name'], recipe_bs['version'], recipe_bs['revision'])
- else:
- recipe_bs['nevr'] = "{}-{}_{}-{}".format(recipe_bs['name'], recipe_bs['epoch'], recipe_bs['version'], recipe_bs['revision'])
-
- for task, data in recipe_bs['tasks'].copy().items():
- recipe_bs['tasks'][task] = [BSTask(data)]
-
- buildstats[recipe_bs['name']] = recipe_bs
-
- return buildstats
-
-
def read_buildstats(path, multi):
"""Read buildstats"""
if not os.path.exists(path):
raise ScriptError("No such file or directory: {}".format(path))
if os.path.isfile(path):
- return read_buildstats_json(path)
+ return BuildStats.from_file_json(path)
if os.path.isfile(os.path.join(path, 'build_stats')):
- return read_buildstats_dir(path)
+ return BuildStats.from_dir(path)
# Handle a non-buildstat directory
subpaths = sorted(glob.glob(path + '/*'))
@@ -222,88 +61,63 @@ def read_buildstats(path, multi):
bs = None
for subpath in subpaths:
if os.path.isfile(subpath):
- tmpbs = read_buildstats_json(subpath)
+ _bs = BuildStats.from_file_json(subpath)
else:
- tmpbs = read_buildstats_dir(subpath)
- if not bs:
- bs = tmpbs
+ _bs = BuildStats.from_dir(subpath)
+ if bs is None:
+ bs = _bs
else:
- log.debug("Joining buildstats")
- bs_append(bs, tmpbs)
-
+ bs.aggregate(_bs)
if not bs:
raise ScriptError("No buildstats found under {}".format(path))
+
return bs
def print_ver_diff(bs1, bs2):
"""Print package version differences"""
- pkgs1 = set(bs1.keys())
- pkgs2 = set(bs2.keys())
- new_pkgs = pkgs2 - pkgs1
- deleted_pkgs = pkgs1 - pkgs2
-
- echanged = []
- vchanged = []
- rchanged = []
- unchanged = []
- common_pkgs = pkgs2.intersection(pkgs1)
- if common_pkgs:
- for pkg in common_pkgs:
- if bs1[pkg]['epoch'] != bs2[pkg]['epoch']:
- echanged.append(pkg)
- elif bs1[pkg]['version'] != bs2[pkg]['version']:
- vchanged.append(pkg)
- elif bs1[pkg]['revision'] != bs2[pkg]['revision']:
- rchanged.append(pkg)
- else:
- unchanged.append(pkg)
- maxlen = max([len(pkg) for pkg in pkgs1.union(pkgs2)])
+ diff = BSVerDiff(bs1, bs2)
+
+ maxlen = max([len(r) for r in set(bs1.keys()).union(set(bs2.keys()))])
fmt_str = " {:{maxlen}} ({})"
-# if unchanged:
-# print("\nUNCHANGED PACKAGES:")
-# print("-------------------")
-# maxlen = max([len(pkg) for pkg in unchanged])
-# for pkg in sorted(unchanged):
-# print(fmt_str.format(pkg, bs2[pkg]['nevr'], maxlen=maxlen))
-
- if new_pkgs:
- print("\nNEW PACKAGES:")
- print("-------------")
- for pkg in sorted(new_pkgs):
- print(fmt_str.format(pkg, bs2[pkg]['nevr'], maxlen=maxlen))
-
- if deleted_pkgs:
- print("\nDELETED PACKAGES:")
- print("-----------------")
- for pkg in sorted(deleted_pkgs):
- print(fmt_str.format(pkg, bs1[pkg]['nevr'], maxlen=maxlen))
+
+ if diff.new:
+ print("\nNEW RECIPES:")
+ print("------------")
+ for name, val in sorted(diff.new.items()):
+ print(fmt_str.format(name, val.nevr, maxlen=maxlen))
+
+ if diff.dropped:
+ print("\nDROPPED RECIPES:")
+ print("----------------")
+ for name, val in sorted(diff.dropped.items()):
+ print(fmt_str.format(name, val.nevr, maxlen=maxlen))
fmt_str = " {0:{maxlen}} {1:<20} ({2})"
- if rchanged:
+ if diff.rchanged:
print("\nREVISION CHANGED:")
print("-----------------")
- for pkg in sorted(rchanged):
- field1 = "{} -> {}".format(pkg, bs1[pkg]['revision'], bs2[pkg]['revision'])
- field2 = "{} -> {}".format(bs1[pkg]['nevr'], bs2[pkg]['nevr'])
- print(fmt_str.format(pkg, field1, field2, maxlen=maxlen))
+ for name, val in sorted(diff.rchanged.items()):
+ field1 = "{} -> {}".format(val.left.revision, val.right.revision)
+ field2 = "{} -> {}".format(val.left.nevr, val.right.nevr)
+ print(fmt_str.format(name, field1, field2, maxlen=maxlen))
- if vchanged:
+ if diff.vchanged:
print("\nVERSION CHANGED:")
print("----------------")
- for pkg in sorted(vchanged):
- field1 = "{} -> {}".format(bs1[pkg]['version'], bs2[pkg]['version'])
- field2 = "{} -> {}".format(bs1[pkg]['nevr'], bs2[pkg]['nevr'])
- print(fmt_str.format(pkg, field1, field2, maxlen=maxlen))
+ for name, val in sorted(diff.vchanged.items()):
+ field1 = "{} -> {}".format(val.left.version, val.right.version)
+ field2 = "{} -> {}".format(val.left.nevr, val.right.nevr)
+ print(fmt_str.format(name, field1, field2, maxlen=maxlen))
- if echanged:
+ if diff.echanged:
print("\nEPOCH CHANGED:")
print("--------------")
- for pkg in sorted(echanged):
- field1 = "{} -> {}".format(bs1[pkg]['epoch'], bs2[pkg]['epoch'])
- field2 = "{} -> {}".format(bs1[pkg]['nevr'], bs2[pkg]['nevr'])
- print(fmt_str.format(pkg, field1, field2, maxlen=maxlen))
+ for name, val in sorted(diff.echanged.items()):
+ field1 = "{} -> {}".format(val.left.epoch, val.right.epoch)
+ field2 = "{} -> {}".format(val.left.nevr, val.right.nevr)
+ print(fmt_str.format(name, field1, field2, maxlen=maxlen))
def print_task_diff(bs1, bs2, val_type, min_val=0, min_absdiff=0, sort_by=('absdiff',)):
@@ -343,12 +157,10 @@ def print_task_diff(bs1, bs2, val_type, min_val=0, min_absdiff=0, sort_by=('absd
"""Get cumulative sum of all tasks"""
total = 0.0
for recipe_data in buildstats.values():
- for bs_task in recipe_data['tasks'].values():
- total += sum([getattr(b, val_type) for b in bs_task]) / len(bs_task)
+ for bs_task in recipe_data.tasks.values():
+ total += getattr(bs_task, val_type)
return total
- tasks_diff = []
-
if min_val:
print("Ignoring tasks less than {} ({})".format(
val_to_str(min_val, True), val_to_str(min_val)))
@@ -357,49 +169,7 @@ def print_task_diff(bs1, bs2, val_type, min_val=0, min_absdiff=0, sort_by=('absd
val_to_str(min_absdiff, True), val_to_str(min_absdiff)))
# Prepare the data
- pkgs = set(bs1.keys()).union(set(bs2.keys()))
- for pkg in pkgs:
- tasks1 = bs1[pkg]['tasks'] if pkg in bs1 else {}
- tasks2 = bs2[pkg]['tasks'] if pkg in bs2 else {}
- if not tasks1:
- pkg_op = '+ '
- elif not tasks2:
- pkg_op = '- '
- else:
- pkg_op = ' '
-
- for task in set(tasks1.keys()).union(set(tasks2.keys())):
- task_op = ' '
- if task in tasks1:
- # Average over all values
- val1 = [getattr(b, val_type) for b in bs1[pkg]['tasks'][task]]
- val1 = sum(val1) / len(val1)
- else:
- task_op = '+ '
- val1 = 0
- if task in tasks2:
- # Average over all values
- val2 = [getattr(b, val_type) for b in bs2[pkg]['tasks'][task]]
- val2 = sum(val2) / len(val2)
- else:
- val2 = 0
- task_op = '- '
-
- if val1 == 0:
- reldiff = float('inf')
- else:
- reldiff = 100 * (val2 - val1) / val1
-
- if max(val1, val2) < min_val:
- log.debug("Filtering out %s:%s (%s)", pkg, task,
- val_to_str(max(val1, val2)))
- continue
- if abs(val2 - val1) < min_absdiff:
- log.debug("Filtering out %s:%s (difference of %s)", pkg, task,
- val_to_str(val2-val1))
- continue
- tasks_diff.append(TaskDiff(pkg, pkg_op, task, task_op, val1, val2,
- val2-val1, reldiff))
+ tasks_diff = diff_buildstats(bs1, bs2, val_type, min_val, min_absdiff)
# Sort our list
for field in reversed(sort_by):
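
The hunks above move the buildstats parsing into scripts/lib/buildstats.py and keep only the reporting here. A rough usage sketch, assuming that module is importable and using only the calls visible in this diff (BuildStats.from_dir, BSVerDiff, diff_buildstats); the buildstats directory paths are placeholders:

    import sys
    # Assumption: poky's scripts/lib directory is on the path, as the script arranges above.
    sys.path.append('import-layers/yocto-poky/scripts/lib')
    from buildstats import BuildStats, BSVerDiff, diff_buildstats

    # Placeholder buildstats directories from two builds.
    bs1 = BuildStats.from_dir('build1/tmp/buildstats/20170601120000')
    bs2 = BuildStats.from_dir('build2/tmp/buildstats/20170602120000')

    # Recipe version changes, consumed the same way print_ver_diff() does above.
    vdiff = BSVerDiff(bs1, bs2)
    for name, change in sorted(vdiff.vchanged.items()):
        print("{}: {} -> {}".format(name, change.left.nevr, change.right.nevr))

    # Per-task differences for one value type; the fields follow taskdiff_fields above.
    for task in diff_buildstats(bs1, bs2, 'cputime', 0, 0):
        print(task.pkg, task.task, task.absdiff)
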
diff --git a/import-layers/yocto-poky/scripts/contrib/build-perf-test-wrapper.sh b/import-layers/yocto-poky/scripts/contrib/build-perf-test-wrapper.sh
index 3da32532b..19bee1dd0 100755
--- a/import-layers/yocto-poky/scripts/contrib/build-perf-test-wrapper.sh
+++ b/import-layers/yocto-poky/scripts/contrib/build-perf-test-wrapper.sh
@@ -35,6 +35,7 @@ Optional arguments:
-C GIT_REPO commit results into Git
-E EMAIL_ADDR send email report
-P GIT_REMOTE push results to a remote Git repository
+ -R DEST rsync reports to a remote destination
-w WORK_DIR work dir for this script
(default: GIT_TOP_DIR/build-perf-test)
-x create xml report (instead of json)
@@ -50,7 +51,7 @@ get_os_release_var () {
commitish=""
oe_build_perf_test_extra_opts=()
oe_git_archive_extra_opts=()
-while getopts "ha:c:C:E:P:w:x" opt; do
+while getopts "ha:c:C:E:P:R:w:x" opt; do
case $opt in
h) usage
exit 0
@@ -65,6 +66,8 @@ while getopts "ha:c:C:E:P:w:x" opt; do
;;
P) oe_git_archive_extra_opts+=("--push" "$OPTARG")
;;
+ R) rsync_dst="$OPTARG"
+ ;;
w) base_dir=`realpath -s "$OPTARG"`
;;
x) oe_build_perf_test_extra_opts+=("--xml")
@@ -132,6 +135,11 @@ if [ -n "$commitish" ]; then
git reset --hard $commit > /dev/null
fi
+# Determine name of the current branch
+branch=`git symbolic-ref HEAD 2> /dev/null`
+# Strip refs/heads/
+branch=${branch:11}
+
# Setup build environment
if [ -z "$base_dir" ]; then
base_dir="$git_topdir/build-perf-test"
@@ -187,13 +195,25 @@ if [ -n "$results_repo" ]; then
"${oe_git_archive_extra_opts[@]}" \
"$results_dir"
+ # Generate test reports
+ sanitized_branch=`echo $branch | tr / _`
+ report_txt=`hostname`_${sanitized_branch}_${machine}.txt
+ report_html=`hostname`_${sanitized_branch}_${machine}.html
+ echo -e "\nGenerating test report"
+ oe-build-perf-report -r "$results_repo" > $report_txt
+ oe-build-perf-report -r "$results_repo" --html > $report_html
+
# Send email report
if [ -n "$email_to" ]; then
- echo -e "\nEmailing test report"
+ echo "Emailing test report"
os_name=`get_os_release_var PRETTY_NAME`
- oe-build-perf-report -r "$results_repo" > report.txt
- oe-build-perf-report -r "$results_repo" --html > report.html
- "$script_dir"/oe-build-perf-report-email.py --to "$email_to" --subject "Build Perf Test Report for $os_name" --text report.txt --html report.html "${OE_BUILD_PERF_REPORT_EMAIL_EXTRA_ARGS[@]}"
+ "$script_dir"/oe-build-perf-report-email.py --to "$email_to" --subject "Build Perf Test Report for $os_name" --text $report_txt --html $report_html "${OE_BUILD_PERF_REPORT_EMAIL_EXTRA_ARGS[@]}"
+ fi
+
+ # Upload report files, unless we're on detached head
+ if [ -n "$rsync_dst" -a -n "$branch" ]; then
+ echo "Uploading test report"
+ rsync $report_txt $report_html $rsync_dst
fi
fi
diff --git a/import-layers/yocto-poky/scripts/contrib/oe-build-perf-report-email.py b/import-layers/yocto-poky/scripts/contrib/oe-build-perf-report-email.py
index 261ca514e..913847bbe 100755
--- a/import-layers/yocto-poky/scripts/contrib/oe-build-perf-report-email.py
+++ b/import-layers/yocto-poky/scripts/contrib/oe-build-perf-report-email.py
@@ -25,6 +25,7 @@ import socket
import subprocess
import sys
import tempfile
+from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
@@ -71,6 +72,10 @@ def parse_args(argv):
help="Only print errors")
parser.add_argument('--to', action='append',
help="Recipients of the email")
+ parser.add_argument('--cc', action='append',
+ help="Carbon copy recipients of the email")
+ parser.add_argument('--bcc', action='append',
+ help="Blind carbon copy recipients of the email")
parser.add_argument('--subject', default="Yocto build perf test report",
help="Email subject")
parser.add_argument('--outdir', '-o',
@@ -107,15 +112,6 @@ def decode_png(infile, outfile):
subprocess.check_output(['optipng', outfile], stderr=subprocess.STDOUT)
-def encode_png(pngfile):
- """Encode png into a <img> html element"""
- with open(pngfile, 'rb') as f:
- data = f.read()
-
- b64_data = base64.b64encode(data)
- return '<img src="data:image/png;base64,' + b64_data.decode('utf-8') + '">\n'
-
-
def mangle_html_report(infile, outfile, pngs):
"""Mangle html file into a email compatible format"""
paste = True
@@ -140,9 +136,7 @@ def mangle_html_report(infile, outfile, pngs):
# Replace charts with <img> elements
match = re.match('<div id="(?P<id>\w+)"', stripped)
if match and match.group('id') in pngs:
- #f_out.write('<img src="{}">\n'.format(match.group('id') + '.png'))
- png_file = os.path.join(png_dir, match.group('id') + '.png')
- f_out.write(encode_png(png_file))
+                f_out.write('<img src="cid:{}">\n'.format(match.group('id')))
else:
f_out.write(line)
@@ -166,7 +160,7 @@ def scrape_html_report(report, outdir, phantomjs_extra_args=None):
stderr=subprocess.STDOUT)
pngs = []
- attachments = []
+ images = []
for fname in os.listdir(tmpdir):
base, ext = os.path.splitext(fname)
if ext == '.png':
@@ -174,7 +168,7 @@ def scrape_html_report(report, outdir, phantomjs_extra_args=None):
decode_png(os.path.join(tmpdir, fname),
os.path.join(outdir, fname))
pngs.append(base)
- attachments.append(fname)
+ images.append(fname)
elif ext in ('.html', '.htm'):
report_file = fname
else:
@@ -184,11 +178,13 @@ def scrape_html_report(report, outdir, phantomjs_extra_args=None):
log.debug("Mangling html report file %s", report_file)
mangle_html_report(os.path.join(tmpdir, report_file),
os.path.join(outdir, report_file), pngs)
- return report_file, attachments
+ return (os.path.join(outdir, report_file),
+ [os.path.join(outdir, i) for i in images])
finally:
shutil.rmtree(tmpdir)
-def send_email(text_fn, html_fn, subject, recipients):
+def send_email(text_fn, html_fn, image_fns, subject, recipients, copy=[],
+ blind_copy=[]):
"""Send email"""
# Generate email message
text_msg = html_msg = None
@@ -197,8 +193,16 @@ def send_email(text_fn, html_fn, subject, recipients):
text_msg = MIMEText("Yocto build performance test report.\n" +
f.read(), 'plain')
if html_fn:
+ html_msg = msg = MIMEMultipart('related')
with open(html_fn) as f:
- html_msg = MIMEText(f.read(), 'html')
+ html_msg.attach(MIMEText(f.read(), 'html'))
+ for img_fn in image_fns:
+ # Expect that content id is same as the filename
+ cid = os.path.splitext(os.path.basename(img_fn))[0]
+ with open(img_fn, 'rb') as f:
+ image_msg = MIMEImage(f.read())
+ image_msg['Content-ID'] = '<{}>'.format(cid)
+ html_msg.attach(image_msg)
if text_msg and html_msg:
msg = MIMEMultipart('alternative')
@@ -217,6 +221,10 @@ def send_email(text_fn, html_fn, subject, recipients):
'{}@{}'.format(pw_data.pw_name, socket.getfqdn()))
msg['From'] = "{} <{}>".format(full_name, email)
msg['To'] = ', '.join(recipients)
+ if copy:
+ msg['Cc'] = ', '.join(copy)
+ if blind_copy:
+ msg['Bcc'] = ', '.join(blind_copy)
msg['Subject'] = subject
# Send email
@@ -243,14 +251,19 @@ def main(argv=None):
try:
log.debug("Storing email parts in %s", outdir)
- html_report = None
+ html_report = images = None
if args.html:
- scrape_html_report(args.html, outdir, args.phantomjs_args)
- html_report = os.path.join(outdir, os.path.basename(args.html))
+ html_report, images = scrape_html_report(args.html, outdir,
+ args.phantomjs_args)
if args.to:
log.info("Sending email to %s", ', '.join(args.to))
- send_email(args.text, html_report, args.subject, args.to)
+ if args.cc:
+ log.info("Copying to %s", ', '.join(args.cc))
+ if args.bcc:
+ log.info("Blind copying to %s", ', '.join(args.bcc))
+ send_email(args.text, html_report, images, args.subject,
+ args.to, args.cc, args.bcc)
except subprocess.CalledProcessError as err:
log.error("%s, with output:\n%s", str(err), err.output.decode())
return 1
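
The report email now embeds charts as inline images rather than base64 data URIs. As background, a minimal standalone sketch of that scheme (not taken from the patch): the HTML references each image through a cid: URL, and the PNG is attached to a multipart/related container with a matching Content-ID; the filename is a placeholder:

    from email.mime.image import MIMEImage
    from email.mime.multipart import MIMEMultipart
    from email.mime.text import MIMEText

    html = '<html><body><img src="cid:chart1"></body></html>'

    # multipart/related bundles the HTML body with the images it references.
    related = MIMEMultipart('related')
    related.attach(MIMEText(html, 'html'))

    with open('chart1.png', 'rb') as f:  # placeholder PNG produced by the report
        img = MIMEImage(f.read())
    # The Content-ID must match the cid: reference above (angle brackets included).
    img['Content-ID'] = '<chart1>'
    related.attach(img)

    print(related.as_string()[:300])
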
diff --git a/import-layers/yocto-poky/scripts/contrib/patchreview.py b/import-layers/yocto-poky/scripts/contrib/patchreview.py
new file mode 100755
index 000000000..4e3e73c7a
--- /dev/null
+++ b/import-layers/yocto-poky/scripts/contrib/patchreview.py
@@ -0,0 +1,211 @@
+#! /usr/bin/env python3
+
+# TODO
+# - option to just list all broken files
+# - test suite
+# - validate signed-off-by
+
+
+class Result:
+ # Whether the patch has an Upstream-Status or not
+ missing_upstream_status = False
+ # If the Upstream-Status tag is malformed in some way (string for bad bit)
+ malformed_upstream_status = None
+ # If the Upstream-Status value is unknown (boolean)
+ unknown_upstream_status = False
+ # The upstream status value (Pending, etc)
+ upstream_status = None
+ # Whether the patch has a Signed-off-by or not
+ missing_sob = False
+ # Whether the Signed-off-by tag is malformed in some way
+ malformed_sob = False
+ # The Signed-off-by tag value
+ sob = None
+ # Whether a patch looks like a CVE but doesn't have a CVE tag
+ missing_cve = False
+
+def blame_patch(patch):
+ """
+ From a patch filename, return a list of "commit summary (author name <author
+ email>)" strings representing the history.
+ """
+ import subprocess
+ return subprocess.check_output(("git", "log",
+ "--follow", "--find-renames", "--diff-filter=A",
+ "--format=%s (%aN <%aE>)",
+ "--", patch)).decode("utf-8").splitlines()
+
+def patchreview(patches):
+ import re
+
+ # General pattern: start of line, optional whitespace, tag with optional
+ # hyphen or spaces, maybe a colon, some whitespace, then the value, all case
+ # insensitive.
+ sob_re = re.compile(r"^[\t ]*(Signed[-_ ]off[-_ ]by:?)[\t ]*(.+)", re.IGNORECASE | re.MULTILINE)
+ status_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*(\w*)", re.IGNORECASE | re.MULTILINE)
+ status_values = ("accepted", "pending", "inappropriate", "backport", "submitted", "denied")
+ cve_tag_re = re.compile(r"^[\t ]*(CVE:)[\t ]*(.*)", re.IGNORECASE | re.MULTILINE)
+ cve_re = re.compile(r"cve-[0-9]{4}-[0-9]{4,6}", re.IGNORECASE)
+
+ results = {}
+
+ for patch in patches:
+ result = Result()
+ results[patch] = result
+
+ content = open(patch, encoding='ascii', errors='ignore').read()
+
+ # Find the Signed-off-by tag
+ match = sob_re.search(content)
+ if match:
+ value = match.group(1)
+ if value != "Signed-off-by:":
+ result.malformed_sob = value
+ result.sob = match.group(2)
+ else:
+ result.missing_sob = True
+
+
+ # Find the Upstream-Status tag
+ match = status_re.search(content)
+ if match:
+ value = match.group(1)
+ if value != "Upstream-Status:":
+ result.malformed_upstream_status = value
+
+ value = match.group(2).lower()
+ # TODO: check case
+ if value not in status_values:
+ result.unknown_upstream_status = True
+ result.upstream_status = value
+ else:
+ result.missing_upstream_status = True
+
+ # Check that patches which looks like CVEs have CVE tags
+ if cve_re.search(patch) or cve_re.search(content):
+ if not cve_tag_re.search(content):
+ result.missing_cve = True
+ # TODO: extract CVE list
+
+ return results
+
+
+def analyse(results, want_blame=False, verbose=True):
+ """
+ want_blame: display blame data for each malformed patch
+ verbose: display per-file results instead of just summary
+ """
+
+ # want_blame requires verbose, so disable blame if we're not verbose
+ if want_blame and not verbose:
+ want_blame = False
+
+ total_patches = 0
+ missing_sob = 0
+ malformed_sob = 0
+ missing_status = 0
+ malformed_status = 0
+ missing_cve = 0
+ pending_patches = 0
+
+ for patch in sorted(results):
+ r = results[patch]
+ total_patches += 1
+ need_blame = False
+
+ # Build statistics
+ if r.missing_sob:
+ missing_sob += 1
+ if r.malformed_sob:
+ malformed_sob += 1
+ if r.missing_upstream_status:
+ missing_status += 1
+ if r.malformed_upstream_status or r.unknown_upstream_status:
+ malformed_status += 1
+ if r.missing_cve:
+ missing_cve += 1
+ if r.upstream_status == "pending":
+ pending_patches += 1
+
+ # Output warnings
+ if r.missing_sob:
+ need_blame = True
+ if verbose:
+ print("Missing Signed-off-by tag (%s)" % patch)
+ # TODO: disable this for now as too much fails
+ if False and r.malformed_sob:
+ need_blame = True
+ if verbose:
+ print("Malformed Signed-off-by '%s' (%s)" % (r.malformed_sob, patch))
+ if r.missing_cve:
+ need_blame = True
+ if verbose:
+ print("Missing CVE tag (%s)" % patch)
+ if r.missing_upstream_status:
+ need_blame = True
+ if verbose:
+ print("Missing Upstream-Status tag (%s)" % patch)
+ if r.malformed_upstream_status:
+ need_blame = True
+ if verbose:
+ print("Malformed Upstream-Status '%s' (%s)" % (r.malformed_upstream_status, patch))
+ if r.unknown_upstream_status:
+ need_blame = True
+ if verbose:
+ print("Unknown Upstream-Status value '%s' (%s)" % (r.upstream_status, patch))
+
+ if want_blame and need_blame:
+ print("\n".join(blame_patch(patch)) + "\n")
+
+ def percent(num):
+ try:
+ return "%d (%d%%)" % (num, round(num * 100.0 / total_patches))
+ except ZeroDivisionError:
+ return "N/A"
+
+ if verbose:
+ print()
+
+ print("""Total patches found: %d
+Patches missing Signed-off-by: %s
+Patches with malformed Signed-off-by: %s
+Patches missing CVE: %s
+Patches missing Upstream-Status: %s
+Patches with malformed Upstream-Status: %s
+Patches in Pending state: %s""" % (total_patches,
+ percent(missing_sob),
+ percent(malformed_sob),
+ percent(missing_cve),
+ percent(missing_status),
+ percent(malformed_status),
+ percent(pending_patches)))
+
+
+
+def histogram(results):
+ from toolz import recipes, dicttoolz
+ import math
+ counts = recipes.countby(lambda r: r.upstream_status, results.values())
+ bars = dicttoolz.valmap(lambda v: "#" * int(math.ceil(float(v) / len(results) * 100)), counts)
+ for k in bars:
+ print("%-20s %s (%d)" % (k.capitalize() if k else "No status", bars[k], counts[k]))
+
+
+if __name__ == "__main__":
+ import argparse, subprocess, os
+
+ args = argparse.ArgumentParser(description="Patch Review Tool")
+ args.add_argument("-b", "--blame", action="store_true", help="show blame for malformed patches")
+ args.add_argument("-v", "--verbose", action="store_true", help="show per-patch results")
+ args.add_argument("-g", "--histogram", action="store_true", help="show patch histogram")
+ args.add_argument("directory", nargs="?", help="directory to scan")
+ args = args.parse_args()
+
+ if args.directory:
+ os.chdir(args.directory)
+ patches = subprocess.check_output(("git", "ls-files", "*.patch", "*.diff")).decode("utf-8").split()
+ results = patchreview(patches)
+ analyse(results, want_blame=args.blame, verbose=args.verbose)
+ if args.histogram:
+ print()
+ histogram(results)
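
Beyond the command-line entry point in the __main__ block, the new module can be driven directly. A short sketch, assuming the contrib directory is on sys.path and using hypothetical patch filenames:

    import sys
    # Assumption: make scripts/contrib importable so patchreview can be loaded as a module.
    sys.path.insert(0, 'import-layers/yocto-poky/scripts/contrib')
    import patchreview

    # Hypothetical patch files; each is opened and scanned for the tags checked above.
    patches = ['0001-example-fix.patch', 'CVE-2017-12345-fix.patch']
    results = patchreview.patchreview(patches)

    # Print per-patch warnings plus the summary statistics.
    patchreview.analyse(results, want_blame=False, verbose=True)
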
diff --git a/import-layers/yocto-poky/scripts/contrib/patchtest.sh b/import-layers/yocto-poky/scripts/contrib/patchtest.sh
new file mode 100755
index 000000000..7fe566666
--- /dev/null
+++ b/import-layers/yocto-poky/scripts/contrib/patchtest.sh
@@ -0,0 +1,118 @@
+#!/bin/bash
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# patchtest: Run patchtest on commits starting at master
+#
+# Copyright (c) 2017, Intel Corporation.
+# All rights reserved.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+set -o errexit
+
+# Default values
+pokydir=''
+
+usage() {
+CMD=$(basename $0)
+cat <<EOM
+Usage: $CMD [-h] [-p pokydir]
+ -p pokydir Defaults to current directory
+EOM
+>&2
+ exit 1
+}
+
+function clone() {
+ local REPOREMOTE=$1
+ local REPODIR=$2
+ if [ ! -d $REPODIR ]; then
+ git clone $REPOREMOTE $REPODIR --quiet
+ else
+ ( cd $REPODIR; git pull --quiet )
+ fi
+}
+
+while getopts ":p:h" opt; do
+ case $opt in
+ p)
+ pokydir=$OPTARG
+ ;;
+ h)
+ usage
+ ;;
+ \?)
+ echo "Invalid option: -$OPTARG" >&2
+ usage
+ ;;
+ :)
+ echo "Option -$OPTARG requires an argument." >&2
+ usage
+ ;;
+ esac
+done
+shift $((OPTIND-1))
+
+CDIR="$PWD"
+
+# default pokydir to current directory if user did not specify one
+if [ -z "$pokydir" ]; then
+ pokydir="$CDIR"
+fi
+
+PTENV="$PWD/patchtest"
+PT="$PTENV/patchtest"
+PTOE="$PTENV/patchtest-oe"
+
+if ! which virtualenv > /dev/null; then
+ echo "Install virtualenv before proceeding"
+ exit 1;
+fi
+
+# activate the virtual env
+virtualenv $PTENV --quiet
+source $PTENV/bin/activate
+
+cd $PTENV
+
+# clone or pull
+clone git://git.yoctoproject.org/patchtest $PT
+clone git://git.yoctoproject.org/patchtest-oe $PTOE
+
+# install requirements
+pip install -r $PT/requirements.txt --quiet
+pip install -r $PTOE/requirements.txt --quiet
+
+PATH="$PT:$PT/scripts:$PATH"
+
+# loop through parent to HEAD and execute patchtest on each commit
+for commit in $(git rev-list master..HEAD --reverse)
+do
+ shortlog="$(git log "$commit^1..$commit" --pretty='%h: %aN: %cd: %s')"
+ log="$(git format-patch "$commit^1..$commit" --stdout | patchtest - -r $pokydir -s $PTOE/tests --base-commit $commit^1 --json 2>/dev/null | create-summary --fail --only-results)"
+ if [ -z "$log" ]; then
+ shortlog="$shortlog: OK"
+ else
+ shortlog="$shortlog: FAIL"
+ fi
+ echo "$shortlog"
+ echo "$log" | sed -n -e '/Issue/p' -e '/Suggested fix/p'
+ echo ""
+done
+
+deactivate
+
+cd $CDIR
diff --git a/import-layers/yocto-poky/scripts/contrib/python/generate-manifest-2.7.py b/import-layers/yocto-poky/scripts/contrib/python/generate-manifest-2.7.py
index 8c3655d39..586b329c1 100755
--- a/import-layers/yocto-poky/scripts/contrib/python/generate-manifest-2.7.py
+++ b/import-layers/yocto-poky/scripts/contrib/python/generate-manifest-2.7.py
@@ -28,6 +28,7 @@ class MakefileMaker:
def __init__( self, outfile, isNative ):
"""initialize"""
self.packages = {}
+ self.excluded_pkgs = []
self.targetPrefix = "${libdir}/python%s/" % VERSION[:3]
self.isNative = isNative
self.output = outfile
@@ -52,7 +53,7 @@ class MakefileMaker:
self.out( """ """ )
self.out( "" )
- def addPackage( self, name, description, dependencies, filenames ):
+ def addPackage( self, name, description, dependencies, filenames, mod_exclude = False ):
"""add a package to the Makefile"""
if type( filenames ) == type( "" ):
filenames = filenames.split()
@@ -62,6 +63,8 @@ class MakefileMaker:
fullFilenames.append( "%s%s" % ( self.targetPrefix, filename ) )
else:
fullFilenames.append( filename )
+ if mod_exclude:
+ self.excluded_pkgs.append( name )
self.packages[name] = description, dependencies, fullFilenames
def doBody( self ):
@@ -74,13 +77,11 @@ class MakefileMaker:
#
if self.isNative:
- rprovideLine = 'RPROVIDES+="'
- for name in sorted(self.packages):
- rprovideLine += "%s-native " % name.replace( '${PN}', 'python' )
- rprovideLine += '"'
+ pkglist = []
+ for name in ['${PN}-modules'] + sorted(self.packages):
+ pkglist.append('%s-native' % name.replace('${PN}', 'python'))
- self.out( rprovideLine )
- self.out( "" )
+ self.out('RPROVIDES += "%s"' % " ".join(pkglist))
return
#
@@ -149,7 +150,7 @@ class MakefileMaker:
line = 'RDEPENDS_${PN}-modules="'
for name, data in sorted(self.packages.items()):
- if name not in ['${PN}-dev', '${PN}-distutils-staticdev']:
+ if name not in ['${PN}-dev', '${PN}-distutils-staticdev'] and name not in self.excluded_pkgs:
line += "%s " % name
self.out( "%s \"" % line )
@@ -384,7 +385,7 @@ if __name__ == "__main__":
"pty.* tty.*" )
m.addPackage( "${PN}-tests", "Python tests", "${PN}-core ${PN}-modules",
- "test" ) # package
+ "test", True ) # package
m.addPackage( "${PN}-threading", "Python threading & synchronization support", "${PN}-core ${PN}-lang",
"_threading_local.* dummy_thread.* dummy_threading.* mutex.* threading.* Queue.*" )
diff --git a/import-layers/yocto-poky/scripts/contrib/python/generate-manifest-3.5.py b/import-layers/yocto-poky/scripts/contrib/python/generate-manifest-3.5.py
index 075860c41..6352f8f12 100755
--- a/import-layers/yocto-poky/scripts/contrib/python/generate-manifest-3.5.py
+++ b/import-layers/yocto-poky/scripts/contrib/python/generate-manifest-3.5.py
@@ -31,6 +31,7 @@ class MakefileMaker:
def __init__( self, outfile, isNative ):
"""initialize"""
self.packages = {}
+ self.excluded_pkgs = []
self.targetPrefix = "${libdir}/python%s/" % VERSION[:3]
self.isNative = isNative
self.output = outfile
@@ -55,7 +56,7 @@ class MakefileMaker:
self.out( """ """ )
self.out( "" )
- def addPackage( self, name, description, dependencies, filenames ):
+ def addPackage( self, name, description, dependencies, filenames, mod_exclude = False ):
"""add a package to the Makefile"""
if type( filenames ) == type( "" ):
filenames = filenames.split()
@@ -67,6 +68,8 @@ class MakefileMaker:
self.pycachePath( filename ) ) )
else:
fullFilenames.append( filename )
+ if mod_exclude:
+ self.excluded_pkgs.append( name )
self.packages[name] = description, dependencies, fullFilenames
def pycachePath( self, filename ):
@@ -87,13 +90,11 @@ class MakefileMaker:
#
if self.isNative:
- rprovideLine = 'RPROVIDES+="'
- for name in sorted(self.packages):
- rprovideLine += "%s-native " % name.replace( '${PN}', 'python3' )
- rprovideLine += '"'
+ pkglist = []
+ for name in ['${PN}-modules'] + sorted(self.packages):
+ pkglist.append('%s-native' % name.replace('${PN}', 'python3'))
- self.out( rprovideLine )
- self.out( "" )
+ self.out('RPROVIDES += "%s"' % " ".join(pkglist))
return
#
@@ -162,7 +163,7 @@ class MakefileMaker:
line = 'RDEPENDS_${PN}-modules="'
for name, data in sorted(self.packages.items()):
- if name not in ['${PN}-dev', '${PN}-distutils-staticdev']:
+ if name not in ['${PN}-dev', '${PN}-distutils-staticdev'] and name not in self.excluded_pkgs:
line += "%s " % name
self.out( "%s \"" % line )
@@ -224,7 +225,7 @@ if __name__ == "__main__":
"${base_libdir}/*.o " +
"${datadir}/aclocal " +
"${datadir}/pkgconfig " +
- "config/Makefile ")
+ "config*/Makefile ")
m.addPackage( "${PN}-2to3", "Python automated Python 2 to 3 code translator", "${PN}-core",
"lib2to3" ) # package
@@ -254,7 +255,7 @@ if __name__ == "__main__":
"py_compile.* compileall.*" )
m.addPackage( "${PN}-compression", "Python high-level compression support", "${PN}-core ${PN}-codecs ${PN}-importlib ${PN}-threading ${PN}-shell",
- "gzip.* zipfile.* tarfile.* lib-dynload/bz2.*.so lib-dynload/zlib.*.so" )
+ "gzip.* zipfile.* tarfile.* lib-dynload/bz2.*.so lib-dynload/zlib.*.so bz2.py lzma.py _compression.py" )
m.addPackage( "${PN}-crypt", "Python basic cryptographic and hashing support", "${PN}-core",
"hashlib.* md5.* sha.* lib-dynload/crypt.*.so lib-dynload/_hashlib.*.so lib-dynload/_sha256.*.so lib-dynload/_sha512.*.so" )
@@ -402,8 +403,8 @@ if __name__ == "__main__":
m.addPackage( "${PN}-terminal", "Python terminal controlling support", "${PN}-core ${PN}-io",
"pty.* tty.*" )
- m.addPackage( "${PN}-tests", "Python tests", "${PN}-core",
- "test" ) # package
+ m.addPackage( "${PN}-tests", "Python tests", "${PN}-core ${PN}-compression",
+ "test", True ) # package
m.addPackage( "${PN}-threading", "Python threading & synchronization support", "${PN}-core ${PN}-lang",
"_threading_local.* dummy_thread.* dummy_threading.* mutex.* threading.* queue.*" )
diff --git a/import-layers/yocto-poky/scripts/create-pull-request b/import-layers/yocto-poky/scripts/create-pull-request
index e82858bc9..280880b3f 100755
--- a/import-layers/yocto-poky/scripts/create-pull-request
+++ b/import-layers/yocto-poky/scripts/create-pull-request
@@ -34,7 +34,7 @@ RFC=0
usage() {
CMD=$(basename $0)
cat <<EOM
-Usage: $CMD [-h] [-o output_dir] [-m msg_body_file] [-s subject] [-r relative_to] [-i commit_id] [-d relative_dir] -u remote [-b branch]
+Usage: $CMD [-h] [-o output_dir] [-m msg_body_file] [-s subject] [-r relative_to] [-i commit_id] [-d relative_dir] -u remote [-b branch] [-- <format-patch options>]
-b branch Branch name in the specified remote (default: current branch)
-l local branch Local branch name (default: HEAD)
-c Create an RFC (Request for Comment) patch series
@@ -57,6 +57,7 @@ Usage: $CMD [-h] [-o output_dir] [-m msg_body_file] [-s subject] [-r relative_to
$CMD -u contrib -r master -i misc -b nitin/misc -o pull-misc
$CMD -u contrib -p "RFC PATCH" -b nitin/experimental
$CMD -u contrib -i misc -b nitin/misc -d ./bitbake
+ $CMD -u contrib -r origin/master -o /tmp/out.v3 -- -v3 --in-reply-to=20170511120134.XX7799@site.com
EOM
}
@@ -108,9 +109,16 @@ while getopts "b:acd:hi:m:o:p:r:s:u:l:" OPT; do
a)
CPR_CONTRIB_AUTO_PUSH="1"
;;
+ --)
+ shift
+ break
+ ;;
esac
done
+shift "$((OPTIND - 1))"
+extraopts="$@"
+
if [ -z "$REMOTE" ]; then
echo "ERROR: Missing parameter -u or CPR_CONTRIB_REMOTE in env, no git remote!"
usage
@@ -201,7 +209,7 @@ if [ -n "$RELDIR" ]; then
ODIR=$(realpath $ODIR)
pdir=$(pwd)
cd $RELDIR
- extraopts="--relative"
+ extraopts="$extraopts --relative"
fi
# Generate the patches and cover letter
@@ -218,7 +226,7 @@ fi
[ -n "$RELDIR" ] && cd $pdir
# Customize the cover letter
-CL="$ODIR/0000-cover-letter.patch"
+CL="$(echo $ODIR/*0000-cover-letter.patch)"
PM="$ODIR/pull-msg"
GIT_VERSION=$(`git --version` | tr -d '[:alpha:][:space:].' | sed 's/\(...\).*/\1/')
NEWER_GIT_VERSION=210
diff --git a/import-layers/yocto-poky/scripts/devtool b/import-layers/yocto-poky/scripts/devtool
index c9ad9ddb9..5292f187e 100755
--- a/import-layers/yocto-poky/scripts/devtool
+++ b/import-layers/yocto-poky/scripts/devtool
@@ -130,25 +130,6 @@ def read_workspace():
'recipefile': recipefile}
logger.debug('Found recipe %s' % workspace[pn])
-def create_unlockedsigs():
- """ This function will make unlocked-sigs.inc match the recipes in the
- workspace. This runs on every run of devtool, but it lets us ensure
- the unlocked items are in sync with the workspace. """
-
- confdir = os.path.join(basepath, 'conf')
- unlockedsigs = os.path.join(confdir, 'unlocked-sigs.inc')
- bb.utils.mkdirhier(confdir)
- with open(os.path.join(confdir, 'unlocked-sigs.inc'), 'w') as f:
- f.write("# DO NOT MODIFY! YOUR CHANGES WILL BE LOST.\n" +
- "# This layer was created by the OpenEmbedded devtool" +
- " utility in order to\n" +
- "# contain recipes that are unlocked.\n")
-
- f.write('SIGGEN_UNLOCKED_RECIPES += "\\\n')
- for pn in workspace:
- f.write(' ' + pn)
- f.write('"')
-
def create_workspace(args, config, basepath, workspace):
if args.layerpath:
workspacedir = os.path.abspath(args.layerpath)
@@ -332,7 +313,6 @@ def main():
if not getattr(args, 'no_workspace', False):
read_workspace()
- create_unlockedsigs()
try:
ret = args.func(args, config, basepath, workspace)
diff --git a/import-layers/yocto-poky/scripts/lib/argparse_oe.py b/import-layers/yocto-poky/scripts/lib/argparse_oe.py
index bf6eb1719..9bdfc1cec 100644
--- a/import-layers/yocto-poky/scripts/lib/argparse_oe.py
+++ b/import-layers/yocto-poky/scripts/lib/argparse_oe.py
@@ -167,3 +167,10 @@ class OeHelpFormatter(argparse.HelpFormatter):
return '\n'.join(lines)
else:
return super(OeHelpFormatter, self)._format_action(action)
+
+def int_positive(value):
+ ivalue = int(value)
+ if ivalue <= 0:
+ raise argparse.ArgumentTypeError(
+ "%s is not a positive int value" % value)
+ return ivalue
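
The new int_positive() helper is an argparse type function: it converts the raw string argument, rejects zero and negative values with argparse.ArgumentTypeError, and returns the int otherwise. A short usage sketch; the --jobs option here is purely illustrative and not taken from the patch:

    import argparse

    def int_positive(value):
        ivalue = int(value)
        if ivalue <= 0:
            raise argparse.ArgumentTypeError(
                "%s is not a positive int value" % value)
        return ivalue

    parser = argparse.ArgumentParser()
    # hypothetical option; any numeric argument that must be >= 1 could use it
    parser.add_argument('--jobs', type=int_positive, default=1)
    print(parser.parse_args(['--jobs', '4']).jobs)   # 4
    # parser.parse_args(['--jobs', '0']) would exit with an error containing
    # "argument --jobs: 0 is not a positive int value"
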
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/help.py b/import-layers/yocto-poky/scripts/lib/bsp/help.py
index 4f0d7721f..85d446b86 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/help.py
+++ b/import-layers/yocto-poky/scripts/lib/bsp/help.py
@@ -818,6 +818,10 @@ yocto_layer_help_usage = """
yocto_layer_create_usage = """
+ WARNING: this plugin will be removed starting 2.5 development in favour
+ of using 'bitbake-layers create-layer' script/plugin, offering a single
+ script to manage layers.
+
Create a new generic Yocto layer
usage: yocto-layer create <layer-name> [layer_priority]
@@ -845,6 +849,10 @@ yocto_layer_create_usage = """
yocto_layer_create_help = """
+WARNING: this plugin will be removed starting 2.5 development in favour
+of using 'bitbake-layers create-layer' script/plugin, offering a single
+script to manage layers.
+
NAME
yocto-layer create - Create a new generic Yocto layer
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine.scc b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine.scc
index 828400df4..fb3866f11 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine.scc
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine.scc
@@ -4,5 +4,3 @@ kconf non-hardware {{machine}}-non_hardware.cfg
include features/usb-net/usb-net.scc
-kconf hardware {{=machine}}-user-config.cfg
-include {{=machine}}-user-patches.scc
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/kernel-list.noinstall b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/kernel-list.noinstall
index 20f2059ce..917f0e220 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/kernel-list.noinstall
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/kernel-list.noinstall
@@ -1,5 +1,5 @@
{{ if kernel_choice != "custom": }}
-{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.10) kernel? (y/n)" default:"y"}}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.12) kernel? (y/n)" default:"y"}}
{{ if kernel_choice != "custom" and use_default_kernel == "n": }}
-{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.10"}}
+{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.12"}}
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-dev.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-dev.bbappend
index c336007eb..22ed27381 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-dev.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-dev.bbappend
@@ -19,7 +19,10 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend
index 5cc82e82a..bae943ea1 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend
@@ -20,7 +20,9 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-tiny.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-tiny.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-patches.scc \
file://{{=machine}}-user-features.scc \
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_4.12.bbappend
index d15a178e1..6f3e104c6 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_4.12.bbappend
@@ -1,4 +1,4 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.1": }} this
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.12": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -20,7 +20,9 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-tiny.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-tiny.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-patches.scc \
file://{{=machine}}-user-features.scc \
@@ -30,6 +32,6 @@ SRC_URI += "file://{{=machine}}-tiny.scc \
# the appropriate changes committed to the upstream linux-yocto repo
SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.1"
+#LINUX_VERSION = "4.10"
#Remove the following line once AUTOREV is locked to a certain SRCREV
KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
index 070bd876a..62d1817f2 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
@@ -20,7 +20,9 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-tiny.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-tiny.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-patches.scc \
file://{{=machine}}-user-features.scc \
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_4.10.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_4.10.bbappend
index eaf436759..dfbecb533 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_4.10.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_4.10.bbappend
@@ -20,9 +20,12 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
# replace these SRCREVs with the real commit ids once you've had
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_4.1.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_4.12.bbappend
index 4e7d7cbed..e874c9e45 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_4.1.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_4.12.bbappend
@@ -1,4 +1,4 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.1": }} this
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.12": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -20,15 +20,18 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
# replace these SRCREVs with the real commit ids once you've had
# the appropriate changes committed to the upstream linux-yocto repo
SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.1"
+#LINUX_VERSION = "4.10"
#Remove the following line once AUTOREV is locked to a certain SRCREV
KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_4.4.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_4.4.bbappend
index 56e8ad3a1..a809c7600 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_4.4.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_4.4.bbappend
@@ -20,9 +20,12 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
# replace these SRCREVs with the real commit ids once you've had
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom.bb b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom.bb
index fda955b5e..3ba4226aa 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom.bb
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom.bb
@@ -42,6 +42,7 @@ SRC_URI += "file://{{=machine}}.scc \
file://{{=machine}}.cfg \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
"
{{ if kernel_choice == "custom" and custom_kernel_need_kbranch == "y" and custom_kernel_kbranch and custom_kernel_kbranch != "master": }}
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom/machine-user-features.scc b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom/machine-user-features.scc
new file mode 100644
index 000000000..582759e61
--- /dev/null
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom/machine-user-features.scc
@@ -0,0 +1 @@
+# yocto-bsp-filename {{=machine}}-user-features.scc
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom/machine.scc b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom/machine.scc
index 0b6b41337..64d3ed181 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom/machine.scc
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom/machine.scc
@@ -14,5 +14,3 @@ kconf hardware {{=machine}}.cfg
# These are used by yocto-kernel to add config fragments and features.
# Don't remove if you plan on using yocto-kernel with this BSP.
-kconf hardware {{=machine}}-user-config.cfg
-include {{=machine}}-user-patches.scc
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine.scc b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine.scc
index 3d32f111b..3e4c54fcf 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine.scc
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine.scc
@@ -17,5 +17,3 @@ include cfg/usb-mass-storage.scc
include cfg/boot-live.scc
include features/power/intel.scc
-kconf hardware {{=machine}}-user-config.cfg
-include {{=machine}}-user-patches.scc
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/kernel-list.noinstall b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/kernel-list.noinstall
index 20f2059ce..917f0e220 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/kernel-list.noinstall
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/kernel-list.noinstall
@@ -1,5 +1,5 @@
{{ if kernel_choice != "custom": }}
-{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.10) kernel? (y/n)" default:"y"}}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.12) kernel? (y/n)" default:"y"}}
{{ if kernel_choice != "custom" and use_default_kernel == "n": }}
-{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.10"}}
+{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.12"}}
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-dev.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-dev.bbappend
index c336007eb..22ed27381 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-dev.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-dev.bbappend
@@ -19,7 +19,10 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend
index 5cc82e82a..bae943ea1 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend
@@ -20,7 +20,9 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-tiny.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-tiny.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-patches.scc \
file://{{=machine}}-user-features.scc \
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_4.8.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_4.12.bbappend
index c391322e3..6f3e104c6 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_4.8.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_4.12.bbappend
@@ -1,4 +1,4 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.8": }} this
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.12": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -20,7 +20,9 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-tiny.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-tiny.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-patches.scc \
file://{{=machine}}-user-features.scc \
@@ -30,6 +32,6 @@ SRC_URI += "file://{{=machine}}-tiny.scc \
# the appropriate changes committed to the upstream linux-yocto repo
SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.8"
+#LINUX_VERSION = "4.10"
#Remove the following line once AUTOREV is locked to a certain SRCREV
KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
index 070bd876a..62d1817f2 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
@@ -20,7 +20,9 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-tiny.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-tiny.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-patches.scc \
file://{{=machine}}-user-features.scc \
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.10.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.10.bbappend
index 02059207d..f8616ed87 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.10.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.10.bbappend
@@ -20,9 +20,12 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
# replace these SRCREVs with the real commit ids once you've had
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.1.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.12.bbappend
index 5ed144b0e..20d57f673 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.1.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.12.bbappend
@@ -1,4 +1,4 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.1": }} this
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.12": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -20,15 +20,18 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
# replace these SRCREVs with the real commit ids once you've had
# the appropriate changes committed to the upstream linux-yocto repo
SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.1"
+#LINUX_VERSION = "4.10"
#Remove the following line once AUTOREV is locked to a certain SRCREV
KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.4.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.4.bbappend
index ab644bdad..0a9d47595 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.4.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.4.bbappend
@@ -20,9 +20,12 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
# replace these SRCREVs with the real commit ids once you've had
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.8.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.8.bbappend
deleted file mode 100644
index a535aeab8..000000000
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.8.bbappend
+++ /dev/null
@@ -1,34 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.8": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard:standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard:standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-standard.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.8"
-#Remove the following line once AUTOREV is locked to a certain SRCREV
-KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine.scc b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine.scc
index f39dc3edf..792fdc94a 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine.scc
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine.scc
@@ -4,5 +4,3 @@ kconf hardware {{=machine}}.cfg
include cfg/usb-mass-storage.scc
include cfg/fs/vfat.scc
-kconf hardware {{=machine}}-user-config.cfg
-include {{=machine}}-user-patches.scc
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/kernel-list.noinstall b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/kernel-list.noinstall
index 20f2059ce..917f0e220 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/kernel-list.noinstall
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/kernel-list.noinstall
@@ -1,5 +1,5 @@
{{ if kernel_choice != "custom": }}
-{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.10) kernel? (y/n)" default:"y"}}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.12) kernel? (y/n)" default:"y"}}
{{ if kernel_choice != "custom" and use_default_kernel == "n": }}
-{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.10"}}
+{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.12"}}
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-dev.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-dev.bbappend
index c336007eb..22ed27381 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-dev.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-dev.bbappend
@@ -19,7 +19,10 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend
deleted file mode 100644
index d15a178e1..000000000
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend
+++ /dev/null
@@ -1,35 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.1": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-tiny.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.1"
-#Remove the following line once AUTOREV is locked to a certain SRCREV
-KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend
index 5cc82e82a..bae943ea1 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend
@@ -20,7 +20,9 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-tiny.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-tiny.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-patches.scc \
file://{{=machine}}-user-features.scc \
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.12.bbappend
index d15a178e1..6f3e104c6 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.12.bbappend
@@ -1,4 +1,4 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.1": }} this
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.12": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -20,7 +20,9 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-tiny.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-tiny.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-patches.scc \
file://{{=machine}}-user-features.scc \
@@ -30,6 +32,6 @@ SRC_URI += "file://{{=machine}}-tiny.scc \
# the appropriate changes committed to the upstream linux-yocto repo
SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.1"
+#LINUX_VERSION = "4.10"
#Remove the following line once AUTOREV is locked to a certain SRCREV
KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
index 070bd876a..62d1817f2 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
@@ -20,7 +20,9 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-tiny.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-tiny.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-patches.scc \
file://{{=machine}}-user-features.scc \
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.8.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.8.bbappend
deleted file mode 100644
index c391322e3..000000000
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.8.bbappend
+++ /dev/null
@@ -1,35 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.8": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-tiny.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.8"
-#Remove the following line once AUTOREV is locked to a certain SRCREV
-KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_4.10.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_4.10.bbappend
index eaf436759..dfbecb533 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_4.10.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_4.10.bbappend
@@ -20,9 +20,12 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
# replace these SRCREVs with the real commit ids once you've had
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_4.8.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_4.12.bbappend
index 59752a9ed..e874c9e45 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_4.8.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_4.12.bbappend
@@ -1,4 +1,4 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.8": }} this
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.12": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -20,15 +20,18 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
# replace these SRCREVs with the real commit ids once you've had
# the appropriate changes committed to the upstream linux-yocto repo
SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.8"
+#LINUX_VERSION = "4.10"
#Remove the following line once AUTOREV is locked to a certain SRCREV
KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_4.4.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_4.4.bbappend
index 56e8ad3a1..a809c7600 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_4.4.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_4.4.bbappend
@@ -20,9 +20,12 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
# replace these SRCREVs with the real commit ids once you've had
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine.scc b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine.scc
index f39dc3edf..792fdc94a 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine.scc
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine.scc
@@ -4,5 +4,3 @@ kconf hardware {{=machine}}.cfg
include cfg/usb-mass-storage.scc
include cfg/fs/vfat.scc
-kconf hardware {{=machine}}-user-config.cfg
-include {{=machine}}-user-patches.scc
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/kernel-list.noinstall b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/kernel-list.noinstall
index 20f2059ce..917f0e220 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/kernel-list.noinstall
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/kernel-list.noinstall
@@ -1,5 +1,5 @@
{{ if kernel_choice != "custom": }}
-{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.10) kernel? (y/n)" default:"y"}}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.12) kernel? (y/n)" default:"y"}}
{{ if kernel_choice != "custom" and use_default_kernel == "n": }}
-{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.10"}}
+{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.12"}}
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-dev.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-dev.bbappend
index c336007eb..22ed27381 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-dev.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-dev.bbappend
@@ -19,7 +19,10 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend
deleted file mode 100644
index d15a178e1..000000000
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend
+++ /dev/null
@@ -1,35 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.1": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-tiny.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.1"
-#Remove the following line once AUTOREV is locked to a certain SRCREV
-KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend
index 5cc82e82a..bae943ea1 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend
@@ -20,7 +20,9 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-tiny.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-tiny.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-patches.scc \
file://{{=machine}}-user-features.scc \
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_4.8.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.12.bbappend
index c391322e3..6f3e104c6 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_4.8.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.12.bbappend
@@ -1,4 +1,4 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.8": }} this
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.12": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -20,7 +20,9 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-tiny.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-tiny.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-patches.scc \
file://{{=machine}}-user-features.scc \
@@ -30,6 +32,6 @@ SRC_URI += "file://{{=machine}}-tiny.scc \
# the appropriate changes committed to the upstream linux-yocto repo
SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.8"
+#LINUX_VERSION = "4.10"
#Remove the following line once AUTOREV is locked to a certain SRCREV
KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
index 070bd876a..62d1817f2 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
@@ -20,7 +20,9 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-tiny.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-tiny.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-patches.scc \
file://{{=machine}}-user-features.scc \
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.8.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.8.bbappend
deleted file mode 100644
index c391322e3..000000000
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.8.bbappend
+++ /dev/null
@@ -1,35 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.8": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-tiny.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.8"
-#Remove the following line once AUTOREV is locked to a certain SRCREV
-KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_4.10.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_4.10.bbappend
index 512b59758..336a95631 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_4.10.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_4.10.bbappend
@@ -20,9 +20,12 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
# replace these SRCREVs with the real commit ids once you've had
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_4.1.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_4.12.bbappend
index 802e5f465..5333c30b8 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_4.1.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_4.12.bbappend
@@ -1,4 +1,4 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.1": }} this
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.12": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -20,15 +20,18 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
# replace these SRCREVs with the real commit ids once you've had
# the appropriate changes committed to the upstream linux-yocto repo
SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.1"
+#LINUX_VERSION = "4.10"
#Remove the following line once AUTOREV is locked to a certain SRCREV
KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_4.4.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_4.4.bbappend
index dbb0fd55a..7d18566b2 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_4.4.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_4.4.bbappend
@@ -20,9 +20,12 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
# replace these SRCREVs with the real commit ids once you've had
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_4.8.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_4.8.bbappend
deleted file mode 100644
index c2eb40d49..000000000
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_4.8.bbappend
+++ /dev/null
@@ -1,34 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.8": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/edgerouter" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/edgerouter" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-standard.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.8"
-#Remove the following line once AUTOREV is locked to a certain SRCREV
-KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine.scc b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine.scc
index 7aac8b080..89bb97efd 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine.scc
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine.scc
@@ -6,5 +6,3 @@ include cfg/fs/vfat.scc
include cfg/dmaengine.scc
-kconf hardware {{=machine}}-user-config.cfg
-include {{=machine}}-user-patches.scc
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/kernel-list.noinstall b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/kernel-list.noinstall
index 20f2059ce..917f0e220 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/kernel-list.noinstall
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/kernel-list.noinstall
@@ -1,5 +1,5 @@
{{ if kernel_choice != "custom": }}
-{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.10) kernel? (y/n)" default:"y"}}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.12) kernel? (y/n)" default:"y"}}
{{ if kernel_choice != "custom" and use_default_kernel == "n": }}
-{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.10"}}
+{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.12"}}
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-dev.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-dev.bbappend
index c336007eb..22ed27381 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-dev.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-dev.bbappend
@@ -19,7 +19,10 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend
deleted file mode 100644
index d15a178e1..000000000
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend
+++ /dev/null
@@ -1,35 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.1": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-tiny.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.1"
-#Remove the following line once AUTOREV is locked to a certain SRCREV
-KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend
index 5cc82e82a..bae943ea1 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend
@@ -20,7 +20,9 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-tiny.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-tiny.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-patches.scc \
file://{{=machine}}-user-features.scc \
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.12.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.12.bbappend
new file mode 100644
index 000000000..6f3e104c6
--- /dev/null
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.12.bbappend
@@ -0,0 +1,37 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.12": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-tiny.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+#LINUX_VERSION = "4.10"
+#Remove the following line once AUTOREV is locked to a certain SRCREV
+KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
index 070bd876a..62d1817f2 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
@@ -20,7 +20,9 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-tiny.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-tiny.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-patches.scc \
file://{{=machine}}-user-features.scc \
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.8.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.8.bbappend
deleted file mode 100644
index c391322e3..000000000
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.8.bbappend
+++ /dev/null
@@ -1,35 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.8": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-tiny.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.8"
-#Remove the following line once AUTOREV is locked to a certain SRCREV
-KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.1.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.1.bbappend
deleted file mode 100644
index 4e7d7cbed..000000000
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.1.bbappend
+++ /dev/null
@@ -1,34 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.1": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-standard.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.1"
-#Remove the following line once AUTOREV is locked to a certain SRCREV
-KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.10.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.10.bbappend
index eaf436759..dfbecb533 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.10.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.10.bbappend
@@ -20,9 +20,12 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
# replace these SRCREVs with the real commit ids once you've had
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_4.1.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.12.bbappend
index 4e7d7cbed..e874c9e45 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_4.1.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.12.bbappend
@@ -1,4 +1,4 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.1": }} this
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.12": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -20,15 +20,18 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
# replace these SRCREVs with the real commit ids once you've had
# the appropriate changes committed to the upstream linux-yocto repo
SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.1"
+#LINUX_VERSION = "4.10"
#Remove the following line once AUTOREV is locked to a certain SRCREV
KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.4.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.4.bbappend
index 56e8ad3a1..a809c7600 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.4.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.4.bbappend
@@ -20,9 +20,12 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
# replace these SRCREVs with the real commit ids once you've had
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.8.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.8.bbappend
deleted file mode 100644
index 59752a9ed..000000000
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.8.bbappend
+++ /dev/null
@@ -1,34 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.8": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-standard.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.8"
-#Remove the following line once AUTOREV is locked to a certain SRCREV
-KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine.scc b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine.scc
index 8301e05f7..d25d0a037 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine.scc
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine.scc
@@ -1,5 +1,3 @@
# yocto-bsp-filename {{=machine}}.scc
kconf hardware {{=machine}}.cfg
-kconf hardware {{=machine}}-user-config.cfg
-include {{=machine}}-user-patches.scc
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/kernel-list.noinstall b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/kernel-list.noinstall
index 20f2059ce..917f0e220 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/kernel-list.noinstall
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/kernel-list.noinstall
@@ -1,5 +1,5 @@
{{ if kernel_choice != "custom": }}
-{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.10) kernel? (y/n)" default:"y"}}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.12) kernel? (y/n)" default:"y"}}
{{ if kernel_choice != "custom" and use_default_kernel == "n": }}
-{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.10"}}
+{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.12"}}
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-dev.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-dev.bbappend
index 7e3ce5ba1..d7b9cef98 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-dev.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-dev.bbappend
@@ -45,11 +45,15 @@ COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
{{ if need_new_kbranch == "n": }}
KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+{{ if qemuarch != "arm": }}
{{ input type:"boolean" name:"smp" prio:"30" msg:"Would you like SMP support? (y/n)" default:"y"}}
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend
index 29ad17b20..8c0fd1577 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend
@@ -45,11 +45,14 @@ COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
{{ if need_new_kbranch == "n": }}
KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+{{ if qemuarch != "arm": }}
{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-tiny.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-tiny.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-patches.scc \
file://{{=machine}}-user-features.scc \
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_4.12.bbappend
index 81392ce38..83eb216dc 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_4.12.bbappend
@@ -1,4 +1,4 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.1": }} this
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.12": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -45,11 +45,14 @@ COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
{{ if need_new_kbranch == "n": }}
KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+{{ if qemuarch != "arm": }}
{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-tiny.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-tiny.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-patches.scc \
file://{{=machine}}-user-features.scc \
@@ -59,6 +62,6 @@ SRC_URI += "file://{{=machine}}-tiny.scc \
# the appropriate changes committed to the upstream linux-yocto repo
SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.1"
+#LINUX_VERSION = "4.10"
#Remove the following line once AUTOREV is locked to a certain SRCREV
KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
index a73b1aa13..22abc230b 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
@@ -45,11 +45,14 @@ COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
{{ if need_new_kbranch == "n": }}
KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+{{ if qemuarch != "arm": }}
{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-tiny.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-tiny.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-patches.scc \
file://{{=machine}}-user-features.scc \
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_4.8.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_4.8.bbappend
deleted file mode 100644
index 7d40671fd..000000000
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_4.8.bbappend
+++ /dev/null
@@ -1,64 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.8": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "arm": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"arm" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "arm": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"arm" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "powerpc": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"powerpc" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "powerpc": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"powerpc" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "i386": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "i386": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/common-pc" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "x86_64": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "x86_64": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "mips": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "mips": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "mips64": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "mips64": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-tiny.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.8"
-#Remove the following line once AUTOREV is locked to a certain SRCREV
-KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.10.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.10.bbappend
index 5873da424..851d96c37 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.10.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.10.bbappend
@@ -45,13 +45,17 @@ COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
{{ if need_new_kbranch == "n": }}
KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+{{ if qemuarch != "arm": }}
{{ input type:"boolean" name:"smp" prio:"30" msg:"Would you like SMP support? (y/n)" default:"y"}}
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
# replace these SRCREVs with the real commit ids once you've had
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.1.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.12.bbappend
index a9fd9ecff..d7ce37e23 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.1.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.12.bbappend
@@ -1,4 +1,4 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.1": }} this
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.12": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -45,19 +45,23 @@ COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
{{ if need_new_kbranch == "n": }}
KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+{{ if qemuarch != "arm": }}
{{ input type:"boolean" name:"smp" prio:"30" msg:"Would you like SMP support? (y/n)" default:"y"}}
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
# replace these SRCREVs with the real commit ids once you've had
# the appropriate changes committed to the upstream linux-yocto repo
SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.1"
+#LINUX_VERSION = "4.10"
#Remove the following line once AUTOREV is locked to a certain SRCREV
KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.4.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.4.bbappend
index cdee77379..71be913bb 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.4.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.4.bbappend
@@ -45,13 +45,17 @@ COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
{{ if need_new_kbranch == "n": }}
KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+{{ if qemuarch != "arm": }}
{{ input type:"boolean" name:"smp" prio:"30" msg:"Would you like SMP support? (y/n)" default:"y"}}
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
# replace these SRCREVs with the real commit ids once you've had
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.8.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.8.bbappend
deleted file mode 100644
index 24c28803e..000000000
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.8.bbappend
+++ /dev/null
@@ -1,63 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.8": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "arm": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base your new BSP branch on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "arm": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose an existing machine branch to use for this BSP:" default:"standard/arm-versatile-926ejs" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "powerpc": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"powerpc" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "powerpc": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"powerpc" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/qemuppc" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "i386": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "i386": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "x86_64": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "x86_64": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "mips": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/mti-malta32" }}
-
-{{ if need_new_kbranch == "n" and qemuarch == "mips64": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/mti-malta64" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "mips": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "y" and qemuarch == "mips64": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Would you like SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-standard.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.8"
-#Remove the following line once AUTOREV is locked to a certain SRCREV
-KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine.scc b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine.scc
index 9b7c291a8..9d20d199b 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine.scc
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine.scc
@@ -10,5 +10,3 @@ include features/i915/i915.scc
include cfg/usb-mass-storage.scc
include features/power/intel.scc
-kconf hardware {{=machine}}-user-config.cfg
-include {{=machine}}-user-patches.scc
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/kernel-list.noinstall b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/kernel-list.noinstall
index 20f2059ce..917f0e220 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/kernel-list.noinstall
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/kernel-list.noinstall
@@ -1,5 +1,5 @@
{{ if kernel_choice != "custom": }}
-{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.10) kernel? (y/n)" default:"y"}}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.12) kernel? (y/n)" default:"y"}}
{{ if kernel_choice != "custom" and use_default_kernel == "n": }}
-{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.10"}}
+{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.12"}}
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-dev.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-dev.bbappend
index c336007eb..22ed27381 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-dev.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-dev.bbappend
@@ -19,7 +19,10 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend
deleted file mode 100644
index d15a178e1..000000000
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend
+++ /dev/null
@@ -1,35 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.1": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-tiny.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.1"
-#Remove the following line once AUTOREV is locked to a certain SRCREV
-KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend
index 5cc82e82a..bae943ea1 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend
@@ -20,7 +20,9 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-tiny.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-tiny.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-patches.scc \
file://{{=machine}}-user-features.scc \
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.12.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.12.bbappend
new file mode 100644
index 000000000..6f3e104c6
--- /dev/null
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.12.bbappend
@@ -0,0 +1,37 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.12": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-tiny.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+#LINUX_VERSION = "4.10"
+#Remove the following line once AUTOREV is locked to a certain SRCREV
+KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
index 070bd876a..62d1817f2 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.4.bbappend
@@ -20,7 +20,9 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-tiny.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-tiny.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-patches.scc \
file://{{=machine}}-user-features.scc \
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.8.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.8.bbappend
deleted file mode 100644
index c391322e3..000000000
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.8.bbappend
+++ /dev/null
@@ -1,35 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.8": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-tiny.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.8"
-#Remove the following line once AUTOREV is locked to a certain SRCREV
-KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.1.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.1.bbappend
deleted file mode 100644
index 4e7d7cbed..000000000
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.1.bbappend
+++ /dev/null
@@ -1,34 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.1": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-standard.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.1"
-#Remove the following line once AUTOREV is locked to a certain SRCREV
-KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.10.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.10.bbappend
index eaf436759..dfbecb533 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.10.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.10.bbappend
@@ -20,9 +20,12 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
# replace these SRCREVs with the real commit ids once you've had
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_4.8.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.12.bbappend
index 59752a9ed..e874c9e45 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_4.8.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.12.bbappend
@@ -1,4 +1,4 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.8": }} this
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.12": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -20,15 +20,18 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
# replace these SRCREVs with the real commit ids once you've had
# the appropriate changes committed to the upstream linux-yocto repo
SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.8"
+#LINUX_VERSION = "4.12"
#Remove the following line once AUTOREV is locked to a certain SRCREV
KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.4.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.4.bbappend
index 56e8ad3a1..a809c7600 100644
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.4.bbappend
+++ b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.4.bbappend
@@ -20,9 +20,12 @@ KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
{{ if smp == "y": }}
KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-SRC_URI += "file://{{=machine}}-standard.scc \
+SRC_URI += "file://{{=machine}}.scc \
+ file://{{=machine}}.cfg \
+ file://{{=machine}}-standard.scc \
file://{{=machine}}-user-config.cfg \
file://{{=machine}}-user-features.scc \
+ file://{{=machine}}-user-patches.scc \
"
# replace these SRCREVs with the real commit ids once you've had
diff --git a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.8.bbappend b/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.8.bbappend
deleted file mode 100644
index 59752a9ed..000000000
--- a/import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.8.bbappend
+++ /dev/null
@@ -1,34 +0,0 @@
-# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.8": }} this
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-standard.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-features.scc \
- "
-
-# replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
-#LINUX_VERSION = "4.8"
-#Remove the following line once AUTOREV is locked to a certain SRCREV
-KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/import-layers/yocto-poky/scripts/lib/build_perf/html/report.html b/import-layers/yocto-poky/scripts/lib/build_perf/html/report.html
index 165cbb811..291ad9d72 100644
--- a/import-layers/yocto-poky/scripts/lib/build_perf/html/report.html
+++ b/import-layers/yocto-poky/scripts/lib/build_perf/html/report.html
@@ -53,9 +53,11 @@ summary th, .meta-table td {
border-collapse: collapse;
}
.details th {
- font-weight: normal;
padding-right: 8px;
}
+.details.plain th {
+ font-weight: normal;
+}
.preformatted {
font-family: monospace;
white-space: pre-wrap;
@@ -118,29 +120,32 @@ h3 {
{% else %}
{% set row_style = 'style="background-color: #ffffff"' %}
{% endif %}
- <tr {{ row_style }}><td>{{ test.name }}: {{ test.description }}</td>
{% if test.status == 'SUCCESS' %}
{% for measurement in test.measurements %}
- {# add empty cell in place of the test name#}
- {% if loop.index > 1 %}<td></td>{% endif %}
- {% if measurement.absdiff > 0 %}
- {% set result_style = "color: red" %}
- {% elif measurement.absdiff == measurement.absdiff %}
- {% set result_style = "color: green" %}
- {% else %}
- {% set result_style = "color: orange" %}
- {%endif %}
- <td>{{ measurement.description }}</td>
- <td style="font-weight: bold">{{ measurement.value.mean }}</td>
- <td style="{{ result_style }}">{{ measurement.absdiff_str }}</td>
- <td style="{{ result_style }}">{{ measurement.reldiff }}</td>
- </tr><tr {{ row_style }}>
+ <tr {{ row_style }}>
+ {% if loop.index == 1 %}
+ <td>{{ test.name }}: {{ test.description }}</td>
+ {% else %}
+ {# add empty cell in place of the test name#}
+ <td></td>
+ {% endif %}
+ {% if measurement.absdiff > 0 %}
+ {% set result_style = "color: red" %}
+ {% elif measurement.absdiff == measurement.absdiff %}
+ {% set result_style = "color: green" %}
+ {% else %}
+ {% set result_style = "color: orange" %}
+ {%endif %}
+ <td>{{ measurement.description }}</td>
+ <td style="font-weight: bold">{{ measurement.value.mean }}</td>
+ <td style="{{ result_style }}">{{ measurement.absdiff_str }}</td>
+ <td style="{{ result_style }}">{{ measurement.reldiff }}</td>
+ </tr>
{% endfor %}
{% else %}
<td style="font-weight: bold; color: red;">{{test.status }}</td>
<td></td> <td></td> <td></td> <td></td>
{% endif %}
- </tr>
{% endfor %}
</table>
@@ -165,6 +170,7 @@ h3 {
{{ measurement.absdiff_str }} ({{measurement.reldiff}})
</span></span>
</div>
+ {# Table for trendchart and the statistics #}
<table style="width: 100%">
<tr>
<td style="width: 75%">
@@ -173,7 +179,7 @@ h3 {
</td>
<td>
{# Measurement statistics #}
- <table class="details">
+ <table class="details plain">
<tr>
<th>Test runs</th><td>{{ measurement.value.sample_cnt }}</td>
</tr><tr>
@@ -186,11 +192,85 @@ h3 {
<th>Stdev</th><td>{{ measurement.value.stdev }}</td>
</tr><tr>
<th><div id="{{ test.name }}_{{ measurement.name }}_chart_png"></div></th>
+ <td></td>
</tr>
</table>
</td>
</tr>
</table>
+
+ {# Task and recipe summary from buildstats #}
+ {% if 'buildstats' in measurement %}
+ Task resource usage
+ <table class="details" style="width:100%">
+ <tr>
+ <th>Number of tasks</th>
+ <th>Top consumers of cputime</th>
+ </tr>
+ <tr>
+ <td style="vertical-align: top">{{ measurement.buildstats.tasks.count }} ({{ measurement.buildstats.tasks.change }})</td>
+ {# Table of most resource-hungry tasks #}
+ <td>
+ <table class="details plain">
+ {% for diff in measurement.buildstats.top_consumer|reverse %}
+ <tr>
+ <th>{{ diff.pkg }}.{{ diff.task }}</th>
+ <td>{{ '%0.0f' % diff.value2 }} s</td>
+ </tr>
+ {% endfor %}
+ </table>
+ </td>
+ </tr>
+ <tr>
+ <th>Biggest increase in cputime</th>
+ <th>Biggest decrease in cputime</th>
+ </tr>
+ <tr>
+ {# Table biggest increase in resource usage #}
+ <td>
+ <table class="details plain">
+ {% for diff in measurement.buildstats.top_increase|reverse %}
+ <tr>
+ <th>{{ diff.pkg }}.{{ diff.task }}</th>
+ <td>{{ '%+0.0f' % diff.absdiff }} s</td>
+ </tr>
+ {% endfor %}
+ </table>
+ </td>
+ {# Table biggest decrease in resource usage #}
+ <td>
+ <table class="details plain">
+ {% for diff in measurement.buildstats.top_decrease %}
+ <tr>
+ <th>{{ diff.pkg }}.{{ diff.task }}</th>
+ <td>{{ '%+0.0f' % diff.absdiff }} s</td>
+ </tr>
+ {% endfor %}
+ </table>
+ </td>
+ </tr>
+ </table>
+
+ {# Recipe version differences #}
+ {% if measurement.buildstats.ver_diff %}
+ <div style="margin-top: 16px">Recipe version changes</div>
+ <table class="details">
+ {% for head, recipes in measurement.buildstats.ver_diff.items() %}
+ <tr>
+ <th colspan="2">{{ head }}</th>
+ </tr>
+ {% for name, info in recipes|sort %}
+ <tr>
+ <td>{{ name }}</td>
+ <td>{{ info }}</td>
+ </tr>
+ {% endfor %}
+ {% endfor %}
+ </table>
+ {% else %}
+ <div style="margin-top: 16px">No recipe version changes detected</div>
+ {% endif %}
+ {% endif %}
</div>
{% endfor %}
{# Unsuccessful test #}
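The template additions above read a per-measurement 'buildstats' entry carrying task counts, the top cputime consumers, and recipe version changes. As a rough guide only, the Python literal below sketches the shape of data those Jinja expressions appear to expect; the key names are taken from the template, every value is invented.

# Hypothetical data shape for measurement.buildstats as consumed by the template.
measurement_buildstats = {
    'tasks': {'count': 4213, 'change': '+12'},          # "Number of tasks" cell
    'top_consumer': [                                    # top consumers of cputime
        {'pkg': 'gcc-cross-x86_64', 'task': 'do_compile', 'value2': 1295.0},
    ],
    'top_increase': [                                     # biggest increase in cputime
        {'pkg': 'binutils', 'task': 'do_compile', 'absdiff': 42.0},
    ],
    'top_decrease': [                                     # biggest decrease in cputime
        {'pkg': 'glibc', 'task': 'do_compile', 'absdiff': -17.0},
    ],
    'ver_diff': {                                         # recipe version changes section
        'Version changed': [('openssl', '1.0.2k -> 1.0.2l')],
    },
}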
diff --git a/import-layers/yocto-poky/scripts/lib/build_perf/report.py b/import-layers/yocto-poky/scripts/lib/build_perf/report.py
index eb00ccca2..d99a36797 100644
--- a/import-layers/yocto-poky/scripts/lib/build_perf/report.py
+++ b/import-layers/yocto-poky/scripts/lib/build_perf/report.py
@@ -11,12 +11,15 @@
# more details.
#
"""Handling of build perf test reports"""
-from collections import OrderedDict, Mapping
+from collections import OrderedDict, Mapping, namedtuple
from datetime import datetime, timezone
from numbers import Number
from statistics import mean, stdev, variance
+AggregateTestData = namedtuple('AggregateTestData', ['metadata', 'results'])
+
+
def isofmt_to_timestamp(string):
"""Convert timestamp string in ISO 8601 format into unix timestamp"""
if '.' in string:
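The new AggregateTestData tuple simply pairs a report's metadata with its results so they can be passed around together. A minimal, hypothetical usage sketch (values invented):

# Hypothetical usage; metadata/results normally come from the parsed JSON reports.
from collections import namedtuple
AggregateTestData = namedtuple('AggregateTestData', ['metadata', 'results'])

data = AggregateTestData(metadata={'hostname': 'build-perf-1'},
                         results={'tests': {}})
print(data.metadata['hostname'], len(data.results['tests']))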
diff --git a/import-layers/yocto-poky/scripts/lib/buildstats.py b/import-layers/yocto-poky/scripts/lib/buildstats.py
new file mode 100644
index 000000000..d9aadf3cb
--- /dev/null
+++ b/import-layers/yocto-poky/scripts/lib/buildstats.py
@@ -0,0 +1,349 @@
+#
+# Copyright (c) 2017, Intel Corporation.
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms and conditions of the GNU General Public License,
+# version 2, as published by the Free Software Foundation.
+#
+# This program is distributed in the hope it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+# more details.
+#
+"""Functionality for analyzing buildstats"""
+import json
+import logging
+import os
+import re
+from collections import namedtuple, OrderedDict
+from statistics import mean
+
+
+log = logging.getLogger()
+
+
+taskdiff_fields = ('pkg', 'pkg_op', 'task', 'task_op', 'value1', 'value2',
+ 'absdiff', 'reldiff')
+TaskDiff = namedtuple('TaskDiff', ' '.join(taskdiff_fields))
+
+
+class BSError(Exception):
+ """Error handling of buildstats"""
+ pass
+
+
+class BSTask(dict):
+ def __init__(self, *args, **kwargs):
+ self['start_time'] = None
+ self['elapsed_time'] = None
+ self['status'] = None
+ self['iostat'] = {}
+ self['rusage'] = {}
+ self['child_rusage'] = {}
+ super(BSTask, self).__init__(*args, **kwargs)
+
+ @property
+ def cputime(self):
+ """Sum of user and system time taken by the task"""
+ rusage = self['rusage']['ru_stime'] + self['rusage']['ru_utime']
+ if self['child_rusage']:
+ # Child rusage may have been optimized out
+ return rusage + self['child_rusage']['ru_stime'] + self['child_rusage']['ru_utime']
+ else:
+ return rusage
+
+ @property
+ def walltime(self):
+ """Elapsed wall clock time"""
+ return self['elapsed_time']
+
+ @property
+ def read_bytes(self):
+ """Bytes read from the block layer"""
+ return self['iostat']['read_bytes']
+
+ @property
+ def write_bytes(self):
+ """Bytes written to the block layer"""
+ return self['iostat']['write_bytes']
+
+ @property
+ def read_ops(self):
+ """Number of read operations on the block layer"""
+ if self['child_rusage']:
+ # Child rusage may have been optimized out
+ return self['rusage']['ru_inblock'] + self['child_rusage']['ru_inblock']
+ else:
+ return self['rusage']['ru_inblock']
+
+ @property
+ def write_ops(self):
+ """Number of write operations on the block layer"""
+ if self['child_rusage']:
+ # Child rusage may have been optimized out
+ return self['rusage']['ru_oublock'] + self['child_rusage']['ru_oublock']
+ else:
+ return self['rusage']['ru_oublock']
+
+ @classmethod
+ def from_file(cls, buildstat_file):
+ """Read buildstat text file"""
+ bs_task = cls()
+ log.debug("Reading task buildstats from %s", buildstat_file)
+ end_time = None
+ with open(buildstat_file) as fobj:
+ for line in fobj.readlines():
+ key, val = line.split(':', 1)
+ val = val.strip()
+ if key == 'Started':
+ start_time = float(val)
+ bs_task['start_time'] = start_time
+ elif key == 'Ended':
+ end_time = float(val)
+ elif key.startswith('IO '):
+ split = key.split()
+ bs_task['iostat'][split[1]] = int(val)
+ elif key.find('rusage') >= 0:
+ split = key.split()
+ ru_key = split[-1]
+ if ru_key in ('ru_stime', 'ru_utime'):
+ val = float(val)
+ else:
+ val = int(val)
+ ru_type = 'rusage' if split[0] == 'rusage' else \
+ 'child_rusage'
+ bs_task[ru_type][ru_key] = val
+ elif key == 'Status':
+ bs_task['status'] = val
+ if end_time is not None and start_time is not None:
+ bs_task['elapsed_time'] = end_time - start_time
+ else:
+            raise BSError("{} looks like an invalid buildstats file".format(buildstat_file))
+ return bs_task
+
+
+class BSTaskAggregate(object):
+ """Class representing multiple runs of the same task"""
+ properties = ('cputime', 'walltime', 'read_bytes', 'write_bytes',
+ 'read_ops', 'write_ops')
+
+ def __init__(self, tasks=None):
+ self._tasks = tasks or []
+ self._properties = {}
+
+ def __getattr__(self, name):
+ if name in self.properties:
+ if name not in self._properties:
+ # Calculate properties on demand only. We only provide mean
+ # value, so far
+ self._properties[name] = mean([getattr(t, name) for t in self._tasks])
+ return self._properties[name]
+ else:
+ raise AttributeError("'BSTaskAggregate' has no attribute '{}'".format(name))
+
+ def append(self, task):
+ """Append new task"""
+ # Reset pre-calculated properties
+ assert isinstance(task, BSTask), "Type is '{}' instead of 'BSTask'".format(type(task))
+ self._properties = {}
+ self._tasks.append(task)
+
+
+class BSRecipe(object):
+ """Class representing buildstats of one recipe"""
+ def __init__(self, name, epoch, version, revision):
+ self.name = name
+ self.epoch = epoch
+ self.version = version
+ self.revision = revision
+ if epoch is None:
+ self.evr = "{}-{}".format(version, revision)
+ else:
+ self.evr = "{}_{}-{}".format(epoch, version, revision)
+ self.tasks = {}
+
+ def aggregate(self, bsrecipe):
+ """Aggregate data of another recipe buildstats"""
+ if self.nevr != bsrecipe.nevr:
+ raise ValueError("Refusing to aggregate buildstats, recipe version "
+ "differs: {} vs. {}".format(self.nevr, bsrecipe.nevr))
+ if set(self.tasks.keys()) != set(bsrecipe.tasks.keys()):
+ raise ValueError("Refusing to aggregate buildstats, set of tasks "
+ "in {} differ".format(self.name))
+
+ for taskname, taskdata in bsrecipe.tasks.items():
+ if not isinstance(self.tasks[taskname], BSTaskAggregate):
+ self.tasks[taskname] = BSTaskAggregate([self.tasks[taskname]])
+ self.tasks[taskname].append(taskdata)
+
+ @property
+ def nevr(self):
+ return self.name + '-' + self.evr
+
+
+class BuildStats(dict):
+ """Class representing buildstats of one build"""
+
+ @property
+ def num_tasks(self):
+ """Get number of tasks"""
+ num = 0
+ for recipe in self.values():
+ num += len(recipe.tasks)
+ return num
+
+ @classmethod
+ def from_json(cls, bs_json):
+ """Create new BuildStats object from JSON object"""
+ buildstats = cls()
+ for recipe in bs_json:
+ if recipe['name'] in buildstats:
+ raise BSError("Cannot handle multiple versions of the same "
+ "package ({})".format(recipe['name']))
+ bsrecipe = BSRecipe(recipe['name'], recipe['epoch'],
+ recipe['version'], recipe['revision'])
+ for task, data in recipe['tasks'].items():
+ bsrecipe.tasks[task] = BSTask(data)
+
+ buildstats[recipe['name']] = bsrecipe
+
+ return buildstats
+
+ @staticmethod
+ def from_file_json(path):
+ """Load buildstats from a JSON file"""
+ with open(path) as fobj:
+ bs_json = json.load(fobj)
+ return BuildStats.from_json(bs_json)
+
+
+ @staticmethod
+ def split_nevr(nevr):
+ """Split name and version information from recipe "nevr" string"""
+ n_e_v, revision = nevr.rsplit('-', 1)
+ match = re.match(r'^(?P<name>\S+)-((?P<epoch>[0-9]{1,5})_)?(?P<version>[0-9]\S*)$',
+ n_e_v)
+ if not match:
+ # If we're not able to parse a version starting with a number, just
+ # take the part after last dash
+ match = re.match(r'^(?P<name>\S+)-((?P<epoch>[0-9]{1,5})_)?(?P<version>[^-]+)$',
+ n_e_v)
+ name = match.group('name')
+ version = match.group('version')
+ epoch = match.group('epoch')
+ return name, epoch, version, revision
+
+ @classmethod
+ def from_dir(cls, path):
+ """Load buildstats from a buildstats directory"""
+ if not os.path.isfile(os.path.join(path, 'build_stats')):
+ raise BSError("{} does not look like a buildstats directory".format(path))
+
+ log.debug("Reading buildstats directory %s", path)
+
+ buildstats = cls()
+ subdirs = os.listdir(path)
+ for dirname in subdirs:
+ recipe_dir = os.path.join(path, dirname)
+ if not os.path.isdir(recipe_dir):
+ continue
+ name, epoch, version, revision = cls.split_nevr(dirname)
+ bsrecipe = BSRecipe(name, epoch, version, revision)
+ for task in os.listdir(recipe_dir):
+ bsrecipe.tasks[task] = BSTask.from_file(
+ os.path.join(recipe_dir, task))
+ if name in buildstats:
+ raise BSError("Cannot handle multiple versions of the same "
+ "package ({})".format(name))
+ buildstats[name] = bsrecipe
+
+ return buildstats
+
+ def aggregate(self, buildstats):
+ """Aggregate other buildstats into this"""
+ if set(self.keys()) != set(buildstats.keys()):
+ raise ValueError("Refusing to aggregate buildstats, set of "
+ "recipes is different")
+ for pkg, data in buildstats.items():
+ self[pkg].aggregate(data)
+
+
+def diff_buildstats(bs1, bs2, stat_attr, min_val=None, min_absdiff=None):
+ """Compare the tasks of two buildstats"""
+ tasks_diff = []
+ pkgs = set(bs1.keys()).union(set(bs2.keys()))
+ for pkg in pkgs:
+ tasks1 = bs1[pkg].tasks if pkg in bs1 else {}
+ tasks2 = bs2[pkg].tasks if pkg in bs2 else {}
+ if not tasks1:
+ pkg_op = '+'
+ elif not tasks2:
+ pkg_op = '-'
+ else:
+ pkg_op = ' '
+
+ for task in set(tasks1.keys()).union(set(tasks2.keys())):
+ task_op = ' '
+ if task in tasks1:
+ val1 = getattr(bs1[pkg].tasks[task], stat_attr)
+ else:
+ task_op = '+'
+ val1 = 0
+ if task in tasks2:
+ val2 = getattr(bs2[pkg].tasks[task], stat_attr)
+ else:
+ val2 = 0
+ task_op = '-'
+
+ if val1 == 0:
+ reldiff = float('inf')
+ else:
+ reldiff = 100 * (val2 - val1) / val1
+
+ if min_val and max(val1, val2) < min_val:
+ log.debug("Filtering out %s:%s (%s)", pkg, task,
+ max(val1, val2))
+ continue
+ if min_absdiff and abs(val2 - val1) < min_absdiff:
+ log.debug("Filtering out %s:%s (difference of %s)", pkg, task,
+ val2-val1)
+ continue
+ tasks_diff.append(TaskDiff(pkg, pkg_op, task, task_op, val1, val2,
+ val2-val1, reldiff))
+ return tasks_diff
+
+
+class BSVerDiff(object):
+ """Class representing recipe version differences between two buildstats"""
+ def __init__(self, bs1, bs2):
+ RecipeVerDiff = namedtuple('RecipeVerDiff', 'left right')
+
+ recipes1 = set(bs1.keys())
+ recipes2 = set(bs2.keys())
+
+ self.new = dict([(r, bs2[r]) for r in sorted(recipes2 - recipes1)])
+ self.dropped = dict([(r, bs1[r]) for r in sorted(recipes1 - recipes2)])
+ self.echanged = {}
+ self.vchanged = {}
+ self.rchanged = {}
+ self.unchanged = {}
+ self.empty_diff = False
+
+ common = recipes2.intersection(recipes1)
+ if common:
+ for recipe in common:
+ rdiff = RecipeVerDiff(bs1[recipe], bs2[recipe])
+ if bs1[recipe].epoch != bs2[recipe].epoch:
+ self.echanged[recipe] = rdiff
+ elif bs1[recipe].version != bs2[recipe].version:
+ self.vchanged[recipe] = rdiff
+ elif bs1[recipe].revision != bs2[recipe].revision:
+ self.rchanged[recipe] = rdiff
+ else:
+ self.unchanged[recipe] = rdiff
+
+ if len(recipes1) == len(recipes2) == len(self.unchanged):
+ self.empty_diff = True
+
+ def __bool__(self):
+ return not self.empty_diff
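Taken together, the new module exposes BuildStats, diff_buildstats() and BSVerDiff as a small library for comparing two builds. The snippet below is a usage sketch only, assuming scripts/lib is on sys.path and that the two buildstats directories (their names are invented) exist; it is not part of the patch.

# Compare per-task cputime and recipe versions between two buildstats trees.
import logging
logging.basicConfig()

from buildstats import BuildStats, BSVerDiff, diff_buildstats

bs1 = BuildStats.from_dir('buildstats/20170601-reference')
bs2 = BuildStats.from_dir('buildstats/20170602-candidate')

# Per-task cputime differences, skipping tasks under 5 s and changes under 0.5 s
diffs = diff_buildstats(bs1, bs2, 'cputime', min_val=5, min_absdiff=0.5)
for d in sorted(diffs, key=lambda x: abs(x.absdiff), reverse=True)[:10]:
    print("{}{:40} {:>+8.1f} s ({:.1f} %)".format(
        d.task_op, d.pkg + ':' + d.task, d.absdiff, d.reldiff))

ver_diff = BSVerDiff(bs1, bs2)
if ver_diff:
    print("recipes: {} new, {} dropped, {} version-changed".format(
        len(ver_diff.new), len(ver_diff.dropped), len(ver_diff.vchanged)))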
diff --git a/import-layers/yocto-poky/scripts/lib/compatlayer/__init__.py b/import-layers/yocto-poky/scripts/lib/checklayer/__init__.py
index 7197e850e..63952616b 100644
--- a/import-layers/yocto-poky/scripts/lib/compatlayer/__init__.py
+++ b/import-layers/yocto-poky/scripts/lib/checklayer/__init__.py
@@ -1,4 +1,4 @@
-# Yocto Project compatibility layer tool
+# Yocto Project layer check tool
#
# Copyright (C) 2017 Intel Corporation
# Released under the MIT license (see COPYING.MIT)
@@ -46,16 +46,16 @@ def _get_layer_collections(layer_path, lconf=None, data=None):
raise LayerError(exc)
ldata.expandVarref('LAYERDIR')
- collections = (ldata.getVar('BBFILE_COLLECTIONS', True) or '').split()
+ collections = (ldata.getVar('BBFILE_COLLECTIONS') or '').split()
if not collections:
name = os.path.basename(layer_path)
collections = [name]
collections = {c: {} for c in collections}
for name in collections:
- priority = ldata.getVar('BBFILE_PRIORITY_%s' % name, True)
- pattern = ldata.getVar('BBFILE_PATTERN_%s' % name, True)
- depends = ldata.getVar('LAYERDEPENDS_%s' % name, True)
+ priority = ldata.getVar('BBFILE_PRIORITY_%s' % name)
+ pattern = ldata.getVar('BBFILE_PATTERN_%s' % name)
+ depends = ldata.getVar('LAYERDEPENDS_%s' % name)
collections[name]['priority'] = priority
collections[name]['pattern'] = pattern
collections[name]['depends'] = depends
diff --git a/import-layers/yocto-poky/scripts/lib/compatlayer/case.py b/import-layers/yocto-poky/scripts/lib/checklayer/case.py
index 54ce78aa6..9dd00412e 100644
--- a/import-layers/yocto-poky/scripts/lib/compatlayer/case.py
+++ b/import-layers/yocto-poky/scripts/lib/checklayer/case.py
@@ -3,5 +3,5 @@
from oeqa.core.case import OETestCase
-class OECompatLayerTestCase(OETestCase):
+class OECheckLayerTestCase(OETestCase):
pass
diff --git a/import-layers/yocto-poky/scripts/lib/compatlayer/cases/__init__.py b/import-layers/yocto-poky/scripts/lib/checklayer/cases/__init__.py
index e69de29bb..e69de29bb 100644
--- a/import-layers/yocto-poky/scripts/lib/compatlayer/cases/__init__.py
+++ b/import-layers/yocto-poky/scripts/lib/checklayer/cases/__init__.py
diff --git a/import-layers/yocto-poky/scripts/lib/compatlayer/cases/bsp.py b/import-layers/yocto-poky/scripts/lib/checklayer/cases/bsp.py
index 43efae406..b6b611be7 100644
--- a/import-layers/yocto-poky/scripts/lib/compatlayer/cases/bsp.py
+++ b/import-layers/yocto-poky/scripts/lib/checklayer/cases/bsp.py
@@ -3,14 +3,14 @@
import unittest
-from compatlayer import LayerType, get_signatures, check_command, get_depgraph
-from compatlayer.case import OECompatLayerTestCase
+from checklayer import LayerType, get_signatures, check_command, get_depgraph
+from checklayer.case import OECheckLayerTestCase
-class BSPCompatLayer(OECompatLayerTestCase):
+class BSPCheckLayer(OECheckLayerTestCase):
@classmethod
def setUpClass(self):
if self.tc.layer['type'] != LayerType.BSP:
- raise unittest.SkipTest("BSPCompatLayer: Layer %s isn't BSP one." %\
+ raise unittest.SkipTest("BSPCheckLayer: Layer %s isn't BSP one." %\
self.tc.layer['name'])
def test_bsp_defines_machines(self):
@@ -62,7 +62,7 @@ class BSPCompatLayer(OECompatLayerTestCase):
This criteria can only be tested by testing different machines in combination,
i.e. one main layer, potentially several additional BSP layers and an explicit
choice of machines:
- yocto-compat-layer --additional-layers .../meta-intel --machines intel-corei7-64 imx6slevk -- .../meta-freescale
+ yocto-check-layer --additional-layers .../meta-intel --machines intel-corei7-64 imx6slevk -- .../meta-freescale
'''
if not self.td['machines']:
diff --git a/import-layers/yocto-poky/scripts/lib/compatlayer/cases/common.py b/import-layers/yocto-poky/scripts/lib/checklayer/cases/common.py
index 55e8ba4c5..a13c1088f 100644
--- a/import-layers/yocto-poky/scripts/lib/compatlayer/cases/common.py
+++ b/import-layers/yocto-poky/scripts/lib/checklayer/cases/common.py
@@ -4,10 +4,10 @@
import glob
import os
import unittest
-from compatlayer import get_signatures, LayerType, check_command, get_depgraph, compare_signatures
-from compatlayer.case import OECompatLayerTestCase
+from checklayer import get_signatures, LayerType, check_command, get_depgraph, compare_signatures
+from checklayer.case import OECheckLayerTestCase
-class CommonCompatLayer(OECompatLayerTestCase):
+class CommonCheckLayer(OECheckLayerTestCase):
def test_readme(self):
# The top-level README file may have a suffix (like README.rst or README.txt).
readme_files = glob.glob(os.path.join(self.tc.layer['path'], 'README*'))
diff --git a/import-layers/yocto-poky/scripts/lib/compatlayer/cases/distro.py b/import-layers/yocto-poky/scripts/lib/checklayer/cases/distro.py
index 523acc1e7..df1b3035e 100644
--- a/import-layers/yocto-poky/scripts/lib/compatlayer/cases/distro.py
+++ b/import-layers/yocto-poky/scripts/lib/checklayer/cases/distro.py
@@ -3,14 +3,14 @@
import unittest
-from compatlayer import LayerType
-from compatlayer.case import OECompatLayerTestCase
+from checklayer import LayerType
+from checklayer.case import OECheckLayerTestCase
-class DistroCompatLayer(OECompatLayerTestCase):
+class DistroCheckLayer(OECheckLayerTestCase):
@classmethod
def setUpClass(self):
if self.tc.layer['type'] != LayerType.DISTRO:
- raise unittest.SkipTest("DistroCompatLayer: Layer %s isn't Distro one." %\
+ raise unittest.SkipTest("DistroCheckLayer: Layer %s isn't Distro one." %\
self.tc.layer['name'])
def test_distro_defines_distros(self):
diff --git a/import-layers/yocto-poky/scripts/lib/compatlayer/context.py b/import-layers/yocto-poky/scripts/lib/checklayer/context.py
index 7811d4ac2..1bec2c410 100644
--- a/import-layers/yocto-poky/scripts/lib/compatlayer/context.py
+++ b/import-layers/yocto-poky/scripts/lib/checklayer/context.py
@@ -8,8 +8,8 @@ import re
from oeqa.core.context import OETestContext
-class CompatLayerTestContext(OETestContext):
+class CheckLayerTestContext(OETestContext):
def __init__(self, td=None, logger=None, layer=None, test_software_layer_signatures=True):
- super(CompatLayerTestContext, self).__init__(td, logger)
+ super(CheckLayerTestContext, self).__init__(td, logger)
self.layer = layer
self.test_software_layer_signatures = test_software_layer_signatures
diff --git a/import-layers/yocto-poky/scripts/lib/devtool/__init__.py b/import-layers/yocto-poky/scripts/lib/devtool/__init__.py
index d646b0cf6..94e3d7d4b 100644
--- a/import-layers/yocto-poky/scripts/lib/devtool/__init__.py
+++ b/import-layers/yocto-poky/scripts/lib/devtool/__init__.py
@@ -115,8 +115,8 @@ def setup_tinfoil(config_only=False, basepath=None, tracking=False):
import bb.tinfoil
tinfoil = bb.tinfoil.Tinfoil(tracking=tracking)
try:
- tinfoil.prepare(config_only)
tinfoil.logger.setLevel(logger.getEffectiveLevel())
+ tinfoil.prepare(config_only)
except bb.tinfoil.TinfoilUIException:
tinfoil.shutdown()
raise DevtoolError('Failed to start bitbake environment')
@@ -191,7 +191,7 @@ def use_external_build(same_dir, no_same_dir, d):
logger.info('Using source tree as build directory since --same-dir specified')
elif bb.data.inherits_class('autotools-brokensep', d):
logger.info('Using source tree as build directory since recipe inherits autotools-brokensep')
- elif d.getVar('B') == os.path.abspath(d.getVar('S')):
+ elif os.path.abspath(d.getVar('B')) == os.path.abspath(d.getVar('S')):
logger.info('Using source tree as build directory since that would be the default for this recipe')
else:
b_is_s = False
@@ -261,34 +261,79 @@ def get_bbclassextend_targets(recipefile, pn):
targets.append('%s-%s' % (pn, variant))
return targets
-def ensure_npm(config, basepath, fixed_setup=False, check_exists=True):
- """
- Ensure that npm is available and either build it or show a
- reasonable error message
- """
- if check_exists:
- tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
- try:
- rd = tinfoil.parse_recipe('nodejs-native')
- nativepath = rd.getVar('STAGING_BINDIR_NATIVE')
- finally:
- tinfoil.shutdown()
- npmpath = os.path.join(nativepath, 'npm')
- build_npm = not os.path.exists(npmpath)
- else:
- build_npm = True
+def replace_from_file(path, old, new):
+    """Replace strings in a file"""
+
+ def read_file(path):
+ data = None
+ with open(path) as f:
+ data = f.read()
+ return data
+
+ def write_file(path, data):
+ if data is None:
+ return
+ wdata = data.rstrip() + "\n"
+ with open(path, "w") as f:
+ f.write(wdata)
+
+ # In case old is None, return immediately
+ if old is None:
+ return
+ try:
+ rdata = read_file(path)
+ except IOError as e:
+        # if the file does not exist, just quit; otherwise raise an exception
+ if e.errno == errno.ENOENT:
+ return
+ else:
+ raise
- if build_npm:
- logger.info('Building nodejs-native')
+ old_contents = rdata.splitlines()
+ new_contents = []
+ for old_content in old_contents:
try:
- exec_build_env_command(config.init_path, basepath,
- 'bitbake -q nodejs-native -c addto_recipe_sysroot', watch=True)
- except bb.process.ExecutionError as e:
- if "Nothing PROVIDES 'nodejs-native'" in e.stdout:
- if fixed_setup:
- msg = 'nodejs-native is required for npm but is not available within this SDK'
- else:
- msg = 'nodejs-native is required for npm but is not available - you will likely need to add a layer that provides nodejs'
- raise DevtoolError(msg)
- else:
- raise
+ new_contents.append(old_content.replace(old, new))
+ except ValueError:
+ pass
+ write_file(path, "\n".join(new_contents))
+
+
+def update_unlockedsigs(basepath, workspace, fixed_setup, extra=None):
+ """ This function will make unlocked-sigs.inc match the recipes in the
+ workspace plus any extras we want unlocked. """
+
+ if not fixed_setup:
+ # Only need to write this out within the eSDK
+ return
+
+ if not extra:
+ extra = []
+
+ confdir = os.path.join(basepath, 'conf')
+ unlockedsigs = os.path.join(confdir, 'unlocked-sigs.inc')
+
+ # Get current unlocked list if any
+ values = {}
+ def get_unlockedsigs_varfunc(varname, origvalue, op, newlines):
+ values[varname] = origvalue
+ return origvalue, None, 0, True
+ if os.path.exists(unlockedsigs):
+ with open(unlockedsigs, 'r') as f:
+ bb.utils.edit_metadata(f, ['SIGGEN_UNLOCKED_RECIPES'], get_unlockedsigs_varfunc)
+ unlocked = sorted(values.get('SIGGEN_UNLOCKED_RECIPES', []))
+
+ # If the new list is different to the current list, write it out
+ newunlocked = sorted(list(workspace.keys()) + extra)
+ if unlocked != newunlocked:
+ bb.utils.mkdirhier(confdir)
+ with open(unlockedsigs, 'w') as f:
+ f.write("# DO NOT MODIFY! YOUR CHANGES WILL BE LOST.\n" +
+ "# This layer was created by the OpenEmbedded devtool" +
+ " utility in order to\n" +
+ "# contain recipes that are unlocked.\n")
+
+ f.write('SIGGEN_UNLOCKED_RECIPES += "\\\n')
+ for pn in newunlocked:
+ f.write(' ' + pn)
+ f.write('"')
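replace_from_file() and update_unlockedsigs() are consumed by the new export/import plugins further down. As an illustration only, the standalone sketch below approximates what replace_from_file() is used for: rewriting workspace paths stored in a .bbappend after import. The file name and both paths are hypothetical.

# Standalone approximation of the path rewrite done via replace_from_file().
import os
import tempfile

append = os.path.join(tempfile.mkdtemp(), 'example_1.0.bbappend')
with open(append, 'w') as f:
    f.write('EXTERNALSRC_pn-example = "/old/workspace/sources/example"\n')

old_path, new_path = '/old/workspace', '/home/builder/new-workspace'

with open(append) as f:                      # read current contents
    data = f.read()
with open(append, 'w') as f:                 # write back with the path swapped
    f.write(data.replace(old_path, new_path).rstrip() + '\n')

with open(append) as f:
    print(f.read())                          # EXTERNALSRC now points at new_path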
diff --git a/import-layers/yocto-poky/scripts/lib/devtool/deploy.py b/import-layers/yocto-poky/scripts/lib/devtool/deploy.py
index b3730ae83..9cc492788 100644
--- a/import-layers/yocto-poky/scripts/lib/devtool/deploy.py
+++ b/import-layers/yocto-poky/scripts/lib/devtool/deploy.py
@@ -16,12 +16,16 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Devtool plugin containing the deploy subcommands"""
+import logging
import os
+import shutil
import subprocess
-import logging
import tempfile
-import shutil
+
+import bb.utils
import argparse_oe
+import oe.types
+
from devtool import exec_fakeroot, setup_tinfoil, check_workspace_recipe, DevtoolError
logger = logging.getLogger('devtool')
@@ -64,7 +68,7 @@ def _prepare_remote_script(deploy, verbose=False, dryrun=False, undeployall=Fals
lines.append(' rmdir $file > /dev/null 2>&1 || true')
lines.append(' fi')
lines.append(' else')
- lines.append(' rm $file')
+ lines.append(' rm -f $file')
lines.append(' fi')
lines.append(' done')
if not dryrun:
@@ -119,7 +123,11 @@ def _prepare_remote_script(deploy, verbose=False, dryrun=False, undeployall=Fals
# Put any preserved files back
lines.append('if [ -d $preservedir ] ; then')
lines.append(' cd $preservedir')
- lines.append(' find . -type f -exec mv {} /{} \;')
+ # find from busybox might not have -exec, so we don't use that
+ lines.append(' find . -type f | while read file')
+ lines.append(' do')
+ lines.append(' mv $file /$file')
+ lines.append(' done')
lines.append(' cd /')
lines.append(' rm -rf $preservedir')
lines.append('fi')
@@ -136,11 +144,12 @@ def _prepare_remote_script(deploy, verbose=False, dryrun=False, undeployall=Fals
return '\n'.join(lines)
+
def deploy(args, config, basepath, workspace):
"""Entry point for the devtool 'deploy' subcommand"""
- import re
import math
import oe.recipeutils
+ import oe.package
check_workspace_recipe(workspace, args.recipename, checksrc=False)
@@ -166,6 +175,17 @@ def deploy(args, config, basepath, workspace):
'recipe? If so, the install step has not installed '
'any files.' % args.recipename)
+ if args.strip and not args.dry_run:
+ # Fakeroot copy to new destination
+ srcdir = recipe_outdir
+ recipe_outdir = os.path.join(rd.getVar('WORKDIR'), 'deploy-target-stripped')
+ if os.path.isdir(recipe_outdir):
+ bb.utils.remove(recipe_outdir, True)
+ exec_fakeroot(rd, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True)
+ os.environ['PATH'] = ':'.join([os.environ['PATH'], rd.getVar('PATH') or ''])
+ oe.package.strip_execs(args.recipename, recipe_outdir, rd.getVar('STRIP'), rd.getVar('libdir'),
+ rd.getVar('base_libdir'))
+
filelist = []
ftotalsize = 0
for root, _, files in os.walk(recipe_outdir):
@@ -185,7 +205,6 @@ def deploy(args, config, basepath, workspace):
print(' %s' % item)
return 0
-
extraoptions = ''
if args.no_host_check:
extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
@@ -297,6 +316,7 @@ def undeploy(args, config, basepath, workspace):
def register_commands(subparsers, context):
"""Register devtool subcommands from the deploy plugin"""
+
parser_deploy = subparsers.add_parser('deploy-target',
help='Deploy recipe output files to live target machine',
description='Deploys a recipe\'s build output (i.e. the output of the do_install task) to a live target machine over ssh. By default, any existing files will be preserved instead of being overwritten and will be restored if you run devtool undeploy-target. Note: this only deploys the recipe itself and not any runtime dependencies, so it is assumed that those have been installed on the target beforehand.',
@@ -309,6 +329,15 @@ def register_commands(subparsers, context):
parser_deploy.add_argument('-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
parser_deploy.add_argument('--no-check-space', help='Do not check for available space before deploying', action='store_true')
parser_deploy.add_argument('-P', '--port', default='22', help='Port to use for connection to the target')
+
+ strip_opts = parser_deploy.add_mutually_exclusive_group(required=False)
+ strip_opts.add_argument('-S', '--strip',
+ help='Strip executables prior to deploying (default: %(default)s). '
+ 'The default value of this option can be controlled by setting the strip option in the [Deploy] section to True or False.',
+ default=oe.types.boolean(context.config.get('Deploy', 'strip', default='0')),
+ action='store_true')
+ strip_opts.add_argument('--no-strip', help='Do not strip executables prior to deploy', dest='strip', action='store_false')
+
parser_deploy.set_defaults(func=deploy)
parser_undeploy = subparsers.add_parser('undeploy-target',
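With --strip, deploy-target first copies the do_install output under fakeroot and strips executables in the copy before computing sizes and transferring files. The following is a simplified, self-contained sketch of that idea; the directory names and the cross-strip command are hypothetical stand-ins for what the real code reads from the recipe datastore (WORKDIR, STRIP) and delegates to oe.package.strip_execs().

# Sketch: copy the install output and strip ELF binaries in the copy.
import os
import shutil
import subprocess

def strip_copy(install_dir, workdir, strip_cmd='x86_64-poky-linux-strip'):
    """Copy install_dir to <workdir>/deploy-target-stripped and strip ELF files."""
    stripped = os.path.join(workdir, 'deploy-target-stripped')
    if os.path.isdir(stripped):
        shutil.rmtree(stripped)
    shutil.copytree(install_dir, stripped, symlinks=True)
    for root, _, files in os.walk(stripped):
        for fname in files:
            path = os.path.join(root, fname)
            if os.path.islink(path) or not os.path.isfile(path):
                continue
            with open(path, 'rb') as f:
                if f.read(4) != b'\x7fELF':   # only strip ELF binaries
                    continue
            subprocess.call([strip_cmd, path])
    return stripped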
diff --git a/import-layers/yocto-poky/scripts/lib/devtool/export.py b/import-layers/yocto-poky/scripts/lib/devtool/export.py
new file mode 100644
index 000000000..13ee258e7
--- /dev/null
+++ b/import-layers/yocto-poky/scripts/lib/devtool/export.py
@@ -0,0 +1,119 @@
+# Development tool - export command plugin
+#
+# Copyright (C) 2014-2017 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+"""Devtool export plugin"""
+
+import os
+import argparse
+import tarfile
+import logging
+import datetime
+import json
+
+logger = logging.getLogger('devtool')
+
+# output files
+default_arcname_prefix = "workspace-export"
+metadata = '.export_metadata'
+
+def export(args, config, basepath, workspace):
+ """Entry point for the devtool 'export' subcommand"""
+
+ def add_metadata(tar):
+ """Archive the workspace object"""
+ # finally store the workspace metadata
+ with open(metadata, 'w') as fd:
+ fd.write(json.dumps((config.workspace_path, workspace)))
+ tar.add(metadata)
+ os.unlink(metadata)
+
+ def add_recipe(tar, recipe, data):
+ """Archive recipe with proper arcname"""
+ # Create a map of name/arcnames
+ arcnames = []
+ for key, name in data.items():
+ if name:
+ if key == 'srctree':
+                    # all sources, no matter where they are located, go into the sources directory
+ arcname = 'sources/%s' % recipe
+ else:
+ arcname = name.replace(config.workspace_path, '')
+ arcnames.append((name, arcname))
+
+ for name, arcname in arcnames:
+ tar.add(name, arcname=arcname)
+
+
+    # Make sure the workspace is non-empty and that any recipes listed via include/exclude are in the workspace
+ if not workspace:
+ logger.info('Workspace contains no recipes, nothing to export')
+ return 0
+ else:
+ for param, recipes in {'include':args.include,'exclude':args.exclude}.items():
+ for recipe in recipes:
+ if recipe not in workspace:
+                    logger.error('Recipe (%s) given in the %s argument is not in the current workspace' % (recipe, param))
+ return 1
+
+ name = args.file
+
+ default_name = "%s-%s.tar.gz" % (default_arcname_prefix, datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
+ if not name:
+ name = default_name
+ else:
+ # if name is a directory, append the default name
+ if os.path.isdir(name):
+ name = os.path.join(name, default_name)
+
+ if os.path.exists(name) and not args.overwrite:
+        logger.error('Tar archive %s exists. Use --overwrite/-o to overwrite it' % name)
+ return 1
+
+ # if all workspace is excluded, quit
+ if not len(set(workspace.keys()).difference(set(args.exclude))):
+ logger.warn('All recipes in workspace excluded, nothing to export')
+ return 0
+
+ exported = []
+ with tarfile.open(name, 'w:gz') as tar:
+ if args.include:
+ for recipe in args.include:
+ add_recipe(tar, recipe, workspace[recipe])
+ exported.append(recipe)
+ else:
+ for recipe, data in workspace.items():
+ if recipe not in args.exclude:
+ add_recipe(tar, recipe, data)
+ exported.append(recipe)
+
+ add_metadata(tar)
+
+ logger.info('Tar archive created at %s with the following recipes: %s' % (name, ', '.join(exported)))
+ return 0
+
+def register_commands(subparsers, context):
+ """Register devtool export subcommands"""
+ parser = subparsers.add_parser('export',
+ help='Export workspace into a tar archive',
+ description='Export one or more recipes from current workspace into a tar archive',
+ group='advanced')
+
+ parser.add_argument('--file', '-f', help='Output archive file name')
+ parser.add_argument('--overwrite', '-o', action="store_true", help='Overwrite previous export tar archive')
+ group = parser.add_mutually_exclusive_group()
+    group.add_argument('--include', '-i', nargs='+', default=[], help='Include recipes in the tar archive')
+    group.add_argument('--exclude', '-e', nargs='+', default=[], help='Exclude recipes from the tar archive')
+ parser.set_defaults(func=export)
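For reference, the archive written by export() groups entries as appends/, recipes/ and sources/<recipe>, plus a .export_metadata member holding the workspace path and the workspace dictionary as JSON. A small reader sketch follows; the archive name is invented.

# Reader sketch for the archive layout produced by 'devtool export':
#   .export_metadata                  JSON pair: [workspace_path, workspace dict]
#   appends/<recipe>_<ver>.bbappend   copied from <workspace>/appends/
#   recipes/<recipe>/...              copied from <workspace>/recipes/
#   sources/<recipe>/...              the srctree, wherever it actually lives
import json
import tarfile

with tarfile.open('workspace-export-20170701120000.tar.gz') as tar:
    meta = tar.extractfile('.export_metadata')
    workspace_path, workspace = json.loads(meta.read().decode('utf-8'))
    print(workspace_path, sorted(workspace.keys()))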
diff --git a/import-layers/yocto-poky/scripts/lib/devtool/import.py b/import-layers/yocto-poky/scripts/lib/devtool/import.py
new file mode 100644
index 000000000..c13a180d1
--- /dev/null
+++ b/import-layers/yocto-poky/scripts/lib/devtool/import.py
@@ -0,0 +1,144 @@
+# Development tool - import command plugin
+#
+# Copyright (C) 2014-2017 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+"""Devtool import plugin"""
+
+import os
+import tarfile
+import logging
+import collections
+import json
+import fnmatch
+
+from devtool import standard, setup_tinfoil, replace_from_file, DevtoolError
+from devtool import export
+
+logger = logging.getLogger('devtool')
+
+def devimport(args, config, basepath, workspace):
+ """Entry point for the devtool 'import' subcommand"""
+
+ def get_pn(name):
+ """ Returns the filename of a workspace recipe/append"""
+ metadata = name.split('/')[-1]
+ fn, _ = os.path.splitext(metadata)
+ return fn
+
+ if not os.path.exists(args.file):
+ raise DevtoolError('Tar archive %s does not exist. Export your workspace using "devtool export"' % args.file)
+
+ with tarfile.open(args.file) as tar:
+ # Get exported metadata
+ export_workspace_path = export_workspace = None
+ try:
+ metadata = tar.getmember(export.metadata)
+ except KeyError as ke:
+ raise DevtoolError('The export metadata file created by "devtool export" was not found. "devtool import" can only be used to import tar archives created by "devtool export".')
+
+ tar.extract(metadata)
+ with open(metadata.name) as fdm:
+ export_workspace_path, export_workspace = json.load(fdm)
+ os.unlink(metadata.name)
+
+ members = tar.getmembers()
+
+        # Get appends and recipes from the exported archive; these are
+        # needed to find appends that have no corresponding recipe in
+        # the current metadata
+ append_fns, recipe_fns = set(), set()
+ for member in members:
+ if member.name.startswith('appends'):
+ append_fns.add(get_pn(member.name))
+ elif member.name.startswith('recipes'):
+ recipe_fns.add(get_pn(member.name))
+
+ # Setup tinfoil, get required data and shutdown
+ tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
+ try:
+ current_fns = [os.path.basename(recipe[0]) for recipe in tinfoil.cooker.recipecaches[''].pkg_fn.items()]
+ finally:
+ tinfoil.shutdown()
+
+ # Find those appends that do not have recipes in current metadata
+ non_importables = []
+ for fn in append_fns - recipe_fns:
+ # Check on current metadata (covering those layers indicated in bblayers.conf)
+ for current_fn in current_fns:
+ if fnmatch.fnmatch(current_fn, '*' + fn.replace('%', '') + '*'):
+ break
+ else:
+ non_importables.append(fn)
+            logger.warn('No recipe to append %s.bbappend to, skipping' % fn)
+
+ # Extract
+ imported = []
+ for member in members:
+ if member.name == export.metadata:
+ continue
+
+ for nonimp in non_importables:
+ pn = nonimp.split('_')[0]
+ # do not extract data from non-importable recipes or metadata
+ if member.name.startswith('appends/%s' % nonimp) or \
+ member.name.startswith('recipes/%s' % nonimp) or \
+ member.name.startswith('sources/%s' % pn):
+ break
+ else:
+ path = os.path.join(config.workspace_path, member.name)
+ if os.path.exists(path):
+ # by default, no file overwrite is done unless -o is given by the user
+ if args.overwrite:
+ try:
+ tar.extract(member, path=config.workspace_path)
+ except PermissionError as pe:
+ logger.warn(pe)
+ else:
+ logger.warn('File already present. Use --overwrite/-o to overwrite it: %s' % member.name)
+ continue
+ else:
+ tar.extract(member, path=config.workspace_path)
+
+ # Update EXTERNALSRC and the devtool md5 file
+ if member.name.startswith('appends'):
+ if export_workspace_path:
+ # appends created by 'devtool modify' just need to update the workspace
+ replace_from_file(path, export_workspace_path, config.workspace_path)
+
+ # appends created by 'devtool add' need replacement of exported source tree
+ pn = get_pn(member.name).split('_')[0]
+ exported_srctree = export_workspace[pn]['srctree']
+ if exported_srctree:
+ replace_from_file(path, exported_srctree, os.path.join(config.workspace_path, 'sources', pn))
+
+ standard._add_md5(config, pn, path)
+ imported.append(pn)
+
+ if imported:
+ logger.info('Imported recipes into workspace %s: %s' % (config.workspace_path, ', '.join(imported)))
+ else:
+ logger.warn('No recipes imported into the workspace')
+
+ return 0
+
+def register_commands(subparsers, context):
+ """Register devtool import subcommands"""
+ parser = subparsers.add_parser('import',
+ help='Import exported tar archive into workspace',
+ description='Import tar archive previously created by "devtool export" into workspace',
+ group='advanced')
+ parser.add_argument('file', metavar='FILE', help='Name of the tar archive to import')
+ parser.add_argument('--overwrite', '-o', action="store_true", help='Overwrite files when extracting')
+ parser.set_defaults(func=devimport)
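The import side relies on an fnmatch test to drop appends whose base recipe is absent from the current metadata. A standalone sketch of that check, with made-up file names:

# Decide which exported appends have no recipe to attach to.
import fnmatch

current_fns = ['example_1.0.bb', 'zlib_1.2.11.bb']   # recipes known to tinfoil
append_fns = {'example_%', 'orphan_2.0'}             # stems of exported appends
recipe_fns = {'example_%'}                           # stems of exported recipes

non_importables = []
for fn in append_fns - recipe_fns:
    for current_fn in current_fns:
        if fnmatch.fnmatch(current_fn, '*' + fn.replace('%', '') + '*'):
            break
    else:
        non_importables.append(fn)

print(non_importables)   # ['orphan_2.0'] -> this append would be skipped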
diff --git a/import-layers/yocto-poky/scripts/lib/devtool/sdk.py b/import-layers/yocto-poky/scripts/lib/devtool/sdk.py
index e8bf0ad98..f46577c2a 100644
--- a/import-layers/yocto-poky/scripts/lib/devtool/sdk.py
+++ b/import-layers/yocto-poky/scripts/lib/devtool/sdk.py
@@ -155,7 +155,7 @@ def sdk_update(args, config, basepath, workspace):
if os.path.exists(os.path.join(basepath, 'layers/.git')):
out = subprocess.check_output("git status --porcelain", shell=True, cwd=layers_dir)
if not out:
- ret = subprocess.call("git fetch --all; git reset --hard", shell=True, cwd=layers_dir)
+ ret = subprocess.call("git fetch --all; git reset --hard @{u}", shell=True, cwd=layers_dir)
else:
logger.error("Failed to update metadata as there have been changes made to it. Aborting.");
logger.error("Changed files:\n%s" % out);
diff --git a/import-layers/yocto-poky/scripts/lib/devtool/standard.py b/import-layers/yocto-poky/scripts/lib/devtool/standard.py
index 5ff1e230f..beea0d4c2 100644
--- a/import-layers/yocto-poky/scripts/lib/devtool/standard.py
+++ b/import-layers/yocto-poky/scripts/lib/devtool/standard.py
@@ -1,6 +1,6 @@
# Development tool - standard commands plugin
#
-# Copyright (C) 2014-2016 Intel Corporation
+# Copyright (C) 2014-2017 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -30,7 +30,7 @@ import errno
import glob
import filecmp
from collections import OrderedDict
-from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, use_external_build, setup_git_repo, recipe_to_append, get_bbclassextend_targets, ensure_npm, DevtoolError
+from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, use_external_build, setup_git_repo, recipe_to_append, get_bbclassextend_targets, update_unlockedsigs, DevtoolError
from devtool import parse_recipe
logger = logging.getLogger('devtool')
@@ -66,6 +66,12 @@ def add(args, config, basepath, workspace):
elif os.path.isdir(args.recipename):
logger.warn('Ambiguous argument "%s" - assuming you mean it to be the recipe name' % args.recipename)
+ if not args.fetchuri:
+ if args.srcrev:
+ raise DevtoolError('The -S/--srcrev option is only valid when fetching from an SCM repository')
+ if args.srcbranch:
+ raise DevtoolError('The -B/--srcbranch option is only valid when fetching from an SCM repository')
+
if args.srctree and os.path.isfile(args.srctree):
args.fetchuri = 'file://' + os.path.abspath(args.srctree)
args.srctree = ''
@@ -128,9 +134,6 @@ def add(args, config, basepath, workspace):
color = args.color
extracmdopts = ''
if args.fetchuri:
- if args.fetchuri.startswith('npm://'):
- ensure_npm(config, basepath, args.fixed_setup)
-
source = args.fetchuri
if srctree:
extracmdopts += ' -x %s' % srctree
@@ -152,31 +155,24 @@ def add(args, config, basepath, workspace):
extracmdopts += ' -a'
if args.fetch_dev:
extracmdopts += ' --fetch-dev'
+ if args.mirrors:
+ extracmdopts += ' --mirrors'
+ if args.srcrev:
+ extracmdopts += ' --srcrev %s' % args.srcrev
+ if args.srcbranch:
+ extracmdopts += ' --srcbranch %s' % args.srcbranch
+ if args.provides:
+ extracmdopts += ' --provides %s' % args.provides
tempdir = tempfile.mkdtemp(prefix='devtool')
try:
- builtnpm = False
- while True:
- try:
- stdout, _ = exec_build_env_command(config.init_path, basepath, 'recipetool --color=%s create --devtool -o %s \'%s\' %s' % (color, tempdir, source, extracmdopts), watch=True)
- except bb.process.ExecutionError as e:
- if e.exitcode == 14:
- if builtnpm:
- raise DevtoolError('Re-running recipetool still failed to find npm')
- # FIXME this is a horrible hack that is unfortunately
- # necessary due to the fact that we can't run bitbake from
- # inside recipetool since recipetool keeps tinfoil active
- # with references to it throughout the code, so we have
- # to exit out and come back here to do it.
- ensure_npm(config, basepath, args.fixed_setup, check_exists=False)
- logger.info('Re-running recipe creation process after building nodejs')
- builtnpm = True
- continue
- elif e.exitcode == 15:
- raise DevtoolError('Could not auto-determine recipe name, please specify it on the command line')
- else:
- raise DevtoolError('Command \'%s\' failed' % e.command)
- break
+ try:
+ stdout, _ = exec_build_env_command(config.init_path, basepath, 'recipetool --color=%s create --devtool -o %s \'%s\' %s' % (color, tempdir, source, extracmdopts), watch=True)
+ except bb.process.ExecutionError as e:
+ if e.exitcode == 15:
+ raise DevtoolError('Could not auto-determine recipe name, please specify it on the command line')
+ else:
+ raise DevtoolError('Command \'%s\' failed' % e.command)
recipes = glob.glob(os.path.join(tempdir, '*.bb'))
if recipes:
@@ -282,6 +278,24 @@ def add(args, config, basepath, workspace):
f.write(' done\n')
f.write('}\n')
+ # Check if the new layer provides recipes whose priorities have been
+    # overridden by PREFERRED_PROVIDER.
+ recipe_name = rd.getVar('PN')
+ provides = rd.getVar('PROVIDES')
+ # Search every item defined in PROVIDES
+ for recipe_provided in provides.split():
+ preferred_provider = 'PREFERRED_PROVIDER_' + recipe_provided
+ current_pprovider = rd.getVar(preferred_provider)
+ if current_pprovider and current_pprovider != recipe_name:
+ if args.fixed_setup:
+                # if we are inside the eSDK, add the new PREFERRED_PROVIDER to the workspace layer.conf
+ layerconf_file = os.path.join(config.workspace_path, "conf", "layer.conf")
+ with open(layerconf_file, 'a') as f:
+ f.write('%s = "%s"\n' % (preferred_provider, recipe_name))
+ else:
+ logger.warn('Set \'%s\' in order to use the recipe' % preferred_provider)
+ break
+
_add_md5(config, recipename, appendfile)
logger.info('Recipe %s has been automatically created; further editing may be required to make it fully functional' % recipefile)
@@ -383,7 +397,7 @@ def extract(args, config, basepath, workspace):
"""Entry point for the devtool 'extract' subcommand"""
import bb
- tinfoil = _prep_extract_operation(config, basepath, args.recipename)
+ tinfoil = setup_tinfoil(basepath=basepath)
if not tinfoil:
# Error already shown
return 1
@@ -393,7 +407,7 @@ def extract(args, config, basepath, workspace):
return 1
srctree = os.path.abspath(args.srctree)
- initial_rev = _extract_source(srctree, args.keep_temp, args.branch, False, rd, tinfoil)
+ initial_rev = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil)
logger.info('Source tree extracted to %s' % srctree)
if initial_rev:
@@ -407,7 +421,7 @@ def sync(args, config, basepath, workspace):
"""Entry point for the devtool 'sync' subcommand"""
import bb
- tinfoil = _prep_extract_operation(config, basepath, args.recipename)
+ tinfoil = setup_tinfoil(basepath=basepath)
if not tinfoil:
# Error already shown
return 1
@@ -417,7 +431,7 @@ def sync(args, config, basepath, workspace):
return 1
srctree = os.path.abspath(args.srctree)
- initial_rev = _extract_source(srctree, args.keep_temp, args.branch, True, rd, tinfoil)
+ initial_rev = _extract_source(srctree, args.keep_temp, args.branch, True, config, basepath, workspace, args.fixed_setup, rd, tinfoil)
logger.info('Source tree %s synchronized' % srctree)
if initial_rev:
@@ -428,31 +442,10 @@ def sync(args, config, basepath, workspace):
tinfoil.shutdown()
-def _prep_extract_operation(config, basepath, recipename, tinfoil=None):
- """HACK: Ugly workaround for making sure that requirements are met when
- trying to extract a package. Returns the tinfoil instance to be used."""
- if not tinfoil:
- tinfoil = setup_tinfoil(basepath=basepath)
-
- rd = parse_recipe(config, tinfoil, recipename, True)
- if not rd:
- return None
-
- if bb.data.inherits_class('kernel-yocto', rd):
- tinfoil.shutdown()
- try:
- stdout, _ = exec_build_env_command(config.init_path, basepath,
- 'bitbake kern-tools-native')
- tinfoil = setup_tinfoil(basepath=basepath)
- except bb.process.ExecutionError as err:
- raise DevtoolError("Failed to build kern-tools-native:\n%s" %
- err.stdout)
- return tinfoil
-
-
-def _extract_source(srctree, keep_temp, devbranch, sync, d, tinfoil):
+def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil):
"""Extract sources of a recipe"""
import oe.recipeutils
+ import oe.patch
pn = d.getVar('PN')
@@ -480,6 +473,12 @@ def _extract_source(srctree, keep_temp, devbranch, sync, d, tinfoil):
os.rmdir(srctree)
initial_rev = None
+
+ appendexisted = False
+ recipefile = d.getVar('FILE')
+ appendfile = recipe_to_append(recipefile, config)
+ is_kernel_yocto = bb.data.inherits_class('kernel-yocto', d)
+
# We need to redirect WORKDIR, STAMPS_DIR etc. under a temporary
# directory so that:
# (a) we pick up all files that get unpacked to the WORKDIR, and
@@ -498,137 +497,56 @@ def _extract_source(srctree, keep_temp, devbranch, sync, d, tinfoil):
try:
tinfoil.logger.setLevel(logging.WARNING)
- crd = d.createCopy()
- # Make a subdir so we guard against WORKDIR==S
- workdir = os.path.join(tempdir, 'workdir')
- crd.setVar('WORKDIR', workdir)
- if not crd.getVar('S').startswith(workdir):
- # Usually a shared workdir recipe (kernel, gcc)
- # Try to set a reasonable default
- if bb.data.inherits_class('kernel', d):
- crd.setVar('S', '${WORKDIR}/source')
- else:
- crd.setVar('S', '${WORKDIR}/%s' % os.path.basename(d.getVar('S')))
- if bb.data.inherits_class('kernel', d):
- # We don't want to move the source to STAGING_KERNEL_DIR here
- crd.setVar('STAGING_KERNEL_DIR', '${S}')
-
- is_kernel_yocto = bb.data.inherits_class('kernel-yocto', d)
- if not is_kernel_yocto:
- crd.setVar('PATCHTOOL', 'git')
- crd.setVar('PATCH_COMMIT_FUNCTIONS', '1')
-
- # Apply our changes to the datastore to the server's datastore
- for key in crd.localkeys():
- tinfoil.config_data.setVar('%s_pn-%s' % (key, pn), crd.getVar(key, False))
-
- tinfoil.config_data.setVar('STAMPS_DIR', os.path.join(tempdir, 'stamps'))
- tinfoil.config_data.setVar('T', os.path.join(tempdir, 'temp'))
- tinfoil.config_data.setVar('BUILDCFG_FUNCS', '')
- tinfoil.config_data.setVar('BUILDCFG_HEADER', '')
- tinfoil.config_data.setVar('BB_HASH_IGNORE_MISMATCH', '1')
-
- tinfoil.set_event_mask(['bb.event.BuildStarted',
- 'bb.event.BuildCompleted',
- 'logging.LogRecord',
- 'bb.command.CommandCompleted',
- 'bb.command.CommandFailed',
- 'bb.build.TaskStarted',
- 'bb.build.TaskSucceeded',
- 'bb.build.TaskFailed',
- 'bb.build.TaskFailedSilent'])
-
- def runtask(target, task):
- if tinfoil.build_file(target, task):
- while True:
- event = tinfoil.wait_event(0.25)
- if event:
- if isinstance(event, bb.command.CommandCompleted):
- break
- elif isinstance(event, bb.command.CommandFailed):
- raise DevtoolError('Task do_%s failed: %s' % (task, event.error))
- elif isinstance(event, bb.build.TaskFailed):
- raise DevtoolError('Task do_%s failed' % task)
- elif isinstance(event, bb.build.TaskStarted):
- logger.info('Executing %s...' % event._task)
- elif isinstance(event, logging.LogRecord):
- if event.levelno <= logging.INFO:
- continue
- logger.handle(event)
-
- # we need virtual:native:/path/to/recipe if it's a BBCLASSEXTEND
- fn = tinfoil.get_recipe_file(pn)
- runtask(fn, 'unpack')
-
- if bb.data.inherits_class('kernel-yocto', d):
- # Extra step for kernel to populate the source directory
- runtask(fn, 'kernel_checkout')
-
- srcsubdir = crd.getVar('S')
-
- # Move local source files into separate subdir
- recipe_patches = [os.path.basename(patch) for patch in
- oe.recipeutils.get_recipe_patches(crd)]
- local_files = oe.recipeutils.get_recipe_local_files(crd)
-
- # Ignore local files with subdir={BP}
- srcabspath = os.path.abspath(srcsubdir)
- local_files = [fname for fname in local_files if
- os.path.exists(os.path.join(workdir, fname)) and
- (srcabspath == workdir or not
- os.path.join(workdir, fname).startswith(srcabspath +
- os.sep))]
- if local_files:
- for fname in local_files:
- _move_file(os.path.join(workdir, fname),
- os.path.join(tempdir, 'oe-local-files', fname))
- with open(os.path.join(tempdir, 'oe-local-files', '.gitignore'),
- 'w') as f:
- f.write('# Ignore local files, by default. Remove this file '
- 'if you want to commit the directory to Git\n*\n')
-
- if srcsubdir == workdir:
- # Find non-patch non-local sources that were "unpacked" to srctree
- # directory
- src_files = [fname for fname in _ls_tree(workdir) if
- os.path.basename(fname) not in recipe_patches]
- # Force separate S so that patch files can be left out from srctree
- srcsubdir = tempfile.mkdtemp(dir=workdir)
- tinfoil.config_data.setVar('S_task-patch', srcsubdir)
- # Move source files to S
- for path in src_files:
- _move_file(os.path.join(workdir, path),
- os.path.join(srcsubdir, path))
- elif os.path.dirname(srcsubdir) != workdir:
- # Handle if S is set to a subdirectory of the source
- srcsubdir = os.path.join(workdir, os.path.relpath(srcsubdir, workdir).split(os.sep)[0])
-
- scriptutils.git_convert_standalone_clone(srcsubdir)
-
- # Make sure that srcsubdir exists
- bb.utils.mkdirhier(srcsubdir)
- if not os.path.exists(srcsubdir) or not os.listdir(srcsubdir):
- logger.warning("no source unpacked to S, either the %s recipe "
- "doesn't use any source or the correct source "
- "directory could not be determined" % pn)
-
- setup_git_repo(srcsubdir, crd.getVar('PV'), devbranch, d=d)
+ # FIXME this results in a cache reload under control of tinfoil, which is fine
+ # except we don't get the knotty progress bar
- (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srcsubdir)
- initial_rev = stdout.rstrip()
+ if os.path.exists(appendfile):
+ appendbackup = os.path.join(tempdir, os.path.basename(appendfile) + '.bak')
+ shutil.copyfile(appendfile, appendbackup)
+ else:
+ appendbackup = None
+ bb.utils.mkdirhier(os.path.dirname(appendfile))
+ logger.debug('writing append file %s' % appendfile)
+ with open(appendfile, 'a') as f:
+ f.write('###--- _extract_source\n')
+ f.write('DEVTOOL_TEMPDIR = "%s"\n' % tempdir)
+ f.write('DEVTOOL_DEVBRANCH = "%s"\n' % devbranch)
+ if not is_kernel_yocto:
+ f.write('PATCHTOOL = "git"\n')
+ f.write('PATCH_COMMIT_FUNCTIONS = "1"\n')
+ f.write('inherit devtool-source\n')
+ f.write('###--- _extract_source\n')
+
+ update_unlockedsigs(basepath, workspace, fixed_setup, [pn])
+
+ sstate_manifests = d.getVar('SSTATE_MANIFESTS')
+ bb.utils.mkdirhier(sstate_manifests)
+ preservestampfile = os.path.join(sstate_manifests, 'preserve-stamps')
+ with open(preservestampfile, 'w') as f:
+ f.write(d.getVar('STAMP'))
+ try:
+ if bb.data.inherits_class('kernel-yocto', d):
+ # We need to generate the kernel config
+ task = 'do_configure'
+ else:
+ task = 'do_patch'
- logger.info('Patching...')
- runtask(fn, 'patch')
+ # Run the tasks up to do_patch (or do_configure for kernel), which covers fetch and unpack
+ res = tinfoil.build_targets(pn,
+ task,
+ handle_events=True)
+ finally:
+ if os.path.exists(preservestampfile):
+ os.remove(preservestampfile)
- bb.process.run('git tag -f devtool-patched', cwd=srcsubdir)
+ if not res:
+ raise DevtoolError('Extracting source for %s failed' % pn)
- kconfig = None
- if bb.data.inherits_class('kernel-yocto', d):
- # Store generate and store kernel config
- logger.info('Generating kernel config')
- runtask(fn, 'configure')
- kconfig = os.path.join(crd.getVar('B'), '.config')
+ with open(os.path.join(tempdir, 'initial_rev'), 'r') as f:
+ initial_rev = f.read()
+ with open(os.path.join(tempdir, 'srcsubdir'), 'r') as f:
+ srcsubdir = f.read()
tempdir_localdir = os.path.join(tempdir, 'oe-local-files')
srctree_localdir = os.path.join(srctree, 'oe-local-files')
@@ -682,11 +600,15 @@ def _extract_source(srctree, keep_temp, devbranch, sync, d, tinfoil):
oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=d)
bb.process.run('git %s commit -a -m "Committing local file symlinks\n\n%s"' % (' '.join(useroptions), oe.patch.GitApplyTree.ignore_commit_prefix), cwd=srctree)
- if kconfig:
+ if is_kernel_yocto:
logger.info('Copying kernel config to srctree')
- shutil.copy2(kconfig, srctree)
+ shutil.copy2(os.path.join(tempdir, '.config'), srctree)
finally:
+ if appendbackup:
+ shutil.copyfile(appendbackup, appendfile)
+ elif os.path.exists(appendfile):
+ os.remove(appendfile)
if keep_temp:
logger.info('Preserving temporary directory %s' % tempdir)
else:
@@ -699,8 +621,11 @@ def _add_md5(config, recipename, filename):
def addfile(fn):
md5 = bb.utils.md5_file(fn)
- with open(os.path.join(config.workspace_path, '.devtool_md5'), 'a') as f:
- f.write('%s|%s|%s\n' % (recipename, os.path.relpath(fn, config.workspace_path), md5))
+ with open(os.path.join(config.workspace_path, '.devtool_md5'), 'a+') as f:
+ md5_str = '%s|%s|%s\n' % (recipename, os.path.relpath(fn, config.workspace_path), md5)
+ f.seek(0, os.SEEK_SET)
+ if md5_str not in f.read():
+ f.write(md5_str)
if os.path.isdir(filename):
for root, _, files in os.walk(filename):
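A small standalone sketch of the duplicate-avoiding append that the _add_md5() change above implements; the file path and entry below are hypothetical:

import os
import tempfile

md5file = os.path.join(tempfile.mkdtemp(), '.devtool_md5')

def addline(line):
    # Open for append + read, rewind, and only write the entry if it is not
    # already present (the same a+ pattern as the change above).
    with open(md5file, 'a+') as f:
        f.seek(0, os.SEEK_SET)
        if line not in f.read():
            f.write(line)

entry = 'examplerecipe|recipes/examplerecipe/examplerecipe.bb|d41d8cd98f00b204e9800998ecf8427e\n'
addline(entry)
addline(entry)  # second call finds the entry and writes nothing
with open(md5file) as f:
    print(f.read().count(entry))  # 1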
@@ -772,13 +697,6 @@ def modify(args, config, basepath, workspace):
raise DevtoolError("--no-extract specified and source path %s does "
"not exist or is not a directory" %
srctree)
- if not args.no_extract:
- tinfoil = _prep_extract_operation(config, basepath, pn, tinfoil)
- if not tinfoil:
- # Error already shown
- return 1
- # We need to re-parse because tinfoil may have been re-initialised
- rd = parse_recipe(config, tinfoil, args.recipename, True)
recipefile = rd.getVar('FILE')
appendfile = recipe_to_append(recipefile, config, args.wildcard)
@@ -793,7 +711,7 @@ def modify(args, config, basepath, workspace):
initial_rev = None
commits = []
if not args.no_extract:
- initial_rev = _extract_source(srctree, args.keep_temp, args.branch, False, rd, tinfoil)
+ initial_rev = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil)
if not initial_rev:
return 1
logger.info('Source tree extracted to %s' % srctree)
@@ -842,7 +760,10 @@ def modify(args, config, basepath, workspace):
if bb.data.inherits_class('kernel', rd):
f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout '
- 'do_fetch do_unpack do_patch do_kernel_configme do_kernel_configcheck"\n')
+ 'do_fetch do_unpack do_kernel_configme do_kernel_configcheck"\n')
+ f.write('\ndo_patch() {\n'
+ ' :\n'
+ '}\n')
f.write('\ndo_configure_append() {\n'
' cp ${B}/.config ${S}/.config.baseline\n'
' ln -sfT ${B}/.config ${S}/.config.new\n'
@@ -852,6 +773,8 @@ def modify(args, config, basepath, workspace):
for commit in commits:
f.write('# commit: %s\n' % commit)
+ update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn])
+
_add_md5(config, pn, appendfile)
logger.info('Recipe %s now set up to build from %s' % (pn, srctree))
@@ -1375,12 +1298,13 @@ def _update_recipe_srcrev(srctree, rd, appendlayerdir, wildcard_version, no_remo
if not no_remove:
# Find list of existing patches in recipe file
patches_dir = tempfile.mkdtemp(dir=tempdir)
- old_srcrev = (rd.getVar('SRCREV', False) or '')
+ old_srcrev = rd.getVar('SRCREV') or ''
upd_p, new_p, del_p = _export_patches(srctree, rd, old_srcrev,
patches_dir)
+ logger.debug('Patches: update %s, new %s, delete %s' % (dict(upd_p), dict(new_p), dict(del_p)))
# Remove deleted local files and "overlapping" patches
- remove_files = list(del_f.values()) + list(upd_p.values())
+ remove_files = list(del_f.values()) + list(upd_p.values()) + list(del_p.values())
if remove_files:
removedentries = _remove_file_entries(srcuri, remove_files)[0]
update_srcuri = True
@@ -1612,7 +1536,7 @@ def update_recipe(args, config, basepath, workspace):
def status(args, config, basepath, workspace):
"""Entry point for the devtool 'status' subcommand"""
if workspace:
- for recipe, value in workspace.items():
+ for recipe, value in sorted(workspace.items()):
recipefile = value['recipefile']
if recipefile:
recipestr = ' (%s)' % recipefile
@@ -1627,6 +1551,26 @@ def status(args, config, basepath, workspace):
def _reset(recipes, no_clean, config, basepath, workspace):
"""Reset one or more recipes"""
+ def clean_preferred_provider(pn, layerconf_path):
+ """Remove PREFERRED_PROVIDER from layer.conf"""
+ import re
+ layerconf_file = os.path.join(layerconf_path, 'conf', 'layer.conf')
+ new_layerconf_file = os.path.join(layerconf_path, 'conf', '.layer.conf')
+ pprovider_found = False
+ with open(layerconf_file, 'r') as f:
+ lines = f.readlines()
+ with open(new_layerconf_file, 'a') as nf:
+ for line in lines:
+ pprovider_exp = r'^PREFERRED_PROVIDER_.*? = "' + pn + r'"$'
+ if not re.match(pprovider_exp, line):
+ nf.write(line)
+ else:
+ pprovider_found = True
+ if pprovider_found:
+ shutil.move(new_layerconf_file, layerconf_file)
+ else:
+ os.remove(new_layerconf_file)
+
if recipes and not no_clean:
if len(recipes) == 1:
logger.info('Cleaning sysroot for recipe %s...' % recipes[0])
@@ -1679,6 +1623,7 @@ def _reset(recipes, no_clean, config, basepath, workspace):
# This is unlikely, but if it's empty we can just remove it
os.rmdir(srctree)
+ clean_preferred_provider(pn, config.workspace_path)
def reset(args, config, basepath, workspace):
"""Entry point for the devtool 'reset' subcommand"""
@@ -1834,10 +1779,15 @@ def register_commands(subparsers, context):
parser_add.add_argument('--fetch-dev', help='For npm, also fetch devDependencies', action="store_true")
parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)')
parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true")
- parser_add.add_argument('--autorev', '-a', help='When fetching from a git repository, set SRCREV in the recipe to a floating revision instead of fixed', action="store_true")
+ group = parser_add.add_mutually_exclusive_group()
+ group.add_argument('--srcrev', '-S', help='Source revision to fetch if fetching from an SCM such as git (default latest)')
+ group.add_argument('--autorev', '-a', help='When fetching from a git repository, set SRCREV in the recipe to a floating revision instead of fixed', action="store_true")
+ parser_add.add_argument('--srcbranch', '-B', help='Branch in source repository if fetching from an SCM such as git (default master)')
parser_add.add_argument('--binary', '-b', help='Treat the source tree as something that should be installed verbatim (no compilation, same directory structure). Useful with binary packages e.g. RPMs.', action='store_true')
parser_add.add_argument('--also-native', help='Also add native variant (i.e. support building recipe for the build host as well as the target machine)', action='store_true')
parser_add.add_argument('--src-subdir', help='Specify subdirectory within source tree to use', metavar='SUBDIR')
+ parser_add.add_argument('--mirrors', help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).', action="store_true")
+ parser_add.add_argument('--provides', '-p', help='Specify an alias for the item provided by the recipe. E.g. virtual/libgl')
parser_add.set_defaults(func=add, fixed_setup=context.fixed_setup)
parser_modify = subparsers.add_parser('modify', help='Modify the source for an existing recipe',
@@ -1854,7 +1804,7 @@ def register_commands(subparsers, context):
group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (when not using -n/--no-extract) (default "%(default)s")')
parser_modify.add_argument('--keep-temp', help='Keep temporary directory (for debugging)', action="store_true")
- parser_modify.set_defaults(func=modify)
+ parser_modify.set_defaults(func=modify, fixed_setup=context.fixed_setup)
parser_extract = subparsers.add_parser('extract', help='Extract the source for an existing recipe',
description='Extracts the source for an existing recipe',
@@ -1863,7 +1813,7 @@ def register_commands(subparsers, context):
parser_extract.add_argument('srctree', help='Path to where to extract the source tree')
parser_extract.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (default "%(default)s")')
parser_extract.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
- parser_extract.set_defaults(func=extract, no_workspace=True)
+ parser_extract.set_defaults(func=extract, fixed_setup=context.fixed_setup)
parser_sync = subparsers.add_parser('sync', help='Synchronize the source tree for an existing recipe',
description='Synchronize the previously extracted source tree for an existing recipe',
@@ -1873,7 +1823,7 @@ def register_commands(subparsers, context):
parser_sync.add_argument('srctree', help='Path to the source tree')
parser_sync.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout')
parser_sync.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
- parser_sync.set_defaults(func=sync)
+ parser_sync.set_defaults(func=sync, fixed_setup=context.fixed_setup)
parser_rename = subparsers.add_parser('rename', help='Rename a recipe file in the workspace',
description='Renames the recipe file for a recipe in the workspace, changing the name or version part or both, ensuring that all references within the workspace are updated at the same time. Only works when the recipe file itself is in the workspace, e.g. after devtool add. Particularly useful when devtool add did not automatically determine the correct name.',
diff --git a/import-layers/yocto-poky/scripts/lib/devtool/upgrade.py b/import-layers/yocto-poky/scripts/lib/devtool/upgrade.py
index 05fb9e5ed..f1b3ff0a9 100644
--- a/import-layers/yocto-poky/scripts/lib/devtool/upgrade.py
+++ b/import-layers/yocto-poky/scripts/lib/devtool/upgrade.py
@@ -1,6 +1,6 @@
# Development tool - upgrade command plugin
#
-# Copyright (C) 2014-2015 Intel Corporation
+# Copyright (C) 2014-2017 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -33,7 +33,7 @@ sys.path = sys.path + [devtool_path]
import oe.recipeutils
from devtool import standard
-from devtool import exec_build_env_command, setup_tinfoil, DevtoolError, parse_recipe, use_external_build
+from devtool import exec_build_env_command, setup_tinfoil, DevtoolError, parse_recipe, use_external_build, update_unlockedsigs
logger = logging.getLogger('devtool')
@@ -180,7 +180,7 @@ def _get_uri(rd):
srcuri = rev_re.sub('', srcuri)
return srcuri, srcrev
-def _extract_new_source(newpv, srctree, no_patch, srcrev, branch, keep_temp, tinfoil, rd):
+def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, keep_temp, tinfoil, rd):
"""Extract sources of a recipe with a new version"""
def __run(cmd):
@@ -202,15 +202,38 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, branch, keep_temp, tin
__run('git tag -f devtool-base-new')
md5 = None
sha256 = None
+ if not srcbranch:
+ check_branch, check_branch_err = __run('git branch -r --contains %s' % srcrev)
+ get_branch = [x.strip() for x in check_branch.splitlines()]
+ # Remove HEAD reference point and drop remote prefix
+ get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')]
+ if 'master' in get_branch:
+ # If it is master, we do not need to append 'branch=master' as this is the default.
+ # Even in the case where get_branch has multiple entries, if 'master' is one
+ # of them, we default to taking it from 'master'
+ srcbranch = ''
+ elif len(get_branch) == 1:
+ # If 'master' isn't in get_branch and get_branch contains only ONE entry, then store that entry in 'srcbranch'
+ srcbranch = get_branch[0]
+ else:
+ # If get_branch contains more than one entry, then display an error and exit.
+ mbrch = '\n ' + '\n '.join(get_branch)
+ raise DevtoolError('Revision %s was found on multiple branches: %s\nPlease provide the correct branch in the devtool command with "--srcbranch" or "-B" option.' % (srcrev, mbrch))
else:
__run('git checkout devtool-base -b devtool-%s' % newpv)
tmpdir = tempfile.mkdtemp(prefix='devtool')
try:
- md5, sha256 = scriptutils.fetch_uri(tinfoil.config_data, uri, tmpdir, rev)
- except bb.fetch2.FetchError as e:
+ checksums, ftmpdir = scriptutils.fetch_url(tinfoil, uri, rev, tmpdir, logger, preserve_tmp=keep_temp)
+ except scriptutils.FetchUrlFailure as e:
raise DevtoolError(e)
+ if ftmpdir and keep_temp:
+ logger.info('Fetch temp directory is %s' % ftmpdir)
+
+ md5 = checksums['md5sum']
+ sha256 = checksums['sha256sum']
+
tmpsrctree = _get_srctree(tmpdir)
srctree = os.path.abspath(srctree)
@@ -269,7 +292,7 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, branch, keep_temp, tin
else:
shutil.rmtree(tmpsrctree)
- return (rev, md5, sha256)
+ return (rev, md5, sha256, srcbranch)
def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, workspace, tinfoil, rd):
"""Creates the new recipe under workspace"""
@@ -277,7 +300,10 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, workspace, tinfoil
bpn = rd.getVar('BPN')
path = os.path.join(workspace, 'recipes', bpn)
bb.utils.mkdirhier(path)
- copied, _ = oe.recipeutils.copy_recipe_files(rd, path)
+ copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True)
+ if not copied:
+ raise DevtoolError('Internal error - no files were copied for recipe %s' % bpn)
+ logger.debug('Copied %s to %s' % (copied, path))
oldpv = rd.getVar('PV')
if not newpv:
@@ -329,6 +355,29 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, workspace, tinfoil
return fullpath, copied
+
+def _check_git_config():
+ def getconfig(name):
+ try:
+ value = bb.process.run('git config --global %s' % name)[0].strip()
+ except bb.process.ExecutionError as e:
+ if e.exitcode == 1:
+ value = None
+ else:
+ raise
+ return value
+
+ username = getconfig('user.name')
+ useremail = getconfig('user.email')
+ configerr = []
+ if not username:
+ configerr.append('Please set your name using:\n git config --global user.name')
+ if not useremail:
+ configerr.append('Please set your email using:\n git config --global user.email')
+ if configerr:
+ raise DevtoolError('Your git configuration is incomplete which will prevent rebases from working:\n' + '\n'.join(configerr))
+
+
def upgrade(args, config, basepath, workspace):
"""Entry point for the devtool 'upgrade' subcommand"""
@@ -339,6 +388,8 @@ def upgrade(args, config, basepath, workspace):
if args.srcbranch and not args.srcrev:
raise DevtoolError("If you specify --srcbranch/-B then you must use --srcrev/-S to specify the revision")
+ _check_git_config()
+
tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
try:
rd = parse_recipe(config, tinfoil, args.recipename, True)
@@ -367,11 +418,11 @@ def upgrade(args, config, basepath, workspace):
rf = None
try:
- rev1 = standard._extract_source(srctree, False, 'devtool-orig', False, rd, tinfoil)
- rev2, md5, sha256 = _extract_new_source(args.version, srctree, args.no_patch,
- args.srcrev, args.branch, args.keep_temp,
+ rev1 = standard._extract_source(srctree, False, 'devtool-orig', False, config, basepath, workspace, args.fixed_setup, rd, tinfoil)
+ rev2, md5, sha256, srcbranch = _extract_new_source(args.version, srctree, args.no_patch,
+ args.srcrev, args.srcbranch, args.branch, args.keep_temp,
tinfoil, rd)
- rf, copied = _create_new_recipe(args.version, md5, sha256, args.srcrev, args.srcbranch, config.workspace_path, tinfoil, rd)
+ rf, copied = _create_new_recipe(args.version, md5, sha256, args.srcrev, srcbranch, config.workspace_path, tinfoil, rd)
except bb.process.CmdError as e:
_upgrade_error(e, rf, srctree)
except DevtoolError as e:
@@ -381,6 +432,9 @@ def upgrade(args, config, basepath, workspace):
af = _write_append(rf, srctree, args.same_dir, args.no_same_dir, rev2,
copied, config.workspace_path, rd)
standard._add_md5(config, pn, af)
+
+ update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn])
+
logger.info('Upgraded source extracted to %s' % srctree)
logger.info('New recipe is %s' % rf)
finally:
@@ -406,4 +460,4 @@ def register_commands(subparsers, context):
group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
parser_upgrade.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
- parser_upgrade.set_defaults(func=upgrade)
+ parser_upgrade.set_defaults(func=upgrade, fixed_setup=context.fixed_setup)
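For reference, a standalone sketch of the branch-resolution logic applied above in upgrade.py (and again in create.py further down) to the output of 'git branch -r --contains <rev>'; the sample output below is made up:

def resolve_srcbranch(check_branch_output):
    # Parse 'git branch -r --contains <rev>' output, drop the origin/HEAD
    # pointer and the remote prefix, then pick a branch.
    branches = [x.strip() for x in check_branch_output.splitlines()]
    branches = [x.split('/', 1)[1] for x in branches if not x.startswith('origin/HEAD')]
    if 'master' in branches:
        return ''  # default branch, no branch= parameter needed
    if len(branches) == 1:
        return branches[0]
    raise ValueError('revision found on multiple branches: %s' % ', '.join(branches))

# Made-up sample output
sample = '  origin/HEAD -> origin/master\n  origin/master\n  origin/maintenance\n'
print(repr(resolve_srcbranch(sample)))  # ''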
diff --git a/import-layers/yocto-poky/scripts/lib/devtool/utilcmds.py b/import-layers/yocto-poky/scripts/lib/devtool/utilcmds.py
index 0437e6417..b74511643 100644
--- a/import-layers/yocto-poky/scripts/lib/devtool/utilcmds.py
+++ b/import-layers/yocto-poky/scripts/lib/devtool/utilcmds.py
@@ -30,15 +30,13 @@ from devtool import parse_recipe
logger = logging.getLogger('devtool')
-
-def edit_recipe(args, config, basepath, workspace):
- """Entry point for the devtool 'edit-recipe' subcommand"""
+def _find_recipe_path(args, config, basepath, workspace):
if args.any_recipe:
tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
try:
rd = parse_recipe(config, tinfoil, args.recipename, True)
if not rd:
- return 1
+ raise DevtoolError("Failed to find specified recipe")
recipefile = rd.getVar('FILE')
finally:
tinfoil.shutdown()
@@ -48,8 +46,19 @@ def edit_recipe(args, config, basepath, workspace):
if not recipefile:
raise DevtoolError("Recipe file for %s is not under the workspace" %
args.recipename)
+ return recipefile
+
+
+def find_recipe(args, config, basepath, workspace):
+ """Entry point for the devtool 'find-recipe' subcommand"""
+ recipefile = _find_recipe_path(args, config, basepath, workspace)
+ print(recipefile)
+ return 0
- return scriptutils.run_editor(recipefile)
+
+def edit_recipe(args, config, basepath, workspace):
+ """Entry point for the devtool 'edit-recipe' subcommand"""
+ return scriptutils.run_editor(_find_recipe_path(args, config, basepath, workspace), logger)
def configure_help(args, config, basepath, workspace):
@@ -220,6 +229,14 @@ def register_commands(subparsers, context):
parser_edit_recipe.add_argument('--any-recipe', '-a', action="store_true", help='Edit any recipe, not just where the recipe file itself is in the workspace')
parser_edit_recipe.set_defaults(func=edit_recipe)
+ # Find-recipe
+ parser_find_recipe = subparsers.add_parser('find-recipe', help='Find a recipe file in your workspace',
+ description='By default, this will find a recipe file in your workspace; you can override this with the -a/--any-recipe option.',
+ group='working')
+ parser_find_recipe.add_argument('recipename', help='Recipe to find')
+ parser_find_recipe.add_argument('--any-recipe', '-a', action="store_true", help='Find any recipe, not just where the recipe file itself is in the workspace')
+ parser_find_recipe.set_defaults(func=find_recipe)
+
# NOTE: Needed to override the usage string here since the default
# gets the order wrong - recipename must come before --arg
parser_configure_help = subparsers.add_parser('configure-help', help='Get help on configure script options',
diff --git a/import-layers/yocto-poky/scripts/lib/recipetool/create.py b/import-layers/yocto-poky/scripts/lib/recipetool/create.py
index 4de52fc30..5bf939efc 100644
--- a/import-layers/yocto-poky/scripts/lib/recipetool/create.py
+++ b/import-layers/yocto-poky/scripts/lib/recipetool/create.py
@@ -1,6 +1,6 @@
# Recipe creation tool - create command plugin
#
-# Copyright (C) 2014-2016 Intel Corporation
+# Copyright (C) 2014-2017 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -156,10 +156,12 @@ class RecipeHandler(object):
RecipeHandler.recipebinmap[prog] = pn
@staticmethod
- def checkfiles(path, speclist, recursive=False):
+ def checkfiles(path, speclist, recursive=False, excludedirs=None):
results = []
if recursive:
- for root, _, files in os.walk(path):
+ for root, dirs, files in os.walk(path, topdown=True):
+ if excludedirs:
+ dirs[:] = [d for d in dirs if d not in excludedirs]
for fn in files:
for spec in speclist:
if fnmatch.fnmatch(fn, spec):
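A brief standalone illustration of the os.walk() pruning idiom the checkfiles() change above relies on: with topdown=True, assigning to dirs in place stops descent into the excluded directories. The exclusions here are hypothetical, mirroring the create_kmod.py call further down:

import os

def walk_excluding(path, excludedirs):
    # Pruning dirs[:] while walking top-down skips the excluded subtrees entirely
    for root, dirs, files in os.walk(path, topdown=True):
        dirs[:] = [d for d in dirs if d not in excludedirs]
        yield root, files

for root, files in walk_excluding('.', ['.git', 'contrib', 'test', 'examples']):
    pass  # only non-excluded directories are visited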
@@ -339,9 +341,14 @@ def determine_from_url(srcuri):
pn = res.group(1).strip().replace('_', '-')
pv = res.group(2).strip().replace('_', '.')
- if not pn and not pv and parseres.scheme not in ['git', 'gitsm', 'svn', 'hg']:
- srcfile = os.path.basename(parseres.path.rstrip('/'))
- pn, pv = determine_from_filename(srcfile)
+ if not pn and not pv:
+ if parseres.scheme not in ['git', 'gitsm', 'svn', 'hg']:
+ srcfile = os.path.basename(parseres.path.rstrip('/'))
+ pn, pv = determine_from_filename(srcfile)
+ elif parseres.scheme in ['git', 'gitsm']:
+ pn = os.path.basename(parseres.path.rstrip('/')).lower().replace('_', '-')
+ if pn.endswith('.git'):
+ pn = pn[:-4]
logger.debug('Determined from source URL: name = "%s", version = "%s"' % (pn, pv))
return (pn, pv)
@@ -412,11 +419,15 @@ def create_recipe(args):
pkgarch = "${MACHINE_ARCH}"
extravalues = {}
- checksums = (None, None)
+ checksums = {}
tempsrc = ''
source = args.source
srcsubdir = ''
srcrev = '${AUTOREV}'
+ srcbranch = ''
+ scheme = ''
+ storeTagName = ''
+ pv_srcpv = False
if os.path.isfile(source):
source = 'file://%s' % os.path.abspath(source)
@@ -432,24 +443,65 @@ def create_recipe(args):
rev_re = re.compile(';rev=([^;]+)')
res = rev_re.search(srcuri)
if res:
+ if args.srcrev:
+ logger.error('rev= parameter and -S/--srcrev option cannot both be specified - use one or the other')
+ sys.exit(1)
+ if args.autorev:
+ logger.error('rev= parameter and -a/--autorev option cannot both be specified - use one or the other')
+ sys.exit(1)
srcrev = res.group(1)
srcuri = rev_re.sub('', srcuri)
- tempsrc = tempfile.mkdtemp(prefix='recipetool-')
- srctree = tempsrc
- d = bb.data.createCopy(tinfoil.config_data)
- if fetchuri.startswith('npm://'):
- # Check if npm is available
- npm_bindir = check_npm(tinfoil, args.devtool)
- d.prependVar('PATH', '%s:' % npm_bindir)
- logger.info('Fetching %s...' % srcuri)
+ elif args.srcrev:
+ srcrev = args.srcrev
+
+ # Check whether the user has provided any branch info in fetchuri.
+ # If so, we will skip the branch checking process to honour the user's input.
+ scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(fetchuri)
+ srcbranch = params.get('branch')
+ if args.srcbranch:
+ if srcbranch:
+ logger.error('branch= parameter and -B/--srcbranch option cannot both be specified - use one or the other')
+ sys.exit(1)
+ srcbranch = args.srcbranch
+ nobranch = params.get('nobranch')
+ if nobranch and srcbranch:
+ logger.error('nobranch= cannot be used if you specify a branch')
+ sys.exit(1)
+ tag = params.get('tag')
+ if not srcbranch and not nobranch and srcrev != '${AUTOREV}':
+ # Append nobranch=1 in the following conditions:
+ # 1. User did not set 'branch=' in srcuri, and
+ # 2. User did not set 'nobranch=1' in srcuri, and
+ # 3. Source revision is not '${AUTOREV}'
+ params['nobranch'] = '1'
+ if tag:
+ # Keep a copy of the tag, append nobranch=1 and then remove the tag from the URL.
+ # The bitbake fetcher is unable to fetch when ${AUTOREV} and a tag are set at the same time.
+ storeTagName = params['tag']
+ params['nobranch'] = '1'
+ del params['tag']
+ if scheme == 'npm':
+ params['noverify'] = '1'
+ fetchuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params))
+
+ tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR')
+ bb.utils.mkdirhier(tmpparent)
+ tempsrc = tempfile.mkdtemp(prefix='recipetool-', dir=tmpparent)
+ srctree = os.path.join(tempsrc, 'source')
+
try:
- checksums = scriptutils.fetch_uri(d, fetchuri, srctree, srcrev)
- except bb.fetch2.BBFetchException as e:
- logger.error(str(e).rstrip())
+ checksums, ftmpdir = scriptutils.fetch_url(tinfoil, fetchuri, srcrev, srctree, logger, preserve_tmp=args.keep_temp)
+ except scriptutils.FetchUrlFailure as e:
+ logger.error(str(e))
sys.exit(1)
+
+ if ftmpdir and args.keep_temp:
+ logger.info('Fetch temp directory is %s' % ftmpdir)
+
dirlist = os.listdir(srctree)
- if 'git.indirectionsymlink' in dirlist:
- dirlist.remove('git.indirectionsymlink')
+ filterout = ['git.indirectionsymlink']
+ dirlist = [x for x in dirlist if x not in filterout]
+ logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist))
if len(dirlist) == 1:
singleitem = os.path.join(srctree, dirlist[0])
if os.path.isdir(singleitem):
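A standalone sketch of the SRC_URI parameter handling added above (strip tag=, remember it, and fetch with nobranch=1). In the patch, bb.fetch2.decodeurl()/encodeurl() do the real work; here a naive split on ';' stands in so the snippet runs outside bitbake, and the URL is hypothetical:

def decode_params(uri):
    # Naive stand-in for bb.fetch2.decodeurl(): split off ';key=value' params
    base, _, rest = uri.partition(';')
    params = dict(p.split('=', 1) for p in rest.split(';') if p)
    return base, params

def encode_params(base, params):
    # Naive stand-in for bb.fetch2.encodeurl()
    return ';'.join([base] + ['%s=%s' % (k, v) for k, v in sorted(params.items())])

# Hypothetical URL
base, params = decode_params('git://example.com/repo.git;protocol=https;tag=v1.2.3')
storetag = params.pop('tag', None)  # remember the tag, fetch with nobranch=1 instead
params['nobranch'] = '1'
print(encode_params(base, params))  # git://example.com/repo.git;nobranch=1;protocol=https
print(storetag)                     # v1.2.3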
@@ -457,30 +509,74 @@ def create_recipe(args):
srcsubdir = dirlist[0]
srctree = os.path.join(srctree, srcsubdir)
else:
- with open(singleitem, 'r', errors='surrogateescape') as f:
- if '<html' in f.read(100).lower():
- logger.error('Fetching "%s" returned a single HTML page - check the URL is correct and functional' % fetchuri)
- sys.exit(1)
+ check_single_file(dirlist[0], fetchuri)
+ elif len(dirlist) == 0:
+ if '/' in fetchuri:
+ fn = os.path.join(tinfoil.config_data.getVar('DL_DIR'), fetchuri.split('/')[-1])
+ if os.path.isfile(fn):
+ check_single_file(fn, fetchuri)
+ # If we've got to here then there's no source so we might as well give up
+ logger.error('URL %s resulted in an empty source tree' % fetchuri)
+ sys.exit(1)
+
+ # We need this check so that the recipe created by recipetool and devtool
+ # can be parsed and built by bitbake.
+ # If no branch name was given, determine the branch from the SRCREV provided.
+ if not srcbranch and not nobranch and srcrev and (srcrev != '${AUTOREV}') and scheme in ['git', 'gitsm']:
+ try:
+ cmd = 'git branch -r --contains'
+ check_branch, check_branch_err = bb.process.run('%s %s' % (cmd, srcrev), cwd=srctree)
+ except bb.process.ExecutionError as err:
+ logger.error(str(err))
+ sys.exit(1)
+ get_branch = [x.strip() for x in check_branch.splitlines()]
+ # Remove HEAD reference point and drop remote prefix
+ get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')]
+ if 'master' in get_branch:
+ # If it is master, we do not need to append 'branch=master' as this is the default.
+ # Even in the case where get_branch has multiple entries, if 'master' is one
+ # of them, we default to taking it from 'master'
+ srcbranch = ''
+ elif len(get_branch) == 1:
+ # If 'master' isn't in get_branch and get_branch contains only ONE entry, then store that entry in 'srcbranch'
+ srcbranch = get_branch[0]
+ else:
+ # If get_branch contains more than one entry, then display an error and exit.
+ mbrch = '\n ' + '\n '.join(get_branch)
+ logger.error('Revision %s was found on multiple branches: %s\nPlease provide the correct branch with -B/--srcbranch' % (srcrev, mbrch))
+ sys.exit(1)
+
+ # Since we might have a value in srcbranch, we need to
+ # reconstruct the srcuri to include 'branch' in params.
+ scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(srcuri)
+ if srcbranch:
+ params['branch'] = srcbranch
+
+ if storeTagName and scheme in ['git', 'gitsm']:
+ # Resolve srcrev from the stored tag and check that the tag is valid
+ cmd = ('git rev-parse --verify %s' % (storeTagName))
+ try:
+ check_tag, check_tag_err = bb.process.run('%s' % cmd, cwd=srctree)
+ srcrev = check_tag.split()[0]
+ except bb.process.ExecutionError as err:
+ logger.error(str(err))
+ logger.error("Possibly a wrong tag name was provided")
+ sys.exit(1)
+ # Drop the tag from srcuri as it would conflict with SRCREV during recipe parsing.
+ del params['tag']
+ srcuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params))
+
if os.path.exists(os.path.join(srctree, '.gitmodules')) and srcuri.startswith('git://'):
srcuri = 'gitsm://' + srcuri[6:]
logger.info('Fetching submodules...')
bb.process.run('git submodule update --init --recursive', cwd=srctree)
if is_package(fetchuri):
- tmpfdir = tempfile.mkdtemp(prefix='recipetool-')
- try:
- pkgfile = None
+ localdata = bb.data.createCopy(tinfoil.config_data)
+ pkgfile = bb.fetch2.localpath(fetchuri, localdata)
+ if pkgfile:
+ tmpfdir = tempfile.mkdtemp(prefix='recipetool-')
try:
- fileuri = fetchuri + ';unpack=0'
- scriptutils.fetch_uri(tinfoil.config_data, fileuri, tmpfdir, srcrev)
- for root, _, files in os.walk(tmpfdir):
- for f in files:
- pkgfile = os.path.join(root, f)
- break
- except bb.fetch2.BBFetchException as e:
- logger.warn('Second fetch to get metadata failed: %s' % str(e).rstrip())
-
- if pkgfile:
if pkgfile.endswith(('.deb', '.ipk')):
stdout, _ = bb.process.run('ar x %s' % pkgfile, cwd=tmpfdir)
stdout, _ = bb.process.run('tar xf control.tar.gz', cwd=tmpfdir)
@@ -490,8 +586,8 @@ def create_recipe(args):
stdout, _ = bb.process.run('rpm -qp --xml %s > pkginfo.xml' % pkgfile, cwd=tmpfdir)
values = convert_rpm_xml(os.path.join(tmpfdir, 'pkginfo.xml'))
extravalues.update(values)
- finally:
- shutil.rmtree(tmpfdir)
+ finally:
+ shutil.rmtree(tmpfdir)
else:
# Assume we're pointing to an existing source tree
if args.extract_to:
@@ -519,9 +615,9 @@ def create_recipe(args):
if args.src_subdir:
srcsubdir = os.path.join(srcsubdir, args.src_subdir)
- srctree_use = os.path.join(srctree, args.src_subdir)
+ srctree_use = os.path.abspath(os.path.join(srctree, args.src_subdir))
else:
- srctree_use = srctree
+ srctree_use = os.path.abspath(srctree)
if args.outfile and os.path.isdir(args.outfile):
outfile = None
@@ -543,9 +639,10 @@ def create_recipe(args):
# We need a blank line here so that patch_recipe_lines can rewind before the LICENSE comments
lines_before.append('')
- handled = []
- licvalues = handle_license_vars(srctree_use, lines_before, handled, extravalues, tinfoil.config_data)
+ # We'll come back and replace this later in handle_license_vars()
+ lines_before.append('##LICENSE_PLACEHOLDER##')
+ handled = []
classes = []
# FIXME This is kind of a hack, we probably ought to be using bitbake to do this
@@ -581,30 +678,31 @@ def create_recipe(args):
else:
realpv = None
- if srcuri and not realpv or not pn:
- name_pn, name_pv = determine_from_url(srcuri)
- if name_pn and not pn:
- pn = name_pn
- if name_pv and not realpv:
- realpv = name_pv
-
if not srcuri:
lines_before.append('# No information for SRC_URI yet (only an external source tree was specified)')
lines_before.append('SRC_URI = "%s"' % srcuri)
- (md5value, sha256value) = checksums
- if md5value:
- lines_before.append('SRC_URI[md5sum] = "%s"' % md5value)
- if sha256value:
- lines_before.append('SRC_URI[sha256sum] = "%s"' % sha256value)
+ for key, value in sorted(checksums.items()):
+ lines_before.append('SRC_URI[%s] = "%s"' % (key, value))
if srcuri and supports_srcrev(srcuri):
lines_before.append('')
lines_before.append('# Modify these as desired')
- lines_before.append('PV = "%s+git${SRCPV}"' % (realpv or '1.0'))
+ # Note: we have code to replace realpv further down if it gets set to some other value
+ scheme, _, _, _, _, _ = bb.fetch2.decodeurl(srcuri)
+ if scheme in ['git', 'gitsm']:
+ srcpvprefix = 'git'
+ elif scheme == 'svn':
+ srcpvprefix = 'svnr'
+ else:
+ srcpvprefix = scheme
+ lines_before.append('PV = "%s+%s${SRCPV}"' % (realpv or '1.0', srcpvprefix))
+ pv_srcpv = True
if not args.autorev and srcrev == '${AUTOREV}':
if os.path.exists(os.path.join(srctree, '.git')):
(stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
srcrev = stdout.rstrip()
lines_before.append('SRCREV = "%s"' % srcrev)
+ if args.provides:
+ lines_before.append('PROVIDES = "%s"' % args.provides)
lines_before.append('')
if srcsubdir and not args.binary:
@@ -677,6 +775,15 @@ def create_recipe(args):
if '_' in pn:
pn = pn.replace('_', '-')
+ if srcuri and not realpv or not pn:
+ name_pn, name_pv = determine_from_url(srcuri)
+ if name_pn and not pn:
+ pn = name_pn
+ if name_pv and not realpv:
+ realpv = name_pv
+
+ licvalues = handle_license_vars(srctree_use, lines_before, handled, extravalues, tinfoil.config_data)
+
if not outfile:
if not pn:
log_error_cond('Unable to determine short program name from source tree - please specify name with -N/--name or output file name with -o/--outfile', args.devtool)
@@ -726,10 +833,11 @@ def create_recipe(args):
skipblank = True
continue
elif line.startswith('SRC_URI = '):
- if realpv:
+ if realpv and not pv_srcpv:
line = line.replace(realpv, '${PV}')
elif line.startswith('PV = '):
if realpv:
+ # Replace the first part of the PV value
line = re.sub('"[^+]*\+', '"%s+' % realpv, line)
lines_before.append(line)
@@ -768,9 +876,6 @@ def create_recipe(args):
outlines.extend(lines_after)
if extravalues:
- if 'LICENSE' in extravalues and not licvalues:
- # Don't blow away 'CLOSED' value that comments say we set
- del extravalues['LICENSE']
_, outlines = oe.recipeutils.patch_recipe_lines(outlines, extravalues, trailing_newline=False)
if args.extract_to:
@@ -807,54 +912,101 @@ def create_recipe(args):
return 0
+def check_single_file(fn, fetchuri):
+ """Determine if a single downloaded file is something we can't handle"""
+ with open(fn, 'r', errors='surrogateescape') as f:
+ if '<html' in f.read(100).lower():
+ logger.error('Fetching "%s" returned a single HTML page - check the URL is correct and functional' % fetchuri)
+ sys.exit(1)
+
+def split_value(value):
+ if isinstance(value, str):
+ return value.split()
+ else:
+ return value
+
def handle_license_vars(srctree, lines_before, handled, extravalues, d):
+ lichandled = [x for x in handled if x[0] == 'license']
+ if lichandled:
+ # Someone else has already handled the license vars, just return their value
+ return lichandled[0][1]
+
licvalues = guess_license(srctree, d)
+ licenses = []
lic_files_chksum = []
lic_unknown = []
+ lines = []
if licvalues:
- licenses = []
for licvalue in licvalues:
if not licvalue[0] in licenses:
licenses.append(licvalue[0])
lic_files_chksum.append('file://%s;md5=%s' % (licvalue[1], licvalue[2]))
if licvalue[0] == 'Unknown':
lic_unknown.append(licvalue[1])
- lines_before.append('# WARNING: the following LICENSE and LIC_FILES_CHKSUM values are best guesses - it is')
- lines_before.append('# your responsibility to verify that the values are complete and correct.')
- if len(licvalues) > 1:
- lines_before.append('#')
- lines_before.append('# NOTE: multiple licenses have been detected; they have been separated with &')
- lines_before.append('# in the LICENSE value for now since it is a reasonable assumption that all')
- lines_before.append('# of the licenses apply. If instead there is a choice between the multiple')
- lines_before.append('# licenses then you should change the value to separate the licenses with |')
- lines_before.append('# instead of &. If there is any doubt, check the accompanying documentation')
- lines_before.append('# to determine which situation is applicable.')
if lic_unknown:
- lines_before.append('#')
- lines_before.append('# The following license files were not able to be identified and are')
- lines_before.append('# represented as "Unknown" below, you will need to check them yourself:')
+ lines.append('#')
+ lines.append('# The following license files were not able to be identified and are')
+ lines.append('# represented as "Unknown" below, you will need to check them yourself:')
for licfile in lic_unknown:
- lines_before.append('# %s' % licfile)
- lines_before.append('#')
- else:
- lines_before.append('# Unable to find any files that looked like license statements. Check the accompanying')
- lines_before.append('# documentation and source headers and set LICENSE and LIC_FILES_CHKSUM accordingly.')
- lines_before.append('#')
- lines_before.append('# NOTE: LICENSE is being set to "CLOSED" to allow you to at least start building - if')
- lines_before.append('# this is not accurate with respect to the licensing of the software being built (it')
- lines_before.append('# will not be in most cases) you must specify the correct value before using this')
- lines_before.append('# recipe for anything other than initial testing/development!')
- licenses = ['CLOSED']
- pkg_license = extravalues.pop('LICENSE', None)
- if pkg_license:
+ lines.append('# %s' % licfile)
+
+ extra_license = split_value(extravalues.pop('LICENSE', []))
+ if '&' in extra_license:
+ extra_license.remove('&')
+ if extra_license:
if licenses == ['Unknown']:
- lines_before.append('# NOTE: The following LICENSE value was determined from the original package metadata')
- licenses = [pkg_license]
+ licenses = extra_license
else:
- lines_before.append('# NOTE: Original package metadata indicates license is: %s' % pkg_license)
- lines_before.append('LICENSE = "%s"' % ' & '.join(licenses))
- lines_before.append('LIC_FILES_CHKSUM = "%s"' % ' \\\n '.join(lic_files_chksum))
- lines_before.append('')
+ for item in extra_license:
+ if item not in licenses:
+ licenses.append(item)
+ extra_lic_files_chksum = split_value(extravalues.pop('LIC_FILES_CHKSUM', []))
+ for item in extra_lic_files_chksum:
+ if item not in lic_files_chksum:
+ lic_files_chksum.append(item)
+
+ if lic_files_chksum:
+ # We are going to set the vars, so prepend the standard disclaimer
+ lines.insert(0, '# WARNING: the following LICENSE and LIC_FILES_CHKSUM values are best guesses - it is')
+ lines.insert(1, '# your responsibility to verify that the values are complete and correct.')
+ else:
+ # Without LIC_FILES_CHKSUM we set LICENSE = "CLOSED" to allow the
+ # user to get started easily
+ lines.append('# Unable to find any files that looked like license statements. Check the accompanying')
+ lines.append('# documentation and source headers and set LICENSE and LIC_FILES_CHKSUM accordingly.')
+ lines.append('#')
+ lines.append('# NOTE: LICENSE is being set to "CLOSED" to allow you to at least start building - if')
+ lines.append('# this is not accurate with respect to the licensing of the software being built (it')
+ lines.append('# will not be in most cases) you must specify the correct value before using this')
+ lines.append('# recipe for anything other than initial testing/development!')
+ licenses = ['CLOSED']
+
+ if extra_license and sorted(licenses) != sorted(extra_license):
+ lines.append('# NOTE: Original package / source metadata indicates license is: %s' % ' & '.join(extra_license))
+
+ if len(licenses) > 1:
+ lines.append('#')
+ lines.append('# NOTE: multiple licenses have been detected; they have been separated with &')
+ lines.append('# in the LICENSE value for now since it is a reasonable assumption that all')
+ lines.append('# of the licenses apply. If instead there is a choice between the multiple')
+ lines.append('# licenses then you should change the value to separate the licenses with |')
+ lines.append('# instead of &. If there is any doubt, check the accompanying documentation')
+ lines.append('# to determine which situation is applicable.')
+
+ lines.append('LICENSE = "%s"' % ' & '.join(licenses))
+ lines.append('LIC_FILES_CHKSUM = "%s"' % ' \\\n '.join(lic_files_chksum))
+ lines.append('')
+
+ # Replace the placeholder so we get the values in the right place in the recipe file
+ try:
+ pos = lines_before.index('##LICENSE_PLACEHOLDER##')
+ except ValueError:
+ pos = -1
+ if pos == -1:
+ lines_before.extend(lines)
+ else:
+ lines_before[pos:pos+1] = lines
+
handled.append(('license', licvalues))
return licvalues
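Finally, a minimal standalone sketch of the placeholder technique the reworked handle_license_vars() above relies on: a marker line is appended early on, and the generated LICENSE block is spliced in at that index later. The recipe lines and checksum below are hypothetical:

# Hypothetical recipe lines
lines_before = ['SUMMARY = "example"', '##LICENSE_PLACEHOLDER##', 'SRC_URI = "http://example.com/example.tar.gz"']
license_lines = ['LICENSE = "MIT"', 'LIC_FILES_CHKSUM = "file://COPYING;md5=0123456789abcdef0123456789abcdef"', '']

try:
    pos = lines_before.index('##LICENSE_PLACEHOLDER##')
except ValueError:
    pos = -1
if pos == -1:
    lines_before.extend(license_lines)  # no placeholder: fall back to appending
else:
    lines_before[pos:pos+1] = license_lines  # splice the block in at the marker

print('\n'.join(lines_before))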
@@ -951,6 +1103,10 @@ def crunch_license(licfile):
crunched_md5sums['1daebd9491d1e8426900b4fa5a422814'] = 'LGPLv2.1'
# https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv3
crunched_md5sums['2ebfb3bb49b9a48a075cc1425e7f4129'] = 'LGPLv3'
+ # https://raw.githubusercontent.com/eclipse/mosquitto/v1.4.14/epl-v10
+ crunched_md5sums['efe2cb9a35826992b9df68224e3c2628'] = 'EPL-1.0'
+ # https://raw.githubusercontent.com/eclipse/mosquitto/v1.4.14/edl-v10
+ crunched_md5sums['0a9c78c0a398d1bbce4a166757d60387'] = 'EDL-1.0'
lictext = []
with open(licfile, 'r', errors='surrogateescape') as f:
for line in f:
@@ -983,7 +1139,7 @@ def guess_license(srctree, d):
md5sums = get_license_md5sums(d)
licenses = []
- licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*']
+ licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10']
licfiles = []
for root, dirs, files in os.walk(srctree):
for fn in files:
@@ -1142,28 +1298,13 @@ def convert_rpm_xml(xmlfile):
return values
-def check_npm(tinfoil, debugonly=False):
- try:
- rd = tinfoil.parse_recipe('nodejs-native')
- except bb.providers.NoProvider:
- # We still conditionally show the message and exit with the special
- # return code, otherwise we can't show the proper message for eSDK
- # users
- log_error_cond('nodejs-native is required for npm but is not available - you will likely need to add a layer that provides nodejs', debugonly)
- sys.exit(14)
- bindir = rd.getVar('STAGING_BINDIR_NATIVE')
- npmpath = os.path.join(bindir, 'npm')
- if not os.path.exists(npmpath):
- log_error_cond('npm required to process specified source, but npm is not available - you need to run bitbake -c addto_recipe_sysroot nodejs-native first', debugonly)
- sys.exit(14)
- return bindir
-
def register_commands(subparsers):
parser_create = subparsers.add_parser('create',
help='Create a new recipe',
description='Creates a new recipe from a source tree')
parser_create.add_argument('source', help='Path or URL to source')
parser_create.add_argument('-o', '--outfile', help='Specify filename for recipe to create')
+ parser_create.add_argument('-p', '--provides', help='Specify an alias for the item provided by the recipe')
parser_create.add_argument('-m', '--machine', help='Make recipe machine-specific as opposed to architecture-specific', action='store_true')
parser_create.add_argument('-x', '--extract-to', metavar='EXTRACTPATH', help='Assuming source is a URL, fetch it and extract it to the directory specified as %(metavar)s')
parser_create.add_argument('-N', '--name', help='Name to use within recipe (PN)')
@@ -1171,12 +1312,13 @@ def register_commands(subparsers):
parser_create.add_argument('-b', '--binary', help='Treat the source tree as something that should be installed verbatim (no compilation, same directory structure)', action='store_true')
parser_create.add_argument('--also-native', help='Also add native variant (i.e. support building recipe for the build host as well as the target machine)', action='store_true')
parser_create.add_argument('--src-subdir', help='Specify subdirectory within source tree to use', metavar='SUBDIR')
- parser_create.add_argument('-a', '--autorev', help='When fetching from a git repository, set SRCREV in the recipe to a floating revision instead of fixed', action="store_true")
+ group = parser_create.add_mutually_exclusive_group()
+ group.add_argument('-a', '--autorev', help='When fetching from a git repository, set SRCREV in the recipe to a floating revision instead of fixed', action="store_true")
+ group.add_argument('-S', '--srcrev', help='Source revision to fetch if fetching from an SCM such as git (default latest)')
+ parser_create.add_argument('-B', '--srcbranch', help='Branch in source repository if fetching from an SCM such as git (default master)')
parser_create.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
parser_create.add_argument('--fetch-dev', action="store_true", help='For npm, also fetch devDependencies')
parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS)
- # FIXME I really hate having to set parserecipes for this, but given we may need
- # to call into npm (and we don't know in advance if we will or not) and in order
- # to do so we need to know npm's recipe sysroot path, there's not much alternative
- parser_create.set_defaults(func=create_recipe, parserecipes=True)
+ parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).')
+ parser_create.set_defaults(func=create_recipe)
diff --git a/import-layers/yocto-poky/scripts/lib/recipetool/create_buildsys.py b/import-layers/yocto-poky/scripts/lib/recipetool/create_buildsys.py
index e914e53aa..4743c740c 100644
--- a/import-layers/yocto-poky/scripts/lib/recipetool/create_buildsys.py
+++ b/import-layers/yocto-poky/scripts/lib/recipetool/create_buildsys.py
@@ -863,6 +863,10 @@ class SpecFileRecipeHandler(RecipeHandler):
break
if len(foundvalues) == len(valuemap):
break
+ # Drop values containing unexpanded RPM macros
+ for k in list(foundvalues.keys()):
+ if '%' in foundvalues[k]:
+ del foundvalues[k]
if 'PV' in foundvalues:
if not validate_pv(foundvalues['PV']):
del foundvalues['PV']
diff --git a/import-layers/yocto-poky/scripts/lib/recipetool/create_buildsys_python.py b/import-layers/yocto-poky/scripts/lib/recipetool/create_buildsys_python.py
index ec5449bee..5bd2aa337 100644
--- a/import-layers/yocto-poky/scripts/lib/recipetool/create_buildsys_python.py
+++ b/import-layers/yocto-poky/scripts/lib/recipetool/create_buildsys_python.py
@@ -356,6 +356,8 @@ class PythonRecipeHandler(RecipeHandler):
# Naive mapping of setup() arguments to PKG-INFO field names
for d in [info, non_literals]:
for key, value in list(d.items()):
+ if key is None:
+ continue
new_key = _map(key)
if new_key != key:
del d[key]
diff --git a/import-layers/yocto-poky/scripts/lib/recipetool/create_kmod.py b/import-layers/yocto-poky/scripts/lib/recipetool/create_kmod.py
index 7cf188db2..4569b53c8 100644
--- a/import-layers/yocto-poky/scripts/lib/recipetool/create_kmod.py
+++ b/import-layers/yocto-poky/scripts/lib/recipetool/create_kmod.py
@@ -40,7 +40,7 @@ class KernelModuleRecipeHandler(RecipeHandler):
makefiles = []
- files = RecipeHandler.checkfiles(srctree, ['*.c', '*.h'], recursive=True)
+ files = RecipeHandler.checkfiles(srctree, ['*.c', '*.h'], recursive=True, excludedirs=['contrib', 'test', 'examples'])
if files:
for cfile in files:
# Look in same dir or parent for Makefile
diff --git a/import-layers/yocto-poky/scripts/lib/recipetool/create_npm.py b/import-layers/yocto-poky/scripts/lib/recipetool/create_npm.py
index cb8f338b8..ae5397262 100644
--- a/import-layers/yocto-poky/scripts/lib/recipetool/create_npm.py
+++ b/import-layers/yocto-poky/scripts/lib/recipetool/create_npm.py
@@ -21,7 +21,7 @@ import subprocess
import tempfile
import shutil
import json
-from recipetool.create import RecipeHandler, split_pkg_licenses, handle_license_vars, check_npm
+from recipetool.create import RecipeHandler, split_pkg_licenses, handle_license_vars
logger = logging.getLogger('recipetool')
@@ -36,6 +36,27 @@ def tinfoil_init(instance):
class NpmRecipeHandler(RecipeHandler):
lockdownpath = None
+ def _ensure_npm(self, fixed_setup=False):
+ if not tinfoil.recipes_parsed:
+ tinfoil.parse_recipes()
+ try:
+ rd = tinfoil.parse_recipe('nodejs-native')
+ except bb.providers.NoProvider:
+ if fixed_setup:
+ msg = 'nodejs-native is required for npm but is not available within this SDK'
+ else:
+ msg = 'nodejs-native is required for npm but is not available - you will likely need to add a layer that provides nodejs'
+ logger.error(msg)
+ return None
+ bindir = rd.getVar('STAGING_BINDIR_NATIVE')
+ npmpath = os.path.join(bindir, 'npm')
+ if not os.path.exists(npmpath):
+ tinfoil.build_targets('nodejs-native', 'addto_recipe_sysroot')
+ if not os.path.exists(npmpath):
+ logger.error('npm required to process specified source, but nodejs-native did not seem to populate it')
+ return None
+ return bindir
+
def _handle_license(self, data):
'''
Handle the license value from an npm package.json file
@@ -109,7 +130,6 @@ class NpmRecipeHandler(RecipeHandler):
if varname == 'SRC_URI':
if not origvalue.startswith('npm://'):
src_uri = origvalue.split()
- changed = False
deplist = {}
for dep, depver in optdeps.items():
depdata = self.get_npm_data(dep, depver, d)
@@ -123,14 +143,15 @@ class NpmRecipeHandler(RecipeHandler):
depdata = self.get_npm_data(dep, depver, d)
deplist[dep] = depdata
+ extra_urls = []
for dep, depdata in deplist.items():
version = depdata.get('version', None)
if version:
url = 'npm://registry.npmjs.org;name=%s;version=%s;subdir=node_modules/%s' % (dep, version, dep)
- scriptutils.fetch_uri(d, url, srctree)
- src_uri.append(url)
- changed = True
- if changed:
+ extra_urls.append(url)
+ if extra_urls:
+ scriptutils.fetch_url(tinfoil, ' '.join(extra_urls), None, srctree, logger)
+ src_uri.extend(extra_urls)
return src_uri, None, -1, True
return origvalue, None, 0, True
updated, newlines = bb.utils.edit_metadata(lines_before, ['SRC_URI'], varfunc)
@@ -143,40 +164,9 @@ class NpmRecipeHandler(RecipeHandler):
lines_before.append(line)
return updated
- def _replace_license_vars(self, srctree, lines_before, handled, extravalues, d):
- for item in handled:
- if isinstance(item, tuple):
- if item[0] == 'license':
- del item
- break
-
- calledvars = []
- def varfunc(varname, origvalue, op, newlines):
- if varname in ['LICENSE', 'LIC_FILES_CHKSUM']:
- for i, e in enumerate(reversed(newlines)):
- if not e.startswith('#'):
- stop = i
- while stop > 0:
- newlines.pop()
- stop -= 1
- break
- calledvars.append(varname)
- if len(calledvars) > 1:
- # The second time around, put the new license text in
- insertpos = len(newlines)
- handle_license_vars(srctree, newlines, handled, extravalues, d)
- return None, None, 0, True
- return origvalue, None, 0, True
- updated, newlines = bb.utils.edit_metadata(lines_before, ['LICENSE', 'LIC_FILES_CHKSUM'], varfunc)
- if updated:
- del lines_before[:]
- lines_before.extend(newlines)
- else:
- raise Exception('Did not find license variables')
-
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
import bb.utils
- import oe
+ import oe.package
from collections import OrderedDict
if 'buildsystem' in handled:
@@ -189,7 +179,9 @@ class NpmRecipeHandler(RecipeHandler):
files = RecipeHandler.checkfiles(srctree, ['package.json'])
if files:
d = bb.data.createCopy(tinfoil.config_data)
- npm_bindir = check_npm(tinfoil, self._devtool)
+ npm_bindir = self._ensure_npm()
+ if not npm_bindir:
+ sys.exit(14)
d.prependVar('PATH', '%s:' % npm_bindir)
data = read_package_json(files[0])
@@ -205,10 +197,7 @@ class NpmRecipeHandler(RecipeHandler):
fetchdev = extravalues['fetchdev'] or None
deps, optdeps, devdeps = self.get_npm_package_dependencies(data, fetchdev)
- updated = self._handle_dependencies(d, deps, optdeps, devdeps, lines_before, srctree)
- if updated:
- # We need to redo the license stuff
- self._replace_license_vars(srctree, lines_before, handled, extravalues, d)
+ self._handle_dependencies(d, deps, optdeps, devdeps, lines_before, srctree)
# Shrinkwrap
localfilesdir = tempfile.mkdtemp(prefix='recipetool-npm')
@@ -219,11 +208,14 @@ class NpmRecipeHandler(RecipeHandler):
# Split each npm module out to is own package
npmpackages = oe.package.npm_split_package_dirs(srctree)
+ licvalues = None
for item in handled:
if isinstance(item, tuple):
if item[0] == 'license':
licvalues = item[1]
break
+ if not licvalues:
+ licvalues = handle_license_vars(srctree, lines_before, handled, extravalues, d)
if licvalues:
# Augment the license list with information we have in the packages
licenses = {}
@@ -244,13 +236,7 @@ class NpmRecipeHandler(RecipeHandler):
all_licenses = list(set([item.replace('_', ' ') for pkglicense in pkglicenses.values() for item in pkglicense]))
if '&' in all_licenses:
all_licenses.remove('&')
- # Go back and update the LICENSE value since we have a bit more
- # information than when that was written out (and we know all apply
- # vs. there being a choice, so we can join them with &)
- for i, line in enumerate(lines_before):
- if line.startswith('LICENSE = '):
- lines_before[i] = 'LICENSE = "%s"' % ' & '.join(all_licenses)
- break
+ extravalues['LICENSE'] = ' & '.join(all_licenses)
# Need to move S setting after inherit npm
for i, line in enumerate(lines_before):
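
Note: the hunk above stops rewriting the LICENSE line in place and instead records the combined value in extravalues['LICENSE']. A minimal sketch of that join step, using hypothetical package names:

    # Hypothetical pkglicenses mapping; real data comes from the npm packages.
    pkglicenses = {'lodash': ['MIT'], 'iconv-lite': ['MIT', '&', 'ISC']}
    all_licenses = list(set(item.replace('_', ' ')
                            for pkglicense in pkglicenses.values()
                            for item in pkglicense))
    if '&' in all_licenses:
        all_licenses.remove('&')
    extravalues = {}
    extravalues['LICENSE'] = ' & '.join(all_licenses)   # e.g. "MIT & ISC"
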
diff --git a/import-layers/yocto-poky/scripts/lib/recipetool/newappend.py b/import-layers/yocto-poky/scripts/lib/recipetool/newappend.py
index 0b63759d8..decce83fa 100644
--- a/import-layers/yocto-poky/scripts/lib/recipetool/newappend.py
+++ b/import-layers/yocto-poky/scripts/lib/recipetool/newappend.py
@@ -74,7 +74,7 @@ def newappend(args):
return 1
if args.edit:
- return scriptutils.run_editor([append_path, recipe_path])
+ return scriptutils.run_editor([append_path, recipe_path], logger)
else:
print(append_path)
diff --git a/import-layers/yocto-poky/scripts/lib/scriptutils.py b/import-layers/yocto-poky/scripts/lib/scriptutils.py
index 92b601c7e..85b1c949b 100644
--- a/import-layers/yocto-poky/scripts/lib/scriptutils.py
+++ b/import-layers/yocto-poky/scripts/lib/scriptutils.py
@@ -23,6 +23,8 @@ import argparse
import subprocess
import tempfile
import shutil
+import random
+import string
def logger_create(name, stream=None):
logger = logging.getLogger(name)
@@ -78,52 +80,139 @@ def git_convert_standalone_clone(repodir):
bb.process.run('git repack -a', cwd=repodir)
os.remove(alternatesfile)
-def fetch_uri(d, uri, destdir, srcrev=None):
- """Fetch a URI to a local directory"""
+def _get_temp_recipe_dir(d):
+ # This is a little bit hacky but we need to find a place where we can put
+ # the recipe so that bitbake can find it. We're going to delete it at the
+ # end so it doesn't really matter where we put it.
+ bbfiles = d.getVar('BBFILES').split()
+ fetchrecipedir = None
+ for pth in bbfiles:
+ if pth.endswith('.bb'):
+ pthdir = os.path.dirname(pth)
+ if os.access(os.path.dirname(os.path.dirname(pthdir)), os.W_OK):
+ fetchrecipedir = pthdir.replace('*', 'recipetool')
+ if pthdir.endswith('workspace/recipes/*'):
+ # Prefer the workspace
+ break
+ return fetchrecipedir
+
+class FetchUrlFailure(Exception):
+ def __init__(self, url):
+ self.url = url
+ def __str__(self):
+ return "Failed to fetch URL %s" % self.url
+
+def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirrors=False):
+ """
+ Fetch the specified URL using normal do_fetch and do_unpack tasks, i.e.
+ any dependencies that need to be satisfied in order to support the fetch
+ operation will be taken care of
+ """
+
import bb
- tmpparent = d.getVar('BASE_WORKDIR')
+
+ checksums = {}
+ fetchrecipepn = None
+
+ # We need to put our temp directory under ${BASE_WORKDIR} otherwise
+ # we may have problems with the recipe-specific sysroot population
+ tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR')
bb.utils.mkdirhier(tmpparent)
- tmpworkdir = tempfile.mkdtemp(dir=tmpparent)
+ tmpdir = tempfile.mkdtemp(prefix='recipetool-', dir=tmpparent)
try:
- bb.utils.mkdirhier(destdir)
- localdata = bb.data.createCopy(d)
-
- # Set some values to allow extend_recipe_sysroot to work here we're we are not running from a task
- localdata.setVar('WORKDIR', tmpworkdir)
- localdata.setVar('BB_RUNTASK', 'do_fetch')
- localdata.setVar('PN', 'dummy')
- localdata.setVar('BB_LIMITEDDEPS', '1')
- bb.build.exec_func("extend_recipe_sysroot", localdata)
-
- # Set some values for the benefit of the fetcher code
- localdata.setVar('BB_STRICT_CHECKSUM', '')
- localdata.setVar('SRCREV', srcrev)
- ret = (None, None)
- olddir = os.getcwd()
+ tmpworkdir = os.path.join(tmpdir, 'work')
+ logger.debug('fetch_url: temp dir is %s' % tmpdir)
+
+ fetchrecipedir = _get_temp_recipe_dir(tinfoil.config_data)
+ if not fetchrecipedir:
+ logger.error('Searched BBFILES but unable to find a writeable place to put temporary recipe')
+ sys.exit(1)
+ fetchrecipe = None
+ bb.utils.mkdirhier(fetchrecipedir)
try:
- fetcher = bb.fetch2.Fetch([uri], localdata)
- for u in fetcher.ud:
- ud = fetcher.ud[u]
- ud.ignore_checksums = True
- fetcher.download()
- for u in fetcher.ud:
- ud = fetcher.ud[u]
- if ud.localpath.rstrip(os.sep) == localdata.getVar('DL_DIR').rstrip(os.sep):
- raise Exception('Local path is download directory - please check that the URI "%s" is correct' % uri)
- fetcher.unpack(destdir)
- for u in fetcher.ud:
- ud = fetcher.ud[u]
- if ud.method.recommends_checksum(ud):
- md5value = bb.utils.md5_file(ud.localpath)
- sha256value = bb.utils.sha256_file(ud.localpath)
- ret = (md5value, sha256value)
+ # Generate a dummy recipe so we can follow more or less normal paths
+ # for do_fetch and do_unpack
+ # I'd use tempfile functions here but underscores can be produced by that and those
+ # aren't allowed in recipe file names except to separate the version
+ rndstring = ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(8))
+ fetchrecipe = os.path.join(fetchrecipedir, 'tmp-recipetool-%s.bb' % rndstring)
+ fetchrecipepn = os.path.splitext(os.path.basename(fetchrecipe))[0]
+ logger.debug('Generating initial recipe %s for fetching' % fetchrecipe)
+ with open(fetchrecipe, 'w') as f:
+ # We don't want to have to specify LIC_FILES_CHKSUM
+ f.write('LICENSE = "CLOSED"\n')
+ # We don't need the cross-compiler
+ f.write('INHIBIT_DEFAULT_DEPS = "1"\n')
+ # We don't have the checksums yet so we can't require them
+ f.write('BB_STRICT_CHECKSUM = "ignore"\n')
+ f.write('SRC_URI = "%s"\n' % srcuri)
+ f.write('SRCREV = "%s"\n' % srcrev)
+ f.write('WORKDIR = "%s"\n' % tmpworkdir)
+ # Set S out of the way so it doesn't get created under the workdir
+ f.write('S = "%s"\n' % os.path.join(tmpdir, 'emptysrc'))
+ if not mirrors:
+ # We do not need PREMIRRORS since we are almost certainly
+ # fetching new source rather than something that has already
+ # been fetched. Hence, we disable them by default.
+ # However, we provide an option for users to enable it.
+ f.write('PREMIRRORS = ""\n')
+ f.write('MIRRORS = ""\n')
+
+ logger.info('Fetching %s...' % srcuri)
+
+ # FIXME this is too noisy at the moment
+
+ # Parse recipes so our new recipe gets picked up
+ tinfoil.parse_recipes()
+
+ def eventhandler(event):
+ if isinstance(event, bb.fetch2.MissingChecksumEvent):
+ checksums.update(event.checksums)
+ return True
+ return False
+
+ # Run the fetch + unpack tasks
+ res = tinfoil.build_targets(fetchrecipepn,
+ 'do_unpack',
+ handle_events=True,
+ extra_events=['bb.fetch2.MissingChecksumEvent'],
+ event_callback=eventhandler)
+ if not res:
+ raise FetchUrlFailure(srcuri)
+
+ # Remove unneeded directories
+ rd = tinfoil.parse_recipe(fetchrecipepn)
+ if rd:
+ pathvars = ['T', 'RECIPE_SYSROOT', 'RECIPE_SYSROOT_NATIVE']
+ for pathvar in pathvars:
+ path = rd.getVar(pathvar)
+ shutil.rmtree(path)
finally:
- os.chdir(olddir)
+ if fetchrecipe:
+ try:
+ os.remove(fetchrecipe)
+ except FileNotFoundError:
+ pass
+ try:
+ os.rmdir(fetchrecipedir)
+ except OSError as e:
+ import errno
+ if e.errno != errno.ENOTEMPTY:
+ raise
+
+ bb.utils.mkdirhier(destdir)
+ for fn in os.listdir(tmpworkdir):
+ shutil.move(os.path.join(tmpworkdir, fn), destdir)
+
finally:
- shutil.rmtree(tmpworkdir)
- return ret
+ if not preserve_tmp:
+ shutil.rmtree(tmpdir)
+ tmpdir = None
+
+ return checksums, tmpdir
+
-def run_editor(fn):
+def run_editor(fn, logger=None):
if isinstance(fn, str):
params = '"%s"' % fn
else:
@@ -134,8 +223,8 @@ def run_editor(fn):
editor = os.getenv('VISUAL', os.getenv('EDITOR', 'vi'))
try:
return subprocess.check_call('%s %s' % (editor, params), shell=True)
- except OSError as exc:
- logger.error("Execution of editor '%s' failed: %s", editor, exc)
+ except subprocess.CalledProcessError as exc:
+ logger.error("Execution of '%s' failed: %s" % (editor, exc))
return 1
def is_src_url(param):
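
Note: fetch_url() replaces the old fetch_uri() and drives the normal do_fetch/do_unpack tasks through tinfoil. A hedged usage sketch, assuming an already-prepared bb.tinfoil.Tinfoil instance and placeholder URL/paths:

    from scriptutils import fetch_url, logger_create

    logger = logger_create('fetch-example')
    # 'tinfoil' is assumed to be a prepared bb.tinfoil.Tinfoil instance
    checksums, tmpdir = fetch_url(tinfoil,
                                  'https://example.com/foo-1.0.tar.gz',
                                  None,             # srcrev: only for SCM URLs
                                  '/tmp/foo-src',   # where the source is unpacked
                                  logger,
                                  preserve_tmp=False)
    # checksums holds whatever the fetcher reported via MissingChecksumEvent
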
diff --git a/import-layers/yocto-poky/scripts/lib/wic/canned-wks/common.wks.inc b/import-layers/yocto-poky/scripts/lib/wic/canned-wks/common.wks.inc
index 5cf2fd1f3..89880b417 100644
--- a/import-layers/yocto-poky/scripts/lib/wic/canned-wks/common.wks.inc
+++ b/import-layers/yocto-poky/scripts/lib/wic/canned-wks/common.wks.inc
@@ -1,3 +1,3 @@
# This file is included into 3 canned wks files from this directory
part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024
-part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024
+part / --source rootfs --use-uuid --fstype=ext4 --label platform --align 1024
diff --git a/import-layers/yocto-poky/scripts/lib/wic/canned-wks/directdisk-bootloader-config.cfg b/import-layers/yocto-poky/scripts/lib/wic/canned-wks/directdisk-bootloader-config.cfg
index d5a07d204..c58e74a85 100644
--- a/import-layers/yocto-poky/scripts/lib/wic/canned-wks/directdisk-bootloader-config.cfg
+++ b/import-layers/yocto-poky/scripts/lib/wic/canned-wks/directdisk-bootloader-config.cfg
@@ -12,16 +12,16 @@ DEFAULT Graphics console boot
LABEL Graphics console boot
KERNEL /vmlinuz
-APPEND label=boot root=/dev/sda2 rootwait
+APPEND label=boot rootwait
LABEL Serial console boot
KERNEL /vmlinuz
-APPEND label=boot root=/dev/sda2 rootwait console=ttyS0,115200
+APPEND label=boot rootwait console=ttyS0,115200
LABEL Graphics console install
KERNEL /vmlinuz
-APPEND label=install root=/dev/sda2 rootwait
+APPEND label=install rootwait
LABEL Serial console install
KERNEL /vmlinuz
-APPEND label=install root=/dev/sda2 rootwait console=ttyS0,115200
+APPEND label=install rootwait console=ttyS0,115200
diff --git a/import-layers/yocto-poky/scripts/lib/wic/canned-wks/qemux86-directdisk.wks b/import-layers/yocto-poky/scripts/lib/wic/canned-wks/qemux86-directdisk.wks
index db30bbced..1f8466af2 100644
--- a/import-layers/yocto-poky/scripts/lib/wic/canned-wks/qemux86-directdisk.wks
+++ b/import-layers/yocto-poky/scripts/lib/wic/canned-wks/qemux86-directdisk.wks
@@ -4,5 +4,5 @@
include common.wks.inc
-bootloader --timeout=0 --append="vga=0 uvesafb.mode_option=640x480-32 root=/dev/sda2 rw mem=256M ip=192.168.7.2::192.168.7.1:255.255.255.0 oprofile.timer=1 rootfstype=ext4 "
+bootloader --timeout=0 --append="vga=0 uvesafb.mode_option=640x480-32 rw mem=256M ip=192.168.7.2::192.168.7.1:255.255.255.0 oprofile.timer=1 rootfstype=ext4 "
diff --git a/import-layers/yocto-poky/scripts/lib/wic/canned-wks/systemd-bootdisk.wks b/import-layers/yocto-poky/scripts/lib/wic/canned-wks/systemd-bootdisk.wks
index 4bd9d6a65..95d7b97a6 100644
--- a/import-layers/yocto-poky/scripts/lib/wic/canned-wks/systemd-bootdisk.wks
+++ b/import-layers/yocto-poky/scripts/lib/wic/canned-wks/systemd-bootdisk.wks
@@ -2,10 +2,10 @@
# long-description: Creates a partitioned EFI disk image that the user
# can directly dd to boot media. The selected bootloader is systemd-boot.
-part /boot --source bootimg-efi --sourceparams="loader=systemd-boot" --ondisk sda --label msdos --active --align 1024
+part /boot --source bootimg-efi --sourceparams="loader=systemd-boot" --ondisk sda --label msdos --active --align 1024 --use-uuid
part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid
-part swap --ondisk sda --size 44 --label swap1 --fstype=swap
+part swap --ondisk sda --size 44 --label swap1 --fstype=swap --use-uuid
bootloader --ptable gpt --timeout=5 --append="rootwait rootfstype=ext4 console=ttyS0,115200 console=tty0"
diff --git a/import-layers/yocto-poky/scripts/lib/wic/engine.py b/import-layers/yocto-poky/scripts/lib/wic/engine.py
index f59821fea..edcfab39e 100644
--- a/import-layers/yocto-poky/scripts/lib/wic/engine.py
+++ b/import-layers/yocto-poky/scripts/lib/wic/engine.py
@@ -30,10 +30,17 @@
import logging
import os
+import tempfile
+import json
+import subprocess
+
+from collections import namedtuple, OrderedDict
+from distutils.spawn import find_executable
from wic import WicError
+from wic.filemap import sparse_copy
from wic.pluginbase import PluginMgr
-from wic.utils.misc import get_bitbake_var
+from wic.misc import get_bitbake_var, exec_cmd
logger = logging.getLogger('wic')
@@ -201,17 +208,18 @@ def wic_list(args, scripts_path):
"""
Print the list of images or source plugins.
"""
- if len(args) < 1:
+ if args.list_type is None:
return False
- if args == ["images"]:
+ if args.list_type == "images":
+
list_canned_images(scripts_path)
return True
- elif args == ["source-plugins"]:
+ elif args.list_type == "source-plugins":
list_source_plugins()
return True
- elif len(args) == 2 and args[1] == "help":
- wks_file = args[0]
+ elif len(args.help_for) == 1 and args.help_for[0] == 'help':
+ wks_file = args.list_type
fullpath = find_canned_image(scripts_path, wks_file)
if not fullpath:
raise WicError("No image named %s found, exiting. "
@@ -224,6 +232,306 @@ def wic_list(args, scripts_path):
return False
+
+class Disk:
+ def __init__(self, imagepath, native_sysroot, fstypes=('fat', 'ext')):
+ self.imagepath = imagepath
+ self.native_sysroot = native_sysroot
+ self.fstypes = fstypes
+ self._partitions = None
+ self._partimages = {}
+ self._lsector_size = None
+ self._psector_size = None
+ self._ptable_format = None
+
+ # find parted
+ self.paths = "/bin:/usr/bin:/usr/sbin:/sbin/"
+ if native_sysroot:
+ for path in self.paths.split(':'):
+ self.paths = "%s%s:%s" % (native_sysroot, path, self.paths)
+
+ self.parted = find_executable("parted", self.paths)
+ if not self.parted:
+ raise WicError("Can't find executable parted")
+
+ self.partitions = self.get_partitions()
+
+ def __del__(self):
+ for path in self._partimages.values():
+ os.unlink(path)
+
+ def get_partitions(self):
+ if self._partitions is None:
+ self._partitions = OrderedDict()
+ out = exec_cmd("%s -sm %s unit B print" % (self.parted, self.imagepath))
+ parttype = namedtuple("Part", "pnum start end size fstype")
+ splitted = out.splitlines()
+ lsector_size, psector_size, self._ptable_format = splitted[1].split(":")[3:6]
+ self._lsector_size = int(lsector_size)
+ self._psector_size = int(psector_size)
+ for line in splitted[2:]:
+ pnum, start, end, size, fstype = line.split(':')[:5]
+ partition = parttype(int(pnum), int(start[:-1]), int(end[:-1]),
+ int(size[:-1]), fstype)
+ self._partitions[pnum] = partition
+
+ return self._partitions
+
+ def __getattr__(self, name):
+ """Get path to the executable in a lazy way."""
+ if name in ("mdir", "mcopy", "mdel", "mdeltree", "sfdisk", "e2fsck",
+ "resize2fs", "mkswap", "mkdosfs", "debugfs"):
+ aname = "_%s" % name
+ if aname not in self.__dict__:
+ setattr(self, aname, find_executable(name, self.paths))
+ if aname not in self.__dict__:
+ raise WicError("Can't find executable {}".format(name))
+ return self.__dict__[aname]
+ return self.__dict__[name]
+
+ def _get_part_image(self, pnum):
+ if pnum not in self.partitions:
+            raise WicError("Partition %s is not in the image" % pnum)
+ part = self.partitions[pnum]
+ # check if fstype is supported
+ for fstype in self.fstypes:
+ if part.fstype.startswith(fstype):
+ break
+ else:
+ raise WicError("Not supported fstype: {}".format(part.fstype))
+ if pnum not in self._partimages:
+ tmpf = tempfile.NamedTemporaryFile(prefix="wic-part")
+ dst_fname = tmpf.name
+ tmpf.close()
+ sparse_copy(self.imagepath, dst_fname, skip=part.start, length=part.size)
+ self._partimages[pnum] = dst_fname
+
+ return self._partimages[pnum]
+
+ def _put_part_image(self, pnum):
+ """Put partition image into partitioned image."""
+ sparse_copy(self._partimages[pnum], self.imagepath,
+ seek=self.partitions[pnum].start)
+
+ def dir(self, pnum, path):
+ if self.partitions[pnum].fstype.startswith('ext'):
+ return exec_cmd("{} {} -R 'ls -l {}'".format(self.debugfs,
+ self._get_part_image(pnum),
+ path), as_shell=True)
+ else: # fat
+ return exec_cmd("{} -i {} ::{}".format(self.mdir,
+ self._get_part_image(pnum),
+ path))
+
+ def copy(self, src, pnum, path):
+ """Copy partition image into wic image."""
+ if self.partitions[pnum].fstype.startswith('ext'):
+ cmd = "echo -e 'cd {}\nwrite {} {}' | {} -w {}".\
+ format(path, src, os.path.basename(src),
+ self.debugfs, self._get_part_image(pnum))
+ else: # fat
+ cmd = "{} -i {} -snop {} ::{}".format(self.mcopy,
+ self._get_part_image(pnum),
+ src, path)
+ exec_cmd(cmd, as_shell=True)
+ self._put_part_image(pnum)
+
+ def remove(self, pnum, path):
+ """Remove files/dirs from the partition."""
+ partimg = self._get_part_image(pnum)
+ if self.partitions[pnum].fstype.startswith('ext'):
+ exec_cmd("{} {} -wR 'rm {}'".format(self.debugfs,
+ self._get_part_image(pnum),
+ path), as_shell=True)
+ else: # fat
+ cmd = "{} -i {} ::{}".format(self.mdel, partimg, path)
+ try:
+ exec_cmd(cmd)
+ except WicError as err:
+ if "not found" in str(err) or "non empty" in str(err):
+                    # mdel outputs 'File ... not found' or 'directory .. non empty'
+ # try to use mdeltree as path could be a directory
+ cmd = "{} -i {} ::{}".format(self.mdeltree,
+ partimg, path)
+ exec_cmd(cmd)
+ else:
+ raise err
+ self._put_part_image(pnum)
+
+ def write(self, target, expand):
+ """Write disk image to the media or file."""
+ def write_sfdisk_script(outf, parts):
+ for key, val in parts['partitiontable'].items():
+ if key in ("partitions", "device", "firstlba", "lastlba"):
+ continue
+ if key == "id":
+ key = "label-id"
+ outf.write("{}: {}\n".format(key, val))
+ outf.write("\n")
+ for part in parts['partitiontable']['partitions']:
+ line = ''
+ for name in ('attrs', 'name', 'size', 'type', 'uuid'):
+ if name == 'size' and part['type'] == 'f':
+ # don't write size for extended partition
+ continue
+ val = part.get(name)
+ if val:
+ line += '{}={}, '.format(name, val)
+ if line:
+ line = line[:-2] # strip ', '
+ if part.get('bootable'):
+ line += ' ,bootable'
+ outf.write("{}\n".format(line))
+ outf.flush()
+
+ def read_ptable(path):
+ out = exec_cmd("{} -dJ {}".format(self.sfdisk, path))
+ return json.loads(out)
+
+ def write_ptable(parts, target):
+ with tempfile.NamedTemporaryFile(prefix="wic-sfdisk-", mode='w') as outf:
+ write_sfdisk_script(outf, parts)
+ cmd = "{} --no-reread {} < {} 2>/dev/null".format(self.sfdisk, target, outf.name)
+ try:
+ subprocess.check_output(cmd, shell=True)
+ except subprocess.CalledProcessError as err:
+ raise WicError("Can't run '{}' command: {}".format(cmd, err))
+
+ if expand is None:
+ sparse_copy(self.imagepath, target)
+ else:
+ # copy first sectors that may contain bootloader
+ sparse_copy(self.imagepath, target, length=2048 * self._lsector_size)
+
+ # copy source partition table to the target
+ parts = read_ptable(self.imagepath)
+ write_ptable(parts, target)
+
+ # get size of unpartitioned space
+ free = None
+ for line in exec_cmd("{} -F {}".format(self.sfdisk, target)).splitlines():
+ if line.startswith("Unpartitioned space ") and line.endswith("sectors"):
+ free = int(line.split()[-2])
+ if free is None:
+ raise WicError("Can't get size of unpartitioned space")
+
+ # calculate expanded partitions sizes
+ sizes = {}
+ for num, part in enumerate(parts['partitiontable']['partitions'], 1):
+ if num in expand:
+ if expand[num] != 0: # don't resize partition if size is set to 0
+ sectors = expand[num] // self._lsector_size
+ free -= sectors - part['size']
+ part['size'] = sectors
+ sizes[num] = sectors
+ elif part['type'] != 'f':
+ sizes[num] = -1
+
+ for num, part in enumerate(parts['partitiontable']['partitions'], 1):
+ if sizes.get(num) == -1:
+ part['size'] += free // len(sizes)
+
+ # write resized partition table to the target
+ write_ptable(parts, target)
+
+ # read resized partition table
+ parts = read_ptable(target)
+
+ # copy partitions content
+ for num, part in enumerate(parts['partitiontable']['partitions'], 1):
+ pnum = str(num)
+ fstype = self.partitions[pnum].fstype
+
+ # copy unchanged partition
+ if part['size'] == self.partitions[pnum].size // self._lsector_size:
+ logger.info("copying unchanged partition {}".format(pnum))
+ sparse_copy(self._get_part_image(pnum), target, seek=part['start'] * self._lsector_size)
+ continue
+
+ # resize or re-create partitions
+ if fstype.startswith('ext') or fstype.startswith('fat') or \
+ fstype.startswith('linux-swap'):
+
+ partfname = None
+ with tempfile.NamedTemporaryFile(prefix="wic-part{}-".format(pnum)) as partf:
+ partfname = partf.name
+
+ if fstype.startswith('ext'):
+ logger.info("resizing ext partition {}".format(pnum))
+ partimg = self._get_part_image(pnum)
+ sparse_copy(partimg, partfname)
+ exec_cmd("{} -pf {}".format(self.e2fsck, partfname))
+ exec_cmd("{} {} {}s".format(\
+ self.resize2fs, partfname, part['size']))
+ elif fstype.startswith('fat'):
+ logger.info("copying content of the fat partition {}".format(pnum))
+ with tempfile.TemporaryDirectory(prefix='wic-fatdir-') as tmpdir:
+ # copy content to the temporary directory
+ cmd = "{} -snompi {} :: {}".format(self.mcopy,
+ self._get_part_image(pnum),
+ tmpdir)
+ exec_cmd(cmd)
+ # create new msdos partition
+ label = part.get("name")
+ label_str = "-n {}".format(label) if label else ''
+
+ cmd = "{} {} -C {} {}".format(self.mkdosfs, label_str, partfname,
+ part['size'])
+ exec_cmd(cmd)
+ # copy content from the temporary directory to the new partition
+ cmd = "{} -snompi {} {}/* ::".format(self.mcopy, partfname, tmpdir)
+ exec_cmd(cmd, as_shell=True)
+ elif fstype.startswith('linux-swap'):
+ logger.info("creating swap partition {}".format(pnum))
+ label = part.get("name")
+ label_str = "-L {}".format(label) if label else ''
+ uuid = part.get("uuid")
+ uuid_str = "-U {}".format(uuid) if uuid else ''
+ with open(partfname, 'w') as sparse:
+ os.ftruncate(sparse.fileno(), part['size'] * self._lsector_size)
+ exec_cmd("{} {} {} {}".format(self.mkswap, label_str, uuid_str, partfname))
+ sparse_copy(partfname, target, seek=part['start'] * self._lsector_size)
+ os.unlink(partfname)
+ elif part['type'] != 'f':
+ logger.warn("skipping partition {}: unsupported fstype {}".format(pnum, fstype))
+
+def wic_ls(args, native_sysroot):
+ """List contents of partitioned image or vfat partition."""
+ disk = Disk(args.path.image, native_sysroot)
+ if not args.path.part:
+ if disk.partitions:
+ print('Num Start End Size Fstype')
+ for part in disk.partitions.values():
+ print("{:2d} {:12d} {:12d} {:12d} {}".format(\
+ part.pnum, part.start, part.end,
+ part.size, part.fstype))
+ else:
+ path = args.path.path or '/'
+ print(disk.dir(args.path.part, path))
+
+def wic_cp(args, native_sysroot):
+ """
+ Copy local file or directory to the vfat partition of
+ partitioned image.
+ """
+ disk = Disk(args.dest.image, native_sysroot)
+ disk.copy(args.src, args.dest.part, args.dest.path)
+
+def wic_rm(args, native_sysroot):
+ """
+ Remove files or directories from the vfat partition of
+ partitioned image.
+ """
+ disk = Disk(args.path.image, native_sysroot)
+ disk.remove(args.path.part, args.path.path)
+
+def wic_write(args, native_sysroot):
+ """
+ Write image to a target device.
+ """
+ disk = Disk(args.image, native_sysroot, ('fat', 'ext', 'swap'))
+ disk.write(args.target, args.expand)
+
def find_canned(scripts_path, file_name):
"""
Find a file either by its path or by name in the canned files dir.
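
Note: the new Disk helper above is what the wic ls/cp/rm/write subcommands are built on. A short sketch mirroring wic_ls(); the image path and native sysroot are placeholders:

    from wic.engine import Disk

    disk = Disk('core-image-minimal-qemux86-64.wic',
                '/path/to/recipe-sysroot-native')
    for part in disk.partitions.values():
        print(part.pnum, part.start, part.end, part.size, part.fstype)
    print(disk.dir('1', '/'))    # partition numbers are keyed as strings
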
diff --git a/import-layers/yocto-poky/scripts/lib/wic/filemap.py b/import-layers/yocto-poky/scripts/lib/wic/filemap.py
index 1f1aacc52..77e32b9ad 100644
--- a/import-layers/yocto-poky/scripts/lib/wic/filemap.py
+++ b/import-layers/yocto-poky/scripts/lib/wic/filemap.py
@@ -34,13 +34,9 @@ def get_block_size(file_obj):
Returns block size for file object 'file_obj'. Errors are indicated by the
'IOError' exception.
"""
-
- from fcntl import ioctl
- import struct
-
# Get the block size of the host file-system for the image file by calling
# the FIGETBSZ ioctl (number 2).
- binary_data = ioctl(file_obj, 2, struct.pack('I', 0))
+ binary_data = fcntl.ioctl(file_obj, 2, struct.pack('I', 0))
return struct.unpack('I', binary_data)[0]
class ErrorNotSupp(Exception):
@@ -228,7 +224,7 @@ class FilemapSeek(_FilemapBase):
try:
tmp_obj = tempfile.TemporaryFile("w+", dir=directory)
except IOError as err:
- raise ErrorNotSupp("cannot create a temporary in \"%s\": %s"
+ raise ErrorNotSupp("cannot create a temporary in \"%s\": %s" \
% (directory, err))
try:
@@ -530,8 +526,18 @@ def filemap(image, log=None):
except ErrorNotSupp:
return FilemapSeek(image, log)
-def sparse_copy(src_fname, dst_fname, offset=0, skip=0, api=None):
- """Efficiently copy sparse file to or into another file."""
+def sparse_copy(src_fname, dst_fname, skip=0, seek=0,
+ length=0, api=None):
+ """
+ Efficiently copy sparse file to or into another file.
+
+ src_fname: path to source file
+ dst_fname: path to destination file
+    skip: skip N bytes at the start of src
+ seek: seek N bytes from the start of dst
+ length: read N bytes from src and write them to dst
+ api: FilemapFiemap or FilemapSeek object
+ """
if not api:
api = filemap
fmap = api(src_fname)
@@ -539,17 +545,32 @@ def sparse_copy(src_fname, dst_fname, offset=0, skip=0, api=None):
dst_file = open(dst_fname, 'r+b')
except IOError:
dst_file = open(dst_fname, 'wb')
- dst_file.truncate(os.path.getsize(src_fname))
+ if length:
+ dst_size = length + seek
+ else:
+ dst_size = os.path.getsize(src_fname) + seek - skip
+ dst_file.truncate(dst_size)
+ written = 0
for first, last in fmap.get_mapped_ranges(0, fmap.blocks_cnt):
start = first * fmap.block_size
end = (last + 1) * fmap.block_size
+ if skip >= end:
+ continue
+
if start < skip < end:
- fmap._f_image.seek(skip, os.SEEK_SET)
- else:
- fmap._f_image.seek(start, os.SEEK_SET)
- dst_file.seek(offset + start, os.SEEK_SET)
+ start = skip
+
+ fmap._f_image.seek(start, os.SEEK_SET)
+
+ written += start - skip - written
+ if length and written >= length:
+ dst_file.seek(seek + length, os.SEEK_SET)
+ dst_file.close()
+ return
+
+ dst_file.seek(seek + start - skip, os.SEEK_SET)
chunk_size = 1024 * 1024
to_read = end - start
@@ -558,7 +579,14 @@ def sparse_copy(src_fname, dst_fname, offset=0, skip=0, api=None):
while read < to_read:
if read + chunk_size > to_read:
chunk_size = to_read - read
- chunk = fmap._f_image.read(chunk_size)
+ size = chunk_size
+ if length and written + size > length:
+ size = length - written
+ chunk = fmap._f_image.read(size)
dst_file.write(chunk)
- read += chunk_size
+ read += size
+ written += size
+ if written == length:
+ dst_file.close()
+ return
dst_file.close()
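
Note: sparse_copy() now takes separate skip/seek/length arguments instead of a single offset. A hedged call sketch with placeholder filenames:

    from wic.filemap import sparse_copy

    sparse_copy('partition.img', 'disk.img',
                skip=4096,            # skip N bytes at the start of src
                seek=1048576,         # seek N bytes from the start of dst
                length=10 * 1048576)  # copy at most N bytes
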
diff --git a/import-layers/yocto-poky/scripts/lib/wic/help.py b/import-layers/yocto-poky/scripts/lib/wic/help.py
index d6e027d25..2ac45e052 100644
--- a/import-layers/yocto-poky/scripts/lib/wic/help.py
+++ b/import-layers/yocto-poky/scripts/lib/wic/help.py
@@ -56,7 +56,7 @@ def wic_help(args, usage_str, subcommands):
"""
Subcommand help dispatcher.
"""
- if len(args) == 1 or not display_help(args[1], subcommands):
+ if args.help_topic == None or not display_help(args.help_topic, subcommands):
print(usage_str)
@@ -82,19 +82,20 @@ def invoke_subcommand(args, parser, main_command_usage, subcommands):
Dispatch to subcommand handler borrowed from combo-layer.
Should use argparse, but has to work in 2.6.
"""
- if not args:
+ if not args.command:
logger.error("No subcommand specified, exiting")
parser.print_help()
return 1
- elif args[0] == "help":
+ elif args.command == "help":
wic_help(args, main_command_usage, subcommands)
- elif args[0] not in subcommands:
- logger.error("Unsupported subcommand %s, exiting\n", args[0])
+ elif args.command not in subcommands:
+ logger.error("Unsupported subcommand %s, exiting\n", args.command)
parser.print_help()
return 1
else:
- usage = subcommands.get(args[0], subcommand_error)[1]
- subcommands.get(args[0], subcommand_error)[0](args[1:], usage)
+ subcmd = subcommands.get(args.command, subcommand_error)
+ usage = subcmd[1]
+ subcmd[0](args, usage)
##
@@ -130,10 +131,10 @@ wic_create_usage = """
Create a new OpenEmbedded image
usage: wic create <wks file or image name> [-o <DIRNAME> | --outdir <DIRNAME>]
- [-i <JSON PROPERTY FILE> | --infile <JSON PROPERTY_FILE>]
[-e | --image-name] [-s, --skip-build-check] [-D, --debug]
[-r, --rootfs-dir] [-b, --bootimg-dir]
[-k, --kernel-dir] [-n, --native-sysroot] [-f, --build-rootfs]
+ [-c, --compress-with] [-m, --bmap]
This command creates an OpenEmbedded image based on the 'OE kickstart
commands' found in the <wks file>.
@@ -154,7 +155,7 @@ SYNOPSIS
[-e | --image-name] [-s, --skip-build-check] [-D, --debug]
[-r, --rootfs-dir] [-b, --bootimg-dir]
[-k, --kernel-dir] [-n, --native-sysroot] [-f, --build-rootfs]
- [-c, --compress-with] [-m, --bmap]
+ [-c, --compress-with] [-m, --bmap] [--no-fstab-update]
DESCRIPTION
This command creates an OpenEmbedded image based on the 'OE
@@ -226,6 +227,11 @@ DESCRIPTION
The -m option is used to produce .bmap file for the image. This file
can be used to flash image using bmaptool utility.
+
+    The --no-fstab-update option is used to prevent wic from modifying the
+    fstab file. With this option the final fstab file will be the same as the
+    one in the rootfs; wic will not update it, e.g. by adding new mount
+    points. The fstab content can then be controlled via the base-files recipe.
"""
wic_list_usage = """
@@ -283,6 +289,230 @@ DESCRIPTION
details.
"""
+wic_ls_usage = """
+
+ List content of a partitioned image
+
+ usage: wic ls <image>[:<partition>[<path>]] [--native-sysroot <path>]
+
+    This command outputs either a list of image partitions or the directory contents
+ of vfat and ext* partitions.
+
+ See 'wic help ls' for more detailed instructions.
+
+"""
+
+wic_ls_help = """
+
+NAME
+ wic ls - List contents of partitioned image or partition
+
+SYNOPSIS
+ wic ls <image>
+ wic ls <image>:<vfat or ext* partition>
+ wic ls <image>:<vfat or ext* partition><path>
+ wic ls <image>:<vfat or ext* partition><path> --native-sysroot <path>
+
+DESCRIPTION
+ This command lists either partitions of the image or directory contents
+ of vfat or ext* partitions.
+
+    In its first form it lists the partitions of the image.
+ For example:
+ $ wic ls tmp/deploy/images/qemux86-64/core-image-minimal-qemux86-64.wic
+ Num Start End Size Fstype
+ 1 1048576 24438783 23390208 fat16
+ 2 25165824 50315263 25149440 ext4
+
+    The second and third forms list the directory contents of the partition:
+ $ wic ls tmp/deploy/images/qemux86-64/core-image-minimal-qemux86-64.wic:1
+ Volume in drive : is boot
+ Volume Serial Number is 2DF2-5F02
+ Directory for ::/
+
+ efi <DIR> 2017-05-11 10:54
+ startup nsh 26 2017-05-11 10:54
+ vmlinuz 6922288 2017-05-11 10:54
+ 3 files 6 922 314 bytes
+ 15 818 752 bytes free
+
+
+ $ wic ls tmp/deploy/images/qemux86-64/core-image-minimal-qemux86-64.wic:1/EFI/boot/
+ Volume in drive : is boot
+ Volume Serial Number is 2DF2-5F02
+ Directory for ::/EFI/boot
+
+ . <DIR> 2017-05-11 10:54
+ .. <DIR> 2017-05-11 10:54
+ grub cfg 679 2017-05-11 10:54
+ bootx64 efi 571392 2017-05-11 10:54
+ 4 files 572 071 bytes
+ 15 818 752 bytes free
+
+ The -n option is used to specify the path to the native sysroot
+    containing the tools (parted and mtools) to use.
+
+"""
+
+wic_cp_usage = """
+
+ Copy files and directories to the vfat or ext* partition
+
+ usage: wic cp <src> <image>:<partition>[<path>] [--native-sysroot <path>]
+
+ This command copies local files or directories to the vfat or ext* partitions
+    of the partitioned image.
+
+ See 'wic help cp' for more detailed instructions.
+
+"""
+
+wic_cp_help = """
+
+NAME
+ wic cp - copy files and directories to the vfat or ext* partitions
+
+SYNOPSIS
+ wic cp <src> <image>:<partition>
+ wic cp <src> <image>:<partition><path>
+ wic cp <src> <image>:<partition><path> --native-sysroot <path>
+
+DESCRIPTION
+ This command copies files and directories to the vfat or ext* partition of
+ the partitioned image.
+
+    The first form copies a file or directory to the root directory of
+ the partition:
+ $ wic cp test.wks tmp/deploy/images/qemux86-64/core-image-minimal-qemux86-64.wic:1
+ $ wic ls tmp/deploy/images/qemux86-64/core-image-minimal-qemux86-64.wic:1
+ Volume in drive : is boot
+ Volume Serial Number is DB4C-FD4C
+ Directory for ::/
+
+ efi <DIR> 2017-05-24 18:15
+ loader <DIR> 2017-05-24 18:15
+ startup nsh 26 2017-05-24 18:15
+ vmlinuz 6926384 2017-05-24 18:15
+ test wks 628 2017-05-24 21:22
+ 5 files 6 927 038 bytes
+ 15 677 440 bytes free
+
+    The second form copies a file or directory to the specified directory
+ on the partition:
+ $ wic cp test tmp/deploy/images/qemux86-64/core-image-minimal-qemux86-64.wic:1/efi/
+ $ wic ls tmp/deploy/images/qemux86-64/core-image-minimal-qemux86-64.wic:1/efi/
+ Volume in drive : is boot
+ Volume Serial Number is DB4C-FD4C
+ Directory for ::/efi
+
+ . <DIR> 2017-05-24 18:15
+ .. <DIR> 2017-05-24 18:15
+ boot <DIR> 2017-05-24 18:15
+ test <DIR> 2017-05-24 21:27
+ 4 files 0 bytes
+ 15 675 392 bytes free
+
+ The -n option is used to specify the path to the native sysroot
+    containing the tools (parted and mtools) to use.
+"""
+
+wic_rm_usage = """
+
+ Remove files or directories from the vfat or ext* partitions
+
+ usage: wic rm <image>:<partition><path> [--native-sysroot <path>]
+
+ This command removes files or directories from the vfat or ext* partitions of
+ the partitioned image.
+
+ See 'wic help rm' for more detailed instructions.
+
+"""
+
+wic_rm_help = """
+
+NAME
+ wic rm - remove files or directories from the vfat or ext* partitions
+
+SYNOPSIS
+    wic rm <image>:<partition><path>
+    wic rm <image>:<partition><path> --native-sysroot <path>
+
+DESCRIPTION
+ This command removes files or directories from the vfat or ext* partition of the
+ partitioned image:
+
+ $ wic ls ./tmp/deploy/images/qemux86-64/core-image-minimal-qemux86-64.wic:1
+ Volume in drive : is boot
+ Volume Serial Number is 11D0-DE21
+ Directory for ::/
+
+ libcom32 c32 186500 2017-06-02 15:15
+ libutil c32 24148 2017-06-02 15:15
+ syslinux cfg 209 2017-06-02 15:15
+ vesamenu c32 27104 2017-06-02 15:15
+ vmlinuz 6926384 2017-06-02 15:15
+ 5 files 7 164 345 bytes
+ 16 582 656 bytes free
+
+ $ wic rm ./tmp/deploy/images/qemux86-64/core-image-minimal-qemux86-64.wic:1/libutil.c32
+
+ $ wic ls ./tmp/deploy/images/qemux86-64/core-image-minimal-qemux86-64.wic:1
+ Volume in drive : is boot
+ Volume Serial Number is 11D0-DE21
+ Directory for ::/
+
+ libcom32 c32 186500 2017-06-02 15:15
+ syslinux cfg 209 2017-06-02 15:15
+ vesamenu c32 27104 2017-06-02 15:15
+ vmlinuz 6926384 2017-06-02 15:15
+ 4 files 7 140 197 bytes
+ 16 607 232 bytes free
+
+ The -n option is used to specify the path to the native sysroot
+    containing the tools (parted and mtools) to use.
+"""
+
+wic_write_usage = """
+
+ Write image to a device
+
+ usage: wic write <image> <target device> [--expand [rules]] [--native-sysroot <path>]
+
+    This command writes a partitioned image to a target device (USB stick, SD card etc).
+
+ See 'wic help write' for more detailed instructions.
+
+"""
+
+wic_write_help = """
+
+NAME
+ wic write - write an image to a device
+
+SYNOPSIS
+ wic write <image> <target>
+ wic write <image> <target> --expand auto
+ wic write <image> <target> --expand 1:100M-2:300M
+ wic write <image> <target> --native-sysroot <path>
+
+DESCRIPTION
+ This command writes an image to a target device (USB stick, SD card etc)
+
+ $ wic write ./tmp/deploy/images/qemux86-64/core-image-minimal-qemux86-64.wic /dev/sdb
+
+ The --expand option is used to resize image partitions.
+ --expand auto expands partitions to occupy all free space available on the target device.
+ It's also possible to specify expansion rules in a format
+ <partition>:<size>[-<partition>:<size>...] for one or more partitions.
+    Specifying size 0 will keep the partition unmodified.
+    Note: Resizing the boot partition can result in a non-bootable image for
+    non-EFI images. It is recommended to use size 0 for the boot partition to
+    keep the image bootable.
+
+ The --native-sysroot option is used to specify the path to the native sysroot
+    containing the tools (parted, resize2fs) to use.
+"""
+
wic_plugins_help = """
NAME
@@ -740,6 +970,8 @@ DESCRIPTION
This option cannot be used with --fixed-size
option.
+ --part-name: This option is specific to wic. It specifies name for GPT partitions.
+
--part-type: This option is specific to wic. It specifies partition
type GUID for GPT partitions.
List of partition type GUIDS can be found here:
@@ -758,6 +990,12 @@ DESCRIPTION
for the harware that requires non-default partition system ids. The parameter
in one byte long hex number either with 0x prefix or without it.
+ --mkfs-extraopts: This option specifies extra options to pass to mkfs utility.
+                     Note that wic uses default options for some filesystems, for example
+                     '-S 512' for mkfs.fat or '-F -i 8192' for mkfs.ext. Those defaults are
+                     not applied when --mkfs-extraopts is used, so take that into account
+                     when passing --mkfs-extraopts.
+
* bootloader
This command allows the user to specify various bootloader
@@ -795,3 +1033,11 @@ DESCRIPTION
.wks files.
"""
+
+wic_help_help = """
+NAME
+ wic help - display a help topic
+
+DESCRIPTION
+ Specify a help topic to display it. Topics are shown above.
+"""
diff --git a/import-layers/yocto-poky/scripts/lib/wic/ksparser.py b/import-layers/yocto-poky/scripts/lib/wic/ksparser.py
index d026caad0..7850e81d2 100644
--- a/import-layers/yocto-poky/scripts/lib/wic/ksparser.py
+++ b/import-layers/yocto-poky/scripts/lib/wic/ksparser.py
@@ -114,7 +114,7 @@ def systemidtype(arg):
return arg
class KickStart():
- """"Kickstart parser implementation."""
+ """Kickstart parser implementation."""
DEFAULT_EXTRA_SPACE = 10*1024
DEFAULT_OVERHEAD_FACTOR = 1.3
@@ -139,10 +139,12 @@ class KickStart():
part.add_argument('--fstype', default='vfat',
choices=('ext2', 'ext3', 'ext4', 'btrfs',
'squashfs', 'vfat', 'msdos', 'swap'))
+ part.add_argument('--mkfs-extraopts', default='')
part.add_argument('--label')
part.add_argument('--no-table', action='store_true')
part.add_argument('--ondisk', '--ondrive', dest='disk', default='sda')
part.add_argument("--overhead-factor", type=overheadtype)
+ part.add_argument('--part-name')
part.add_argument('--part-type')
part.add_argument('--rootfs-dir')
diff --git a/import-layers/yocto-poky/scripts/lib/wic/utils/misc.py b/import-layers/yocto-poky/scripts/lib/wic/misc.py
index 37e0ad6a3..ee888b478 100644
--- a/import-layers/yocto-poky/scripts/lib/wic/utils/misc.py
+++ b/import-layers/yocto-poky/scripts/lib/wic/misc.py
@@ -29,12 +29,12 @@
import logging
import os
import re
+import subprocess
from collections import defaultdict
from distutils import spawn
from wic import WicError
-from wic.utils import runner
logger = logging.getLogger('wic')
@@ -43,6 +43,9 @@ NATIVE_RECIPES = {"bmaptool": "bmap-tools",
"grub-mkimage": "grub-efi",
"isohybrid": "syslinux",
"mcopy": "mtools",
+ "mdel" : "mtools",
+ "mdeltree" : "mtools",
+ "mdir" : "mtools",
"mkdosfs": "dosfstools",
"mkisofs": "cdrtools",
"mkfs.btrfs": "btrfs-tools",
@@ -52,13 +55,46 @@ NATIVE_RECIPES = {"bmaptool": "bmap-tools",
"mkfs.vfat": "dosfstools",
"mksquashfs": "squashfs-tools",
"mkswap": "util-linux",
- "mmd": "syslinux",
+ "mmd": "mtools",
"parted": "parted",
"sfdisk": "util-linux",
"sgdisk": "gptfdisk",
"syslinux": "syslinux"
}
+def runtool(cmdln_or_args):
+ """ wrapper for most of the subprocess calls
+ input:
+ cmdln_or_args: can be both args and cmdln str (shell=True)
+ return:
+ rc, output
+ """
+ if isinstance(cmdln_or_args, list):
+ cmd = cmdln_or_args[0]
+ shell = False
+ else:
+ import shlex
+ cmd = shlex.split(cmdln_or_args)[0]
+ shell = True
+
+ sout = subprocess.PIPE
+ serr = subprocess.STDOUT
+
+ try:
+ process = subprocess.Popen(cmdln_or_args, stdout=sout,
+ stderr=serr, shell=shell)
+ sout, serr = process.communicate()
+ # combine stdout and stderr, filter None out and decode
+ out = ''.join([out.decode('utf-8') for out in [sout, serr] if out])
+ except OSError as err:
+ if err.errno == 2:
+ # [Errno 2] No such file or directory
+ raise WicError('Cannot run command: %s, lost dependency?' % cmd)
+ else:
+ raise # relay
+
+ return process.returncode, out
+
def _exec_cmd(cmd_and_args, as_shell=False):
"""
Execute command, catching stderr, stdout
@@ -70,9 +106,9 @@ def _exec_cmd(cmd_and_args, as_shell=False):
logger.debug(args)
if as_shell:
- ret, out = runner.runtool(cmd_and_args)
+ ret, out = runtool(cmd_and_args)
else:
- ret, out = runner.runtool(args)
+ ret, out = runtool(args)
out = out.strip()
if ret != 0:
raise WicError("_exec_cmd: %s returned '%s' instead of 0\noutput: %s" % \
@@ -106,14 +142,11 @@ def exec_native_cmd(cmd_and_args, native_sysroot, pseudo=""):
if pseudo:
cmd_and_args = pseudo + cmd_and_args
- wtools_sysroot = get_bitbake_var("RECIPE_SYSROOT_NATIVE", "wic-tools")
+ native_paths = "%s/sbin:%s/usr/sbin:%s/usr/bin" % \
+ (native_sysroot, native_sysroot, native_sysroot)
- native_paths = \
- "%s/sbin:%s/usr/sbin:%s/usr/bin:%s/sbin:%s/usr/sbin:%s/usr/bin" % \
- (wtools_sysroot, wtools_sysroot, wtools_sysroot,
- native_sysroot, native_sysroot, native_sysroot)
native_cmd_and_args = "export PATH=%s:$PATH;%s" % \
- (native_paths, cmd_and_args)
+ (native_paths, cmd_and_args)
logger.debug("exec_native_cmd: %s", native_cmd_and_args)
# If the command isn't in the native sysroot say we failed.
@@ -131,8 +164,8 @@ def exec_native_cmd(cmd_and_args, native_sysroot, pseudo=""):
"was not found (see details above).\n\n" % prog
recipe = NATIVE_RECIPES.get(prog)
if recipe:
- msg += "Please make sure wic-tools have %s-native in its DEPENDS, bake it with 'bitbake wic-tools' "\
- "and try again.\n" % recipe
+ msg += "Please make sure wic-tools have %s-native in its DEPENDS, "\
+ "build it with 'bitbake wic-tools' and try again.\n" % recipe
else:
msg += "Wic failed to find a recipe to build native %s. Please "\
"file a bug against wic.\n" % prog
diff --git a/import-layers/yocto-poky/scripts/lib/wic/partition.py b/import-layers/yocto-poky/scripts/lib/wic/partition.py
index 939e66731..84fe85d62 100644
--- a/import-layers/yocto-poky/scripts/lib/wic/partition.py
+++ b/import-layers/yocto-poky/scripts/lib/wic/partition.py
@@ -26,10 +26,9 @@
import logging
import os
-import tempfile
from wic import WicError
-from wic.utils.misc import exec_cmd, exec_native_cmd, get_bitbake_var
+from wic.misc import exec_cmd, exec_native_cmd, get_bitbake_var
from wic.pluginbase import PluginMgr
logger = logging.getLogger('wic')
@@ -47,10 +46,12 @@ class Partition():
self.fsopts = args.fsopts
self.fstype = args.fstype
self.label = args.label
+ self.mkfs_extraopts = args.mkfs_extraopts
self.mountpoint = args.mountpoint
self.no_table = args.no_table
self.num = None
self.overhead_factor = args.overhead_factor
+ self.part_name = args.part_name
self.part_type = args.part_type
self.rootfs_dir = args.rootfs_dir
self.size = args.size
@@ -205,7 +206,7 @@ class Partition():
"""
p_prefix = os.environ.get("PSEUDO_PREFIX", "%s/usr" % native_sysroot)
p_localstatedir = os.environ.get("PSEUDO_LOCALSTATEDIR",
- "%s/../pseudo" % rootfs_dir)
+ "%s/../pseudo" % get_bitbake_var("IMAGE_ROOTFS"))
p_passwd = os.environ.get("PSEUDO_PASSWD", rootfs_dir)
p_nosymlinkexp = os.environ.get("PSEUDO_NOSYMLINKEXP", "1")
pseudo = "export PSEUDO_PREFIX=%s;" % p_prefix
@@ -257,14 +258,14 @@ class Partition():
with open(rootfs, 'w') as sparse:
os.ftruncate(sparse.fileno(), rootfs_size * 1024)
- extra_imagecmd = "-i 8192"
+ extraopts = self.mkfs_extraopts or "-F -i 8192"
label_str = ""
if self.label:
label_str = "-L %s" % self.label
- mkfs_cmd = "mkfs.%s -F %s %s %s -d %s" % \
- (self.fstype, extra_imagecmd, rootfs, label_str, rootfs_dir)
+ mkfs_cmd = "mkfs.%s %s %s %s -d %s" % \
+ (self.fstype, extraopts, rootfs, label_str, rootfs_dir)
exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo)
mkfs_cmd = "fsck.%s -pvfD %s" % (self.fstype, rootfs)
@@ -290,8 +291,9 @@ class Partition():
if self.label:
label_str = "-L %s" % self.label
- mkfs_cmd = "mkfs.%s -b %d -r %s %s %s" % \
- (self.fstype, rootfs_size * 1024, rootfs_dir, label_str, rootfs)
+ mkfs_cmd = "mkfs.%s -b %d -r %s %s %s %s" % \
+ (self.fstype, rootfs_size * 1024, rootfs_dir, label_str,
+ self.mkfs_extraopts, rootfs)
exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo)
def prepare_rootfs_msdos(self, rootfs, oe_builddir, rootfs_dir,
@@ -313,8 +315,10 @@ class Partition():
if self.fstype == 'msdos':
size_str = "-F 16" # FAT 16
- dosfs_cmd = "mkdosfs %s -S 512 %s -C %s %d" % (label_str, size_str,
- rootfs, rootfs_size)
+ extraopts = self.mkfs_extraopts or '-S 512'
+
+ dosfs_cmd = "mkdosfs %s %s %s -C %s %d" % \
+ (label_str, size_str, extraopts, rootfs, rootfs_size)
exec_native_cmd(dosfs_cmd, native_sysroot)
mcopy_cmd = "mcopy -i %s -s %s/* ::/" % (rootfs, rootfs_dir)
@@ -330,8 +334,9 @@ class Partition():
"""
Prepare content for a squashfs rootfs partition.
"""
- squashfs_cmd = "mksquashfs %s %s -noappend" % \
- (rootfs_dir, rootfs)
+ extraopts = self.mkfs_extraopts or '-noappend'
+ squashfs_cmd = "mksquashfs %s %s %s" % \
+ (rootfs_dir, rootfs, extraopts)
exec_native_cmd(squashfs_cmd, native_sysroot, pseudo=pseudo)
def prepare_empty_partition_ext(self, rootfs, oe_builddir,
@@ -343,14 +348,14 @@ class Partition():
with open(rootfs, 'w') as sparse:
os.ftruncate(sparse.fileno(), size * 1024)
- extra_imagecmd = "-i 8192"
+ extraopts = self.mkfs_extraopts or "-i 8192"
label_str = ""
if self.label:
label_str = "-L %s" % self.label
mkfs_cmd = "mkfs.%s -F %s %s %s" % \
- (self.fstype, extra_imagecmd, label_str, rootfs)
+ (self.fstype, extraopts, label_str, rootfs)
exec_native_cmd(mkfs_cmd, native_sysroot)
def prepare_empty_partition_btrfs(self, rootfs, oe_builddir,
@@ -366,8 +371,9 @@ class Partition():
if self.label:
label_str = "-L %s" % self.label
- mkfs_cmd = "mkfs.%s -b %d %s %s" % \
- (self.fstype, self.size * 1024, label_str, rootfs)
+ mkfs_cmd = "mkfs.%s -b %d %s %s %s" % \
+ (self.fstype, self.size * 1024, label_str,
+ self.mkfs_extraopts, rootfs)
exec_native_cmd(mkfs_cmd, native_sysroot)
def prepare_empty_partition_msdos(self, rootfs, oe_builddir,
@@ -385,8 +391,11 @@ class Partition():
if self.fstype == 'msdos':
size_str = "-F 16" # FAT 16
- dosfs_cmd = "mkdosfs %s -S 512 %s -C %s %d" % (label_str, size_str,
- rootfs, blocks)
+ extraopts = self.mkfs_extraopts or '-S 512'
+
+ dosfs_cmd = "mkdosfs %s %s %s -C %s %d" % \
+ (label_str, extraopts, size_str, rootfs, blocks)
+
exec_native_cmd(dosfs_cmd, native_sysroot)
chmod_cmd = "chmod 644 %s" % rootfs
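
Note: when --mkfs-extraopts is given, it replaces the built-in defaults rather than being appended to them. A hedged sketch of how prepare_rootfs_ext() above composes the command; the paths and options are placeholders:

    mkfs_extraopts = '-i 4096 -O ^has_journal'   # taken from the .wks part line
    extraopts = mkfs_extraopts or '-F -i 8192'   # default used only when unset
    mkfs_cmd = 'mkfs.ext4 %s %s %s -d %s' % (
        extraopts, '/tmp/rootfs.ext4', '-L platform', '/path/to/rootfs')
    print(mkfs_cmd)
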
diff --git a/import-layers/yocto-poky/scripts/lib/wic/pluginbase.py b/import-layers/yocto-poky/scripts/lib/wic/pluginbase.py
index fb3d179c2..c009820ad 100644
--- a/import-layers/yocto-poky/scripts/lib/wic/pluginbase.py
+++ b/import-layers/yocto-poky/scripts/lib/wic/pluginbase.py
@@ -24,7 +24,7 @@ from collections import defaultdict
from importlib.machinery import SourceFileLoader
from wic import WicError
-from wic.utils.misc import get_bitbake_var
+from wic.misc import get_bitbake_var
PLUGIN_TYPES = ["imager", "source"]
@@ -137,4 +137,3 @@ class SourcePlugin(metaclass=PluginMeta):
'prepares' the partition to be incorporated into the image.
"""
logger.debug("SourcePlugin: do_prepare_partition: part: %s", part)
-
diff --git a/import-layers/yocto-poky/scripts/lib/wic/plugins/imager/direct.py b/import-layers/yocto-poky/scripts/lib/wic/plugins/imager/direct.py
index f2e612733..da1c06106 100644
--- a/import-layers/yocto-poky/scripts/lib/wic/plugins/imager/direct.py
+++ b/import-layers/yocto-poky/scripts/lib/wic/plugins/imager/direct.py
@@ -26,17 +26,20 @@
import logging
import os
+import random
import shutil
import tempfile
import uuid
from time import strftime
+from oe.path import copyhardlinktree
+
from wic import WicError
from wic.filemap import sparse_copy
from wic.ksparser import KickStart, KickStartError
from wic.pluginbase import PluginMgr, ImagerPlugin
-from wic.utils.misc import get_bitbake_var, exec_cmd, exec_native_cmd
+from wic.misc import get_bitbake_var, exec_cmd, exec_native_cmd
logger = logging.getLogger('wic')
@@ -68,6 +71,7 @@ class DirectPlugin(ImagerPlugin):
self.outdir = options.outdir
self.compressor = options.compressor
self.bmap = options.bmap
+ self.no_fstab_update = options.no_fstab_update
self.name = "%s-%s" % (os.path.splitext(os.path.basename(wks_file))[0],
strftime("%Y%m%d%H%M"))
@@ -115,24 +119,33 @@ class DirectPlugin(ImagerPlugin):
fstab_lines = fstab.readlines()
if self._update_fstab(fstab_lines, self.parts):
- shutil.copyfile(fstab_path, fstab_path + ".orig")
+ # copy rootfs dir to workdir to update fstab
+ # as rootfs can be used by other tasks and can't be modified
+ new_rootfs = os.path.realpath(os.path.join(self.workdir, "rootfs_copy"))
+ copyhardlinktree(image_rootfs, new_rootfs)
+ fstab_path = os.path.join(new_rootfs, 'etc/fstab')
+
+ os.unlink(fstab_path)
with open(fstab_path, "w") as fstab:
fstab.writelines(fstab_lines)
- return fstab_path
+ return new_rootfs
def _update_fstab(self, fstab_lines, parts):
"""Assume partition order same as in wks"""
updated = False
for part in parts:
if not part.realnum or not part.mountpoint \
- or part.mountpoint in ("/", "/boot"):
+ or part.mountpoint == "/":
continue
- # mmc device partitions are named mmcblk0p1, mmcblk0p2..
- prefix = 'p' if part.disk.startswith('mmcblk') else ''
- device_name = "/dev/%s%s%d" % (part.disk, prefix, part.realnum)
+ if part.use_uuid:
+ device_name = "PARTUUID=%s" % part.uuid
+ else:
+ # mmc device partitions are named mmcblk0p1, mmcblk0p2..
+ prefix = 'p' if part.disk.startswith('mmcblk') else ''
+ device_name = "/dev/%s%s%d" % (part.disk, prefix, part.realnum)
opts = part.fsopts if part.fsopts else "defaults"
line = "\t".join([device_name, part.mountpoint, part.fstype,
@@ -156,7 +169,13 @@ class DirectPlugin(ImagerPlugin):
filesystems from the artifacts directly and combine them into
a partitioned image.
"""
- fstab_path = self._write_fstab(self.rootfs_dir.get("ROOTFS_DIR"))
+ if self.no_fstab_update:
+ new_rootfs = None
+ else:
+ new_rootfs = self._write_fstab(self.rootfs_dir.get("ROOTFS_DIR"))
+ if new_rootfs:
+ # rootfs was copied to update fstab
+ self.rootfs_dir['ROOTFS_DIR'] = new_rootfs
for part in self.parts:
# get rootfs size from bitbake variable if it's not set in .ks file
@@ -173,10 +192,6 @@ class DirectPlugin(ImagerPlugin):
part.size = int(round(float(rsize_bb)))
self._image.prepare(self)
-
- if fstab_path:
- shutil.move(fstab_path + ".orig", fstab_path)
-
self._image.layout_partitions()
self._image.create()
@@ -205,8 +220,10 @@ class DirectPlugin(ImagerPlugin):
# Generate .bmap
if self.bmap:
logger.debug("Generating bmap file for %s", disk_name)
- exec_native_cmd("bmaptool create %s -o %s.bmap" % (full_path, full_path),
- self.native_sysroot)
+ python = os.path.join(self.native_sysroot, 'usr/bin/python3-native/python3')
+ bmaptool = os.path.join(self.native_sysroot, 'usr/bin/bmaptool')
+ exec_native_cmd("%s %s create %s -o %s.bmap" % \
+ (python, bmaptool, full_path, full_path), self.native_sysroot)
# Compress the image
if self.compressor:
logger.debug("Compressing disk %s with %s", disk_name, self.compressor)
@@ -296,7 +313,7 @@ class PartitionedImage():
# all partitions (in bytes)
self.ptable_format = ptable_format # Partition table format
# Disk system identifier
- self.identifier = int.from_bytes(os.urandom(4), 'little')
+ self.identifier = random.SystemRandom().randint(1, 0xffffffff)
self.partitions = partitions
self.partimages = []
@@ -312,7 +329,7 @@ class PartitionedImage():
part.realnum = 0
else:
realnum += 1
- if self.ptable_format == 'msdos' and realnum > 3:
+ if self.ptable_format == 'msdos' and realnum > 3 and len(partitions) > 4:
part.realnum = realnum + 1
continue
part.realnum = realnum
@@ -352,6 +369,10 @@ class PartitionedImage():
for num in range(len(self.partitions)):
part = self.partitions[num]
+ if self.ptable_format == 'msdos' and part.part_name:
+ raise WicError("setting custom partition name is not " \
+ "implemented for msdos partitions")
+
if self.ptable_format == 'msdos' and part.part_type:
# The --part-type can also be implemented for MBR partitions,
# in which case it would map to the 1-byte "partition type"
@@ -505,6 +526,13 @@ class PartitionedImage():
self._create_partition(self.path, part.type,
parted_fs_type, part.start, part.size_sec)
+ if part.part_name:
+ logger.debug("partition %d: set name to %s",
+ part.num, part.part_name)
+ exec_native_cmd("sgdisk --change-name=%d:%s %s" % \
+ (part.num, part.part_name,
+ self.path), self.native_sysroot)
+
if part.part_type:
logger.debug("partition %d: set type UID to %s",
part.num, part.part_type)
@@ -550,7 +578,7 @@ class PartitionedImage():
source = part.source_file
if source:
# install source_file contents into a partition
- sparse_copy(source, self.path, part.start * self.sector_size)
+ sparse_copy(source, self.path, seek=part.start * self.sector_size)
logger.debug("Installed %s in partition %d, sectors %d-%d, "
"size %d sectors", source, part.num, part.start,
diff --git a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/bootimg-efi.py b/import-layers/yocto-poky/scripts/lib/wic/plugins/source/bootimg-efi.py
index 9879cb9fc..4c4f36a32 100644
--- a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/bootimg-efi.py
+++ b/import-layers/yocto-poky/scripts/lib/wic/plugins/source/bootimg-efi.py
@@ -31,8 +31,8 @@ import shutil
from wic import WicError
from wic.engine import get_custom_config
from wic.pluginbase import SourcePlugin
-from wic.utils.misc import (exec_cmd, exec_native_cmd, get_bitbake_var,
- BOOTDD_EXTRA_SPACE)
+from wic.misc import (exec_cmd, exec_native_cmd,
+ get_bitbake_var, BOOTDD_EXTRA_SPACE)
logger = logging.getLogger('wic')
diff --git a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/bootimg-partition.py b/import-layers/yocto-poky/scripts/lib/wic/plugins/source/bootimg-partition.py
index 13fddbd47..67e5498d5 100644
--- a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/bootimg-partition.py
+++ b/import-layers/yocto-poky/scripts/lib/wic/plugins/source/bootimg-partition.py
@@ -31,7 +31,7 @@ from glob import glob
from wic import WicError
from wic.pluginbase import SourcePlugin
-from wic.utils.misc import exec_cmd, get_bitbake_var
+from wic.misc import exec_cmd, get_bitbake_var
logger = logging.getLogger('wic')
@@ -54,7 +54,7 @@ class BootimgPartitionPlugin(SourcePlugin):
- sets up a vfat partition
- copies all files listed in IMAGE_BOOT_FILES variable
"""
- hdddir = "%s/boot" % cr_workdir
+ hdddir = "%s/boot.%d" % (cr_workdir, part.lineno)
install_cmd = "install -d %s" % hdddir
exec_cmd(install_cmd)
@@ -65,10 +65,19 @@ class BootimgPartitionPlugin(SourcePlugin):
logger.debug('Kernel dir: %s', bootimg_dir)
- boot_files = get_bitbake_var("IMAGE_BOOT_FILES")
+ boot_files = None
+ for (fmt, id) in (("_uuid-%s", part.uuid), ("_label-%s", part.label), (None, None)):
+ if fmt:
+ var = fmt % id
+ else:
+ var = ""
+
+ boot_files = get_bitbake_var("IMAGE_BOOT_FILES" + var)
+ if boot_files is not None:
+ break
- if not boot_files:
- raise WicError('No boot files defined, IMAGE_BOOT_FILES unset')
+ if boot_files is None:
+ raise WicError('No boot files defined, IMAGE_BOOT_FILES unset for entry #%d' % part.lineno)
logger.debug('Boot files: %s', boot_files)
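Editor's note: the lookup order introduced above is IMAGE_BOOT_FILES_uuid-<uuid>, then IMAGE_BOOT_FILES_label-<label>, then plain IMAGE_BOOT_FILES. A standalone sketch with get_bitbake_var replaced by a dict lookup; it is simplified to skip suffixes whose uuid/label is unset, and the dict contents are hypothetical.

    def resolve_boot_files(bb_vars, part_uuid=None, part_label=None):
        """Return the first defined IMAGE_BOOT_FILES* value for this partition (sketch)."""
        for suffix in ('_uuid-%s' % part_uuid if part_uuid else None,
                       '_label-%s' % part_label if part_label else None,
                       ''):
            if suffix is None:
                continue
            value = bb_vars.get('IMAGE_BOOT_FILES' + suffix)
            if value is not None:
                return value
        return None

    bb_vars = {'IMAGE_BOOT_FILES': 'zImage',
               'IMAGE_BOOT_FILES_label-boot2': 'zImage boot.scr'}
    print(resolve_boot_files(bb_vars, part_label='boot2'))  # zImage boot.scr
    print(resolve_boot_files(bb_vars))                      # zImage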
diff --git a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/bootimg-pcbios.py b/import-layers/yocto-poky/scripts/lib/wic/plugins/source/bootimg-pcbios.py
index 5890c1267..56da468fb 100644
--- a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/bootimg-pcbios.py
+++ b/import-layers/yocto-poky/scripts/lib/wic/plugins/source/bootimg-pcbios.py
@@ -29,10 +29,9 @@ import os
from wic import WicError
from wic.engine import get_custom_config
-from wic.utils import runner
from wic.pluginbase import SourcePlugin
-from wic.utils.misc import (exec_cmd, exec_native_cmd,
- get_bitbake_var, BOOTDD_EXTRA_SPACE)
+from wic.misc import (exec_cmd, exec_native_cmd,
+ get_bitbake_var, BOOTDD_EXTRA_SPACE)
logger = logging.getLogger('wic')
@@ -46,10 +45,9 @@ class BootimgPcbiosPlugin(SourcePlugin):
@classmethod
def _get_bootimg_dir(cls, bootimg_dir, dirname):
"""
- Check if dirname exists in default bootimg_dir or
- in wic-tools STAGING_DIR.
+ Check if dirname exists in default bootimg_dir or in STAGING_DIR.
"""
- for result in (bootimg_dir, get_bitbake_var("STAGING_DATADIR", "wic-tools")):
+ for result in (bootimg_dir, get_bitbake_var("STAGING_DATADIR")):
if os.path.exists("%s/%s" % (result, dirname)):
return result
@@ -186,7 +184,7 @@ class BootimgPcbiosPlugin(SourcePlugin):
extra_blocks, part.mountpoint, blocks)
# dosfs image, created by mkdosfs
- bootimg = "%s/boot.img" % cr_workdir
+ bootimg = "%s/boot%s.img" % (cr_workdir, part.lineno)
dosfs_cmd = "mkdosfs -n boot -S 512 -C %s %d" % (bootimg, blocks)
exec_native_cmd(dosfs_cmd, native_sysroot)
diff --git a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/isoimage-isohybrid.py b/import-layers/yocto-poky/scripts/lib/wic/plugins/source/isoimage-isohybrid.py
index 1ceba62be..d6bd3bff7 100644
--- a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/isoimage-isohybrid.py
+++ b/import-layers/yocto-poky/scripts/lib/wic/plugins/source/isoimage-isohybrid.py
@@ -29,7 +29,7 @@ import shutil
from wic import WicError
from wic.engine import get_custom_config
from wic.pluginbase import SourcePlugin
-from wic.utils.misc import exec_cmd, exec_native_cmd, get_bitbake_var
+from wic.misc import exec_cmd, exec_native_cmd, get_bitbake_var
logger = logging.getLogger('wic')
@@ -95,7 +95,7 @@ class IsoImagePlugin(SourcePlugin):
cfg.write(syslinux_conf)
@classmethod
- def do_configure_grubefi(cls, part, creator, cr_workdir):
+ def do_configure_grubefi(cls, part, creator, target_dir):
"""
Create loader-specific (grub-efi) config
"""
@@ -109,7 +109,7 @@ class IsoImagePlugin(SourcePlugin):
raise WicError("configfile is specified "
"but failed to get it from %s", configfile)
else:
- splash = os.path.join(cr_workdir, "EFI/boot/splash.jpg")
+ splash = os.path.join(target_dir, "splash.jpg")
if os.path.exists(splash):
splashline = "menu background splash.jpg"
else:
@@ -137,9 +137,10 @@ class IsoImagePlugin(SourcePlugin):
if splashline:
grubefi_conf += "%s\n" % splashline
- logger.debug("Writing grubefi config %s/EFI/BOOT/grub.cfg", cr_workdir)
+ cfg_path = os.path.join(target_dir, "grub.cfg")
+ logger.debug("Writing grubefi config %s", cfg_path)
- with open("%s/EFI/BOOT/grub.cfg" % cr_workdir, "w") as cfg:
+ with open(cfg_path, "w") as cfg:
cfg.write(grubefi_conf)
@staticmethod
@@ -162,13 +163,14 @@ class IsoImagePlugin(SourcePlugin):
if not image_type:
raise WicError("Couldn't find INITRAMFS_FSTYPES, exiting.")
- target_arch = get_bitbake_var("TRANSLATED_TARGET_ARCH")
- if not target_arch:
- raise WicError("Couldn't find TRANSLATED_TARGET_ARCH, exiting.")
+ machine = os.path.basename(initrd_dir)
- initrd = glob.glob('%s/%s*%s.%s' % (initrd_dir, image_name, target_arch, image_type))[0]
+ pattern = '%s/%s*%s.%s' % (initrd_dir, image_name, machine, image_type)
+ files = glob.glob(pattern)
+ if files:
+ initrd = files[0]
- if not os.path.exists(initrd):
+ if not initrd or not os.path.exists(initrd):
# Create initrd from rootfs directory
initrd = "%s/initrd.cpio.gz" % cr_workdir
initrd_dir = "%s/INITRD" % cr_workdir
@@ -206,8 +208,8 @@ class IsoImagePlugin(SourcePlugin):
"""
isodir = "%s/ISO/" % cr_workdir
- if os.path.exists(cr_workdir):
- shutil.rmtree(cr_workdir)
+ if os.path.exists(isodir):
+ shutil.rmtree(isodir)
install_cmd = "install -d %s " % isodir
exec_cmd(install_cmd)
@@ -312,20 +314,13 @@ class IsoImagePlugin(SourcePlugin):
#Create bootloader for efi boot
try:
- if source_params['loader'] == 'grub-efi':
- # Builds grub.cfg if ISODIR didn't exist or
- # didn't contains grub.cfg
- bootimg_dir = img_iso_dir
- if not os.path.exists("%s/EFI/BOOT" % bootimg_dir):
- bootimg_dir = "%s/bootimg" % cr_workdir
- if os.path.exists(bootimg_dir):
- shutil.rmtree(bootimg_dir)
- install_cmd = "install -d %s/EFI/BOOT" % bootimg_dir
- exec_cmd(install_cmd)
-
- if not os.path.isfile("%s/EFI/BOOT/boot.cfg" % bootimg_dir):
- cls.do_configure_grubefi(part, creator, bootimg_dir)
+ target_dir = "%s/EFI/BOOT" % isodir
+ if os.path.exists(target_dir):
+ shutil.rmtree(target_dir)
+
+ os.makedirs(target_dir)
+ if source_params['loader'] == 'grub-efi':
# Builds bootx64.efi/bootia32.efi if ISODIR didn't exist or
# didn't contain it
target_arch = get_bitbake_var("TARGET_SYS")
@@ -333,37 +328,23 @@ class IsoImagePlugin(SourcePlugin):
raise WicError("Coludn't find target architecture")
if re.match("x86_64", target_arch):
- grub_target = 'x86_64-efi'
- grub_image = "bootx64.efi"
+ grub_image = "grub-efi-bootx64.efi"
elif re.match('i.86', target_arch):
- grub_target = 'i386-efi'
- grub_image = "bootia32.efi"
+ grub_image = "grub-efi-bootia32.efi"
else:
raise WicError("grub-efi is incompatible with target %s" %
target_arch)
- if not os.path.isfile("%s/EFI/BOOT/%s" \
- % (bootimg_dir, grub_image)):
- grub_path = get_bitbake_var("STAGING_LIBDIR", "wic-tools")
- if not grub_path:
- raise WicError("Couldn't find STAGING_LIBDIR, exiting.")
-
- grub_core = "%s/grub/%s" % (grub_path, grub_target)
- if not os.path.exists(grub_core):
- raise WicError("Please build grub-efi first")
-
- grub_cmd = "grub-mkimage -p '/EFI/BOOT' "
- grub_cmd += "-d %s " % grub_core
- grub_cmd += "-O %s -o %s/EFI/BOOT/%s " \
- % (grub_target, bootimg_dir, grub_image)
- grub_cmd += "part_gpt part_msdos ntfs ntfscomp fat ext2 "
- grub_cmd += "normal chain boot configfile linux multiboot "
- grub_cmd += "search efi_gop efi_uga font gfxterm gfxmenu "
- grub_cmd += "terminal minicmd test iorw loadenv echo help "
- grub_cmd += "reboot serial terminfo iso9660 loopback tar "
- grub_cmd += "memdisk ls search_fs_uuid udf btrfs xfs lvm "
- grub_cmd += "reiserfs ata "
- exec_native_cmd(grub_cmd, native_sysroot)
+ grub_target = os.path.join(target_dir, grub_image)
+ if not os.path.isfile(grub_target):
+ grub_src = os.path.join(deploy_dir, grub_image)
+ if not os.path.exists(grub_src):
+ raise WicError("Grub loader %s is not found in %s. "
+ "Please build grub-efi first" % (grub_image, deploy_dir))
+ shutil.copy(grub_src, grub_target)
+
+ if not os.path.isfile(os.path.join(target_dir, "boot.cfg")):
+ cls.do_configure_grubefi(part, creator, target_dir)
else:
raise WicError("unrecognized bootimg-efi loader: %s" %
@@ -371,15 +352,6 @@ class IsoImagePlugin(SourcePlugin):
except KeyError:
raise WicError("bootimg-efi requires a loader, none specified")
- if os.path.exists("%s/EFI/BOOT" % isodir):
- shutil.rmtree("%s/EFI/BOOT" % isodir)
-
- shutil.copytree(bootimg_dir+"/EFI/BOOT", isodir+"/EFI/BOOT")
-
- # If exists, remove cr_workdir/bootimg temporary folder
- if os.path.exists("%s/bootimg" % cr_workdir):
- shutil.rmtree("%s/bootimg" % cr_workdir)
-
# Create efi.img that contains bootloader files for EFI booting
# if ISODIR didn't exist or didn't contains it
if os.path.isfile("%s/efi.img" % img_iso_dir):
@@ -413,7 +385,7 @@ class IsoImagePlugin(SourcePlugin):
exec_cmd(chmod_cmd)
# Prepare files for legacy boot
- syslinux_dir = get_bitbake_var("STAGING_DATADIR", "wic-tools")
+ syslinux_dir = get_bitbake_var("STAGING_DATADIR")
if not syslinux_dir:
raise WicError("Couldn't find STAGING_DATADIR, exiting.")
diff --git a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/rawcopy.py b/import-layers/yocto-poky/scripts/lib/wic/plugins/source/rawcopy.py
index e1c4f5e7d..e86398ac8 100644
--- a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/rawcopy.py
+++ b/import-layers/yocto-poky/scripts/lib/wic/plugins/source/rawcopy.py
@@ -20,7 +20,7 @@ import os
from wic import WicError
from wic.pluginbase import SourcePlugin
-from wic.utils.misc import exec_cmd, get_bitbake_var
+from wic.misc import exec_cmd, get_bitbake_var
from wic.filemap import sparse_copy
logger = logging.getLogger('wic')
@@ -32,6 +32,25 @@ class RawCopyPlugin(SourcePlugin):
name = 'rawcopy'
+ @staticmethod
+ def do_image_label(fstype, dst, label):
+ if fstype.startswith('ext'):
+ cmd = 'tune2fs -L %s %s' % (label, dst)
+ elif fstype in ('msdos', 'vfat'):
+ cmd = 'dosfslabel %s %s' % (dst, label)
+ elif fstype == 'btrfs':
+ cmd = 'btrfs filesystem label %s %s' % (dst, label)
+ elif fstype == 'swap':
+ cmd = 'mkswap -L %s %s' % (label, dst)
+ elif fstype == 'squashfs':
+ raise WicError("It's not possible to update a squashfs "
+ "filesystem label '%s'" % (label))
+ else:
+ raise WicError("Cannot update filesystem label: "
+ "Unknown fstype: '%s'" % (fstype))
+
+ exec_cmd(cmd)
+
@classmethod
def do_prepare_partition(cls, part, source_params, cr, cr_workdir,
oe_builddir, bootimg_dir, kernel_dir,
@@ -66,4 +85,7 @@ class RawCopyPlugin(SourcePlugin):
if filesize > part.size:
part.size = filesize
+ if part.label:
+ RawCopyPlugin.do_image_label(part.fstype, dst, part.label)
+
part.source_file = dst
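Editor's note: the new do_image_label helper is a plain fstype dispatch; for example, an ext4 image is relabelled with tune2fs and a vfat image with dosfslabel. A standalone sketch of the same dispatch with exec_cmd replaced by returning the command string.

    def label_cmd(fstype, dst, label):
        """Return the command that would relabel the filesystem image (sketch)."""
        if fstype.startswith('ext'):
            return 'tune2fs -L %s %s' % (label, dst)
        if fstype in ('msdos', 'vfat'):
            return 'dosfslabel %s %s' % (dst, label)
        if fstype == 'btrfs':
            return 'btrfs filesystem label %s %s' % (dst, label)
        if fstype == 'swap':
            return 'mkswap -L %s %s' % (label, dst)
        raise ValueError("cannot relabel fstype %r" % fstype)

    print(label_cmd('ext4', '/tmp/rootfs.ext4', 'platform'))
    # tune2fs -L platform /tmp/rootfs.ext4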
diff --git a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/rootfs.py b/import-layers/yocto-poky/scripts/lib/wic/plugins/source/rootfs.py
index f2e2ca8a2..aec720fb2 100644
--- a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/rootfs.py
+++ b/import-layers/yocto-poky/scripts/lib/wic/plugins/source/rootfs.py
@@ -28,12 +28,13 @@
import logging
import os
import shutil
+import sys
from oe.path import copyhardlinktree
from wic import WicError
from wic.pluginbase import SourcePlugin
-from wic.utils.misc import get_bitbake_var, exec_cmd
+from wic.misc import get_bitbake_var
logger = logging.getLogger('wic')
@@ -47,7 +48,7 @@ class RootfsPlugin(SourcePlugin):
@staticmethod
def __get_rootfs_dir(rootfs_dir):
if os.path.isdir(rootfs_dir):
- return rootfs_dir
+ return os.path.realpath(rootfs_dir)
image_rootfs_dir = get_bitbake_var("IMAGE_ROOTFS", rootfs_dir)
if not os.path.isdir(image_rootfs_dir):
@@ -55,7 +56,7 @@ class RootfsPlugin(SourcePlugin):
"named %s has been found at %s, exiting." %
(rootfs_dir, image_rootfs_dir))
- return image_rootfs_dir
+ return os.path.realpath(image_rootfs_dir)
@classmethod
def do_prepare_partition(cls, part, source_params, cr, cr_workdir,
@@ -80,25 +81,25 @@ class RootfsPlugin(SourcePlugin):
raise WicError("Couldn't find --rootfs-dir=%s connection or "
"it is not a valid path, exiting" % part.rootfs_dir)
- real_rootfs_dir = cls.__get_rootfs_dir(rootfs_dir)
+ part.rootfs_dir = cls.__get_rootfs_dir(rootfs_dir)
+ new_rootfs = None
# Handle excluded paths.
if part.exclude_path is not None:
# We need a new rootfs directory we can delete files from. Copy to
# workdir.
- new_rootfs = os.path.realpath(os.path.join(cr_workdir, "rootfs"))
+ new_rootfs = os.path.realpath(os.path.join(cr_workdir, "rootfs%d" % part.lineno))
if os.path.lexists(new_rootfs):
shutil.rmtree(os.path.join(new_rootfs))
- copyhardlinktree(real_rootfs_dir, new_rootfs)
-
- real_rootfs_dir = new_rootfs
+ copyhardlinktree(part.rootfs_dir, new_rootfs)
for orig_path in part.exclude_path:
path = orig_path
if os.path.isabs(path):
- msger.error("Must be relative: --exclude-path=%s" % orig_path)
+ logger.error("Must be relative: --exclude-path=%s" % orig_path)
+ sys.exit(1)
full_path = os.path.realpath(os.path.join(new_rootfs, path))
@@ -106,7 +107,8 @@ class RootfsPlugin(SourcePlugin):
# because doing so could be quite disastrous (we will delete the
# directory).
if not full_path.startswith(new_rootfs):
- msger.error("'%s' points to a path outside the rootfs" % orig_path)
+ logger.error("'%s' points to a path outside the rootfs" % orig_path)
+ sys.exit(1)
if path.endswith(os.sep):
# Delete content only.
@@ -120,6 +122,5 @@ class RootfsPlugin(SourcePlugin):
# Delete whole directory.
shutil.rmtree(full_path)
- part.rootfs_dir = real_rootfs_dir
part.prepare_rootfs(cr_workdir, oe_builddir,
- real_rootfs_dir, native_sysroot)
+ new_rootfs or part.rootfs_dir, native_sysroot)
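Editor's note: the exclude-path handling now copies the rootfs into a per-partition workdir and refuses paths that escape it. A standalone sketch of the safety check, with the error path simplified to exceptions; the directory names are hypothetical.

    import os

    def check_exclude_path(new_rootfs, rel_path):
        """Resolve rel_path inside new_rootfs and reject escapes (sketch)."""
        if os.path.isabs(rel_path):
            raise ValueError("Must be relative: --exclude-path=%s" % rel_path)
        full_path = os.path.realpath(os.path.join(new_rootfs, rel_path))
        if not full_path.startswith(new_rootfs):
            raise ValueError("'%s' points to a path outside the rootfs" % rel_path)
        return full_path

    root = os.path.realpath('/tmp/wic-workdir/rootfs1')
    # check_exclude_path(root, 'usr/share/doc/')   # ok: trailing '/' means delete content only
    # check_exclude_path(root, '../etc')           # raises: escapes the rootfs copy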
diff --git a/import-layers/yocto-poky/scripts/lib/wic/utils/__init__.py b/import-layers/yocto-poky/scripts/lib/wic/utils/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/import-layers/yocto-poky/scripts/lib/wic/utils/__init__.py
+++ /dev/null
diff --git a/import-layers/yocto-poky/scripts/lib/wic/utils/runner.py b/import-layers/yocto-poky/scripts/lib/wic/utils/runner.py
deleted file mode 100644
index 4aa00fbe2..000000000
--- a/import-layers/yocto-poky/scripts/lib/wic/utils/runner.py
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/usr/bin/env python -tt
-#
-# Copyright (c) 2011 Intel, Inc.
-#
-# This program is free software; you can redistribute it and/or modify it
-# under the terms of the GNU General Public License as published by the Free
-# Software Foundation; version 2 of the License
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
-# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
-# for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc., 59
-# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-import subprocess
-
-from wic import WicError
-
-def runtool(cmdln_or_args):
- """ wrapper for most of the subprocess calls
- input:
- cmdln_or_args: can be both args and cmdln str (shell=True)
- return:
- rc, output
- """
- if isinstance(cmdln_or_args, list):
- cmd = cmdln_or_args[0]
- shell = False
- else:
- import shlex
- cmd = shlex.split(cmdln_or_args)[0]
- shell = True
-
- sout = subprocess.PIPE
- serr = subprocess.STDOUT
-
- try:
- process = subprocess.Popen(cmdln_or_args, stdout=sout,
- stderr=serr, shell=shell)
- sout, serr = process.communicate()
- # combine stdout and stderr, filter None out and decode
- out = ''.join([out.decode('utf-8') for out in [sout, serr] if out])
- except OSError as err:
- if err.errno == 2:
- # [Errno 2] No such file or directory
- raise WicError('Cannot run command: %s, lost dependency?' % cmd)
- else:
- raise # relay
-
- return process.returncode, out
diff --git a/import-layers/yocto-poky/scripts/oe-build-perf-report b/import-layers/yocto-poky/scripts/oe-build-perf-report
index 6f0b84f9e..ac88f0fce 100755
--- a/import-layers/yocto-poky/scripts/oe-build-perf-report
+++ b/import-layers/yocto-poky/scripts/oe-build-perf-report
@@ -29,12 +29,14 @@ sys.path.append(os.path.join(scripts_path, 'lib'))
import scriptpath
from build_perf import print_table
from build_perf.report import (metadata_xml_to_json, results_xml_to_json,
- aggregate_data, aggregate_metadata, measurement_stats)
+ aggregate_data, aggregate_metadata, measurement_stats,
+ AggregateTestData)
from build_perf import html
+from buildstats import BuildStats, diff_buildstats, BSVerDiff
scriptpath.add_oe_lib_path()
-from oeqa.utils.git import GitRepo
+from oeqa.utils.git import GitRepo, GitError
# Setup logging
@@ -82,29 +84,52 @@ def get_test_runs(repo, tag_name, **kwargs):
# Return field names and a sorted list of revs
return undef_fields, sorted(revs)
-def list_test_revs(repo, tag_name, **kwargs):
+def list_test_revs(repo, tag_name, verbosity, **kwargs):
"""Get list of all tested revisions"""
- fields, revs = get_test_runs(repo, tag_name, **kwargs)
+ valid_kwargs = dict([(k, v) for k, v in kwargs.items() if v is not None])
+
+ fields, revs = get_test_runs(repo, tag_name, **valid_kwargs)
ignore_fields = ['tag_number']
+ if verbosity < 2:
+ extra_fields = ['COMMITS', 'TEST RUNS']
+ ignore_fields.extend(['commit_number', 'commit'])
+ else:
+ extra_fields = ['TEST RUNS']
+
print_fields = [i for i, f in enumerate(fields) if f not in ignore_fields]
# Sort revs
- rows = [[fields[i].upper() for i in print_fields] + ['TEST RUNS']]
- prev = [''] * len(revs)
+ rows = [[fields[i].upper() for i in print_fields] + extra_fields]
+
+ prev = [''] * len(print_fields)
+ prev_commit = None
+ commit_cnt = 0
+ commit_field = fields.index('commit')
for rev in revs:
# Only use fields that we want to print
- rev = [rev[i] for i in print_fields]
+ cols = [rev[i] for i in print_fields]
+
+
+ if cols != prev:
+ commit_cnt = 1
+ test_run_cnt = 1
+ new_row = [''] * (len(print_fields) + len(extra_fields))
- if rev != prev:
- new_row = [''] * len(print_fields) + [1]
for i in print_fields:
- if rev[i] != prev[i]:
+ if cols[i] != prev[i]:
break
- new_row[i:-1] = rev[i:]
+ new_row[i:-len(extra_fields)] = cols[i:]
rows.append(new_row)
else:
- rows[-1][-1] += 1
- prev = rev
+ if rev[commit_field] != prev_commit:
+ commit_cnt += 1
+ test_run_cnt += 1
+
+ if verbosity < 2:
+ new_row[-2] = commit_cnt
+ new_row[-1] = test_run_cnt
+ prev = cols
+ prev_commit = rev[commit_field]
print_table(rows)
@@ -309,20 +334,50 @@ def print_diff_report(metadata_l, data_l, metadata_r, data_r):
print()
-def print_html_report(data, id_comp):
+class BSSummary(object):
+ def __init__(self, bs1, bs2):
+ self.tasks = {'count': bs2.num_tasks,
+ 'change': '{:+d}'.format(bs2.num_tasks - bs1.num_tasks)}
+ self.top_consumer = None
+ self.top_decrease = None
+ self.top_increase = None
+ self.ver_diff = OrderedDict()
+
+ tasks_diff = diff_buildstats(bs1, bs2, 'cputime')
+
+ # Get top consumers of resources
+ tasks_diff = sorted(tasks_diff, key=attrgetter('value2'))
+ self.top_consumer = tasks_diff[-5:]
+
+ # Get biggest increase and decrease in resource usage
+ tasks_diff = sorted(tasks_diff, key=attrgetter('absdiff'))
+ self.top_decrease = tasks_diff[0:5]
+ self.top_increase = tasks_diff[-5:]
+
+ # Compare recipe versions and prepare data for display
+ ver_diff = BSVerDiff(bs1, bs2)
+ if ver_diff:
+ if ver_diff.new:
+ self.ver_diff['New recipes'] = [(n, r.evr) for n, r in ver_diff.new.items()]
+ if ver_diff.dropped:
+ self.ver_diff['Dropped recipes'] = [(n, r.evr) for n, r in ver_diff.dropped.items()]
+ if ver_diff.echanged:
+ self.ver_diff['Epoch changed'] = [(n, "{} &rarr; {}".format(r.left.evr, r.right.evr)) for n, r in ver_diff.echanged.items()]
+ if ver_diff.vchanged:
+ self.ver_diff['Version changed'] = [(n, "{} &rarr; {}".format(r.left.version, r.right.version)) for n, r in ver_diff.vchanged.items()]
+ if ver_diff.rchanged:
+ self.ver_diff['Revision changed'] = [(n, "{} &rarr; {}".format(r.left.evr, r.right.evr)) for n, r in ver_diff.rchanged.items()]
+
+
+def print_html_report(data, id_comp, buildstats):
"""Print report in html format"""
# Handle metadata
- metadata = {'branch': {'title': 'Branch', 'value': 'master'},
- 'hostname': {'title': 'Hostname', 'value': 'foobar'},
- 'commit': {'title': 'Commit', 'value': '1234'}
- }
- metadata = metadata_diff(data[id_comp][0], data[-1][0])
-
+ metadata = metadata_diff(data[id_comp].metadata, data[-1].metadata)
# Generate list of tests
tests = []
- for test in data[-1][1]['tests'].keys():
- test_r = data[-1][1]['tests'][test]
+ for test in data[-1].results['tests'].keys():
+ test_r = data[-1].results['tests'][test]
new_test = {'name': test_r['name'],
'description': test_r['description'],
'status': test_r['status'],
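Editor's note: BSSummary boils the buildstats diff down to a task-count delta, the top five cputime consumers, and the five biggest increases and decreases. A simplified sketch of that selection using a namedtuple in place of diff_buildstats output; the field names value2 and absdiff follow the hunk above, the sample numbers are made up.

    from collections import namedtuple
    from operator import attrgetter

    TaskDiff = namedtuple('TaskDiff', 'name value2 absdiff')

    def summarize(tasks_diff):
        """Pick top consumers and biggest changes from a per-task diff (sketch)."""
        by_value = sorted(tasks_diff, key=attrgetter('value2'))
        by_delta = sorted(tasks_diff, key=attrgetter('absdiff'))
        return {'top_consumer': by_value[-5:],
                'top_decrease': by_delta[0:5],
                'top_increase': by_delta[-5:]}

    diffs = [TaskDiff('gcc.do_compile', 3500.0, +120.0),
             TaskDiff('glibc.do_compile', 1800.0, -40.0),
             TaskDiff('busybox.do_compile', 90.0, +2.0)]
    print(summarize(diffs)['top_consumer'][-1].name)  # gcc.do_compile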
@@ -368,6 +423,16 @@ def print_html_report(data, id_comp):
new_meas['value'] = samples[-1]
new_meas['value_type'] = samples[-1]['val_cls']
+ # Compare buildstats
+ bs_key = test + '.' + meas
+ rev = metadata['commit_num']['value']
+ comp_rev = metadata['commit_num']['value_old']
+ if (rev in buildstats and bs_key in buildstats[rev] and
+ comp_rev in buildstats and bs_key in buildstats[comp_rev]):
+ new_meas['buildstats'] = BSSummary(buildstats[comp_rev][bs_key],
+ buildstats[rev][bs_key])
+
+
new_test['measurements'].append(new_meas)
tests.append(new_test)
@@ -376,7 +441,61 @@ def print_html_report(data, id_comp):
'max': get_data_item(data[-1][0], 'layers.meta.commit_count')}
}
- print(html.template.render(metadata=metadata, test_data=tests, chart_opts=chart_opts))
+ print(html.template.render(title="Build Perf Test Report",
+ metadata=metadata, test_data=tests,
+ chart_opts=chart_opts))
+
+
+def get_buildstats(repo, notes_ref, revs, outdir=None):
+ """Get the buildstats from git notes"""
+ full_ref = 'refs/notes/' + notes_ref
+ if not repo.rev_parse(full_ref):
+ log.error("No buildstats found, please try running "
+ "'git fetch origin %s:%s' to fetch them from the remote",
+ full_ref, full_ref)
+ return
+
+ missing = False
+ buildstats = {}
+ log.info("Parsing buildstats from 'refs/notes/%s'", notes_ref)
+ for rev in revs:
+ buildstats[rev.commit_number] = {}
+ log.debug('Dumping buildstats for %s (%s)', rev.commit_number,
+ rev.commit)
+ for tag in rev.tags:
+ log.debug(' %s', tag)
+ try:
+ bs_all = json.loads(repo.run_cmd(['notes', '--ref', notes_ref,
+ 'show', tag + '^0']))
+ except GitError:
+ log.warning("Buildstats not found for %s", tag)
+ bs_all = {}
+ missing = True
+
+ for measurement, bs in bs_all.items():
+ # Write out onto disk
+ if outdir:
+ tag_base, run_id = tag.rsplit('/', 1)
+ tag_base = tag_base.replace('/', '_')
+ bs_dir = os.path.join(outdir, measurement, tag_base)
+ if not os.path.exists(bs_dir):
+ os.makedirs(bs_dir)
+ with open(os.path.join(bs_dir, run_id + '.json'), 'w') as f:
+ json.dump(bs, f, indent=2)
+
+ # Read buildstats into a dict
+ _bs = BuildStats.from_json(bs)
+ if measurement not in buildstats[rev.commit_number]:
+ buildstats[rev.commit_number][measurement] = _bs
+ else:
+ buildstats[rev.commit_number][measurement].aggregate(_bs)
+
+ if missing:
+ log.info("Buildstats were missing for some test runs, please "
+ "run 'git fetch origin %s:%s' and try again",
+ full_ref, full_ref)
+
+ return buildstats
def auto_args(repo, args):
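Editor's note: get_buildstats reads the buildstats JSON from git notes under refs/notes/buildstats/<hostname>/<branch>/<machine>, keyed per test-run tag, and returns a dict of per-commit-number measurements. A hedged sketch of how the notes ref is assembled and fetched; it mirrors the format string used in main() below, GitRepo itself is not exercised.

    # Shape of the data returned by get_buildstats (sketch):
    #   {commit_number: {"<test>.<measurement>": BuildStats, ...}, ...}
    def notes_ref_for(hostname, branch, machine):
        """Build the git-notes ref name used to store buildstats (sketch)."""
        return 'buildstats/{}/{}/{}'.format(hostname, branch, machine)

    ref = notes_ref_for('perf-host1', 'master', 'qemux86-64')
    print('refs/notes/' + ref)
    # refs/notes/buildstats/perf-host1/master/qemux86-64
    # fetch them with: git fetch origin refs/notes/<ref>:refs/notes/<ref>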
@@ -411,7 +530,7 @@ Examine build performance test results from a Git repository"""
help="Verbose logging")
parser.add_argument('--repo', '-r', required=True,
help="Results repository (local git clone)")
- parser.add_argument('--list', '-l', action='store_true',
+ parser.add_argument('--list', '-l', action='count',
help="List available test runs")
parser.add_argument('--html', action='store_true',
help="Generate report in html format")
@@ -434,6 +553,8 @@ Examine build performance test results from a Git repository"""
group.add_argument('--commit-number2',
help="Revision number to compare with, redundant if "
"--commit2 is specified")
+ parser.add_argument('--dump-buildstats', nargs='?', const='.',
+ help="Dump buildstats of the tests")
return parser.parse_args(argv)
@@ -447,7 +568,7 @@ def main(argv=None):
repo = GitRepo(args.repo)
if args.list:
- list_test_revs(repo, args.tag_name)
+ list_test_revs(repo, args.tag_name, args.list, hostname=args.hostname)
return 0
# Determine hostname which to use
@@ -501,7 +622,7 @@ def main(argv=None):
xml = is_xml_format(repo, revs[index_r].tags[-1])
if args.html:
- index_0 = max(0, index_r - args.history_length)
+ index_0 = max(0, min(index_l, index_r - args.history_length))
rev_range = range(index_0, index_r + 1)
else:
# We do not need range of commits for text report (no graphs)
@@ -515,18 +636,27 @@ def main(argv=None):
data = []
for raw_m, raw_d in raw_data:
- data.append((aggregate_metadata(raw_m), aggregate_data(raw_d)))
+ data.append(AggregateTestData(aggregate_metadata(raw_m),
+ aggregate_data(raw_d)))
# Re-map list indexes to the new table starting from index 0
index_r = index_r - index_0
index_l = index_l - index_0
+ # Read buildstats only when needed
+ buildstats = None
+ if args.dump_buildstats or args.html:
+ outdir = 'oe-build-perf-buildstats' if args.dump_buildstats else None
+ notes_ref = 'buildstats/{}/{}/{}'.format(args.hostname, args.branch,
+ args.machine)
+ buildstats = get_buildstats(repo, notes_ref, [rev_l, rev_r], outdir)
+
# Print report
if not args.html:
- print_diff_report(data[index_l][0], data[index_l][1],
- data[index_r][0], data[index_r][1])
+ print_diff_report(data[index_l].metadata, data[index_l].results,
+ data[index_r].metadata, data[index_r].results)
else:
- print_html_report(data, index_l)
+ print_html_report(data, index_l, buildstats)
return 0
diff --git a/import-layers/yocto-poky/scripts/oe-buildenv-internal b/import-layers/yocto-poky/scripts/oe-buildenv-internal
index c8905524f..77f98a32b 100755
--- a/import-layers/yocto-poky/scripts/oe-buildenv-internal
+++ b/import-layers/yocto-poky/scripts/oe-buildenv-internal
@@ -24,8 +24,7 @@ if ! $(return >/dev/null 2>&1) ; then
echo 'Usage: . $OEROOT/scripts/oe-buildenv-internal &&'
echo ''
echo 'OpenEmbedded oe-buildenv-internal - an internal script that is'
- echo 'used in oe-init-build-env and oe-init-build-env-memres to'
- echo 'initialize oe build environment'
+ echo 'used in oe-init-build-env to initialize oe build environment'
echo ''
exit 2
fi
@@ -106,6 +105,9 @@ fi
BITBAKEDIR=$(readlink -f "$BITBAKEDIR")
BUILDDIR=$(readlink -f "$BUILDDIR")
+BBPATH=$BUILDDIR
+
+export BBPATH
if [ ! -d "$BITBAKEDIR" ]; then
echo >&2 "Error: The bitbake directory ($BITBAKEDIR) does not exist! Please ensure a copy of bitbake exists at this location or specify an alternative path on the command line"
diff --git a/import-layers/yocto-poky/scripts/oe-find-native-sysroot b/import-layers/yocto-poky/scripts/oe-find-native-sysroot
index 235a67c95..350ea2137 100755
--- a/import-layers/yocto-poky/scripts/oe-find-native-sysroot
+++ b/import-layers/yocto-poky/scripts/oe-find-native-sysroot
@@ -50,7 +50,7 @@ BITBAKE_E=""
set_oe_native_sysroot(){
echo "Running bitbake -e $1"
BITBAKE_E="`bitbake -e $1`"
- OECORE_NATIVE_SYSROOT=`echo "$BITBAKE_E" | grep ^STAGING_DIR_NATIVE | cut -d '"' -f2`
+ OECORE_NATIVE_SYSROOT=`echo "$BITBAKE_E" | grep ^STAGING_DIR_NATIVE= | cut -d '"' -f2`
if [ "x$OECORE_NATIVE_SYSROOT" = "x" ]; then
# This indicates that there was an error running bitbake -e that
diff --git a/import-layers/yocto-poky/scripts/oe-pkgdata-util b/import-layers/yocto-poky/scripts/oe-pkgdata-util
index 677effeef..c6fba56c8 100755
--- a/import-layers/yocto-poky/scripts/oe-pkgdata-util
+++ b/import-layers/yocto-poky/scripts/oe-pkgdata-util
@@ -40,9 +40,8 @@ def tinfoil_init():
import bb.tinfoil
import logging
tinfoil = bb.tinfoil.Tinfoil()
- tinfoil.prepare(True)
-
tinfoil.logger.setLevel(logging.WARNING)
+ tinfoil.prepare(True)
return tinfoil
@@ -198,6 +197,10 @@ def read_value(args):
# PKGSIZE is now in bytes, but we want it in KB
pkgsize = (int(value) + 1024 // 2) // 1024
value = "%d" % pkgsize
+ if args.unescape:
+ import codecs
+ # escape_decode() unescapes backslash encodings in byte streams
+ value = codecs.escape_decode(bytes(value, "utf-8"))[0].decode("utf-8")
if args.prefix_name:
print('%s %s' % (pkg_name, value))
else:
@@ -553,6 +556,7 @@ def main():
parser_read_value.add_argument('pkg', nargs='*', help='Runtime package name to look up')
parser_read_value.add_argument('-f', '--file', help='Read package names from the specified file (one per line, first field only)')
parser_read_value.add_argument('-n', '--prefix-name', help='Prefix output with package name', action='store_true')
+ parser_read_value.add_argument('-u', '--unescape', help='Expand escapes such as \\n', action='store_true')
parser_read_value.set_defaults(func=read_value)
parser_glob = subparsers.add_parser('glob',
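Editor's note: the new -u/--unescape option decodes backslash escapes in the stored value before printing; codecs.escape_decode operates on bytes, hence the encode/decode round trip. A standalone sketch of the same transformation.

    import codecs

    def unescape(value):
        """Expand backslash escapes such as \\n in a pkgdata value (sketch)."""
        return codecs.escape_decode(bytes(value, "utf-8"))[0].decode("utf-8")

    print(unescape("line one\\nline two"))
    # line one
    # line two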
diff --git a/import-layers/yocto-poky/scripts/oe-publish-sdk b/import-layers/yocto-poky/scripts/oe-publish-sdk
index 9f7963c24..ee33acf90 100755
--- a/import-layers/yocto-poky/scripts/oe-publish-sdk
+++ b/import-layers/yocto-poky/scripts/oe-publish-sdk
@@ -114,9 +114,9 @@ def publish(args):
# Setting up the git repo
if not is_remote:
- cmd = 'set -e; mkdir -p %s/layers; cd %s/layers; if [ ! -e .git ]; then git init .; mv .git/hooks/post-update.sample .git/hooks/post-update; echo "*.pyc\n*.pyo\npyshtables.py" > .gitignore; fi; git add -A .; git config user.email "oe@oe.oe" && git config user.name "OE" && git commit -q -m "init repo" || true; git update-server-info' % (destination, destination)
+ cmd = 'set -e; mkdir -p %s/layers; cd %s/layers; if [ ! -e .git ]; then git init .; cp .git/hooks/post-update.sample .git/hooks/post-commit; echo "*.pyc\n*.pyo\npyshtables.py" > .gitignore; fi; git add -A .; git config user.email "oe@oe.oe" && git config user.name "OE" && git commit -q -m "init repo" || true' % (destination, destination)
else:
- cmd = "ssh %s 'set -e; mkdir -p %s/layers; cd %s/layers; if [ ! -e .git ]; then git init .; mv .git/hooks/post-update.sample .git/hooks/post-update; echo '*.pyc\n*.pyo\npyshtables.py' > .gitignore; fi; git add -A .; git config user.email 'oe@oe.oe' && git config user.name 'OE' && git commit -q -m \"init repo\" || true; git update-server-info'" % (host, destdir, destdir)
+ cmd = "ssh %s 'set -e; mkdir -p %s/layers; cd %s/layers; if [ ! -e .git ]; then git init .; cp .git/hooks/post-update.sample .git/hooks/post-commit; echo '*.pyc\n*.pyo\npyshtables.py' > .gitignore; fi; git add -A .; git config user.email 'oe@oe.oe' && git config user.name 'OE' && git commit -q -m \"init repo\" || true'" % (host, destdir, destdir)
ret = subprocess.call(cmd, shell=True)
if ret == 0:
logger.info('SDK published successfully')
diff --git a/import-layers/yocto-poky/scripts/oe-selftest b/import-layers/yocto-poky/scripts/oe-selftest
index 52366b1c8..1bf860a41 100755
--- a/import-layers/yocto-poky/scripts/oe-selftest
+++ b/import-layers/yocto-poky/scripts/oe-selftest
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
-# Copyright (c) 2013 Intel Corporation
+# Copyright (c) 2013-2017 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -25,790 +25,51 @@
# E.g: "oe-selftest -r bblayers.BitbakeLayers" will run just the BitbakeLayers class from meta/lib/oeqa/selftest/bblayers.py
+
import os
import sys
-import unittest
-import logging
import argparse
-import subprocess
-import time as t
-import re
-import fnmatch
-import collections
-import imp
+import logging
-sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)) + '/lib')
+scripts_path = os.path.dirname(os.path.realpath(__file__))
+lib_path = scripts_path + '/lib'
+sys.path = sys.path + [lib_path]
+import argparse_oe
+import scriptutils
import scriptpath
-scriptpath.add_bitbake_lib_path()
scriptpath.add_oe_lib_path()
-import argparse_oe
-
-import oeqa.selftest
-import oeqa.utils.ftools as ftools
-from oeqa.utils.commands import runCmd, get_bb_var, get_test_layer
-from oeqa.utils.metadata import metadata_from_bb, write_metadata_file
-from oeqa.selftest.base import oeSelfTest, get_available_machines
-
-try:
- import xmlrunner
- from xmlrunner.result import _XMLTestResult as TestResult
- from xmlrunner import XMLTestRunner as _TestRunner
-except ImportError:
- # use the base runner instead
- from unittest import TextTestResult as TestResult
- from unittest import TextTestRunner as _TestRunner
-
-log_prefix = "oe-selftest-" + t.strftime("%Y%m%d-%H%M%S")
-
-def logger_create():
- log_file = log_prefix + ".log"
- if os.path.lexists("oe-selftest.log"):
- os.remove("oe-selftest.log")
- os.symlink(log_file, "oe-selftest.log")
-
- log = logging.getLogger("selftest")
- log.setLevel(logging.DEBUG)
-
- fh = logging.FileHandler(filename=log_file, mode='w')
- fh.setLevel(logging.DEBUG)
-
- ch = logging.StreamHandler(sys.stdout)
- ch.setLevel(logging.INFO)
-
- formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
- fh.setFormatter(formatter)
- ch.setFormatter(formatter)
-
- log.addHandler(fh)
- log.addHandler(ch)
+scriptpath.add_bitbake_lib_path()
- return log
+from oeqa.utils import load_test_components
+from oeqa.core.exception import OEQAPreRun
-log = logger_create()
+logger = scriptutils.logger_create('oe-selftest', stream=sys.stdout)
-def get_args_parser():
+def main():
description = "Script that runs unit tests against bitbake and other Yocto related tools. The goal is to validate tools functionality and metadata integrity. Refer to https://wiki.yoctoproject.org/wiki/Oe-selftest for more information."
parser = argparse_oe.ArgumentParser(description=description)
- group = parser.add_mutually_exclusive_group(required=True)
- group.add_argument('-r', '--run-tests', required=False, action='store', nargs='*', dest="run_tests", default=None, help='Select what tests to run (modules, classes or test methods). Format should be: <module>.<class>.<test_method>')
- group.add_argument('-a', '--run-all-tests', required=False, action="store_true", dest="run_all_tests", default=False, help='Run all (unhidden) tests')
- group.add_argument('-m', '--list-modules', required=False, action="store_true", dest="list_modules", default=False, help='List all available test modules.')
- group.add_argument('--list-classes', required=False, action="store_true", dest="list_allclasses", default=False, help='List all available test classes.')
- parser.add_argument('--coverage', action="store_true", help="Run code coverage when testing")
- parser.add_argument('--coverage-source', dest="coverage_source", nargs="+", help="Specifiy the directories to take coverage from")
- parser.add_argument('--coverage-include', dest="coverage_include", nargs="+", help="Specify extra patterns to include into the coverage measurement")
- parser.add_argument('--coverage-omit', dest="coverage_omit", nargs="+", help="Specify with extra patterns to exclude from the coverage measurement")
- group.add_argument('--run-tests-by', required=False, dest='run_tests_by', default=False, nargs='*',
- help='run-tests-by <name|class|module|id|tag> <list of tests|classes|modules|ids|tags>')
- group.add_argument('--list-tests-by', required=False, dest='list_tests_by', default=False, nargs='*',
- help='list-tests-by <name|class|module|id|tag> <list of tests|classes|modules|ids|tags>')
- group.add_argument('-l', '--list-tests', required=False, action="store_true", dest="list_tests", default=False,
- help='List all available tests.')
- group.add_argument('--list-tags', required=False, dest='list_tags', default=False, action="store_true",
- help='List all tags that have been set to test cases.')
- parser.add_argument('--machine', required=False, dest='machine', choices=['random', 'all'], default=None,
- help='Run tests on different machines (random/all).')
- parser.add_argument('--repository', required=False, dest='repository', default='', action='store',
- help='Submit test results to a repository')
- return parser
-
-builddir = None
-
-
-def preflight_check():
-
- global builddir
-
- log.info("Checking that everything is in order before running the tests")
-
- if not os.environ.get("BUILDDIR"):
- log.error("BUILDDIR isn't set. Did you forget to source your build environment setup script?")
- return False
-
- builddir = os.environ.get("BUILDDIR")
- if os.getcwd() != builddir:
- log.info("Changing cwd to %s" % builddir)
- os.chdir(builddir)
-
- if not "meta-selftest" in get_bb_var("BBLAYERS"):
- log.warn("meta-selftest layer not found in BBLAYERS, adding it")
- meta_selftestdir = os.path.join(
- get_bb_var("BBLAYERS_FETCH_DIR"),
- 'meta-selftest')
- if os.path.isdir(meta_selftestdir):
- runCmd("bitbake-layers add-layer %s" %meta_selftestdir)
- else:
- log.error("could not locate meta-selftest in:\n%s"
- %meta_selftestdir)
- return False
-
- if "buildhistory.bbclass" in get_bb_var("BBINCLUDED"):
- log.error("You have buildhistory enabled already and this isn't recommended for selftest, please disable it first.")
- return False
-
- if get_bb_var("PRSERV_HOST"):
- log.error("Please unset PRSERV_HOST in order to run oe-selftest")
- return False
-
- if get_bb_var("SANITY_TESTED_DISTROS"):
- log.error("Please unset SANITY_TESTED_DISTROS in order to run oe-selftest")
- return False
-
- log.info("Running bitbake -p")
- runCmd("bitbake -p")
-
- return True
-
-def add_include():
- global builddir
- if "#include added by oe-selftest.py" \
- not in ftools.read_file(os.path.join(builddir, "conf/local.conf")):
- log.info("Adding: \"include selftest.inc\" in local.conf")
- ftools.append_file(os.path.join(builddir, "conf/local.conf"), \
- "\n#include added by oe-selftest.py\ninclude machine.inc\ninclude selftest.inc")
-
- if "#include added by oe-selftest.py" \
- not in ftools.read_file(os.path.join(builddir, "conf/bblayers.conf")):
- log.info("Adding: \"include bblayers.inc\" in bblayers.conf")
- ftools.append_file(os.path.join(builddir, "conf/bblayers.conf"), \
- "\n#include added by oe-selftest.py\ninclude bblayers.inc")
-def remove_include():
- global builddir
- if builddir is None:
- return
- if "#include added by oe-selftest.py" \
- in ftools.read_file(os.path.join(builddir, "conf/local.conf")):
- log.info("Removing the include from local.conf")
- ftools.remove_from_file(os.path.join(builddir, "conf/local.conf"), \
- "\n#include added by oe-selftest.py\ninclude machine.inc\ninclude selftest.inc")
-
- if "#include added by oe-selftest.py" \
- in ftools.read_file(os.path.join(builddir, "conf/bblayers.conf")):
- log.info("Removing the include from bblayers.conf")
- ftools.remove_from_file(os.path.join(builddir, "conf/bblayers.conf"), \
- "\n#include added by oe-selftest.py\ninclude bblayers.inc")
-
-def remove_inc_files():
- global builddir
- if builddir is None:
- return
- try:
- os.remove(os.path.join(builddir, "conf/selftest.inc"))
- for root, _, files in os.walk(get_test_layer()):
- for f in files:
- if f == 'test_recipe.inc':
- os.remove(os.path.join(root, f))
- except OSError as e:
- pass
-
- for incl_file in ['conf/bblayers.inc', 'conf/machine.inc']:
- try:
- os.remove(os.path.join(builddir, incl_file))
- except:
- pass
-
-
-def get_tests_modules(include_hidden=False):
- modules_list = list()
- for modules_path in oeqa.selftest.__path__:
- for (p, d, f) in os.walk(modules_path):
- files = sorted([f for f in os.listdir(p) if f.endswith('.py') and not (f.startswith('_') and not include_hidden) and not f.startswith('__') and f != 'base.py'])
- for f in files:
- submodules = p.split("selftest")[-1]
- module = ""
- if submodules:
- module = 'oeqa.selftest' + submodules.replace("/",".") + "." + f.split('.py')[0]
- else:
- module = 'oeqa.selftest.' + f.split('.py')[0]
- if module not in modules_list:
- modules_list.append(module)
- return modules_list
-
-
-def get_tests(exclusive_modules=[], include_hidden=False):
- test_modules = list()
- for x in exclusive_modules:
- test_modules.append('oeqa.selftest.' + x)
- if not test_modules:
- inc_hidden = include_hidden
- test_modules = get_tests_modules(inc_hidden)
-
- return test_modules
-
-
-class Tc:
- def __init__(self, tcname, tcclass, tcmodule, tcid=None, tctag=None):
- self.tcname = tcname
- self.tcclass = tcclass
- self.tcmodule = tcmodule
- self.tcid = tcid
- # A test case can have multiple tags (as tuples) otherwise str will suffice
- self.tctag = tctag
- self.fullpath = '.'.join(['oeqa', 'selftest', tcmodule, tcclass, tcname])
-
-
-def get_tests_from_module(tmod):
- tlist = []
- prefix = 'oeqa.selftest.'
+ comp_name, comp = load_test_components(logger, 'oe-selftest').popitem()
+ comp.register_commands(logger, parser)
try:
- import importlib
- modlib = importlib.import_module(tmod)
- for mod in list(vars(modlib).values()):
- if isinstance(mod, type(oeSelfTest)) and issubclass(mod, oeSelfTest) and mod is not oeSelfTest:
- for test in dir(mod):
- if test.startswith('test_') and hasattr(vars(mod)[test], '__call__'):
- # Get test case id and feature tag
- # NOTE: if testcase decorator or feature tag not set will throw error
- try:
- tid = vars(mod)[test].test_case
- except:
- print('DEBUG: tc id missing for ' + str(test))
- tid = None
- try:
- ttag = vars(mod)[test].tag__feature
- except:
- # print('DEBUG: feature tag missing for ' + str(test))
- ttag = None
-
- # NOTE: for some reason lstrip() doesn't work for mod.__module__
- tlist.append(Tc(test, mod.__name__, mod.__module__.replace(prefix, ''), tid, ttag))
- except:
- pass
-
- return tlist
-
-
-def get_all_tests():
- # Get all the test modules (except the hidden ones)
- testlist = []
- tests_modules = get_tests_modules()
- # Get all the tests from modules
- for tmod in sorted(tests_modules):
- testlist += get_tests_from_module(tmod)
- return testlist
-
-
-def get_testsuite_by(criteria, keyword):
- # Get a testsuite based on 'keyword'
- # criteria: name, class, module, id, tag
- # keyword: a list of tests, classes, modules, ids, tags
-
- ts = []
- all_tests = get_all_tests()
-
- def get_matches(values):
- # Get an item and return the ones that match with keyword(s)
- # values: the list of items (names, modules, classes...)
- result = []
- remaining = values[:]
- for key in keyword:
- found = False
- if key in remaining:
- # Regular matching of exact item
- result.append(key)
- remaining.remove(key)
- found = True
- else:
- # Wildcard matching
- pattern = re.compile(fnmatch.translate(r"%s" % key))
- added = [x for x in remaining if pattern.match(x)]
- if added:
- result.extend(added)
- remaining = [x for x in remaining if x not in added]
- found = True
- if not found:
- log.error("Failed to find test: %s" % key)
-
- return result
-
- if criteria == 'name':
- names = get_matches([ tc.tcname for tc in all_tests ])
- ts = [ tc for tc in all_tests if tc.tcname in names ]
-
- elif criteria == 'class':
- classes = get_matches([ tc.tcclass for tc in all_tests ])
- ts = [ tc for tc in all_tests if tc.tcclass in classes ]
-
- elif criteria == 'module':
- modules = get_matches([ tc.tcmodule for tc in all_tests ])
- ts = [ tc for tc in all_tests if tc.tcmodule in modules ]
-
- elif criteria == 'id':
- ids = get_matches([ str(tc.tcid) for tc in all_tests ])
- ts = [ tc for tc in all_tests if str(tc.tcid) in ids ]
-
- elif criteria == 'tag':
- values = set()
- for tc in all_tests:
- # tc can have multiple tags (as tuple) otherwise str will suffice
- if isinstance(tc.tctag, tuple):
- values |= { str(tag) for tag in tc.tctag }
- else:
- values.add(str(tc.tctag))
-
- tags = get_matches(list(values))
-
- for tc in all_tests:
- for tag in tags:
- if isinstance(tc.tctag, tuple) and tag in tc.tctag:
- ts.append(tc)
- elif tag == tc.tctag:
- ts.append(tc)
-
- # Remove duplicates from the list
- ts = list(set(ts))
-
- return ts
-
-
-def list_testsuite_by(criteria, keyword):
- # Get a testsuite based on 'keyword'
- # criteria: name, class, module, id, tag
- # keyword: a list of tests, classes, modules, ids, tags
- def tc_key(t):
- if t[0] is None:
- return (0,) + t[1:]
- return t
- # tcid may be None if no ID was assigned, in which case sorted() will throw
- # a TypeError as Python 3 does not allow comparison (<,<=,>=,>) of
- # heterogeneous types, handle this by using a custom key generator
- ts = sorted([ (tc.tcid, tc.tctag, tc.tcname, tc.tcclass, tc.tcmodule) \
- for tc in get_testsuite_by(criteria, keyword) ], key=tc_key)
- print('_' * 150)
- for t in ts:
- if isinstance(t[1], (tuple, list)):
- print('%-4s\t%-20s\t%-60s\t%-25s\t%-20s' % (t[0], ', '.join(t[1]), t[2], t[3], t[4]))
- else:
- print('%-4s\t%-20s\t%-60s\t%-25s\t%-20s' % t)
- print('_' * 150)
- print('Filtering by:\t %s' % criteria)
- print('Looking for:\t %s' % ', '.join(str(x) for x in keyword))
- print('Total found:\t %s' % len(ts))
-
-
-def list_tests():
- # List all available oe-selftest tests
-
- ts = get_all_tests()
-
- print('%-4s\t%-10s\t%-50s' % ('id', 'tag', 'test'))
- print('_' * 80)
- for t in ts:
- if isinstance(t.tctag, (tuple, list)):
- print('%-4s\t%-10s\t%-50s' % (t.tcid, ', '.join(t.tctag), '.'.join([t.tcmodule, t.tcclass, t.tcname])))
- else:
- print('%-4s\t%-10s\t%-50s' % (t.tcid, t.tctag, '.'.join([t.tcmodule, t.tcclass, t.tcname])))
- print('_' * 80)
- print('Total found:\t %s' % len(ts))
-
-def list_tags():
- # Get all tags set to test cases
- # This is useful when setting tags to test cases
- # The list of tags should be kept as minimal as possible
- tags = set()
- all_tests = get_all_tests()
-
- for tc in all_tests:
- if isinstance(tc.tctag, (tuple, list)):
- tags.update(set(tc.tctag))
- else:
- tags.add(tc.tctag)
-
- print('Tags:\t%s' % ', '.join(str(x) for x in tags))
-
-def coverage_setup(coverage_source, coverage_include, coverage_omit):
- """ Set up the coverage measurement for the testcases to be run """
- import datetime
- import subprocess
- global builddir
- pokydir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
- curcommit= subprocess.check_output(["git", "--git-dir", os.path.join(pokydir, ".git"), "rev-parse", "HEAD"]).decode('utf-8')
- coveragerc = "%s/.coveragerc" % builddir
- data_file = "%s/.coverage." % builddir
- data_file += datetime.datetime.now().strftime('%Y%m%dT%H%M%S')
- if os.path.isfile(data_file):
- os.remove(data_file)
- with open(coveragerc, 'w') as cps:
- cps.write("# Generated with command '%s'\n" % " ".join(sys.argv))
- cps.write("# HEAD commit %s\n" % curcommit.strip())
- cps.write("[run]\n")
- cps.write("data_file = %s\n" % data_file)
- cps.write("branch = True\n")
- # Measure just BBLAYERS, scripts and bitbake folders
- cps.write("source = \n")
- if coverage_source:
- for directory in coverage_source:
- if not os.path.isdir(directory):
- log.warn("Directory %s is not valid.", directory)
- cps.write(" %s\n" % directory)
- else:
- for layer in get_bb_var('BBLAYERS').split():
- cps.write(" %s\n" % layer)
- cps.write(" %s\n" % os.path.dirname(os.path.realpath(__file__)))
- cps.write(" %s\n" % os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))),'bitbake'))
-
- if coverage_include:
- cps.write("include = \n")
- for pattern in coverage_include:
- cps.write(" %s\n" % pattern)
- if coverage_omit:
- cps.write("omit = \n")
- for pattern in coverage_omit:
- cps.write(" %s\n" % pattern)
-
- return coveragerc
-
-def coverage_report():
- """ Loads the coverage data gathered and reports it back """
- try:
- # Coverage4 uses coverage.Coverage
- from coverage import Coverage
- except:
- # Coverage under version 4 uses coverage.coverage
- from coverage import coverage as Coverage
-
- import io as StringIO
- from coverage.misc import CoverageException
-
- cov_output = StringIO.StringIO()
- # Creating the coverage data with the setting from the configuration file
- cov = Coverage(config_file = os.environ.get('COVERAGE_PROCESS_START'))
- try:
- # Load data from the data file specified in the configuration
- cov.load()
- # Store report data in a StringIO variable
- cov.report(file = cov_output, show_missing=False)
- log.info("\n%s" % cov_output.getvalue())
- except CoverageException as e:
- # Show problems with the reporting. Since Coverage4 not finding any data to report raises an exception
- log.warn("%s" % str(e))
- finally:
- cov_output.close()
-
-
-def main():
- parser = get_args_parser()
- args = parser.parse_args()
-
- # Add <layer>/lib to sys.path, so layers can add selftests
- log.info("Running bitbake -e to get BBPATH")
- bbpath = get_bb_var('BBPATH').split(':')
- layer_libdirs = [p for p in (os.path.join(l, 'lib') for l in bbpath) if os.path.exists(p)]
- sys.path.extend(layer_libdirs)
- imp.reload(oeqa.selftest)
-
- # act like bitbake and enforce en_US.UTF-8 locale
- os.environ["LC_ALL"] = "en_US.UTF-8"
-
- if args.run_tests_by and len(args.run_tests_by) >= 2:
- valid_options = ['name', 'class', 'module', 'id', 'tag']
- if args.run_tests_by[0] not in valid_options:
- print('--run-tests-by %s not a valid option. Choose one of <name|class|module|id|tag>.' % args.run_tests_by[0])
- return 1
- else:
- criteria = args.run_tests_by[0]
- keyword = args.run_tests_by[1:]
- ts = sorted([ tc.fullpath for tc in get_testsuite_by(criteria, keyword) ])
- if not ts:
- return 1
-
- if args.list_tests_by and len(args.list_tests_by) >= 2:
- valid_options = ['name', 'class', 'module', 'id', 'tag']
- if args.list_tests_by[0] not in valid_options:
- print('--list-tests-by %s not a valid option. Choose one of <name|class|module|id|tag>.' % args.list_tests_by[0])
- return 1
- else:
- criteria = args.list_tests_by[0]
- keyword = args.list_tests_by[1:]
- list_testsuite_by(criteria, keyword)
-
- if args.list_tests:
- list_tests()
-
- if args.list_tags:
- list_tags()
-
- if args.list_allclasses:
- args.list_modules = True
-
- if args.list_modules:
- log.info('Listing all available test modules:')
- testslist = get_tests(include_hidden=True)
- for test in testslist:
- module = test.split('oeqa.selftest.')[-1]
- info = ''
- if module.startswith('_'):
- info = ' (hidden)'
- print(module + info)
- if args.list_allclasses:
- try:
- import importlib
- modlib = importlib.import_module(test)
- for v in vars(modlib):
- t = vars(modlib)[v]
- if isinstance(t, type(oeSelfTest)) and issubclass(t, oeSelfTest) and t!=oeSelfTest:
- print(" --", v)
- for method in dir(t):
- if method.startswith("test_") and isinstance(vars(t)[method], collections.Callable):
- print(" -- --", method)
-
- except (AttributeError, ImportError) as e:
- print(e)
- pass
-
- if args.run_tests or args.run_all_tests or args.run_tests_by:
- if not preflight_check():
- return 1
-
- if args.run_tests_by:
- testslist = ts
- else:
- testslist = get_tests(exclusive_modules=(args.run_tests or []), include_hidden=False)
-
- suite = unittest.TestSuite()
- loader = unittest.TestLoader()
- loader.sortTestMethodsUsing = None
- runner = TestRunner(verbosity=2,
- resultclass=buildResultClass(args))
- # we need to do this here, otherwise just loading the tests
- # will take 2 minutes (bitbake -e calls)
- oeSelfTest.testlayer_path = get_test_layer()
- for test in testslist:
- log.info("Loading tests from: %s" % test)
- try:
- suite.addTests(loader.loadTestsFromName(test))
- except AttributeError as e:
- log.error("Failed to import %s" % test)
- log.error(e)
- return 1
- add_include()
-
- if args.machine:
- # Custom machine sets only weak default values (??=) for MACHINE in machine.inc
- # This let test cases that require a specific MACHINE to be able to override it, using (?= or =)
- log.info('Custom machine mode enabled. MACHINE set to %s' % args.machine)
- if args.machine == 'random':
- os.environ['CUSTOMMACHINE'] = 'random'
- result = runner.run(suite)
- else: # all
- machines = get_available_machines()
- for m in machines:
- log.info('Run tests with custom MACHINE set to: %s' % m)
- os.environ['CUSTOMMACHINE'] = m
- result = runner.run(suite)
- else:
- result = runner.run(suite)
-
- log.info("Finished")
-
- if args.repository:
- import git
- # Commit tests results to repository
- metadata = metadata_from_bb()
- git_dir = os.path.join(os.getcwd(), 'selftest')
- if not os.path.isdir(git_dir):
- os.mkdir(git_dir)
-
- log.debug('Checking for git repository in %s' % git_dir)
- try:
- repo = git.Repo(git_dir)
- except git.exc.InvalidGitRepositoryError:
- log.debug("Couldn't find git repository %s; "
- "cloning from %s" % (git_dir, args.repository))
- repo = git.Repo.clone_from(args.repository, git_dir)
-
- r_branches = repo.git.branch(r=True)
- r_branches = set(r_branches.replace('origin/', '').split())
- l_branches = {str(branch) for branch in repo.branches}
- branch = '%s/%s/%s' % (metadata['hostname'],
- metadata['layers']['meta'].get('branch', '(nogit)'),
- metadata['config']['MACHINE'])
-
- if branch in l_branches:
- log.debug('Found branch in local repository, checking out')
- repo.git.checkout(branch)
- elif branch in r_branches:
- log.debug('Found branch in remote repository, checking'
- ' out and pulling')
- repo.git.checkout(branch)
- repo.git.pull()
- else:
- log.debug('New branch %s' % branch)
- repo.git.checkout('master')
- repo.git.checkout(b=branch)
-
- cleanResultsDir(repo)
- xml_dir = os.path.join(os.getcwd(), log_prefix)
- copyResultFiles(xml_dir, git_dir, repo)
- metadata_file = os.path.join(git_dir, 'metadata.xml')
- write_metadata_file(metadata_file, metadata)
- repo.index.add([metadata_file])
- repo.index.write()
-
- # Get information for commit message
- layer_info = ''
- for layer, values in metadata['layers'].items():
- layer_info = '%s%-17s = %s:%s\n' % (layer_info, layer,
- values.get('branch', '(nogit)'), values.get('commit', '0'*40))
- msg = 'Selftest for build %s of %s for machine %s on %s\n\n%s' % (
- log_prefix[12:], metadata['distro']['pretty_name'],
- metadata['config']['MACHINE'], metadata['hostname'], layer_info)
-
- log.debug('Commiting results to local repository')
- repo.index.commit(msg)
- if not repo.is_dirty():
- try:
- if branch in r_branches:
- log.debug('Pushing changes to remote repository')
- repo.git.push()
- else:
- log.debug('Pushing changes to remote repository '
- 'creating new branch')
- repo.git.push('-u', 'origin', branch)
- except GitCommandError:
- log.error('Falied to push to remote repository')
- return 1
- else:
- log.error('Local repository is dirty, not pushing commits')
-
- if result.wasSuccessful():
- return 0
- else:
- return 1
-
-def buildResultClass(args):
- """Build a Result Class to use in the testcase execution"""
- import site
-
- class StampedResult(TestResult):
- """
- Custom TestResult that prints the time when a test starts. As oe-selftest
- can take a long time (ie a few hours) to run, timestamps help us understand
- what tests are taking a long time to execute.
- If coverage is required, this class executes the coverage setup and reporting.
- """
- def startTest(self, test):
- import time
- self.stream.write(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + " - ")
- super(StampedResult, self).startTest(test)
-
- def startTestRun(self):
- """ Setup coverage before running any testcase """
-
- # variable holding the coverage configuration file allowing subprocess to be measured
- self.coveragepth = None
-
- # indicates the system if coverage is currently installed
- self.coverage_installed = True
-
- if args.coverage or args.coverage_source or args.coverage_include or args.coverage_omit:
- try:
- # check if user can do coverage
- import coverage
- except:
- log.warn("python coverage is not installed. More info on https://pypi.python.org/pypi/coverage")
- self.coverage_installed = False
-
- if self.coverage_installed:
- log.info("Coverage is enabled")
-
- major_version = int(coverage.version.__version__[0])
- if major_version < 4:
- log.error("python coverage %s installed. Require version 4 or greater." % coverage.version.__version__)
- self.stop()
- # In case the user has not set the variable COVERAGE_PROCESS_START,
- # create a default one and export it. The COVERAGE_PROCESS_START
- # value indicates where the coverage configuration file resides
- # More info on https://pypi.python.org/pypi/coverage
- if not os.environ.get('COVERAGE_PROCESS_START'):
- os.environ['COVERAGE_PROCESS_START'] = coverage_setup(args.coverage_source, args.coverage_include, args.coverage_omit)
-
- # Use default site.USER_SITE and write corresponding config file
- site.ENABLE_USER_SITE = True
- if not os.path.exists(site.USER_SITE):
- os.makedirs(site.USER_SITE)
- self.coveragepth = os.path.join(site.USER_SITE, "coverage.pth")
- with open(self.coveragepth, 'w') as cps:
- cps.write('import sys,site; sys.path.extend(site.getsitepackages()); import coverage; coverage.process_startup();')
-
- def stopTestRun(self):
- """ Report coverage data after the testcases are run """
-
- if args.coverage or args.coverage_source or args.coverage_include or args.coverage_omit:
- if self.coverage_installed:
- with open(os.environ['COVERAGE_PROCESS_START']) as ccf:
- log.info("Coverage configuration file (%s)" % os.environ.get('COVERAGE_PROCESS_START'))
- log.info("===========================")
- log.info("\n%s" % "".join(ccf.readlines()))
-
- log.info("Coverage Report")
- log.info("===============")
- try:
- coverage_report()
- finally:
- # remove the pth file
- try:
- os.remove(self.coveragepth)
- except OSError:
- log.warn("Expected temporal file from coverage is missing, ignoring removal.")
-
- return StampedResult
-
-def cleanResultsDir(repo):
- """ Remove result files from directory """
-
- xml_files = []
- directory = repo.working_tree_dir
- for f in os.listdir(directory):
- path = os.path.join(directory, f)
- if os.path.isfile(path) and path.endswith('.xml'):
- xml_files.append(f)
- repo.index.remove(xml_files, working_tree=True)
-
-def copyResultFiles(src, dst, repo):
- """ Copy result files from src to dst removing the time stamp. """
-
- import shutil
-
- re_time = re.compile("-[0-9]+")
- file_list = []
-
- for root, subdirs, files in os.walk(src):
- tmp_dir = root.replace(src, '').lstrip('/')
- for s in subdirs:
- os.mkdir(os.path.join(dst, tmp_dir, s))
- for f in files:
- file_name = os.path.join(dst, tmp_dir, re_time.sub("", f))
- shutil.copy2(os.path.join(root, f), file_name)
- file_list.append(file_name)
- repo.index.add(file_list)
+ args = parser.parse_args()
+ results = args.func(logger, args)
+ ret = 0 if results.wasSuccessful() else 1
+ except SystemExit as err:
+ if err.code != 0:
+ raise err
+ ret = err.code
+ except OEQAPreRun as pr:
+ ret = 1
-class TestRunner(_TestRunner):
- """Test runner class aware of exporting tests."""
- def __init__(self, *args, **kwargs):
- try:
- exportdir = os.path.join(os.getcwd(), log_prefix)
- kwargsx = dict(**kwargs)
- # argument specific to XMLTestRunner, if adding a new runner then
- # also add logic to use other runner's args.
- kwargsx['output'] = exportdir
- kwargsx['descriptions'] = False
- # done for the case where telling the runner where to export
- super(TestRunner, self).__init__(*args, **kwargsx)
- except TypeError:
- log.info("test runner init'ed like unittest")
- super(TestRunner, self).__init__(*args, **kwargs)
+ return ret
-if __name__ == "__main__":
+if __name__ == '__main__':
try:
ret = main()
except Exception:
ret = 1
import traceback
traceback.print_exc()
- finally:
- remove_include()
- remove_inc_files()
sys.exit(ret)
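
Note on the coverage hooks removed above: they relied on coverage.py's documented subprocess support, in which COVERAGE_PROCESS_START points at a config file and a .pth file in the user site directory calls coverage.process_startup() in every new interpreter. A minimal, self-contained sketch of that technique follows; the function name and cleanup handling are illustrative, not the oe-selftest API.

import os
import site

def enable_subprocess_coverage(rcfile):
    """Make child Python processes record coverage data.

    Mirrors the removed startTestRun() logic: export COVERAGE_PROCESS_START
    and drop a .pth file into the user site-packages so each interpreter
    calls coverage.process_startup() as it starts.
    """
    os.environ['COVERAGE_PROCESS_START'] = rcfile
    site.ENABLE_USER_SITE = True
    os.makedirs(site.USER_SITE, exist_ok=True)
    pth = os.path.join(site.USER_SITE, 'coverage.pth')
    with open(pth, 'w') as pth_file:
        pth_file.write('import coverage; coverage.process_startup()\n')
    return pth  # the caller removes this file once the test run finishes
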
diff --git a/import-layers/yocto-poky/scripts/oe-setup-builddir b/import-layers/yocto-poky/scripts/oe-setup-builddir
index ef495517a..55d73ca1e 100755
--- a/import-layers/yocto-poky/scripts/oe-setup-builddir
+++ b/import-layers/yocto-poky/scripts/oe-setup-builddir
@@ -133,13 +133,6 @@ EOM
# unset SHOWYPDOC
fi
-cat <<EOM
-
-### Shell environment set up for builds. ###
-
-You can now run 'bitbake <target>'
-
-EOM
if [ -z "$OECORENOTESCONF" ]; then
OECORENOTESCONF="$OEROOT/meta/conf/conf-notes.txt"
fi
diff --git a/import-layers/yocto-poky/scripts/oe-test b/import-layers/yocto-poky/scripts/oe-test
index a1d282db3..34d9012d1 100755
--- a/import-layers/yocto-poky/scripts/oe-test
+++ b/import-layers/yocto-poky/scripts/oe-test
@@ -8,7 +8,6 @@
import os
import sys
import argparse
-import importlib
import logging
scripts_path = os.path.dirname(os.path.realpath(__file__))
@@ -25,35 +24,10 @@ try:
except ImportError:
pass
-from oeqa.core.context import OETestContextExecutor
+from oeqa.utils import load_test_components
+from oeqa.core.exception import OEQAPreRun
-logger = scriptutils.logger_create('oe-test')
-
-def _load_test_components(logger):
- components = {}
-
- for path in sys.path:
- base_dir = os.path.join(path, 'oeqa')
- if os.path.exists(base_dir) and os.path.isdir(base_dir):
- for file in os.listdir(base_dir):
- comp_name = file
- comp_context = os.path.join(base_dir, file, 'context.py')
- if os.path.exists(comp_context):
- comp_plugin = importlib.import_module('oeqa.%s.%s' % \
- (comp_name, 'context'))
- try:
- if not issubclass(comp_plugin._executor_class,
- OETestContextExecutor):
- raise TypeError("Component %s in %s, _executor_class "\
- "isn't derived from OETestContextExecutor."\
- % (comp_name, comp_context))
-
- components[comp_name] = comp_plugin._executor_class()
- except AttributeError:
- raise AttributeError("Component %s in %s don't have "\
- "_executor_class defined." % (comp_name, comp_context))
-
- return components
+logger = scriptutils.logger_create('oe-test', stream=sys.stdout)
def main():
parser = argparse_oe.ArgumentParser(description="OpenEmbedded test tool",
@@ -73,7 +47,7 @@ def main():
elif global_args.quiet:
logger.setLevel(logging.ERROR)
- components = _load_test_components(logger)
+ components = load_test_components(logger, 'oe-test')
subparsers = parser.add_subparsers(dest="subparser_name", title='subcommands', metavar='<subcommand>')
subparsers.add_subparser_group('components', 'Test components')
@@ -92,6 +66,8 @@ def main():
ret = err.code
except argparse_oe.ArgumentUsageError as ae:
parser.error_subcommand(ae.message, ae.subcommand)
+ except OEQAPreRun as pr:
+ ret = 1
return ret
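
The _load_test_components() helper deleted above (now provided by oeqa.utils.load_test_components) is a small plugin-discovery pattern: scan sys.path for oeqa/<component>/context.py, import the module, and check that it exposes an _executor_class. A condensed sketch of the same pattern, with generic names standing in for the oeqa specifics:

import os
import sys
import importlib

def discover_components(package='oeqa', attr='_executor_class'):
    """Return {component_name: executor_instance} for every
    <package>.<name>.context module reachable on sys.path."""
    components = {}
    for path in sys.path:
        base_dir = os.path.join(path, package)
        if not os.path.isdir(base_dir):
            continue
        for name in os.listdir(base_dir):
            if not os.path.exists(os.path.join(base_dir, name, 'context.py')):
                continue
            module = importlib.import_module('%s.%s.context' % (package, name))
            executor = getattr(module, attr, None)
            if executor is None:
                raise AttributeError('%s.%s.context does not define %s'
                                     % (package, name, attr))
            components[name] = executor()
    return components
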
diff --git a/import-layers/yocto-poky/scripts/recipetool b/import-layers/yocto-poky/scripts/recipetool
index 3765ec7cf..3a3c9b744 100755
--- a/import-layers/yocto-poky/scripts/recipetool
+++ b/import-layers/yocto-poky/scripts/recipetool
@@ -36,8 +36,8 @@ def tinfoil_init(parserecipes):
import bb.tinfoil
import logging
tinfoil = bb.tinfoil.Tinfoil(tracking=True)
- tinfoil.prepare(not parserecipes)
tinfoil.logger.setLevel(logger.getEffectiveLevel())
+ tinfoil.prepare(not parserecipes)
return tinfoil
def main():
diff --git a/import-layers/yocto-poky/scripts/runqemu b/import-layers/yocto-poky/scripts/runqemu
index 9b6d330f1..0ed1eec2d 100755
--- a/import-layers/yocto-poky/scripts/runqemu
+++ b/import-layers/yocto-poky/scripts/runqemu
@@ -28,14 +28,18 @@ import shutil
import glob
import configparser
-class OEPathError(Exception):
+class RunQemuError(Exception):
+ """Custom exception to raise on known errors."""
+ pass
+
+class OEPathError(RunQemuError):
"""Custom Exception to give better guidance on missing binaries"""
def __init__(self, message):
- self.message = "In order for this script to dynamically infer paths\n \
+ super().__init__("In order for this script to dynamically infer paths to\n \
kernels or filesystem images, you either need bitbake in your PATH\n \
or to source oe-init-build-env before running this script.\n\n \
Dynamic path inference can be avoided by passing a *.qemuboot.conf to\n \
-runqemu, i.e. `runqemu /path/to/my-image-name.qemuboot.conf`\n\n %s" % message
+runqemu, i.e. `runqemu /path/to/my-image-name.qemuboot.conf`\n\n %s" % message)
def create_logger():
@@ -44,7 +48,7 @@ def create_logger():
# create console handler and set level to debug
ch = logging.StreamHandler()
- ch.setLevel(logging.INFO)
+ ch.setLevel(logging.DEBUG)
# create formatter
formatter = logging.Formatter('%(name)s - %(levelname)s - %(message)s')
@@ -81,6 +85,8 @@ of the following environment variables (in any order):
qemuparams=<xyz> - specify custom parameters to QEMU
bootparams=<xyz> - specify custom kernel parameters during boot
help, -h, --help: print this text
+ -d, --debug: Enable debug output
+ -q, --quiet: Hide most output except error messages
Examples:
runqemu
@@ -90,25 +96,25 @@ Examples:
runqemu qemux86-64 core-image-sato ext4
runqemu qemux86-64 wic-image-minimal wic
runqemu path/to/bzImage-qemux86.bin path/to/nfsrootdir/ serial
- runqemu qemux86 iso/hddimg/vmdk/qcow2/vdi/ramfs/cpio.gz...
+ runqemu qemux86 iso/hddimg/wic.vmdk/wic.qcow2/wic.vdi/ramfs/cpio.gz...
runqemu qemux86 qemuparams="-m 256"
runqemu qemux86 bootparams="psplash=false"
- runqemu path/to/<image>-<machine>.vmdk
runqemu path/to/<image>-<machine>.wic
+ runqemu path/to/<image>-<machine>.wic.vmdk
""")
def check_tun():
"""Check /dev/net/tun"""
dev_tun = '/dev/net/tun'
if not os.path.exists(dev_tun):
- raise Exception("TUN control device %s is unavailable; you may need to enable TUN (e.g. sudo modprobe tun)" % dev_tun)
+ raise RunQemuError("TUN control device %s is unavailable; you may need to enable TUN (e.g. sudo modprobe tun)" % dev_tun)
if not os.access(dev_tun, os.W_OK):
- raise Exception("TUN control device %s is not writable, please fix (e.g. sudo chmod 666 %s)" % (dev_tun, dev_tun))
+ raise RunQemuError("TUN control device %s is not writable, please fix (e.g. sudo chmod 666 %s)" % (dev_tun, dev_tun))
def check_libgl(qemu_bin):
cmd = 'ldd %s' % qemu_bin
- logger.info('Running %s...' % cmd)
+ logger.debug('Running %s...' % cmd)
need_gl = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).stdout.read().decode('utf-8')
if re.search('libGLU', need_gl):
# We can't run without a libGL.so
@@ -137,7 +143,7 @@ def check_libgl(qemu_bin):
logger.error("You need libGL.so and libGLU.so to exist in your library path to run the QEMU emulator.")
logger.error("Ubuntu package names are: libgl1-mesa-dev and libglu1-mesa-dev.")
logger.error("Fedora package names are: mesa-libGL-devel mesa-libGLU-devel.")
- raise Exception('%s requires libGLU, but not found' % qemu_bin)
+ raise RunQemuError('%s requires libGLU, but it was not found' % qemu_bin)
def get_first_file(cmds):
"""Return first file found in wildcard cmds"""
@@ -212,8 +218,10 @@ class BaseConfig(object):
self.lock_descriptor = ''
self.bitbake_e = ''
self.snapshot = False
- self.fstypes = ('ext2', 'ext3', 'ext4', 'jffs2', 'nfs', 'btrfs', 'cpio.gz', 'cpio', 'ramfs')
- self.vmtypes = ('hddimg', 'hdddirect', 'wic', 'vmdk', 'qcow2', 'vdi', 'iso')
+ self.fstypes = ('ext2', 'ext3', 'ext4', 'jffs2', 'nfs', 'btrfs',
+ 'cpio.gz', 'cpio', 'ramfs', 'tar.bz2', 'tar.gz')
+ self.vmtypes = ('hddimg', 'hdddirect', 'wic', 'wic.vmdk',
+ 'wic.qcow2', 'wic.vdi', 'iso')
self.network_device = "-device e1000,netdev=net0,mac=@MAC@"
# Use different mac section for tap and slirp to avoid
# conflicts, e.g., when one is running with tap, the other is
@@ -224,13 +232,17 @@ class BaseConfig(object):
self.mac_tap = "52:54:00:12:34:"
self.mac_slirp = "52:54:00:12:35:"
- def acquire_lock(self):
- logger.info("Acquiring lockfile %s..." % self.lock)
+ def acquire_lock(self, error=True):
+ logger.debug("Acquiring lockfile %s..." % self.lock)
try:
self.lock_descriptor = open(self.lock, 'w')
fcntl.flock(self.lock_descriptor, fcntl.LOCK_EX|fcntl.LOCK_NB)
except Exception as e:
- logger.info("Acquiring lockfile %s failed: %s" % (self.lock, e))
+ msg = "Acquiring lockfile %s failed: %s" % (self.lock, e)
+ if error:
+ logger.error(msg)
+ else:
+ logger.info(msg)
if self.lock_descriptor:
self.lock_descriptor.close()
return False
@@ -255,10 +267,10 @@ class BaseConfig(object):
def is_deploy_dir_image(self, p):
if os.path.isdir(p):
if not re.search('.qemuboot.conf$', '\n'.join(os.listdir(p)), re.M):
- logger.info("Can't find required *.qemuboot.conf in %s" % p)
+ logger.debug("Can't find required *.qemuboot.conf in %s" % p)
return False
if not any(map(lambda name: '-image-' in name, os.listdir(p))):
- logger.info("Can't find *-image-* in %s" % p)
+ logger.debug("Can't find *-image-* in %s" % p)
return False
return True
else:
@@ -271,15 +283,17 @@ class BaseConfig(object):
if not self.fstype or self.fstype == fst:
if fst == 'ramfs':
fst = 'cpio.gz'
+ if fst in ('tar.bz2', 'tar.gz'):
+ fst = 'nfs'
self.fstype = fst
else:
- raise Exception("Conflicting: FSTYPE %s and %s" % (self.fstype, fst))
+ raise RunQemuError("Conflicting: FSTYPE %s and %s" % (self.fstype, fst))
def set_machine_deploy_dir(self, machine, deploy_dir_image):
"""Set MACHINE and DEPLOY_DIR_IMAGE"""
- logger.info('MACHINE: %s' % machine)
+ logger.debug('MACHINE: %s' % machine)
self.set("MACHINE", machine)
- logger.info('DEPLOY_DIR_IMAGE: %s' % deploy_dir_image)
+ logger.debug('DEPLOY_DIR_IMAGE: %s' % deploy_dir_image)
self.set("DEPLOY_DIR_IMAGE", deploy_dir_image)
def check_arg_nfs(self, p):
@@ -329,30 +343,30 @@ class BaseConfig(object):
else:
logger.warn("%s doesn't exist" % qb)
else:
- raise Exception("Can't find FSTYPE from: %s" % p)
+ raise RunQemuError("Can't find FSTYPE from: %s" % p)
elif os.path.isdir(p) or re.search(':', p) and re.search('/', p):
if self.is_deploy_dir_image(p):
- logger.info('DEPLOY_DIR_IMAGE: %s' % p)
+ logger.debug('DEPLOY_DIR_IMAGE: %s' % p)
self.set("DEPLOY_DIR_IMAGE", p)
else:
- logger.info("Assuming %s is an nfs rootfs" % p)
+ logger.debug("Assuming %s is an nfs rootfs" % p)
self.check_arg_nfs(p)
elif os.path.basename(p).startswith('ovmf'):
self.ovmf_bios.append(p)
else:
- raise Exception("Unknown path arg %s" % p)
+ raise RunQemuError("Unknown path arg %s" % p)
def check_arg_machine(self, arg):
"""Check whether it is a machine"""
if self.get('MACHINE') == arg:
return
elif self.get('MACHINE') and self.get('MACHINE') != arg:
- raise Exception("Maybe conflicted MACHINE: %s vs %s" % (self.get('MACHINE'), arg))
+ raise RunQemuError("Maybe conflicted MACHINE: %s vs %s" % (self.get('MACHINE'), arg))
elif re.search('/', arg):
- raise Exception("Unknown arg: %s" % arg)
+ raise RunQemuError("Unknown arg: %s" % arg)
- logger.info('Assuming MACHINE = %s' % arg)
+ logger.debug('Assuming MACHINE = %s' % arg)
# if we're running under testimage, or similarly as a child
# of an existing bitbake invocation, we can't invoke bitbake
@@ -381,7 +395,7 @@ class BaseConfig(object):
if s:
deploy_dir_image = s.group(1)
else:
- raise Exception("bitbake -e %s" % self.bitbake_e)
+ raise RunQemuError("bitbake -e %s" % self.bitbake_e)
if self.is_deploy_dir_image(deploy_dir_image):
self.set_machine_deploy_dir(arg, deploy_dir_image)
else:
@@ -389,6 +403,16 @@ class BaseConfig(object):
self.set("MACHINE", arg)
def check_args(self):
+ for debug in ("-d", "--debug"):
+ if debug in sys.argv:
+ logger.setLevel(logging.DEBUG)
+ sys.argv.remove(debug)
+
+ for quiet in ("-q", "--quiet"):
+ if quiet in sys.argv:
+ logger.setLevel(logging.ERROR)
+ sys.argv.remove(quiet)
+
unknown_arg = ""
for arg in sys.argv[1:]:
if arg in self.fstypes + self.vmtypes:
@@ -435,7 +459,9 @@ class BaseConfig(object):
if (not unknown_arg) or unknown_arg == arg:
unknown_arg = arg
else:
- raise Exception("Can't handle two unknown args: %s %s" % (unknown_arg, arg))
+ raise RunQemuError("Can't handle two unknown args: %s %s\n"
+ "Try 'runqemu help' on how to use it" % \
+ (unknown_arg, arg))
# Check to make sure it is a valid machine
if unknown_arg:
if self.get('MACHINE') == unknown_arg:
@@ -448,7 +474,7 @@ class BaseConfig(object):
self.check_arg_machine(unknown_arg)
- if not self.get('DEPLOY_DIR_IMAGE'):
+ if not (self.get('DEPLOY_DIR_IMAGE') or self.qbconfload):
self.load_bitbake_env()
s = re.search('^DEPLOY_DIR_IMAGE="(.*)"', self.bitbake_e, re.M)
if s:
@@ -461,7 +487,7 @@ class BaseConfig(object):
return
if not self.get('QB_CPU_KVM'):
- raise Exception("QB_CPU_KVM is NULL, this board doesn't support kvm")
+ raise RunQemuError("QB_CPU_KVM is NULL, this board doesn't support kvm")
self.qemu_opt_script += ' %s %s' % (self.get('QB_MACHINE'), self.get('QB_CPU_KVM'))
yocto_kvm_wiki = "https://wiki.yoctoproject.org/wiki/How_to_enable_KVM_for_Poky_qemu"
@@ -473,12 +499,12 @@ class BaseConfig(object):
if not kvm_cap:
logger.error("You are trying to enable KVM on a cpu without VT support.")
logger.error("Remove kvm from the command-line, or refer:")
- raise Exception(yocto_kvm_wiki)
+ raise RunQemuError(yocto_kvm_wiki)
if not os.path.exists(dev_kvm):
logger.error("Missing KVM device. Have you inserted kvm modules?")
logger.error("For further help see:")
- raise Exception(yocto_kvm_wiki)
+ raise RunQemuError(yocto_kvm_wiki)
if os.access(dev_kvm, os.W_OK|os.R_OK):
self.qemu_opt_script += ' -enable-kvm'
@@ -490,18 +516,18 @@ class BaseConfig(object):
else:
logger.error("You have no read or write permission on /dev/kvm.")
logger.error("Please change the ownership of this file as described at:")
- raise Exception(yocto_kvm_wiki)
+ raise RunQemuError(yocto_kvm_wiki)
if self.vhost_enabled:
if not os.path.exists(dev_vhost):
logger.error("Missing virtio net device. Have you inserted vhost-net module?")
logger.error("For further help see:")
- raise Exception(yocto_paravirt_kvm_wiki)
+ raise RunQemuError(yocto_paravirt_kvm_wiki)
if not os.access(dev_kvm, os.W_OK|os.R_OK):
logger.error("You have no read or write permission on /dev/vhost-net.")
logger.error("Please change the ownership of this file as described at:")
- raise Exception(yocto_kvm_wiki)
+ raise RunQemuError(yocto_kvm_wiki)
def check_fstype(self):
"""Check and setup FSTYPE"""
@@ -510,7 +536,7 @@ class BaseConfig(object):
if fstype:
self.fstype = fstype
else:
- raise Exception("FSTYPE is NULL!")
+ raise RunQemuError("FSTYPE is NULL!")
def check_rootfs(self):
"""Check and set rootfs"""
@@ -522,7 +548,7 @@ class BaseConfig(object):
if not self.rootfs:
self.rootfs = self.get('ROOTFS')
elif self.get('ROOTFS') != self.rootfs:
- raise Exception("Maybe conflicted ROOTFS: %s vs %s" % (self.get('ROOTFS'), self.rootfs))
+ raise RunQemuError("Maybe conflicted ROOTFS: %s vs %s" % (self.get('ROOTFS'), self.rootfs))
if self.fstype == 'nfs':
return
@@ -538,10 +564,10 @@ class BaseConfig(object):
cmds = (cmd_name, cmd_link)
self.rootfs = get_first_file(cmds)
if not self.rootfs:
- raise Exception("Failed to find rootfs: %s or %s" % cmds)
+ raise RunQemuError("Failed to find rootfs: %s or %s" % cmds)
if not os.path.exists(self.rootfs):
- raise Exception("Can't find rootfs: %s" % self.rootfs)
+ raise RunQemuError("Can't find rootfs: %s" % self.rootfs)
def check_ovmf(self):
"""Check and set full path for OVMF firmware and variable file(s)."""
@@ -555,7 +581,7 @@ class BaseConfig(object):
self.ovmf_bios[index] = path
break
else:
- raise Exception("Can't find OVMF firmware: %s" % ovmf)
+ raise RunQemuError("Can't find OVMF firmware: %s" % ovmf)
def check_kernel(self):
"""Check and set kernel, dtb"""
@@ -578,10 +604,10 @@ class BaseConfig(object):
cmds = (kernel_match_name, kernel_match_link, kernel_startswith)
self.kernel = get_first_file(cmds)
if not self.kernel:
- raise Exception('KERNEL not found: %s, %s or %s' % cmds)
+ raise RunQemuError('KERNEL not found: %s, %s or %s' % cmds)
if not os.path.exists(self.kernel):
- raise Exception("KERNEL %s not found" % self.kernel)
+ raise RunQemuError("KERNEL %s not found" % self.kernel)
dtb = self.get('QB_DTB')
if dtb:
@@ -591,7 +617,7 @@ class BaseConfig(object):
cmds = (cmd_match, cmd_startswith, cmd_wild)
self.dtb = get_first_file(cmds)
if not os.path.exists(self.dtb):
- raise Exception('DTB not found: %s, %s or %s' % cmds)
+ raise RunQemuError('DTB not found: %s, %s or %s' % cmds)
def check_biosdir(self):
"""Check custombiosdir"""
@@ -607,11 +633,11 @@ class BaseConfig(object):
break
if biosdir:
- logger.info("Assuming biosdir is: %s" % biosdir)
+ logger.debug("Assuming biosdir is: %s" % biosdir)
self.qemu_opt_script += ' -L %s' % biosdir
else:
logger.error("Custom BIOS directory not found. Tried: %s, %s, and %s" % (self.custombiosdir, biosdir_native, biosdir_host))
- raise Exception("Invalid custombiosdir: %s" % self.custombiosdir)
+ raise RunQemuError("Invalid custombiosdir: %s" % self.custombiosdir)
def check_mem(self):
s = re.search('-m +([0-9]+)', self.qemu_opt_script)
@@ -639,7 +665,7 @@ class BaseConfig(object):
# Check audio
if self.audio_enabled:
if not self.get('QB_AUDIO_DRV'):
- raise Exception("QB_AUDIO_DRV is NULL, this board doesn't support audio")
+ raise RunQemuError("QB_AUDIO_DRV is NULL, this board doesn't support audio")
if not self.get('QB_AUDIO_OPT'):
logger.warn('QB_AUDIO_OPT is NULL, you may need to define it to make audio work')
else:
@@ -662,7 +688,7 @@ class BaseConfig(object):
if self.get('DEPLOY_DIR_IMAGE'):
deploy_dir_image = self.get('DEPLOY_DIR_IMAGE')
else:
- logger.info("Can't find qemuboot conf file, DEPLOY_DIR_IMAGE is NULL!")
+ logger.warn("Can't find qemuboot conf file, DEPLOY_DIR_IMAGE is NULL!")
return
if self.rootfs and not os.path.exists(self.rootfs):
@@ -674,8 +700,11 @@ class BaseConfig(object):
self.rootfs, machine)
else:
cmd = 'ls -t %s/*.qemuboot.conf' % deploy_dir_image
- logger.info('Running %s...' % cmd)
- qbs = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).stdout.read().decode('utf-8')
+ logger.debug('Running %s...' % cmd)
+ try:
+ qbs = subprocess.check_output(cmd, shell=True).decode('utf-8')
+ except subprocess.CalledProcessError as err:
+ raise RunQemuError(err)
if qbs:
for qb in qbs.split():
# Don't use initramfs when other choices unless fstype is ramfs
@@ -694,14 +723,18 @@ class BaseConfig(object):
return
if not os.path.exists(self.qemuboot):
- raise Exception("Failed to find %s (wrong image name or BSP does not support running under qemu?)." % self.qemuboot)
+ raise RunQemuError("Failed to find %s (wrong image name or BSP does not support running under qemu?)." % self.qemuboot)
- logger.info('CONFFILE: %s' % self.qemuboot)
+ logger.debug('CONFFILE: %s' % self.qemuboot)
cf = configparser.ConfigParser()
cf.read(self.qemuboot)
for k, v in cf.items('config_bsp'):
k_upper = k.upper()
+ if v.startswith("../"):
+ v = os.path.abspath(os.path.dirname(self.qemuboot) + "/" + v)
+ elif v == ".":
+ v = os.path.dirname(self.qemuboot)
self.set(k_upper, v)
def validate_paths(self):
@@ -789,16 +822,12 @@ class BaseConfig(object):
all_instances.sort(key=int)
self.nfs_instance = int(all_instances.pop()) + 1
- mountd_rpcport = 21111 + self.nfs_instance
- nfsd_rpcport = 11111 + self.nfs_instance
nfsd_port = 3049 + 2 * self.nfs_instance
mountd_port = 3048 + 2 * self.nfs_instance
# Export vars for runqemu-export-rootfs
export_dict = {
'NFS_INSTANCE': self.nfs_instance,
- 'MOUNTD_RPCPORT': mountd_rpcport,
- 'NFSD_RPCPORT': nfsd_rpcport,
'NFSD_PORT': nfsd_port,
'MOUNTD_PORT': mountd_port,
}
@@ -806,7 +835,7 @@ class BaseConfig(object):
# Use '%s' since they are integers
os.putenv(k, '%s' % v)
- self.unfs_opts="nfsvers=3,port=%s,mountprog=%s,nfsprog=%s,udp,mountport=%s" % (nfsd_port, mountd_rpcport, nfsd_rpcport, mountd_port)
+ self.unfs_opts="nfsvers=3,port=%s,udp,mountport=%s" % (nfsd_port, mountd_port)
# Extract .tar.bz2 or .tar.bz if no nfs dir
if not (self.rootfs and os.path.isdir(self.rootfs)):
@@ -824,12 +853,12 @@ class BaseConfig(object):
elif os.path.exists(src2):
src = src2
if not src:
- raise Exception("No NFS_DIR is set, and can't find %s or %s to extract" % (src1, src2))
+ raise RunQemuError("No NFS_DIR is set, and can't find %s or %s to extract" % (src1, src2))
logger.info('NFS_DIR not found, extracting %s to %s' % (src, dest))
cmd = 'runqemu-extract-sdk %s %s' % (src, dest)
logger.info('Running %s...' % cmd)
if subprocess.call(cmd, shell=True) != 0:
- raise Exception('Failed to run %s' % cmd)
+ raise RunQemuError('Failed to run %s' % cmd)
self.clean_nfs_dir = True
self.rootfs = dest
@@ -837,7 +866,7 @@ class BaseConfig(object):
cmd = 'runqemu-export-rootfs start %s' % self.rootfs
logger.info('Running %s...' % cmd)
if subprocess.call(cmd, shell=True) != 0:
- raise Exception('Failed to run %s' % cmd)
+ raise RunQemuError('Failed to run %s' % cmd)
self.nfs_running = True
@@ -849,7 +878,7 @@ class BaseConfig(object):
self.kernel_cmdline_script += ' ip=dhcp'
# Port mapping
hostfwd = ",hostfwd=tcp::2222-:22,hostfwd=tcp::2323-:23"
- qb_slirp_opt_default = "-netdev user,id=net0%s" % hostfwd
+ qb_slirp_opt_default = "-netdev user,id=net0%s,tftp=%s" % (hostfwd, self.get('DEPLOY_DIR_IMAGE'))
qb_slirp_opt = self.get('QB_SLIRP_OPT') or qb_slirp_opt_default
# Figure out the port
ports = re.findall('hostfwd=[^-]*:([0-9]+)-[^,-]*', qb_slirp_opt)
@@ -888,6 +917,9 @@ class BaseConfig(object):
lockdir = "/tmp/qemu-tap-locks"
if not (self.qemuifup and self.qemuifdown and ip):
+ logger.error("runqemu-ifup: %s" % self.qemuifup)
+ logger.error("runqemu-ifdown: %s" % self.qemuifdown)
+ logger.error("ip: %s" % ip)
raise OEPathError("runqemu-ifup, runqemu-ifdown or ip not found")
if not os.path.exists(lockdir):
@@ -895,14 +927,15 @@ class BaseConfig(object):
# running at the same time.
try:
os.mkdir(lockdir)
+ os.chmod(lockdir, 0o777)
except FileExistsError:
pass
cmd = '%s link' % ip
- logger.info('Running %s...' % cmd)
+ logger.debug('Running %s...' % cmd)
ip_link = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).stdout.read().decode('utf-8')
# Matches line like: 6: tap0: <foo>
- possibles = re.findall('^[1-9]+: +(tap[0-9]+): <.*', ip_link, re.M)
+ possibles = re.findall('^[0-9]+: +(tap[0-9]+): <.*', ip_link, re.M)
tap = ""
for p in possibles:
lockfile = os.path.join(lockdir, p)
@@ -910,7 +943,7 @@ class BaseConfig(object):
logger.info('Found %s.skip, skipping %s' % (lockfile, p))
continue
self.lock = lockfile + '.lock'
- if self.acquire_lock():
+ if self.acquire_lock(error=False):
tap = p
logger.info("Using preconfigured tap device %s" % tap)
logger.info("If this is not intended, touch %s.skip to make runqemu skip %s." %(lockfile, tap))
@@ -920,7 +953,7 @@ class BaseConfig(object):
if os.path.exists(nosudo_flag):
logger.error("Error: There are no available tap devices to use for networking,")
logger.error("and I see %s exists, so I am not going to try creating" % nosudo_flag)
- raise Exception("a new one with sudo.")
+ raise RunQemuError("a new one with sudo.")
gid = os.getgid()
uid = os.getuid()
@@ -931,7 +964,7 @@ class BaseConfig(object):
self.lock = lockfile + '.lock'
self.acquire_lock()
self.cleantap = True
- logger.info('Created tap: %s' % tap)
+ logger.debug('Created tap: %s' % tap)
if not tap:
logger.error("Failed to setup tap device. Run runqemu-gen-tapdevs to manually create.")
@@ -960,8 +993,8 @@ class BaseConfig(object):
def setup_network(self):
if self.get('QB_NET') == 'none':
return
- cmd = "stty -g"
- self.saved_stty = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).stdout.read().decode('utf-8')
+ if sys.stdin.isatty():
+ self.saved_stty = subprocess.check_output("stty -g", shell=True).decode('utf-8')
self.network_device = self.get('QB_NETWORK_DEVICE') or self.network_device
if self.slirp_enabled:
self.setup_slirp()
@@ -971,6 +1004,8 @@ class BaseConfig(object):
def setup_rootfs(self):
if self.get('QB_ROOTFS') == 'none':
return
+ if 'wic.' in self.fstype:
+ self.fstype = self.fstype[4:]
rootfs_format = self.fstype if self.fstype in ('vmdk', 'qcow2', 'vdi') else 'raw'
qb_rootfs_opt = self.get('QB_ROOTFS_OPT')
@@ -986,7 +1021,7 @@ class BaseConfig(object):
vm_drive = ''
if self.fstype in self.vmtypes:
if self.fstype == 'iso':
- vm_drive = '-cdrom %s' % self.rootfs
+ vm_drive = '-drive file=%s,if=virtio,media=cdrom' % self.rootfs
elif self.get('QB_DRIVE_TYPE'):
drive_type = self.get('QB_DRIVE_TYPE')
if drive_type.startswith("/dev/sd"):
@@ -995,7 +1030,7 @@ class BaseConfig(object):
% (self.rootfs, rootfs_format)
elif drive_type.startswith("/dev/hd"):
logger.info('Using ide drive')
- vm_drive = "%s,format=%s" % (self.rootfs, rootfs_format)
+ vm_drive = "-drive file=%s,format=%s" % (self.rootfs, rootfs_format)
else:
# virtio might have been selected explicitly (just use it), or
# is used as fallback (then warn about that).
@@ -1011,7 +1046,7 @@ class BaseConfig(object):
if self.fstype == 'nfs':
self.rootfs_options = ''
- k_root = '/dev/nfs nfsroot=%s:%s,%s' % (self.nfs_server, self.rootfs, self.unfs_opts)
+ k_root = '/dev/nfs nfsroot=%s:%s,%s' % (self.nfs_server, os.path.abspath(self.rootfs), self.unfs_opts)
self.kernel_cmdline = 'root=%s rw highres=off' % k_root
if self.fstype == 'none':
@@ -1062,7 +1097,7 @@ class BaseConfig(object):
if not qemu_system:
qemu_system = self.guess_qb_system()
if not qemu_system:
- raise Exception("Failed to boot, QB_SYSTEM_NAME is NULL!")
+ raise RunQemuError("Failed to boot, QB_SYSTEM_NAME is NULL!")
qemu_bin = '%s/%s' % (self.bindir_native, qemu_system)
@@ -1101,9 +1136,9 @@ class BaseConfig(object):
self.qemu_opt += " -snapshot"
if self.serialstdio:
- logger.info("Interrupt character is '^]'")
- cmd = "stty intr ^]"
- subprocess.call(cmd, shell=True)
+ if sys.stdin.isatty():
+ subprocess.check_call("stty intr ^]", shell=True)
+ logger.info("Interrupt character is '^]'")
first_serial = ""
if not re.search("-nographic", self.qemu_opt):
@@ -1142,15 +1177,16 @@ class BaseConfig(object):
else:
kernel_opts = ""
cmd = "%s %s" % (self.qemu_opt, kernel_opts)
- logger.info('Running %s' % cmd)
- if subprocess.call(cmd, shell=True) != 0:
- raise Exception('Failed to run %s' % cmd)
+ logger.info('Running %s\n' % cmd)
+ process = subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE)
+ if process.wait():
+ logger.error("Failed to run qemu: %s", process.stderr.read().decode())
def cleanup(self):
if self.cleantap:
cmd = 'sudo %s %s %s' % (self.qemuifdown, self.tap, self.bindir_native)
- logger.info('Running %s' % cmd)
- subprocess.call(cmd, shell=True)
+ logger.debug('Running %s' % cmd)
+ subprocess.check_call(cmd, shell=True)
if self.lock_descriptor:
logger.info("Releasing lockfile for tap device '%s'" % self.tap)
self.release_lock()
@@ -1158,12 +1194,12 @@ class BaseConfig(object):
if self.nfs_running:
logger.info("Shutting down the userspace NFS server...")
cmd = "runqemu-export-rootfs stop %s" % self.rootfs
- logger.info('Running %s' % cmd)
- subprocess.call(cmd, shell=True)
+ logger.debug('Running %s' % cmd)
+ subprocess.check_call(cmd, shell=True)
if self.saved_stty:
cmd = "stty %s" % self.saved_stty
- subprocess.call(cmd, shell=True)
+ subprocess.check_call(cmd, shell=True)
if self.clean_nfs_dir:
logger.info('Removing %s' % self.rootfs)
@@ -1193,6 +1229,10 @@ class BaseConfig(object):
self.bitbake_e = ''
logger.warn("Couldn't run 'bitbake -e' to gather environment information:\n%s" % err.output.decode('utf-8'))
+ def validate_combos(self):
+ if (self.fstype in self.vmtypes) and self.kernel:
+ raise RunQemuError("%s doesn't need kernel %s!" % (self.fstype, self.kernel))
+
@property
def bindir_native(self):
result = self.get('STAGING_BINDIR_NATIVE')
@@ -1210,42 +1250,37 @@ class BaseConfig(object):
if os.path.exists(result):
self.set('STAGING_BINDIR_NATIVE', result)
return result
- raise Exception("Native sysroot directory %s doesn't exist" % result)
+ raise RunQemuError("Native sysroot directory %s doesn't exist" % result)
else:
- raise Exception("Can't find STAGING_BINDIR_NATIVE in '%s' output" % cmd)
+ raise RunQemuError("Can't find STAGING_BINDIR_NATIVE in '%s' output" % cmd)
def main():
if "help" in sys.argv or '-h' in sys.argv or '--help' in sys.argv:
print_usage()
return 0
- config = BaseConfig()
try:
+ config = BaseConfig()
config.check_args()
- except Exception as esc:
- logger.error(esc)
- logger.error("Try 'runqemu help' on how to use it")
- return 1
- config.read_qemuboot()
- config.check_and_set()
- config.print_config()
- try:
+ config.read_qemuboot()
+ config.check_and_set()
+ # Check whether the combination is valid or not
+ config.validate_combos()
+ config.print_config()
config.setup_network()
config.setup_rootfs()
config.setup_final()
config.start_qemu()
+ except RunQemuError as err:
+ logger.error(err)
+ return 1
+ except Exception as err:
+ import traceback
+ traceback.print_exc()
+ return 1
finally:
+ print("Cleanup")
config.cleanup()
- return 0
if __name__ == "__main__":
- try:
- ret = main()
- except OEPathError as err:
- ret = 1
- logger.error(err.message)
- except Exception as esc:
- ret = 1
- import traceback
- traceback.print_exc()
- sys.exit(ret)
+ sys.exit(main())
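
Because runqemu classifies its arguments by hand rather than through argparse, the new -d/--debug and -q/--quiet switches are stripped from sys.argv before the remaining words are inspected. A hedged, standalone sketch of that pre-scan follows; the logger name is illustrative.

import sys
import logging

logger = logging.getLogger('runqemu-sketch')  # illustrative name

def apply_verbosity_flags(argv=None):
    """Remove -d/--debug and -q/--quiet from argv, adjusting the log level,
    so only the positional words remain for later classification."""
    argv = sys.argv if argv is None else argv
    for flag, level in (('-d', logging.DEBUG), ('--debug', logging.DEBUG),
                        ('-q', logging.ERROR), ('--quiet', logging.ERROR)):
        while flag in argv:
            logger.setLevel(level)
            argv.remove(flag)
    return argv
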
diff --git a/import-layers/yocto-poky/scripts/runqemu-export-rootfs b/import-layers/yocto-poky/scripts/runqemu-export-rootfs
index c7992d822..70cdcdbb1 100755
--- a/import-layers/yocto-poky/scripts/runqemu-export-rootfs
+++ b/import-layers/yocto-poky/scripts/runqemu-export-rootfs
@@ -77,10 +77,6 @@ if [ ! -d "$PSEUDO_LOCALSTATEDIR" ]; then
exit 1
fi
-# rpc.mountd RPC port
-MOUNTD_RPCPORT=${MOUNTD_RPCPORT:=$[ 21111 + $NFS_INSTANCE ]}
-# rpc.nfsd RPC port
-NFSD_RPCPORT=${NFSD_RPCPORT:=$[ 11111 + $NFS_INSTANCE ]}
# NFS server port number
NFSD_PORT=${NFSD_PORT:=$[ 3049 + 2 * $NFS_INSTANCE ]}
# mountd port number
@@ -88,7 +84,7 @@ MOUNTD_PORT=${MOUNTD_PORT:=$[ 3048 + 2 * $NFS_INSTANCE ]}
## For debugging you would additionally add
## --debug all
-UNFSD_OPTS="-p -N -i $NFSPID -e $EXPORTS -x $NFSD_RPCPORT -n $NFSD_PORT -y $MOUNTD_RPCPORT -m $MOUNTD_PORT"
+UNFSD_OPTS="-p -N -i $NFSPID -e $EXPORTS -n $NFSD_PORT -m $MOUNTD_PORT"
# See how we were called.
case "$1" in
@@ -130,7 +126,7 @@ case "$1" in
fi
echo " "
echo "On your target please remember to add the following options for NFS"
- echo "nfsroot=IP_ADDRESS:$NFS_EXPORT_DIR,nfsvers=3,port=$NFSD_PORT,mountprog=$MOUNTD_RPCPORT,nfsprog=$NFSD_RPCPORT,udp,mountport=$MOUNTD_PORT"
+ echo "nfsroot=IP_ADDRESS:$NFS_EXPORT_DIR,nfsvers=3,port=$NFSD_PORT,udp,mountport=$MOUNTD_PORT"
;;
stop)
if [ -f "$NFSPID" ]; then
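
Dropping the fixed RPC ports means the guest mount options only carry the per-instance NFS data and mountd ports. A hedged sketch of how the matching nfsroot= kernel command line is assembled; the function name and example values are illustrative.

def nfs_kernel_cmdline(server_ip, rootfs_dir, instance=0):
    """Compose the root= argument used for NFS boots; the port numbers
    follow the per-instance scheme above (3049/3048 + 2 * instance)."""
    nfsd_port = 3049 + 2 * instance
    mountd_port = 3048 + 2 * instance
    unfs_opts = 'nfsvers=3,port=%s,udp,mountport=%s' % (nfsd_port, mountd_port)
    return 'root=/dev/nfs nfsroot=%s:%s,%s rw highres=off' % (
        server_ip, rootfs_dir, unfs_opts)

print(nfs_kernel_cmdline('192.168.7.1', '/srv/nfs/rootfs'))
# root=/dev/nfs nfsroot=192.168.7.1:/srv/nfs/rootfs,nfsvers=3,port=3049,udp,mountport=3048 rw highres=off
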
diff --git a/import-layers/yocto-poky/scripts/runqemu.README b/import-layers/yocto-poky/scripts/runqemu.README
index 5908d831a..da9abd7df 100644
--- a/import-layers/yocto-poky/scripts/runqemu.README
+++ b/import-layers/yocto-poky/scripts/runqemu.README
@@ -35,7 +35,7 @@ Notes
run as non root. The runqemu-gen-tapdevs script can also be used by
root to prepopulate the appropriate network devices.
- You can access the host computer at 192.168.7.1 within the image.
- - Your qemu system will be accessible as 192.16.7.2.
+ - Your qemu system will be accessible as 192.168.7.2.
- The script extracts the root filesystem specified under pseudo and sets up a userspace
NFS server to share the image over by default meaning the filesystem can be accessed by
both the host and guest systems.
diff --git a/import-layers/yocto-poky/scripts/sstate-diff-machines.sh b/import-layers/yocto-poky/scripts/sstate-diff-machines.sh
index 056aa0a04..27c6a3300 100755
--- a/import-layers/yocto-poky/scripts/sstate-diff-machines.sh
+++ b/import-layers/yocto-poky/scripts/sstate-diff-machines.sh
@@ -118,7 +118,7 @@ for M in ${machines}; do
cp -ra ${tmpdir}/stamps/* ${OUTPUT}/${M}
find ${OUTPUT}/${M} -name \*sigdata\* | sed "s#${OUTPUT}/${M}/##g" | sort > ${OUTPUT}/${M}/list
M_UNDERSCORE=`echo ${M} | sed 's/-/_/g'`
- sed "s/${M_UNDERSCORE}/MACHINE/g; s/${M}/MACHINE/g" ${OUTPUT}/${M}/list | sort > ${OUTPUT}/${M}/list.M
+ sed "s/^${M_UNDERSCORE}-/MACHINE/g" ${OUTPUT}/${M}/list | sort > ${OUTPUT}/${M}/list.M
find ${tmpdir}/stamps/ -name \*sigdata\* | xargs rm -f
else
printf "ERROR: no sigdata files were generated for MACHINE $M in ${tmpdir}/stamps\n";
diff --git a/import-layers/yocto-poky/scripts/sstate-sysroot-cruft.sh b/import-layers/yocto-poky/scripts/sstate-sysroot-cruft.sh
index b6166aa1b..d9917f515 100755
--- a/import-layers/yocto-poky/scripts/sstate-sysroot-cruft.sh
+++ b/import-layers/yocto-poky/scripts/sstate-sysroot-cruft.sh
@@ -105,7 +105,9 @@ WHITELIST="${WHITELIST} \
# generated by php
WHITELIST="${WHITELIST} \
+ .*/usr/lib/php5/php/.channels \
.*/usr/lib/php5/php/.channels/.* \
+ .*/usr/lib/php5/php/.registry \
.*/usr/lib/php5/php/.registry/.* \
.*/usr/lib/php5/php/.depdb \
.*/usr/lib/php5/php/.depdblock \
diff --git a/import-layers/yocto-poky/scripts/test-reexec b/import-layers/yocto-poky/scripts/test-reexec
index 9eaa96e75..30e792c7d 100755
--- a/import-layers/yocto-poky/scripts/test-reexec
+++ b/import-layers/yocto-poky/scripts/test-reexec
@@ -38,9 +38,9 @@ mkdir -p $LOGS
function clearsstate {
target=$1
- sstate_dir=`bitbake $target -e | grep "^SSTATE_DIR" | cut -d "\"" -f 2`
- sstate_pkgspec=`bitbake $target -e | grep "^SSTATE_PKGSPEC" | cut -d "\"" -f 2`
- sstasks=`bitbake $target -e | grep "^SSTATETASKS" | cut -d "\"" -f 2`
+ sstate_dir=`bitbake $target -e | grep "^SSTATE_DIR=" | cut -d "\"" -f 2`
+ sstate_pkgspec=`bitbake $target -e | grep "^SSTATE_PKGSPEC=" | cut -d "\"" -f 2`
+ sstasks=`bitbake $target -e | grep "^SSTATETASKS=" | cut -d "\"" -f 2`
for sstask in $sstasks
do
diff --git a/import-layers/yocto-poky/scripts/wic b/import-layers/yocto-poky/scripts/wic
index a5f2dbfc6..097084a60 100755
--- a/import-layers/yocto-poky/scripts/wic
+++ b/import-layers/yocto-poky/scripts/wic
@@ -33,8 +33,10 @@ __version__ = "0.2.0"
# Python Standard Library modules
import os
import sys
-import optparse
+import argparse
import logging
+
+from collections import namedtuple
from distutils import spawn
# External modules
@@ -54,7 +56,7 @@ else:
bitbake_main = None
from wic import WicError
-from wic.utils.misc import get_bitbake_var, BB_VARS
+from wic.misc import get_bitbake_var, BB_VARS
from wic import engine
from wic import help as hlp
@@ -85,66 +87,30 @@ def rootfs_dir_to_args(krootfs_dir):
rootfs_dir += '='.join([key, val])
return rootfs_dir.strip()
-def callback_rootfs_dir(option, opt, value, parser):
- """
- Build a dict using --rootfs_dir connection=dir
- """
- if not type(parser.values.rootfs_dir) is dict:
- parser.values.rootfs_dir = dict()
- if '=' in value:
- (key, rootfs_dir) = value.split('=')
- else:
- key = 'ROOTFS_DIR'
- rootfs_dir = value
+class RootfsArgAction(argparse.Action):
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+
+ def __call__(self, parser, namespace, value, option_string=None):
+ if not "rootfs_dir" in vars(namespace) or \
+ not type(namespace.__dict__['rootfs_dir']) is dict:
+ namespace.__dict__['rootfs_dir'] = {}
+
+ if '=' in value:
+ (key, rootfs_dir) = value.split('=')
+ else:
+ key = 'ROOTFS_DIR'
+ rootfs_dir = value
+
+ namespace.__dict__['rootfs_dir'][key] = rootfs_dir
- parser.values.rootfs_dir[key] = rootfs_dir
-def wic_create_subcommand(args, usage_str):
+def wic_create_subcommand(options, usage_str):
"""
Command-line handling for image creation. The real work is done
by image.engine.wic_create()
"""
- parser = optparse.OptionParser(usage=usage_str)
-
- parser.add_option("-o", "--outdir", dest="outdir", default='.',
- help="name of directory to create image in")
- parser.add_option("-e", "--image-name", dest="image_name",
- help="name of the image to use the artifacts from "
- "e.g. core-image-sato")
- parser.add_option("-r", "--rootfs-dir", dest="rootfs_dir", type="string",
- action="callback", callback=callback_rootfs_dir,
- help="path to the /rootfs dir to use as the "
- ".wks rootfs source")
- parser.add_option("-b", "--bootimg-dir", dest="bootimg_dir",
- help="path to the dir containing the boot artifacts "
- "(e.g. /EFI or /syslinux dirs) to use as the "
- ".wks bootimg source")
- parser.add_option("-k", "--kernel-dir", dest="kernel_dir",
- help="path to the dir containing the kernel to use "
- "in the .wks bootimg")
- parser.add_option("-n", "--native-sysroot", dest="native_sysroot",
- help="path to the native sysroot containing the tools "
- "to use to build the image")
- parser.add_option("-s", "--skip-build-check", dest="build_check",
- action="store_false", default=True, help="skip the build check")
- parser.add_option("-f", "--build-rootfs", action="store_true", help="build rootfs")
- parser.add_option("-c", "--compress-with", choices=("gzip", "bzip2", "xz"),
- dest='compressor',
- help="compress image with specified compressor")
- parser.add_option("-m", "--bmap", action="store_true", help="generate .bmap")
- parser.add_option("-v", "--vars", dest='vars_dir',
- help="directory with <image>.env files that store "
- "bitbake variables")
- parser.add_option("-D", "--debug", dest="debug", action="store_true",
- default=False, help="output debug information")
-
- (options, args) = parser.parse_args(args)
-
- if len(args) != 1:
- parser.print_help()
- raise WicError("Wrong number of arguments, exiting")
-
if options.build_rootfs and not bitbake_main:
raise WicError("Can't build rootfs as bitbake is not in the $PATH")
@@ -188,32 +154,34 @@ def wic_create_subcommand(args, usage_str):
rootfs_dir = get_bitbake_var("IMAGE_ROOTFS", options.image_name)
kernel_dir = get_bitbake_var("DEPLOY_DIR_IMAGE", options.image_name)
bootimg_dir = get_bitbake_var("STAGING_DATADIR", options.image_name)
- native_sysroot = get_bitbake_var("RECIPE_SYSROOT_NATIVE",
- options.image_name) #, cache=False)
+
+ native_sysroot = options.native_sysroot
+ if options.vars_dir and not native_sysroot:
+ native_sysroot = get_bitbake_var("RECIPE_SYSROOT_NATIVE", options.image_name)
else:
if options.build_rootfs:
raise WicError("Image name is not specified, exiting. "
"(Use -e/--image-name to specify it)")
native_sysroot = options.native_sysroot
- if not native_sysroot or not os.path.isdir(native_sysroot):
+ if not options.vars_dir and (not native_sysroot or not os.path.isdir(native_sysroot)):
logger.info("Building wic-tools...\n")
if bitbake_main(BitBakeConfigParameters("bitbake wic-tools".split()),
cookerdata.CookerConfiguration()):
raise WicError("bitbake wic-tools failed")
native_sysroot = get_bitbake_var("RECIPE_SYSROOT_NATIVE", "wic-tools")
- if not native_sysroot:
- raise WicError("Unable to find the location of the native "
- "tools sysroot to use")
- wks_file = args[0]
+ if not native_sysroot:
+ raise WicError("Unable to find the location of the native tools sysroot")
+
+ wks_file = options.wks_file
if not wks_file.endswith(".wks"):
wks_file = engine.find_canned_image(scripts_path, wks_file)
if not wks_file:
raise WicError("No image named %s found, exiting. (Use 'wic list images' "
"to list available images, or specify a fully-qualified OE "
- "kickstart (.wks) filename)" % args[0])
+ "kickstart (.wks) filename)" % options.wks_file)
if not options.image_name:
rootfs_dir = ''
@@ -264,59 +232,290 @@ def wic_list_subcommand(args, usage_str):
Command-line handling for listing available images.
The real work is done by image.engine.wic_list()
"""
- parser = optparse.OptionParser(usage=usage_str)
- args = parser.parse_args(args)[1]
-
if not engine.wic_list(args, scripts_path):
- parser.print_help()
raise WicError("Bad list arguments, exiting")
-def wic_help_topic_subcommand(args, usage_str):
+def wic_ls_subcommand(args, usage_str):
+ """
+ Command-line handling for list content of images.
+ The real work is done by engine.wic_ls()
+ """
+ engine.wic_ls(args, args.native_sysroot)
+
+def wic_cp_subcommand(args, usage_str):
+ """
+ Command-line handling for copying files/dirs to images.
+ The real work is done by engine.wic_cp()
+ """
+ engine.wic_cp(args, args.native_sysroot)
+
+def wic_rm_subcommand(args, usage_str):
"""
- Command-line handling for help-only 'subcommands'. This is
- essentially a dummy command that does nothing but allow users to
- use the existing subcommand infrastructure to display help on a
- particular topic not attached to any particular subcommand.
+ Command-line handling for removing files/dirs from images.
+ The real work is done by engine.wic_rm()
+ """
+ engine.wic_rm(args, args.native_sysroot)
+
+def wic_write_subcommand(args, usage_str):
+ """
+ Command-line handling for writing images.
+ The real work is done by engine.wic_write()
+ """
+ engine.wic_write(args, args.native_sysroot)
+
+def wic_help_subcommand(args, usage_str):
+ """
+ Command-line handling for help subcommand to keep the current
+ structure of the function definitions.
"""
pass
+def wic_help_topic_subcommand(usage_str, help_str):
+ """
+ Display function for help 'sub-subcommands'.
+ """
+ print(help_str)
+ return
+
+
wic_help_topic_usage = """
"""
-subcommands = {
- "create": [wic_create_subcommand,
- hlp.wic_create_usage,
- hlp.wic_create_help],
- "list": [wic_list_subcommand,
- hlp.wic_list_usage,
- hlp.wic_list_help],
+helptopics = {
"plugins": [wic_help_topic_subcommand,
wic_help_topic_usage,
- hlp.get_wic_plugins_help],
+ hlp.wic_plugins_help],
"overview": [wic_help_topic_subcommand,
wic_help_topic_usage,
hlp.wic_overview_help],
"kickstart": [wic_help_topic_subcommand,
wic_help_topic_usage,
hlp.wic_kickstart_help],
+ "create": [wic_help_topic_subcommand,
+ wic_help_topic_usage,
+ hlp.wic_create_help],
+ "ls": [wic_help_topic_subcommand,
+ wic_help_topic_usage,
+ hlp.wic_ls_help],
+ "cp": [wic_help_topic_subcommand,
+ wic_help_topic_usage,
+ hlp.wic_cp_help],
+ "rm": [wic_help_topic_subcommand,
+ wic_help_topic_usage,
+ hlp.wic_rm_help],
+ "write": [wic_help_topic_subcommand,
+ wic_help_topic_usage,
+ hlp.wic_write_help],
+ "list": [wic_help_topic_subcommand,
+ wic_help_topic_usage,
+ hlp.wic_list_help]
+}
+
+
+def wic_init_parser_create(subparser):
+ subparser.add_argument("wks_file")
+
+ subparser.add_argument("-o", "--outdir", dest="outdir", default='.',
+ help="name of directory to create image in")
+ subparser.add_argument("-e", "--image-name", dest="image_name",
+ help="name of the image to use the artifacts from "
+ "e.g. core-image-sato")
+ subparser.add_argument("-r", "--rootfs-dir", action=RootfsArgAction,
+ help="path to the /rootfs dir to use as the "
+ ".wks rootfs source")
+ subparser.add_argument("-b", "--bootimg-dir", dest="bootimg_dir",
+ help="path to the dir containing the boot artifacts "
+ "(e.g. /EFI or /syslinux dirs) to use as the "
+ ".wks bootimg source")
+ subparser.add_argument("-k", "--kernel-dir", dest="kernel_dir",
+ help="path to the dir containing the kernel to use "
+ "in the .wks bootimg")
+ subparser.add_argument("-n", "--native-sysroot", dest="native_sysroot",
+ help="path to the native sysroot containing the tools "
+ "to use to build the image")
+ subparser.add_argument("-s", "--skip-build-check", dest="build_check",
+ action="store_false", default=True, help="skip the build check")
+ subparser.add_argument("-f", "--build-rootfs", action="store_true", help="build rootfs")
+ subparser.add_argument("-c", "--compress-with", choices=("gzip", "bzip2", "xz"),
+ dest='compressor',
+ help="compress image with specified compressor")
+ subparser.add_argument("-m", "--bmap", action="store_true", help="generate .bmap")
+ subparser.add_argument("--no-fstab-update" ,action="store_true",
+ help="Do not change fstab file.")
+ subparser.add_argument("-v", "--vars", dest='vars_dir',
+ help="directory with <image>.env files that store "
+ "bitbake variables")
+ subparser.add_argument("-D", "--debug", dest="debug", action="store_true",
+ default=False, help="output debug information")
+ return
+
+
+def wic_init_parser_list(subparser):
+ subparser.add_argument("list_type",
+ help="can be 'images' or 'source-plugins' "
+ "to obtain a list. "
+ "If value is a valid .wks image file")
+ subparser.add_argument("help_for", default=[], nargs='*',
+ help="If 'list_type' is a valid .wks image file "
+ "this value can be 'help' to show the help information "
+ "defined inside the .wks file")
+ return
+
+def imgtype(arg):
+ """
+ Custom type for ArgumentParser
+ Converts path spec to named tuple: (image, partition, path)
+ """
+ image = arg
+ part = path = None
+ if ':' in image:
+ image, part = image.split(':')
+ if '/' in part:
+ part, path = part.split('/', 1)
+ if not path:
+ path = '/'
+
+ if not os.path.isfile(image):
+ err = "%s is not a regular file or symlink" % image
+ raise argparse.ArgumentTypeError(err)
+
+ return namedtuple('ImgType', 'image part path')(image, part, path)
+
+def wic_init_parser_ls(subparser):
+ subparser.add_argument("path", type=imgtype,
+ help="image spec: <image>[:<vfat partition>[<path>]]")
+ subparser.add_argument("-n", "--native-sysroot",
+ help="path to the native sysroot containing the tools")
+
+def imgpathtype(arg):
+ img = imgtype(arg)
+ if img.part is None:
+ raise argparse.ArgumentTypeError("partition number is not specified")
+ return img
+
+def wic_init_parser_cp(subparser):
+ subparser.add_argument("src",
+ help="source spec")
+ subparser.add_argument("dest", type=imgpathtype,
+ help="image spec: <image>:<vfat partition>[<path>]")
+ subparser.add_argument("-n", "--native-sysroot",
+ help="path to the native sysroot containing the tools")
+
+def wic_init_parser_rm(subparser):
+ subparser.add_argument("path", type=imgpathtype,
+ help="path: <image>:<vfat partition><path>")
+ subparser.add_argument("-n", "--native-sysroot",
+ help="path to the native sysroot containing the tools")
+
+def expandtype(rules):
+ """
+ Custom type for ArgumentParser
+ Converts expand rules to the dictionary {<partition>: size}
+ """
+ if rules == 'auto':
+ return {}
+ result = {}
+ for rule in rules.split('-'):
+ try:
+ part, size = rule.split(':')
+ except ValueError:
+ raise argparse.ArgumentTypeError("Incorrect rule format: %s" % rule)
+
+ if not part.isdigit():
+ raise argparse.ArgumentTypeError("Rule '%s': partition number must be integer" % rule)
+
+ # validate size
+ multiplier = 1
+ for suffix, mult in [('K', 1024), ('M', 1024 * 1024), ('G', 1024 * 1024 * 1024)]:
+ if size.upper().endswith(suffix):
+ multiplier = mult
+ size = size[:-1]
+ break
+ if not size.isdigit():
+ raise argparse.ArgumentTypeError("Rule '%s': size must be integer" % rule)
+
+ result[int(part)] = int(size) * multiplier
+
+ return result
+
+def wic_init_parser_write(subparser):
+ subparser.add_argument("image",
+ help="path to the wic image")
+ subparser.add_argument("target",
+ help="target file or device")
+ subparser.add_argument("-e", "--expand", type=expandtype,
+ help="expand rules: auto or <partition>:<size>[,<partition>:<size>]")
+ subparser.add_argument("-n", "--native-sysroot",
+ help="path to the native sysroot containing the tools")
+
+def wic_init_parser_help(subparser):
+ helpparsers = subparser.add_subparsers(dest='help_topic', help=hlp.wic_usage)
+ for helptopic in helptopics:
+ helpparsers.add_parser(helptopic, help=helptopics[helptopic][2])
+ return
+
+
+subcommands = {
+ "create": [wic_create_subcommand,
+ hlp.wic_create_usage,
+ hlp.wic_create_help,
+ wic_init_parser_create],
+ "list": [wic_list_subcommand,
+ hlp.wic_list_usage,
+ hlp.wic_list_help,
+ wic_init_parser_list],
+ "ls": [wic_ls_subcommand,
+ hlp.wic_ls_usage,
+ hlp.wic_ls_help,
+ wic_init_parser_ls],
+ "cp": [wic_cp_subcommand,
+ hlp.wic_cp_usage,
+ hlp.wic_cp_help,
+ wic_init_parser_cp],
+ "rm": [wic_rm_subcommand,
+ hlp.wic_rm_usage,
+ hlp.wic_rm_help,
+ wic_init_parser_rm],
+ "write": [wic_write_subcommand,
+ hlp.wic_write_usage,
+ hlp.wic_write_help,
+ wic_init_parser_write],
+ "help": [wic_help_subcommand,
+ wic_help_topic_usage,
+ hlp.wic_help_help,
+ wic_init_parser_help]
}
+def init_parser(parser):
+ parser.add_argument("--version", action="version",
+ version="%(prog)s {version}".format(version=__version__))
+ subparsers = parser.add_subparsers(dest='command', help=hlp.wic_usage)
+ for subcmd in subcommands:
+ subparser = subparsers.add_parser(subcmd, help=subcommands[subcmd][2])
+ subcommands[subcmd][3](subparser)
+
+
def main(argv):
- parser = optparse.OptionParser(version="wic version %s" % __version__,
- usage=hlp.wic_usage)
+ parser = argparse.ArgumentParser(
+ description="wic version %s" % __version__)
- parser.disable_interspersed_args()
+ init_parser(parser)
- args = parser.parse_args(argv)[1]
+ args = parser.parse_args(argv)
- if len(args):
- if args[0] == "help":
- if len(args) == 1:
+ if "command" in vars(args):
+ if args.command == "help":
+ if args.help_topic is None:
parser.print_help()
- raise WicError("help command requires parameter")
+ print()
+ print("Please specify a help topic")
+ elif args.help_topic in helptopics:
+ hlpt = helptopics[args.help_topic]
+ hlpt[0](hlpt[1], hlpt[2])
+ return 0
return hlp.invoke_subcommand(args, parser, hlp.wic_help_usage, subcommands)
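
The rewritten wic command line builds on two argparse extension points shown above: type= converters (imgtype, expandtype) and a custom Action (RootfsArgAction) that accumulates repeated key=value options into a dict. A hedged, generic sketch of the Action idea; the class and option names are illustrative, not the wic API.

import argparse

class KeyValueDictAction(argparse.Action):
    """Collect repeated 'key=value' options into a dict, falling back to a
    default key when no '=' is given, as RootfsArgAction does above."""
    def __call__(self, parser, namespace, value, option_string=None):
        current = getattr(namespace, self.dest, None)
        if not isinstance(current, dict):
            current = {}
            setattr(namespace, self.dest, current)
        key, sep, val = value.partition('=')
        if not sep:
            key, val = 'ROOTFS_DIR', value
        current[key] = val

parser = argparse.ArgumentParser()
parser.add_argument('-r', '--rootfs-dir', action=KeyValueDictAction)
args = parser.parse_args(['-r', 'rootfs1=/tmp/a', '-r', '/tmp/b'])
print(args.rootfs_dir)  # {'rootfs1': '/tmp/a', 'ROOTFS_DIR': '/tmp/b'}
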
diff --git a/import-layers/yocto-poky/scripts/yocto-compat-layer.py b/import-layers/yocto-poky/scripts/yocto-check-layer
index ba64b4d6e..5a4fd752c 100755
--- a/import-layers/yocto-poky/scripts/yocto-compat-layer.py
+++ b/import-layers/yocto-poky/scripts/yocto-check-layer
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
-# Yocto Project compatibility layer tool
+# Yocto Project layer checking tool
#
# Copyright (C) 2017 Intel Corporation
# Released under the MIT license (see COPYING.MIT)
@@ -22,29 +22,29 @@ import scriptpath
scriptpath.add_oe_lib_path()
scriptpath.add_bitbake_lib_path()
-from compatlayer import LayerType, detect_layers, add_layer, add_layer_dependencies, get_signatures
+from checklayer import LayerType, detect_layers, add_layer, add_layer_dependencies, get_signatures
from oeqa.utils.commands import get_bb_vars
-PROGNAME = 'yocto-compat-layer'
+PROGNAME = 'yocto-check-layer'
CASES_PATHS = [os.path.join(os.path.abspath(os.path.dirname(__file__)),
- 'lib', 'compatlayer', 'cases')]
+ 'lib', 'checklayer', 'cases')]
logger = scriptutils.logger_create(PROGNAME, stream=sys.stdout)
-def test_layer_compatibility(td, layer, test_software_layer_signatures):
- from compatlayer.context import CompatLayerTestContext
+def test_layer(td, layer, test_software_layer_signatures):
+ from checklayer.context import CheckLayerTestContext
logger.info("Starting to analyze: %s" % layer['name'])
logger.info("----------------------------------------------------------------------")
- tc = CompatLayerTestContext(td=td, logger=logger, layer=layer, test_software_layer_signatures=test_software_layer_signatures)
+ tc = CheckLayerTestContext(td=td, logger=logger, layer=layer, test_software_layer_signatures=test_software_layer_signatures)
tc.loadTests(CASES_PATHS)
return tc.runTests()
def main():
parser = argparse.ArgumentParser(
- description="Yocto Project compatibility layer tool",
+ description="Yocto Project layer checking tool",
add_help=False)
parser.add_argument('layers', metavar='LAYER_DIR', nargs='+',
- help='Layer to test compatibility with Yocto Project')
+ help='Layer to check')
parser.add_argument('-o', '--output-log',
help='File to output log (optional)', action='store')
parser.add_argument('--dependency', nargs="+",
@@ -179,21 +179,24 @@ def main():
layers_tested = layers_tested + 1
continue
- result = test_layer_compatibility(td, layer, args.test_software_layer_signatures)
+ result = test_layer(td, layer, args.test_software_layer_signatures)
results[layer['name']] = result
results_status[layer['name']] = 'PASS' if results[layer['name']].wasSuccessful() else 'FAIL'
layers_tested = layers_tested + 1
+ ret = 0
if layers_tested:
logger.info('')
logger.info('Summary of results:')
logger.info('')
for layer_name in results_status:
logger.info('%s ... %s' % (layer_name, results_status[layer_name]))
+ if not results[layer_name] or not results[layer_name].wasSuccessful():
+ ret = 2 # ret = 1 used for initialization errors
cleanup_bblayers(None, None)
- return 0
+ return ret
if __name__ == '__main__':
try:
diff --git a/import-layers/yocto-poky/scripts/yocto-check-layer-wrapper b/import-layers/yocto-poky/scripts/yocto-check-layer-wrapper
new file mode 100755
index 000000000..bbf6ee176
--- /dev/null
+++ b/import-layers/yocto-poky/scripts/yocto-check-layer-wrapper
@@ -0,0 +1,43 @@
+#!/usr/bin/env bash
+
+# Yocto Project layer check tool wrapper
+#
+# Creates a temporary build directory to run the yocto-check-layer
+# script to avoid a contaminated environment.
+#
+# Copyright (C) 2017 Intel Corporation
+# Released under the MIT license (see COPYING.MIT)
+
+if [ -z "$BUILDDIR" ]; then
+ echo "Please source oe-init-build-env before run this script."
+ exit 2
+fi
+
+# since we are using a temp directory, use the realpath for output
+# log option
+output_log=''
+while getopts o: name
+do
+ case $name in
+ o) output_log=$(realpath "$OPTARG")
+ esac
+done
+shift $(($OPTIND - 1))
+
+# generate a temp directory to run check layer script
+base_dir=$(realpath $BUILDDIR/../)
+cd $base_dir
+
+build_dir=$(mktemp -p $base_dir -d -t build-XXXX)
+
+source oe-init-build-env $build_dir
+if [[ $output_log != '' ]]; then
+ yocto-check-layer -o "$output_log" "$@"
+else
+ yocto-check-layer "$@"
+fi
+retcode=$?
+
+rm -rf $build_dir
+
+exit $retcode
diff --git a/import-layers/yocto-poky/scripts/yocto-compat-layer-wrapper b/import-layers/yocto-poky/scripts/yocto-compat-layer-wrapper
deleted file mode 100755
index db4b6871b..000000000
--- a/import-layers/yocto-poky/scripts/yocto-compat-layer-wrapper
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/bin/env bash
-
-# Yocto Project compatibility layer tool wrapper
-#
- # Creates a temporary build directory to run Yocto Project Compatible
-# script to avoid a contaminated environment.
-#
-# Copyright (C) 2017 Intel Corporation
-# Released under the MIT license (see COPYING.MIT)
-
-if [ -z "$BUILDDIR" ]; then
- echo "Please source oe-init-build-env before run this script."
- exit 2
-fi
-
-base_dir=$(realpath $BUILDDIR/../)
-cd $base_dir
-
-build_dir=$(mktemp -p $base_dir -d -t build-XXXX)
-
-source oe-init-build-env $build_dir
-yocto-compat-layer.py "$@"
-retcode=$?
-
-rm -rf $build_dir
-
-exit $retcode