commit     c4e2c59088765d1f1de7ec57cde91980f887c2ff
tree       a2fda8ac5916fb59a711e9220c2177008cca9347 /meta
parent     d5e67725ac11e3296cad104470931ffa16824b90
author     Joshua Lock <joshua.g.lock@intel.com>  2016-12-14 21:13:04 +0000
committer  Richard Purdie <richard.purdie@linuxfoundation.org>  2016-12-16 10:23:23 +0000
download   poky-c4e2c59088765d1f1de7ec57cde91980f887c2ff.tar.gz
meta: remove True option to getVar calls
getVar() now expands by default, so remove the True option from
getVar() calls with a regex search and replace.

Search made with the following regex:

    getVar ?\(( ?[^,()]*), True\)

(From OE-Core rev: 7c552996597faaee2fbee185b250c0ee30ea3b5f)

Signed-off-by: Joshua Lock <joshua.g.lock@intel.com>
Signed-off-by: Ross Burton <ross.burton@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
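Editor's note: for illustration, this is what the change looks like at a
typical call site (a sketch, not lines from any one file; 'PN' and
'BB_TASKDEPDATA' are just representative variables from the diff below):

    # Before: expansion had to be requested explicitly.
    pn = d.getVar('PN', True)

    # After: getVar() expands by default, so the flag is dropped.
    pn = d.getVar('PN')

    # Call sites that explicitly do NOT want expansion keep passing
    # False, so they are untouched by the regex.
    taskdepdata = d.getVar('BB_TASKDEPDATA', False)

Applied mechanically, the replacement could be run with something like
the following (a reconstruction; the commit does not record the exact
command used):

    sed -i -E 's/getVar ?\(( ?[^,()]*), True\)/getVar(\1)/g' $(git grep -l getVar -- meta)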
Diffstat (limited to 'meta')
-rw-r--r--  meta/classes/allarch.bbclass | 4
-rw-r--r--  meta/classes/archiver.bbclass | 74
-rw-r--r--  meta/classes/autotools.bbclass | 12
-rw-r--r--  meta/classes/base.bbclass | 126
-rw-r--r--  meta/classes/binconfig.bbclass | 2
-rw-r--r--  meta/classes/blacklist.bbclass | 4
-rw-r--r--  meta/classes/bugzilla.bbclass | 24
-rw-r--r--  meta/classes/buildhistory.bbclass | 62
-rw-r--r--  meta/classes/buildstats-summary.bbclass | 2
-rw-r--r--  meta/classes/buildstats.bbclass | 14
-rw-r--r--  meta/classes/ccache.bbclass | 2
-rw-r--r--  meta/classes/chrpath.bbclass | 4
-rw-r--r--  meta/classes/cmake.bbclass | 16
-rw-r--r--  meta/classes/cml1.bbclass | 4
-rw-r--r--  meta/classes/compress_doc.bbclass | 26
-rw-r--r--  meta/classes/copyleft_compliance.bbclass | 10
-rw-r--r--  meta/classes/copyleft_filter.bbclass | 10
-rw-r--r--  meta/classes/cross-canadian.bbclass | 34
-rw-r--r--  meta/classes/crosssdk.bbclass | 2
-rw-r--r--  meta/classes/cve-check.bbclass | 46
-rw-r--r--  meta/classes/debian.bbclass | 24
-rw-r--r--  meta/classes/devshell.bbclass | 6
-rw-r--r--  meta/classes/distro_features_check.bbclass | 8
-rw-r--r--  meta/classes/distrodata.bbclass | 132
-rw-r--r--  meta/classes/distutils-base.bbclass | 2
-rw-r--r--  meta/classes/distutils3-base.bbclass | 2
-rw-r--r--  meta/classes/externalsrc.bbclass | 38
-rw-r--r--  meta/classes/extrausers.bbclass | 2
-rw-r--r--  meta/classes/fontcache.bbclass | 14
-rw-r--r--  meta/classes/fs-uuid.bbclass | 2
-rw-r--r--  meta/classes/gconf.bbclass | 12
-rw-r--r--  meta/classes/gettext.bbclass | 8
-rw-r--r--  meta/classes/gio-module-cache.bbclass | 10
-rw-r--r--  meta/classes/grub-efi.bbclass | 22
-rw-r--r--  meta/classes/gsettings.bbclass | 10
-rw-r--r--  meta/classes/gtk-icon-cache.bbclass | 14
-rw-r--r--  meta/classes/gtk-immodules-cache.bbclass | 10
-rw-r--r--  meta/classes/icecc.bbclass | 4
-rw-r--r--  meta/classes/image-buildinfo.bbclass | 10
-rw-r--r--  meta/classes/image-live.bbclass | 8
-rw-r--r--  meta/classes/image-vm.bbclass | 12
-rw-r--r--  meta/classes/image.bbclass | 96
-rw-r--r--  meta/classes/image_types.bbclass | 24
-rw-r--r--  meta/classes/insane.bbclass | 194
-rw-r--r--  meta/classes/kernel-arch.bbclass | 6
-rw-r--r--  meta/classes/kernel-fitimage.bbclass | 14
-rw-r--r--  meta/classes/kernel-grub.bbclass | 2
-rw-r--r--  meta/classes/kernel-module-split.bbclass | 38
-rw-r--r--  meta/classes/kernel-uimage.bbclass | 8
-rw-r--r--  meta/classes/kernel-yocto.bbclass | 2
-rw-r--r--  meta/classes/kernel.bbclass | 40
-rw-r--r--  meta/classes/libc-common.bbclass | 10
-rw-r--r--  meta/classes/libc-package.bbclass | 60
-rw-r--r--  meta/classes/license.bbclass | 96
-rw-r--r--  meta/classes/live-vm-common.bbclass | 8
-rw-r--r--  meta/classes/metadata_scm.bbclass | 2
-rw-r--r--  meta/classes/migrate_localcount.bbclass | 12
-rw-r--r--  meta/classes/mime.bbclass | 12
-rw-r--r--  meta/classes/module.bbclass | 2
-rw-r--r--  meta/classes/multilib.bbclass | 32
-rw-r--r--  meta/classes/multilib_global.bbclass | 22
-rw-r--r--  meta/classes/native.bbclass | 10
-rw-r--r--  meta/classes/nativesdk.bbclass | 8
-rw-r--r--  meta/classes/npm.bbclass | 4
-rw-r--r--  meta/classes/oelint.bbclass | 4
-rw-r--r--  meta/classes/package.bbclass | 282
-rw-r--r--  meta/classes/package_deb.bbclass | 54
-rw-r--r--  meta/classes/package_ipk.bbclass | 60
-rw-r--r--  meta/classes/package_rpm.bbclass | 136
-rw-r--r--  meta/classes/package_tar.bbclass | 14
-rw-r--r--  meta/classes/packagedata.bbclass | 10
-rw-r--r--  meta/classes/packagefeed-stability.bbclass | 14
-rw-r--r--  meta/classes/packagegroup.bbclass | 8
-rw-r--r--  meta/classes/patch.bbclass | 20
-rw-r--r--  meta/classes/pixbufcache.bbclass | 10
-rw-r--r--  meta/classes/populate_sdk_base.bbclass | 36
-rw-r--r--  meta/classes/populate_sdk_ext.bbclass | 108
-rw-r--r--  meta/classes/prexport.bbclass | 4
-rw-r--r--  meta/classes/ptest.bbclass | 2
-rw-r--r--  meta/classes/qemu.bbclass | 10
-rw-r--r--  meta/classes/qemuboot.bbclass | 8
-rw-r--r--  meta/classes/recipe_sanity.bbclass | 26
-rw-r--r--  meta/classes/report-error.bbclass | 22
-rw-r--r--  meta/classes/rm_work.bbclass | 6
-rw-r--r--  meta/classes/rootfs-postcommands.bbclass | 6
-rw-r--r--  meta/classes/rootfs_deb.bbclass | 4
-rw-r--r--  meta/classes/rootfs_ipk.bbclass | 2
-rw-r--r--  meta/classes/rootfs_rpm.bbclass | 2
-rw-r--r--  meta/classes/sanity.bbclass | 108
-rw-r--r--  meta/classes/sign_ipk.bbclass | 12
-rw-r--r--  meta/classes/sign_package_feed.bbclass | 4
-rw-r--r--  meta/classes/sign_rpm.bbclass | 12
-rw-r--r--  meta/classes/siteconfig.bbclass | 2
-rw-r--r--  meta/classes/siteinfo.bbclass | 14
-rw-r--r--  meta/classes/spdx.bbclass | 26
-rw-r--r--  meta/classes/sstate.bbclass | 114
-rw-r--r--  meta/classes/staging.bbclass | 22
-rw-r--r--  meta/classes/syslinux.bbclass | 34
-rw-r--r--  meta/classes/systemd-boot.bbclass | 14
-rw-r--r--  meta/classes/systemd.bbclass | 38
-rw-r--r--  meta/classes/terminal.bbclass | 8
-rw-r--r--  meta/classes/testexport.bbclass | 40
-rw-r--r--  meta/classes/testimage.bbclass | 12
-rw-r--r--  meta/classes/testsdk.bbclass | 12
-rw-r--r--  meta/classes/tinderclient.bbclass | 54
-rw-r--r--  meta/classes/toaster.bbclass | 14
-rw-r--r--  meta/classes/toolchain-scripts.bbclass | 4
-rw-r--r--  meta/classes/uboot-config.bbclass | 14
-rw-r--r--  meta/classes/uboot-extlinux-config.bbclass | 24
-rw-r--r--  meta/classes/uboot-sign.bbclass | 6
-rw-r--r--  meta/classes/uninative.bbclass | 18
-rw-r--r--  meta/classes/update-alternatives.bbclass | 52
-rw-r--r--  meta/classes/update-rc.d.bbclass | 28
-rw-r--r--  meta/classes/useradd-staticids.bbclass | 48
-rw-r--r--  meta/classes/useradd.bbclass | 22
-rw-r--r--  meta/classes/utility-tasks.bbclass | 4
-rw-r--r--  meta/classes/utils.bbclass | 30
-rw-r--r--  meta/classes/waf.bbclass | 2
-rw-r--r--  meta/conf/bitbake.conf | 30
-rw-r--r--  meta/conf/distro/defaultsetup.conf | 2
-rw-r--r--  meta/conf/distro/include/tclibc-glibc.inc | 6
-rw-r--r--  meta/conf/machine/include/arm/arch-arm.inc | 2
-rw-r--r--  meta/conf/machine/include/arm/arch-arm64.inc | 2
-rw-r--r--  meta/conf/machine/include/arm/feature-arm-thumb.inc | 10
-rw-r--r--  meta/conf/machine/include/arm/feature-arm-vfp.inc | 10
-rw-r--r--  meta/conf/machine/include/mips/feature-mips-mips16e.inc | 6
-rw-r--r--  meta/conf/multilib.conf | 2
-rw-r--r--  meta/lib/buildstats.py | 4
-rw-r--r--  meta/lib/oe/classextend.py | 12
-rw-r--r--  meta/lib/oe/copy_buildsystem.py | 12
-rw-r--r--  meta/lib/oe/data.py | 2
-rw-r--r--  meta/lib/oe/distro_check.py | 20
-rw-r--r--  meta/lib/oe/gpg_sign.py | 4
-rw-r--r--  meta/lib/oe/manifest.py | 26
-rw-r--r--  meta/lib/oe/package.py | 2
-rw-r--r--  meta/lib/oe/package_manager.py | 174
-rw-r--r--  meta/lib/oe/packagedata.py | 2
-rw-r--r--  meta/lib/oe/packagegroup.py | 6
-rw-r--r--  meta/lib/oe/patch.py | 30
-rw-r--r--  meta/lib/oe/path.py | 2
-rw-r--r--  meta/lib/oe/prservice.py | 26
-rw-r--r--  meta/lib/oe/qa.py | 4
-rw-r--r--  meta/lib/oe/recipeutils.py | 38
-rw-r--r--  meta/lib/oe/rootfs.py | 114
-rw-r--r--  meta/lib/oe/sdk.py | 76
-rw-r--r--  meta/lib/oe/sstatesig.py | 30
-rw-r--r--  meta/lib/oe/terminal.py | 2
-rw-r--r--  meta/lib/oe/utils.py | 30
-rw-r--r--  meta/lib/oeqa/controllers/masterimage.py | 16
-rw-r--r--  meta/lib/oeqa/oetest.py | 60
-rwxr-xr-x  meta/lib/oeqa/runexported.py | 8
-rw-r--r--  meta/lib/oeqa/runtime/_ptest.py | 16
-rw-r--r--  meta/lib/oeqa/runtime/date.py | 4
-rw-r--r--  meta/lib/oeqa/runtime/multilib.py | 2
-rw-r--r--  meta/lib/oeqa/runtime/parselogs.py | 4
-rw-r--r--  meta/lib/oeqa/runtime/rpm.py | 6
-rw-r--r--  meta/lib/oeqa/runtime/scp.py | 2
-rw-r--r--  meta/lib/oeqa/runtime/smart.py | 18
-rw-r--r--  meta/lib/oeqa/runtime/systemd.py | 2
-rw-r--r--  meta/lib/oeqa/runtime/x32lib.py | 2
-rw-r--r--  meta/lib/oeqa/sdk/gcc.py | 2
-rw-r--r--  meta/lib/oeqa/selftest/tinfoil.py | 16
-rw-r--r--  meta/lib/oeqa/targetcontrol.py | 52
-rw-r--r--  meta/lib/oeqa/utils/commands.py | 2
-rw-r--r--  meta/lib/oeqa/utils/dump.py | 4
-rw-r--r--  meta/lib/oeqa/utils/package_manager.py | 12
-rw-r--r--  meta/lib/oeqa/utils/targetbuild.py | 8
-rw-r--r--  meta/lib/oeqa/utils/testexport.py | 14
-rw-r--r--  meta/recipes-bsp/gnu-efi/gnu-efi_3.0.4.bb | 2
-rw-r--r--  meta/recipes-bsp/grub/grub-efi_2.00.bb | 2
-rw-r--r--  meta/recipes-bsp/u-boot/u-boot.inc | 2
-rw-r--r--  meta/recipes-connectivity/bluez5/bluez5.inc | 4
-rw-r--r--  meta/recipes-connectivity/connman/connman.inc | 4
-rw-r--r--  meta/recipes-core/base-files/base-files_3.0.14.bb | 2
-rw-r--r--  meta/recipes-core/busybox/busybox.inc | 14
-rw-r--r--  meta/recipes-core/busybox/busybox_1.24.1.bb | 4
-rw-r--r--  meta/recipes-core/busybox/busybox_git.bb | 4
-rw-r--r--  meta/recipes-core/coreutils/coreutils_6.9.bb | 8
-rw-r--r--  meta/recipes-core/coreutils/coreutils_8.25.bb | 8
-rw-r--r--  meta/recipes-core/glibc/glibc-ld.inc | 6
-rw-r--r--  meta/recipes-core/glibc/glibc-locale.inc | 8
-rw-r--r--  meta/recipes-core/glibc/glibc-package.inc | 4
-rw-r--r--  meta/recipes-core/glibc/glibc.inc | 6
-rw-r--r--  meta/recipes-core/libxml/libxml2_2.9.4.bb | 2
-rw-r--r--  meta/recipes-core/meta/meta-environment-extsdk.bb | 4
-rw-r--r--  meta/recipes-core/meta/meta-environment.bb | 6
-rw-r--r--  meta/recipes-core/meta/meta-extsdk-toolchain.bb | 2
-rw-r--r--  meta/recipes-core/meta/meta-world-pkgdata.bb | 2
-rw-r--r--  meta/recipes-core/meta/signing-keys.bb | 18
-rw-r--r--  meta/recipes-core/os-release/os-release.bb | 4
-rw-r--r--  meta/recipes-core/packagegroups/packagegroup-base.bb | 4
-rw-r--r--  meta/recipes-core/packagegroups/packagegroup-core-sdk.bb | 2
-rw-r--r--  meta/recipes-core/psplash/psplash_git.bb | 20
-rw-r--r--  meta/recipes-core/systemd/systemd_232.bb | 4
-rw-r--r--  meta/recipes-core/util-linux/util-linux.inc | 2
-rw-r--r--  meta/recipes-core/volatile-binds/volatile-binds.bb | 2
-rw-r--r--  meta/recipes-devtools/apt/apt-native.inc | 4
-rw-r--r--  meta/recipes-devtools/apt/apt-package.inc | 4
-rw-r--r--  meta/recipes-devtools/automake/automake.inc | 2
-rw-r--r--  meta/recipes-devtools/binutils/binutils-2.27.inc | 2
-rw-r--r--  meta/recipes-devtools/binutils/binutils.inc | 6
-rw-r--r--  meta/recipes-devtools/cmake/cmake.inc | 2
-rw-r--r--  meta/recipes-devtools/cmake/cmake_3.7.0.bb | 4
-rw-r--r--  meta/recipes-devtools/dpkg/dpkg.inc | 2
-rw-r--r--  meta/recipes-devtools/gcc/gcc-common.inc | 32
-rw-r--r--  meta/recipes-devtools/gcc/gcc-configure-common.inc | 2
-rw-r--r--  meta/recipes-devtools/gcc/gcc-cross.inc | 2
-rw-r--r--  meta/recipes-devtools/gcc/gcc-multilib-config.inc | 20
-rw-r--r--  meta/recipes-devtools/gcc/libgcc-common.inc | 38
-rw-r--r--  meta/recipes-devtools/gcc/libgfortran.inc | 2
-rw-r--r--  meta/recipes-devtools/git/git.inc | 2
-rw-r--r--  meta/recipes-devtools/kconfig-frontends/kconfig-frontends_3.12.0.0.bb | 2
-rw-r--r--  meta/recipes-devtools/perl/perl_5.24.0.bb | 2
-rw-r--r--  meta/recipes-devtools/prelink/prelink_git.bb | 2
-rw-r--r--  meta/recipes-devtools/python/python-smartpm_git.bb | 6
-rw-r--r--  meta/recipes-devtools/qemu/qemu-targets.inc | 4
-rw-r--r--  meta/recipes-devtools/rpm/rpm_5.4.16.bb | 60
-rw-r--r--  meta/recipes-devtools/swig/swig.inc | 4
-rw-r--r--  meta/recipes-devtools/vala/vala.inc | 2
-rw-r--r--  meta/recipes-devtools/valgrind/valgrind_3.12.0.bb | 2
-rw-r--r--  meta/recipes-extended/cups/cups.inc | 2
-rw-r--r--  meta/recipes-extended/man/man_1.6g.bb | 4
-rw-r--r--  meta/recipes-extended/net-tools/net-tools_1.60-26.bb | 8
-rw-r--r--  meta/recipes-extended/packagegroups/packagegroup-core-full-cmdline.bb | 2
-rw-r--r--  meta/recipes-extended/packagegroups/packagegroup-core-lsb.bb | 4
-rw-r--r--  meta/recipes-extended/pam/libpam_1.3.0.bb | 14
-rw-r--r--  meta/recipes-extended/procps/procps_3.3.12.bb | 8
-rw-r--r--  meta/recipes-extended/texinfo/texinfo_6.3.bb | 2
-rw-r--r--  meta/recipes-extended/tzdata/tzdata_2016i.bb | 2
-rw-r--r--  meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.36.0.bb | 2
-rw-r--r--  meta/recipes-gnome/gobject-introspection/gobject-introspection_1.50.0.bb | 2
-rw-r--r--  meta/recipes-gnome/gtk+/gtk+3.inc | 2
-rw-r--r--  meta/recipes-gnome/gtk+/gtk+_2.24.31.bb | 2
-rw-r--r--  meta/recipes-graphics/cairo/cairo-fpu.inc | 2
-rw-r--r--  meta/recipes-graphics/mesa/mesa.inc | 6
-rw-r--r--  meta/recipes-graphics/xorg-driver/xorg-driver-common.inc | 4
-rw-r--r--  meta/recipes-graphics/xorg-lib/libxft_2.3.2.bb | 2
-rw-r--r--  meta/recipes-graphics/xorg-xserver/xserver-xorg.inc | 4
-rw-r--r--  meta/recipes-kernel/linux/linux-yocto-rt_4.1.bb | 2
-rw-r--r--  meta/recipes-kernel/linux/linux-yocto-rt_4.4.bb | 2
-rw-r--r--  meta/recipes-kernel/linux/linux-yocto-rt_4.8.bb | 2
-rw-r--r--  meta/recipes-kernel/linux/linux-yocto.inc | 4
-rw-r--r--  meta/recipes-kernel/lttng/lttng-modules_git.bb | 4
-rw-r--r--  meta/recipes-kernel/perf/perf-features.inc | 2
-rw-r--r--  meta/recipes-kernel/perf/perf.bb | 2
-rw-r--r--  meta/recipes-multimedia/alsa/alsa-fpu.inc | 2
-rw-r--r--  meta/recipes-multimedia/gstreamer/gst-plugins-package.inc | 10
-rw-r--r--  meta/recipes-multimedia/gstreamer/gstreamer1.0-omx.inc | 4
-rw-r--r--  meta/recipes-rt/images/core-image-rt-sdk.bb | 2
-rw-r--r--  meta/recipes-rt/images/core-image-rt.bb | 2
-rw-r--r--  meta/recipes-support/attr/ea-acl.inc | 8
-rw-r--r--  meta/recipes-support/boost/boost-1.62.0.inc | 4
-rw-r--r--  meta/recipes-support/boost/boost.inc | 8
-rw-r--r--  meta/recipes-support/gnutls/gnutls.inc | 2
-rw-r--r--  meta/recipes-support/icu/icu_58.1.bb | 2
-rw-r--r--  meta/recipes-support/libiconv/libiconv_1.11.1.bb | 2
-rw-r--r--  meta/recipes-support/libiconv/libiconv_1.14.bb | 4
-rw-r--r--  meta/recipes-support/libnl/libnl_3.2.28.bb | 2
-rw-r--r--  meta/recipes-support/libsoup/libsoup-2.4_2.56.0.bb | 2
-rw-r--r--  meta/recipes-support/sqlite/sqlite3.inc | 2
-rw-r--r--  meta/recipes-support/taglib/taglib_1.11.1.bb | 2

261 files changed, 2306 insertions(+), 2306 deletions(-)
diff --git a/meta/classes/allarch.bbclass b/meta/classes/allarch.bbclass
index ddc2a85050..9dce49879a 100644
--- a/meta/classes/allarch.bbclass
+++ b/meta/classes/allarch.bbclass
@@ -11,7 +11,7 @@ PACKAGE_ARCH = "all"
 python () {
     # Allow this class to be included but overridden - only set
     # the values if we're still "all" package arch.
-    if d.getVar("PACKAGE_ARCH", True) == "all":
+    if d.getVar("PACKAGE_ARCH") == "all":
         # No need for virtual/libc or a cross compiler
         d.setVar("INHIBIT_DEFAULT_DEPS","1")
 
@@ -47,6 +47,6 @@ python () {
         d.setVarFlag("emit_pkgdata", "vardepsexclude", "MULTILIB_VARIANTS")
         d.setVarFlag("write_specfile", "vardepsexclude", "MULTILIBS")
     elif bb.data.inherits_class('packagegroup', d) and not bb.data.inherits_class('nativesdk', d):
-        bb.error("Please ensure recipe %s sets PACKAGE_ARCH before inherit packagegroup" % d.getVar("FILE", True))
+        bb.error("Please ensure recipe %s sets PACKAGE_ARCH before inherit packagegroup" % d.getVar("FILE"))
 }
 
diff --git a/meta/classes/archiver.bbclass b/meta/classes/archiver.bbclass
index 3543ca9c58..52959776c3 100644
--- a/meta/classes/archiver.bbclass
+++ b/meta/classes/archiver.bbclass
@@ -52,10 +52,10 @@ do_deploy_all_archives[dirs] = "${WORKDIR}"
 
 
 python () {
-    pn = d.getVar('PN', True)
-    assume_provided = (d.getVar("ASSUME_PROVIDED", True) or "").split()
+    pn = d.getVar('PN')
+    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
     if pn in assume_provided:
-        for p in d.getVar("PROVIDES", True).split():
+        for p in d.getVar("PROVIDES").split():
             if p != pn:
                 pn = p
                 break
@@ -68,7 +68,7 @@ python () {
         bb.debug(1, 'archiver: %s is included: %s' % (pn, reason))
 
     # We just archive gcc-source for all the gcc related recipes
-    if d.getVar('BPN', True) in ['gcc', 'libgcc'] \
+    if d.getVar('BPN') in ['gcc', 'libgcc'] \
             and not pn.startswith('gcc-source'):
         bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
         return
@@ -106,7 +106,7 @@ python () {
     # Output the srpm package
     ar_srpm = d.getVarFlag('ARCHIVER_MODE', 'srpm', True)
     if ar_srpm == "1":
-        if d.getVar('PACKAGES', True) != '' and d.getVar('IMAGE_PKGTYPE', True) == 'rpm':
+        if d.getVar('PACKAGES') != '' and d.getVar('IMAGE_PKGTYPE') == 'rpm':
             d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn)
             if ar_dumpdata == "1":
                 d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn)
@@ -130,9 +130,9 @@ python do_ar_original() {
     if d.getVarFlag('ARCHIVER_MODE', 'src', True) != "original":
         return
 
-    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
+    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
     bb.note('Archiving the original source...')
-    urls = d.getVar("SRC_URI", True).split()
+    urls = d.getVar("SRC_URI").split()
     # destsuffix (git fetcher) and subdir (everything else) are allowed to be
     # absolute paths (for example, destsuffix=${S}/foobar).
     # That messes with unpacking inside our tmpdir below, because the fetchers
@@ -157,7 +157,7 @@ python do_ar_original() {
         if os.path.isfile(local):
             shutil.copy(local, ar_outdir)
         elif os.path.isdir(local):
-            tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR', True))
+            tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR'))
             fetch.unpack(tmpdir, (url,))
             # To handle recipes with more than one source, we add the "name"
             # URL parameter as suffix. We treat it as an error when
@@ -195,24 +195,24 @@ python do_ar_patched() {
         return
 
     # Get the ARCHIVER_OUTDIR before we reset the WORKDIR
-    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
-    ar_workdir = d.getVar('ARCHIVER_WORKDIR', True)
+    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
+    ar_workdir = d.getVar('ARCHIVER_WORKDIR')
     bb.note('Archiving the patched source...')
     d.setVar('WORKDIR', ar_workdir)
-    create_tarball(d, d.getVar('S', True), 'patched', ar_outdir)
+    create_tarball(d, d.getVar('S'), 'patched', ar_outdir)
 }
 
 python do_ar_configured() {
     import shutil
 
-    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
+    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
     if d.getVarFlag('ARCHIVER_MODE', 'src', True) == 'configured':
         bb.note('Archiving the configured source...')
-        pn = d.getVar('PN', True)
+        pn = d.getVar('PN')
         # "gcc-source-${PV}" recipes don't have "do_configure"
         # task, so we need to run "do_preconfigure" instead
         if pn.startswith("gcc-source-"):
-            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True))
+            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))
             bb.build.exec_func('do_preconfigure', d)
 
         # The libtool-native's do_configure will remove the
@@ -221,7 +221,7 @@ python do_ar_configured() {
         # instead of.
         elif pn != 'libtool-native':
             # Change the WORKDIR to make do_configure run in another dir.
-            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True))
+            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))
             if bb.data.inherits_class('kernel-yocto', d):
                 bb.build.exec_func('do_kernel_configme', d)
             if bb.data.inherits_class('cmake', d):
@@ -235,12 +235,12 @@ python do_ar_configured() {
             for func in (postfuncs or '').split():
                 if func != "do_qa_configure":
                     bb.build.exec_func(func, d)
-        srcdir = d.getVar('S', True)
-        builddir = d.getVar('B', True)
+        srcdir = d.getVar('S')
+        builddir = d.getVar('B')
         if srcdir != builddir:
             if os.path.exists(builddir):
                 oe.path.copytree(builddir, os.path.join(srcdir, \
-                    'build.%s.ar_configured' % d.getVar('PF', True)))
+                    'build.%s.ar_configured' % d.getVar('PF')))
         create_tarball(d, srcdir, 'configured', ar_outdir)
 }
 
@@ -251,14 +251,14 @@ def create_tarball(d, srcdir, suffix, ar_outdir):
     import tarfile
 
     # Make sure we are only creating a single tarball for gcc sources
-    if (d.getVar('SRC_URI', True) == ""):
+    if (d.getVar('SRC_URI') == ""):
         return
 
     bb.utils.mkdirhier(ar_outdir)
     if suffix:
-        filename = '%s-%s.tar.gz' % (d.getVar('PF', True), suffix)
+        filename = '%s-%s.tar.gz' % (d.getVar('PF'), suffix)
     else:
-        filename = '%s.tar.gz' % d.getVar('PF', True)
+        filename = '%s.tar.gz' % d.getVar('PF')
     tarname = os.path.join(ar_outdir, filename)
 
     bb.note('Creating %s' % tarname)
@@ -286,7 +286,7 @@ def create_diff_gz(d, src_orig, src, ar_outdir):
     dirname = os.path.dirname(src)
     basename = os.path.basename(src)
     os.chdir(dirname)
-    out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF', True))
+    out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF'))
     diff_cmd = 'diff -Naur %s.orig %s.patched | gzip -c > %s' % (basename, basename, out_file)
     subprocess.call(diff_cmd, shell=True)
     bb.utils.remove(src_patched, recurse=True)
@@ -297,9 +297,9 @@ python do_unpack_and_patch() {
             [ 'patched', 'configured'] and \
             d.getVarFlag('ARCHIVER_MODE', 'diff', True) != '1':
         return
-    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
-    ar_workdir = d.getVar('ARCHIVER_WORKDIR', True)
-    pn = d.getVar('PN', True)
+    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
+    ar_workdir = d.getVar('ARCHIVER_WORKDIR')
+    pn = d.getVar('PN')
 
     # The kernel class functions require it to be on work-shared, so we dont change WORKDIR
     if not (bb.data.inherits_class('kernel-yocto', d) or pn.startswith('gcc-source')):
@@ -309,18 +309,18 @@ python do_unpack_and_patch() {
         # The changed 'WORKDIR' also caused 'B' changed, create dir 'B' for the
         # possibly requiring of the following tasks (such as some recipes's
         # do_patch required 'B' existed).
-        bb.utils.mkdirhier(d.getVar('B', True))
+        bb.utils.mkdirhier(d.getVar('B'))
 
     bb.build.exec_func('do_unpack', d)
 
     # Save the original source for creating the patches
     if d.getVarFlag('ARCHIVER_MODE', 'diff', True) == '1':
-        src = d.getVar('S', True).rstrip('/')
+        src = d.getVar('S').rstrip('/')
         src_orig = '%s.orig' % src
         oe.path.copytree(src, src_orig)
 
     # Make sure gcc and kernel sources are patched only once
-    if not (d.getVar('SRC_URI', True) == "" or (bb.data.inherits_class('kernel-yocto', d) or pn.startswith('gcc-source'))):
+    if not (d.getVar('SRC_URI') == "" or (bb.data.inherits_class('kernel-yocto', d) or pn.startswith('gcc-source'))):
         bb.build.exec_func('do_patch', d)
 
     # Create the patches
@@ -339,14 +339,14 @@ python do_ar_recipe () {
 
     require_re = re.compile( r"require\s+(.+)" )
     include_re = re.compile( r"include\s+(.+)" )
-    bbfile = d.getVar('FILE', True)
-    outdir = os.path.join(d.getVar('WORKDIR', True), \
-            '%s-recipe' % d.getVar('PF', True))
+    bbfile = d.getVar('FILE')
+    outdir = os.path.join(d.getVar('WORKDIR'), \
+            '%s-recipe' % d.getVar('PF'))
     bb.utils.mkdirhier(outdir)
     shutil.copy(bbfile, outdir)
 
-    pn = d.getVar('PN', True)
-    bbappend_files = d.getVar('BBINCLUDED', True).split()
+    pn = d.getVar('PN')
+    bbappend_files = d.getVar('BBINCLUDED').split()
     # If recipe name is aa, we need to match files like aa.bbappend and aa_1.1.bbappend
     # Files like aa1.bbappend or aa1_1.1.bbappend must be excluded.
     bbappend_re = re.compile( r".*/%s_[^/]*\.bbappend$" %pn)
@@ -356,7 +356,7 @@ python do_ar_recipe () {
             shutil.copy(file, outdir)
 
     dirname = os.path.dirname(bbfile)
-    bbpath = '%s:%s' % (dirname, d.getVar('BBPATH', True))
+    bbpath = '%s:%s' % (dirname, d.getVar('BBPATH'))
     f = open(bbfile, 'r')
     for line in f.readlines():
         incfile = None
@@ -370,7 +370,7 @@ python do_ar_recipe () {
         if incfile:
             shutil.copy(incfile, outdir)
 
-    create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR', True))
+    create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR'))
     bb.utils.remove(outdir, recurse=True)
 }
 
@@ -379,8 +379,8 @@ python do_dumpdata () {
     dump environment data to ${PF}-showdata.dump
     """
 
-    dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR', True), \
-        '%s-showdata.dump' % d.getVar('PF', True))
+    dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR'), \
+        '%s-showdata.dump' % d.getVar('PF'))
     bb.note('Dumping metadata into %s' % dumpfile)
     with open(dumpfile, "w") as f:
         # emit variables and shell functions
diff --git a/meta/classes/autotools.bbclass b/meta/classes/autotools.bbclass
index c43ea9a7ef..c43531b050 100644
--- a/meta/classes/autotools.bbclass
+++ b/meta/classes/autotools.bbclass
@@ -1,8 +1,8 @@
 def autotools_dep_prepend(d):
-    if d.getVar('INHIBIT_AUTOTOOLS_DEPS', True):
+    if d.getVar('INHIBIT_AUTOTOOLS_DEPS'):
         return ''
 
-    pn = d.getVar('PN', True)
+    pn = d.getVar('PN')
     deps = ''
 
     if pn in ['autoconf-native', 'automake-native', 'help2man-native']:
@@ -14,7 +14,7 @@ def autotools_dep_prepend(d):
     if not bb.data.inherits_class('native', d) \
         and not bb.data.inherits_class('nativesdk', d) \
         and not bb.data.inherits_class('cross', d) \
-        and not d.getVar('INHIBIT_DEFAULT_DEPS', True):
+        and not d.getVar('INHIBIT_DEFAULT_DEPS'):
         deps += 'libtool-cross '
 
     return deps + 'gnu-config-native '
@@ -139,15 +139,15 @@ ACLOCALDIR = "${WORKDIR}/aclocal-copy"
 python autotools_copy_aclocals () {
     import copy
 
-    s = d.getVar("AUTOTOOLS_SCRIPT_PATH", True)
+    s = d.getVar("AUTOTOOLS_SCRIPT_PATH")
     if not os.path.exists(s + "/configure.in") and not os.path.exists(s + "/configure.ac"):
         if not d.getVar("AUTOTOOLS_COPYACLOCAL", False):
             return
 
     taskdepdata = d.getVar("BB_TASKDEPDATA", False)
     #bb.warn(str(taskdepdata))
-    pn = d.getVar("PN", True)
-    aclocaldir = d.getVar("ACLOCALDIR", True)
+    pn = d.getVar("PN")
+    aclocaldir = d.getVar("ACLOCALDIR")
     oe.path.remove(aclocaldir)
     bb.utils.mkdirhier(aclocaldir)
     start = None
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass
index 19673e6913..2765ebf61b 100644
--- a/meta/classes/base.bbclass
+++ b/meta/classes/base.bbclass
@@ -16,7 +16,7 @@ OE_IMPORTS[type] = "list"
 def oe_import(d):
     import sys
 
-    bbpath = d.getVar("BBPATH", True).split(":")
+    bbpath = d.getVar("BBPATH").split(":")
     sys.path[0:0] = [os.path.join(dir, "lib") for dir in bbpath]
 
     def inject(name, value):
@@ -37,7 +37,7 @@ def oe_import(d):
 OE_IMPORTED := "${@oe_import(d)}"
 
 def lsb_distro_identifier(d):
-    adjust = d.getVar('LSB_DISTRO_ADJUST', True)
+    adjust = d.getVar('LSB_DISTRO_ADJUST')
     adjust_func = None
     if adjust:
         try:
@@ -72,7 +72,7 @@ def base_dep_prepend(d):
     # we need that built is the responsibility of the patch function / class, not
     # the application.
     if not d.getVar('INHIBIT_DEFAULT_DEPS', False):
-        if (d.getVar('HOST_SYS', True) != d.getVar('BUILD_SYS', True)):
+        if (d.getVar('HOST_SYS') != d.getVar('BUILD_SYS')):
             deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc "
     return deps
 
@@ -83,11 +83,11 @@ DEPENDS_prepend="${BASEDEPENDS} "
 FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
 # THISDIR only works properly with imediate expansion as it has to run
 # in the context of the location its used (:=)
-THISDIR = "${@os.path.dirname(d.getVar('FILE', True))}"
+THISDIR = "${@os.path.dirname(d.getVar('FILE'))}"
 
 def extra_path_elements(d):
     path = ""
-    elements = (d.getVar('EXTRANATIVEPATH', True) or "").split()
+    elements = (d.getVar('EXTRANATIVEPATH') or "").split()
     for e in elements:
         path = path + "${STAGING_BINDIR_NATIVE}/" + e + ":"
     return path
@@ -96,11 +96,11 @@ PATH_prepend = "${@extra_path_elements(d)}"
 
 def get_lic_checksum_file_list(d):
     filelist = []
-    lic_files = d.getVar("LIC_FILES_CHKSUM", True) or ''
-    tmpdir = d.getVar("TMPDIR", True)
-    s = d.getVar("S", True)
-    b = d.getVar("B", True)
-    workdir = d.getVar("WORKDIR", True)
+    lic_files = d.getVar("LIC_FILES_CHKSUM") or ''
+    tmpdir = d.getVar("TMPDIR")
+    s = d.getVar("S")
+    b = d.getVar("B")
+    workdir = d.getVar("WORKDIR")
 
     urls = lic_files.split()
     for url in urls:
@@ -116,7 +116,7 @@ def get_lic_checksum_file_list(d):
                 continue
             filelist.append(path + ":" + str(os.path.exists(path)))
         except bb.fetch.MalformedUrl:
-            bb.fatal(d.getVar('PN', True) + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
+            bb.fatal(d.getVar('PN') + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
     return " ".join(filelist)
 
 addtask fetch
@@ -126,7 +126,7 @@ do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
 do_fetch[vardeps] += "SRCREV"
 python base_do_fetch() {
 
-    src_uri = (d.getVar('SRC_URI', True) or "").split()
+    src_uri = (d.getVar('SRC_URI') or "").split()
     if len(src_uri) == 0:
         return
 
@@ -141,31 +141,31 @@ addtask unpack after do_fetch
 do_unpack[dirs] = "${WORKDIR}"
 
 python () {
-    if d.getVar('S', True) != d.getVar('WORKDIR', True):
+    if d.getVar('S') != d.getVar('WORKDIR'):
         d.setVarFlag('do_unpack', 'cleandirs', '${S}')
     else:
         d.setVarFlag('do_unpack', 'cleandirs', os.path.join('${S}', 'patches'))
 }
 python base_do_unpack() {
-    src_uri = (d.getVar('SRC_URI', True) or "").split()
+    src_uri = (d.getVar('SRC_URI') or "").split()
     if len(src_uri) == 0:
         return
 
     try:
         fetcher = bb.fetch2.Fetch(src_uri, d)
-        fetcher.unpack(d.getVar('WORKDIR', True))
+        fetcher.unpack(d.getVar('WORKDIR'))
     except bb.fetch2.BBFetchException as e:
         bb.fatal(str(e))
 }
 
 def pkgarch_mapping(d):
     # Compatibility mappings of TUNE_PKGARCH (opt in)
-    if d.getVar("PKGARCHCOMPAT_ARMV7A", True):
-        if d.getVar("TUNE_PKGARCH", True) == "armv7a-vfp-neon":
+    if d.getVar("PKGARCHCOMPAT_ARMV7A"):
+        if d.getVar("TUNE_PKGARCH") == "armv7a-vfp-neon":
             d.setVar("TUNE_PKGARCH", "armv7a")
 
 def get_layers_branch_rev(d):
-    layers = (d.getVar("BBLAYERS", True) or "").split()
+    layers = (d.getVar("BBLAYERS") or "").split()
     layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \
         base_get_metadata_git_branch(i, None).strip(), \
         base_get_metadata_git_revision(i, None)) \
@@ -192,7 +192,7 @@ BUILDCFG_FUNCS[type] = "list"
 def buildcfg_vars(d):
     statusvars = oe.data.typed_value('BUILDCFG_VARS', d)
     for var in statusvars:
-        value = d.getVar(var, True)
+        value = d.getVar(var)
         if value is not None:
             yield '%-17s = "%s"' % (var, value)
 
@@ -200,7 +200,7 @@ def buildcfg_neededvars(d):
     needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
     pesteruser = []
     for v in needed_vars:
-        val = d.getVar(v, True)
+        val = d.getVar(v)
         if not val or val == 'INVALID':
             pesteruser.append(v)
 
@@ -233,7 +233,7 @@ python base_eventhandler() {
                 if flines:
                     statuslines.extend(flines)
 
-        statusheader = e.data.getVar('BUILDCFG_HEADER', True)
+        statusheader = e.data.getVar('BUILDCFG_HEADER')
         if statusheader:
             bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))
 
@@ -241,7 +241,7 @@ python base_eventhandler() {
     # target ones and we'd see dulpicate key names overwriting each other
     # for various PREFERRED_PROVIDERS
     if isinstance(e, bb.event.RecipePreFinalise):
-        if e.data.getVar("TARGET_PREFIX", True) == e.data.getVar("SDK_PREFIX", True):
+        if e.data.getVar("TARGET_PREFIX") == e.data.getVar("SDK_PREFIX"):
             e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils")
             e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc-initial")
             e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc")
@@ -267,14 +267,14 @@ python base_eventhandler() {
     # sysroot since they're now "unreachable". This makes switching virtual/kernel work in
     # particular.
     #
-    pn = d.getVar('PN', True)
+    pn = d.getVar('PN')
     source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
     if not source_mirror_fetch:
-        provs = (d.getVar("PROVIDES", True) or "").split()
-        multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST", True) or "").split()
+        provs = (d.getVar("PROVIDES") or "").split()
+        multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST") or "").split()
         for p in provs:
             if p.startswith("virtual/") and p not in multiwhitelist:
-                profprov = d.getVar("PREFERRED_PROVIDER_" + p, True)
+                profprov = d.getVar("PREFERRED_PROVIDER_" + p)
                 if profprov and pn != profprov:
                     raise bb.parse.SkipPackage("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn))
 }
@@ -336,9 +336,9 @@ def set_packagetriplet(d):
     tos = []
     tvs = []
 
-    archs.append(d.getVar("PACKAGE_ARCHS", True).split())
-    tos.append(d.getVar("TARGET_OS", True))
-    tvs.append(d.getVar("TARGET_VENDOR", True))
+    archs.append(d.getVar("PACKAGE_ARCHS").split())
+    tos.append(d.getVar("TARGET_OS"))
+    tvs.append(d.getVar("TARGET_VENDOR"))
 
     def settriplet(d, varname, archs, tos, tvs):
         triplets = []
@@ -350,16 +350,16 @@ def set_packagetriplet(d):
 
     settriplet(d, "PKGTRIPLETS", archs, tos, tvs)
 
-    variants = d.getVar("MULTILIB_VARIANTS", True) or ""
+    variants = d.getVar("MULTILIB_VARIANTS") or ""
     for item in variants.split():
         localdata = bb.data.createCopy(d)
         overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
         localdata.setVar("OVERRIDES", overrides)
         bb.data.update_data(localdata)
 
-        archs.append(localdata.getVar("PACKAGE_ARCHS", True).split())
-        tos.append(localdata.getVar("TARGET_OS", True))
-        tvs.append(localdata.getVar("TARGET_VENDOR", True))
+        archs.append(localdata.getVar("PACKAGE_ARCHS").split())
+        tos.append(localdata.getVar("TARGET_OS"))
+        tvs.append(localdata.getVar("TARGET_VENDOR"))
 
     settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs)
 
@@ -374,10 +374,10 @@ python () {
     # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends"
     pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
     if pkgconfigflags:
-        pkgconfig = (d.getVar('PACKAGECONFIG', True) or "").split()
-        pn = d.getVar("PN", True)
+        pkgconfig = (d.getVar('PACKAGECONFIG') or "").split()
+        pn = d.getVar("PN")
 
-        mlprefix = d.getVar("MLPREFIX", True)
+        mlprefix = d.getVar("MLPREFIX")
 
         def expandFilter(appends, extension, prefix):
             appends = bb.utils.explode_deps(d.expand(" ".join(appends)))
@@ -419,7 +419,7 @@ python () {
             num = len(items)
             if num > 4:
                 bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend can be specified!"
-                        % (d.getVar('PN', True), flag))
+                        % (d.getVar('PN'), flag))
 
             if flag in pkgconfig:
                 if num >= 3 and items[2]:
@@ -434,8 +434,8 @@ python () {
         appendVar('RDEPENDS_${PN}', extrardeps)
         appendVar('PACKAGECONFIG_CONFARGS', extraconf)
 
-    pn = d.getVar('PN', True)
-    license = d.getVar('LICENSE', True)
+    pn = d.getVar('PN')
+    license = d.getVar('LICENSE')
     if license == "INVALID":
         bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)
 
@@ -465,26 +465,26 @@ python () {
         d.setVarFlag('do_devshell', 'fakeroot', '1')
         d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
 
-    need_machine = d.getVar('COMPATIBLE_MACHINE', True)
+    need_machine = d.getVar('COMPATIBLE_MACHINE')
     if need_machine:
         import re
-        compat_machines = (d.getVar('MACHINEOVERRIDES', True) or "").split(":")
+        compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":")
         for m in compat_machines:
             if re.match(need_machine, m):
                 break
         else:
-            raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE', True))
+            raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE'))
 
     source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
     if not source_mirror_fetch:
-        need_host = d.getVar('COMPATIBLE_HOST', True)
+        need_host = d.getVar('COMPATIBLE_HOST')
         if need_host:
             import re
-            this_host = d.getVar('HOST_SYS', True)
+            this_host = d.getVar('HOST_SYS')
             if not re.match(need_host, this_host):
                 raise bb.parse.SkipPackage("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)
 
-    bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE', True) or "").split()
+    bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()
 
     check_license = False if pn.startswith("nativesdk-") else True
     for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}",
@@ -503,21 +503,21 @@ python () {
         for lic in bad_licenses:
             spdx_license = return_spdx(d, lic)
             for w in ["LGPLv2_WHITELIST_", "WHITELIST_"]:
-                whitelist.extend((d.getVar(w + lic, True) or "").split())
+                whitelist.extend((d.getVar(w + lic) or "").split())
                 if spdx_license:
-                    whitelist.extend((d.getVar(w + spdx_license, True) or "").split())
+                    whitelist.extend((d.getVar(w + spdx_license) or "").split())
                 '''
                 We need to track what we are whitelisting and why. If pn is
                 incompatible we need to be able to note that the image that
                 is created may infact contain incompatible licenses despite
                 INCOMPATIBLE_LICENSE being set.
                 '''
-                incompatwl.extend((d.getVar(w + lic, True) or "").split())
+                incompatwl.extend((d.getVar(w + lic) or "").split())
                 if spdx_license:
-                    incompatwl.extend((d.getVar(w + spdx_license, True) or "").split())
+                    incompatwl.extend((d.getVar(w + spdx_license) or "").split())
 
         if not pn in whitelist:
-            pkgs = d.getVar('PACKAGES', True).split()
+            pkgs = d.getVar('PACKAGES').split()
             skipped_pkgs = []
             unskipped_pkgs = []
             for pkg in pkgs:
@@ -529,7 +529,7 @@ python () {
             if unskipped_pkgs:
                 for pkg in skipped_pkgs:
                     bb.debug(1, "SKIPPING the package " + pkg + " at do_rootfs because it's " + license)
-                    mlprefix = d.getVar('MLPREFIX', True)
+                    mlprefix = d.getVar('MLPREFIX')
                     d.setVar('LICENSE_EXCLUSION-' + mlprefix + pkg, 1)
                 for pkg in unskipped_pkgs:
                     bb.debug(1, "INCLUDING the package " + pkg)
@@ -545,8 +545,8 @@ python () {
         # matching of license expressions - just check that all license strings
         # in LICENSE_<pkg> are found in LICENSE.
         license_set = oe.license.list_licenses(license)
-        for pkg in d.getVar('PACKAGES', True).split():
-            pkg_license = d.getVar('LICENSE_' + pkg, True)
+        for pkg in d.getVar('PACKAGES').split():
+            pkg_license = d.getVar('LICENSE_' + pkg)
             if pkg_license:
                 unlisted = oe.license.list_licenses(pkg_license) - license_set
                 if unlisted:
@@ -554,7 +554,7 @@ python () {
                             "listed in LICENSE" % (pkg, ' '.join(unlisted)))
 
     needsrcrev = False
-    srcuri = d.getVar('SRC_URI', True)
+    srcuri = d.getVar('SRC_URI')
     for uri in srcuri.split():
         (scheme, _ , path) = bb.fetch.decodeurl(uri)[:3]
 
@@ -614,8 +614,8 @@ python () {
     set_packagetriplet(d)
 
     # 'multimachine' handling
-    mach_arch = d.getVar('MACHINE_ARCH', True)
-    pkg_arch = d.getVar('PACKAGE_ARCH', True)
+    mach_arch = d.getVar('MACHINE_ARCH')
+    pkg_arch = d.getVar('PACKAGE_ARCH')
 
     if (pkg_arch == mach_arch):
         # Already machine specific - nothing further to do
@@ -625,11 +625,11 @@ python () {
     # We always try to scan SRC_URI for urls with machine overrides
     # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
    #
-    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', True)
+    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH')
     if override != '0':
         paths = []
-        fpaths = (d.getVar('FILESPATH', True) or '').split(':')
-        machine = d.getVar('MACHINE', True)
+        fpaths = (d.getVar('FILESPATH') or '').split(':')
+        machine = d.getVar('MACHINE')
         for p in fpaths:
             if os.path.basename(p) == machine and os.path.isdir(p):
                 paths.append(p)
@@ -646,16 +646,16 @@ python () {
                         d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
                         return
 
-    packages = d.getVar('PACKAGES', True).split()
+    packages = d.getVar('PACKAGES').split()
     for pkg in packages:
-        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg, True)
+        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg)
 
         # We could look for != PACKAGE_ARCH here but how to choose
        # if multiple differences are present?
        # Look through PACKAGE_ARCHS for the priority order?
        if pkgarch and pkgarch == mach_arch:
            d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
-            bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN", True))
+            bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN"))
 }
 
 addtask cleansstate after do_clean
@@ -666,7 +666,7 @@ addtask cleanall after do_cleansstate
 do_cleansstate[nostamp] = "1"
 
 python do_cleanall() {
-    src_uri = (d.getVar('SRC_URI', True) or "").split()
+    src_uri = (d.getVar('SRC_URI') or "").split()
    if len(src_uri) == 0:
        return
 
diff --git a/meta/classes/binconfig.bbclass b/meta/classes/binconfig.bbclass
index 8591308aa7..5372294142 100644
--- a/meta/classes/binconfig.bbclass
+++ b/meta/classes/binconfig.bbclass
@@ -22,7 +22,7 @@ def get_binconfig_mangle(d):
         s += " -e 's:-I${WORKDIR}:-I${STAGING_INCDIR}:'"
         s += " -e 's:-L${WORKDIR}:-L${STAGING_LIBDIR}:'"
         if d.getVar("OE_BINCONFIG_EXTRA_MANGLE", False):
-            s += d.getVar("OE_BINCONFIG_EXTRA_MANGLE", True)
+            s += d.getVar("OE_BINCONFIG_EXTRA_MANGLE")
 
     return s
 
diff --git a/meta/classes/blacklist.bbclass b/meta/classes/blacklist.bbclass
index a0141a82c0..c6f422398c 100644
--- a/meta/classes/blacklist.bbclass
+++ b/meta/classes/blacklist.bbclass
@@ -16,7 +16,7 @@
 addhandler blacklist_multilib_eventhandler
 blacklist_multilib_eventhandler[eventmask] = "bb.event.ConfigParsed"
 python blacklist_multilib_eventhandler() {
-    multilibs = e.data.getVar('MULTILIBS', True)
+    multilibs = e.data.getVar('MULTILIBS')
     if not multilibs:
         return
 
@@ -38,7 +38,7 @@ python blacklist_multilib_eventhandler() {
 }
 
 python () {
-    blacklist = d.getVarFlag('PNBLACKLIST', d.getVar('PN', True), True)
+    blacklist = d.getVarFlag('PNBLACKLIST', d.getVar('PN'), True)
 
     if blacklist:
         raise bb.parse.SkipPackage("Recipe is blacklisted: %s" % (blacklist))
diff --git a/meta/classes/bugzilla.bbclass b/meta/classes/bugzilla.bbclass
index 3fc8956428..6d9a8211f0 100644
--- a/meta/classes/bugzilla.bbclass
+++ b/meta/classes/bugzilla.bbclass
@@ -110,12 +110,12 @@ python bugzilla_eventhandler() {
110 return 110 return
111 111
112 if name == "TaskFailed": 112 if name == "TaskFailed":
113 xmlrpc = data.getVar("BUGZILLA_XMLRPC", True) 113 xmlrpc = data.getVar("BUGZILLA_XMLRPC")
114 user = data.getVar("BUGZILLA_USER", True) 114 user = data.getVar("BUGZILLA_USER")
115 passw = data.getVar("BUGZILLA_PASS", True) 115 passw = data.getVar("BUGZILLA_PASS")
116 product = data.getVar("BUGZILLA_PRODUCT", True) 116 product = data.getVar("BUGZILLA_PRODUCT")
117 compon = data.getVar("BUGZILLA_COMPONENT", True) 117 compon = data.getVar("BUGZILLA_COMPONENT")
118 version = data.getVar("BUGZILLA_VERSION", True) 118 version = data.getVar("BUGZILLA_VERSION")
119 119
120 proxy = data.getVar('http_proxy', True ) 120 proxy = data.getVar('http_proxy', True )
121 if (proxy): 121 if (proxy):
@@ -133,14 +133,14 @@ python bugzilla_eventhandler() {
             'component': compon}

         # evil hack to figure out what is going on
-        debug_file = open(os.path.join(data.getVar("TMPDIR", True),"..","bugzilla-log"),"a")
+        debug_file = open(os.path.join(data.getVar("TMPDIR"),"..","bugzilla-log"),"a")

         file = None
-        bugname = "%(package)s-%(pv)s-autobuild" % { "package" : data.getVar("PN", True),
-                                                     "pv" : data.getVar("PV", True),
+        bugname = "%(package)s-%(pv)s-autobuild" % { "package" : data.getVar("PN"),
+                                                     "pv" : data.getVar("PV"),
                                                    }
-        log_file = glob.glob("%s/log.%s.*" % (event.data.getVar('T', True), event.task))
-        text = "The %s step in %s failed at %s for machine %s" % (e.task, data.getVar("PN", True), data.getVar('DATETIME', True), data.getVar( 'MACHINE', True ) )
+        log_file = glob.glob("%s/log.%s.*" % (event.data.getVar('T'), event.task))
+        text = "The %s step in %s failed at %s for machine %s" % (e.task, data.getVar("PN"), data.getVar('DATETIME'), data.getVar( 'MACHINE', True ) )
         if len(log_file) != 0:
             print >> debug_file, "Adding log file %s" % log_file[0]
             file = open(log_file[0], 'r')
@@ -168,7 +168,7 @@ python bugzilla_eventhandler() {

         if bug_number and log:
             print >> debug_file, "The bug is known as '%s'" % bug_number
-            desc = "Build log for machine %s" % (data.getVar('MACHINE', True))
+            desc = "Build log for machine %s" % (data.getVar('MACHINE'))
             if not bugzilla_create_attachment(debug_file, server, args.copy(), bug_number, text, log_file[0], log, desc):
                 print >> debug_file, "Failed to attach the build log for bug #%s" % bug_number
             else:
diff --git a/meta/classes/buildhistory.bbclass b/meta/classes/buildhistory.bbclass
index d82e9bb55c..73cd88669a 100644
--- a/meta/classes/buildhistory.bbclass
+++ b/meta/classes/buildhistory.bbclass
@@ -64,18 +64,18 @@ PATCH_GIT_USER_NAME ?= "OpenEmbedded"
 # Write out metadata about this package for comparison when writing future packages
 #
 python buildhistory_emit_pkghistory() {
-    if not d.getVar('BB_CURRENTTASK', True) in ['packagedata', 'packagedata_setscene']:
+    if not d.getVar('BB_CURRENTTASK') in ['packagedata', 'packagedata_setscene']:
         return 0

-    if not "package" in (d.getVar('BUILDHISTORY_FEATURES', True) or "").split():
+    if not "package" in (d.getVar('BUILDHISTORY_FEATURES') or "").split():
         return 0

     import re
     import json
     import errno

-    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True)
-    oldpkghistdir = d.getVar('BUILDHISTORY_OLD_DIR_PACKAGE', True)
+    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE')
+    oldpkghistdir = d.getVar('BUILDHISTORY_OLD_DIR_PACKAGE')

     class RecipeInfo:
         def __init__(self, name):
@@ -182,12 +182,12 @@ python buildhistory_emit_pkghistory() {
         items.sort()
         return ' '.join(items)

-    pn = d.getVar('PN', True)
-    pe = d.getVar('PE', True) or "0"
-    pv = d.getVar('PV', True)
-    pr = d.getVar('PR', True)
+    pn = d.getVar('PN')
+    pe = d.getVar('PE') or "0"
+    pv = d.getVar('PV')
+    pr = d.getVar('PR')

-    pkgdata_dir = d.getVar('PKGDATA_DIR', True)
+    pkgdata_dir = d.getVar('PKGDATA_DIR')
     packages = ""
     try:
         with open(os.path.join(pkgdata_dir, pn)) as f:
@@ -203,7 +203,7 @@ python buildhistory_emit_pkghistory() {
             raise

     packagelist = packages.split()
-    preserve = d.getVar('BUILDHISTORY_PRESERVE', True).split()
+    preserve = d.getVar('BUILDHISTORY_PRESERVE').split()
     if not os.path.exists(pkghistdir):
         bb.utils.mkdirhier(pkghistdir)
     else:
@@ -223,11 +223,11 @@ python buildhistory_emit_pkghistory() {
     rcpinfo.pe = pe
     rcpinfo.pv = pv
     rcpinfo.pr = pr
-    rcpinfo.depends = sortlist(oe.utils.squashspaces(d.getVar('DEPENDS', True) or ""))
+    rcpinfo.depends = sortlist(oe.utils.squashspaces(d.getVar('DEPENDS') or ""))
     rcpinfo.packages = packages
     write_recipehistory(rcpinfo, d)

-    pkgdest = d.getVar('PKGDEST', True)
+    pkgdest = d.getVar('PKGDEST')
     for pkg in packagelist:
         pkgdata = {}
         with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f:
@@ -293,7 +293,7 @@ python buildhistory_emit_pkghistory() {
 def write_recipehistory(rcpinfo, d):
     bb.debug(2, "Writing recipe history")

-    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True)
+    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE')

     infofile = os.path.join(pkghistdir, "latest")
     with open(infofile, "w") as f:
@@ -308,7 +308,7 @@ def write_recipehistory(rcpinfo, d):
 def write_pkghistory(pkginfo, d):
     bb.debug(2, "Writing package history for package %s" % pkginfo.name)

-    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True)
+    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE')

     pkgpath = os.path.join(pkghistdir, pkginfo.name)
     if not os.path.exists(pkgpath):
@@ -369,7 +369,7 @@ def buildhistory_list_installed(d, rootfs_type="image"):
         pkgs = sdk_list_installed_packages(d, rootfs_type == "sdk_target")

     for output_type, output_file in process_list:
-        output_file_full = os.path.join(d.getVar('WORKDIR', True), output_file)
+        output_file_full = os.path.join(d.getVar('WORKDIR'), output_file)

         with open(output_file_full, 'w') as output:
             output.write(format_pkg_list(pkgs, output_type))
@@ -550,7 +550,7 @@ END
 python buildhistory_get_extra_sdkinfo() {
     import operator
     import math
-    if d.getVar('BB_CURRENTTASK', True) == 'populate_sdk_ext':
+    if d.getVar('BB_CURRENTTASK') == 'populate_sdk_ext':
         tasksizes = {}
         filesizes = {}
         for root, _, files in os.walk(d.expand('${SDK_OUTPUT}/${SDKPATH}/sstate-cache')):
@@ -591,7 +591,7 @@ SDK_POSTPROCESS_COMMAND_append = " buildhistory_get_sdkinfo ; buildhistory_get_e
 SDK_POSTPROCESS_COMMAND[vardepvalueexclude] .= "| buildhistory_get_sdkinfo ; buildhistory_get_extra_sdkinfo; "

 def buildhistory_get_build_id(d):
-    if d.getVar('BB_WORKERCONTEXT', True) != '1':
+    if d.getVar('BB_WORKERCONTEXT') != '1':
         return ""
     localdata = bb.data.createCopy(d)
     bb.data.update_data(localdata)
@@ -605,12 +605,12 @@ def buildhistory_get_build_id(d):
         if flines:
             statuslines.extend(flines)

-    statusheader = d.getVar('BUILDCFG_HEADER', True)
+    statusheader = d.getVar('BUILDCFG_HEADER')
     return('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))

 def buildhistory_get_metadata_revs(d):
     # We want an easily machine-readable format here, so get_layers_branch_rev isn't quite what we want
-    layers = (d.getVar("BBLAYERS", True) or "").split()
+    layers = (d.getVar("BBLAYERS") or "").split()
     medadata_revs = ["%-17s = %s:%s" % (os.path.basename(i), \
         base_get_metadata_git_branch(i, None).strip(), \
         base_get_metadata_git_revision(i, None)) \
@@ -622,7 +622,7 @@ def outputvars(vars, listvars, d):
     listvars = listvars.split()
     ret = ""
     for var in vars:
-        value = d.getVar(var, True) or ""
+        value = d.getVar(var) or ""
         if var in listvars:
             # Squash out spaces
             value = oe.utils.squashspaces(value)
@@ -630,17 +630,17 @@ def outputvars(vars, listvars, d):
     return ret.rstrip('\n')

 def buildhistory_get_imagevars(d):
-    if d.getVar('BB_WORKERCONTEXT', True) != '1':
+    if d.getVar('BB_WORKERCONTEXT') != '1':
         return ""
     imagevars = "DISTRO DISTRO_VERSION USER_CLASSES IMAGE_CLASSES IMAGE_FEATURES IMAGE_LINGUAS IMAGE_INSTALL BAD_RECOMMENDATIONS NO_RECOMMENDATIONS PACKAGE_EXCLUDE ROOTFS_POSTPROCESS_COMMAND IMAGE_POSTPROCESS_COMMAND"
     listvars = "USER_CLASSES IMAGE_CLASSES IMAGE_FEATURES IMAGE_LINGUAS IMAGE_INSTALL BAD_RECOMMENDATIONS PACKAGE_EXCLUDE"
     return outputvars(imagevars, listvars, d)

 def buildhistory_get_sdkvars(d):
-    if d.getVar('BB_WORKERCONTEXT', True) != '1':
+    if d.getVar('BB_WORKERCONTEXT') != '1':
         return ""
     sdkvars = "DISTRO DISTRO_VERSION SDK_NAME SDK_VERSION SDKMACHINE SDKIMAGE_FEATURES BAD_RECOMMENDATIONS NO_RECOMMENDATIONS PACKAGE_EXCLUDE"
-    if d.getVar('BB_CURRENTTASK', True) == 'populate_sdk_ext':
+    if d.getVar('BB_CURRENTTASK') == 'populate_sdk_ext':
         # Extensible SDK uses some additional variables
         sdkvars += " SDK_LOCAL_CONF_WHITELIST SDK_LOCAL_CONF_BLACKLIST SDK_INHERIT_BLACKLIST SDK_UPDATE_URL SDK_EXT_TYPE SDK_RECRDEP_TASKS SDK_INCLUDE_PKGDATA SDK_INCLUDE_TOOLCHAIN"
     listvars = "SDKIMAGE_FEATURES BAD_RECOMMENDATIONS PACKAGE_EXCLUDE SDK_LOCAL_CONF_WHITELIST SDK_LOCAL_CONF_BLACKLIST SDK_INHERIT_BLACKLIST"
@@ -735,16 +735,16 @@ END
 }

 python buildhistory_eventhandler() {
-    if e.data.getVar('BUILDHISTORY_FEATURES', True).strip():
-        reset = e.data.getVar("BUILDHISTORY_RESET", True)
-        olddir = e.data.getVar("BUILDHISTORY_OLD_DIR", True)
+    if e.data.getVar('BUILDHISTORY_FEATURES').strip():
+        reset = e.data.getVar("BUILDHISTORY_RESET")
+        olddir = e.data.getVar("BUILDHISTORY_OLD_DIR")
         if isinstance(e, bb.event.BuildStarted):
             if reset:
                 import shutil
                 # Clean up after potentially interrupted build.
                 if os.path.isdir(olddir):
                     shutil.rmtree(olddir)
-                rootdir = e.data.getVar("BUILDHISTORY_DIR", True)
+                rootdir = e.data.getVar("BUILDHISTORY_DIR")
                 entries = [ x for x in os.listdir(rootdir) if not x.startswith('.') ]
                 bb.utils.mkdirhier(olddir)
                 for entry in entries:
@@ -754,7 +754,7 @@ python buildhistory_eventhandler() {
             if reset:
                 import shutil
                 shutil.rmtree(olddir)
-        if e.data.getVar("BUILDHISTORY_COMMIT", True) == "1":
+        if e.data.getVar("BUILDHISTORY_COMMIT") == "1":
             bb.note("Writing buildhistory")
             localdata = bb.data.createCopy(e.data)
             localdata.setVar('BUILDHISTORY_BUILD_FAILURES', str(e._failures))
@@ -774,7 +774,7 @@ def _get_srcrev_values(d):
     """

     scms = []
-    fetcher = bb.fetch.Fetch(d.getVar('SRC_URI', True).split(), d)
+    fetcher = bb.fetch.Fetch(d.getVar('SRC_URI').split(), d)
     urldata = fetcher.ud
     for u in urldata:
         if urldata[u].method.supports_srcrev():
@@ -806,7 +806,7 @@ def _get_srcrev_values(d):
 do_fetch[postfuncs] += "write_srcrev"
 do_fetch[vardepsexclude] += "write_srcrev"
 python write_srcrev() {
-    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True)
+    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE')
     srcrevfile = os.path.join(pkghistdir, 'latest_srcrev')

     srcrevs, tag_srcrevs = _get_srcrev_values(d)
@@ -838,7 +838,7 @@ python write_srcrev() {
             for name, srcrev in tag_srcrevs.items():
                 f.write('# tag_%s = "%s"\n' % (name, srcrev))
                 if name in old_tag_srcrevs and old_tag_srcrevs[name] != srcrev:
-                    pkg = d.getVar('PN', True)
+                    pkg = d.getVar('PN')
                     bb.warn("Revision for tag %s in package %s was changed since last build (from %s to %s)" % (name, pkg, old_tag_srcrevs[name], srcrev))

     else:
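Note: in event handler context the datastore arrives on the event object, so
the buildhistory_eventhandler hunk above applies the same rewrite to
e.data.getVar() calls. A minimal sketch of the pattern, assuming a build
event e:

    features = e.data.getVar('BUILDHISTORY_FEATURES') or ""
    if features.strip():
        olddir = e.data.getVar("BUILDHISTORY_OLD_DIR")
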
diff --git a/meta/classes/buildstats-summary.bbclass b/meta/classes/buildstats-summary.bbclass
index b86abcc3f1..f9b241b6c5 100644
--- a/meta/classes/buildstats-summary.bbclass
+++ b/meta/classes/buildstats-summary.bbclass
@@ -7,7 +7,7 @@ python buildstats_summary () {
     if not os.path.exists(bsdir):
         return

-    sstatetasks = (e.data.getVar('SSTATETASKS', True) or '').split()
+    sstatetasks = (e.data.getVar('SSTATETASKS') or '').split()
     built = collections.defaultdict(lambda: [set(), set()])
     for pf in os.listdir(bsdir):
         taskdir = os.path.join(bsdir, pf)
diff --git a/meta/classes/buildstats.bbclass b/meta/classes/buildstats.bbclass
index c6b77e6a2a..8703cb2b33 100644
--- a/meta/classes/buildstats.bbclass
+++ b/meta/classes/buildstats.bbclass
@@ -75,8 +75,8 @@ def get_buildtimedata(var, d):
     return timediff, cpuperc

 def write_task_data(status, logfile, e, d):
-    bn = d.getVar('BUILDNAME', True)
-    bsdir = os.path.join(d.getVar('BUILDSTATS_BASE', True), bn)
+    bn = d.getVar('BUILDNAME')
+    bsdir = os.path.join(d.getVar('BUILDSTATS_BASE'), bn)
     with open(os.path.join(logfile), "a") as f:
         elapsedtime = get_timedata("__timedata_task", d, e.time)
         if elapsedtime:
@@ -106,9 +106,9 @@ python run_buildstats () {
     import bb.event
     import time, subprocess, platform

-    bn = d.getVar('BUILDNAME', True)
-    bsdir = os.path.join(d.getVar('BUILDSTATS_BASE', True), bn)
-    taskdir = os.path.join(bsdir, d.getVar('PF', True))
+    bn = d.getVar('BUILDNAME')
+    bsdir = os.path.join(d.getVar('BUILDSTATS_BASE'), bn)
+    taskdir = os.path.join(bsdir, d.getVar('PF'))

     if isinstance(e, bb.event.BuildStarted):
         ########################################################################
@@ -162,7 +162,7 @@ python run_buildstats () {
         if e.task == "do_rootfs":
             bs = os.path.join(bsdir, "build_stats")
             with open(bs, "a") as f:
-                rootfs = d.getVar('IMAGE_ROOTFS', True)
+                rootfs = d.getVar('IMAGE_ROOTFS')
                 if os.path.isdir(rootfs):
                     try:
                         rootfs_size = subprocess.check_output(["du", "-sh", rootfs],
@@ -197,7 +197,7 @@ python runqueue_stats () {
     # are available that we need to find the output directory.
     # The persistent SystemStats is stored in the datastore and
     # closed when the build is done.
-    system_stats = d.getVar('_buildstats_system_stats', True)
+    system_stats = d.getVar('_buildstats_system_stats')
     if not system_stats and isinstance(e, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted)):
         system_stats = buildstats.SystemStats(d)
         d.setVar('_buildstats_system_stats', system_stats)
diff --git a/meta/classes/ccache.bbclass b/meta/classes/ccache.bbclass
index 2e9837cf07..93fcacaf1a 100644
--- a/meta/classes/ccache.bbclass
+++ b/meta/classes/ccache.bbclass
@@ -1,4 +1,4 @@
-CCACHE = "${@bb.utils.which(d.getVar('PATH', True), 'ccache') and 'ccache '}"
+CCACHE = "${@bb.utils.which(d.getVar('PATH'), 'ccache') and 'ccache '}"
 export CCACHE_DIR ?= "${TMPDIR}/ccache/${MULTIMACH_HOST_SYS}/${PN}"
 CCACHE_DISABLE[unexport] = "1"

diff --git a/meta/classes/chrpath.bbclass b/meta/classes/chrpath.bbclass
index f183b4aeeb..ad3c3975a5 100644
--- a/meta/classes/chrpath.bbclass
+++ b/meta/classes/chrpath.bbclass
@@ -44,7 +44,7 @@ def process_file_linux(cmd, fpath, rootdir, baseprefix, tmpdir, d):
         p = sub.Popen([cmd, '-r', args, fpath],stdout=sub.PIPE,stderr=sub.PIPE)
         out, err = p.communicate()
         if p.returncode != 0:
-            bb.fatal("%s: chrpath command failed with exit code %d:\n%s%s" % (d.getVar('PN', True), p.returncode, out, err))
+            bb.fatal("%s: chrpath command failed with exit code %d:\n%s%s" % (d.getVar('PN'), p.returncode, out, err))

 def process_file_darwin(cmd, fpath, rootdir, baseprefix, tmpdir, d):
     import subprocess as sub
@@ -72,7 +72,7 @@ def process_dir (rootdir, directory, d):
     cmd = d.expand('${CHRPATH_BIN}')
     tmpdir = os.path.normpath(d.getVar('TMPDIR', False))
     baseprefix = os.path.normpath(d.expand('${base_prefix}'))
-    hostos = d.getVar("HOST_OS", True)
+    hostos = d.getVar("HOST_OS")

     #bb.debug("Checking %s for binaries to process" % directory)
     if not os.path.exists(directory):
diff --git a/meta/classes/cmake.bbclass b/meta/classes/cmake.bbclass
index fad0baa519..9e74599296 100644
--- a/meta/classes/cmake.bbclass
+++ b/meta/classes/cmake.bbclass
@@ -46,7 +46,7 @@ cmake_do_generate_toolchain_file() {
 # CMake system name must be something like "Linux".
 # This is important for cross-compiling.
 set( CMAKE_SYSTEM_NAME `echo ${TARGET_OS} | sed -e 's/^./\u&/' -e 's/^\(Linux\).*/\1/'` )
-set( CMAKE_SYSTEM_PROCESSOR ${@map_target_arch_to_uname_arch(d.getVar('TARGET_ARCH', True))} )
+set( CMAKE_SYSTEM_PROCESSOR ${@map_target_arch_to_uname_arch(d.getVar('TARGET_ARCH'))} )
 set( CMAKE_C_COMPILER ${OECMAKE_C_COMPILER} )
 set( CMAKE_CXX_COMPILER ${OECMAKE_CXX_COMPILER} )
 set( CMAKE_ASM_COMPILER ${OECMAKE_C_COMPILER} )
@@ -112,15 +112,15 @@ cmake_do_configure() {
       ${OECMAKE_SITEFILE} \
       ${OECMAKE_SOURCEPATH} \
       -DCMAKE_INSTALL_PREFIX:PATH=${prefix} \
-      -DCMAKE_INSTALL_BINDIR:PATH=${@os.path.relpath(d.getVar('bindir', True), d.getVar('prefix', True))} \
-      -DCMAKE_INSTALL_SBINDIR:PATH=${@os.path.relpath(d.getVar('sbindir', True), d.getVar('prefix', True))} \
-      -DCMAKE_INSTALL_LIBEXECDIR:PATH=${@os.path.relpath(d.getVar('libexecdir', True), d.getVar('prefix', True))} \
+      -DCMAKE_INSTALL_BINDIR:PATH=${@os.path.relpath(d.getVar('bindir'), d.getVar('prefix'))} \
+      -DCMAKE_INSTALL_SBINDIR:PATH=${@os.path.relpath(d.getVar('sbindir'), d.getVar('prefix'))} \
+      -DCMAKE_INSTALL_LIBEXECDIR:PATH=${@os.path.relpath(d.getVar('libexecdir'), d.getVar('prefix'))} \
       -DCMAKE_INSTALL_SYSCONFDIR:PATH=${sysconfdir} \
-      -DCMAKE_INSTALL_SHAREDSTATEDIR:PATH=${@os.path.relpath(d.getVar('sharedstatedir', True), d. getVar('prefix', True))} \
+      -DCMAKE_INSTALL_SHAREDSTATEDIR:PATH=${@os.path.relpath(d.getVar('sharedstatedir'), d. getVar('prefix'))} \
       -DCMAKE_INSTALL_LOCALSTATEDIR:PATH=${localstatedir} \
-      -DCMAKE_INSTALL_LIBDIR:PATH=${@os.path.relpath(d.getVar('libdir', True), d.getVar('prefix', True))} \
-      -DCMAKE_INSTALL_INCLUDEDIR:PATH=${@os.path.relpath(d.getVar('includedir', True), d.getVar('prefix', True))} \
-      -DCMAKE_INSTALL_DATAROOTDIR:PATH=${@os.path.relpath(d.getVar('datadir', True), d.getVar('prefix', True))} \
+      -DCMAKE_INSTALL_LIBDIR:PATH=${@os.path.relpath(d.getVar('libdir'), d.getVar('prefix'))} \
+      -DCMAKE_INSTALL_INCLUDEDIR:PATH=${@os.path.relpath(d.getVar('includedir'), d.getVar('prefix'))} \
+      -DCMAKE_INSTALL_DATAROOTDIR:PATH=${@os.path.relpath(d.getVar('datadir'), d.getVar('prefix'))} \
       -DCMAKE_INSTALL_SO_NO_EXE=0 \
       -DCMAKE_TOOLCHAIN_FILE=${WORKDIR}/toolchain.cmake \
       -DCMAKE_VERBOSE_MAKEFILE=1 \
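Note: the same substitution applies inside inline Python (${@...}) expressions,
as in the cmake.bbclass hunk above. A minimal sketch of what one such
expression computes, with /usr and /usr/bin standing in for typical
prefix/bindir values:

    import os.path
    prefix = "/usr"      # stand-in for d.getVar('prefix')
    bindir = "/usr/bin"  # stand-in for d.getVar('bindir')
    # mirrors ${@os.path.relpath(d.getVar('bindir'), d.getVar('prefix'))}
    print(os.path.relpath(bindir, prefix))  # prints: bin
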
diff --git a/meta/classes/cml1.bbclass b/meta/classes/cml1.bbclass
index 5834806269..187d407d98 100644
--- a/meta/classes/cml1.bbclass
+++ b/meta/classes/cml1.bbclass
@@ -26,7 +26,7 @@ python do_menuconfig() {
     except OSError:
         mtime = 0

-    oe_terminal("${SHELL} -c \"make %s; if [ \$? -ne 0 ]; then echo 'Command failed.'; printf 'Press any key to continue... '; read r; fi\"" % d.getVar('KCONFIG_CONFIG_COMMAND', True),
+    oe_terminal("${SHELL} -c \"make %s; if [ \$? -ne 0 ]; then echo 'Command failed.'; printf 'Press any key to continue... '; read r; fi\"" % d.getVar('KCONFIG_CONFIG_COMMAND'),
                 d.getVar('PN', True ) + ' Configuration', d)

     # FIXME this check can be removed when the minimum bitbake version has been bumped
@@ -49,7 +49,7 @@ python do_diffconfig() {
     import shutil
     import subprocess

-    workdir = d.getVar('WORKDIR', True)
+    workdir = d.getVar('WORKDIR')
     fragment = workdir + '/fragment.cfg'
     configorig = '.config.orig'
     config = '.config'
diff --git a/meta/classes/compress_doc.bbclass b/meta/classes/compress_doc.bbclass
index 8073c173e5..069db1997b 100644
--- a/meta/classes/compress_doc.bbclass
+++ b/meta/classes/compress_doc.bbclass
@@ -31,25 +31,25 @@ DOC_DECOMPRESS_CMD[xz] ?= "unxz -v"

 PACKAGE_PREPROCESS_FUNCS += "package_do_compress_doc compress_doc_updatealternatives"
 python package_do_compress_doc() {
-    compress_mode = d.getVar('DOC_COMPRESS', True)
-    compress_list = (d.getVar('DOC_COMPRESS_LIST', True) or '').split()
+    compress_mode = d.getVar('DOC_COMPRESS')
+    compress_list = (d.getVar('DOC_COMPRESS_LIST') or '').split()
     if compress_mode not in compress_list:
         bb.fatal('Compression policy %s not supported (not listed in %s)\n' % (compress_mode, compress_list))

-    dvar = d.getVar('PKGD', True)
+    dvar = d.getVar('PKGD')
     compress_cmds = {}
     decompress_cmds = {}
     for mode in compress_list:
         compress_cmds[mode] = d.getVarFlag('DOC_COMPRESS_CMD', mode, True)
         decompress_cmds[mode] = d.getVarFlag('DOC_DECOMPRESS_CMD', mode, True)

-    mandir = os.path.abspath(dvar + os.sep + d.getVar("mandir", True))
+    mandir = os.path.abspath(dvar + os.sep + d.getVar("mandir"))
     if os.path.exists(mandir):
         # Decompress doc files which format is not compress_mode
         decompress_doc(mandir, compress_mode, decompress_cmds)
         compress_doc(mandir, compress_mode, compress_cmds)

-    infodir = os.path.abspath(dvar + os.sep + d.getVar("infodir", True))
+    infodir = os.path.abspath(dvar + os.sep + d.getVar("infodir"))
     if os.path.exists(infodir):
         # Decompress doc files which format is not compress_mode
         decompress_doc(infodir, compress_mode, decompress_cmds)
@@ -218,18 +218,18 @@ python compress_doc_updatealternatives () {
     if not bb.data.inherits_class('update-alternatives', d):
         return

-    mandir = d.getVar("mandir", True)
-    infodir = d.getVar("infodir", True)
-    compress_mode = d.getVar('DOC_COMPRESS', True)
-    for pkg in (d.getVar('PACKAGES', True) or "").split():
-        old_names = (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split()
+    mandir = d.getVar("mandir")
+    infodir = d.getVar("infodir")
+    compress_mode = d.getVar('DOC_COMPRESS')
+    for pkg in (d.getVar('PACKAGES') or "").split():
+        old_names = (d.getVar('ALTERNATIVE_%s' % pkg) or "").split()
         new_names = []
         for old_name in old_names:
             old_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', old_name, True)
             old_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, old_name, True) or \
                 d.getVarFlag('ALTERNATIVE_TARGET', old_name, True) or \
-                d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or \
-                d.getVar('ALTERNATIVE_TARGET', True) or \
+                d.getVar('ALTERNATIVE_TARGET_%s' % pkg) or \
+                d.getVar('ALTERNATIVE_TARGET') or \
                 old_link
             # Sometimes old_target is specified as relative to the link name.
             old_target = os.path.join(os.path.dirname(old_link), old_target)
@@ -247,7 +247,7 @@ python compress_doc_updatealternatives () {
             elif d.getVarFlag('ALTERNATIVE_TARGET', old_name, True):
                 d.delVarFlag('ALTERNATIVE_TARGET', old_name)
                 d.setVarFlag('ALTERNATIVE_TARGET', new_name, new_target)
-            elif d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True):
+            elif d.getVar('ALTERNATIVE_TARGET_%s' % pkg):
                 d.setVar('ALTERNATIVE_TARGET_%s' % pkg, new_target)
             elif d.getVar('ALTERNATIVE_TARGET', old_name, True):
                 d.setVar('ALTERNATIVE_TARGET', new_target)
diff --git a/meta/classes/copyleft_compliance.bbclass b/meta/classes/copyleft_compliance.bbclass
index 907c1836b3..eabf12ce7a 100644
--- a/meta/classes/copyleft_compliance.bbclass
+++ b/meta/classes/copyleft_compliance.bbclass
@@ -13,7 +13,7 @@ python do_prepare_copyleft_sources () {
     import os.path
     import shutil

-    p = d.getVar('P', True)
+    p = d.getVar('P')
     included, reason = copyleft_should_include(d)
     if not included:
         bb.debug(1, 'copyleft: %s is excluded: %s' % (p, reason))
@@ -21,13 +21,13 @@ python do_prepare_copyleft_sources () {
     else:
         bb.debug(1, 'copyleft: %s is included: %s' % (p, reason))

-    sources_dir = d.getVar('COPYLEFT_SOURCES_DIR', True)
-    dl_dir = d.getVar('DL_DIR', True)
-    src_uri = d.getVar('SRC_URI', True).split()
+    sources_dir = d.getVar('COPYLEFT_SOURCES_DIR')
+    dl_dir = d.getVar('DL_DIR')
+    src_uri = d.getVar('SRC_URI').split()
     fetch = bb.fetch2.Fetch(src_uri, d)
     ud = fetch.ud

-    pf = d.getVar('PF', True)
+    pf = d.getVar('PF')
     dest = os.path.join(sources_dir, pf)
     shutil.rmtree(dest, ignore_errors=True)
     bb.utils.mkdirhier(dest)
diff --git a/meta/classes/copyleft_filter.bbclass b/meta/classes/copyleft_filter.bbclass
index 46be7f7d2f..426956f08f 100644
--- a/meta/classes/copyleft_filter.bbclass
+++ b/meta/classes/copyleft_filter.bbclass
@@ -49,7 +49,7 @@ def copyleft_should_include(d):

     included, motive = False, 'recipe did not match anything'

-    recipe_type = d.getVar('COPYLEFT_RECIPE_TYPE', True)
+    recipe_type = d.getVar('COPYLEFT_RECIPE_TYPE')
     if recipe_type not in oe.data.typed_value('COPYLEFT_RECIPE_TYPES', d):
         include, motive = False, 'recipe type "%s" is excluded' % recipe_type

@@ -57,9 +57,9 @@ def copyleft_should_include(d):
     exclude = oe.data.typed_value('COPYLEFT_LICENSE_EXCLUDE', d)

     try:
-        is_included, reason = oe.license.is_included(d.getVar('LICENSE', True), include, exclude)
+        is_included, reason = oe.license.is_included(d.getVar('LICENSE'), include, exclude)
     except oe.license.LicenseError as exc:
-        bb.fatal('%s: %s' % (d.getVar('PF', True), exc))
+        bb.fatal('%s: %s' % (d.getVar('PF'), exc))
     else:
         if is_included:
             if reason:
@@ -69,10 +69,10 @@ def copyleft_should_include(d):
         else:
             included, motive = False, 'recipe has excluded licenses: %s' % ', '.join(reason)

-    if any(fnmatch(d.getVar('PN', True), name) \
+    if any(fnmatch(d.getVar('PN'), name) \
            for name in oe.data.typed_value('COPYLEFT_PN_INCLUDE', d)):
         included, motive = True, 'recipe included by name'
-    if any(fnmatch(d.getVar('PN', True), name) \
+    if any(fnmatch(d.getVar('PN'), name) \
            for name in oe.data.typed_value('COPYLEFT_PN_EXCLUDE', d)):
         included, motive = False, 'recipe excluded by name'

diff --git a/meta/classes/cross-canadian.bbclass b/meta/classes/cross-canadian.bbclass
index 21921b3dd0..64db1134f5 100644
--- a/meta/classes/cross-canadian.bbclass
+++ b/meta/classes/cross-canadian.bbclass
@@ -20,25 +20,25 @@ CANADIANEXTRAOS = "${BASECANADIANEXTRAOS}"
 CANADIANEXTRAVENDOR = ""
 MODIFYTOS ??= "1"
 python () {
-    archs = d.getVar('PACKAGE_ARCHS', True).split()
+    archs = d.getVar('PACKAGE_ARCHS').split()
     sdkarchs = []
     for arch in archs:
         sdkarchs.append(arch + '-${SDKPKGSUFFIX}')
     d.setVar('PACKAGE_ARCHS', " ".join(sdkarchs))

     # Allow the following code segment to be disabled, e.g. meta-environment
-    if d.getVar("MODIFYTOS", True) != "1":
+    if d.getVar("MODIFYTOS") != "1":
         return

-    if d.getVar("TCLIBC", True) == "baremetal":
+    if d.getVar("TCLIBC") == "baremetal":
         return

-    tos = d.getVar("TARGET_OS", True)
+    tos = d.getVar("TARGET_OS")
     whitelist = []
     extralibcs = [""]
-    if "uclibc" in d.getVar("BASECANADIANEXTRAOS", True):
+    if "uclibc" in d.getVar("BASECANADIANEXTRAOS"):
         extralibcs.append("uclibc")
-    if "musl" in d.getVar("BASECANADIANEXTRAOS", True):
+    if "musl" in d.getVar("BASECANADIANEXTRAOS"):
         extralibcs.append("musl")
     for variant in ["", "spe", "x32", "eabi", "n32"]:
         for libc in extralibcs:
@@ -51,33 +51,33 @@ python () {
                 entry = entry + "-" + libc
             whitelist.append(entry)
     if tos not in whitelist:
-        bb.fatal("Building cross-candian for an unknown TARGET_SYS (%s), please update cross-canadian.bbclass" % d.getVar("TARGET_SYS", True))
+        bb.fatal("Building cross-candian for an unknown TARGET_SYS (%s), please update cross-canadian.bbclass" % d.getVar("TARGET_SYS"))

     for n in ["PROVIDES", "DEPENDS"]:
-        d.setVar(n, d.getVar(n, True))
-    d.setVar("STAGING_BINDIR_TOOLCHAIN", d.getVar("STAGING_BINDIR_TOOLCHAIN", True))
+        d.setVar(n, d.getVar(n))
+    d.setVar("STAGING_BINDIR_TOOLCHAIN", d.getVar("STAGING_BINDIR_TOOLCHAIN"))
     for prefix in ["AR", "AS", "DLLTOOL", "CC", "CXX", "GCC", "LD", "LIPO", "NM", "OBJDUMP", "RANLIB", "STRIP", "WINDRES"]:
         n = prefix + "_FOR_TARGET"
-        d.setVar(n, d.getVar(n, True))
+        d.setVar(n, d.getVar(n))
     # This is a bit ugly. We need to zero LIBC/ABI extension which will change TARGET_OS
     # however we need the old value in some variables. We expand those here first.
-    tarch = d.getVar("TARGET_ARCH", True)
+    tarch = d.getVar("TARGET_ARCH")
     if tarch == "x86_64":
         d.setVar("LIBCEXTENSION", "")
         d.setVar("ABIEXTENSION", "")
         d.appendVar("CANADIANEXTRAOS", " linux-gnux32")
-        for extraos in d.getVar("BASECANADIANEXTRAOS", True).split():
+        for extraos in d.getVar("BASECANADIANEXTRAOS").split():
             d.appendVar("CANADIANEXTRAOS", " " + extraos + "x32")
     elif tarch == "powerpc":
         # PowerPC can build "linux" and "linux-gnuspe"
         d.setVar("LIBCEXTENSION", "")
         d.setVar("ABIEXTENSION", "")
         d.appendVar("CANADIANEXTRAOS", " linux-gnuspe")
-        for extraos in d.getVar("BASECANADIANEXTRAOS", True).split():
+        for extraos in d.getVar("BASECANADIANEXTRAOS").split():
             d.appendVar("CANADIANEXTRAOS", " " + extraos + "spe")
     elif tarch == "mips64":
         d.appendVar("CANADIANEXTRAOS", " linux-gnun32")
-        for extraos in d.getVar("BASECANADIANEXTRAOS", True).split():
+        for extraos in d.getVar("BASECANADIANEXTRAOS").split():
             d.appendVar("CANADIANEXTRAOS", " " + extraos + "n32")
     if tarch == "arm" or tarch == "armeb":
         d.appendVar("CANADIANEXTRAOS", " linux-gnueabi linux-musleabi linux-uclibceabi")
@@ -86,10 +86,10 @@ python () {
         d.setVar("TARGET_OS", "linux")

     # Also need to handle multilib target vendors
-    vendors = d.getVar("CANADIANEXTRAVENDOR", True)
+    vendors = d.getVar("CANADIANEXTRAVENDOR")
     if not vendors:
         vendors = all_multilib_tune_values(d, 'TARGET_VENDOR')
-    origvendor = d.getVar("TARGET_VENDOR_MULTILIB_ORIGINAL", True)
+    origvendor = d.getVar("TARGET_VENDOR_MULTILIB_ORIGINAL")
     if origvendor:
         d.setVar("TARGET_VENDOR", origvendor)
         if origvendor not in vendors.split():
@@ -116,7 +116,7 @@ HOST_LD_ARCH = "${SDK_LD_ARCH}"
 HOST_AS_ARCH = "${SDK_AS_ARCH}"

 #assign DPKG_ARCH
-DPKG_ARCH = "${@debian_arch_map(d.getVar('SDK_ARCH', True), '')}"
+DPKG_ARCH = "${@debian_arch_map(d.getVar('SDK_ARCH'), '')}"

 CPPFLAGS = "${BUILDSDK_CPPFLAGS}"
 CFLAGS = "${BUILDSDK_CFLAGS}"
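Note: the d.setVar(n, d.getVar(n)) calls in the cross-canadian hunk above
deliberately pin a variable to its currently expanded value before TARGET_OS
and related inputs are changed underneath it; only the True argument
disappears. A minimal sketch of the idiom, assuming a datastore d:

    for n in ["PROVIDES", "DEPENDS"]:
        # Expand now and store the result, so later changes to the variables
        # referenced inside n's value no longer affect it.
        d.setVar(n, d.getVar(n))
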
diff --git a/meta/classes/crosssdk.bbclass b/meta/classes/crosssdk.bbclass
index 7315c38f13..eaf2beb94d 100644
--- a/meta/classes/crosssdk.bbclass
+++ b/meta/classes/crosssdk.bbclass
@@ -5,7 +5,7 @@ MACHINEOVERRIDES = ""
 PACKAGE_ARCH = "${SDK_ARCH}"
 python () {
     # set TUNE_PKGARCH to SDK_ARCH
-    d.setVar('TUNE_PKGARCH', d.getVar('SDK_ARCH', True))
+    d.setVar('TUNE_PKGARCH', d.getVar('SDK_ARCH'))
 }

 STAGING_DIR_TARGET = "${STAGING_DIR}/${SDK_ARCH}-${SDKPKGSUFFIX}${SDK_VENDOR}-${SDK_OS}"
diff --git a/meta/classes/cve-check.bbclass b/meta/classes/cve-check.bbclass
index 75b8fa9ab9..aad0573ee9 100644
--- a/meta/classes/cve-check.bbclass
+++ b/meta/classes/cve-check.bbclass
@@ -51,7 +51,7 @@ python do_cve_check () {
     Check recipe for patched and unpatched CVEs
     """

-    if os.path.exists(d.getVar("CVE_CHECK_TMP_FILE", True)):
+    if os.path.exists(d.getVar("CVE_CHECK_TMP_FILE")):
         patched_cves = get_patches_cves(d)
         patched, unpatched = check_cves(d, patched_cves)
         if patched or unpatched:
@@ -70,7 +70,7 @@ python cve_check_cleanup () {
     Delete the file used to gather all the CVE information.
     """

-    bb.utils.remove(e.data.getVar("CVE_CHECK_TMP_FILE", True))
+    bb.utils.remove(e.data.getVar("CVE_CHECK_TMP_FILE"))
 }

 addhandler cve_check_cleanup
@@ -83,12 +83,12 @@ python cve_check_write_rootfs_manifest () {

     import shutil

-    if os.path.exists(d.getVar("CVE_CHECK_TMP_FILE", True)):
+    if os.path.exists(d.getVar("CVE_CHECK_TMP_FILE")):
         bb.note("Writing rootfs CVE manifest")
-        deploy_dir = d.getVar("DEPLOY_DIR_IMAGE", True)
-        link_name = d.getVar("IMAGE_LINK_NAME", True)
-        manifest_name = d.getVar("CVE_CHECK_MANIFEST", True)
-        cve_tmp_file = d.getVar("CVE_CHECK_TMP_FILE", True)
+        deploy_dir = d.getVar("DEPLOY_DIR_IMAGE")
+        link_name = d.getVar("IMAGE_LINK_NAME")
+        manifest_name = d.getVar("CVE_CHECK_MANIFEST")
+        cve_tmp_file = d.getVar("CVE_CHECK_TMP_FILE")

         shutil.copyfile(cve_tmp_file, manifest_name)

@@ -101,7 +101,7 @@ python cve_check_write_rootfs_manifest () {
         bb.plain("Image CVE report stored in: %s" % manifest_name)
 }

-ROOTFS_POSTPROCESS_COMMAND_prepend = "${@'cve_check_write_rootfs_manifest; ' if d.getVar('CVE_CHECK_CREATE_MANIFEST', True) == '1' else ''}"
+ROOTFS_POSTPROCESS_COMMAND_prepend = "${@'cve_check_write_rootfs_manifest; ' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"

 def get_patches_cves(d):
     """
@@ -110,7 +110,7 @@ def get_patches_cves(d):

     import re

-    pn = d.getVar("PN", True)
+    pn = d.getVar("PN")
     cve_match = re.compile("CVE:( CVE\-\d{4}\-\d+)+")
     patched_cves = set()
     bb.debug(2, "Looking for patches that solves CVEs for %s" % pn)
@@ -149,15 +149,15 @@ def check_cves(d, patched_cves):
     cves_patched = []
     cves_unpatched = []
     bpn = d.getVar("CVE_PRODUCT")
-    pv = d.getVar("PV", True).split("git+")[0]
+    pv = d.getVar("PV").split("git+")[0]
     cves = " ".join(patched_cves)
-    cve_db_dir = d.getVar("CVE_CHECK_DB_DIR", True)
-    cve_whitelist = ast.literal_eval(d.getVar("CVE_CHECK_CVE_WHITELIST", True))
+    cve_db_dir = d.getVar("CVE_CHECK_DB_DIR")
+    cve_whitelist = ast.literal_eval(d.getVar("CVE_CHECK_CVE_WHITELIST"))
     cve_cmd = "cve-check-tool"
     cmd = [cve_cmd, "--no-html", "--csv", "--not-affected", "-t", "faux", "-d", cve_db_dir]

     # If the recipe has been whitlisted we return empty lists
-    if d.getVar("PN", True) in d.getVar("CVE_CHECK_PN_WHITELIST", True).split():
+    if d.getVar("PN") in d.getVar("CVE_CHECK_PN_WHITELIST").split():
         bb.note("Recipe has been whitelisted, skipping check")
         return ([], [])

@@ -210,7 +210,7 @@ def get_cve_info(d, cves):
         from pysqlite2 import dbapi2 as sqlite3

     cve_data = {}
-    db_file = d.getVar("CVE_CHECK_DB_FILE", True)
+    db_file = d.getVar("CVE_CHECK_DB_FILE")
     placeholder = ",".join("?" * len(cves))
     query = "SELECT * FROM NVD WHERE id IN (%s)" % placeholder
     conn = sqlite3.connect(db_file)
@@ -231,15 +231,15 @@ def cve_write_data(d, patched, unpatched, cve_data):
     CVE manifest if enabled.
     """

-    cve_file = d.getVar("CVE_CHECK_LOCAL_FILE", True)
+    cve_file = d.getVar("CVE_CHECK_LOCAL_FILE")
     nvd_link = "https://web.nvd.nist.gov/view/vuln/detail?vulnId="
     write_string = ""
     first_alert = True
-    bb.utils.mkdirhier(d.getVar("CVE_CHECK_LOCAL_DIR", True))
+    bb.utils.mkdirhier(d.getVar("CVE_CHECK_LOCAL_DIR"))

     for cve in sorted(cve_data):
-        write_string += "PACKAGE NAME: %s\n" % d.getVar("PN", True)
-        write_string += "PACKAGE VERSION: %s\n" % d.getVar("PV", True)
+        write_string += "PACKAGE NAME: %s\n" % d.getVar("PN")
+        write_string += "PACKAGE VERSION: %s\n" % d.getVar("PV")
         write_string += "CVE: %s\n" % cve
         if cve in patched:
             write_string += "CVE STATUS: Patched\n"
@@ -257,13 +257,13 @@ def cve_write_data(d, patched, unpatched, cve_data):
         bb.note("Writing file %s with CVE information" % cve_file)
         f.write(write_string)

-    if d.getVar("CVE_CHECK_COPY_FILES", True) == "1":
-        cve_dir = d.getVar("CVE_CHECK_DIR", True)
+    if d.getVar("CVE_CHECK_COPY_FILES") == "1":
+        cve_dir = d.getVar("CVE_CHECK_DIR")
         bb.utils.mkdirhier(cve_dir)
-        deploy_file = os.path.join(cve_dir, d.getVar("PN", True))
+        deploy_file = os.path.join(cve_dir, d.getVar("PN"))
         with open(deploy_file, "w") as f:
             f.write(write_string)

-    if d.getVar("CVE_CHECK_CREATE_MANIFEST", True) == "1":
-        with open(d.getVar("CVE_CHECK_TMP_FILE", True), "a") as f:
+    if d.getVar("CVE_CHECK_CREATE_MANIFEST") == "1":
+        with open(d.getVar("CVE_CHECK_TMP_FILE"), "a") as f:
             f.write("%s" % write_string)
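Note: getVar() returns None for unset variables whether or not expansion is
requested, so None-guards survive the rewrite untouched. The cve-check hunks
above rely on these variables having defaults; where no default exists, the
usual defensive idiom is (a sketch, assuming a datastore d):

    whitelist = (d.getVar("CVE_CHECK_PN_WHITELIST") or "").split()
    if d.getVar("PN") in whitelist:
        bb.note("Recipe has been whitelisted, skipping check")
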
diff --git a/meta/classes/debian.bbclass b/meta/classes/debian.bbclass
index be7cacca98..8124558b81 100644
--- a/meta/classes/debian.bbclass
+++ b/meta/classes/debian.bbclass
@@ -20,17 +20,17 @@ do_package_write_tar[rdeptask] = "${DEBIANRDEP}"
 do_package_write_rpm[rdeptask] = "${DEBIANRDEP}"

 python () {
-    if not d.getVar("PACKAGES", True):
+    if not d.getVar("PACKAGES"):
         d.setVar("DEBIANRDEP", "")
 }

 python debian_package_name_hook () {
     import glob, copy, stat, errno, re

-    pkgdest = d.getVar('PKGDEST', True)
-    packages = d.getVar('PACKAGES', True)
-    bin_re = re.compile(".*/s?" + os.path.basename(d.getVar("bindir", True)) + "$")
-    lib_re = re.compile(".*/" + os.path.basename(d.getVar("libdir", True)) + "$")
+    pkgdest = d.getVar('PKGDEST')
+    packages = d.getVar('PACKAGES')
+    bin_re = re.compile(".*/s?" + os.path.basename(d.getVar("bindir")) + "$")
+    lib_re = re.compile(".*/" + os.path.basename(d.getVar("libdir")) + "$")
     so_re = re.compile("lib.*\.so")

     def socrunch(s):
@@ -53,11 +53,11 @@ python debian_package_name_hook () {
         return (s[stat.ST_MODE] & stat.S_IEXEC)

     def add_rprovides(pkg, d):
-        newpkg = d.getVar('PKG_' + pkg, True)
+        newpkg = d.getVar('PKG_' + pkg)
         if newpkg and newpkg != pkg:
-            provs = (d.getVar('RPROVIDES_' + pkg, True) or "").split()
+            provs = (d.getVar('RPROVIDES_' + pkg) or "").split()
             if pkg not in provs:
-                d.appendVar('RPROVIDES_' + pkg, " " + pkg + " (=" + d.getVar("PKGV", True) + ")")
+                d.appendVar('RPROVIDES_' + pkg, " " + pkg + " (=" + d.getVar("PKGV") + ")")

     def auto_libname(packages, orig_pkg):
         sonames = []
@@ -70,7 +70,7 @@ python debian_package_name_hook () {
             if lib_re.match(root):
                 has_libs = 1
             if so_re.match(os.path.basename(file)):
-                cmd = (d.getVar('TARGET_PREFIX', True) or "") + "objdump -p " + file + " 2>/dev/null"
+                cmd = (d.getVar('TARGET_PREFIX') or "") + "objdump -p " + file + " 2>/dev/null"
                 fd = os.popen(cmd)
                 lines = fd.readlines()
                 fd.close()
@@ -84,7 +84,7 @@ python debian_package_name_hook () {
         if len(sonames) == 1:
             soname = sonames[0]
         elif len(sonames) > 1:
-            lead = d.getVar('LEAD_SONAME', True)
+            lead = d.getVar('LEAD_SONAME')
             if lead:
                 r = re.compile(lead)
                 filtered = []
@@ -115,7 +115,7 @@ python debian_package_name_hook () {
                     newpkg = pkgname
                 else:
                     newpkg = pkg.replace(orig_pkg, devname, 1)
-                mlpre=d.getVar('MLPREFIX', True)
+                mlpre=d.getVar('MLPREFIX')
                 if mlpre:
                     if not newpkg.find(mlpre) == 0:
                         newpkg = mlpre + newpkg
@@ -131,7 +131,7 @@ python debian_package_name_hook () {
     # and later
     # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libticw orig_pkg ncurses-libtic debian_pn None newpkg libticw
     # so we need to handle ncurses-libticw->libticw5 before ncurses-libtic->libtic5
-    for pkg in sorted((d.getVar('AUTO_LIBNAME_PKGS', True) or "").split(), reverse=True):
+    for pkg in sorted((d.getVar('AUTO_LIBNAME_PKGS') or "").split(), reverse=True):
         auto_libname(packages, pkg)
 }

diff --git a/meta/classes/devshell.bbclass b/meta/classes/devshell.bbclass
index be71aff35f..864ace4cb4 100644
--- a/meta/classes/devshell.bbclass
+++ b/meta/classes/devshell.bbclass
@@ -5,14 +5,14 @@ DEVSHELL = "${SHELL}"
 python do_devshell () {
     if d.getVarFlag("do_devshell", "manualfakeroot", True):
         d.prependVar("DEVSHELL", "pseudo ")
-        fakeenv = d.getVar("FAKEROOTENV", True).split()
+        fakeenv = d.getVar("FAKEROOTENV").split()
         for f in fakeenv:
             k = f.split("=")
             d.setVar(k[0], k[1])
             d.appendVar("OE_TERMINAL_EXPORTS", " " + k[0])
         d.delVarFlag("do_devshell", "fakeroot")

-    oe_terminal(d.getVar('DEVSHELL', True), 'OpenEmbedded Developer Shell', d)
+    oe_terminal(d.getVar('DEVSHELL'), 'OpenEmbedded Developer Shell', d)
 }

 addtask devshell after do_patch
@@ -82,7 +82,7 @@ def devpyshell(d):
                 more = False

     i = code.InteractiveInterpreter(locals=_context)
-    print("OE PyShell (PN = %s)\n" % d.getVar("PN", True))
+    print("OE PyShell (PN = %s)\n" % d.getVar("PN"))

     def prompt(more):
         if more:
diff --git a/meta/classes/distro_features_check.bbclass b/meta/classes/distro_features_check.bbclass
index 7e91dbcf4a..e74d3c04ba 100644
--- a/meta/classes/distro_features_check.bbclass
+++ b/meta/classes/distro_features_check.bbclass
@@ -11,15 +11,15 @@

 python () {
     # Assume at least one var is set.
-    distro_features = (d.getVar('DISTRO_FEATURES', True) or "").split()
+    distro_features = (d.getVar('DISTRO_FEATURES') or "").split()

-    any_of_distro_features = d.getVar('ANY_OF_DISTRO_FEATURES', True)
+    any_of_distro_features = d.getVar('ANY_OF_DISTRO_FEATURES')
     if any_of_distro_features:
         any_of_distro_features = any_of_distro_features.split()
         if set.isdisjoint(set(any_of_distro_features),set(distro_features)):
             raise bb.parse.SkipPackage("one of '%s' needs to be in DISTRO_FEATURES" % any_of_distro_features)

-    required_distro_features = d.getVar('REQUIRED_DISTRO_FEATURES', True)
+    required_distro_features = d.getVar('REQUIRED_DISTRO_FEATURES')
     if required_distro_features:
         required_distro_features = required_distro_features.split()
         for f in required_distro_features:
@@ -28,7 +28,7 @@ python () {
             else:
                 raise bb.parse.SkipPackage("missing required distro feature '%s' (not in DISTRO_FEATURES)" % f)

-    conflict_distro_features = d.getVar('CONFLICT_DISTRO_FEATURES', True)
+    conflict_distro_features = d.getVar('CONFLICT_DISTRO_FEATURES')
    if conflict_distro_features:
         conflict_distro_features = conflict_distro_features.split()
         for f in conflict_distro_features:
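Note: anonymous "python () { ... }" blocks such as the one above run at parse
time, so these getVar() calls decide whether the recipe is skipped before any
task executes. A minimal sketch of the same check, assuming a hypothetical
required feature name:

    if 'x11' not in (d.getVar('DISTRO_FEATURES') or "").split():
        raise bb.parse.SkipPackage("missing required distro feature 'x11'")
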
diff --git a/meta/classes/distrodata.bbclass b/meta/classes/distrodata.bbclass
index fbb7402e0c..5b3a3e0f1c 100644
--- a/meta/classes/distrodata.bbclass
+++ b/meta/classes/distrodata.bbclass
@@ -25,75 +25,75 @@ addtask distrodata_np
 do_distrodata_np[nostamp] = "1"
 python do_distrodata_np() {
     localdata = bb.data.createCopy(d)
-    pn = d.getVar("PN", True)
+    pn = d.getVar("PN")
     bb.note("Package Name: %s" % pn)

     import oe.distro_check as dist_check
-    tmpdir = d.getVar('TMPDIR', True)
+    tmpdir = d.getVar('TMPDIR')
     distro_check_dir = os.path.join(tmpdir, "distro_check")
-    datetime = localdata.getVar('DATETIME', True)
+    datetime = localdata.getVar('DATETIME')
     dist_check.update_distro_data(distro_check_dir, datetime, localdata)

     if pn.find("-native") != -1:
         pnstripped = pn.split("-native")
         bb.note("Native Split: %s" % pnstripped)
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)

     if pn.find("-cross") != -1:
         pnstripped = pn.split("-cross")
         bb.note("cross Split: %s" % pnstripped)
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)

     if pn.find("-crosssdk") != -1:
         pnstripped = pn.split("-crosssdk")
         bb.note("cross Split: %s" % pnstripped)
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)

     if pn.startswith("nativesdk-"):
         pnstripped = pn.replace("nativesdk-", "")
         bb.note("NativeSDK Split: %s" % pnstripped)
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)


     if pn.find("-initial") != -1:
         pnstripped = pn.split("-initial")
         bb.note("initial Split: %s" % pnstripped)
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)

     """generate package information from .bb file"""
-    pname = localdata.getVar('PN', True)
-    pcurver = localdata.getVar('PV', True)
-    pdesc = localdata.getVar('DESCRIPTION', True)
+    pname = localdata.getVar('PN')
+    pcurver = localdata.getVar('PV')
+    pdesc = localdata.getVar('DESCRIPTION')
     if pdesc is not None:
         pdesc = pdesc.replace(',','')
         pdesc = pdesc.replace('\n','')

-    pgrp = localdata.getVar('SECTION', True)
-    plicense = localdata.getVar('LICENSE', True).replace(',','_')
+    pgrp = localdata.getVar('SECTION')
+    plicense = localdata.getVar('LICENSE').replace(',','_')

-    rstatus = localdata.getVar('RECIPE_COLOR', True)
+    rstatus = localdata.getVar('RECIPE_COLOR')
     if rstatus is not None:
         rstatus = rstatus.replace(',','')

-    pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION', True)
+    pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION')
     if pcurver == pupver:
         vermatch="1"
     else:
         vermatch="0"
-    noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON', True)
+    noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON')
     if noupdate_reason is None:
         noupdate="0"
     else:
         noupdate="1"
         noupdate_reason = noupdate_reason.replace(',','')

-    maintainer = localdata.getVar('RECIPE_MAINTAINER', True)
-    rlrd = localdata.getVar('RECIPE_UPSTREAM_DATE', True)
+    maintainer = localdata.getVar('RECIPE_MAINTAINER')
+    rlrd = localdata.getVar('RECIPE_UPSTREAM_DATE')
     result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata)

     bb.note("DISTRO: %s,%s,%s,%s,%s,%s,%s,%s,%s\n" % \
@@ -109,80 +109,80 @@ addtask distrodata
 do_distrodata[nostamp] = "1"
 python do_distrodata() {
     import csv
-    logpath = d.getVar('LOG_DIR', True)
+    logpath = d.getVar('LOG_DIR')
     bb.utils.mkdirhier(logpath)
     logfile = os.path.join(logpath, "distrodata.csv")

     import oe.distro_check as dist_check
     localdata = bb.data.createCopy(d)
-    tmpdir = d.getVar('TMPDIR', True)
+    tmpdir = d.getVar('TMPDIR')
     distro_check_dir = os.path.join(tmpdir, "distro_check")
-    datetime = localdata.getVar('DATETIME', True)
+    datetime = localdata.getVar('DATETIME')
     dist_check.update_distro_data(distro_check_dir, datetime, localdata)

-    pn = d.getVar("PN", True)
+    pn = d.getVar("PN")
     bb.note("Package Name: %s" % pn)

     if pn.find("-native") != -1:
         pnstripped = pn.split("-native")
         bb.note("Native Split: %s" % pnstripped)
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)

     if pn.startswith("nativesdk-"):
         pnstripped = pn.replace("nativesdk-", "")
         bb.note("NativeSDK Split: %s" % pnstripped)
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)

     if pn.find("-cross") != -1:
         pnstripped = pn.split("-cross")
         bb.note("cross Split: %s" % pnstripped)
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)

     if pn.find("-crosssdk") != -1:
         pnstripped = pn.split("-crosssdk")
         bb.note("cross Split: %s" % pnstripped)
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)

     if pn.find("-initial") != -1:
         pnstripped = pn.split("-initial")
         bb.note("initial Split: %s" % pnstripped)
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)

     """generate package information from .bb file"""
-    pname = localdata.getVar('PN', True)
-    pcurver = localdata.getVar('PV', True)
-    pdesc = localdata.getVar('DESCRIPTION', True)
+    pname = localdata.getVar('PN')
+    pcurver = localdata.getVar('PV')
+    pdesc = localdata.getVar('DESCRIPTION')
     if pdesc is not None:
         pdesc = pdesc.replace(',','')
         pdesc = pdesc.replace('\n','')

-    pgrp = localdata.getVar('SECTION', True)
-    plicense = localdata.getVar('LICENSE', True).replace(',','_')
+    pgrp = localdata.getVar('SECTION')
+    plicense = localdata.getVar('LICENSE').replace(',','_')

-    rstatus = localdata.getVar('RECIPE_COLOR', True)
+    rstatus = localdata.getVar('RECIPE_COLOR')
     if rstatus is not None:
         rstatus = rstatus.replace(',','')

-    pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION', True)
+    pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION')
     if pcurver == pupver:
         vermatch="1"
     else:
         vermatch="0"

-    noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON', True)
+    noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON')
     if noupdate_reason is None:
         noupdate="0"
     else:
         noupdate="1"
         noupdate_reason = noupdate_reason.replace(',','')

-    maintainer = localdata.getVar('RECIPE_MAINTAINER', True)
-    rlrd = localdata.getVar('RECIPE_UPSTREAM_DATE', True)
+    maintainer = localdata.getVar('RECIPE_MAINTAINER')
+    rlrd = localdata.getVar('RECIPE_UPSTREAM_DATE')
     # do the comparison
     result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata)

@@ -272,60 +272,60 @@ python do_checkpkg() {
     from bb.fetch2 import FetchError, NoMethodError, decodeurl

     """first check whether a uri is provided"""
-    src_uri = (d.getVar('SRC_URI', True) or '').split()
+    src_uri = (d.getVar('SRC_URI') or '').split()
     if src_uri:
         uri_type, _, _, _, _, _ = decodeurl(src_uri[0])
     else:
         uri_type = "none"

     """initialize log files."""
-    logpath = d.getVar('LOG_DIR', True)
+    logpath = d.getVar('LOG_DIR')
     bb.utils.mkdirhier(logpath)
     logfile = os.path.join(logpath, "checkpkg.csv")

     """generate package information from .bb file"""
-    pname = d.getVar('PN', True)
+    pname = d.getVar('PN')

     if pname.find("-native") != -1:
-        if d.getVar('BBCLASSEXTEND', True):
+        if d.getVar('BBCLASSEXTEND'):
             return
         pnstripped = pname.split("-native")
         bb.note("Native Split: %s" % pnstripped)
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)

     if pname.startswith("nativesdk-"):
-        if d.getVar('BBCLASSEXTEND', True):
+        if d.getVar('BBCLASSEXTEND'):
             return
         pnstripped = pname.replace("nativesdk-", "")
         bb.note("NativeSDK Split: %s" % pnstripped)
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)

     if pname.find("-cross") != -1:
         pnstripped = pname.split("-cross")
         bb.note("cross Split: %s" % pnstripped)
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)

     if pname.find("-initial") != -1:
         pnstripped = pname.split("-initial")
         bb.note("initial Split: %s" % pnstripped)
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)

-    pdesc = localdata.getVar('DESCRIPTION', True)
-    pgrp = localdata.getVar('SECTION', True)
-    pversion = localdata.getVar('PV', True)
-    plicense = localdata.getVar('LICENSE', True)
-    psection = localdata.getVar('SECTION', True)
-    phome = localdata.getVar('HOMEPAGE', True)
-    prelease = localdata.getVar('PR', True)
-    pdepends = localdata.getVar('DEPENDS', True)
-    pbugtracker = localdata.getVar('BUGTRACKER', True)
-    ppe = localdata.getVar('PE', True)
-    psrcuri = localdata.getVar('SRC_URI', True)
-    maintainer = localdata.getVar('RECIPE_MAINTAINER', True)
+    pdesc = localdata.getVar('DESCRIPTION')
+    pgrp = localdata.getVar('SECTION')
+    pversion = localdata.getVar('PV')
+    plicense = localdata.getVar('LICENSE')
+    psection = localdata.getVar('SECTION')
+    phome = localdata.getVar('HOMEPAGE')
+    prelease = localdata.getVar('PR')
+    pdepends = localdata.getVar('DEPENDS')
+    pbugtracker = localdata.getVar('BUGTRACKER')
+    ppe = localdata.getVar('PE')
+    psrcuri = localdata.getVar('SRC_URI')
+    maintainer = localdata.getVar('RECIPE_MAINTAINER')

     """ Get upstream version version """
     pupver = ""
@@ -362,7 +362,7 @@ python do_checkpkg() {
         psrcuri = "none"
     pdepends = "".join(pdepends.split("\t"))
     pdesc = "".join(pdesc.split("\t"))
-    no_upgr_reason = d.getVar('RECIPE_NO_UPDATE_REASON', True)
+    no_upgr_reason = d.getVar('RECIPE_NO_UPDATE_REASON')
     lf = bb.utils.lockfile("%s.lock" % logfile)
     with open(logfile, "a") as f:
         writer = csv.writer(f, delimiter='\t')
@@ -401,12 +401,12 @@ python do_distro_check() {

     localdata = bb.data.createCopy(d)
     bb.data.update_data(localdata)
-    tmpdir = d.getVar('TMPDIR', True)
+    tmpdir = d.getVar('TMPDIR')
     distro_check_dir = os.path.join(tmpdir, "distro_check")
-    logpath = d.getVar('LOG_DIR', True)
+    logpath = d.getVar('LOG_DIR')
     bb.utils.mkdirhier(logpath)
     result_file = os.path.join(logpath, "distrocheck.csv")
-    datetime = localdata.getVar('DATETIME', True)
+    datetime = localdata.getVar('DATETIME')
     dc.update_distro_data(distro_check_dir, datetime, localdata)

     # do the comparison
@@ -449,12 +449,12 @@ do_checklicense[nostamp] = "1"
 python do_checklicense() {
     import csv
     import shutil
-    logpath = d.getVar('LOG_DIR', True)
+    logpath = d.getVar('LOG_DIR')
     bb.utils.mkdirhier(logpath)
-    pn = d.getVar('PN', True)
+    pn = d.getVar('PN')
     logfile = os.path.join(logpath, "missinglicense.csv")
-    generic_directory = d.getVar('COMMON_LICENSE_DIR', True)
-    license_types = d.getVar('LICENSE', True)
+    generic_directory = d.getVar('COMMON_LICENSE_DIR')
+    license_types = d.getVar('LICENSE')
     for license_type in ((license_types.replace('+', '').replace('|', '&')
                           .replace('(', '').replace(')', '').replace(';', '')
                           .replace(',', '').replace(" ", "").split("&"))):
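The pn-stripping stanzas repeated throughout this file all follow one pattern: copy the datastore, prepend a pn-<stripped-name> override, and refresh the copy. A hedged consolidation sketch (the helper name is illustrative, not from the patch):

def _set_pn_override(localdata, d, stripped_pn):
    # apply per-recipe overrides for the stripped name to the copy
    localdata.setVar('OVERRIDES', "pn-" + stripped_pn + ":" + d.getVar('OVERRIDES'))
    bb.data.update_data(localdata)

for suffix in ("-native", "-cross", "-crosssdk", "-initial"):
    if suffix in pn:
        _set_pn_override(localdata, d, pn.split(suffix)[0])
if pn.startswith("nativesdk-"):
    _set_pn_override(localdata, d, pn.replace("nativesdk-", ""))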
diff --git a/meta/classes/distutils-base.bbclass b/meta/classes/distutils-base.bbclass
index aa18e8b292..9f398d7051 100644
--- a/meta/classes/distutils-base.bbclass
+++ b/meta/classes/distutils-base.bbclass
@@ -1,4 +1,4 @@
-DEPENDS += "${@["${PYTHON_PN}-native ${PYTHON_PN}", ""][(d.getVar('PACKAGES', True) == '')]}"
+DEPENDS += "${@["${PYTHON_PN}-native ${PYTHON_PN}", ""][(d.getVar('PACKAGES') == '')]}"
 RDEPENDS_${PN} += "${@['', '${PYTHON_PN}-core']['${CLASSOVERRIDE}' == 'class-target']}"

 inherit distutils-common-base pythonnative
diff --git a/meta/classes/distutils3-base.bbclass b/meta/classes/distutils3-base.bbclass
index 82ab6a3d1c..7dbf07ac4b 100644
--- a/meta/classes/distutils3-base.bbclass
+++ b/meta/classes/distutils3-base.bbclass
@@ -1,4 +1,4 @@
-DEPENDS += "${@["${PYTHON_PN}-native ${PYTHON_PN}", ""][(d.getVar('PACKAGES', True) == '')]}"
+DEPENDS += "${@["${PYTHON_PN}-native ${PYTHON_PN}", ""][(d.getVar('PACKAGES') == '')]}"
 RDEPENDS_${PN} += "${@['', '${PYTHON_PN}-core']['${CLASSOVERRIDE}' == 'class-target']}"

 inherit distutils-common-base python3native
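The DEPENDS lines in these two classes rely on Python's list-indexed-by-condition idiom, [on_false, on_true][cond], where a boolean condition coerces to 0 or 1. An equivalent, more explicit spelling (illustrative only, not a proposed change):

# as written: index 1 (the empty string) is picked when PACKAGES == ''
deps = ["${PYTHON_PN}-native ${PYTHON_PN}", ""][(d.getVar('PACKAGES') == '')]
# equivalent conditional expression
deps = "" if d.getVar('PACKAGES') == '' else "${PYTHON_PN}-native ${PYTHON_PN}"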
diff --git a/meta/classes/externalsrc.bbclass b/meta/classes/externalsrc.bbclass
index 31908c3ca2..5ba6c3472b 100644
--- a/meta/classes/externalsrc.bbclass
+++ b/meta/classes/externalsrc.bbclass
@@ -28,34 +28,34 @@ SRCTREECOVEREDTASKS ?= "do_patch do_unpack do_fetch"
 EXTERNALSRC_SYMLINKS ?= "oe-workdir:${WORKDIR} oe-logs:${T}"

 python () {
-    externalsrc = d.getVar('EXTERNALSRC', True)
+    externalsrc = d.getVar('EXTERNALSRC')

     # If this is the base recipe and EXTERNALSRC is set for it or any of its
     # derivatives, then enable BB_DONT_CACHE to force the recipe to always be
     # re-parsed so that the file-checksums function for do_compile is run every
     # time.
-    bpn = d.getVar('BPN', True)
-    if bpn == d.getVar('PN', True):
-        classextend = (d.getVar('BBCLASSEXTEND', True) or '').split()
+    bpn = d.getVar('BPN')
+    if bpn == d.getVar('PN'):
+        classextend = (d.getVar('BBCLASSEXTEND') or '').split()
         if (externalsrc or
             ('native' in classextend and
-             d.getVar('EXTERNALSRC_pn-%s-native' % bpn, True)) or
+             d.getVar('EXTERNALSRC_pn-%s-native' % bpn)) or
             ('nativesdk' in classextend and
-             d.getVar('EXTERNALSRC_pn-nativesdk-%s' % bpn, True)) or
+             d.getVar('EXTERNALSRC_pn-nativesdk-%s' % bpn)) or
             ('cross' in classextend and
-             d.getVar('EXTERNALSRC_pn-%s-cross' % bpn, True))):
+             d.getVar('EXTERNALSRC_pn-%s-cross' % bpn))):
             d.setVar('BB_DONT_CACHE', '1')

     if externalsrc:
         d.setVar('S', externalsrc)
-        externalsrcbuild = d.getVar('EXTERNALSRC_BUILD', True)
+        externalsrcbuild = d.getVar('EXTERNALSRC_BUILD')
         if externalsrcbuild:
             d.setVar('B', externalsrcbuild)
         else:
             d.setVar('B', '${WORKDIR}/${BPN}-${PV}/')

         local_srcuri = []
-        fetch = bb.fetch2.Fetch((d.getVar('SRC_URI', True) or '').split(), d)
+        fetch = bb.fetch2.Fetch((d.getVar('SRC_URI') or '').split(), d)
         for url in fetch.urls:
             url_data = fetch.ud[url]
             parm = url_data.parm
@@ -94,7 +94,7 @@ python () {
         # Note that we cannot use d.appendVarFlag() here because deps is expected to be a list object, not a string
         d.setVarFlag('do_configure', 'deps', (d.getVarFlag('do_configure', 'deps', False) or []) + ['do_unpack'])

-        for task in d.getVar("SRCTREECOVEREDTASKS", True).split():
+        for task in d.getVar("SRCTREECOVEREDTASKS").split():
             if local_srcuri and task in fetch_tasks:
                 continue
             bb.build.deltask(task, d)
@@ -106,13 +106,13 @@ python () {
         d.setVarFlag('do_configure', 'file-checksums', '${@srctree_configure_hash_files(d)}')

         # We don't want the workdir to go away
-        d.appendVar('RM_WORK_EXCLUDE', ' ' + d.getVar('PN', True))
+        d.appendVar('RM_WORK_EXCLUDE', ' ' + d.getVar('PN'))

         # If B=S the same builddir is used even for different architectures.
         # Thus, use a shared CONFIGURESTAMPFILE and STAMP directory so that
         # change of do_configure task hash is correctly detected and stamps are
         # invalidated if e.g. MACHINE changes.
-        if d.getVar('S', True) == d.getVar('B', True):
+        if d.getVar('S') == d.getVar('B'):
             configstamp = '${TMPDIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}/configure.sstate'
             d.setVar('CONFIGURESTAMPFILE', configstamp)
             d.setVar('STAMP', '${STAMPS_DIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}')
@@ -120,10 +120,10 @@ python () {

 python externalsrc_configure_prefunc() {
     # Create desired symlinks
-    symlinks = (d.getVar('EXTERNALSRC_SYMLINKS', True) or '').split()
+    symlinks = (d.getVar('EXTERNALSRC_SYMLINKS') or '').split()
     for symlink in symlinks:
         symsplit = symlink.split(':', 1)
-        lnkfile = os.path.join(d.getVar('S', True), symsplit[0])
+        lnkfile = os.path.join(d.getVar('S'), symsplit[0])
         target = d.expand(symsplit[1])
         if len(symsplit) > 1:
             if os.path.islink(lnkfile):
@@ -139,7 +139,7 @@ python externalsrc_configure_prefunc() {

 python externalsrc_compile_prefunc() {
     # Make it obvious that this is happening, since forgetting about it could lead to much confusion
-    bb.plain('NOTE: %s: compiling from external source tree %s' % (d.getVar('PN', True), d.getVar('EXTERNALSRC', True)))
+    bb.plain('NOTE: %s: compiling from external source tree %s' % (d.getVar('PN'), d.getVar('EXTERNALSRC')))
 }

 def srctree_hash_files(d):
@@ -147,7 +147,7 @@ def srctree_hash_files(d):
     import subprocess
     import tempfile

-    s_dir = d.getVar('EXTERNALSRC', True)
+    s_dir = d.getVar('EXTERNALSRC')
     git_dir = os.path.join(s_dir, '.git')
     oe_hash_file = os.path.join(git_dir, 'oe-devtool-tree-sha1')

@@ -165,7 +165,7 @@ def srctree_hash_files(d):
             fobj.write(sha1)
         ret = oe_hash_file + ':True'
     else:
-        ret = d.getVar('EXTERNALSRC', True) + '/*:True'
+        ret = d.getVar('EXTERNALSRC') + '/*:True'
     return ret

 def srctree_configure_hash_files(d):
@@ -173,7 +173,7 @@ def srctree_configure_hash_files(d):
     Get the list of files that should trigger do_configure to re-execute,
     based on the value of CONFIGURE_FILES
     """
-    in_files = (d.getVar('CONFIGURE_FILES', True) or '').split()
+    in_files = (d.getVar('CONFIGURE_FILES') or '').split()
     out_items = []
     search_files = []
     for entry in in_files:
@@ -182,7 +182,7 @@ def srctree_configure_hash_files(d):
         else:
             search_files.append(entry)
     if search_files:
-        s_dir = d.getVar('EXTERNALSRC', True)
+        s_dir = d.getVar('EXTERNALSRC')
         for root, _, files in os.walk(s_dir):
             for f in files:
                 if f in search_files:
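For orientation (not part of the patch): the file-checksums varflag used in this class takes space-separated <path>:<expected-to-exist> entries, which is why both helpers return strings ending in ':True'. Illustrative values only, with a hypothetical source path:

# git checkout: only the cached tree-hash file is watched
#   /home/user/src/myproject/.git/oe-devtool-tree-sha1:True
# plain directory: every file under the tree is watched
#   /home/user/src/myproject/*:True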
diff --git a/meta/classes/extrausers.bbclass b/meta/classes/extrausers.bbclass
index 852810e866..402fc7d808 100644
--- a/meta/classes/extrausers.bbclass
+++ b/meta/classes/extrausers.bbclass
@@ -15,7 +15,7 @@

 inherit useradd_base

-PACKAGE_INSTALL_append = " ${@['', 'base-passwd shadow'][bool(d.getVar('EXTRA_USERS_PARAMS', True))]}"
+PACKAGE_INSTALL_append = " ${@['', 'base-passwd shadow'][bool(d.getVar('EXTRA_USERS_PARAMS'))]}"

 # Image level user / group settings
 ROOTFS_POSTPROCESS_COMMAND_append = " set_user_group;"
diff --git a/meta/classes/fontcache.bbclass b/meta/classes/fontcache.bbclass
index 8ebdfc4f5c..d047a79aa6 100644
--- a/meta/classes/fontcache.bbclass
+++ b/meta/classes/fontcache.bbclass
@@ -30,26 +30,26 @@ fi
 }

 python () {
-    font_pkgs = d.getVar('FONT_PACKAGES', True).split()
-    deps = d.getVar("FONT_EXTRA_RDEPENDS", True)
+    font_pkgs = d.getVar('FONT_PACKAGES').split()
+    deps = d.getVar("FONT_EXTRA_RDEPENDS")

     for pkg in font_pkgs:
         if deps: d.appendVar('RDEPENDS_' + pkg, ' '+deps)
 }

 python add_fontcache_postinsts() {
-    for pkg in d.getVar('FONT_PACKAGES', True).split():
+    for pkg in d.getVar('FONT_PACKAGES').split():
         bb.note("adding fonts postinst and postrm scripts to %s" % pkg)
-        postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
+        postinst = d.getVar('pkg_postinst_%s' % pkg) or d.getVar('pkg_postinst')
         if not postinst:
             postinst = '#!/bin/sh\n'
-        postinst += d.getVar('fontcache_common', True)
+        postinst += d.getVar('fontcache_common')
         d.setVar('pkg_postinst_%s' % pkg, postinst)

-        postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True)
+        postrm = d.getVar('pkg_postrm_%s' % pkg) or d.getVar('pkg_postrm')
         if not postrm:
             postrm = '#!/bin/sh\n'
-        postrm += d.getVar('fontcache_common', True)
+        postrm += d.getVar('fontcache_common')
         d.setVar('pkg_postrm_%s' % pkg, postrm)
 }

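fontcache is the first of several classes in this patch (gconf, gio-module-cache, gsettings, gtk-icon-cache, gtk-immodules-cache) that inject package scriptlets the same way. A condensed sketch of the shared pattern (the helper is illustrative, not in the tree):

def inject_scriptlet(d, pkg, script, body_var):
    # append the class's common shell body to any existing scriptlet,
    # creating a bare '#!/bin/sh' header when none is defined
    current = d.getVar('%s_%s' % (script, pkg)) or d.getVar(script)
    if not current:
        current = '#!/bin/sh\n'
    current += d.getVar(body_var)
    d.setVar('%s_%s' % (script, pkg), current)

# e.g. inject_scriptlet(d, pkg, 'pkg_postinst', 'fontcache_common')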
diff --git a/meta/classes/fs-uuid.bbclass b/meta/classes/fs-uuid.bbclass
index bd2613cf10..313c5a3597 100644
--- a/meta/classes/fs-uuid.bbclass
+++ b/meta/classes/fs-uuid.bbclass
@@ -3,7 +3,7 @@
 # on ext file systems and depends on tune2fs.
 def get_rootfs_uuid(d):
     import subprocess
-    rootfs = d.getVar('ROOTFS', True)
+    rootfs = d.getVar('ROOTFS')
     output = subprocess.check_output(['tune2fs', '-l', rootfs])
     for line in output.split('\n'):
         if line.startswith('Filesystem UUID:'):
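One caveat in the function above, independent of this patch: on Python 3, subprocess.check_output() returns bytes, so output.split('\n') would raise a TypeError; decoding first avoids it. A hedged sketch of the fix:

output = subprocess.check_output(['tune2fs', '-l', rootfs]).decode('utf-8')
for line in output.split('\n'):
    if line.startswith('Filesystem UUID:'):
        # UUID extraction continues as in the class
        ...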
diff --git a/meta/classes/gconf.bbclass b/meta/classes/gconf.bbclass
index d7afa7282f..d07beadc95 100644
--- a/meta/classes/gconf.bbclass
+++ b/meta/classes/gconf.bbclass
@@ -42,8 +42,8 @@ done

 python populate_packages_append () {
     import re
-    packages = d.getVar('PACKAGES', True).split()
-    pkgdest = d.getVar('PKGDEST', True)
+    packages = d.getVar('PACKAGES').split()
+    pkgdest = d.getVar('PKGDEST')

     for pkg in packages:
         schema_dir = '%s/%s/etc/gconf/schemas' % (pkgdest, pkg)
@@ -56,15 +56,15 @@ python populate_packages_append () {
         if schemas != []:
             bb.note("adding gconf postinst and prerm scripts to %s" % pkg)
             d.setVar('SCHEMA_FILES', " ".join(schemas))
-            postinst = d.getVar('pkg_postinst_%s' % pkg, True)
+            postinst = d.getVar('pkg_postinst_%s' % pkg)
             if not postinst:
                 postinst = '#!/bin/sh\n'
-            postinst += d.getVar('gconf_postinst', True)
+            postinst += d.getVar('gconf_postinst')
             d.setVar('pkg_postinst_%s' % pkg, postinst)
-            prerm = d.getVar('pkg_prerm_%s' % pkg, True)
+            prerm = d.getVar('pkg_prerm_%s' % pkg)
             if not prerm:
                 prerm = '#!/bin/sh\n'
-            prerm += d.getVar('gconf_prerm', True)
+            prerm += d.getVar('gconf_prerm')
             d.setVar('pkg_prerm_%s' % pkg, prerm)
             d.appendVar("RDEPENDS_%s" % pkg, ' ' + d.getVar('MLPREFIX', False) + 'gconf')
 }
diff --git a/meta/classes/gettext.bbclass b/meta/classes/gettext.bbclass
index 03b89b2455..0be14246bf 100644
--- a/meta/classes/gettext.bbclass
+++ b/meta/classes/gettext.bbclass
@@ -1,15 +1,15 @@
 def gettext_dependencies(d):
-    if d.getVar('INHIBIT_DEFAULT_DEPS', True) and not oe.utils.inherits(d, 'cross-canadian'):
+    if d.getVar('INHIBIT_DEFAULT_DEPS') and not oe.utils.inherits(d, 'cross-canadian'):
         return ""
-    if d.getVar('USE_NLS', True) == 'no':
+    if d.getVar('USE_NLS') == 'no':
         return "gettext-minimal-native"
     return d.getVar('DEPENDS_GETTEXT', False)

 def gettext_oeconf(d):
-    if d.getVar('USE_NLS', True) == 'no':
+    if d.getVar('USE_NLS') == 'no':
         return '--disable-nls'
     # Remove the NLS bits if USE_NLS is no or INHIBIT_DEFAULT_DEPS is set
-    if d.getVar('INHIBIT_DEFAULT_DEPS', True) and not oe.utils.inherits(d, 'cross-canadian'):
+    if d.getVar('INHIBIT_DEFAULT_DEPS') and not oe.utils.inherits(d, 'cross-canadian'):
         return '--disable-nls'
     return "--enable-nls"

diff --git a/meta/classes/gio-module-cache.bbclass b/meta/classes/gio-module-cache.bbclass
index 91461b11e7..39b7bef720 100644
--- a/meta/classes/gio-module-cache.bbclass
+++ b/meta/classes/gio-module-cache.bbclass
@@ -17,21 +17,21 @@ fi
 }

 python populate_packages_append () {
-    packages = d.getVar('GIO_MODULE_PACKAGES', True).split()
+    packages = d.getVar('GIO_MODULE_PACKAGES').split()

     for pkg in packages:
         bb.note("adding gio-module-cache postinst and postrm scripts to %s" % pkg)

-        postinst = d.getVar('pkg_postinst_%s' % pkg, True)
+        postinst = d.getVar('pkg_postinst_%s' % pkg)
         if not postinst:
             postinst = '#!/bin/sh\n'
-        postinst += d.getVar('gio_module_cache_common', True)
+        postinst += d.getVar('gio_module_cache_common')
         d.setVar('pkg_postinst_%s' % pkg, postinst)

-        postrm = d.getVar('pkg_postrm_%s' % pkg, True)
+        postrm = d.getVar('pkg_postrm_%s' % pkg)
         if not postrm:
             postrm = '#!/bin/sh\n'
-        postrm += d.getVar('gio_module_cache_common', True)
+        postrm += d.getVar('gio_module_cache_common')
         d.setVar('pkg_postrm_%s' % pkg, postrm)
 }

diff --git a/meta/classes/grub-efi.bbclass b/meta/classes/grub-efi.bbclass
index 17417ba5d9..3dc9146d90 100644
--- a/meta/classes/grub-efi.bbclass
+++ b/meta/classes/grub-efi.bbclass
@@ -72,14 +72,14 @@ efi_hddimg_populate() {
 python build_efi_cfg() {
     import sys

-    workdir = d.getVar('WORKDIR', True)
+    workdir = d.getVar('WORKDIR')
     if not workdir:
         bb.error("WORKDIR not defined, unable to package")
         return

-    gfxserial = d.getVar('GRUB_GFXSERIAL', True) or ""
+    gfxserial = d.getVar('GRUB_GFXSERIAL') or ""

-    labels = d.getVar('LABELS', True)
+    labels = d.getVar('LABELS')
     if not labels:
         bb.debug(1, "LABELS not defined, nothing to do")
         return
@@ -88,7 +88,7 @@ python build_efi_cfg() {
         bb.debug(1, "No labels, nothing to do")
         return

-    cfile = d.getVar('GRUB_CFG', True)
+    cfile = d.getVar('GRUB_CFG')
     if not cfile:
         bb.fatal('Unable to read GRUB_CFG')

@@ -99,33 +99,33 @@ python build_efi_cfg() {

     cfgfile.write('# Automatically created by OE\n')

-    opts = d.getVar('GRUB_OPTS', True)
+    opts = d.getVar('GRUB_OPTS')
     if opts:
         for opt in opts.split(';'):
             cfgfile.write('%s\n' % opt)

     cfgfile.write('default=%s\n' % (labels.split()[0]))

-    timeout = d.getVar('GRUB_TIMEOUT', True)
+    timeout = d.getVar('GRUB_TIMEOUT')
     if timeout:
         cfgfile.write('timeout=%s\n' % timeout)
     else:
         cfgfile.write('timeout=50\n')

-    root = d.getVar('GRUB_ROOT', True)
+    root = d.getVar('GRUB_ROOT')
     if not root:
         bb.fatal('GRUB_ROOT not defined')

     if gfxserial == "1":
         btypes = [ [ " graphics console", "" ],
-                   [ " serial console", d.getVar('GRUB_SERIAL', True) or "" ] ]
+                   [ " serial console", d.getVar('GRUB_SERIAL') or "" ] ]
     else:
         btypes = [ [ "", "" ] ]

     for label in labels.split():
         localdata = d.createCopy()

-        overrides = localdata.getVar('OVERRIDES', True)
+        overrides = localdata.getVar('OVERRIDES')
         if not overrides:
             bb.fatal('OVERRIDES not defined')

@@ -141,8 +141,8 @@ python build_efi_cfg() {

             cfgfile.write(' %s' % replace_rootfs_uuid(d, root))

-            append = localdata.getVar('APPEND', True)
-            initrd = localdata.getVar('INITRD', True)
+            append = localdata.getVar('APPEND')
+            initrd = localdata.getVar('INITRD')

             if append:
                 append = replace_rootfs_uuid(d, append)
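To see what build_efi_cfg emits, here is a minimal sketch of the header logic visible above, with the datastore lookups replaced by plain arguments (illustrative only, not part of the class):

def write_grub_header(cfgfile, labels, opts=None, timeout=None):
    # the first label becomes the default entry; timeout falls back to 50
    cfgfile.write('# Automatically created by OE\n')
    if opts:
        for opt in opts.split(';'):
            cfgfile.write('%s\n' % opt)
    cfgfile.write('default=%s\n' % labels.split()[0])
    cfgfile.write('timeout=%s\n' % (timeout if timeout else '50'))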
diff --git a/meta/classes/gsettings.bbclass b/meta/classes/gsettings.bbclass
index dec5abc026..e6d1c8a893 100644
--- a/meta/classes/gsettings.bbclass
+++ b/meta/classes/gsettings.bbclass
@@ -18,20 +18,20 @@ gsettings_postinstrm () {
 }

 python populate_packages_append () {
-    pkg = d.getVar('PN', True)
+    pkg = d.getVar('PN')
     bb.note("adding gsettings postinst scripts to %s" % pkg)

-    postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
+    postinst = d.getVar('pkg_postinst_%s' % pkg) or d.getVar('pkg_postinst')
     if not postinst:
         postinst = '#!/bin/sh\n'
-    postinst += d.getVar('gsettings_postinstrm', True)
+    postinst += d.getVar('gsettings_postinstrm')
     d.setVar('pkg_postinst_%s' % pkg, postinst)

     bb.note("adding gsettings postrm scripts to %s" % pkg)

-    postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True)
+    postrm = d.getVar('pkg_postrm_%s' % pkg) or d.getVar('pkg_postrm')
     if not postrm:
         postrm = '#!/bin/sh\n'
-    postrm += d.getVar('gsettings_postinstrm', True)
+    postrm += d.getVar('gsettings_postinstrm')
     d.setVar('pkg_postrm_%s' % pkg, postrm)
 }
diff --git a/meta/classes/gtk-icon-cache.bbclass b/meta/classes/gtk-icon-cache.bbclass
index 0f1052b08c..c5d8d7cfed 100644
--- a/meta/classes/gtk-icon-cache.bbclass
+++ b/meta/classes/gtk-icon-cache.bbclass
@@ -35,11 +35,11 @@ fi
 }

 python populate_packages_append () {
-    packages = d.getVar('PACKAGES', True).split()
-    pkgdest = d.getVar('PKGDEST', True)
+    packages = d.getVar('PACKAGES').split()
+    pkgdest = d.getVar('PKGDEST')

     for pkg in packages:
-        icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, d.getVar('datadir', True))
+        icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, d.getVar('datadir'))
         if not os.path.exists(icon_dir):
             continue

@@ -49,16 +49,16 @@ python populate_packages_append () {

         bb.note("adding gtk-icon-cache postinst and postrm scripts to %s" % pkg)

-        postinst = d.getVar('pkg_postinst_%s' % pkg, True)
+        postinst = d.getVar('pkg_postinst_%s' % pkg)
         if not postinst:
             postinst = '#!/bin/sh\n'
-        postinst += d.getVar('gtk_icon_cache_postinst', True)
+        postinst += d.getVar('gtk_icon_cache_postinst')
         d.setVar('pkg_postinst_%s' % pkg, postinst)

-        postrm = d.getVar('pkg_postrm_%s' % pkg, True)
+        postrm = d.getVar('pkg_postrm_%s' % pkg)
         if not postrm:
             postrm = '#!/bin/sh\n'
-        postrm += d.getVar('gtk_icon_cache_postrm', True)
+        postrm += d.getVar('gtk_icon_cache_postrm')
         d.setVar('pkg_postrm_%s' % pkg, postrm)
 }

diff --git a/meta/classes/gtk-immodules-cache.bbclass b/meta/classes/gtk-immodules-cache.bbclass
index ebbc9dea89..baea95959b 100644
--- a/meta/classes/gtk-immodules-cache.bbclass
+++ b/meta/classes/gtk-immodules-cache.bbclass
@@ -61,21 +61,21 @@ fi
 }

 python populate_packages_append () {
-    gtkimmodules_pkgs = d.getVar('GTKIMMODULES_PACKAGES', True).split()
+    gtkimmodules_pkgs = d.getVar('GTKIMMODULES_PACKAGES').split()

     for pkg in gtkimmodules_pkgs:
         bb.note("adding gtk-immodule-cache postinst and postrm scripts to %s" % pkg)

-        postinst = d.getVar('pkg_postinst_%s' % pkg, True)
+        postinst = d.getVar('pkg_postinst_%s' % pkg)
         if not postinst:
             postinst = '#!/bin/sh\n'
-        postinst += d.getVar('gtk_immodule_cache_postinst', True)
+        postinst += d.getVar('gtk_immodule_cache_postinst')
         d.setVar('pkg_postinst_%s' % pkg, postinst)

-        postrm = d.getVar('pkg_postrm_%s' % pkg, True)
+        postrm = d.getVar('pkg_postrm_%s' % pkg)
         if not postrm:
             postrm = '#!/bin/sh\n'
-        postrm += d.getVar('gtk_immodule_cache_postrm', True)
+        postrm += d.getVar('gtk_immodule_cache_postrm')
         d.setVar('pkg_postrm_%s' % pkg, postrm)
 }

diff --git a/meta/classes/icecc.bbclass b/meta/classes/icecc.bbclass
index a837894150..8a351cf3b8 100644
--- a/meta/classes/icecc.bbclass
+++ b/meta/classes/icecc.bbclass
@@ -100,7 +100,7 @@ def use_icecc(bb,d):
     if icecc_is_allarch(bb, d):
         return "no"

-    pn = d.getVar('PN', True)
+    pn = d.getVar('PN')

     system_class_blacklist = []
     user_class_blacklist = (d.getVar('ICECC_USER_CLASS_BL', False) or "none").split()
@@ -139,7 +139,7 @@ def use_icecc(bb,d):
     return "yes"

 def icecc_is_allarch(bb, d):
-    return d.getVar("PACKAGE_ARCH", True) == "all" or bb.data.inherits_class('allarch', d)
+    return d.getVar("PACKAGE_ARCH") == "all" or bb.data.inherits_class('allarch', d)

 def icecc_is_kernel(bb, d):
     return \
diff --git a/meta/classes/image-buildinfo.bbclass b/meta/classes/image-buildinfo.bbclass
index da1edf7496..3b79de5ad8 100644
--- a/meta/classes/image-buildinfo.bbclass
+++ b/meta/classes/image-buildinfo.bbclass
@@ -18,7 +18,7 @@ def image_buildinfo_outputvars(vars, listvars, d):
     listvars = listvars.split()
     ret = ""
     for var in vars:
-        value = d.getVar(var, True) or ""
+        value = d.getVar(var) or ""
         if (d.getVarFlag(var, 'type', True) == "list"):
             value = oe.utils.squashspaces(value)
         ret += "%s = %s\n" % (var, value)
@@ -42,7 +42,7 @@ def get_layer_git_status(path):

 # Returns layer revisions along with their respective status
 def get_layer_revs(d):
-    layers = (d.getVar("BBLAYERS", True) or "").split()
+    layers = (d.getVar("BBLAYERS") or "").split()
     medadata_revs = ["%-17s = %s:%s %s" % (os.path.basename(i), \
         base_get_metadata_git_branch(i, None).strip(), \
         base_get_metadata_git_revision(i, None), \
@@ -52,11 +52,11 @@ def get_layer_revs(d):

 def buildinfo_target(d):
     # Get context
-    if d.getVar('BB_WORKERCONTEXT', True) != '1':
+    if d.getVar('BB_WORKERCONTEXT') != '1':
         return ""
     # Single and list variables to be read
-    vars = (d.getVar("IMAGE_BUILDINFO_VARS", True) or "")
-    listvars = (d.getVar("IMAGE_BUILDINFO_LVARS", True) or "")
+    vars = (d.getVar("IMAGE_BUILDINFO_VARS") or "")
+    listvars = (d.getVar("IMAGE_BUILDINFO_LVARS") or "")
     return image_buildinfo_outputvars(vars, listvars, d)

 # Write build information to target filesystem
diff --git a/meta/classes/image-live.bbclass b/meta/classes/image-live.bbclass
index 4a634dca96..a3d1b4e567 100644
--- a/meta/classes/image-live.bbclass
+++ b/meta/classes/image-live.bbclass
@@ -51,8 +51,8 @@ IMAGE_TYPEDEP_hddimg = "ext4"
 IMAGE_TYPES_MASKED += "live hddimg iso"

 python() {
-    image_b = d.getVar('IMAGE_BASENAME', True)
-    initrd_i = d.getVar('INITRD_IMAGE_LIVE', True)
+    image_b = d.getVar('IMAGE_BASENAME')
+    initrd_i = d.getVar('INITRD_IMAGE_LIVE')
     if image_b == initrd_i:
         bb.error('INITRD_IMAGE_LIVE %s cannot use image live, hddimg or iso.' % initrd_i)
         bb.fatal('Check IMAGE_FSTYPES and INITRAMFS_FSTYPES settings.')
@@ -264,9 +264,9 @@ build_hddimg() {

 python do_bootimg() {
     set_live_vm_vars(d, 'LIVE')
-    if d.getVar("PCBIOS", True) == "1":
+    if d.getVar("PCBIOS") == "1":
         bb.build.exec_func('build_syslinux_cfg', d)
-    if d.getVar("EFI", True) == "1":
+    if d.getVar("EFI") == "1":
         bb.build.exec_func('build_efi_cfg', d)
     bb.build.exec_func('build_hddimg', d)
     bb.build.exec_func('build_iso', d)
diff --git a/meta/classes/image-vm.bbclass b/meta/classes/image-vm.bbclass
index 2f35d6b4d1..35c9244e9b 100644
--- a/meta/classes/image-vm.bbclass
+++ b/meta/classes/image-vm.bbclass
@@ -112,9 +112,9 @@ build_boot_dd() {
 python do_bootdirectdisk() {
     validate_disk_signature(d)
     set_live_vm_vars(d, 'VM')
-    if d.getVar("PCBIOS", True) == "1":
+    if d.getVar("PCBIOS") == "1":
         bb.build.exec_func('build_syslinux_cfg', d)
-    if d.getVar("EFI", True) == "1":
+    if d.getVar("EFI") == "1":
         bb.build.exec_func('build_efi_cfg', d)
     bb.build.exec_func('build_boot_dd', d)
 }
@@ -132,7 +132,7 @@ def generate_disk_signature():
 def validate_disk_signature(d):
     import re

-    disk_signature = d.getVar("DISK_SIGNATURE", True)
+    disk_signature = d.getVar("DISK_SIGNATURE")

     if not re.match(r'^[0-9a-fA-F]{8}$', disk_signature):
         bb.fatal("DISK_SIGNATURE '%s' must be an 8 digit hex string" % disk_signature)
@@ -158,11 +158,11 @@ create_qcow2_image () {
 }

 python do_vmimg() {
-    if 'vmdk' in d.getVar('IMAGE_FSTYPES', True):
+    if 'vmdk' in d.getVar('IMAGE_FSTYPES'):
         bb.build.exec_func('create_vmdk_image', d)
-    if 'vdi' in d.getVar('IMAGE_FSTYPES', True):
+    if 'vdi' in d.getVar('IMAGE_FSTYPES'):
         bb.build.exec_func('create_vdi_image', d)
-    if 'qcow2' in d.getVar('IMAGE_FSTYPES', True):
+    if 'qcow2' in d.getVar('IMAGE_FSTYPES'):
         bb.build.exec_func('create_qcow2_image', d)
 }

diff --git a/meta/classes/image.bbclass b/meta/classes/image.bbclass
index e63f6a3bfe..28bff9e75a 100644
--- a/meta/classes/image.bbclass
+++ b/meta/classes/image.bbclass
@@ -2,7 +2,7 @@ inherit rootfs_${IMAGE_PKGTYPE}
2 2
3# Only Linux SDKs support populate_sdk_ext, fall back to populate_sdk 3# Only Linux SDKs support populate_sdk_ext, fall back to populate_sdk
4# in the non-Linux SDK_OS case, such as mingw32 4# in the non-Linux SDK_OS case, such as mingw32
5SDKEXTCLASS ?= "${@['populate_sdk', 'populate_sdk_ext']['linux' in d.getVar("SDK_OS", True)]}" 5SDKEXTCLASS ?= "${@['populate_sdk', 'populate_sdk_ext']['linux' in d.getVar("SDK_OS")]}"
6inherit ${SDKEXTCLASS} 6inherit ${SDKEXTCLASS}
7 7
8TOOLCHAIN_TARGET_TASK += "${PACKAGE_INSTALL}" 8TOOLCHAIN_TARGET_TASK += "${PACKAGE_INSTALL}"
@@ -133,7 +133,7 @@ def build_live(d):
133 if bb.utils.contains("IMAGE_FSTYPES", "live", "live", "0", d) == "0": # live is not set but hob might set iso or hddimg 133 if bb.utils.contains("IMAGE_FSTYPES", "live", "live", "0", d) == "0": # live is not set but hob might set iso or hddimg
134 d.setVar('NOISO', bb.utils.contains('IMAGE_FSTYPES', "iso", "0", "1", d)) 134 d.setVar('NOISO', bb.utils.contains('IMAGE_FSTYPES', "iso", "0", "1", d))
135 d.setVar('NOHDD', bb.utils.contains('IMAGE_FSTYPES', "hddimg", "0", "1", d)) 135 d.setVar('NOHDD', bb.utils.contains('IMAGE_FSTYPES', "hddimg", "0", "1", d))
136 if d.getVar('NOISO', True) == "0" or d.getVar('NOHDD', True) == "0": 136 if d.getVar('NOISO') == "0" or d.getVar('NOHDD') == "0":
137 return "image-live" 137 return "image-live"
138 return "" 138 return ""
139 return "image-live" 139 return "image-live"
@@ -145,7 +145,7 @@ IMAGE_TYPE_vm = '${@bb.utils.contains_any("IMAGE_FSTYPES", ["vmdk", "vdi", "qcow
145inherit ${IMAGE_TYPE_vm} 145inherit ${IMAGE_TYPE_vm}
146 146
147def build_uboot(d): 147def build_uboot(d):
148 if 'u-boot' in (d.getVar('IMAGE_FSTYPES', True) or ''): 148 if 'u-boot' in (d.getVar('IMAGE_FSTYPES') or ''):
149 return "image_types_uboot" 149 return "image_types_uboot"
150 else: 150 else:
151 return "" 151 return ""
@@ -158,7 +158,7 @@ python () {
158 d.appendVarFlag('do_rootfs', 'depends', deps) 158 d.appendVarFlag('do_rootfs', 'depends', deps)
159 159
160 deps = "" 160 deps = ""
161 for dep in (d.getVar('EXTRA_IMAGEDEPENDS', True) or "").split(): 161 for dep in (d.getVar('EXTRA_IMAGEDEPENDS') or "").split():
162 deps += " %s:do_populate_sysroot" % dep 162 deps += " %s:do_populate_sysroot" % dep
163 d.appendVarFlag('do_build', 'depends', deps) 163 d.appendVarFlag('do_build', 'depends', deps)
164 164
@@ -167,22 +167,22 @@ python () {
167 features = set(oe.data.typed_value('IMAGE_FEATURES', d)) 167 features = set(oe.data.typed_value('IMAGE_FEATURES', d))
168 remain_features = features.copy() 168 remain_features = features.copy()
169 for feature in features: 169 for feature in features:
170 replaces = set((d.getVar("IMAGE_FEATURES_REPLACES_%s" % feature, True) or "").split()) 170 replaces = set((d.getVar("IMAGE_FEATURES_REPLACES_%s" % feature) or "").split())
171 remain_features -= replaces 171 remain_features -= replaces
172 172
173 #Check for conflict image features 173 #Check for conflict image features
174 for feature in remain_features: 174 for feature in remain_features:
175 conflicts = set((d.getVar("IMAGE_FEATURES_CONFLICTS_%s" % feature, True) or "").split()) 175 conflicts = set((d.getVar("IMAGE_FEATURES_CONFLICTS_%s" % feature) or "").split())
176 temp = conflicts & remain_features 176 temp = conflicts & remain_features
177 if temp: 177 if temp:
178 bb.fatal("%s contains conflicting IMAGE_FEATURES %s %s" % (d.getVar('PN', True), feature, ' '.join(list(temp)))) 178 bb.fatal("%s contains conflicting IMAGE_FEATURES %s %s" % (d.getVar('PN'), feature, ' '.join(list(temp))))
179 179
180 d.setVar('IMAGE_FEATURES', ' '.join(sorted(list(remain_features)))) 180 d.setVar('IMAGE_FEATURES', ' '.join(sorted(list(remain_features))))
181 181
182 check_image_features(d) 182 check_image_features(d)
183 initramfs_image = d.getVar('INITRAMFS_IMAGE', True) or "" 183 initramfs_image = d.getVar('INITRAMFS_IMAGE') or ""
184 if initramfs_image != "": 184 if initramfs_image != "":
185 d.appendVarFlag('do_build', 'depends', " %s:do_bundle_initramfs" % d.getVar('PN', True)) 185 d.appendVarFlag('do_build', 'depends', " %s:do_bundle_initramfs" % d.getVar('PN'))
186 d.appendVarFlag('do_bundle_initramfs', 'depends', " %s:do_image_complete" % initramfs_image) 186 d.appendVarFlag('do_bundle_initramfs', 'depends', " %s:do_image_complete" % initramfs_image)
187} 187}
188 188
@@ -194,7 +194,7 @@ IMAGE_POSTPROCESS_COMMAND ?= ""
194# some default locales 194# some default locales
195IMAGE_LINGUAS ?= "de-de fr-fr en-gb" 195IMAGE_LINGUAS ?= "de-de fr-fr en-gb"
196 196
197LINGUAS_INSTALL ?= "${@" ".join(map(lambda s: "locale-base-%s" % s, d.getVar('IMAGE_LINGUAS', True).split()))}" 197LINGUAS_INSTALL ?= "${@" ".join(map(lambda s: "locale-base-%s" % s, d.getVar('IMAGE_LINGUAS').split()))}"
198 198
199# Prefer image, but use the fallback files for lookups if the image ones 199# Prefer image, but use the fallback files for lookups if the image ones
200# aren't yet available. 200# aren't yet available.
@@ -229,20 +229,20 @@ fakeroot python do_rootfs () {
     progress_reporter.next_stage()
 
     # Handle package exclusions
-    excl_pkgs = d.getVar("PACKAGE_EXCLUDE", True).split()
-    inst_pkgs = d.getVar("PACKAGE_INSTALL", True).split()
-    inst_attempt_pkgs = d.getVar("PACKAGE_INSTALL_ATTEMPTONLY", True).split()
+    excl_pkgs = d.getVar("PACKAGE_EXCLUDE").split()
+    inst_pkgs = d.getVar("PACKAGE_INSTALL").split()
+    inst_attempt_pkgs = d.getVar("PACKAGE_INSTALL_ATTEMPTONLY").split()
 
     d.setVar('PACKAGE_INSTALL_ORIG', ' '.join(inst_pkgs))
     d.setVar('PACKAGE_INSTALL_ATTEMPTONLY', ' '.join(inst_attempt_pkgs))
 
     for pkg in excl_pkgs:
         if pkg in inst_pkgs:
-            bb.warn("Package %s, set to be excluded, is in %s PACKAGE_INSTALL (%s). It will be removed from the list." % (pkg, d.getVar('PN', True), inst_pkgs))
+            bb.warn("Package %s, set to be excluded, is in %s PACKAGE_INSTALL (%s). It will be removed from the list." % (pkg, d.getVar('PN'), inst_pkgs))
             inst_pkgs.remove(pkg)
 
         if pkg in inst_attempt_pkgs:
-            bb.warn("Package %s, set to be excluded, is in %s PACKAGE_INSTALL_ATTEMPTONLY (%s). It will be removed from the list." % (pkg, d.getVar('PN', True), inst_pkgs))
+            bb.warn("Package %s, set to be excluded, is in %s PACKAGE_INSTALL_ATTEMPTONLY (%s). It will be removed from the list." % (pkg, d.getVar('PN'), inst_pkgs))
             inst_attempt_pkgs.remove(pkg)
 
     d.setVar("PACKAGE_INSTALL", ' '.join(inst_pkgs))
@@ -252,7 +252,7 @@ fakeroot python do_rootfs () {
     # We have to delay the runtime_mapping_rename until just before rootfs runs
     # otherwise, the multilib renaming could step in and squash any fixups that
     # may have occurred.
-    pn = d.getVar('PN', True)
+    pn = d.getVar('PN')
     runtime_mapping_rename("PACKAGE_INSTALL", pn, d)
     runtime_mapping_rename("PACKAGE_INSTALL_ATTEMPTONLY", pn, d)
     runtime_mapping_rename("BAD_RECOMMENDATIONS", pn, d)
@@ -275,7 +275,7 @@ addtask rootfs before do_build
 fakeroot python do_image () {
     from oe.utils import execute_pre_post_process
 
-    pre_process_cmds = d.getVar("IMAGE_PREPROCESS_COMMAND", True)
+    pre_process_cmds = d.getVar("IMAGE_PREPROCESS_COMMAND")
 
     execute_pre_post_process(d, pre_process_cmds)
 }
@@ -286,7 +286,7 @@ addtask do_image after do_rootfs before do_build
 fakeroot python do_image_complete () {
     from oe.utils import execute_pre_post_process
 
-    post_process_cmds = d.getVar("IMAGE_POSTPROCESS_COMMAND", True)
+    post_process_cmds = d.getVar("IMAGE_POSTPROCESS_COMMAND")
 
     execute_pre_post_process(d, post_process_cmds)
 }
@@ -309,7 +309,7 @@ addtask do_image_complete after do_image before do_build
 fakeroot python do_image_qa () {
     from oe.utils import ImageQAFailed
 
-    qa_cmds = (d.getVar('IMAGE_QA_COMMANDS', True) or '').split()
+    qa_cmds = (d.getVar('IMAGE_QA_COMMANDS') or '').split()
     qamsg = ""
 
     for cmd in qa_cmds:
@@ -324,7 +324,7 @@ fakeroot python do_image_qa () {
             qamsg = qamsg + '\n'
 
     if qamsg:
-        imgname = d.getVar('IMAGE_NAME', True)
+        imgname = d.getVar('IMAGE_NAME')
         bb.fatal("QA errors found whilst validating image: %s\n%s" % (imgname, qamsg))
 }
 addtask do_image_qa after do_image_complete before do_build
@@ -334,17 +334,17 @@ addtask do_image_qa after do_image_complete before do_build
 # to tmp/sysroots/<machine>/imgdata/<image>.env
 #
 python do_rootfs_wicenv () {
-    wicvars = d.getVar('WICVARS', True)
+    wicvars = d.getVar('WICVARS')
     if not wicvars:
         return
 
-    stdir = d.getVar('STAGING_DIR_TARGET', True)
+    stdir = d.getVar('STAGING_DIR_TARGET')
     outdir = os.path.join(stdir, 'imgdata')
     bb.utils.mkdirhier(outdir)
-    basename = d.getVar('IMAGE_BASENAME', True)
+    basename = d.getVar('IMAGE_BASENAME')
     with open(os.path.join(outdir, basename) + '.env', 'w') as envf:
         for var in wicvars.split():
-            value = d.getVar(var, True)
+            value = d.getVar(var)
             if value:
                 envf.write('%s="%s"\n' % (var, value.strip()))
 }
@@ -357,7 +357,7 @@ def setup_debugfs_variables(d):
     d.appendVar('IMAGE_LINK_NAME', '-dbg')
     d.appendVar('IMAGE_NAME','-dbg')
     d.setVar('IMAGE_BUILDING_DEBUGFS', 'true')
-    debugfs_image_fstypes = d.getVar('IMAGE_FSTYPES_DEBUGFS', True)
+    debugfs_image_fstypes = d.getVar('IMAGE_FSTYPES_DEBUGFS')
     if debugfs_image_fstypes:
         d.setVar('IMAGE_FSTYPES', debugfs_image_fstypes)
 
@@ -375,7 +375,7 @@ python () {
     #
     # Without de-duplication, gen_conversion_cmds() below
     # would create the same compression command multiple times.
-    ctypes = set(d.getVar('CONVERSIONTYPES', True).split())
+    ctypes = set(d.getVar('CONVERSIONTYPES').split())
     old_overrides = d.getVar('OVERRIDES', False)
 
     def _image_base_type(type):
@@ -392,11 +392,11 @@ python () {
         return basetype
 
     basetypes = {}
-    alltypes = d.getVar('IMAGE_FSTYPES', True).split()
+    alltypes = d.getVar('IMAGE_FSTYPES').split()
     typedeps = {}
 
-    if d.getVar('IMAGE_GEN_DEBUGFS', True) == "1":
-        debugfs_fstypes = d.getVar('IMAGE_FSTYPES_DEBUGFS', True).split()
+    if d.getVar('IMAGE_GEN_DEBUGFS') == "1":
+        debugfs_fstypes = d.getVar('IMAGE_FSTYPES_DEBUGFS').split()
         for t in debugfs_fstypes:
             alltypes.append("debugfs_" + t)
 
@@ -411,7 +411,7 @@ python () {
         if t.startswith("debugfs_"):
             t = t[8:]
             debug = "debugfs_"
-        deps = (d.getVar('IMAGE_TYPEDEP_' + t, True) or "").split()
+        deps = (d.getVar('IMAGE_TYPEDEP_' + t) or "").split()
         vardeps.add('IMAGE_TYPEDEP_' + t)
         if baset not in typedeps:
             typedeps[baset] = set()
@@ -431,7 +431,7 @@ python () {
 
     d.appendVarFlag('do_image', 'vardeps', ' '.join(vardeps))
 
-    maskedtypes = (d.getVar('IMAGE_TYPES_MASKED', True) or "").split()
+    maskedtypes = (d.getVar('IMAGE_TYPES_MASKED') or "").split()
     maskedtypes = [dbg + t for t in maskedtypes for dbg in ("", "debugfs_")]
 
     for t in basetypes:
@@ -459,7 +459,7 @@ python () {
         localdata.delVar('DATETIME')
         localdata.delVar('TMPDIR')
 
-        image_cmd = localdata.getVar("IMAGE_CMD", True)
+        image_cmd = localdata.getVar("IMAGE_CMD")
         vardeps.add('IMAGE_CMD_' + realt)
         if image_cmd:
             cmds.append("\t" + image_cmd)
@@ -481,7 +481,7 @@ python () {
                 # Create input image first.
                 gen_conversion_cmds(type)
                 localdata.setVar('type', type)
-                cmd = "\t" + (localdata.getVar("CONVERSION_CMD_" + ctype, True) or localdata.getVar("COMPRESS_CMD_" + ctype, True))
+                cmd = "\t" + (localdata.getVar("CONVERSION_CMD_" + ctype) or localdata.getVar("COMPRESS_CMD_" + ctype))
                 if cmd not in cmds:
                     cmds.append(cmd)
                 vardeps.add('CONVERSION_CMD_' + ctype)
@@ -532,17 +532,17 @@ python () {
 def get_rootfs_size(d):
     import subprocess
 
-    rootfs_alignment = int(d.getVar('IMAGE_ROOTFS_ALIGNMENT', True))
-    overhead_factor = float(d.getVar('IMAGE_OVERHEAD_FACTOR', True))
-    rootfs_req_size = int(d.getVar('IMAGE_ROOTFS_SIZE', True))
-    rootfs_extra_space = eval(d.getVar('IMAGE_ROOTFS_EXTRA_SPACE', True))
-    rootfs_maxsize = d.getVar('IMAGE_ROOTFS_MAXSIZE', True)
-    image_fstypes = d.getVar('IMAGE_FSTYPES', True) or ''
-    initramfs_fstypes = d.getVar('INITRAMFS_FSTYPES', True) or ''
-    initramfs_maxsize = d.getVar('INITRAMFS_MAXSIZE', True)
+    rootfs_alignment = int(d.getVar('IMAGE_ROOTFS_ALIGNMENT'))
+    overhead_factor = float(d.getVar('IMAGE_OVERHEAD_FACTOR'))
+    rootfs_req_size = int(d.getVar('IMAGE_ROOTFS_SIZE'))
+    rootfs_extra_space = eval(d.getVar('IMAGE_ROOTFS_EXTRA_SPACE'))
+    rootfs_maxsize = d.getVar('IMAGE_ROOTFS_MAXSIZE')
+    image_fstypes = d.getVar('IMAGE_FSTYPES') or ''
+    initramfs_fstypes = d.getVar('INITRAMFS_FSTYPES') or ''
+    initramfs_maxsize = d.getVar('INITRAMFS_MAXSIZE')
 
     output = subprocess.check_output(['du', '-ks',
-                                      d.getVar('IMAGE_ROOTFS', True)])
+                                      d.getVar('IMAGE_ROOTFS')])
     size_kb = int(output.split()[0])
     base_size = size_kb * overhead_factor
     base_size = max(base_size, rootfs_req_size) + rootfs_extra_space
@@ -558,7 +558,7 @@ def get_rootfs_size(d):
     # Do not check image size of the debugfs image. This is not supposed
     # to be deployed, etc. so it doesn't make sense to limit the size
     # of the debug.
-    if (d.getVar('IMAGE_BUILDING_DEBUGFS', True) or "") == "true":
+    if (d.getVar('IMAGE_BUILDING_DEBUGFS') or "") == "true":
        return base_size
 
     # Check the rootfs size against IMAGE_ROOTFS_MAXSIZE (if set)
@@ -589,11 +589,11 @@ python set_image_size () {
 #
 python create_symlinks() {
 
-    deploy_dir = d.getVar('IMGDEPLOYDIR', True)
-    img_name = d.getVar('IMAGE_NAME', True)
-    link_name = d.getVar('IMAGE_LINK_NAME', True)
-    manifest_name = d.getVar('IMAGE_MANIFEST', True)
-    taskname = d.getVar("BB_CURRENTTASK", True)
+    deploy_dir = d.getVar('IMGDEPLOYDIR')
+    img_name = d.getVar('IMAGE_NAME')
+    link_name = d.getVar('IMAGE_LINK_NAME')
+    manifest_name = d.getVar('IMAGE_MANIFEST')
+    taskname = d.getVar("BB_CURRENTTASK")
     subimages = (d.getVarFlag("do_" + taskname, 'subimages', False) or "").split()
     imgsuffix = d.getVarFlag("do_" + taskname, 'imgsuffix', True) or d.expand("${IMAGE_NAME_SUFFIX}.")
 
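
Every hunk above makes the same substitution: d.getVar(X, True) becomes d.getVar(X), with behaviour unchanged. A minimal runnable sketch of why both spellings are interchangeable, using a stand-in datastore rather than BitBake's real one (the MockData class below is illustrative only, not OE-Core code):

    # MockData stands in for the BitBake datastore so this runs anywhere;
    # the real object also performs ${...} expansion when expand is True.
    class MockData:
        def __init__(self, values):
            self._values = values

        def getVar(self, var, expand=True):
            # expand defaults to True, so the explicit second argument
            # in the old-style calls is redundant.
            return self._values.get(var)

    d = MockData({"IMAGE_FSTYPES": "ext4 tar.bz2"})

    # Old and new call styles return the same value:
    assert d.getVar("IMAGE_FSTYPES", True) == d.getVar("IMAGE_FSTYPES")

    # The "(... or '').split()" idiom seen throughout the hunks guards
    # against unset variables, for which getVar() returns None:
    masked = (d.getVar("IMAGE_TYPES_MASKED") or "").split()
    assert masked == []
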
diff --git a/meta/classes/image_types.bbclass b/meta/classes/image_types.bbclass
index 9d66d7da7a..8f048497a1 100644
--- a/meta/classes/image_types.bbclass
+++ b/meta/classes/image_types.bbclass
@@ -17,20 +17,20 @@ def imagetypes_getdepends(d):
             if ":" not in d:
                 d += ":do_populate_sysroot"
             deps.add(d)
 
-    fstypes = set((d.getVar('IMAGE_FSTYPES', True) or "").split())
-    fstypes |= set((d.getVar('IMAGE_FSTYPES_DEBUGFS', True) or "").split())
+    fstypes = set((d.getVar('IMAGE_FSTYPES') or "").split())
+    fstypes |= set((d.getVar('IMAGE_FSTYPES_DEBUGFS') or "").split())
 
     deps = set()
     for typestring in fstypes:
         types = typestring.split(".")
         basetype, resttypes = types[0], types[1:]
 
-        adddep(d.getVar('IMAGE_DEPENDS_%s' % basetype, True) , deps)
-        for typedepends in (d.getVar("IMAGE_TYPEDEP_%s" % basetype, True) or "").split():
-            adddep(d.getVar('IMAGE_DEPENDS_%s' % typedepends, True) , deps)
+        adddep(d.getVar('IMAGE_DEPENDS_%s' % basetype) , deps)
+        for typedepends in (d.getVar("IMAGE_TYPEDEP_%s" % basetype) or "").split():
+            adddep(d.getVar('IMAGE_DEPENDS_%s' % typedepends) , deps)
         for ctype in resttypes:
-            adddep(d.getVar("CONVERSION_DEPENDS_%s" % ctype, True), deps)
-            adddep(d.getVar("COMPRESS_DEPENDS_%s" % ctype, True), deps)
+            adddep(d.getVar("CONVERSION_DEPENDS_%s" % ctype), deps)
+            adddep(d.getVar("COMPRESS_DEPENDS_%s" % ctype), deps)
 
     # Sort the set so that ordering is consistant
     return " ".join(sorted(deps))
@@ -220,7 +220,7 @@ WKS_FILE_CHECKSUM = "${@'${WKS_FULL_PATH}:%s' % os.path.exists('${WKS_FULL_PATH}
 do_image_wic[file-checksums] += "${WKS_FILE_CHECKSUM}"
 
 python () {
-    if d.getVar('USING_WIC', True) and 'do_bootimg' in d:
+    if d.getVar('USING_WIC') and 'do_bootimg' in d:
         bb.build.addtask('do_image_wic', '', 'do_bootimg', d)
 }
 
@@ -228,7 +228,7 @@ python do_write_wks_template () {
228 """Write out expanded template contents to WKS_FULL_PATH.""" 228 """Write out expanded template contents to WKS_FULL_PATH."""
229 import re 229 import re
230 230
231 template_body = d.getVar('_WKS_TEMPLATE', True) 231 template_body = d.getVar('_WKS_TEMPLATE')
232 232
233 # Remove any remnant variable references left behind by the expansion 233 # Remove any remnant variable references left behind by the expansion
234 # due to undefined variables 234 # due to undefined variables
@@ -240,18 +240,18 @@ python do_write_wks_template () {
     else:
         template_body = new_body
 
-    wks_file = d.getVar('WKS_FULL_PATH', True)
+    wks_file = d.getVar('WKS_FULL_PATH')
     with open(wks_file, 'w') as f:
         f.write(template_body)
 }
 
 python () {
-    if d.getVar('USING_WIC', True):
+    if d.getVar('USING_WIC'):
         wks_file_u = d.getVar('WKS_FULL_PATH', False)
         wks_file = d.expand(wks_file_u)
         base, ext = os.path.splitext(wks_file)
         if ext == '.in' and os.path.exists(wks_file):
-            wks_out_file = os.path.join(d.getVar('WORKDIR', True), os.path.basename(base))
+            wks_out_file = os.path.join(d.getVar('WORKDIR'), os.path.basename(base))
             d.setVar('WKS_FULL_PATH', wks_out_file)
             d.setVar('WKS_TEMPLATE_PATH', wks_file_u)
             d.setVar('WKS_FILE_CHECKSUM', '${WKS_TEMPLATE_PATH}:True')
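
The imagetypes_getdepends() hunk above derives dependencies from compound type strings: the first dot-separated component of an IMAGE_FSTYPES entry is the base image type, and the rest are conversion types. A small sketch of that decomposition (names are illustrative, not OE-Core API):

    # Decompose a compound image type string the way the hunk does:
    # base type first, then the chain of conversion/compression types.
    def split_typestring(typestring):
        types = typestring.split(".")
        basetype, resttypes = types[0], types[1:]
        return basetype, resttypes

    assert split_typestring("ext4") == ("ext4", [])
    assert split_typestring("ext4.gz") == ("ext4", ["gz"])
    assert split_typestring("cpio.gz.u-boot") == ("cpio", ["gz", "u-boot"])
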
diff --git a/meta/classes/insane.bbclass b/meta/classes/insane.bbclass
index 01494e3443..1a742cf6f8 100644
--- a/meta/classes/insane.bbclass
+++ b/meta/classes/insane.bbclass
@@ -179,7 +179,7 @@ def package_qa_get_machine_dict(d):
 
     # Add in any extra user supplied data which may come from a BSP layer, removing the
     # need to always change this class directly
-    extra_machdata = (d.getVar("PACKAGEQA_EXTRA_MACHDEFFUNCS", True) or "").split()
+    extra_machdata = (d.getVar("PACKAGEQA_EXTRA_MACHDEFFUNCS") or "").split()
     for m in extra_machdata:
         call = m + "(machdata, d)"
         locs = { "machdata" : machdata, "d" : d}
@@ -194,23 +194,23 @@ def package_qa_clean_path(path, d, pkg=None):
     TMPDIR is stripped, otherwise PKGDEST/pkg is stripped.
     """
     if pkg:
-        path = path.replace(os.path.join(d.getVar("PKGDEST", True), pkg), "/")
-    return path.replace(d.getVar("TMPDIR", True), "/").replace("//", "/")
+        path = path.replace(os.path.join(d.getVar("PKGDEST"), pkg), "/")
+    return path.replace(d.getVar("TMPDIR"), "/").replace("//", "/")
 
 def package_qa_write_error(type, error, d):
-    logfile = d.getVar('QA_LOGFILE', True)
+    logfile = d.getVar('QA_LOGFILE')
     if logfile:
-        p = d.getVar('P', True)
+        p = d.getVar('P')
         with open(logfile, "a+") as f:
             f.write("%s: %s [%s]\n" % (p, error, type))
 
 def package_qa_handle_error(error_class, error_msg, d):
     package_qa_write_error(error_class, error_msg, d)
-    if error_class in (d.getVar("ERROR_QA", True) or "").split():
+    if error_class in (d.getVar("ERROR_QA") or "").split():
         bb.error("QA Issue: %s [%s]" % (error_msg, error_class))
         d.setVar("QA_SANE", False)
         return False
-    elif error_class in (d.getVar("WARN_QA", True) or "").split():
+    elif error_class in (d.getVar("WARN_QA") or "").split():
         bb.warn("QA Issue: %s [%s]" % (error_msg, error_class))
     else:
         bb.note("QA Issue: %s [%s]" % (error_msg, error_class))
@@ -226,7 +226,7 @@ QAPATHTEST[libexec] = "package_qa_check_libexec"
 def package_qa_check_libexec(path,name, d, elf, messages):
 
     # Skip the case where the default is explicitly /usr/libexec
-    libexec = d.getVar('libexecdir', True)
+    libexec = d.getVar('libexecdir')
     if libexec == "/usr/libexec":
         return True
 
@@ -247,7 +247,7 @@ def package_qa_check_rpath(file,name, d, elf, messages):
     if os.path.islink(file):
         return
 
-    bad_dirs = [d.getVar('BASE_WORKDIR', True), d.getVar('STAGING_DIR_TARGET', True)]
+    bad_dirs = [d.getVar('BASE_WORKDIR'), d.getVar('STAGING_DIR_TARGET')]
 
     phdrs = elf.run_objdump("-p", d)
 
@@ -275,8 +275,8 @@ def package_qa_check_useless_rpaths(file, name, d, elf, messages):
     if os.path.islink(file):
         return
 
-    libdir = d.getVar("libdir", True)
-    base_libdir = d.getVar("base_libdir", True)
+    libdir = d.getVar("libdir")
+    base_libdir = d.getVar("base_libdir")
 
     phdrs = elf.run_objdump("-p", d)
 
@@ -333,11 +333,11 @@ def package_qa_check_libdir(d):
333 """ 333 """
334 import re 334 import re
335 335
336 pkgdest = d.getVar('PKGDEST', True) 336 pkgdest = d.getVar('PKGDEST')
337 base_libdir = d.getVar("base_libdir",True) + os.sep 337 base_libdir = d.getVar("base_libdir",True) + os.sep
338 libdir = d.getVar("libdir", True) + os.sep 338 libdir = d.getVar("libdir") + os.sep
339 libexecdir = d.getVar("libexecdir", True) + os.sep 339 libexecdir = d.getVar("libexecdir") + os.sep
340 exec_prefix = d.getVar("exec_prefix", True) + os.sep 340 exec_prefix = d.getVar("exec_prefix") + os.sep
341 341
342 messages = [] 342 messages = []
343 343
@@ -352,10 +352,10 @@ def package_qa_check_libdir(d):
     # Skip subdirectories for any packages with libdir in INSANE_SKIP
     skippackages = []
     for package in dirs:
-        if 'libdir' in (d.getVar('INSANE_SKIP_' + package, True) or "").split():
+        if 'libdir' in (d.getVar('INSANE_SKIP_' + package) or "").split():
             bb.note("Package %s skipping libdir QA test" % (package))
             skippackages.append(package)
-        elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-file-directory' and package.endswith("-dbg"):
+        elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory' and package.endswith("-dbg"):
             bb.note("Package %s skipping libdir QA test for PACKAGE_DEBUG_SPLIT_STYLE equals debug-file-directory" % (package))
             skippackages.append(package)
     for package in skippackages:
@@ -416,10 +416,10 @@ def package_qa_check_unsafe_references_in_binaries(path, name, d, elf, messages)
 
     if elf:
         import subprocess as sub
-        pn = d.getVar('PN', True)
+        pn = d.getVar('PN')
 
-        exec_prefix = d.getVar('exec_prefix', True)
-        sysroot_path = d.getVar('STAGING_DIR_TARGET', True)
+        exec_prefix = d.getVar('exec_prefix')
+        sysroot_path = d.getVar('STAGING_DIR_TARGET')
         sysroot_path_usr = sysroot_path + exec_prefix
 
         try:
@@ -432,8 +432,8 @@ def package_qa_check_unsafe_references_in_binaries(path, name, d, elf, messages)
         if sysroot_path_usr in ldd_output:
             ldd_output = ldd_output.replace(sysroot_path, "")
 
-            pkgdest = d.getVar('PKGDEST', True)
-            packages = d.getVar('PACKAGES', True)
+            pkgdest = d.getVar('PKGDEST')
+            packages = d.getVar('PACKAGES')
 
             for package in packages.split():
                 short_path = path.replace('%s/%s' % (pkgdest, package), "", 1)
@@ -459,13 +459,13 @@ def package_qa_check_unsafe_references_in_scripts(path, name, d, elf, messages):
     if not elf:
         import stat
         import subprocess
-        pn = d.getVar('PN', True)
+        pn = d.getVar('PN')
 
         # Ensure we're checking an executable script
         statinfo = os.stat(path)
         if bool(statinfo.st_mode & stat.S_IXUSR):
             # grep shell scripts for possible references to /exec_prefix/
-            exec_prefix = d.getVar('exec_prefix', True)
+            exec_prefix = d.getVar('exec_prefix')
             statement = "grep -e '%s/[^ :]\{1,\}/[^ :]\{1,\}' %s > /dev/null" % (exec_prefix, path)
             if subprocess.call(statement, shell=True) == 0:
                 error_msg = pn + ": Found a reference to %s/ in %s" % (exec_prefix, path)
@@ -489,19 +489,19 @@ def unsafe_references_skippable(path, name, d):
         return True
 
     # Skip unusual rootfs layouts which make these tests irrelevant
-    exec_prefix = d.getVar('exec_prefix', True)
+    exec_prefix = d.getVar('exec_prefix')
     if exec_prefix == "":
         return True
 
-    pkgdest = d.getVar('PKGDEST', True)
+    pkgdest = d.getVar('PKGDEST')
     pkgdest = pkgdest + "/" + name
     pkgdest = os.path.abspath(pkgdest)
-    base_bindir = pkgdest + d.getVar('base_bindir', True)
-    base_sbindir = pkgdest + d.getVar('base_sbindir', True)
-    base_libdir = pkgdest + d.getVar('base_libdir', True)
-    bindir = pkgdest + d.getVar('bindir', True)
-    sbindir = pkgdest + d.getVar('sbindir', True)
-    libdir = pkgdest + d.getVar('libdir', True)
+    base_bindir = pkgdest + d.getVar('base_bindir')
+    base_sbindir = pkgdest + d.getVar('base_sbindir')
+    base_libdir = pkgdest + d.getVar('base_libdir')
+    bindir = pkgdest + d.getVar('bindir')
+    sbindir = pkgdest + d.getVar('sbindir')
+    libdir = pkgdest + d.getVar('libdir')
 
     if base_bindir == bindir and base_sbindir == sbindir and base_libdir == libdir:
         return True
@@ -523,13 +523,13 @@ def package_qa_check_arch(path,name,d, elf, messages):
     if not elf:
         return
 
-    target_os = d.getVar('TARGET_OS', True)
-    target_arch = d.getVar('TARGET_ARCH', True)
-    provides = d.getVar('PROVIDES', True)
-    bpn = d.getVar('BPN', True)
+    target_os = d.getVar('TARGET_OS')
+    target_arch = d.getVar('TARGET_ARCH')
+    provides = d.getVar('PROVIDES')
+    bpn = d.getVar('BPN')
 
     if target_arch == "allarch":
-        pn = d.getVar('PN', True)
+        pn = d.getVar('PN')
         package_qa_add_message(messages, "arch", pn + ": Recipe inherits the allarch class, but has packaged architecture-specific binaries")
         return
 
@@ -549,7 +549,7 @@ def package_qa_check_arch(path,name,d, elf, messages):
 
     # Check the architecture and endiannes of the binary
     is_32 = (("virtual/kernel" in provides) or bb.data.inherits_class("module", d)) and \
-            (target_os == "linux-gnux32" or re.match('mips64.*32', d.getVar('DEFAULTTUNE', True)))
+            (target_os == "linux-gnux32" or re.match('mips64.*32', d.getVar('DEFAULTTUNE')))
     if not ((machine == elf.machine()) or is_32):
         package_qa_add_message(messages, "arch", "Architecture did not match (%s, expected %s) on %s" % \
                 (oe.qa.elf_machine_to_string(elf.machine()), oe.qa.elf_machine_to_string(machine), package_qa_clean_path(path,d)))
@@ -608,9 +608,9 @@ def package_qa_hash_style(path, name, d, elf, messages):
     if os.path.islink(path):
         return
 
-    gnu_hash = "--hash-style=gnu" in d.getVar('LDFLAGS', True)
+    gnu_hash = "--hash-style=gnu" in d.getVar('LDFLAGS')
     if not gnu_hash:
-        gnu_hash = "--hash-style=both" in d.getVar('LDFLAGS', True)
+        gnu_hash = "--hash-style=both" in d.getVar('LDFLAGS')
     if not gnu_hash:
         return
 
@@ -649,7 +649,7 @@ def package_qa_check_buildpaths(path, name, d, elf, messages):
     if path.find(name + "/CONTROL/") != -1 or path.find(name + "/DEBIAN/") != -1:
         return
 
-    tmpdir = d.getVar('TMPDIR', True)
+    tmpdir = d.getVar('TMPDIR')
     with open(path, 'rb') as f:
         file_content = f.read().decode('utf-8', errors='ignore')
         if tmpdir in file_content:
@@ -668,8 +668,8 @@ def package_qa_check_xorg_driver_abi(path, name, d, elf, messages):
 
     driverdir = d.expand("${libdir}/xorg/modules/drivers/")
     if driverdir in path and path.endswith(".so"):
-        mlprefix = d.getVar('MLPREFIX', True) or ''
-        for rdep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + name, True) or ""):
+        mlprefix = d.getVar('MLPREFIX') or ''
+        for rdep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + name) or ""):
             if rdep.startswith("%sxorg-abi-" % mlprefix):
                 return
         package_qa_add_message(messages, "xorg-driver-abi", "Package %s contains Xorg driver (%s) but no xorg-abi- dependencies" % (name, os.path.basename(path)))
@@ -692,9 +692,9 @@ def package_qa_check_symlink_to_sysroot(path, name, d, elf, messages):
     if os.path.islink(path):
         target = os.readlink(path)
         if os.path.isabs(target):
-            tmpdir = d.getVar('TMPDIR', True)
+            tmpdir = d.getVar('TMPDIR')
             if target.startswith(tmpdir):
-                trimmed = path.replace(os.path.join (d.getVar("PKGDEST", True), name), "")
+                trimmed = path.replace(os.path.join (d.getVar("PKGDEST"), name), "")
                 package_qa_add_message(messages, "symlink-to-sysroot", "Symlink %s in %s points to TMPDIR" % (trimmed, name))
 
 # Check license variables
@@ -706,17 +706,17 @@ python populate_lic_qa_checksum() {
     import tempfile
     sane = True
 
-    lic_files = d.getVar('LIC_FILES_CHKSUM', True) or ''
-    lic = d.getVar('LICENSE', True)
-    pn = d.getVar('PN', True)
+    lic_files = d.getVar('LIC_FILES_CHKSUM') or ''
+    lic = d.getVar('LICENSE')
+    pn = d.getVar('PN')
 
     if lic == "CLOSED":
         return
 
-    if not lic_files and d.getVar('SRC_URI', True):
+    if not lic_files and d.getVar('SRC_URI'):
         sane = package_qa_handle_error("license-checksum", pn + ": Recipe file fetches files and does not have license file information (LIC_FILES_CHKSUM)", d)
 
-    srcdir = d.getVar('S', True)
+    srcdir = d.getVar('S')
 
     for url in lic_files.split():
         try:
@@ -794,7 +794,7 @@ def package_qa_check_staged(path,d):
794 """ 794 """
795 795
796 sane = True 796 sane = True
797 tmpdir = d.getVar('TMPDIR', True) 797 tmpdir = d.getVar('TMPDIR')
798 workdir = os.path.join(tmpdir, "work") 798 workdir = os.path.join(tmpdir, "work")
799 799
800 if bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d): 800 if bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d):
@@ -845,8 +845,8 @@ def package_qa_walk(warnfuncs, errorfuncs, skip, package, d):
     import oe.qa
 
     #if this will throw an exception, then fix the dict above
-    target_os = d.getVar('TARGET_OS', True)
-    target_arch = d.getVar('TARGET_ARCH', True)
+    target_os = d.getVar('TARGET_OS')
+    target_arch = d.getVar('TARGET_ARCH')
 
     warnings = {}
     errors = {}
@@ -879,7 +879,7 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
     bb.data.update_data(localdata)
 
     # Now check the RDEPENDS
-    rdepends = bb.utils.explode_deps(localdata.getVar('RDEPENDS', True) or "")
+    rdepends = bb.utils.explode_deps(localdata.getVar('RDEPENDS') or "")
 
     # Now do the sanity check!!!
     if "build-deps" not in skip:
@@ -895,7 +895,7 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
                 if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
                     continue
                 if not rdep_data or not 'PN' in rdep_data:
-                    pkgdata_dir = d.getVar("PKGDATA_DIR", True)
+                    pkgdata_dir = d.getVar("PKGDATA_DIR")
                     try:
                         possibles = os.listdir("%s/runtime-rprovides/%s/" % (pkgdata_dir, rdepend))
                     except OSError:
@@ -954,7 +954,7 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
             # The python is not a package, but python-core provides it, so
             # skip checking /usr/bin/python if python is in the rdeps, in
             # case there is a RDEPENDS_pkg = "python" in the recipe.
-            for py in [ d.getVar('MLPREFIX', True) + "python", "python" ]:
+            for py in [ d.getVar('MLPREFIX') + "python", "python" ]:
                 if py in done:
                     filerdepends.pop("/usr/bin/python",None)
                     done.remove(py)
@@ -987,7 +987,7 @@ def package_qa_check_deps(pkg, pkgdest, skip, d):
 
     def check_valid_deps(var):
         try:
-            rvar = bb.utils.explode_dep_versions2(localdata.getVar(var, True) or "")
+            rvar = bb.utils.explode_dep_versions2(localdata.getVar(var) or "")
         except ValueError as e:
             bb.fatal("%s_%s: %s" % (var, pkg, e))
         for dep in rvar:
@@ -1010,10 +1010,10 @@ def package_qa_check_expanded_d(package, d, messages):
     variables, warn the user to use it correctly.
     """
     sane = True
-    expanded_d = d.getVar('D', True)
+    expanded_d = d.getVar('D')
 
     for var in 'FILES','pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm':
-        bbvar = d.getVar(var + "_" + package, True) or ""
+        bbvar = d.getVar(var + "_" + package) or ""
         if expanded_d in bbvar:
             if var == 'FILES':
                 package_qa_add_message(messages, "expanded-d", "FILES in %s recipe should not contain the ${D} variable as it references the local build directory not the target filesystem, best solution is to remove the ${D} reference" % package)
@@ -1026,7 +1026,7 @@ def package_qa_check_expanded_d(package, d, messages):
 def package_qa_check_encoding(keys, encode, d):
     def check_encoding(key, enc):
         sane = True
-        value = d.getVar(key, True)
+        value = d.getVar(key)
         if value:
             try:
                 s = value.encode(enc)
@@ -1051,8 +1051,8 @@ def package_qa_check_host_user(path, name, d, elf, messages):
     if not os.path.lexists(path):
         return
 
-    dest = d.getVar('PKGDEST', True)
-    pn = d.getVar('PN', True)
+    dest = d.getVar('PKGDEST')
+    pn = d.getVar('PN')
     home = os.path.join(dest, 'home')
     if path == home or path.startswith(home + os.sep):
         return
@@ -1065,12 +1065,12 @@ def package_qa_check_host_user(path, name, d, elf, messages):
             raise
     else:
         rootfs_path = path[len(dest):]
-        check_uid = int(d.getVar('HOST_USER_UID', True))
+        check_uid = int(d.getVar('HOST_USER_UID'))
         if stat.st_uid == check_uid:
             package_qa_add_message(messages, "host-user-contaminated", "%s: %s is owned by uid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, rootfs_path, check_uid))
             return False
 
-        check_gid = int(d.getVar('HOST_USER_GID', True))
+        check_gid = int(d.getVar('HOST_USER_GID'))
         if stat.st_gid == check_gid:
             package_qa_add_message(messages, "host-user-contaminated", "%s: %s is owned by gid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, rootfs_path, check_gid))
             return False
@@ -1088,8 +1088,8 @@ python do_package_qa () {
     # Check non UTF-8 characters on recipe's metadata
     package_qa_check_encoding(['DESCRIPTION', 'SUMMARY', 'LICENSE', 'SECTION'], 'utf-8', d)
 
-    logdir = d.getVar('T', True)
-    pkg = d.getVar('PN', True)
+    logdir = d.getVar('T')
+    pkg = d.getVar('PN')
 
     # Check the compile log for host contamination
     compilelog = os.path.join(logdir,"log.do_compile")
@@ -1112,8 +1112,8 @@ python do_package_qa () {
         package_qa_handle_error("install-host-path", msg, d)
 
     # Scan the packages...
-    pkgdest = d.getVar('PKGDEST', True)
-    packages = set((d.getVar('PACKAGES', True) or '').split())
+    pkgdest = d.getVar('PKGDEST')
+    packages = set((d.getVar('PACKAGES') or '').split())
 
     cpath = oe.cachedpath.CachedPath()
     global pkgfiles
@@ -1142,7 +1142,7 @@ python do_package_qa () {
         testmatrix = d.getVarFlags(matrix_name) or {}
         g = globals()
         warnchecks = []
-        for w in (d.getVar("WARN_QA", True) or "").split():
+        for w in (d.getVar("WARN_QA") or "").split():
             if w in skip:
                 continue
             if w in testmatrix and testmatrix[w] in g:
@@ -1151,7 +1151,7 @@ python do_package_qa () {
                 oe.utils.write_ld_so_conf(d)
 
         errorchecks = []
-        for e in (d.getVar("ERROR_QA", True) or "").split():
+        for e in (d.getVar("ERROR_QA") or "").split():
             if e in skip:
                 continue
             if e in testmatrix and testmatrix[e] in g:
@@ -1160,7 +1160,7 @@ python do_package_qa () {
                 oe.utils.write_ld_so_conf(d)
         return warnchecks, errorchecks
 
-        skip = (d.getVar('INSANE_SKIP_' + package, True) or "").split()
+        skip = (d.getVar('INSANE_SKIP_' + package) or "").split()
         if skip:
             bb.note("Package %s skipping QA tests: %s" % (package, str(skip)))
 
@@ -1180,10 +1180,10 @@ python do_package_qa () {
         package_qa_check_rdepends(package, pkgdest, skip, taskdeps, packages, d)
         package_qa_check_deps(package, pkgdest, skip, d)
 
-    if 'libdir' in d.getVar("ALL_QA", True).split():
+    if 'libdir' in d.getVar("ALL_QA").split():
         package_qa_check_libdir(d)
 
-    qa_sane = d.getVar("QA_SANE", True)
+    qa_sane = d.getVar("QA_SANE")
     if not qa_sane:
         bb.fatal("QA run found fatal errors. Please consider fixing them.")
     bb.note("DONE with PACKAGE QA")
@@ -1216,7 +1216,7 @@ python do_qa_configure() {
     ###########################################################################
 
     configs = []
-    workdir = d.getVar('WORKDIR', True)
+    workdir = d.getVar('WORKDIR')
 
     if bb.data.inherits_class('autotools', d):
         bb.note("Checking autotools environment for common misconfiguration")
@@ -1237,16 +1237,16 @@ Rerun configure task after fixing this.""")
     # Check gettext configuration and dependencies are correct
     ###########################################################################
 
-    cnf = d.getVar('EXTRA_OECONF', True) or ""
-    if "gettext" not in d.getVar('P', True) and "gcc-runtime" not in d.getVar('P', True) and "--disable-nls" not in cnf:
-        ml = d.getVar("MLPREFIX", True) or ""
+    cnf = d.getVar('EXTRA_OECONF') or ""
+    if "gettext" not in d.getVar('P') and "gcc-runtime" not in d.getVar('P') and "--disable-nls" not in cnf:
+        ml = d.getVar("MLPREFIX") or ""
         if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('nativesdk', d):
             gt = "gettext-native"
         elif bb.data.inherits_class('cross-canadian', d):
             gt = "nativesdk-gettext"
         else:
             gt = "virtual/" + ml + "gettext"
-        deps = bb.utils.explode_deps(d.getVar('DEPENDS', True) or "")
+        deps = bb.utils.explode_deps(d.getVar('DEPENDS') or "")
         if gt not in deps:
             for config in configs:
                 gnu = "grep \"^[[:space:]]*AM_GNU_GETTEXT\" %s >/dev/null" % config
@@ -1261,40 +1261,40 @@ Missing inherit gettext?""" % (gt, config))
1261 bb.note("Checking configure output for unrecognised options") 1261 bb.note("Checking configure output for unrecognised options")
1262 try: 1262 try:
1263 flag = "WARNING: unrecognized options:" 1263 flag = "WARNING: unrecognized options:"
1264 log = os.path.join(d.getVar('B', True), 'config.log') 1264 log = os.path.join(d.getVar('B'), 'config.log')
1265 output = subprocess.check_output(['grep', '-F', flag, log]).decode("utf-8").replace(', ', ' ') 1265 output = subprocess.check_output(['grep', '-F', flag, log]).decode("utf-8").replace(', ', ' ')
1266 options = set() 1266 options = set()
1267 for line in output.splitlines(): 1267 for line in output.splitlines():
1268 options |= set(line.partition(flag)[2].split()) 1268 options |= set(line.partition(flag)[2].split())
1269 whitelist = set(d.getVar("UNKNOWN_CONFIGURE_WHITELIST", True).split()) 1269 whitelist = set(d.getVar("UNKNOWN_CONFIGURE_WHITELIST").split())
1270 options -= whitelist 1270 options -= whitelist
1271 if options: 1271 if options:
1272 pn = d.getVar('PN', True) 1272 pn = d.getVar('PN')
1273 error_msg = pn + ": configure was passed unrecognised options: " + " ".join(options) 1273 error_msg = pn + ": configure was passed unrecognised options: " + " ".join(options)
1274 package_qa_handle_error("unknown-configure-option", error_msg, d) 1274 package_qa_handle_error("unknown-configure-option", error_msg, d)
1275 except subprocess.CalledProcessError: 1275 except subprocess.CalledProcessError:
1276 pass 1276 pass
1277 1277
1278 # Check invalid PACKAGECONFIG 1278 # Check invalid PACKAGECONFIG
1279 pkgconfig = (d.getVar("PACKAGECONFIG", True) or "").split() 1279 pkgconfig = (d.getVar("PACKAGECONFIG") or "").split()
1280 if pkgconfig: 1280 if pkgconfig:
1281 pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {} 1281 pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
1282 for pconfig in pkgconfig: 1282 for pconfig in pkgconfig:
1283 if pconfig not in pkgconfigflags: 1283 if pconfig not in pkgconfigflags:
1284 pn = d.getVar('PN', True) 1284 pn = d.getVar('PN')
1285 error_msg = "%s: invalid PACKAGECONFIG: %s" % (pn, pconfig) 1285 error_msg = "%s: invalid PACKAGECONFIG: %s" % (pn, pconfig)
1286 package_qa_handle_error("invalid-packageconfig", error_msg, d) 1286 package_qa_handle_error("invalid-packageconfig", error_msg, d)
1287 1287
1288 qa_sane = d.getVar("QA_SANE", True) 1288 qa_sane = d.getVar("QA_SANE")
1289 if not qa_sane: 1289 if not qa_sane:
1290 bb.fatal("Fatal QA errors found, failing task.") 1290 bb.fatal("Fatal QA errors found, failing task.")
1291} 1291}
1292 1292
1293python do_qa_unpack() { 1293python do_qa_unpack() {
1294 src_uri = d.getVar('SRC_URI', True) 1294 src_uri = d.getVar('SRC_URI')
1295 s_dir = d.getVar('S', True) 1295 s_dir = d.getVar('S')
1296 if src_uri and not os.path.exists(s_dir): 1296 if src_uri and not os.path.exists(s_dir):
1297 bb.warn('%s: the directory %s (%s) pointed to by the S variable doesn\'t exist - please set S within the recipe to point to where the source has been unpacked to' % (d.getVar('PN', True), d.getVar('S', False), s_dir)) 1297 bb.warn('%s: the directory %s (%s) pointed to by the S variable doesn\'t exist - please set S within the recipe to point to where the source has been unpacked to' % (d.getVar('PN'), d.getVar('S', False), s_dir))
1298} 1298}
1299 1299
1300# The Staging Func, to check all staging 1300# The Staging Func, to check all staging
@@ -1310,7 +1310,7 @@ do_configure[postfuncs] += "do_qa_configure "
 do_unpack[postfuncs] += "do_qa_unpack"
 
 python () {
-    tests = d.getVar('ALL_QA', True).split()
+    tests = d.getVar('ALL_QA').split()
     if "desktop" in tests:
         d.appendVar("PACKAGE_DEPENDS", " desktop-file-utils-native")
 
@@ -1319,7 +1319,7 @@ python () {
     ###########################################################################
 
     # Checking ${FILESEXTRAPATHS}
-    extrapaths = (d.getVar("FILESEXTRAPATHS", True) or "")
+    extrapaths = (d.getVar("FILESEXTRAPATHS") or "")
     if '__default' not in extrapaths.split(":"):
         msg = "FILESEXTRAPATHS-variable, must always use _prepend (or _append)\n"
         msg += "type of assignment, and don't forget the colon.\n"
@@ -1331,29 +1331,29 @@ python () {
1331 msg += "%s\n" % extrapaths 1331 msg += "%s\n" % extrapaths
1332 bb.warn(msg) 1332 bb.warn(msg)
1333 1333
1334 overrides = d.getVar('OVERRIDES', True).split(':') 1334 overrides = d.getVar('OVERRIDES').split(':')
1335 pn = d.getVar('PN', True) 1335 pn = d.getVar('PN')
1336 if pn in overrides: 1336 if pn in overrides:
1337 msg = 'Recipe %s has PN of "%s" which is in OVERRIDES, this can result in unexpected behaviour.' % (d.getVar("FILE", True), pn) 1337 msg = 'Recipe %s has PN of "%s" which is in OVERRIDES, this can result in unexpected behaviour.' % (d.getVar("FILE"), pn)
1338 package_qa_handle_error("pn-overrides", msg, d) 1338 package_qa_handle_error("pn-overrides", msg, d)
1339 1339
1340 issues = [] 1340 issues = []
1341 if (d.getVar('PACKAGES', True) or "").split(): 1341 if (d.getVar('PACKAGES') or "").split():
1342 for dep in (d.getVar('QADEPENDS', True) or "").split(): 1342 for dep in (d.getVar('QADEPENDS') or "").split():
1343 d.appendVarFlag('do_package_qa', 'depends', " %s:do_populate_sysroot" % dep) 1343 d.appendVarFlag('do_package_qa', 'depends', " %s:do_populate_sysroot" % dep)
1344 for var in 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RCONFLICTS', 'RPROVIDES', 'RREPLACES', 'FILES', 'pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm', 'ALLOW_EMPTY': 1344 for var in 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RCONFLICTS', 'RPROVIDES', 'RREPLACES', 'FILES', 'pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm', 'ALLOW_EMPTY':
1345 if d.getVar(var, False): 1345 if d.getVar(var, False):
1346 issues.append(var) 1346 issues.append(var)
1347 1347
1348 fakeroot_tests = d.getVar('FAKEROOT_QA', True).split() 1348 fakeroot_tests = d.getVar('FAKEROOT_QA').split()
1349 if set(tests) & set(fakeroot_tests): 1349 if set(tests) & set(fakeroot_tests):
1350 d.setVarFlag('do_package_qa', 'fakeroot', '1') 1350 d.setVarFlag('do_package_qa', 'fakeroot', '1')
1351 d.appendVarFlag('do_package_qa', 'depends', ' virtual/fakeroot-native:do_populate_sysroot') 1351 d.appendVarFlag('do_package_qa', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
1352 else: 1352 else:
1353 d.setVarFlag('do_package_qa', 'rdeptask', '') 1353 d.setVarFlag('do_package_qa', 'rdeptask', '')
1354 for i in issues: 1354 for i in issues:
1355 package_qa_handle_error("pkgvarcheck", "%s: Variable %s is set as not being package specific, please fix this." % (d.getVar("FILE", True), i), d) 1355 package_qa_handle_error("pkgvarcheck", "%s: Variable %s is set as not being package specific, please fix this." % (d.getVar("FILE"), i), d)
1356 qa_sane = d.getVar("QA_SANE", True) 1356 qa_sane = d.getVar("QA_SANE")
1357 if not qa_sane: 1357 if not qa_sane:
1358 bb.fatal("Fatal QA errors found, failing task.") 1358 bb.fatal("Fatal QA errors found, failing task.")
1359} 1359}
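
Several insane.bbclass hunks above touch package_qa_handle_error(), which routes a QA issue by class: an error if the class appears in ERROR_QA, a warning if it appears in WARN_QA, and a plain note otherwise. A minimal runnable sketch of that triage, with a plain dict standing in for the datastore (illustrative only):

    # classify() mirrors the triage order of package_qa_handle_error();
    # the dict-based "d" is a stand-in, not the real BitBake datastore.
    def classify(error_class, d):
        if error_class in (d.get("ERROR_QA") or "").split():
            return "error"
        elif error_class in (d.get("WARN_QA") or "").split():
            return "warn"
        return "note"

    d = {"ERROR_QA": "arch license-checksum", "WARN_QA": "libdir"}
    assert classify("arch", d) == "error"
    assert classify("libdir", d) == "warn"
    assert classify("symlink-to-sysroot", d) == "note"
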
diff --git a/meta/classes/kernel-arch.bbclass b/meta/classes/kernel-arch.bbclass
index ea976c66b3..e09cdc4184 100644
--- a/meta/classes/kernel-arch.bbclass
+++ b/meta/classes/kernel-arch.bbclass
@@ -19,7 +19,7 @@ valid_archs = "alpha cris ia64 \
 def map_kernel_arch(a, d):
     import re
 
-    valid_archs = d.getVar('valid_archs', True).split()
+    valid_archs = d.getVar('valid_archs').split()
 
     if re.match('(i.86|athlon|x86.64)$', a): return 'x86'
     elif re.match('armeb$', a): return 'arm'
@@ -34,7 +34,7 @@ def map_kernel_arch(a, d):
     else:
         bb.error("cannot map '%s' to a linux kernel architecture" % a)
 
-export ARCH = "${@map_kernel_arch(d.getVar('TARGET_ARCH', True), d)}"
+export ARCH = "${@map_kernel_arch(d.getVar('TARGET_ARCH'), d)}"
 
 def map_uboot_arch(a, d):
     import re
@@ -43,7 +43,7 @@ def map_uboot_arch(a, d):
     elif re.match('i.86$', a): return 'x86'
     return a
 
-export UBOOT_ARCH = "${@map_uboot_arch(d.getVar('ARCH', True), d)}"
+export UBOOT_ARCH = "${@map_uboot_arch(d.getVar('ARCH'), d)}"
 
 # Set TARGET_??_KERNEL_ARCH in the machine .conf to set architecture
 # specific options necessary for building the kernel and modules.
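
The kernel-arch.bbclass hunks rewrite the inline ${@...} expressions, but the regex-based mapping itself is unchanged. A runnable sketch of that matching, reduced to the regular expressions alone so no datastore is needed (the map_arch name is illustrative):

    import re

    # Mirror the first two patterns of map_kernel_arch(): re.match()
    # anchors at the start, and "." matches any single character.
    def map_arch(a):
        if re.match('(i.86|athlon|x86.64)$', a):
            return 'x86'
        elif re.match('armeb$', a):
            return 'arm'
        return a

    assert map_arch("i686") == "x86"    # "i.86" matches i386..i686
    assert map_arch("x86_64") == "x86"  # "x86.64" matches the underscore
    assert map_arch("armeb") == "arm"
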
diff --git a/meta/classes/kernel-fitimage.bbclass b/meta/classes/kernel-fitimage.bbclass
index 8580247f82..2934b3a31a 100644
--- a/meta/classes/kernel-fitimage.bbclass
+++ b/meta/classes/kernel-fitimage.bbclass
@@ -1,13 +1,13 @@
 inherit kernel-uboot uboot-sign
 
 python __anonymous () {
-    kerneltypes = d.getVar('KERNEL_IMAGETYPES', True) or ""
+    kerneltypes = d.getVar('KERNEL_IMAGETYPES') or ""
     if 'fitImage' in kerneltypes.split():
-        depends = d.getVar("DEPENDS", True)
+        depends = d.getVar("DEPENDS")
         depends = "%s u-boot-mkimage-native dtc-native" % depends
         d.setVar("DEPENDS", depends)
 
-        if d.getVar("UBOOT_ARCH", True) == "x86":
+        if d.getVar("UBOOT_ARCH") == "x86":
             replacementtype = "bzImage"
         else:
             replacementtype = "zImage"
@@ -15,19 +15,19 @@ python __anonymous () {
         # Override KERNEL_IMAGETYPE_FOR_MAKE variable, which is internal
         # to kernel.bbclass . We have to override it, since we pack zImage
         # (at least for now) into the fitImage .
-        typeformake = d.getVar("KERNEL_IMAGETYPE_FOR_MAKE", True) or ""
+        typeformake = d.getVar("KERNEL_IMAGETYPE_FOR_MAKE") or ""
         if 'fitImage' in typeformake.split():
             d.setVar('KERNEL_IMAGETYPE_FOR_MAKE', typeformake.replace('fitImage', replacementtype))
 
-        image = d.getVar('INITRAMFS_IMAGE', True)
+        image = d.getVar('INITRAMFS_IMAGE')
         if image:
             d.appendVarFlag('do_assemble_fitimage_initramfs', 'depends', ' ${INITRAMFS_IMAGE}:do_image_complete')
 
         # Verified boot will sign the fitImage and append the public key to
         # U-boot dtb. We ensure the U-Boot dtb is deployed before assembling
         # the fitImage:
-        if d.getVar('UBOOT_SIGN_ENABLE', True):
-            uboot_pn = d.getVar('PREFERRED_PROVIDER_u-boot', True) or 'u-boot'
+        if d.getVar('UBOOT_SIGN_ENABLE'):
+            uboot_pn = d.getVar('PREFERRED_PROVIDER_u-boot') or 'u-boot'
             d.appendVarFlag('do_assemble_fitimage', 'depends', ' %s:do_deploy' % uboot_pn)
 }
 
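
The anonymous python block above follows a common parse-time pattern: read one variable, then conditionally extend another. A hedged sketch of just that control flow, with a plain dict standing in for the datastore (illustrative only, not the real bb object):

    # Mock datastore; the real code uses d.getVar()/d.setVar() instead.
    d = {"KERNEL_IMAGETYPES": "fitImage vmlinux",
         "DEPENDS": "kern-tools-native"}

    kerneltypes = d.get("KERNEL_IMAGETYPES") or ""
    if "fitImage" in kerneltypes.split():
        # Extend DEPENDS only when a fitImage is actually requested.
        d["DEPENDS"] = "%s u-boot-mkimage-native dtc-native" % d["DEPENDS"]

    assert d["DEPENDS"] == "kern-tools-native u-boot-mkimage-native dtc-native"
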
diff --git a/meta/classes/kernel-grub.bbclass b/meta/classes/kernel-grub.bbclass
index f7dcc0715a..5d92f3b636 100644
--- a/meta/classes/kernel-grub.bbclass
+++ b/meta/classes/kernel-grub.bbclass
@@ -92,7 +92,7 @@ python __anonymous () {
 	fi
 '''
 
-    imagetypes = d.getVar('KERNEL_IMAGETYPES', True)
+    imagetypes = d.getVar('KERNEL_IMAGETYPES')
     imagetypes = re.sub(r'\.gz$', '', imagetypes)
 
     for type in imagetypes.split():
diff --git a/meta/classes/kernel-module-split.bbclass b/meta/classes/kernel-module-split.bbclass
index 08d226276e..efe1b42656 100644
--- a/meta/classes/kernel-module-split.bbclass
+++ b/meta/classes/kernel-module-split.bbclass
@@ -39,10 +39,10 @@ python split_kernel_module_packages () {
 
     def extract_modinfo(file):
         import tempfile, subprocess
-        tempfile.tempdir = d.getVar("WORKDIR", True)
+        tempfile.tempdir = d.getVar("WORKDIR")
         tf = tempfile.mkstemp()
         tmpfile = tf[1]
-        cmd = "%sobjcopy -j .modinfo -O binary %s %s" % (d.getVar("HOST_PREFIX", True) or "", file, tmpfile)
+        cmd = "%sobjcopy -j .modinfo -O binary %s %s" % (d.getVar("HOST_PREFIX") or "", file, tmpfile)
         subprocess.call(cmd, shell=True)
         f = open(tmpfile)
         l = f.read().split("\000")
@@ -60,12 +60,12 @@ python split_kernel_module_packages () {
     def frob_metadata(file, pkg, pattern, format, basename):
         vals = extract_modinfo(file)
 
-        dvar = d.getVar('PKGD', True)
+        dvar = d.getVar('PKGD')
 
         # If autoloading is requested, output /etc/modules-load.d/<name>.conf and append
         # appropriate modprobe commands to the postinst
-        autoloadlist = (d.getVar("KERNEL_MODULE_AUTOLOAD", True) or "").split()
-        autoload = d.getVar('module_autoload_%s' % basename, True)
+        autoloadlist = (d.getVar("KERNEL_MODULE_AUTOLOAD") or "").split()
+        autoload = d.getVar('module_autoload_%s' % basename)
         if autoload and autoload == basename:
             bb.warn("module_autoload_%s was replaced by KERNEL_MODULE_AUTOLOAD for cases where basename == module name, please drop it" % basename)
         if autoload and basename not in autoloadlist:
@@ -79,15 +79,15 @@ python split_kernel_module_packages () {
         else:
             f.write('%s\n' % basename)
         f.close()
-        postinst = d.getVar('pkg_postinst_%s' % pkg, True)
+        postinst = d.getVar('pkg_postinst_%s' % pkg)
         if not postinst:
             bb.fatal("pkg_postinst_%s not defined" % pkg)
-        postinst += d.getVar('autoload_postinst_fragment', True) % (autoload or basename)
+        postinst += d.getVar('autoload_postinst_fragment') % (autoload or basename)
         d.setVar('pkg_postinst_%s' % pkg, postinst)
 
         # Write out any modconf fragment
-        modconflist = (d.getVar("KERNEL_MODULE_PROBECONF", True) or "").split()
-        modconf = d.getVar('module_conf_%s' % basename, True)
+        modconflist = (d.getVar("KERNEL_MODULE_PROBECONF") or "").split()
+        modconf = d.getVar('module_conf_%s' % basename)
         if modconf and basename in modconflist:
             name = '%s/etc/modprobe.d/%s.conf' % (dvar, basename)
             f = open(name, 'w')
@@ -96,15 +96,15 @@ python split_kernel_module_packages () {
         elif modconf:
             bb.error("Please ensure module %s is listed in KERNEL_MODULE_PROBECONF since module_conf_%s is set" % (basename, basename))
 
-        files = d.getVar('FILES_%s' % pkg, True)
+        files = d.getVar('FILES_%s' % pkg)
         files = "%s /etc/modules-load.d/%s.conf /etc/modprobe.d/%s.conf" % (files, basename, basename)
         d.setVar('FILES_%s' % pkg, files)
 
         if "description" in vals:
-            old_desc = d.getVar('DESCRIPTION_' + pkg, True) or ""
+            old_desc = d.getVar('DESCRIPTION_' + pkg) or ""
             d.setVar('DESCRIPTION_' + pkg, old_desc + "; " + vals["description"])
 
-        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "")
+        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg) or "")
         modinfo_deps = []
         if "depends" in vals and vals["depends"] != "":
             for dep in vals["depends"].split(","):
@@ -121,24 +121,24 @@ python split_kernel_module_packages () {
 
     module_regex = '^(.*)\.k?o$'
 
-    module_pattern_prefix = d.getVar('KERNEL_MODULE_PACKAGE_PREFIX', True)
+    module_pattern_prefix = d.getVar('KERNEL_MODULE_PACKAGE_PREFIX')
     module_pattern = module_pattern_prefix + 'kernel-module-%s'
 
-    postinst = d.getVar('pkg_postinst_modules', True)
-    postrm = d.getVar('pkg_postrm_modules', True)
+    postinst = d.getVar('pkg_postinst_modules')
+    postrm = d.getVar('pkg_postrm_modules')
 
-    modules = do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='kernel-%s' % (d.getVar("KERNEL_VERSION", True)))
+    modules = do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='kernel-%s' % (d.getVar("KERNEL_VERSION")))
     if modules:
-        metapkg = d.getVar('KERNEL_MODULES_META_PACKAGE', True)
+        metapkg = d.getVar('KERNEL_MODULES_META_PACKAGE')
         d.appendVar('RDEPENDS_' + metapkg, ' '+' '.join(modules))
 
     # If modules-load.d and modprobe.d are empty at this point, remove them to
     # avoid warnings. removedirs only raises an OSError if an empty
     # directory cannot be removed.
-    dvar = d.getVar('PKGD', True)
+    dvar = d.getVar('PKGD')
     for dir in ["%s/etc/modprobe.d" % (dvar), "%s/etc/modules-load.d" % (dvar), "%s/etc" % (dvar)]:
         if len(os.listdir(dir)) == 0:
             os.rmdir(dir)
 }
 
-do_package[vardeps] += '${@" ".join(map(lambda s: "module_conf_" + s, (d.getVar("KERNEL_MODULE_PROBECONF", True) or "").split()))}'
+do_package[vardeps] += '${@" ".join(map(lambda s: "module_conf_" + s, (d.getVar("KERNEL_MODULE_PROBECONF") or "").split()))}'
diff --git a/meta/classes/kernel-uimage.bbclass b/meta/classes/kernel-uimage.bbclass
index 340503a2d6..7e7185f903 100644
--- a/meta/classes/kernel-uimage.bbclass
+++ b/meta/classes/kernel-uimage.bbclass
@@ -1,8 +1,8 @@
 inherit kernel-uboot
 
 python __anonymous () {
-    if "uImage" in (d.getVar('KERNEL_IMAGETYPES', True) or "").split():
-        depends = d.getVar("DEPENDS", True)
+    if "uImage" in (d.getVar('KERNEL_IMAGETYPES') or "").split():
+        depends = d.getVar("DEPENDS")
         depends = "%s u-boot-mkimage-native" % depends
         d.setVar("DEPENDS", depends)
 
@@ -11,8 +11,8 @@ python __anonymous () {
     # to build uImage using the kernel build system if and only if
     # KEEPUIMAGE == yes. Otherwise, we pack compressed vmlinux into
     # the uImage .
-    if d.getVar("KEEPUIMAGE", True) != 'yes':
-        typeformake = d.getVar("KERNEL_IMAGETYPE_FOR_MAKE", True) or ""
+    if d.getVar("KEEPUIMAGE") != 'yes':
+        typeformake = d.getVar("KERNEL_IMAGETYPE_FOR_MAKE") or ""
         if "uImage" in typeformake.split():
             d.setVar('KERNEL_IMAGETYPE_FOR_MAKE', typeformake.replace('uImage', 'vmlinux'))
 }
diff --git a/meta/classes/kernel-yocto.bbclass b/meta/classes/kernel-yocto.bbclass
index a60327a07e..c7fff5a512 100644
--- a/meta/classes/kernel-yocto.bbclass
+++ b/meta/classes/kernel-yocto.bbclass
@@ -290,7 +290,7 @@ python do_kernel_configcheck() {
     if not os.path.exists(kmeta):
         kmeta = "." + kmeta
 
-    pathprefix = "export PATH=%s:%s; " % (d.getVar('PATH', True), "${S}/scripts/util/")
+    pathprefix = "export PATH=%s:%s; " % (d.getVar('PATH'), "${S}/scripts/util/")
 
     cmd = d.expand("scc --configs -o ${S}/.kernel-meta")
     ret, configs = oe.utils.getstatusoutput("%s%s" % (pathprefix, cmd))
diff --git a/meta/classes/kernel.bbclass b/meta/classes/kernel.bbclass
index b779a767d3..fa598cff7f 100644
--- a/meta/classes/kernel.bbclass
+++ b/meta/classes/kernel.bbclass
@@ -22,18 +22,18 @@ INITRAMFS_IMAGE_BUNDLE ?= ""
 # number and cause kernel to be rebuilt. To avoid this, make
 # KERNEL_VERSION_NAME and KERNEL_VERSION_PKG_NAME depend on
 # LINUX_VERSION which is a constant.
-KERNEL_VERSION_NAME = "${@d.getVar('KERNEL_VERSION', True) or ""}"
+KERNEL_VERSION_NAME = "${@d.getVar('KERNEL_VERSION') or ""}"
 KERNEL_VERSION_NAME[vardepvalue] = "${LINUX_VERSION}"
-KERNEL_VERSION_PKG_NAME = "${@legitimize_package_name(d.getVar('KERNEL_VERSION', True))}"
+KERNEL_VERSION_PKG_NAME = "${@legitimize_package_name(d.getVar('KERNEL_VERSION'))}"
 KERNEL_VERSION_PKG_NAME[vardepvalue] = "${LINUX_VERSION}"
 
 python __anonymous () {
     import re
 
     # Merge KERNEL_IMAGETYPE and KERNEL_ALT_IMAGETYPE into KERNEL_IMAGETYPES
-    type = d.getVar('KERNEL_IMAGETYPE', True) or ""
-    alttype = d.getVar('KERNEL_ALT_IMAGETYPE', True) or ""
-    types = d.getVar('KERNEL_IMAGETYPES', True) or ""
+    type = d.getVar('KERNEL_IMAGETYPE') or ""
+    alttype = d.getVar('KERNEL_ALT_IMAGETYPE') or ""
+    types = d.getVar('KERNEL_IMAGETYPES') or ""
     if type not in types.split():
         types = (type + ' ' + types).strip()
     if alttype not in types.split():
@@ -56,15 +56,15 @@ python __anonymous () {
 
         d.setVar('ALLOW_EMPTY_kernel-image-' + typelower, '1')
 
-        imagedest = d.getVar('KERNEL_IMAGEDEST', True)
-        priority = d.getVar('KERNEL_PRIORITY', True)
+        imagedest = d.getVar('KERNEL_IMAGEDEST')
+        priority = d.getVar('KERNEL_PRIORITY')
         postinst = '#!/bin/sh\n' + 'update-alternatives --install /' + imagedest + '/' + type + ' ' + type + ' ' + '/' + imagedest + '/' + type + '-${KERNEL_VERSION_NAME} ' + priority + ' || true' + '\n'
         d.setVar('pkg_postinst_kernel-image-' + typelower, postinst)
 
         postrm = '#!/bin/sh\n' + 'update-alternatives --remove' + ' ' + type + ' ' + type + '-${KERNEL_VERSION_NAME} || true' + '\n'
         d.setVar('pkg_postrm_kernel-image-' + typelower, postrm)
 
-    image = d.getVar('INITRAMFS_IMAGE', True)
+    image = d.getVar('INITRAMFS_IMAGE')
     if image:
         d.appendVarFlag('do_bundle_initramfs', 'depends', ' ${INITRAMFS_IMAGE}:do_image_complete')
 
@@ -72,7 +72,7 @@ python __anonymous () {
     # The preferred method is to set INITRAMFS_IMAGE, because
     # this INITRAMFS_TASK has circular dependency problems
     # if the initramfs requires kernel modules
-    image_task = d.getVar('INITRAMFS_TASK', True)
+    image_task = d.getVar('INITRAMFS_TASK')
     if image_task:
         d.appendVarFlag('do_configure', 'depends', ' ${INITRAMFS_TASK}')
 }
@@ -101,15 +101,15 @@ inherit ${KERNEL_CLASSES}
 do_unpack[cleandirs] += " ${S} ${STAGING_KERNEL_DIR} ${B} ${STAGING_KERNEL_BUILDDIR}"
 do_clean[cleandirs] += " ${S} ${STAGING_KERNEL_DIR} ${B} ${STAGING_KERNEL_BUILDDIR}"
 base_do_unpack_append () {
-    s = d.getVar("S", True)
+    s = d.getVar("S")
     if s[-1] == '/':
         # drop trailing slash, so that os.symlink(kernsrc, s) doesn't use s as directory name and fail
         s=s[:-1]
-    kernsrc = d.getVar("STAGING_KERNEL_DIR", True)
+    kernsrc = d.getVar("STAGING_KERNEL_DIR")
     if s != kernsrc:
         bb.utils.mkdirhier(kernsrc)
         bb.utils.remove(kernsrc, recurse=True)
-        if d.getVar("EXTERNALSRC", True):
+        if d.getVar("EXTERNALSRC"):
             # With EXTERNALSRC S will not be wiped so we can symlink to it
             os.symlink(s, kernsrc)
         else:
@@ -127,9 +127,9 @@ PACKAGES_DYNAMIC += "^kernel-firmware-.*"
 export OS = "${TARGET_OS}"
 export CROSS_COMPILE = "${TARGET_PREFIX}"
 
-KERNEL_PRIORITY ?= "${@int(d.getVar('PV', True).split('-')[0].split('+')[0].split('.')[0]) * 10000 + \
-    int(d.getVar('PV', True).split('-')[0].split('+')[0].split('.')[1]) * 100 + \
-    int(d.getVar('PV', True).split('-')[0].split('+')[0].split('.')[-1])}"
+KERNEL_PRIORITY ?= "${@int(d.getVar('PV').split('-')[0].split('+')[0].split('.')[0]) * 10000 + \
+    int(d.getVar('PV').split('-')[0].split('+')[0].split('.')[1]) * 100 + \
+    int(d.getVar('PV').split('-')[0].split('+')[0].split('.')[-1])}"
 
 KERNEL_RELEASE ?= "${KERNEL_VERSION}"
 
@@ -140,7 +140,7 @@ KERNEL_IMAGEDEST = "boot"
 #
 # configuration
 #
-export CMDLINE_CONSOLE = "console=${@d.getVar("KERNEL_CONSOLE", True) or "ttyS0"}"
+export CMDLINE_CONSOLE = "console=${@d.getVar("KERNEL_CONSOLE") or "ttyS0"}"
 
 KERNEL_VERSION = "${@get_kernelversion_headers('${B}')}"
 
@@ -430,14 +430,14 @@ sysroot_stage_all () {
 KERNEL_CONFIG_COMMAND ?= "oe_runmake_call -C ${S} O=${B} oldnoconfig || yes '' | oe_runmake -C ${S} O=${B} oldconfig"
 
 python check_oldest_kernel() {
-    oldest_kernel = d.getVar('OLDEST_KERNEL', True)
-    kernel_version = d.getVar('KERNEL_VERSION', True)
-    tclibc = d.getVar('TCLIBC', True)
+    oldest_kernel = d.getVar('OLDEST_KERNEL')
+    kernel_version = d.getVar('KERNEL_VERSION')
+    tclibc = d.getVar('TCLIBC')
     if tclibc == 'glibc':
         kernel_version = kernel_version.split('-', 1)[0]
     if oldest_kernel and kernel_version:
         if bb.utils.vercmp_string(kernel_version, oldest_kernel) < 0:
-            bb.warn('%s: OLDEST_KERNEL is "%s" but the version of the kernel you are building is "%s" - therefore %s as built may not be compatible with this kernel. Either set OLDEST_KERNEL to an older version, or build a newer kernel.' % (d.getVar('PN', True), oldest_kernel, kernel_version, tclibc))
+            bb.warn('%s: OLDEST_KERNEL is "%s" but the version of the kernel you are building is "%s" - therefore %s as built may not be compatible with this kernel. Either set OLDEST_KERNEL to an older version, or build a newer kernel.' % (d.getVar('PN'), oldest_kernel, kernel_version, tclibc))
 }
 
 check_oldest_kernel[vardepsexclude] += "OLDEST_KERNEL KERNEL_VERSION"
diff --git a/meta/classes/libc-common.bbclass b/meta/classes/libc-common.bbclass
index 11b0065a6d..9ea2c03749 100644
--- a/meta/classes/libc-common.bbclass
+++ b/meta/classes/libc-common.bbclass
@@ -17,15 +17,15 @@ do_install() {
 }
 
 def get_libc_fpu_setting(bb, d):
-    if d.getVar('TARGET_FPU', True) in [ 'soft', 'ppc-efd' ]:
+    if d.getVar('TARGET_FPU') in [ 'soft', 'ppc-efd' ]:
         return "--without-fp"
     return ""
 
 python populate_packages_prepend () {
-    if d.getVar('DEBIAN_NAMES', True):
-        pkgs = d.getVar('PACKAGES', True).split()
-        bpn = d.getVar('BPN', True)
-        prefix = d.getVar('MLPREFIX', True) or ""
+    if d.getVar('DEBIAN_NAMES'):
+        pkgs = d.getVar('PACKAGES').split()
+        bpn = d.getVar('BPN')
+        prefix = d.getVar('MLPREFIX') or ""
         # Set the base package...
         d.setVar('PKG_' + prefix + bpn, prefix + 'libc6')
         libcprefix = prefix + bpn + '-'
diff --git a/meta/classes/libc-package.bbclass b/meta/classes/libc-package.bbclass
index 071978b519..739adce694 100644
--- a/meta/classes/libc-package.bbclass
+++ b/meta/classes/libc-package.bbclass
@@ -12,24 +12,24 @@ GLIBC_INTERNAL_USE_BINARY_LOCALE ?= "ondevice"
 GLIBC_SPLIT_LC_PACKAGES ?= "0"
 
 python __anonymous () {
-    enabled = d.getVar("ENABLE_BINARY_LOCALE_GENERATION", True)
+    enabled = d.getVar("ENABLE_BINARY_LOCALE_GENERATION")
 
-    pn = d.getVar("PN", True)
+    pn = d.getVar("PN")
     if pn.endswith("-initial"):
         enabled = False
 
     if enabled and int(enabled):
         import re
 
-        target_arch = d.getVar("TARGET_ARCH", True)
-        binary_arches = d.getVar("BINARY_LOCALE_ARCHES", True) or ""
-        use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", True) or ""
+        target_arch = d.getVar("TARGET_ARCH")
+        binary_arches = d.getVar("BINARY_LOCALE_ARCHES") or ""
+        use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF") or ""
 
         for regexp in binary_arches.split(" "):
             r = re.compile(regexp)
 
             if r.match(target_arch):
-                depends = d.getVar("DEPENDS", True)
+                depends = d.getVar("DEPENDS")
                 if use_cross_localedef == "1" :
                     depends = "%s cross-localedef-native" % depends
                 else:
@@ -94,21 +94,21 @@ inherit qemu
 
 python package_do_split_gconvs () {
     import re
-    if (d.getVar('PACKAGE_NO_GCONV', True) == '1'):
+    if (d.getVar('PACKAGE_NO_GCONV') == '1'):
         bb.note("package requested not splitting gconvs")
         return
 
-    if not d.getVar('PACKAGES', True):
+    if not d.getVar('PACKAGES'):
         return
 
-    mlprefix = d.getVar("MLPREFIX", True) or ""
+    mlprefix = d.getVar("MLPREFIX") or ""
 
-    bpn = d.getVar('BPN', True)
-    libdir = d.getVar('libdir', True)
+    bpn = d.getVar('BPN')
+    libdir = d.getVar('libdir')
     if not libdir:
         bb.error("libdir not defined")
         return
-    datadir = d.getVar('datadir', True)
+    datadir = d.getVar('datadir')
     if not datadir:
         bb.error("datadir not defined")
         return
@@ -116,7 +116,7 @@ python package_do_split_gconvs () {
     gconv_libdir = base_path_join(libdir, "gconv")
     charmap_dir = base_path_join(datadir, "i18n", "charmaps")
     locales_dir = base_path_join(datadir, "i18n", "locales")
-    binary_locales_dir = d.getVar('localedir', True)
+    binary_locales_dir = d.getVar('localedir')
 
     def calc_gconv_deps(fn, pkg, file_regex, output_pattern, group):
         deps = []
@@ -183,13 +183,13 @@ python package_do_split_gconvs () {
         description='locale definition for %s', hook=calc_locale_deps, extra_depends='')
     d.setVar('PACKAGES', d.getVar('PACKAGES', False) + ' ' + d.getVar('MLPREFIX', False) + bpn + '-gconv')
 
-    use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", True)
+    use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE")
 
     dot_re = re.compile("(.*)\.(.*)")
 
     # Read in supported locales and associated encodings
     supported = {}
-    with open(base_path_join(d.getVar('WORKDIR', True), "SUPPORTED")) as f:
+    with open(base_path_join(d.getVar('WORKDIR'), "SUPPORTED")) as f:
         for line in f.readlines():
             try:
                 locale, charset = line.rstrip().split()
@@ -198,7 +198,7 @@ python package_do_split_gconvs () {
             supported[locale] = charset
 
     # GLIBC_GENERATE_LOCALES var specifies which locales to be generated. empty or "all" means all locales
-    to_generate = d.getVar('GLIBC_GENERATE_LOCALES', True)
+    to_generate = d.getVar('GLIBC_GENERATE_LOCALES')
     if not to_generate or to_generate == 'all':
         to_generate = sorted(supported.keys())
     else:
@@ -215,14 +215,14 @@ python package_do_split_gconvs () {
     def output_locale_source(name, pkgname, locale, encoding):
         d.setVar('RDEPENDS_%s' % pkgname, '%slocaledef %s-localedata-%s %s-charmap-%s' % \
             (mlprefix, mlprefix+bpn, legitimize_package_name(locale), mlprefix+bpn, legitimize_package_name(encoding)))
-        d.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst', True) \
+        d.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst') \
             % (locale, encoding, locale))
-        d.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm', True) % \
+        d.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm') % \
             (locale, encoding, locale))
 
     def output_locale_binary_rdepends(name, pkgname, locale, encoding):
         dep = legitimize_package_name('%s-binary-localedata-%s' % (bpn, name))
-        lcsplit = d.getVar('GLIBC_SPLIT_LC_PACKAGES', True)
+        lcsplit = d.getVar('GLIBC_SPLIT_LC_PACKAGES')
         if lcsplit and int(lcsplit):
             d.appendVar('PACKAGES', ' ' + dep)
             d.setVar('ALLOW_EMPTY_%s' % dep, '1')
@@ -231,16 +231,16 @@ python package_do_split_gconvs () {
     commands = {}
 
     def output_locale_binary(name, pkgname, locale, encoding):
-        treedir = base_path_join(d.getVar("WORKDIR", True), "locale-tree")
-        ldlibdir = base_path_join(treedir, d.getVar("base_libdir", True))
-        path = d.getVar("PATH", True)
+        treedir = base_path_join(d.getVar("WORKDIR"), "locale-tree")
+        ldlibdir = base_path_join(treedir, d.getVar("base_libdir"))
+        path = d.getVar("PATH")
         i18npath = base_path_join(treedir, datadir, "i18n")
         gconvpath = base_path_join(treedir, "iconvdata")
         outputpath = base_path_join(treedir, binary_locales_dir)
 
-        use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", True) or "0"
+        use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF") or "0"
         if use_cross_localedef == "1":
-            target_arch = d.getVar('TARGET_ARCH', True)
+            target_arch = d.getVar('TARGET_ARCH')
             locale_arch_options = { \
                 "arm": " --uint32-align=4 --little-endian ", \
                 "armeb": " --uint32-align=4 --big-endian ", \
@@ -279,7 +279,7 @@ python package_do_split_gconvs () {
                 --inputfile=%s/i18n/locales/%s --charmap=%s %s" \
                 % (treedir, datadir, locale, encoding, name)
 
-            qemu_options = d.getVar('QEMU_OPTIONS', True)
+            qemu_options = d.getVar('QEMU_OPTIONS')
 
             cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \
                 -E LD_LIBRARY_PATH=%s %s %s/bin/localedef %s" % \
@@ -292,7 +292,7 @@ python package_do_split_gconvs () {
     def output_locale(name, locale, encoding):
         pkgname = d.getVar('MLPREFIX', False) + 'locale-base-' + legitimize_package_name(name)
         d.setVar('ALLOW_EMPTY_%s' % pkgname, '1')
-        d.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES', True)))
+        d.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES')))
         rprovides = ' %svirtual-locale-%s' % (mlprefix, legitimize_package_name(name))
         m = re.match("(.*)_(.*)", name)
         if m:
@@ -311,8 +311,8 @@ python package_do_split_gconvs () {
311 bb.note("preparing tree for binary locale generation") 311 bb.note("preparing tree for binary locale generation")
312 bb.build.exec_func("do_prep_locale_tree", d) 312 bb.build.exec_func("do_prep_locale_tree", d)
313 313
314 utf8_only = int(d.getVar('LOCALE_UTF8_ONLY', True) or 0) 314 utf8_only = int(d.getVar('LOCALE_UTF8_ONLY') or 0)
315 utf8_is_default = int(d.getVar('LOCALE_UTF8_IS_DEFAULT', True) or 0) 315 utf8_is_default = int(d.getVar('LOCALE_UTF8_IS_DEFAULT') or 0)
316 316
317 encodings = {} 317 encodings = {}
318 for locale in to_generate: 318 for locale in to_generate:
@@ -344,7 +344,7 @@ python package_do_split_gconvs () {
             d.appendVar('RDEPENDS_%s' % metapkg, ' ' + pkg)
 
     if use_bin == "compile":
-        makefile = base_path_join(d.getVar("WORKDIR", True), "locale-tree", "Makefile")
+        makefile = base_path_join(d.getVar("WORKDIR"), "locale-tree", "Makefile")
         m = open(makefile, "w")
         m.write("all: %s\n\n" % " ".join(commands.keys()))
         for cmd in commands:
@@ -358,7 +358,7 @@ python package_do_split_gconvs () {
         bb.build.exec_func("do_collect_bins_from_locale_tree", d)
 
     if use_bin in ('compile', 'precompiled'):
-        lcsplit = d.getVar('GLIBC_SPLIT_LC_PACKAGES', True)
+        lcsplit = d.getVar('GLIBC_SPLIT_LC_PACKAGES')
         if lcsplit and int(lcsplit):
             do_split_packages(d, binary_locales_dir, file_regex='^(.*/LC_\w+)', \
                 output_pattern=bpn+'-binary-localedata-%s', \
diff --git a/meta/classes/license.bbclass b/meta/classes/license.bbclass
index afcfbfc038..c5d1204ca1 100644
--- a/meta/classes/license.bbclass
+++ b/meta/classes/license.bbclass
@@ -37,13 +37,13 @@ python license_create_manifest() {
     import oe.packagedata
     from oe.rootfs import image_list_installed_packages
 
-    build_images_from_feeds = d.getVar('BUILD_IMAGES_FROM_FEEDS', True)
+    build_images_from_feeds = d.getVar('BUILD_IMAGES_FROM_FEEDS')
     if build_images_from_feeds == "1":
         return 0
 
     pkg_dic = {}
     for pkg in sorted(image_list_installed_packages(d)):
-        pkg_info = os.path.join(d.getVar('PKGDATA_DIR', True),
+        pkg_info = os.path.join(d.getVar('PKGDATA_DIR'),
                                 'runtime-reverse', pkg)
         pkg_name = os.path.basename(os.readlink(pkg_info))
 
@@ -52,15 +52,15 @@ python license_create_manifest() {
             pkg_lic_name = "LICENSE_" + pkg_name
             pkg_dic[pkg_name]["LICENSE"] = pkg_dic[pkg_name][pkg_lic_name]
 
-    rootfs_license_manifest = os.path.join(d.getVar('LICENSE_DIRECTORY', True),
-                            d.getVar('IMAGE_NAME', True), 'license.manifest')
+    rootfs_license_manifest = os.path.join(d.getVar('LICENSE_DIRECTORY'),
+                            d.getVar('IMAGE_NAME'), 'license.manifest')
     write_license_files(d, rootfs_license_manifest, pkg_dic)
 }
 
 def write_license_files(d, license_manifest, pkg_dic):
     import re
 
-    bad_licenses = (d.getVar("INCOMPATIBLE_LICENSE", True) or "").split()
+    bad_licenses = (d.getVar("INCOMPATIBLE_LICENSE") or "").split()
     bad_licenses = map(lambda l: canonical_license(d, l), bad_licenses)
     bad_licenses = expand_wildcard_licenses(d, bad_licenses)
 
@@ -72,7 +72,7 @@ def write_license_files(d, license_manifest, pkg_dic):
                         oe.license.manifest_licenses(pkg_dic[pkg]["LICENSE"],
                         bad_licenses, canonical_license, d)
             except oe.license.LicenseError as exc:
-                bb.fatal('%s: %s' % (d.getVar('P', True), exc))
+                bb.fatal('%s: %s' % (d.getVar('P'), exc))
         else:
             pkg_dic[pkg]["LICENSES"] = re.sub('[|&()*]', ' ', pkg_dic[pkg]["LICENSE"])
             pkg_dic[pkg]["LICENSES"] = re.sub('  *', ' ', pkg_dic[pkg]["LICENSES"])
@@ -98,7 +98,7 @@ def write_license_files(d, license_manifest, pkg_dic):
             license_file.write("FILES: %s\n\n" % pkg_dic[pkg]["FILES"])
 
             for lic in pkg_dic[pkg]["LICENSES"]:
-                lic_file = os.path.join(d.getVar('LICENSE_DIRECTORY', True),
+                lic_file = os.path.join(d.getVar('LICENSE_DIRECTORY'),
                                         pkg_dic[pkg]["PN"], "generic_%s" %
                                         re.sub('\+', '', lic))
                 # add explicity avoid of CLOSED license because isn't generic
@@ -114,8 +114,8 @@ def write_license_files(d, license_manifest, pkg_dic):
     # - Just copy the manifest
     # - Copy the manifest and the license directories
     # With both options set we see a .5 M increase in core-image-minimal
-    copy_lic_manifest = d.getVar('COPY_LIC_MANIFEST', True)
-    copy_lic_dirs = d.getVar('COPY_LIC_DIRS', True)
+    copy_lic_manifest = d.getVar('COPY_LIC_MANIFEST')
+    copy_lic_dirs = d.getVar('COPY_LIC_DIRS')
     if copy_lic_manifest == "1":
         rootfs_license_dir = os.path.join(d.getVar('IMAGE_ROOTFS', 'True'),
                                 'usr', 'share', 'common-licenses')
@@ -129,7 +129,7 @@ def write_license_files(d, license_manifest, pkg_dic):
             for pkg in sorted(pkg_dic):
                 pkg_rootfs_license_dir = os.path.join(rootfs_license_dir, pkg)
                 bb.utils.mkdirhier(pkg_rootfs_license_dir)
-                pkg_license_dir = os.path.join(d.getVar('LICENSE_DIRECTORY', True),
+                pkg_license_dir = os.path.join(d.getVar('LICENSE_DIRECTORY'),
                                                pkg_dic[pkg]["PN"])
                 licenses = os.listdir(pkg_license_dir)
                 for lic in licenses:
@@ -166,7 +166,7 @@ def license_deployed_manifest(d):
 
     dep_dic = {}
     man_dic = {}
-    lic_dir = d.getVar("LICENSE_DIRECTORY", True)
+    lic_dir = d.getVar("LICENSE_DIRECTORY")
 
     dep_dic = get_deployed_dependencies(d)
     for dep in dep_dic.keys():
@@ -181,8 +181,8 @@ def license_deployed_manifest(d):
                 key,val = line.split(": ", 1)
                 man_dic[dep][key] = val[:-1]
 
-    lic_manifest_dir = os.path.join(d.getVar('LICENSE_DIRECTORY', True),
-                                    d.getVar('IMAGE_NAME', True))
+    lic_manifest_dir = os.path.join(d.getVar('LICENSE_DIRECTORY'),
+                                    d.getVar('IMAGE_NAME'))
     bb.utils.mkdirhier(lic_manifest_dir)
     image_license_manifest = os.path.join(lic_manifest_dir, 'image_license.manifest')
     write_license_files(d, image_license_manifest, man_dic)
@@ -202,7 +202,7 @@ def get_deployed_dependencies(d):
     depends = list(set([dep[0] for dep
                     in list(taskdata.values())
                     if not dep[0].endswith("-native")]))
-    extra_depends = d.getVar("EXTRA_IMAGEDEPENDS", True)
+    extra_depends = d.getVar("EXTRA_IMAGEDEPENDS")
     boot_depends = get_boot_dependencies(d)
     depends.extend(extra_depends.split())
     depends.extend(boot_depends)
@@ -212,13 +212,13 @@ def get_deployed_dependencies(d):
     # the SSTATE_MANIFESTS for "deploy" task.
     # The manifest file name contains the arch. Because we are not running
     # in the recipe context it is necessary to check every arch used.
-    sstate_manifest_dir = d.getVar("SSTATE_MANIFESTS", True)
-    sstate_archs = d.getVar("SSTATE_ARCHS", True)
-    extra_archs = d.getVar("PACKAGE_EXTRA_ARCHS", True)
+    sstate_manifest_dir = d.getVar("SSTATE_MANIFESTS")
+    sstate_archs = d.getVar("SSTATE_ARCHS")
+    extra_archs = d.getVar("PACKAGE_EXTRA_ARCHS")
     archs = list(set(("%s %s" % (sstate_archs, extra_archs)).split()))
     for dep in depends:
         # Some recipes have an arch on their own, so we try that first.
-        special_arch = d.getVar("PACKAGE_ARCH_pn-%s" % dep, True)
+        special_arch = d.getVar("PACKAGE_ARCH_pn-%s" % dep)
         if special_arch:
             sstate_manifest_file = os.path.join(sstate_manifest_dir,
                 "manifest-%s-%s.deploy" % (special_arch, dep))
@@ -254,7 +254,7 @@ def get_boot_dependencies(d):
                     in boot_depends_string.split()
                     if not dep.split(":")[0].endswith("-native")]
     for dep in boot_depends:
-        info_file = os.path.join(d.getVar("LICENSE_DIRECTORY", True),
+        info_file = os.path.join(d.getVar("LICENSE_DIRECTORY"),
                 dep, "recipeinfo")
         # If the recipe and dependency name is the same
         if os.path.exists(info_file):
@@ -265,7 +265,7 @@ def get_boot_dependencies(d):
             # The fifth field contains what the task provides
             if dep in taskdep[4]:
                 info_file = os.path.join(
-                    d.getVar("LICENSE_DIRECTORY", True),
+                    d.getVar("LICENSE_DIRECTORY"),
                     taskdep[0], "recipeinfo")
                 if os.path.exists(info_file):
                     depends.append(taskdep[0])
@@ -295,7 +295,7 @@ python do_populate_lic() {
     lic_files_paths = find_license_files(d)
 
     # The base directory we wrangle licenses to
-    destdir = os.path.join(d.getVar('LICSSTATEDIR', True), d.getVar('PN', True))
+    destdir = os.path.join(d.getVar('LICSSTATEDIR'), d.getVar('PN'))
     copy_license_files(lic_files_paths, destdir)
     info = get_recipe_info(d)
     with open(os.path.join(destdir, "recipeinfo"), "w") as f:
@@ -306,11 +306,11 @@ python do_populate_lic() {
 # it would be better to copy them in do_install_append, but find_license_filesa is python
 python perform_packagecopy_prepend () {
     enabled = oe.data.typed_value('LICENSE_CREATE_PACKAGE', d)
-    if d.getVar('CLASSOVERRIDE', True) == 'class-target' and enabled:
+    if d.getVar('CLASSOVERRIDE') == 'class-target' and enabled:
         lic_files_paths = find_license_files(d)
 
         # LICENSE_FILES_DIRECTORY starts with '/' so os.path.join cannot be used to join D and LICENSE_FILES_DIRECTORY
-        destdir = d.getVar('D', True) + os.path.join(d.getVar('LICENSE_FILES_DIRECTORY', True), d.getVar('PN', True))
+        destdir = d.getVar('D') + os.path.join(d.getVar('LICENSE_FILES_DIRECTORY'), d.getVar('PN'))
         copy_license_files(lic_files_paths, destdir)
         add_package_and_files(d)
 }
@@ -318,15 +318,15 @@ perform_packagecopy[vardeps] += "LICENSE_CREATE_PACKAGE"
 
 def get_recipe_info(d):
     info = {}
-    info["PV"] = d.getVar("PV", True)
-    info["PR"] = d.getVar("PR", True)
-    info["LICENSE"] = d.getVar("LICENSE", True)
+    info["PV"] = d.getVar("PV")
+    info["PR"] = d.getVar("PR")
+    info["LICENSE"] = d.getVar("LICENSE")
     return info
 
 def add_package_and_files(d):
-    packages = d.getVar('PACKAGES', True)
-    files = d.getVar('LICENSE_FILES_DIRECTORY', True)
-    pn = d.getVar('PN', True)
+    packages = d.getVar('PACKAGES')
+    files = d.getVar('LICENSE_FILES_DIRECTORY')
+    pn = d.getVar('PN')
     pn_lic = "%s%s" % (pn, d.getVar('LICENSE_PACKAGE_SUFFIX', False))
     if pn_lic in packages:
         bb.warn("%s package already existed in %s." % (pn_lic, pn))
@@ -334,7 +334,7 @@ def add_package_and_files(d):
         # first in PACKAGES to be sure that nothing else gets LICENSE_FILES_DIRECTORY
         d.setVar('PACKAGES', "%s %s" % (pn_lic, packages))
         d.setVar('FILES_' + pn_lic, files)
-        rrecommends_pn = d.getVar('RRECOMMENDS_' + pn, True)
+        rrecommends_pn = d.getVar('RRECOMMENDS_' + pn)
         if rrecommends_pn:
             d.setVar('RRECOMMENDS_' + pn, "%s %s" % (pn_lic, rrecommends_pn))
         else:
@@ -390,12 +390,12 @@ def find_license_files(d):
     from collections import defaultdict, OrderedDict
 
     # All the license files for the package
-    lic_files = d.getVar('LIC_FILES_CHKSUM', True) or ""
-    pn = d.getVar('PN', True)
+    lic_files = d.getVar('LIC_FILES_CHKSUM') or ""
+    pn = d.getVar('PN')
     # The license files are located in S/LIC_FILE_CHECKSUM.
-    srcdir = d.getVar('S', True)
+    srcdir = d.getVar('S')
     # Directory we store the generic licenses as set in the distro configuration
-    generic_directory = d.getVar('COMMON_LICENSE_DIR', True)
+    generic_directory = d.getVar('COMMON_LICENSE_DIR')
     # List of basename, path tuples
     lic_files_paths = []
     # Entries from LIC_FILES_CHKSUM
@@ -403,7 +403,7 @@ def find_license_files(d):
     license_source_dirs = []
     license_source_dirs.append(generic_directory)
     try:
-        additional_lic_dirs = d.getVar('LICENSE_PATH', True).split()
+        additional_lic_dirs = d.getVar('LICENSE_PATH').split()
         for lic_dir in additional_lic_dirs:
             license_source_dirs.append(lic_dir)
     except:
@@ -473,18 +473,18 @@ def find_license_files(d):
         try:
             (type, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
         except bb.fetch.MalformedUrl:
-            bb.fatal("%s: LIC_FILES_CHKSUM contains an invalid URL: %s" % (d.getVar('PF', True), url))
+            bb.fatal("%s: LIC_FILES_CHKSUM contains an invalid URL: %s" % (d.getVar('PF'), url))
         # We want the license filename and path
         chksum = parm['md5'] if 'md5' in parm else parm['sha256']
         lic_chksums[path] = chksum
 
     v = FindVisitor()
     try:
-        v.visit_string(d.getVar('LICENSE', True))
+        v.visit_string(d.getVar('LICENSE'))
     except oe.license.InvalidLicense as exc:
-        bb.fatal('%s: %s' % (d.getVar('PF', True), exc))
+        bb.fatal('%s: %s' % (d.getVar('PF'), exc))
     except SyntaxError:
-        bb.warn("%s: Failed to parse it's LICENSE field." % (d.getVar('PF', True)))
+        bb.warn("%s: Failed to parse it's LICENSE field." % (d.getVar('PF')))
 
     # Add files from LIC_FILES_CHKSUM to list of license files
     lic_chksum_paths = defaultdict(OrderedDict)
@@ -542,7 +542,7 @@ def expand_wildcard_licenses(d, wildcard_licenses):
 
 def incompatible_license_contains(license, truevalue, falsevalue, d):
     license = canonical_license(d, license)
-    bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE', True) or "").split()
+    bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()
     bad_licenses = expand_wildcard_licenses(d, bad_licenses)
     return truevalue if license in bad_licenses else falsevalue
 
@@ -553,9 +553,9 @@ def incompatible_license(d, dont_want_licenses, package=None):
     as canonical (SPDX) names.
     """
     import oe.license
-    license = d.getVar("LICENSE_%s" % package, True) if package else None
+    license = d.getVar("LICENSE_%s" % package) if package else None
     if not license:
-        license = d.getVar('LICENSE', True)
+        license = d.getVar('LICENSE')
 
     # Handles an "or" or two license sets provided by
     # flattened_licenses(), pick one that works if possible.
@@ -566,7 +566,7 @@ def incompatible_license(d, dont_want_licenses, package=None):
     try:
         licenses = oe.license.flattened_licenses(license, choose_lic_set)
     except oe.license.LicenseError as exc:
-        bb.fatal('%s: %s' % (d.getVar('P', True), exc))
+        bb.fatal('%s: %s' % (d.getVar('P'), exc))
     return any(not oe.license.license_ok(canonical_license(d, l), \
         dont_want_licenses) for l in licenses)
 
@@ -614,16 +614,16 @@ def check_license_flags(d):
 
     def all_license_flags_match(license_flags, whitelist):
         """ Return first unmatched flag, None if all flags match """
-        pn = d.getVar('PN', True)
+        pn = d.getVar('PN')
         split_whitelist = whitelist.split()
         for flag in license_flags.split():
             if not license_flag_matches(flag, split_whitelist, pn):
                 return flag
         return None
 
-    license_flags = d.getVar('LICENSE_FLAGS', True)
+    license_flags = d.getVar('LICENSE_FLAGS')
     if license_flags:
-        whitelist = d.getVar('LICENSE_FLAGS_WHITELIST', True)
+        whitelist = d.getVar('LICENSE_FLAGS_WHITELIST')
         if not whitelist:
             return license_flags
         unmatched_flag = all_license_flags_match(license_flags, whitelist)
@@ -637,8 +637,8 @@ def check_license_format(d):
     Validate operators in LICENSES.
     No spaces are allowed between LICENSES.
     """
-    pn = d.getVar('PN', True)
-    licenses = d.getVar('LICENSE', True)
+    pn = d.getVar('PN')
+    licenses = d.getVar('LICENSE')
     from oe.license import license_operator, license_operator_chars, license_pattern
 
     elements = list(filter(lambda x: x.strip(), license_operator.split(licenses)))
diff --git a/meta/classes/live-vm-common.bbclass b/meta/classes/live-vm-common.bbclass
index 734697f9e6..27b137dec6 100644
--- a/meta/classes/live-vm-common.bbclass
+++ b/meta/classes/live-vm-common.bbclass
@@ -4,11 +4,11 @@ def set_live_vm_vars(d, suffix):
     vars = ['GRUB_CFG', 'SYSLINUX_CFG', 'ROOT', 'LABELS', 'INITRD']
     for var in vars:
         var_with_suffix = var + '_' + suffix
-        if d.getVar(var, True):
+        if d.getVar(var):
             bb.warn('Found potential conflicted var %s, please use %s rather than %s' % \
                     (var, var_with_suffix, var))
-        elif d.getVar(var_with_suffix, True):
-            d.setVar(var, d.getVar(var_with_suffix, True))
+        elif d.getVar(var_with_suffix):
+            d.setVar(var, d.getVar(var_with_suffix))
 
 
 EFI = "${@bb.utils.contains("MACHINE_FEATURES", "efi", "1", "0", d)}"
@@ -25,7 +25,7 @@ def pcbios(d):
     return pcbios
 
 PCBIOS = "${@pcbios(d)}"
-PCBIOS_CLASS = "${@['','syslinux'][d.getVar('PCBIOS', True) == '1']}"
+PCBIOS_CLASS = "${@['','syslinux'][d.getVar('PCBIOS') == '1']}"
 
 inherit ${EFI_CLASS}
 inherit ${PCBIOS_CLASS}
diff --git a/meta/classes/metadata_scm.bbclass b/meta/classes/metadata_scm.bbclass
index 2e6fac209a..fa791f04c4 100644
--- a/meta/classes/metadata_scm.bbclass
+++ b/meta/classes/metadata_scm.bbclass
@@ -26,7 +26,7 @@ def base_detect_branch(d):
26 return "<unknown>" 26 return "<unknown>"
27 27
28def base_get_scmbasepath(d): 28def base_get_scmbasepath(d):
29 return os.path.join(d.getVar('COREBASE', True), 'meta') 29 return os.path.join(d.getVar('COREBASE'), 'meta')
30 30
31def base_get_metadata_monotone_branch(path, d): 31def base_get_metadata_monotone_branch(path, d):
32 monotone_branch = "<unknown>" 32 monotone_branch = "<unknown>"
diff --git a/meta/classes/migrate_localcount.bbclass b/meta/classes/migrate_localcount.bbclass
index aa0df8bb76..810a541316 100644
--- a/meta/classes/migrate_localcount.bbclass
+++ b/meta/classes/migrate_localcount.bbclass
@@ -6,12 +6,12 @@ python migrate_localcount_handler () {
     if not e.data:
         return
 
-    pv = e.data.getVar('PV', True)
+    pv = e.data.getVar('PV')
     if not 'AUTOINC' in pv:
         return
 
     localcounts = bb.persist_data.persist('BB_URI_LOCALCOUNT', e.data)
-    pn = e.data.getVar('PN', True)
+    pn = e.data.getVar('PN')
     revs = localcounts.get_by_pattern('%%-%s_rev' % pn)
     counts = localcounts.get_by_pattern('%%-%s_count' % pn)
     if not revs or not counts:
@@ -21,10 +21,10 @@ python migrate_localcount_handler () {
21 bb.warn("The number of revs and localcounts don't match in %s" % pn) 21 bb.warn("The number of revs and localcounts don't match in %s" % pn)
22 return 22 return
23 23
24 version = e.data.getVar('PRAUTOINX', True) 24 version = e.data.getVar('PRAUTOINX')
25 srcrev = bb.fetch2.get_srcrev(e.data) 25 srcrev = bb.fetch2.get_srcrev(e.data)
26 base_ver = 'AUTOINC-%s' % version[:version.find(srcrev)] 26 base_ver = 'AUTOINC-%s' % version[:version.find(srcrev)]
27 pkgarch = e.data.getVar('PACKAGE_ARCH', True) 27 pkgarch = e.data.getVar('PACKAGE_ARCH')
28 value = max(int(count) for count in counts) 28 value = max(int(count) for count in counts)
29 29
30 if len(revs) == 1: 30 if len(revs) == 1:
@@ -33,8 +33,8 @@ python migrate_localcount_handler () {
     else:
         value += 1
 
-    bb.utils.mkdirhier(e.data.getVar('PRSERV_DUMPDIR', True))
-    df = e.data.getVar('LOCALCOUNT_DUMPFILE', True)
+    bb.utils.mkdirhier(e.data.getVar('PRSERV_DUMPDIR'))
+    df = e.data.getVar('LOCALCOUNT_DUMPFILE')
     flock = bb.utils.lockfile("%s.lock" % df)
     with open(df, 'a') as fd:
         fd.write('PRAUTO$%s$%s$%s = "%s"\n' %
diff --git a/meta/classes/mime.bbclass b/meta/classes/mime.bbclass
index 721c73fcff..6cd59af391 100644
--- a/meta/classes/mime.bbclass
+++ b/meta/classes/mime.bbclass
@@ -28,8 +28,8 @@ fi
 
 python populate_packages_append () {
     import re
-    packages = d.getVar('PACKAGES', True).split()
-    pkgdest = d.getVar('PKGDEST', True)
+    packages = d.getVar('PACKAGES').split()
+    pkgdest = d.getVar('PKGDEST')
 
     for pkg in packages:
         mime_dir = '%s/%s/usr/share/mime/packages' % (pkgdest, pkg)
@@ -41,15 +41,15 @@ python populate_packages_append () {
                     mimes.append(f)
         if mimes:
             bb.note("adding mime postinst and postrm scripts to %s" % pkg)
-            postinst = d.getVar('pkg_postinst_%s' % pkg, True)
+            postinst = d.getVar('pkg_postinst_%s' % pkg)
             if not postinst:
                 postinst = '#!/bin/sh\n'
-            postinst += d.getVar('mime_postinst', True)
+            postinst += d.getVar('mime_postinst')
             d.setVar('pkg_postinst_%s' % pkg, postinst)
-            postrm = d.getVar('pkg_postrm_%s' % pkg, True)
+            postrm = d.getVar('pkg_postrm_%s' % pkg)
             if not postrm:
                 postrm = '#!/bin/sh\n'
-            postrm += d.getVar('mime_postrm', True)
+            postrm += d.getVar('mime_postrm')
             d.setVar('pkg_postrm_%s' % pkg, postrm)
             bb.note("adding shared-mime-info-data dependency to %s" % pkg)
             d.appendVar('RDEPENDS_' + pkg, " shared-mime-info-data")
diff --git a/meta/classes/module.bbclass b/meta/classes/module.bbclass
index 68e3d341a3..95d42da9e7 100644
--- a/meta/classes/module.bbclass
+++ b/meta/classes/module.bbclass
@@ -9,7 +9,7 @@ EXTRA_OEMAKE += "KERNEL_SRC=${STAGING_KERNEL_DIR}"
 MODULES_INSTALL_TARGET ?= "modules_install"
 
 python __anonymous () {
-    depends = d.getVar('DEPENDS', True)
+    depends = d.getVar('DEPENDS')
     extra_symbols = []
     for dep in depends.split():
         if dep.startswith("kernel-module-"):
diff --git a/meta/classes/multilib.bbclass b/meta/classes/multilib.bbclass
index d5a31287a8..498f7f5735 100644
--- a/meta/classes/multilib.bbclass
+++ b/meta/classes/multilib.bbclass
@@ -1,20 +1,20 @@
 python multilib_virtclass_handler () {
-    cls = e.data.getVar("BBEXTENDCURR", True)
-    variant = e.data.getVar("BBEXTENDVARIANT", True)
+    cls = e.data.getVar("BBEXTENDCURR")
+    variant = e.data.getVar("BBEXTENDVARIANT")
     if cls != "multilib" or not variant:
         return
 
-    e.data.setVar('STAGING_KERNEL_DIR', e.data.getVar('STAGING_KERNEL_DIR', True))
+    e.data.setVar('STAGING_KERNEL_DIR', e.data.getVar('STAGING_KERNEL_DIR'))
 
     # There should only be one kernel in multilib configs
     # We also skip multilib setup for module packages.
-    provides = (e.data.getVar("PROVIDES", True) or "").split()
+    provides = (e.data.getVar("PROVIDES") or "").split()
     if "virtual/kernel" in provides or bb.data.inherits_class('module-base', e.data):
         raise bb.parse.SkipPackage("We shouldn't have multilib variants for the kernel")
 
-    save_var_name=e.data.getVar("MULTILIB_SAVE_VARNAME", True) or ""
+    save_var_name=e.data.getVar("MULTILIB_SAVE_VARNAME") or ""
     for name in save_var_name.split():
-        val=e.data.getVar(name, True)
+        val=e.data.getVar(name)
         if val:
             e.data.setVar(name + "_MULTILIB_ORIGINAL", val)
 
@@ -26,7 +26,7 @@ python multilib_virtclass_handler () {
     if bb.data.inherits_class('image', e.data):
         e.data.setVar("MLPREFIX", variant + "-")
         e.data.setVar("PN", variant + "-" + e.data.getVar("PN", False))
-        e.data.setVar('SDKTARGETSYSROOT', e.data.getVar('SDKTARGETSYSROOT', True))
+        e.data.setVar('SDKTARGETSYSROOT', e.data.getVar('SDKTARGETSYSROOT'))
         target_vendor = e.data.getVar("TARGET_VENDOR_" + "virtclass-multilib-" + variant, False)
         if target_vendor:
             e.data.setVar("TARGET_VENDOR", target_vendor)
@@ -50,7 +50,7 @@ python multilib_virtclass_handler () {
 
 
     # Expand this since this won't work correctly once we set a multilib into place
-    e.data.setVar("ALL_MULTILIB_PACKAGE_ARCHS", e.data.getVar("ALL_MULTILIB_PACKAGE_ARCHS", True))
+    e.data.setVar("ALL_MULTILIB_PACKAGE_ARCHS", e.data.getVar("ALL_MULTILIB_PACKAGE_ARCHS"))
 
     override = ":virtclass-multilib-" + variant
 
@@ -60,7 +60,7 @@ python multilib_virtclass_handler () {
 
     # Expand the WHITELISTs with multilib prefix
     for whitelist in ["WHITELIST_GPL-3.0", "LGPLv2_WHITELIST_GPL-3.0"]:
-        pkgs = e.data.getVar(whitelist, True)
+        pkgs = e.data.getVar(whitelist)
         for pkg in pkgs.split():
             pkgs += " " + variant + "-" + pkg
         e.data.setVar(whitelist, pkgs)
@@ -78,7 +78,7 @@ multilib_virtclass_handler[eventmask] = "bb.event.RecipePreFinalise"
 STAGINGCC_prepend = "${BBEXTENDVARIANT}-"
 
 python __anonymous () {
-    variant = d.getVar("BBEXTENDVARIANT", True)
+    variant = d.getVar("BBEXTENDVARIANT")
 
     import oe.classextend
 
@@ -88,7 +88,7 @@ python __anonymous () {
     clsextend.map_depends_variable("PACKAGE_INSTALL")
     clsextend.map_depends_variable("LINGUAS_INSTALL")
     clsextend.map_depends_variable("RDEPENDS")
-    pinstall = d.getVar("LINGUAS_INSTALL", True) + " " + d.getVar("PACKAGE_INSTALL", True)
+    pinstall = d.getVar("LINGUAS_INSTALL") + " " + d.getVar("PACKAGE_INSTALL")
     d.setVar("PACKAGE_INSTALL", pinstall)
     d.setVar("LINGUAS_INSTALL", "")
     # FIXME, we need to map this to something, not delete it!
@@ -104,7 +104,7 @@ python __anonymous () {
         return
 
     clsextend.rename_packages()
-    clsextend.rename_package_variables((d.getVar("PACKAGEVARS", True) or "").split())
+    clsextend.rename_package_variables((d.getVar("PACKAGEVARS") or "").split())
 
     clsextend.map_packagevars()
     clsextend.map_regexp_variable("PACKAGES_DYNAMIC")
@@ -119,7 +119,7 @@ PACKAGEFUNCS_append = " do_package_qa_multilib"
 python do_package_qa_multilib() {
 
     def check_mlprefix(pkg, var, mlprefix):
-        values = bb.utils.explode_deps(d.getVar('%s_%s' % (var, pkg), True) or d.getVar(var, True) or "")
+        values = bb.utils.explode_deps(d.getVar('%s_%s' % (var, pkg), True) or d.getVar(var) or "")
         candidates = []
         for i in values:
             if i.startswith('virtual/'):
@@ -130,14 +130,14 @@ python do_package_qa_multilib() {
                 candidates.append(i)
         if len(candidates) > 0:
             msg = "%s package %s - suspicious values '%s' in %s" \
-                   % (d.getVar('PN', True), pkg, ' '.join(candidates), var)
+                   % (d.getVar('PN'), pkg, ' '.join(candidates), var)
             package_qa_handle_error("multilib", msg, d)
 
-    ml = d.getVar('MLPREFIX', True)
+    ml = d.getVar('MLPREFIX')
     if not ml:
         return
 
-    packages = d.getVar('PACKAGES', True)
+    packages = d.getVar('PACKAGES')
     for pkg in packages.split():
         check_mlprefix(pkg, 'RDEPENDS', ml)
         check_mlprefix(pkg, 'RPROVIDES', ml)
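One call above, d.getVar('%s_%s' % (var, pkg), True), keeps its second
argument: a first argument that itself contains commas or parentheses falls
outside a simple one-line search-and-replace. A sketch of such a substitution
(the pattern and sample line are illustrative, not taken verbatim from the
conversion):

    import re
    pattern = re.compile(r"getVar ?\(( ?[^,()]*), True\)")
    line = "    pv = e.data.getVar('PV', True)"
    print(pattern.sub(r"getVar(\1)", line))
    # -> "    pv = e.data.getVar('PV')"
    # A call such as d.getVar('%s_%s' % (var, pkg), True) has ',' and '(' in
    # its first argument, so the pattern does not match and True survives.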
diff --git a/meta/classes/multilib_global.bbclass b/meta/classes/multilib_global.bbclass
index 11ae2681f0..aba0371648 100644
--- a/meta/classes/multilib_global.bbclass
+++ b/meta/classes/multilib_global.bbclass
@@ -1,7 +1,7 @@
 def preferred_ml_updates(d):
     # If any PREFERRED_PROVIDER or PREFERRED_VERSION are set,
     # we need to mirror these variables in the multilib case;
-    multilibs = d.getVar('MULTILIBS', True) or ""
+    multilibs = d.getVar('MULTILIBS') or ""
     if not multilibs:
         return
 
@@ -102,7 +102,7 @@ def preferred_ml_updates(d):
         prov = prov.replace("virtual/", "")
         return "virtual/" + prefix + "-" + prov
 
-    mp = (d.getVar("MULTI_PROVIDER_WHITELIST", True) or "").split()
+    mp = (d.getVar("MULTI_PROVIDER_WHITELIST") or "").split()
     extramp = []
     for p in mp:
         if p.endswith("-native") or "-crosssdk-" in p or p.startswith(("nativesdk-", "virtual/nativesdk-")) or 'cross-canadian' in p:
@@ -111,14 +111,14 @@ def preferred_ml_updates(d):
             extramp.append(translate_provide(pref, p))
     d.setVar("MULTI_PROVIDER_WHITELIST", " ".join(mp + extramp))
 
-    abisafe = (d.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE", True) or "").split()
+    abisafe = (d.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
     extras = []
     for p in prefixes:
         for a in abisafe:
             extras.append(p + "-" + a)
     d.appendVar("SIGGEN_EXCLUDERECIPES_ABISAFE", " " + " ".join(extras))
 
-    siggen_exclude = (d.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", True) or "").split()
+    siggen_exclude = (d.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
     extras = []
     for p in prefixes:
         for a in siggen_exclude:
@@ -128,7 +128,7 @@ def preferred_ml_updates(d):
 
 python multilib_virtclass_handler_vendor () {
     if isinstance(e, bb.event.ConfigParsed):
-        for v in e.data.getVar("MULTILIB_VARIANTS", True).split():
+        for v in e.data.getVar("MULTILIB_VARIANTS").split():
             if e.data.getVar("TARGET_VENDOR_virtclass-multilib-" + v, False) is None:
                 e.data.setVar("TARGET_VENDOR_virtclass-multilib-" + v, e.data.getVar("TARGET_VENDOR", False) + "ml" + v)
         preferred_ml_updates(e.data)
@@ -140,14 +140,14 @@ python multilib_virtclass_handler_global () {
     if not e.data:
         return
 
-    variant = e.data.getVar("BBEXTENDVARIANT", True)
+    variant = e.data.getVar("BBEXTENDVARIANT")
 
     if isinstance(e, bb.event.RecipeParsed) and not variant:
         if bb.data.inherits_class('kernel', e.data) or \
            bb.data.inherits_class('module-base', e.data) or \
            (bb.data.inherits_class('allarch', e.data) and\
            not bb.data.inherits_class('packagegroup', e.data)):
-            variants = (e.data.getVar("MULTILIB_VARIANTS", True) or "").split()
+            variants = (e.data.getVar("MULTILIB_VARIANTS") or "").split()
 
             import oe.classextend
             clsextends = []
@@ -155,21 +155,21 @@ python multilib_virtclass_handler_global () {
                 clsextends.append(oe.classextend.ClassExtender(variant, e.data))
 
             # Process PROVIDES
-            origprovs = provs = e.data.getVar("PROVIDES", True) or ""
+            origprovs = provs = e.data.getVar("PROVIDES") or ""
             for clsextend in clsextends:
                 provs = provs + " " + clsextend.map_variable("PROVIDES", setvar=False)
             e.data.setVar("PROVIDES", provs)
 
             # Process RPROVIDES
-            origrprovs = rprovs = e.data.getVar("RPROVIDES", True) or ""
+            origrprovs = rprovs = e.data.getVar("RPROVIDES") or ""
             for clsextend in clsextends:
                 rprovs = rprovs + " " + clsextend.map_variable("RPROVIDES", setvar=False)
             if rprovs.strip():
                 e.data.setVar("RPROVIDES", rprovs)
 
             # Process RPROVIDES_${PN}...
-            for pkg in (e.data.getVar("PACKAGES", True) or "").split():
-                origrprovs = rprovs = e.data.getVar("RPROVIDES_%s" % pkg, True) or ""
+            for pkg in (e.data.getVar("PACKAGES") or "").split():
+                origrprovs = rprovs = e.data.getVar("RPROVIDES_%s" % pkg) or ""
                 for clsextend in clsextends:
                     rprovs = rprovs + " " + clsextend.map_variable("RPROVIDES_%s" % pkg, setvar=False)
                 rprovs = rprovs + " " + clsextend.extname + "-" + pkg
diff --git a/meta/classes/native.bbclass b/meta/classes/native.bbclass
index 143f8a9147..5592e1d1c1 100644
--- a/meta/classes/native.bbclass
+++ b/meta/classes/native.bbclass
@@ -116,18 +116,18 @@ MACHINEOVERRIDES = ""
 PATH_prepend = "${COREBASE}/scripts/native-intercept:"
 
 python native_virtclass_handler () {
-    classextend = e.data.getVar('BBCLASSEXTEND', True) or ""
+    classextend = e.data.getVar('BBCLASSEXTEND') or ""
     if "native" not in classextend:
         return
 
-    pn = e.data.getVar("PN", True)
+    pn = e.data.getVar("PN")
     if not pn.endswith("-native"):
         return
 
     def map_dependencies(varname, d, suffix = ""):
         if suffix:
             varname = varname + "_" + suffix
-        deps = d.getVar(varname, True)
+        deps = d.getVar(varname)
         if not deps:
             return
         deps = bb.utils.explode_deps(deps)
@@ -146,14 +146,14 @@ python native_virtclass_handler () {
     e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-native")
 
     map_dependencies("DEPENDS", e.data)
-    for pkg in [e.data.getVar("PN", True), "", "${PN}"]:
+    for pkg in [e.data.getVar("PN"), "", "${PN}"]:
         map_dependencies("RDEPENDS", e.data, pkg)
         map_dependencies("RRECOMMENDS", e.data, pkg)
         map_dependencies("RSUGGESTS", e.data, pkg)
         map_dependencies("RPROVIDES", e.data, pkg)
         map_dependencies("RREPLACES", e.data, pkg)
 
-    provides = e.data.getVar("PROVIDES", True)
+    provides = e.data.getVar("PROVIDES")
     nprovides = []
     for prov in provides.split():
         if prov.find(pn) != -1:
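Calls that already passed False, such as e.data.getVar("OVERRIDES", False)
above, are left untouched throughout the patch: they deliberately request the
raw, unexpanded string, which the new expand-by-default behaviour does not
affect. A short sketch (variable contents illustrative):

    d.setVar('OVERRIDES', 'linux:${TRANSLATED_TARGET_ARCH}')
    d.getVar('OVERRIDES', False)   # -> 'linux:${TRANSLATED_TARGET_ARCH}'
    d.getVar('OVERRIDES')          # -> expanded, e.g. 'linux:x86-64'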
diff --git a/meta/classes/nativesdk.bbclass b/meta/classes/nativesdk.bbclass
index 31dde4a90f..2ac8fd82ed 100644
--- a/meta/classes/nativesdk.bbclass
+++ b/meta/classes/nativesdk.bbclass
@@ -64,17 +64,17 @@ export PKG_CONFIG_DIR = "${STAGING_DIR_HOST}${libdir}/pkgconfig"
 export PKG_CONFIG_SYSROOT_DIR = "${STAGING_DIR_HOST}"
 
 python nativesdk_virtclass_handler () {
-    pn = e.data.getVar("PN", True)
+    pn = e.data.getVar("PN")
     if not (pn.endswith("-nativesdk") or pn.startswith("nativesdk-")):
         return
 
     e.data.setVar("MLPREFIX", "nativesdk-")
-    e.data.setVar("PN", "nativesdk-" + e.data.getVar("PN", True).replace("-nativesdk", "").replace("nativesdk-", ""))
+    e.data.setVar("PN", "nativesdk-" + e.data.getVar("PN").replace("-nativesdk", "").replace("nativesdk-", ""))
     e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-nativesdk")
 }
 
 python () {
-    pn = d.getVar("PN", True)
+    pn = d.getVar("PN")
     if not pn.startswith("nativesdk-"):
         return
 
@@ -82,7 +82,7 @@ python () {
 
     clsextend = oe.classextend.NativesdkClassExtender("nativesdk", d)
     clsextend.rename_packages()
-    clsextend.rename_package_variables((d.getVar("PACKAGEVARS", True) or "").split())
+    clsextend.rename_package_variables((d.getVar("PACKAGEVARS") or "").split())
 
     clsextend.map_depends_variable("DEPENDS")
     clsextend.map_packagevars()
diff --git a/meta/classes/npm.bbclass b/meta/classes/npm.bbclass
index 43228c57d3..1c778a7637 100644
--- a/meta/classes/npm.bbclass
+++ b/meta/classes/npm.bbclass
@@ -13,7 +13,7 @@ def npm_oe_arch_map(target_arch, d):
     elif re.match('arm64$', target_arch): return 'arm'
     return target_arch
 
-NPM_ARCH ?= "${@npm_oe_arch_map(d.getVar('TARGET_ARCH', True), d)}"
+NPM_ARCH ?= "${@npm_oe_arch_map(d.getVar('TARGET_ARCH'), d)}"
 
 npm_do_compile() {
 	# Copy in any additionally fetched modules
@@ -59,7 +59,7 @@ python populate_packages_prepend () {
             description = pdata.get('description', None)
             if description:
                 d.setVar('SUMMARY_%s' % expanded_pkgname, description.replace(u"\u2018", "'").replace(u"\u2019", "'"))
-    d.appendVar('RDEPENDS_%s' % d.getVar('PN', True), ' %s' % ' '.join(pkgnames).replace('_', '-'))
+    d.appendVar('RDEPENDS_%s' % d.getVar('PN'), ' %s' % ' '.join(pkgnames).replace('_', '-'))
 }
 
 FILES_${PN} += " \
diff --git a/meta/classes/oelint.bbclass b/meta/classes/oelint.bbclass
index c4febc2cfa..2589d34059 100644
--- a/meta/classes/oelint.bbclass
+++ b/meta/classes/oelint.bbclass
@@ -1,7 +1,7 @@
 addtask lint before do_build
 do_lint[nostamp] = "1"
 python do_lint() {
-    pkgname = d.getVar("PN", True)
+    pkgname = d.getVar("PN")
 
     ##############################
     # Test that DESCRIPTION exists
@@ -35,7 +35,7 @@ python do_lint() {
     # Check that all patches have Signed-off-by and Upstream-Status
     #
     srcuri = d.getVar("SRC_URI", False).split()
-    fpaths = (d.getVar('FILESPATH', True) or '').split(':')
+    fpaths = (d.getVar('FILESPATH') or '').split(':')
 
     def findPatch(patchname):
         for dir in fpaths:
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass
index a6f0a7a63d..27034653b8 100644
--- a/meta/classes/package.bbclass
+++ b/meta/classes/package.bbclass
@@ -120,7 +120,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
 
     """
 
-    dvar = d.getVar('PKGD', True)
+    dvar = d.getVar('PKGD')
     root = d.expand(root)
     output_pattern = d.expand(output_pattern)
     extra_depends = d.expand(extra_depends)
@@ -130,7 +130,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
     if not os.path.exists(dvar + root):
         return []
 
-    ml = d.getVar("MLPREFIX", True)
+    ml = d.getVar("MLPREFIX")
     if ml:
         if not output_pattern.startswith(ml):
             output_pattern = ml + output_pattern
@@ -145,7 +145,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
         extra_depends = " ".join(newdeps)
 
 
-    packages = d.getVar('PACKAGES', True).split()
+    packages = d.getVar('PACKAGES').split()
     split_packages = set()
 
     if postinst:
@@ -163,7 +163,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
             objs.append(relpath)
 
     if extra_depends == None:
-        extra_depends = d.getVar("PN", True)
+        extra_depends = d.getVar("PN")
 
     if not summary:
         summary = description
@@ -189,7 +189,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
                 packages = [pkg] + packages
             else:
                 packages.append(pkg)
-        oldfiles = d.getVar('FILES_' + pkg, True)
+        oldfiles = d.getVar('FILES_' + pkg)
         newfile = os.path.join(root, o)
         # These names will be passed through glob() so if the filename actually
         # contains * or ? (rare, but possible) we need to handle that specially
@@ -214,9 +214,9 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
             d.setVar('FILES_' + pkg, oldfiles + " " + newfile)
         if extra_depends != '':
             d.appendVar('RDEPENDS_' + pkg, ' ' + extra_depends)
-        if not d.getVar('DESCRIPTION_' + pkg, True):
+        if not d.getVar('DESCRIPTION_' + pkg):
            d.setVar('DESCRIPTION_' + pkg, description % on)
-        if not d.getVar('SUMMARY_' + pkg, True):
+        if not d.getVar('SUMMARY_' + pkg):
            d.setVar('SUMMARY_' + pkg, summary % on)
         if postinst:
             d.setVar('pkg_postinst_' + pkg, postinst)
@@ -231,9 +231,9 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
 PACKAGE_DEPENDS += "file-native"
 
 python () {
-    if d.getVar('PACKAGES', True) != '':
+    if d.getVar('PACKAGES') != '':
         deps = ""
-        for dep in (d.getVar('PACKAGE_DEPENDS', True) or "").split():
+        for dep in (d.getVar('PACKAGE_DEPENDS') or "").split():
             deps += " %s:do_populate_sysroot" % dep
         d.appendVarFlag('do_package', 'depends', deps)
 
@@ -286,14 +286,14 @@ def files_from_filevars(filevars):
 
 # Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files
 def get_conffiles(pkg, d):
-    pkgdest = d.getVar('PKGDEST', True)
+    pkgdest = d.getVar('PKGDEST')
     root = os.path.join(pkgdest, pkg)
     cwd = os.getcwd()
     os.chdir(root)
 
-    conffiles = d.getVar('CONFFILES_%s' % pkg, True);
+    conffiles = d.getVar('CONFFILES_%s' % pkg);
     if conffiles == None:
-        conffiles = d.getVar('CONFFILES', True)
+        conffiles = d.getVar('CONFFILES')
     if conffiles == None:
         conffiles = ""
     conffiles = conffiles.split()
@@ -318,7 +318,7 @@ def get_conffiles(pkg, d):
     return conf_list
 
 def checkbuildpath(file, d):
-    tmpdir = d.getVar('TMPDIR', True)
+    tmpdir = d.getVar('TMPDIR')
     with open(file) as f:
         file_content = f.read()
         if tmpdir in file_content:
@@ -335,8 +335,8 @@ def splitdebuginfo(file, debugfile, debugsrcdir, sourcefile, d):
 
     import stat
 
-    dvar = d.getVar('PKGD', True)
-    objcopy = d.getVar("OBJCOPY", True)
+    dvar = d.getVar('PKGD')
+    objcopy = d.getVar("OBJCOPY")
     debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit")
 
     # We ignore kernel modules, we don't generate debug info files.
@@ -382,11 +382,11 @@ def copydebugsources(debugsrcdir, d):
 
     sourcefile = d.expand("${WORKDIR}/debugsources.list")
     if debugsrcdir and os.path.isfile(sourcefile):
-        dvar = d.getVar('PKGD', True)
-        strip = d.getVar("STRIP", True)
-        objcopy = d.getVar("OBJCOPY", True)
+        dvar = d.getVar('PKGD')
+        strip = d.getVar("STRIP")
+        objcopy = d.getVar("OBJCOPY")
         debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit")
-        workdir = d.getVar("WORKDIR", True)
+        workdir = d.getVar("WORKDIR")
         workparentdir = os.path.dirname(os.path.dirname(workdir))
         workbasedir = os.path.basename(os.path.dirname(workdir)) + "/" + os.path.basename(workdir)
 
@@ -468,20 +468,20 @@ def get_package_additional_metadata (pkg_type, d):
     return "\n".join(metadata_fields).strip()
 
 def runtime_mapping_rename (varname, pkg, d):
-    #bb.note("%s before: %s" % (varname, d.getVar(varname, True)))
+    #bb.note("%s before: %s" % (varname, d.getVar(varname)))
 
     if bb.data.inherits_class('packagegroup', d):
         return
 
     new_depends = {}
-    deps = bb.utils.explode_dep_versions2(d.getVar(varname, True) or "")
+    deps = bb.utils.explode_dep_versions2(d.getVar(varname) or "")
     for depend in deps:
         new_depend = get_package_mapping(depend, pkg, d)
         new_depends[new_depend] = deps[depend]
 
     d.setVar(varname, bb.utils.join_deps(new_depends, commasep=False))
 
-    #bb.note("%s after: %s" % (varname, d.getVar(varname, True)))
+    #bb.note("%s after: %s" % (varname, d.getVar(varname)))
 
 #
 # Package functions suitable for inclusion in PACKAGEFUNCS
@@ -492,34 +492,34 @@ python package_get_auto_pr() {
     import re
 
     # Support per recipe PRSERV_HOST
-    pn = d.getVar('PN', True)
-    host = d.getVar("PRSERV_HOST_" + pn, True)
+    pn = d.getVar('PN')
+    host = d.getVar("PRSERV_HOST_" + pn)
     if not (host is None):
         d.setVar("PRSERV_HOST", host)
 
-    pkgv = d.getVar("PKGV", True)
+    pkgv = d.getVar("PKGV")
 
     # PR Server not active, handle AUTOINC
-    if not d.getVar('PRSERV_HOST', True):
+    if not d.getVar('PRSERV_HOST'):
         if 'AUTOINC' in pkgv:
             d.setVar("PKGV", pkgv.replace("AUTOINC", "0"))
         return
 
     auto_pr = None
-    pv = d.getVar("PV", True)
-    version = d.getVar("PRAUTOINX", True)
-    pkgarch = d.getVar("PACKAGE_ARCH", True)
-    checksum = d.getVar("BB_TASKHASH", True)
+    pv = d.getVar("PV")
+    version = d.getVar("PRAUTOINX")
+    pkgarch = d.getVar("PACKAGE_ARCH")
+    checksum = d.getVar("BB_TASKHASH")
 
-    if d.getVar('PRSERV_LOCKDOWN', True):
-        auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch, True) or d.getVar('PRAUTO_' + version, True) or None
+    if d.getVar('PRSERV_LOCKDOWN'):
+        auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch) or d.getVar('PRAUTO_' + version) or None
         if auto_pr is None:
             bb.fatal("Can NOT get PRAUTO from lockdown exported file")
         d.setVar('PRAUTO',str(auto_pr))
         return
 
     try:
-        conn = d.getVar("__PRSERV_CONN", True)
+        conn = d.getVar("__PRSERV_CONN")
         if conn is None:
             conn = oe.prservice.prserv_make_conn(d)
             if conn is not None:
@@ -540,19 +540,19 @@ python package_get_auto_pr() {
 LOCALEBASEPN ??= "${PN}"
 
 python package_do_split_locales() {
-    if (d.getVar('PACKAGE_NO_LOCALE', True) == '1'):
+    if (d.getVar('PACKAGE_NO_LOCALE') == '1'):
         bb.debug(1, "package requested not splitting locales")
         return
 
-    packages = (d.getVar('PACKAGES', True) or "").split()
+    packages = (d.getVar('PACKAGES') or "").split()
 
-    datadir = d.getVar('datadir', True)
+    datadir = d.getVar('datadir')
     if not datadir:
         bb.note("datadir not defined")
         return
 
-    dvar = d.getVar('PKGD', True)
-    pn = d.getVar('LOCALEBASEPN', True)
+    dvar = d.getVar('PKGD')
+    pn = d.getVar('LOCALEBASEPN')
 
     if pn + '-locale' in packages:
         packages.remove(pn + '-locale')
@@ -565,10 +565,10 @@ python package_do_split_locales() {
 
     locales = os.listdir(localedir)
 
-    summary = d.getVar('SUMMARY', True) or pn
-    description = d.getVar('DESCRIPTION', True) or ""
-    locale_section = d.getVar('LOCALE_SECTION', True)
-    mlprefix = d.getVar('MLPREFIX', True) or ""
+    summary = d.getVar('SUMMARY') or pn
+    description = d.getVar('DESCRIPTION') or ""
+    locale_section = d.getVar('LOCALE_SECTION')
+    mlprefix = d.getVar('MLPREFIX') or ""
     for l in sorted(locales):
         ln = legitimize_package_name(l)
         pkg = pn + '-locale-' + ln
@@ -589,14 +589,14 @@ python package_do_split_locales() {
     # glibc-localedata-translit* won't install as a dependency
     # for some other package which breaks meta-toolchain
     # Probably breaks since virtual-locale- isn't provided anywhere
-    #rdep = (d.getVar('RDEPENDS_%s' % pn, True) or "").split()
+    #rdep = (d.getVar('RDEPENDS_%s' % pn) or "").split()
     #rdep.append('%s-locale*' % pn)
     #d.setVar('RDEPENDS_%s' % pn, ' '.join(rdep))
 }
 
 python perform_packagecopy () {
-    dest = d.getVar('D', True)
-    dvar = d.getVar('PKGD', True)
+    dest = d.getVar('D')
+    dvar = d.getVar('PKGD')
 
     # Start by package population by taking a copy of the installed
     # files to operate on
@@ -730,8 +730,8 @@ python fixup_perms () {
     # paths are resolved via BBPATH
     def get_fs_perms_list(d):
         str = ""
-        bbpath = d.getVar('BBPATH', True)
-        fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES', True)
+        bbpath = d.getVar('BBPATH')
+        fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES')
         if not fs_perms_tables:
             fs_perms_tables = 'files/fs-perms.txt'
         for conf_file in fs_perms_tables.split():
@@ -740,7 +740,7 @@ python fixup_perms () {
 
 
 
-    dvar = d.getVar('PKGD', True)
+    dvar = d.getVar('PKGD')
 
     fs_perms_table = {}
     fs_link_table = {}
@@ -769,7 +769,7 @@ python fixup_perms () {
                         'oldincludedir' ]
 
     for path in target_path_vars:
-        dir = d.getVar(path, True) or ""
+        dir = d.getVar(path) or ""
         if dir == "":
             continue
         fs_perms_table[dir] = fs_perms_entry(bb.data.expand("%s 0755 root root false - - -" % (dir), d))
@@ -854,20 +854,20 @@ python fixup_perms () {
 python split_and_strip_files () {
     import stat, errno
 
-    dvar = d.getVar('PKGD', True)
-    pn = d.getVar('PN', True)
+    dvar = d.getVar('PKGD')
+    pn = d.getVar('PN')
 
     oldcwd = os.getcwd()
     os.chdir(dvar)
 
     # We default to '.debug' style
-    if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-file-directory':
+    if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory':
         # Single debug-file-directory style debug info
         debugappend = ".debug"
         debugdir = ""
         debuglibdir = "/usr/lib/debug"
         debugsrcdir = "/usr/src/debug"
-    elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-without-src':
+    elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-without-src':
         # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
         debugappend = ""
         debugdir = "/.debug"
@@ -918,10 +918,10 @@ python split_and_strip_files () {
     symlinks = {}
     kernmods = []
     inodes = {}
-    libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir", True))
-    baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir", True))
-    if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1' or \
-            d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1'):
+    libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
+    baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir"))
+    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \
+            d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
         for root, dirs, files in cpath.walk(dvar):
             for f in files:
                 file = os.path.join(root, f)
@@ -962,7 +962,7 @@ python split_and_strip_files () {
                     elf_file = isELF(file)
                     if elf_file & 1:
                         if elf_file & 2:
-                            if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn, True) or "").split():
+                            if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn) or "").split():
                                 bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
                             else:
                                 msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
@@ -991,7 +991,7 @@ python split_and_strip_files () {
     #
     # First lets process debug splitting
     #
-    if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1'):
+    if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
         for file in elffiles:
             src = file[len(dvar):]
             dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
@@ -1054,8 +1054,8 @@ python split_and_strip_files () {
     #
     # Now lets go back over things and strip them
     #
-    if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
-        strip = d.getVar("STRIP", True)
+    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'):
+        strip = d.getVar("STRIP")
         sfiles = []
         for file in elffiles:
             elf_file = int(elffiles[file])
@@ -1075,16 +1075,16 @@ python split_and_strip_files () {
 python populate_packages () {
     import glob, re
 
-    workdir = d.getVar('WORKDIR', True)
-    outdir = d.getVar('DEPLOY_DIR', True)
-    dvar = d.getVar('PKGD', True)
-    packages = d.getVar('PACKAGES', True)
-    pn = d.getVar('PN', True)
+    workdir = d.getVar('WORKDIR')
+    outdir = d.getVar('DEPLOY_DIR')
+    dvar = d.getVar('PKGD')
+    packages = d.getVar('PACKAGES')
+    pn = d.getVar('PN')
 
     bb.utils.mkdirhier(outdir)
     os.chdir(dvar)
 
-    autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG", True) or False)
+    autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False)
 
     # Sanity check PACKAGES for duplicates
     # Sanity should be moved to sanity.bbclass once we have the infrastucture
@@ -1099,7 +1099,7 @@ python populate_packages () {
         else:
             package_list.append(pkg)
     d.setVar('PACKAGES', ' '.join(package_list))
-    pkgdest = d.getVar('PKGDEST', True)
+    pkgdest = d.getVar('PKGDEST')
 
     seen = []
 
@@ -1120,7 +1120,7 @@ python populate_packages () {
         root = os.path.join(pkgdest, pkg)
         bb.utils.mkdirhier(root)
 
-        filesvar = d.getVar('FILES_%s' % pkg, True) or ""
+        filesvar = d.getVar('FILES_%s' % pkg) or ""
         if "//" in filesvar:
             msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
             package_qa_handle_error("files-invalid", msg, d)
@@ -1188,7 +1188,7 @@ python populate_packages () {
     # Handle LICENSE_EXCLUSION
     package_list = []
     for pkg in packages.split():
-        if d.getVar('LICENSE_EXCLUSION-' + pkg, True):
+        if d.getVar('LICENSE_EXCLUSION-' + pkg):
             msg = "%s has an incompatible license. Excluding from packaging." % pkg
             package_qa_handle_error("incompatible-license", msg, d)
         else:
@@ -1207,7 +1207,7 @@ python populate_packages () {
 
     if unshipped != []:
         msg = pn + ": Files/directories were installed but not shipped in any package:"
-        if "installed-vs-shipped" in (d.getVar('INSANE_SKIP_' + pn, True) or "").split():
+        if "installed-vs-shipped" in (d.getVar('INSANE_SKIP_' + pn) or "").split():
             bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
         else:
             for f in unshipped:
@@ -1220,7 +1220,7 @@ populate_packages[dirs] = "${D}"
 
 python package_fixsymlinks () {
     import errno
-    pkgdest = d.getVar('PKGDEST', True)
+    pkgdest = d.getVar('PKGDEST')
     packages = d.getVar("PACKAGES", False).split()
 
     dangling_links = {}
@@ -1255,7 +1255,7 @@ python package_fixsymlinks () {
             bb.note("%s contains dangling symlink to %s" % (pkg, l))
 
     for pkg in newrdepends:
-        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "")
+        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg) or "")
         for p in newrdepends[pkg]:
             if p not in rdepends:
                 rdepends[p] = []
@@ -1309,9 +1309,9 @@ python emit_pkgdata() {
         with open(subdata_file, 'w') as fd:
             fd.write("PKG_%s: %s" % (ml_pkg, pkg))
 
-    packages = d.getVar('PACKAGES', True)
-    pkgdest = d.getVar('PKGDEST', True)
-    pkgdatadir = d.getVar('PKGDESTWORK', True)
+    packages = d.getVar('PACKAGES')
+    pkgdest = d.getVar('PKGDEST')
+    pkgdatadir = d.getVar('PKGDESTWORK')
 
     # Take shared lock since we're only reading, not writing
     lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"), True)
@@ -1321,9 +1321,9 @@ python emit_pkgdata() {
     f.write("PACKAGES: %s\n" % packages)
     f.close()
 
-    pn = d.getVar('PN', True)
-    global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS', True) or "").split()
-    variants = (d.getVar('MULTILIB_VARIANTS', True) or "").split()
+    pn = d.getVar('PN')
+    global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS') or "").split()
+    variants = (d.getVar('MULTILIB_VARIANTS') or "").split()
 
     if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
         write_extra_pkgs(variants, pn, packages, pkgdatadir)
@@ -1331,10 +1331,10 @@ python emit_pkgdata() {
     if (bb.data.inherits_class('allarch', d) and not bb.data.inherits_class('packagegroup', d)):
         write_extra_pkgs(global_variants, pn, packages, pkgdatadir)
 
-    workdir = d.getVar('WORKDIR', True)
+    workdir = d.getVar('WORKDIR')
 
     for pkg in packages.split():
-        pkgval = d.getVar('PKG_%s' % pkg, True)
+        pkgval = d.getVar('PKG_%s' % pkg)
         if pkgval is None:
             pkgval = pkg
             d.setVar('PKG_%s' % pkg, pkg)
@@ -1377,11 +1377,11 @@ python emit_pkgdata() {
         write_if_exists(sf, pkg, 'pkg_prerm')
         write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
         write_if_exists(sf, pkg, 'FILES_INFO')
-        for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg, True) or "").split():
+        for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg) or "").split():
             write_if_exists(sf, pkg, 'FILERPROVIDES_' + dfile)
 
         write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
-        for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg, True) or "").split():
+        for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg) or "").split():
             write_if_exists(sf, pkg, 'FILERDEPENDS_' + dfile)
 
         sf.write('%s_%s: %d\n' % ('PKGSIZE', pkg, total_size))
@@ -1394,9 +1394,9 @@ python emit_pkgdata() {
             bb.utils.mkdirhier(os.path.dirname(subdata_sym))
             oe.path.symlink("../../runtime/%s" % pkg, subdata_sym, True)
 
-        allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg, True)
+        allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg)
         if not allow_empty:
-            allow_empty = d.getVar('ALLOW_EMPTY', True)
+            allow_empty = d.getVar('ALLOW_EMPTY')
         root = "%s/%s" % (pkgdest, pkg)
         os.chdir(root)
         g = glob('*')
@@ -1435,19 +1435,19 @@ RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/bin/rpmdeps-oecore --macros ${STAGING_LI
 # FILERDEPENDS_filepath_pkg - per file dep
 
 python package_do_filedeps() {
-    if d.getVar('SKIP_FILEDEPS', True) == '1':
+    if d.getVar('SKIP_FILEDEPS') == '1':
         return
 
-    pkgdest = d.getVar('PKGDEST', True)
-    packages = d.getVar('PACKAGES', True)
-    rpmdeps = d.getVar('RPMDEPS', True)
+    pkgdest = d.getVar('PKGDEST')
+    packages = d.getVar('PACKAGES')
+    rpmdeps = d.getVar('RPMDEPS')
 
     def chunks(files, n):
         return [files[i:i+n] for i in range(0, len(files), n)]
 
     pkglist = []
     for pkg in packages.split():
-        if d.getVar('SKIP_FILEDEPS_' + pkg, True) == '1':
+        if d.getVar('SKIP_FILEDEPS_' + pkg) == '1':
             continue
         if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-'):
@@ -1496,22 +1496,22 @@ python package_do_shlibs() {
         return
 
     lib_re = re.compile("^.*\.so")
-    libdir_re = re.compile(".*/%s$" % d.getVar('baselib', True))
+    libdir_re = re.compile(".*/%s$" % d.getVar('baselib'))
 
-    packages = d.getVar('PACKAGES', True)
-    targetos = d.getVar('TARGET_OS', True)
+    packages = d.getVar('PACKAGES')
+    targetos = d.getVar('TARGET_OS')
 
-    workdir = d.getVar('WORKDIR', True)
+    workdir = d.getVar('WORKDIR')
 
-    ver = d.getVar('PKGV', True)
+    ver = d.getVar('PKGV')
     if not ver:
         msg = "PKGV not defined"
         package_qa_handle_error("pkgv-undefined", msg, d)
         return
 
-    pkgdest = d.getVar('PKGDEST', True)
+    pkgdest = d.getVar('PKGDEST')
 
-    shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)
+    shlibswork_dir = d.getVar('SHLIBSWORKDIR')
 
     # Take shared lock since we're only reading, not writing
     lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))
@@ -1519,7 +1519,7 @@ python package_do_shlibs() {
     def linux_so(file, needed, sonames, renames, pkgver):
         needs_ldconfig = False
         ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
-        cmd = d.getVar('OBJDUMP', True) + " -p " + pipes.quote(file) + " 2>/dev/null"
+        cmd = d.getVar('OBJDUMP') + " -p " + pipes.quote(file) + " 2>/dev/null"
        fd = os.popen(cmd)
        lines = fd.readlines()
        fd.close()
@@ -1601,12 +1601,12 @@ python package_do_shlibs() {
             if name and name not in needed[pkg]:
                 needed[pkg].append((name, file, []))
 
-    if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS', True) == "1":
+    if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS') == "1":
         snap_symlinks = True
     else:
         snap_symlinks = False
 
-    if (d.getVar('USE_LDCONFIG', True) or "1") == "1":
+    if (d.getVar('USE_LDCONFIG') or "1") == "1":
         use_ldconfig = True
     else:
         use_ldconfig = False
@@ -1615,14 +1615,14 @@ python package_do_shlibs() {
     shlib_provider = oe.package.read_shlib_providers(d)
 
     for pkg in packages.split():
-        private_libs = d.getVar('PRIVATE_LIBS_' + pkg, True) or d.getVar('PRIVATE_LIBS', True) or ""
+        private_libs = d.getVar('PRIVATE_LIBS_' + pkg) or d.getVar('PRIVATE_LIBS') or ""
         private_libs = private_libs.split()
         needs_ldconfig = False
         bb.debug(2, "calculating shlib provides for %s" % pkg)
 
-        pkgver = d.getVar('PKGV_' + pkg, True)
+        pkgver = d.getVar('PKGV_' + pkg)
         if not pkgver:
-            pkgver = d.getVar('PV_' + pkg, True)
+            pkgver = d.getVar('PV_' + pkg)
         if not pkgver:
             pkgver = ver
 
@@ -1659,18 +1659,18 @@ python package_do_shlibs() {
         fd.close()
         if needs_ldconfig and use_ldconfig:
             bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
-            postinst = d.getVar('pkg_postinst_%s' % pkg, True)
+            postinst = d.getVar('pkg_postinst_%s' % pkg)
             if not postinst:
                 postinst = '#!/bin/sh\n'
-            postinst += d.getVar('ldconfig_postinst_fragment', True)
+            postinst += d.getVar('ldconfig_postinst_fragment')
             d.setVar('pkg_postinst_%s' % pkg, postinst)
         bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))
 
     bb.utils.unlockfile(lf)
 
-    assumed_libs = d.getVar('ASSUME_SHLIBS', True)
+    assumed_libs = d.getVar('ASSUME_SHLIBS')
     if assumed_libs:
-        libdir = d.getVar("libdir", True)
+        libdir = d.getVar("libdir")
         for e in assumed_libs.split():
             l, dep_pkg = e.split(":")
             lib_ver = None
@@ -1682,7 +1682,7 @@ python package_do_shlibs() {
             shlib_provider[l] = {}
         shlib_provider[l][libdir] = (dep_pkg, lib_ver)
 
-    libsearchpath = [d.getVar('libdir', True), d.getVar('base_libdir', True)]
+    libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')]
 
     for pkg in packages.split():
         bb.debug(2, "calculating shlib requirements for %s" % pkg)
@@ -1736,12 +1736,12 @@ python package_do_shlibs() {
 python package_do_pkgconfig () {
     import re
 
-    packages = d.getVar('PACKAGES', True)
-    workdir = d.getVar('WORKDIR', True)
-    pkgdest = d.getVar('PKGDEST', True)
+    packages = d.getVar('PACKAGES')
+    workdir = d.getVar('WORKDIR')
+    pkgdest = d.getVar('PKGDEST')
 
-    shlibs_dirs = d.getVar('SHLIBSDIRS', True).split()
-    shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)
+    shlibs_dirs = d.getVar('SHLIBSDIRS').split()
+    shlibswork_dir = d.getVar('SHLIBSWORKDIR')
 
     pc_re = re.compile('(.*)\.pc$')
     var_re = re.compile('(.*)=(.*)')
@@ -1826,7 +1826,7 @@ python package_do_pkgconfig () {
 
 def read_libdep_files(d):
     pkglibdeps = {}
-    packages = d.getVar('PACKAGES', True).split()
+    packages = d.getVar('PACKAGES').split()
     for pkg in packages:
         pkglibdeps[pkg] = {}
         for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
@@ -1846,9 +1846,9 @@ def read_libdep_files(d):
 python read_shlibdeps () {
     pkglibdeps = read_libdep_files(d)
 
-    packages = d.getVar('PACKAGES', True).split()
+    packages = d.getVar('PACKAGES').split()
     for pkg in packages:
-        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "")
+        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg) or "")
         for dep in pkglibdeps[pkg]:
             # Add the dep if it's not already there, or if no comparison is set
             if dep not in rdepends:
@@ -1873,14 +1873,14 @@ python package_depchains() {
     package.
     """
 
-    packages = d.getVar('PACKAGES', True)
-    postfixes = (d.getVar('DEPCHAIN_POST', True) or '').split()
-    prefixes = (d.getVar('DEPCHAIN_PRE', True) or '').split()
+    packages = d.getVar('PACKAGES')
+    postfixes = (d.getVar('DEPCHAIN_POST') or '').split()
+    prefixes = (d.getVar('DEPCHAIN_PRE') or '').split()
 
     def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
 
         #bb.note('depends for %s is %s' % (base, depends))
-        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg, True) or "")
+        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg) or "")
 
         for depend in depends:
             if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
@@ -1901,7 +1901,7 @@ python package_depchains() {
     def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
 
         #bb.note('rdepends for %s is %s' % (base, rdepends))
-        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg, True) or "")
+        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg) or "")
 
         for depend in rdepends:
             if depend.find('virtual-locale-') != -1:
@@ -1924,12 +1924,12 @@ python package_depchains() {
             list.append(dep)
 
     depends = []
-    for dep in bb.utils.explode_deps(d.getVar('DEPENDS', True) or ""):
+    for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""):
         add_dep(depends, dep)
 
     rdepends = []
     for pkg in packages.split():
-        for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg, True) or ""):
+        for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg) or ""):
             add_dep(rdepends, dep)
 
     #bb.note('rdepends is %s' % rdepends)
@@ -1959,7 +1959,7 @@ python package_depchains() {
     for pkg in pkglibdeps:
1960 for k in pkglibdeps[pkg]: 1960 for k in pkglibdeps[pkg]:
1961 add_dep(pkglibdeplist, k) 1961 add_dep(pkglibdeplist, k)
1962 dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS', True) == '1') or (bb.data.inherits_class('packagegroup', d))) 1962 dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d)))
1963 1963
1964 for suffix in pkgs: 1964 for suffix in pkgs:
1965 for pkg in pkgs[suffix]: 1965 for pkg in pkgs[suffix]:
@@ -1976,7 +1976,7 @@ python package_depchains() {
1976 pkg_addrrecs(pkg, base, suffix, func, rdepends, d) 1976 pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
1977 else: 1977 else:
1978 rdeps = [] 1978 rdeps = []
1979 for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base, True) or ""): 1979 for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base) or ""):
1980 add_dep(rdeps, dep) 1980 add_dep(rdeps, dep)
1981 pkg_addrrecs(pkg, base, suffix, func, rdeps, d) 1981 pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
1982} 1982}
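
[Annotation] The depends/rdepends gathering in package_depchains() leans on bb.utils.explode_deps(), which flattens a dependency string to bare package names, discarding version constraints. A toy rendering of that behavior, as a simplified assumption (the real helper lives in bitbake/lib/bb/utils.py and is the authority):

    # Toy rendering of explode_deps(): reduce "name (op version)" entries
    # to bare names. Simplified assumption of the real bb.utils helper.
    import re

    def toy_explode_deps(depstr):
        # Strip parenthesised version constraints, then split on whitespace.
        return re.sub(r"\([^)]*\)", "", depstr or "").split()

    deps = "libfoo (>= 1.2) libbar update-rc.d"
    assert toy_explode_deps(deps) == ["libfoo", "libbar", "update-rc.d"]
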
@@ -1987,8 +1987,8 @@ PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDE
1987 1987
1988def gen_packagevar(d): 1988def gen_packagevar(d):
1989 ret = [] 1989 ret = []
1990 pkgs = (d.getVar("PACKAGES", True) or "").split() 1990 pkgs = (d.getVar("PACKAGES") or "").split()
1991 vars = (d.getVar("PACKAGEVARS", True) or "").split() 1991 vars = (d.getVar("PACKAGEVARS") or "").split()
1992 for p in pkgs: 1992 for p in pkgs:
1993 for v in vars: 1993 for v in vars:
1994 ret.append(v + "_" + p) 1994 ret.append(v + "_" + p)
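
[Annotation] gen_packagevar() above simply crosses PACKAGES with PACKAGEVARS to produce the per-package variable names (FILES_<pkg>, RDEPENDS_<pkg>, and so on). The same logic in stand-alone form, with example values assumed rather than read from a datastore:

    # Stand-alone form of the gen_packagevar() cross product; the pkgs
    # and vars values here are assumed examples.
    def gen_packagevar(pkgs, vars):
        return [v + "_" + p for p in pkgs for v in vars]

    names = gen_packagevar(["zlib", "zlib-dev"], ["FILES", "RDEPENDS"])
    assert names == ["FILES_zlib", "RDEPENDS_zlib",
                     "FILES_zlib-dev", "RDEPENDS_zlib-dev"]
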
@@ -2036,16 +2036,16 @@ python do_package () {
2036 # Sanity test the setup 2036 # Sanity test the setup
2037 ########################################################################### 2037 ###########################################################################
2038 2038
2039 packages = (d.getVar('PACKAGES', True) or "").split() 2039 packages = (d.getVar('PACKAGES') or "").split()
2040 if len(packages) < 1: 2040 if len(packages) < 1:
2041 bb.debug(1, "No packages to build, skipping do_package") 2041 bb.debug(1, "No packages to build, skipping do_package")
2042 return 2042 return
2043 2043
2044 workdir = d.getVar('WORKDIR', True) 2044 workdir = d.getVar('WORKDIR')
2045 outdir = d.getVar('DEPLOY_DIR', True) 2045 outdir = d.getVar('DEPLOY_DIR')
2046 dest = d.getVar('D', True) 2046 dest = d.getVar('D')
2047 dvar = d.getVar('PKGD', True) 2047 dvar = d.getVar('PKGD')
2048 pn = d.getVar('PN', True) 2048 pn = d.getVar('PN')
2049 2049
2050 if not workdir or not outdir or not dest or not dvar or not pn: 2050 if not workdir or not outdir or not dest or not dvar or not pn:
2051 msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package" 2051 msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package"
@@ -2063,7 +2063,7 @@ python do_package () {
2063 # code pre-expands some frequently used variables 2063 # code pre-expands some frequently used variables
2064 2064
2065 def expandVar(x, d): 2065 def expandVar(x, d):
2066 d.setVar(x, d.getVar(x, True)) 2066 d.setVar(x, d.getVar(x))
2067 2067
2068 for x in 'PN', 'PV', 'BPN', 'TARGET_SYS', 'EXTENDPRAUTO': 2068 for x in 'PN', 'PV', 'BPN', 'TARGET_SYS', 'EXTENDPRAUTO':
2069 expandVar(x, d) 2069 expandVar(x, d)
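
[Annotation] The expandVar() helper in this hunk shows the semantics the whole conversion relies on: getVar() now expands by default, so setVar(x, getVar(x)) freezes the expanded value back into the store. A minimal stand-in datastore illustrating that default; this is illustrative only, BitBake's real bb.data_smart.DataSmart differs in many details:

    # Minimal sketch only: a stand-in for BitBake's datastore showing
    # getVar()'s expand-by-default behavior. Names and behavior here
    # are simplified assumptions.
    import re

    class ToyDataStore:
        def __init__(self):
            self._vars = {}

        def setVar(self, name, value):
            self._vars[name] = value

        def getVar(self, name, expand=True):
            # expand defaults to True, so callers can drop the explicit
            # second argument; passing False still returns the raw value.
            value = self._vars.get(name)
            if expand and isinstance(value, str):
                # Expand ${NAME} references recursively.
                value = re.sub(r"\$\{(\w+)\}",
                               lambda m: self.getVar(m.group(1)) or "",
                               value)
            return value

    d = ToyDataStore()
    d.setVar("PN", "zlib")
    d.setVar("WORKDIR", "/build/${PN}")
    assert d.getVar("WORKDIR") == "/build/zlib"          # expanded by default
    assert d.getVar("WORKDIR", False) == "/build/${PN}"  # raw value

    # expandVar() from the hunk above, applied to this toy store:
    def expandVar(x, d):
        d.setVar(x, d.getVar(x))

    expandVar("WORKDIR", d)
    assert d.getVar("WORKDIR", False) == "/build/zlib"   # now pre-expanded
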
@@ -2072,7 +2072,7 @@ python do_package () {
2072 # Setup PKGD (from D) 2072 # Setup PKGD (from D)
2073 ########################################################################### 2073 ###########################################################################
2074 2074
2075 for f in (d.getVar('PACKAGEBUILDPKGD', True) or '').split(): 2075 for f in (d.getVar('PACKAGEBUILDPKGD') or '').split():
2076 bb.build.exec_func(f, d) 2076 bb.build.exec_func(f, d)
2077 2077
2078 ########################################################################### 2078 ###########################################################################
@@ -2081,7 +2081,7 @@ python do_package () {
2081 2081
2082 cpath = oe.cachedpath.CachedPath() 2082 cpath = oe.cachedpath.CachedPath()
2083 2083
2084 for f in (d.getVar('PACKAGESPLITFUNCS', True) or '').split(): 2084 for f in (d.getVar('PACKAGESPLITFUNCS') or '').split():
2085 bb.build.exec_func(f, d) 2085 bb.build.exec_func(f, d)
2086 2086
2087 ########################################################################### 2087 ###########################################################################
@@ -2091,18 +2091,18 @@ python do_package () {
2091 # Build global list of files in each split package 2091 # Build global list of files in each split package
2092 global pkgfiles 2092 global pkgfiles
2093 pkgfiles = {} 2093 pkgfiles = {}
2094 packages = d.getVar('PACKAGES', True).split() 2094 packages = d.getVar('PACKAGES').split()
2095 pkgdest = d.getVar('PKGDEST', True) 2095 pkgdest = d.getVar('PKGDEST')
2096 for pkg in packages: 2096 for pkg in packages:
2097 pkgfiles[pkg] = [] 2097 pkgfiles[pkg] = []
2098 for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg): 2098 for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg):
2099 for file in files: 2099 for file in files:
2100 pkgfiles[pkg].append(walkroot + os.sep + file) 2100 pkgfiles[pkg].append(walkroot + os.sep + file)
2101 2101
2102 for f in (d.getVar('PACKAGEFUNCS', True) or '').split(): 2102 for f in (d.getVar('PACKAGEFUNCS') or '').split():
2103 bb.build.exec_func(f, d) 2103 bb.build.exec_func(f, d)
2104 2104
2105 qa_sane = d.getVar("QA_SANE", True) 2105 qa_sane = d.getVar("QA_SANE")
2106 if not qa_sane: 2106 if not qa_sane:
2107 bb.fatal("Fatal QA errors found, failing task.") 2107 bb.fatal("Fatal QA errors found, failing task.")
2108} 2108}
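
[Annotation] do_package() above is mostly a dispatcher: PACKAGEBUILDPKGD, PACKAGESPLITFUNCS and PACKAGEFUNCS each hold a space-separated list of function names that are executed in order via bb.build.exec_func(). Schematically, with a plain dict standing in for BitBake's function lookup (the registry and function names are illustrative assumptions):

    # Sketch of the "function list in a variable" dispatch used above;
    # the registry dict is an assumed stand-in for BitBake's function
    # lookup, not its real mechanism.
    def run_funcs(varvalue, registry, d):
        # Execute each named function in list order, mirroring the
        # "for f in (...).split(): bb.build.exec_func(f, d)" loops.
        for name in (varvalue or '').split():
            registry[name](d)

    calls = []
    registry = {
        'package_do_shlibs': lambda d: calls.append('shlibs'),
        'package_do_pkgconfig': lambda d: calls.append('pkgconfig'),
    }
    run_funcs('package_do_shlibs package_do_pkgconfig', registry, d={})
    assert calls == ['shlibs', 'pkgconfig']
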
@@ -2149,7 +2149,7 @@ def mapping_rename_hook(d):
2149 Rewrite variables to account for package renaming in things 2149 Rewrite variables to account for package renaming in things
2150 like debian.bbclass or manual PKG variable name changes 2150 like debian.bbclass or manual PKG variable name changes
2151 """ 2151 """
2152 pkg = d.getVar("PKG", True) 2152 pkg = d.getVar("PKG")
2153 runtime_mapping_rename("RDEPENDS", pkg, d) 2153 runtime_mapping_rename("RDEPENDS", pkg, d)
2154 runtime_mapping_rename("RRECOMMENDS", pkg, d) 2154 runtime_mapping_rename("RRECOMMENDS", pkg, d)
2155 runtime_mapping_rename("RSUGGESTS", pkg, d) 2155 runtime_mapping_rename("RSUGGESTS", pkg, d)
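
[Annotation] Across all of the package.bbclass hunks the rewrite is mechanical: a trailing ', True' is dropped from getVar() calls whose first argument is simple. One way such a rewrite could be scripted, as an illustrative approximation only; note how a computed variable name containing parentheses deliberately fails to match, consistent with the untouched DEPLOY_DIR_%s call in packagefeed-stability.bbclass further down:

    # Illustrative approximation of scripting this rewrite with re;
    # deliberately conservative (simple first arguments only), so
    # computed names containing parentheses are left untouched.
    import re

    pattern = re.compile(r"getVar\(([^,()]+), True\)")

    def drop_true(source):
        return pattern.sub(r"getVar(\1)", source)

    assert drop_true("pn = d.getVar('PN', True)") == "pn = d.getVar('PN')"
    # A computed name containing parentheses does not match and is kept:
    tricky = "x = d.getVar('DEPLOY_DIR_%s' % t.upper(), True)"
    assert drop_true(tricky) == tricky
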
diff --git a/meta/classes/package_deb.bbclass b/meta/classes/package_deb.bbclass
index fb6034cab1..68eca61a26 100644
--- a/meta/classes/package_deb.bbclass
+++ b/meta/classes/package_deb.bbclass
@@ -6,14 +6,14 @@ inherit package
6 6
7IMAGE_PKGTYPE ?= "deb" 7IMAGE_PKGTYPE ?= "deb"
8 8
9DPKG_ARCH ?= "${@debian_arch_map(d.getVar('TARGET_ARCH', True), d.getVar('TUNE_FEATURES', True))}" 9DPKG_ARCH ?= "${@debian_arch_map(d.getVar('TARGET_ARCH'), d.getVar('TUNE_FEATURES'))}"
10DPKG_ARCH[vardepvalue] = "${DPKG_ARCH}" 10DPKG_ARCH[vardepvalue] = "${DPKG_ARCH}"
11 11
12PKGWRITEDIRDEB = "${WORKDIR}/deploy-debs" 12PKGWRITEDIRDEB = "${WORKDIR}/deploy-debs"
13 13
14APTCONF_TARGET = "${WORKDIR}" 14APTCONF_TARGET = "${WORKDIR}"
15 15
16APT_ARGS = "${@['', '--no-install-recommends'][d.getVar("NO_RECOMMENDATIONS", True) == "1"]}" 16APT_ARGS = "${@['', '--no-install-recommends'][d.getVar("NO_RECOMMENDATIONS") == "1"]}"
17 17
18def debian_arch_map(arch, tune): 18def debian_arch_map(arch, tune):
19 tune_features = tune.split() 19 tune_features = tune.split()
@@ -56,22 +56,22 @@ python do_package_deb () {
56 56
57 oldcwd = os.getcwd() 57 oldcwd = os.getcwd()
58 58
59 workdir = d.getVar('WORKDIR', True) 59 workdir = d.getVar('WORKDIR')
60 if not workdir: 60 if not workdir:
61 bb.error("WORKDIR not defined, unable to package") 61 bb.error("WORKDIR not defined, unable to package")
62 return 62 return
63 63
64 outdir = d.getVar('PKGWRITEDIRDEB', True) 64 outdir = d.getVar('PKGWRITEDIRDEB')
65 if not outdir: 65 if not outdir:
66 bb.error("PKGWRITEDIRDEB not defined, unable to package") 66 bb.error("PKGWRITEDIRDEB not defined, unable to package")
67 return 67 return
68 68
69 packages = d.getVar('PACKAGES', True) 69 packages = d.getVar('PACKAGES')
70 if not packages: 70 if not packages:
71 bb.debug(1, "PACKAGES not defined, nothing to package") 71 bb.debug(1, "PACKAGES not defined, nothing to package")
72 return 72 return
73 73
74 tmpdir = d.getVar('TMPDIR', True) 74 tmpdir = d.getVar('TMPDIR')
75 75
76 if os.access(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"),os.R_OK): 76 if os.access(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"),os.R_OK):
77 os.unlink(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN")) 77 os.unlink(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"))
@@ -80,7 +80,7 @@ python do_package_deb () {
80 bb.debug(1, "No packages; nothing to do") 80 bb.debug(1, "No packages; nothing to do")
81 return 81 return
82 82
83 pkgdest = d.getVar('PKGDEST', True) 83 pkgdest = d.getVar('PKGDEST')
84 84
85 def cleanupcontrol(root): 85 def cleanupcontrol(root):
86 for p in ['CONTROL', 'DEBIAN']: 86 for p in ['CONTROL', 'DEBIAN']:
@@ -96,7 +96,7 @@ python do_package_deb () {
96 96
97 localdata.setVar('ROOT', '') 97 localdata.setVar('ROOT', '')
98 localdata.setVar('ROOT_%s' % pkg, root) 98 localdata.setVar('ROOT_%s' % pkg, root)
99 pkgname = localdata.getVar('PKG_%s' % pkg, True) 99 pkgname = localdata.getVar('PKG_%s' % pkg)
100 if not pkgname: 100 if not pkgname:
101 pkgname = pkg 101 pkgname = pkg
102 localdata.setVar('PKG', pkgname) 102 localdata.setVar('PKG', pkgname)
@@ -106,7 +106,7 @@ python do_package_deb () {
106 bb.data.update_data(localdata) 106 bb.data.update_data(localdata)
107 basedir = os.path.join(os.path.dirname(root)) 107 basedir = os.path.join(os.path.dirname(root))
108 108
109 pkgoutdir = os.path.join(outdir, localdata.getVar('PACKAGE_ARCH', True)) 109 pkgoutdir = os.path.join(outdir, localdata.getVar('PACKAGE_ARCH'))
110 bb.utils.mkdirhier(pkgoutdir) 110 bb.utils.mkdirhier(pkgoutdir)
111 111
112 os.chdir(root) 112 os.chdir(root)
@@ -114,7 +114,7 @@ python do_package_deb () {
114 from glob import glob 114 from glob import glob
115 g = glob('*') 115 g = glob('*')
116 if not g and localdata.getVar('ALLOW_EMPTY', False) != "1": 116 if not g and localdata.getVar('ALLOW_EMPTY', False) != "1":
117 bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', True), localdata.getVar('PKGR', True))) 117 bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV'), localdata.getVar('PKGR')))
118 bb.utils.unlockfile(lf) 118 bb.utils.unlockfile(lf)
119 continue 119 continue
120 120
@@ -129,7 +129,7 @@ python do_package_deb () {
129 bb.fatal("unable to open control file for writing") 129 bb.fatal("unable to open control file for writing")
130 130
131 fields = [] 131 fields = []
132 pe = d.getVar('PKGE', True) 132 pe = d.getVar('PKGE')
133 if pe and int(pe) > 0: 133 if pe and int(pe) > 0:
134 fields.append(["Version: %s:%s-%s\n", ['PKGE', 'PKGV', 'PKGR']]) 134 fields.append(["Version: %s:%s-%s\n", ['PKGE', 'PKGV', 'PKGR']])
135 else: 135 else:
@@ -141,7 +141,7 @@ python do_package_deb () {
141 fields.append(["Architecture: %s\n", ['DPKG_ARCH']]) 141 fields.append(["Architecture: %s\n", ['DPKG_ARCH']])
142 fields.append(["OE: %s\n", ['PN']]) 142 fields.append(["OE: %s\n", ['PN']])
143 fields.append(["PackageArch: %s\n", ['PACKAGE_ARCH']]) 143 fields.append(["PackageArch: %s\n", ['PACKAGE_ARCH']])
144 if d.getVar('HOMEPAGE', True): 144 if d.getVar('HOMEPAGE'):
145 fields.append(["Homepage: %s\n", ['HOMEPAGE']]) 145 fields.append(["Homepage: %s\n", ['HOMEPAGE']])
146 146
147 # Package, Version, Maintainer, Description - mandatory 147 # Package, Version, Maintainer, Description - mandatory
@@ -151,10 +151,10 @@ python do_package_deb () {
151 def pullData(l, d): 151 def pullData(l, d):
152 l2 = [] 152 l2 = []
153 for i in l: 153 for i in l:
154 data = d.getVar(i, True) 154 data = d.getVar(i)
155 if data is None: 155 if data is None:
156 raise KeyError(f) 156 raise KeyError(f)
157 if i == 'DPKG_ARCH' and d.getVar('PACKAGE_ARCH', True) == 'all': 157 if i == 'DPKG_ARCH' and d.getVar('PACKAGE_ARCH') == 'all':
158 data = 'all' 158 data = 'all'
159 elif i == 'PACKAGE_ARCH' or i == 'DPKG_ARCH': 159 elif i == 'PACKAGE_ARCH' or i == 'DPKG_ARCH':
160 # The params in deb package control don't allow character 160 # The params in deb package control don't allow character
@@ -165,7 +165,7 @@ python do_package_deb () {
165 return l2 165 return l2
166 166
167 ctrlfile.write("Package: %s\n" % pkgname) 167 ctrlfile.write("Package: %s\n" % pkgname)
168 if d.getVar('PACKAGE_ARCH', True) == "all": 168 if d.getVar('PACKAGE_ARCH') == "all":
169 ctrlfile.write("Multi-Arch: foreign\n") 169 ctrlfile.write("Multi-Arch: foreign\n")
170 # check for required fields 170 # check for required fields
171 try: 171 try:
@@ -175,9 +175,9 @@ python do_package_deb () {
175 raise KeyError(f) 175 raise KeyError(f)
176 # Special behavior for description... 176 # Special behavior for description...
177 if 'DESCRIPTION' in fs: 177 if 'DESCRIPTION' in fs:
178 summary = localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or "." 178 summary = localdata.getVar('SUMMARY') or localdata.getVar('DESCRIPTION') or "."
179 ctrlfile.write('Description: %s\n' % summary) 179 ctrlfile.write('Description: %s\n' % summary)
180 description = localdata.getVar('DESCRIPTION', True) or "." 180 description = localdata.getVar('DESCRIPTION') or "."
181 description = textwrap.dedent(description).strip() 181 description = textwrap.dedent(description).strip()
182 if '\\n' in description: 182 if '\\n' in description:
183 # Manually indent 183 # Manually indent
@@ -231,7 +231,7 @@ python do_package_deb () {
231 elif (v or "").startswith("> "): 231 elif (v or "").startswith("> "):
232 var[dep][i] = var[dep][i].replace("> ", ">> ") 232 var[dep][i] = var[dep][i].replace("> ", ">> ")
233 233
234 rdepends = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS", True) or "") 234 rdepends = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS") or "")
235 debian_cmp_remap(rdepends) 235 debian_cmp_remap(rdepends)
236 for dep in list(rdepends.keys()): 236 for dep in list(rdepends.keys()):
237 if dep == pkg: 237 if dep == pkg:
@@ -239,20 +239,20 @@ python do_package_deb () {
239 continue 239 continue
240 if '*' in dep: 240 if '*' in dep:
241 del rdepends[dep] 241 del rdepends[dep]
242 rrecommends = bb.utils.explode_dep_versions2(localdata.getVar("RRECOMMENDS", True) or "") 242 rrecommends = bb.utils.explode_dep_versions2(localdata.getVar("RRECOMMENDS") or "")
243 debian_cmp_remap(rrecommends) 243 debian_cmp_remap(rrecommends)
244 for dep in list(rrecommends.keys()): 244 for dep in list(rrecommends.keys()):
245 if '*' in dep: 245 if '*' in dep:
246 del rrecommends[dep] 246 del rrecommends[dep]
247 rsuggests = bb.utils.explode_dep_versions2(localdata.getVar("RSUGGESTS", True) or "") 247 rsuggests = bb.utils.explode_dep_versions2(localdata.getVar("RSUGGESTS") or "")
248 debian_cmp_remap(rsuggests) 248 debian_cmp_remap(rsuggests)
249 # Deliberately drop version information here, not wanted/supported by deb 249 # Deliberately drop version information here, not wanted/supported by deb
250 rprovides = dict.fromkeys(bb.utils.explode_dep_versions2(localdata.getVar("RPROVIDES", True) or ""), []) 250 rprovides = dict.fromkeys(bb.utils.explode_dep_versions2(localdata.getVar("RPROVIDES") or ""), [])
251 rprovides = collections.OrderedDict(sorted(rprovides.items(), key=lambda x: x[0])) 251 rprovides = collections.OrderedDict(sorted(rprovides.items(), key=lambda x: x[0]))
252 debian_cmp_remap(rprovides) 252 debian_cmp_remap(rprovides)
253 rreplaces = bb.utils.explode_dep_versions2(localdata.getVar("RREPLACES", True) or "") 253 rreplaces = bb.utils.explode_dep_versions2(localdata.getVar("RREPLACES") or "")
254 debian_cmp_remap(rreplaces) 254 debian_cmp_remap(rreplaces)
255 rconflicts = bb.utils.explode_dep_versions2(localdata.getVar("RCONFLICTS", True) or "") 255 rconflicts = bb.utils.explode_dep_versions2(localdata.getVar("RCONFLICTS") or "")
256 debian_cmp_remap(rconflicts) 256 debian_cmp_remap(rconflicts)
257 if rdepends: 257 if rdepends:
258 ctrlfile.write("Depends: %s\n" % bb.utils.join_deps(rdepends)) 258 ctrlfile.write("Depends: %s\n" % bb.utils.join_deps(rdepends))
@@ -269,7 +269,7 @@ python do_package_deb () {
269 ctrlfile.close() 269 ctrlfile.close()
270 270
271 for script in ["preinst", "postinst", "prerm", "postrm"]: 271 for script in ["preinst", "postinst", "prerm", "postrm"]:
272 scriptvar = localdata.getVar('pkg_%s' % script, True) 272 scriptvar = localdata.getVar('pkg_%s' % script)
273 if not scriptvar: 273 if not scriptvar:
274 continue 274 continue
275 scriptvar = scriptvar.strip() 275 scriptvar = scriptvar.strip()
@@ -308,7 +308,7 @@ python do_package_deb () {
308 conffiles.close() 308 conffiles.close()
309 309
310 os.chdir(basedir) 310 os.chdir(basedir)
311 ret = subprocess.call("PATH=\"%s\" dpkg-deb -b %s %s" % (localdata.getVar("PATH", True), root, pkgoutdir), shell=True) 311 ret = subprocess.call("PATH=\"%s\" dpkg-deb -b %s %s" % (localdata.getVar("PATH"), root, pkgoutdir), shell=True)
312 if ret != 0: 312 if ret != 0:
313 bb.utils.unlockfile(lf) 313 bb.utils.unlockfile(lf)
314 bb.fatal("dpkg-deb execution failed") 314 bb.fatal("dpkg-deb execution failed")
@@ -328,7 +328,7 @@ do_package_write_deb[sstate-inputdirs] = "${PKGWRITEDIRDEB}"
328do_package_write_deb[sstate-outputdirs] = "${DEPLOY_DIR_DEB}" 328do_package_write_deb[sstate-outputdirs] = "${DEPLOY_DIR_DEB}"
329 329
330python do_package_write_deb_setscene () { 330python do_package_write_deb_setscene () {
331 tmpdir = d.getVar('TMPDIR', True) 331 tmpdir = d.getVar('TMPDIR')
332 332
333 if os.access(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"),os.R_OK): 333 if os.access(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"),os.R_OK):
334 os.unlink(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN")) 334 os.unlink(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"))
@@ -338,7 +338,7 @@ python do_package_write_deb_setscene () {
338addtask do_package_write_deb_setscene 338addtask do_package_write_deb_setscene
339 339
340python () { 340python () {
341 if d.getVar('PACKAGES', True) != '': 341 if d.getVar('PACKAGES') != '':
342 deps = ' dpkg-native:do_populate_sysroot virtual/fakeroot-native:do_populate_sysroot' 342 deps = ' dpkg-native:do_populate_sysroot virtual/fakeroot-native:do_populate_sysroot'
343 d.appendVarFlag('do_package_write_deb', 'depends', deps) 343 d.appendVarFlag('do_package_write_deb', 'depends', deps)
344 d.setVarFlag('do_package_write_deb', 'fakeroot', "1") 344 d.setVarFlag('do_package_write_deb', 'fakeroot', "1")
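
[Annotation] For the debian_cmp_remap() calls in the do_package_deb() hunk: dpkg's control-file syntax uses '<<' and '>>' for strict version comparisons, so OE-style '< ' and '> ' operators are rewritten before the Depends/Recommends/Provides fields are emitted. A rough, self-contained approximation; the in-tree helper handles more cases than shown here:

    # Illustrative only: remap OE-style strict version comparisons onto
    # the operators dpkg expects. The real debian_cmp_remap() in
    # package_deb.bbclass covers additional cases.
    def toy_debian_cmp_remap(constraints):
        return {dep: [v.replace("< ", "<< ").replace("> ", ">> ")
                      for v in versions]
                for dep, versions in constraints.items()}

    deps = {"libfoo": ["> 1.2"], "libbar": ["< 2.0"], "libbaz": []}
    assert toy_debian_cmp_remap(deps) == {
        "libfoo": [">> 1.2"], "libbar": ["<< 2.0"], "libbaz": []}
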
diff --git a/meta/classes/package_ipk.bbclass b/meta/classes/package_ipk.bbclass
index eb00932336..7018a600a9 100644
--- a/meta/classes/package_ipk.bbclass
+++ b/meta/classes/package_ipk.bbclass
@@ -11,8 +11,8 @@ PKGWRITEDIRIPK = "${WORKDIR}/deploy-ipks"
11OPKGBUILDCMD ??= "opkg-build" 11OPKGBUILDCMD ??= "opkg-build"
12 12
13OPKG_ARGS += "--force_postinstall --prefer-arch-to-version" 13OPKG_ARGS += "--force_postinstall --prefer-arch-to-version"
14OPKG_ARGS += "${@['', '--no-install-recommends'][d.getVar("NO_RECOMMENDATIONS", True) == "1"]}" 14OPKG_ARGS += "${@['', '--no-install-recommends'][d.getVar("NO_RECOMMENDATIONS") == "1"]}"
15OPKG_ARGS += "${@['', '--add-exclude ' + ' --add-exclude '.join((d.getVar('PACKAGE_EXCLUDE', True) or "").split())][(d.getVar("PACKAGE_EXCLUDE", True) or "") != ""]}" 15OPKG_ARGS += "${@['', '--add-exclude ' + ' --add-exclude '.join((d.getVar('PACKAGE_EXCLUDE') or "").split())][(d.getVar("PACKAGE_EXCLUDE") or "") != ""]}"
16 16
17OPKGLIBDIR = "${localstatedir}/lib" 17OPKGLIBDIR = "${localstatedir}/lib"
18 18
@@ -24,15 +24,15 @@ python do_package_ipk () {
24 24
25 oldcwd = os.getcwd() 25 oldcwd = os.getcwd()
26 26
27 workdir = d.getVar('WORKDIR', True) 27 workdir = d.getVar('WORKDIR')
28 outdir = d.getVar('PKGWRITEDIRIPK', True) 28 outdir = d.getVar('PKGWRITEDIRIPK')
29 tmpdir = d.getVar('TMPDIR', True) 29 tmpdir = d.getVar('TMPDIR')
30 pkgdest = d.getVar('PKGDEST', True) 30 pkgdest = d.getVar('PKGDEST')
31 if not workdir or not outdir or not tmpdir: 31 if not workdir or not outdir or not tmpdir:
32 bb.error("Variables incorrectly set, unable to package") 32 bb.error("Variables incorrectly set, unable to package")
33 return 33 return
34 34
35 packages = d.getVar('PACKAGES', True) 35 packages = d.getVar('PACKAGES')
36 if not packages or packages == '': 36 if not packages or packages == '':
37 bb.debug(1, "No packages; nothing to do") 37 bb.debug(1, "No packages; nothing to do")
38 return 38 return
@@ -56,7 +56,7 @@ python do_package_ipk () {
56 56
57 localdata.setVar('ROOT', '') 57 localdata.setVar('ROOT', '')
58 localdata.setVar('ROOT_%s' % pkg, root) 58 localdata.setVar('ROOT_%s' % pkg, root)
59 pkgname = localdata.getVar('PKG_%s' % pkg, True) 59 pkgname = localdata.getVar('PKG_%s' % pkg)
60 if not pkgname: 60 if not pkgname:
61 pkgname = pkg 61 pkgname = pkg
62 localdata.setVar('PKG', pkgname) 62 localdata.setVar('PKG', pkgname)
@@ -65,7 +65,7 @@ python do_package_ipk () {
65 65
66 bb.data.update_data(localdata) 66 bb.data.update_data(localdata)
67 basedir = os.path.join(os.path.dirname(root)) 67 basedir = os.path.join(os.path.dirname(root))
68 arch = localdata.getVar('PACKAGE_ARCH', True) 68 arch = localdata.getVar('PACKAGE_ARCH')
69 69
70 if localdata.getVar('IPK_HIERARCHICAL_FEED', False) == "1": 70 if localdata.getVar('IPK_HIERARCHICAL_FEED', False) == "1":
71 # Spread packages across subdirectories so each isn't too crowded 71 # Spread packages across subdirectories so each isn't too crowded
@@ -98,7 +98,7 @@ python do_package_ipk () {
98 from glob import glob 98 from glob import glob
99 g = glob('*') 99 g = glob('*')
100 if not g and localdata.getVar('ALLOW_EMPTY', False) != "1": 100 if not g and localdata.getVar('ALLOW_EMPTY', False) != "1":
101 bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', True), localdata.getVar('PKGR', True))) 101 bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV'), localdata.getVar('PKGR')))
102 bb.utils.unlockfile(lf) 102 bb.utils.unlockfile(lf)
103 continue 103 continue
104 104
@@ -111,7 +111,7 @@ python do_package_ipk () {
111 bb.fatal("unable to open control file for writing") 111 bb.fatal("unable to open control file for writing")
112 112
113 fields = [] 113 fields = []
114 pe = d.getVar('PKGE', True) 114 pe = d.getVar('PKGE')
115 if pe and int(pe) > 0: 115 if pe and int(pe) > 0:
116 fields.append(["Version: %s:%s-%s\n", ['PKGE', 'PKGV', 'PKGR']]) 116 fields.append(["Version: %s:%s-%s\n", ['PKGE', 'PKGV', 'PKGR']])
117 else: 117 else:
@@ -123,13 +123,13 @@ python do_package_ipk () {
123 fields.append(["License: %s\n", ['LICENSE']]) 123 fields.append(["License: %s\n", ['LICENSE']])
124 fields.append(["Architecture: %s\n", ['PACKAGE_ARCH']]) 124 fields.append(["Architecture: %s\n", ['PACKAGE_ARCH']])
125 fields.append(["OE: %s\n", ['PN']]) 125 fields.append(["OE: %s\n", ['PN']])
126 if d.getVar('HOMEPAGE', True): 126 if d.getVar('HOMEPAGE'):
127 fields.append(["Homepage: %s\n", ['HOMEPAGE']]) 127 fields.append(["Homepage: %s\n", ['HOMEPAGE']])
128 128
129 def pullData(l, d): 129 def pullData(l, d):
130 l2 = [] 130 l2 = []
131 for i in l: 131 for i in l:
132 l2.append(d.getVar(i, True)) 132 l2.append(d.getVar(i))
133 return l2 133 return l2
134 134
135 ctrlfile.write("Package: %s\n" % pkgname) 135 ctrlfile.write("Package: %s\n" % pkgname)
@@ -141,9 +141,9 @@ python do_package_ipk () {
141 raise KeyError(f) 141 raise KeyError(f)
142 # Special behavior for description... 142 # Special behavior for description...
143 if 'DESCRIPTION' in fs: 143 if 'DESCRIPTION' in fs:
144 summary = localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or "." 144 summary = localdata.getVar('SUMMARY') or localdata.getVar('DESCRIPTION') or "."
145 ctrlfile.write('Description: %s\n' % summary) 145 ctrlfile.write('Description: %s\n' % summary)
146 description = localdata.getVar('DESCRIPTION', True) or "." 146 description = localdata.getVar('DESCRIPTION') or "."
147 description = textwrap.dedent(description).strip() 147 description = textwrap.dedent(description).strip()
148 if '\\n' in description: 148 if '\\n' in description:
149 # Manually indent 149 # Manually indent
@@ -185,19 +185,19 @@ python do_package_ipk () {
185 elif (v or "").startswith("> "): 185 elif (v or "").startswith("> "):
186 var[dep][i] = var[dep][i].replace("> ", ">> ") 186 var[dep][i] = var[dep][i].replace("> ", ">> ")
187 187
188 rdepends = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS", True) or "") 188 rdepends = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS") or "")
189 debian_cmp_remap(rdepends) 189 debian_cmp_remap(rdepends)
190 rrecommends = bb.utils.explode_dep_versions2(localdata.getVar("RRECOMMENDS", True) or "") 190 rrecommends = bb.utils.explode_dep_versions2(localdata.getVar("RRECOMMENDS") or "")
191 debian_cmp_remap(rrecommends) 191 debian_cmp_remap(rrecommends)
192 rsuggests = bb.utils.explode_dep_versions2(localdata.getVar("RSUGGESTS", True) or "") 192 rsuggests = bb.utils.explode_dep_versions2(localdata.getVar("RSUGGESTS") or "")
193 debian_cmp_remap(rsuggests) 193 debian_cmp_remap(rsuggests)
194 # Deliberately drop version information here, not wanted/supported by ipk 194 # Deliberately drop version information here, not wanted/supported by ipk
195 rprovides = dict.fromkeys(bb.utils.explode_dep_versions2(localdata.getVar("RPROVIDES", True) or ""), []) 195 rprovides = dict.fromkeys(bb.utils.explode_dep_versions2(localdata.getVar("RPROVIDES") or ""), [])
196 rprovides = collections.OrderedDict(sorted(rprovides.items(), key=lambda x: x[0])) 196 rprovides = collections.OrderedDict(sorted(rprovides.items(), key=lambda x: x[0]))
197 debian_cmp_remap(rprovides) 197 debian_cmp_remap(rprovides)
198 rreplaces = bb.utils.explode_dep_versions2(localdata.getVar("RREPLACES", True) or "") 198 rreplaces = bb.utils.explode_dep_versions2(localdata.getVar("RREPLACES") or "")
199 debian_cmp_remap(rreplaces) 199 debian_cmp_remap(rreplaces)
200 rconflicts = bb.utils.explode_dep_versions2(localdata.getVar("RCONFLICTS", True) or "") 200 rconflicts = bb.utils.explode_dep_versions2(localdata.getVar("RCONFLICTS") or "")
201 debian_cmp_remap(rconflicts) 201 debian_cmp_remap(rconflicts)
202 202
203 if rdepends: 203 if rdepends:
@@ -212,14 +212,14 @@ python do_package_ipk () {
212 ctrlfile.write("Replaces: %s\n" % bb.utils.join_deps(rreplaces)) 212 ctrlfile.write("Replaces: %s\n" % bb.utils.join_deps(rreplaces))
213 if rconflicts: 213 if rconflicts:
214 ctrlfile.write("Conflicts: %s\n" % bb.utils.join_deps(rconflicts)) 214 ctrlfile.write("Conflicts: %s\n" % bb.utils.join_deps(rconflicts))
215 src_uri = localdata.getVar("SRC_URI", True).strip() or "None" 215 src_uri = localdata.getVar("SRC_URI").strip() or "None"
216 if src_uri: 216 if src_uri:
217 src_uri = re.sub("\s+", " ", src_uri) 217 src_uri = re.sub("\s+", " ", src_uri)
218 ctrlfile.write("Source: %s\n" % " ".join(src_uri.split())) 218 ctrlfile.write("Source: %s\n" % " ".join(src_uri.split()))
219 ctrlfile.close() 219 ctrlfile.close()
220 220
221 for script in ["preinst", "postinst", "prerm", "postrm"]: 221 for script in ["preinst", "postinst", "prerm", "postrm"]:
222 scriptvar = localdata.getVar('pkg_%s' % script, True) 222 scriptvar = localdata.getVar('pkg_%s' % script)
223 if not scriptvar: 223 if not scriptvar:
224 continue 224 continue
225 try: 225 try:
@@ -244,15 +244,15 @@ python do_package_ipk () {
244 conffiles.close() 244 conffiles.close()
245 245
246 os.chdir(basedir) 246 os.chdir(basedir)
247 ret = subprocess.call("PATH=\"%s\" %s %s %s" % (localdata.getVar("PATH", True), 247 ret = subprocess.call("PATH=\"%s\" %s %s %s" % (localdata.getVar("PATH"),
248 d.getVar("OPKGBUILDCMD", True), pkg, pkgoutdir), shell=True) 248 d.getVar("OPKGBUILDCMD"), pkg, pkgoutdir), shell=True)
249 if ret != 0: 249 if ret != 0:
250 bb.utils.unlockfile(lf) 250 bb.utils.unlockfile(lf)
251 bb.fatal("opkg-build execution failed") 251 bb.fatal("opkg-build execution failed")
252 252
253 if d.getVar('IPK_SIGN_PACKAGES', True) == '1': 253 if d.getVar('IPK_SIGN_PACKAGES') == '1':
254 ipkver = "%s-%s" % (d.getVar('PKGV', True), d.getVar('PKGR', True)) 254 ipkver = "%s-%s" % (d.getVar('PKGV'), d.getVar('PKGR'))
255 ipk_to_sign = "%s/%s_%s_%s.ipk" % (pkgoutdir, pkgname, ipkver, d.getVar('PACKAGE_ARCH', True)) 255 ipk_to_sign = "%s/%s_%s_%s.ipk" % (pkgoutdir, pkgname, ipkver, d.getVar('PACKAGE_ARCH'))
256 sign_ipk(d, ipk_to_sign) 256 sign_ipk(d, ipk_to_sign)
257 257
258 cleanupcontrol(root) 258 cleanupcontrol(root)
@@ -268,7 +268,7 @@ do_package_write_ipk[sstate-inputdirs] = "${PKGWRITEDIRIPK}"
268do_package_write_ipk[sstate-outputdirs] = "${DEPLOY_DIR_IPK}" 268do_package_write_ipk[sstate-outputdirs] = "${DEPLOY_DIR_IPK}"
269 269
270python do_package_write_ipk_setscene () { 270python do_package_write_ipk_setscene () {
271 tmpdir = d.getVar('TMPDIR', True) 271 tmpdir = d.getVar('TMPDIR')
272 272
273 if os.access(os.path.join(tmpdir, "stamps", "IPK_PACKAGE_INDEX_CLEAN"), os.R_OK): 273 if os.access(os.path.join(tmpdir, "stamps", "IPK_PACKAGE_INDEX_CLEAN"), os.R_OK):
274 os.unlink(os.path.join(tmpdir, "stamps", "IPK_PACKAGE_INDEX_CLEAN")) 274 os.unlink(os.path.join(tmpdir, "stamps", "IPK_PACKAGE_INDEX_CLEAN"))
@@ -278,7 +278,7 @@ python do_package_write_ipk_setscene () {
278addtask do_package_write_ipk_setscene 278addtask do_package_write_ipk_setscene
279 279
280python () { 280python () {
281 if d.getVar('PACKAGES', True) != '': 281 if d.getVar('PACKAGES') != '':
282 deps = ' opkg-utils-native:do_populate_sysroot virtual/fakeroot-native:do_populate_sysroot' 282 deps = ' opkg-utils-native:do_populate_sysroot virtual/fakeroot-native:do_populate_sysroot'
283 d.appendVarFlag('do_package_write_ipk', 'depends', deps) 283 d.appendVarFlag('do_package_write_ipk', 'depends', deps)
284 d.setVarFlag('do_package_write_ipk', 'fakeroot', "1") 284 d.setVarFlag('do_package_write_ipk', 'fakeroot', "1")
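
[Annotation] A side note on the OPKG_ARGS lines at the top of this file: the ${@...} fragments pick between two strings by indexing a two-element list with a boolean, since Python treats False/True as 0/1. In plain Python, with assumed stand-in values for the datastore lookups:

    # Plain-Python equivalent of the list-indexed-by-boolean selector
    # in OPKG_ARGS; variable values below are assumed examples.
    no_recommendations = "1"   # stand-in for d.getVar("NO_RECOMMENDATIONS")
    arg = ['', '--no-install-recommends'][no_recommendations == "1"]
    assert arg == '--no-install-recommends'

    exclude = "pkg-a pkg-b"    # stand-in for d.getVar("PACKAGE_EXCLUDE")
    arg = ['', '--add-exclude ' +
           ' --add-exclude '.join(exclude.split())][exclude != ""]
    assert arg == '--add-exclude pkg-a --add-exclude pkg-b'
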
diff --git a/meta/classes/package_rpm.bbclass b/meta/classes/package_rpm.bbclass
index c431545f7c..722c62877a 100644
--- a/meta/classes/package_rpm.bbclass
+++ b/meta/classes/package_rpm.bbclass
@@ -13,9 +13,9 @@ MERGEPERFILEDEPS = "1"
13 13
14# Construct per file dependencies file 14# Construct per file dependencies file
15def write_rpm_perfiledata(srcname, d): 15def write_rpm_perfiledata(srcname, d):
16 workdir = d.getVar('WORKDIR', True) 16 workdir = d.getVar('WORKDIR')
17 packages = d.getVar('PACKAGES', True) 17 packages = d.getVar('PACKAGES')
18 pkgd = d.getVar('PKGD', True) 18 pkgd = d.getVar('PKGD')
19 19
20 def dump_filerdeps(varname, outfile, d): 20 def dump_filerdeps(varname, outfile, d):
21 outfile.write("#!/usr/bin/env python\n\n") 21 outfile.write("#!/usr/bin/env python\n\n")
@@ -23,10 +23,10 @@ def write_rpm_perfiledata(srcname, d):
23 outfile.write('deps = {\n') 23 outfile.write('deps = {\n')
24 for pkg in packages.split(): 24 for pkg in packages.split():
25 dependsflist_key = 'FILE' + varname + 'FLIST' + "_" + pkg 25 dependsflist_key = 'FILE' + varname + 'FLIST' + "_" + pkg
26 dependsflist = (d.getVar(dependsflist_key, True) or "") 26 dependsflist = (d.getVar(dependsflist_key) or "")
27 for dfile in dependsflist.split(): 27 for dfile in dependsflist.split():
28 key = "FILE" + varname + "_" + dfile + "_" + pkg 28 key = "FILE" + varname + "_" + dfile + "_" + pkg
29 depends_dict = bb.utils.explode_dep_versions(d.getVar(key, True) or "") 29 depends_dict = bb.utils.explode_dep_versions(d.getVar(key) or "")
30 file = dfile.replace("@underscore@", "_") 30 file = dfile.replace("@underscore@", "_")
31 file = file.replace("@closebrace@", "]") 31 file = file.replace("@closebrace@", "]")
32 file = file.replace("@openbrace@", "[") 32 file = file.replace("@openbrace@", "[")
@@ -87,14 +87,14 @@ python write_specfile () {
87 # append information for logs and patches to %prep 87 # append information for logs and patches to %prep
88 def add_prep(d,spec_files_bottom): 88 def add_prep(d,spec_files_bottom):
89 if d.getVarFlag('ARCHIVER_MODE', 'srpm', True) == '1' and bb.data.inherits_class('archiver', d): 89 if d.getVarFlag('ARCHIVER_MODE', 'srpm', True) == '1' and bb.data.inherits_class('archiver', d):
90 spec_files_bottom.append('%%prep -n %s' % d.getVar('PN', True) ) 90 spec_files_bottom.append('%%prep -n %s' % d.getVar('PN') )
91 spec_files_bottom.append('%s' % "echo \"include logs and patches, Please check them in SOURCES\"") 91 spec_files_bottom.append('%s' % "echo \"include logs and patches, Please check them in SOURCES\"")
92 spec_files_bottom.append('') 92 spec_files_bottom.append('')
93 93
94 # append the name of tarball to key word 'SOURCE' in xxx.spec. 94 # append the name of tarball to key word 'SOURCE' in xxx.spec.
95 def tail_source(d): 95 def tail_source(d):
96 if d.getVarFlag('ARCHIVER_MODE', 'srpm', True) == '1' and bb.data.inherits_class('archiver', d): 96 if d.getVarFlag('ARCHIVER_MODE', 'srpm', True) == '1' and bb.data.inherits_class('archiver', d):
97 ar_outdir = d.getVar('ARCHIVER_OUTDIR', True) 97 ar_outdir = d.getVar('ARCHIVER_OUTDIR')
98 if not os.path.exists(ar_outdir): 98 if not os.path.exists(ar_outdir):
99 return 99 return
100 source_list = os.listdir(ar_outdir) 100 source_list = os.listdir(ar_outdir)
@@ -110,7 +110,7 @@ python write_specfile () {
110 # We need a simple way to remove the MLPREFIX from the package name, 110 # We need a simple way to remove the MLPREFIX from the package name,
111 # and dependency information... 111 # and dependency information...
112 def strip_multilib(name, d): 112 def strip_multilib(name, d):
113 multilibs = d.getVar('MULTILIBS', True) or "" 113 multilibs = d.getVar('MULTILIBS') or ""
114 for ext in multilibs.split(): 114 for ext in multilibs.split():
115 eext = ext.split(':') 115 eext = ext.split(':')
116 if len(eext) > 1 and eext[0] == 'multilib' and name and name.find(eext[1] + '-') >= 0: 116 if len(eext) > 1 and eext[0] == 'multilib' and name and name.find(eext[1] + '-') >= 0:
@@ -124,7 +124,7 @@ python write_specfile () {
124 newdeps[strip_multilib(dep, d)] = depends[dep] 124 newdeps[strip_multilib(dep, d)] = depends[dep]
125 return bb.utils.join_deps(newdeps) 125 return bb.utils.join_deps(newdeps)
126 126
127# ml = d.getVar("MLPREFIX", True) 127# ml = d.getVar("MLPREFIX")
128# if ml and name and len(ml) != 0 and name.find(ml) == 0: 128# if ml and name and len(ml) != 0 and name.find(ml) == 0:
129# return ml.join(name.split(ml, 1)[1:]) 129# return ml.join(name.split(ml, 1)[1:])
130# return name 130# return name
@@ -144,7 +144,7 @@ python write_specfile () {
144 # after renaming we cannot look up the dependencies in the packagedata 144 # after renaming we cannot look up the dependencies in the packagedata
145 # store. 145 # store.
146 def translate_vers(varname, d): 146 def translate_vers(varname, d):
147 depends = d.getVar(varname, True) 147 depends = d.getVar(varname)
148 if depends: 148 if depends:
149 depends_dict = bb.utils.explode_dep_versions2(depends) 149 depends_dict = bb.utils.explode_dep_versions2(depends)
150 newdeps_dict = {} 150 newdeps_dict = {}
@@ -248,10 +248,10 @@ python write_specfile () {
248 def get_perfile(varname, pkg, d): 248 def get_perfile(varname, pkg, d):
249 deps = [] 249 deps = []
250 dependsflist_key = 'FILE' + varname + 'FLIST' + "_" + pkg 250 dependsflist_key = 'FILE' + varname + 'FLIST' + "_" + pkg
251 dependsflist = (d.getVar(dependsflist_key, True) or "") 251 dependsflist = (d.getVar(dependsflist_key) or "")
252 for dfile in dependsflist.split(): 252 for dfile in dependsflist.split():
253 key = "FILE" + varname + "_" + dfile + "_" + pkg 253 key = "FILE" + varname + "_" + dfile + "_" + pkg
254 depends = d.getVar(key, True) 254 depends = d.getVar(key)
255 if depends: 255 if depends:
256 deps.append(depends) 256 deps.append(depends)
257 return " ".join(deps) 257 return " ".join(deps)
@@ -269,33 +269,33 @@ python write_specfile () {
269 else: 269 else:
270 spec_preamble.append('%s' % textwrap.fill(dedent_text, width=75)) 270 spec_preamble.append('%s' % textwrap.fill(dedent_text, width=75))
271 271
272 packages = d.getVar('PACKAGES', True) 272 packages = d.getVar('PACKAGES')
273 if not packages or packages == '': 273 if not packages or packages == '':
274 bb.debug(1, "No packages; nothing to do") 274 bb.debug(1, "No packages; nothing to do")
275 return 275 return
276 276
277 pkgdest = d.getVar('PKGDEST', True) 277 pkgdest = d.getVar('PKGDEST')
278 if not pkgdest: 278 if not pkgdest:
279 bb.fatal("No PKGDEST") 279 bb.fatal("No PKGDEST")
280 280
281 outspecfile = d.getVar('OUTSPECFILE', True) 281 outspecfile = d.getVar('OUTSPECFILE')
282 if not outspecfile: 282 if not outspecfile:
283 bb.fatal("No OUTSPECFILE") 283 bb.fatal("No OUTSPECFILE")
284 284
285 # Construct the SPEC file... 285 # Construct the SPEC file...
286 srcname = strip_multilib(d.getVar('PN', True), d) 286 srcname = strip_multilib(d.getVar('PN'), d)
287 srcsummary = (d.getVar('SUMMARY', True) or d.getVar('DESCRIPTION', True) or ".") 287 srcsummary = (d.getVar('SUMMARY') or d.getVar('DESCRIPTION') or ".")
288 srcversion = d.getVar('PKGV', True).replace('-', '+') 288 srcversion = d.getVar('PKGV').replace('-', '+')
289 srcrelease = d.getVar('PKGR', True) 289 srcrelease = d.getVar('PKGR')
290 srcepoch = (d.getVar('PKGE', True) or "") 290 srcepoch = (d.getVar('PKGE') or "")
291 srclicense = d.getVar('LICENSE', True) 291 srclicense = d.getVar('LICENSE')
292 srcsection = d.getVar('SECTION', True) 292 srcsection = d.getVar('SECTION')
293 srcmaintainer = d.getVar('MAINTAINER', True) 293 srcmaintainer = d.getVar('MAINTAINER')
294 srchomepage = d.getVar('HOMEPAGE', True) 294 srchomepage = d.getVar('HOMEPAGE')
295 srcdescription = d.getVar('DESCRIPTION', True) or "." 295 srcdescription = d.getVar('DESCRIPTION') or "."
296 srccustomtagschunk = get_package_additional_metadata("rpm", d) 296 srccustomtagschunk = get_package_additional_metadata("rpm", d)
297 297
298 srcdepends = strip_multilib_deps(d.getVar('DEPENDS', True), d) 298 srcdepends = strip_multilib_deps(d.getVar('DEPENDS'), d)
299 srcrdepends = [] 299 srcrdepends = []
300 srcrrecommends = [] 300 srcrrecommends = []
301 srcrsuggests = [] 301 srcrsuggests = []
@@ -318,8 +318,8 @@ python write_specfile () {
318 spec_files_top = [] 318 spec_files_top = []
319 spec_files_bottom = [] 319 spec_files_bottom = []
320 320
321 perfiledeps = (d.getVar("MERGEPERFILEDEPS", True) or "0") == "0" 321 perfiledeps = (d.getVar("MERGEPERFILEDEPS") or "0") == "0"
322 extra_pkgdata = (d.getVar("RPM_EXTRA_PKGDATA", True) or "0") == "1" 322 extra_pkgdata = (d.getVar("RPM_EXTRA_PKGDATA") or "0") == "1"
323 323
324 for pkg in packages.split(): 324 for pkg in packages.split():
325 localdata = bb.data.createCopy(d) 325 localdata = bb.data.createCopy(d)
@@ -328,7 +328,7 @@ python write_specfile () {
328 328
329 localdata.setVar('ROOT', '') 329 localdata.setVar('ROOT', '')
330 localdata.setVar('ROOT_%s' % pkg, root) 330 localdata.setVar('ROOT_%s' % pkg, root)
331 pkgname = localdata.getVar('PKG_%s' % pkg, True) 331 pkgname = localdata.getVar('PKG_%s' % pkg)
332 if not pkgname: 332 if not pkgname:
333 pkgname = pkg 333 pkgname = pkg
334 localdata.setVar('PKG', pkgname) 334 localdata.setVar('PKG', pkgname)
@@ -338,19 +338,19 @@ python write_specfile () {
338 bb.data.update_data(localdata) 338 bb.data.update_data(localdata)
339 339
340 conffiles = get_conffiles(pkg, d) 340 conffiles = get_conffiles(pkg, d)
341 dirfiles = localdata.getVar('DIRFILES', True) 341 dirfiles = localdata.getVar('DIRFILES')
342 if dirfiles is not None: 342 if dirfiles is not None:
343 dirfiles = dirfiles.split() 343 dirfiles = dirfiles.split()
344 344
345 splitname = strip_multilib(pkgname, d) 345 splitname = strip_multilib(pkgname, d)
346 346
347 splitsummary = (localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or ".") 347 splitsummary = (localdata.getVar('SUMMARY') or localdata.getVar('DESCRIPTION') or ".")
348 splitversion = (localdata.getVar('PKGV', True) or "").replace('-', '+') 348 splitversion = (localdata.getVar('PKGV') or "").replace('-', '+')
349 splitrelease = (localdata.getVar('PKGR', True) or "") 349 splitrelease = (localdata.getVar('PKGR') or "")
350 splitepoch = (localdata.getVar('PKGE', True) or "") 350 splitepoch = (localdata.getVar('PKGE') or "")
351 splitlicense = (localdata.getVar('LICENSE', True) or "") 351 splitlicense = (localdata.getVar('LICENSE') or "")
352 splitsection = (localdata.getVar('SECTION', True) or "") 352 splitsection = (localdata.getVar('SECTION') or "")
353 splitdescription = (localdata.getVar('DESCRIPTION', True) or ".") 353 splitdescription = (localdata.getVar('DESCRIPTION') or ".")
354 splitcustomtagschunk = get_package_additional_metadata("rpm", localdata) 354 splitcustomtagschunk = get_package_additional_metadata("rpm", localdata)
355 355
356 translate_vers('RDEPENDS', localdata) 356 translate_vers('RDEPENDS', localdata)
@@ -363,18 +363,18 @@ python write_specfile () {
363 # Map the dependencies into their final form 363 # Map the dependencies into their final form
364 mapping_rename_hook(localdata) 364 mapping_rename_hook(localdata)
365 365
366 splitrdepends = strip_multilib_deps(localdata.getVar('RDEPENDS', True), d) 366 splitrdepends = strip_multilib_deps(localdata.getVar('RDEPENDS'), d)
367 splitrrecommends = strip_multilib_deps(localdata.getVar('RRECOMMENDS', True), d) 367 splitrrecommends = strip_multilib_deps(localdata.getVar('RRECOMMENDS'), d)
368 splitrsuggests = strip_multilib_deps(localdata.getVar('RSUGGESTS', True), d) 368 splitrsuggests = strip_multilib_deps(localdata.getVar('RSUGGESTS'), d)
369 splitrprovides = strip_multilib_deps(localdata.getVar('RPROVIDES', True), d) 369 splitrprovides = strip_multilib_deps(localdata.getVar('RPROVIDES'), d)
370 splitrreplaces = strip_multilib_deps(localdata.getVar('RREPLACES', True), d) 370 splitrreplaces = strip_multilib_deps(localdata.getVar('RREPLACES'), d)
371 splitrconflicts = strip_multilib_deps(localdata.getVar('RCONFLICTS', True), d) 371 splitrconflicts = strip_multilib_deps(localdata.getVar('RCONFLICTS'), d)
372 splitrobsoletes = [] 372 splitrobsoletes = []
373 373
374 splitrpreinst = localdata.getVar('pkg_preinst', True) 374 splitrpreinst = localdata.getVar('pkg_preinst')
375 splitrpostinst = localdata.getVar('pkg_postinst', True) 375 splitrpostinst = localdata.getVar('pkg_postinst')
376 splitrprerm = localdata.getVar('pkg_prerm', True) 376 splitrprerm = localdata.getVar('pkg_prerm')
377 splitrpostrm = localdata.getVar('pkg_postrm', True) 377 splitrpostrm = localdata.getVar('pkg_postrm')
378 378
379 379
380 if not perfiledeps: 380 if not perfiledeps:
@@ -621,7 +621,7 @@ python write_specfile () {
621 621
622 # RPMSPEC_PREAMBLE is a way to add arbitrary text to the top 622 # RPMSPEC_PREAMBLE is a way to add arbitrary text to the top
623 # of the generated spec file 623 # of the generated spec file
624 external_preamble = d.getVar("RPMSPEC_PREAMBLE", True) 624 external_preamble = d.getVar("RPMSPEC_PREAMBLE")
625 if external_preamble: 625 if external_preamble:
626 specfile.write(external_preamble + "\n") 626 specfile.write(external_preamble + "\n")
627 627
@@ -652,20 +652,20 @@ python do_package_rpm () {
652 # We need a simple way to remove the MLPREFIX from the package name, 652 # We need a simple way to remove the MLPREFIX from the package name,
653 # and dependency information... 653 # and dependency information...
654 def strip_multilib(name, d): 654 def strip_multilib(name, d):
655 ml = d.getVar("MLPREFIX", True) 655 ml = d.getVar("MLPREFIX")
656 if ml and name and len(ml) != 0 and name.find(ml) >= 0: 656 if ml and name and len(ml) != 0 and name.find(ml) >= 0:
657 return "".join(name.split(ml)) 657 return "".join(name.split(ml))
658 return name 658 return name
659 659
660 workdir = d.getVar('WORKDIR', True) 660 workdir = d.getVar('WORKDIR')
661 tmpdir = d.getVar('TMPDIR', True) 661 tmpdir = d.getVar('TMPDIR')
662 pkgd = d.getVar('PKGD', True) 662 pkgd = d.getVar('PKGD')
663 pkgdest = d.getVar('PKGDEST', True) 663 pkgdest = d.getVar('PKGDEST')
664 if not workdir or not pkgd or not tmpdir: 664 if not workdir or not pkgd or not tmpdir:
665 bb.error("Variables incorrectly set, unable to package") 665 bb.error("Variables incorrectly set, unable to package")
666 return 666 return
667 667
668 packages = d.getVar('PACKAGES', True) 668 packages = d.getVar('PACKAGES')
669 if not packages or packages == '': 669 if not packages or packages == '':
670 bb.debug(1, "No packages; nothing to do") 670 bb.debug(1, "No packages; nothing to do")
671 return 671 return
@@ -674,31 +674,31 @@ python do_package_rpm () {
674 # If the spec file already exist, and has not been stored into 674 # If the spec file already exist, and has not been stored into
675 # pseudo's files.db, it maybe cause rpmbuild src.rpm fail, 675 # pseudo's files.db, it maybe cause rpmbuild src.rpm fail,
676 # so remove it before doing rpmbuild src.rpm. 676 # so remove it before doing rpmbuild src.rpm.
677 srcname = strip_multilib(d.getVar('PN', True), d) 677 srcname = strip_multilib(d.getVar('PN'), d)
678 outspecfile = workdir + "/" + srcname + ".spec" 678 outspecfile = workdir + "/" + srcname + ".spec"
679 if os.path.isfile(outspecfile): 679 if os.path.isfile(outspecfile):
680 os.remove(outspecfile) 680 os.remove(outspecfile)
681 d.setVar('OUTSPECFILE', outspecfile) 681 d.setVar('OUTSPECFILE', outspecfile)
682 bb.build.exec_func('write_specfile', d) 682 bb.build.exec_func('write_specfile', d)
683 683
684 perfiledeps = (d.getVar("MERGEPERFILEDEPS", True) or "0") == "0" 684 perfiledeps = (d.getVar("MERGEPERFILEDEPS") or "0") == "0"
685 if perfiledeps: 685 if perfiledeps:
686 outdepends, outprovides = write_rpm_perfiledata(srcname, d) 686 outdepends, outprovides = write_rpm_perfiledata(srcname, d)
687 687
688 # Setup the rpmbuild arguments... 688 # Setup the rpmbuild arguments...
689 rpmbuild = d.getVar('RPMBUILD', True) 689 rpmbuild = d.getVar('RPMBUILD')
690 targetsys = d.getVar('TARGET_SYS', True) 690 targetsys = d.getVar('TARGET_SYS')
691 targetvendor = d.getVar('HOST_VENDOR', True) 691 targetvendor = d.getVar('HOST_VENDOR')
692 package_arch = (d.getVar('PACKAGE_ARCH', True) or "").replace("-", "_") 692 package_arch = (d.getVar('PACKAGE_ARCH') or "").replace("-", "_")
693 sdkpkgsuffix = (d.getVar('SDKPKGSUFFIX', True) or "nativesdk").replace("-", "_") 693 sdkpkgsuffix = (d.getVar('SDKPKGSUFFIX') or "nativesdk").replace("-", "_")
694 if package_arch not in "all any noarch".split() and not package_arch.endswith(sdkpkgsuffix): 694 if package_arch not in "all any noarch".split() and not package_arch.endswith(sdkpkgsuffix):
695 ml_prefix = (d.getVar('MLPREFIX', True) or "").replace("-", "_") 695 ml_prefix = (d.getVar('MLPREFIX') or "").replace("-", "_")
696 d.setVar('PACKAGE_ARCH_EXTEND', ml_prefix + package_arch) 696 d.setVar('PACKAGE_ARCH_EXTEND', ml_prefix + package_arch)
697 else: 697 else:
698 d.setVar('PACKAGE_ARCH_EXTEND', package_arch) 698 d.setVar('PACKAGE_ARCH_EXTEND', package_arch)
699 pkgwritedir = d.expand('${PKGWRITEDIRRPM}/${PACKAGE_ARCH_EXTEND}') 699 pkgwritedir = d.expand('${PKGWRITEDIRRPM}/${PACKAGE_ARCH_EXTEND}')
700 d.setVar('RPM_PKGWRITEDIR', pkgwritedir) 700 d.setVar('RPM_PKGWRITEDIR', pkgwritedir)
701 bb.debug(1, 'PKGWRITEDIR: %s' % d.getVar('RPM_PKGWRITEDIR', True)) 701 bb.debug(1, 'PKGWRITEDIR: %s' % d.getVar('RPM_PKGWRITEDIR'))
702 pkgarch = d.expand('${PACKAGE_ARCH_EXTEND}${HOST_VENDOR}-${HOST_OS}') 702 pkgarch = d.expand('${PACKAGE_ARCH_EXTEND}${HOST_VENDOR}-${HOST_OS}')
703 magicfile = d.expand('${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc') 703 magicfile = d.expand('${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc')
704 bb.utils.mkdirhier(pkgwritedir) 704 bb.utils.mkdirhier(pkgwritedir)
@@ -707,7 +707,7 @@ python do_package_rpm () {
707 cmd = rpmbuild 707 cmd = rpmbuild
708 cmd = cmd + " --nodeps --short-circuit --target " + pkgarch + " --buildroot " + pkgd 708 cmd = cmd + " --nodeps --short-circuit --target " + pkgarch + " --buildroot " + pkgd
709 cmd = cmd + " --define '_topdir " + workdir + "' --define '_rpmdir " + pkgwritedir + "'" 709 cmd = cmd + " --define '_topdir " + workdir + "' --define '_rpmdir " + pkgwritedir + "'"
710 cmd = cmd + " --define '_builddir " + d.getVar('S', True) + "'" 710 cmd = cmd + " --define '_builddir " + d.getVar('S') + "'"
711 cmd = cmd + " --define '_build_name_fmt %%{NAME}-%%{VERSION}-%%{RELEASE}.%%{ARCH}.rpm'" 711 cmd = cmd + " --define '_build_name_fmt %%{NAME}-%%{VERSION}-%%{RELEASE}.%%{ARCH}.rpm'"
712 cmd = cmd + " --define '_use_internal_dependency_generator 0'" 712 cmd = cmd + " --define '_use_internal_dependency_generator 0'"
713 if perfiledeps: 713 if perfiledeps:
@@ -721,8 +721,8 @@ python do_package_rpm () {
721 cmd = cmd + " --define '_rpmfc_magic_path " + magicfile + "'" 721 cmd = cmd + " --define '_rpmfc_magic_path " + magicfile + "'"
722 cmd = cmd + " --define '_tmppath " + workdir + "'" 722 cmd = cmd + " --define '_tmppath " + workdir + "'"
723 if d.getVarFlag('ARCHIVER_MODE', 'srpm', True) == '1' and bb.data.inherits_class('archiver', d): 723 if d.getVarFlag('ARCHIVER_MODE', 'srpm', True) == '1' and bb.data.inherits_class('archiver', d):
724 cmd = cmd + " --define '_sourcedir " + d.getVar('ARCHIVER_OUTDIR', True) + "'" 724 cmd = cmd + " --define '_sourcedir " + d.getVar('ARCHIVER_OUTDIR') + "'"
725 cmdsrpm = cmd + " --define '_srcrpmdir " + d.getVar('ARCHIVER_OUTDIR', True) + "'" 725 cmdsrpm = cmd + " --define '_srcrpmdir " + d.getVar('ARCHIVER_OUTDIR') + "'"
726 cmdsrpm = cmdsrpm + " -bs " + outspecfile 726 cmdsrpm = cmdsrpm + " -bs " + outspecfile
727 # Build the .src.rpm 727 # Build the .src.rpm
728 d.setVar('SBUILDSPEC', cmdsrpm + "\n") 728 d.setVar('SBUILDSPEC', cmdsrpm + "\n")
@@ -735,12 +735,12 @@ python do_package_rpm () {
735 d.setVarFlag('BUILDSPEC', 'func', '1') 735 d.setVarFlag('BUILDSPEC', 'func', '1')
736 bb.build.exec_func('BUILDSPEC', d) 736 bb.build.exec_func('BUILDSPEC', d)
737 737
738 if d.getVar('RPM_SIGN_PACKAGES', True) == '1': 738 if d.getVar('RPM_SIGN_PACKAGES') == '1':
739 bb.build.exec_func("sign_rpm", d) 739 bb.build.exec_func("sign_rpm", d)
740} 740}
741 741
742python () { 742python () {
743 if d.getVar('PACKAGES', True) != '': 743 if d.getVar('PACKAGES') != '':
744 deps = ' rpm-native:do_populate_sysroot virtual/fakeroot-native:do_populate_sysroot' 744 deps = ' rpm-native:do_populate_sysroot virtual/fakeroot-native:do_populate_sysroot'
745 d.appendVarFlag('do_package_write_rpm', 'depends', deps) 745 d.appendVarFlag('do_package_write_rpm', 'depends', deps)
746 d.setVarFlag('do_package_write_rpm', 'fakeroot', '1') 746 d.setVarFlag('do_package_write_rpm', 'fakeroot', '1')
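
[Annotation] Two details worth noting in the package_rpm.bbclass hunks: the getVarFlag('ARCHIVER_MODE', 'srpm', True) calls keep their explicit True, since only getVar() calls were rewritten here, and the multilib prefix stripping in do_package_rpm() is plain string surgery. The latter, rendered stand-alone (the MLPREFIX values are assumed examples):

    # Plain-Python rendering of the strip_multilib() logic visible in
    # the do_package_rpm() hunk; mlprefix stands in for the
    # d.getVar("MLPREFIX") lookup and the values are assumed examples.
    def strip_multilib(name, mlprefix):
        if mlprefix and name and len(mlprefix) != 0 and name.find(mlprefix) >= 0:
            return "".join(name.split(mlprefix))
        return name

    assert strip_multilib("lib32-zlib", "lib32-") == "zlib"
    assert strip_multilib("zlib", "lib32-") == "zlib"
    assert strip_multilib("zlib", None) == "zlib"
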
diff --git a/meta/classes/package_tar.bbclass b/meta/classes/package_tar.bbclass
index e217814af4..3ff8b8f560 100644
--- a/meta/classes/package_tar.bbclass
+++ b/meta/classes/package_tar.bbclass
@@ -7,27 +7,27 @@ python do_package_tar () {
 
     oldcwd = os.getcwd()
 
-    workdir = d.getVar('WORKDIR', True)
+    workdir = d.getVar('WORKDIR')
     if not workdir:
         bb.error("WORKDIR not defined, unable to package")
         return
 
-    outdir = d.getVar('DEPLOY_DIR_TAR', True)
+    outdir = d.getVar('DEPLOY_DIR_TAR')
     if not outdir:
         bb.error("DEPLOY_DIR_TAR not defined, unable to package")
         return
 
-    dvar = d.getVar('D', True)
+    dvar = d.getVar('D')
     if not dvar:
         bb.error("D not defined, unable to package")
         return
 
-    packages = d.getVar('PACKAGES', True)
+    packages = d.getVar('PACKAGES')
     if not packages:
         bb.debug(1, "PACKAGES not defined, nothing to package")
         return
 
-    pkgdest = d.getVar('PKGDEST', True)
+    pkgdest = d.getVar('PKGDEST')
 
     bb.utils.mkdirhier(outdir)
     bb.utils.mkdirhier(dvar)
@@ -46,7 +46,7 @@ python do_package_tar () {
         os.chdir(root)
         dlist = os.listdir(root)
         if not dlist:
-            bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', True), localdata.getVar('PKGR', True)))
+            bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV'), localdata.getVar('PKGR')))
             continue
         args = "tar -cz --exclude=CONTROL --exclude=DEBIAN -f".split()
         ret = subprocess.call(args + [tarfn] + dlist)
@@ -57,7 +57,7 @@ python do_package_tar () {
 }
 
 python () {
-    if d.getVar('PACKAGES', True) != '':
+    if d.getVar('PACKAGES') != '':
         deps = (d.getVarFlag('do_package_write_tar', 'depends', True) or "").split()
         deps.append('tar-native:do_populate_sysroot')
         deps.append('virtual/fakeroot-native:do_populate_sysroot')
diff --git a/meta/classes/packagedata.bbclass b/meta/classes/packagedata.bbclass
index 3397f1e36b..a903e5cfd2 100644
--- a/meta/classes/packagedata.bbclass
+++ b/meta/classes/packagedata.bbclass
@@ -2,10 +2,10 @@ python read_subpackage_metadata () {
     import oe.packagedata
 
     vars = {
-        "PN" : d.getVar('PN', True),
-        "PE" : d.getVar('PE', True),
-        "PV" : d.getVar('PV', True),
-        "PR" : d.getVar('PR', True),
+        "PN" : d.getVar('PN'),
+        "PE" : d.getVar('PE'),
+        "PV" : d.getVar('PV'),
+        "PR" : d.getVar('PR'),
     }
 
     data = oe.packagedata.read_pkgdata(vars["PN"], d)
@@ -13,7 +13,7 @@ python read_subpackage_metadata () {
     for key in data.keys():
         d.setVar(key, data[key])
 
-    for pkg in d.getVar('PACKAGES', True).split():
+    for pkg in d.getVar('PACKAGES').split():
         sdata = oe.packagedata.read_subpkgdata(pkg, d)
         for key in sdata.keys():
             if key in vars:
diff --git a/meta/classes/packagefeed-stability.bbclass b/meta/classes/packagefeed-stability.bbclass
index aa01def74d..3a128073d0 100644
--- a/meta/classes/packagefeed-stability.bbclass
+++ b/meta/classes/packagefeed-stability.bbclass
@@ -31,7 +31,7 @@ python() {
     # This assumes that the package_write task is called package_write_<pkgtype>
     # and that the directory in which packages should be written is
     # pointed to by the variable DEPLOY_DIR_<PKGTYPE>
-    for pkgclass in (d.getVar('PACKAGE_CLASSES', True) or '').split():
+    for pkgclass in (d.getVar('PACKAGE_CLASSES') or '').split():
         if pkgclass.startswith('package_'):
             pkgtype = pkgclass.split('_', 1)[1]
             pkgwritefunc = 'do_package_write_%s' % pkgtype
@@ -71,7 +71,7 @@ python() {
 # This isn't the real task function - it's a template that we use in the
 # anonymous python code above
 fakeroot python do_package_compare () {
-    currenttask = d.getVar('BB_CURRENTTASK', True)
+    currenttask = d.getVar('BB_CURRENTTASK')
     pkgtype = currenttask.rsplit('_', 1)[1]
     package_compare_impl(pkgtype, d)
 }
@@ -83,12 +83,12 @@ def package_compare_impl(pkgtype, d):
     import subprocess
     import oe.sstatesig
 
-    pn = d.getVar('PN', True)
+    pn = d.getVar('PN')
     deploydir = d.getVar('DEPLOY_DIR_%s' % pkgtype.upper(), True)
     prepath = deploydir + '-prediff/'
 
     # Find out PKGR values are
-    pkgdatadir = d.getVar('PKGDATA_DIR', True)
+    pkgdatadir = d.getVar('PKGDATA_DIR')
     packages = []
     try:
         with open(os.path.join(pkgdatadir, pn), 'r') as f:
@@ -138,7 +138,7 @@ def package_compare_impl(pkgtype, d):
     files = []
     docopy = False
     manifest, _ = oe.sstatesig.sstate_get_manifest_filename(pkgwritetask, d)
-    mlprefix = d.getVar('MLPREFIX', True)
+    mlprefix = d.getVar('MLPREFIX')
     # Copy recipe's all packages if one of the packages are different to make
     # they have the same PR.
     with open(manifest, 'r') as f:
@@ -215,7 +215,7 @@ def package_compare_impl(pkgtype, d):
                 # multilib), they're identical in theory, but sstate.bbclass
                 # copies it again, so keep align with that.
                 if os.path.exists(destpath) and pkgtype == 'rpm' \
-                        and d.getVar('PACKAGE_ARCH', True) == 'all':
+                        and d.getVar('PACKAGE_ARCH') == 'all':
                     os.unlink(destpath)
                 if (os.stat(srcpath).st_dev == os.stat(destdir).st_dev):
                     # Use a hard link to save space
@@ -229,7 +229,7 @@ def package_compare_impl(pkgtype, d):
 do_cleansstate[postfuncs] += "pfs_cleanpkgs"
 python pfs_cleanpkgs () {
     import errno
-    for pkgclass in (d.getVar('PACKAGE_CLASSES', True) or '').split():
+    for pkgclass in (d.getVar('PACKAGE_CLASSES') or '').split():
         if pkgclass.startswith('package_'):
             pkgtype = pkgclass.split('_', 1)[1]
             deploydir = d.getVar('DEPLOY_DIR_%s' % pkgtype.upper(), True)
diff --git a/meta/classes/packagegroup.bbclass b/meta/classes/packagegroup.bbclass
index 3928c8a4ac..a60a52b6d4 100644
--- a/meta/classes/packagegroup.bbclass
+++ b/meta/classes/packagegroup.bbclass
@@ -16,15 +16,15 @@ PACKAGE_ARCH_EXPANDED := "${PACKAGE_ARCH}"
 
 LICENSE ?= "MIT"
 
-inherit ${@oe.utils.ifelse(d.getVar('PACKAGE_ARCH_EXPANDED', True) == 'all', 'allarch', '')}
+inherit ${@oe.utils.ifelse(d.getVar('PACKAGE_ARCH_EXPANDED') == 'all', 'allarch', '')}
 
 # This automatically adds -dbg and -dev flavours of all PACKAGES
 # to the list. Their dependencies (RRECOMMENDS) are handled as usual
 # by package_depchains in a following step.
 # Also mark all packages as ALLOW_EMPTY
 python () {
-    packages = d.getVar('PACKAGES', True).split()
-    if d.getVar('PACKAGEGROUP_DISABLE_COMPLEMENTARY', True) != '1':
+    packages = d.getVar('PACKAGES').split()
+    if d.getVar('PACKAGEGROUP_DISABLE_COMPLEMENTARY') != '1':
         types = ['', '-dbg', '-dev']
         if bb.utils.contains('DISTRO_FEATURES', 'ptest', True, False, d):
             types.append('-ptest')
@@ -49,7 +49,7 @@ do_install[noexec] = "1"
 do_populate_sysroot[noexec] = "1"
 
 python () {
-    initman = d.getVar("VIRTUAL-RUNTIME_init_manager", True)
+    initman = d.getVar("VIRTUAL-RUNTIME_init_manager")
     if initman and initman in ['sysvinit', 'systemd'] and not bb.utils.contains('DISTRO_FEATURES', initman, True, False, d):
         bb.fatal("Please ensure that your setting of VIRTUAL-RUNTIME_init_manager (%s) matches the entries enabled in DISTRO_FEATURES" % initman)
 }
diff --git a/meta/classes/patch.bbclass b/meta/classes/patch.bbclass
index 0e5b602462..23ba5df48b 100644
--- a/meta/classes/patch.bbclass
+++ b/meta/classes/patch.bbclass
@@ -11,7 +11,7 @@ PATCH_GIT_USER_EMAIL ?= "oe.patch@oe"
 inherit terminal
 
 python () {
-    if d.getVar('PATCHTOOL', True) == 'git' and d.getVar('PATCH_COMMIT_FUNCTIONS', True) == '1':
+    if d.getVar('PATCHTOOL') == 'git' and d.getVar('PATCH_COMMIT_FUNCTIONS') == '1':
         tasks = list(filter(lambda k: d.getVarFlag(k, "task", True), d.keys()))
         extratasks = []
         def follow_chain(task, endtask, chain=None):
@@ -44,8 +44,8 @@ python () {
 
 python patch_task_patch_prefunc() {
     # Prefunc for do_patch
-    func = d.getVar('BB_RUNTASK', True)
-    srcsubdir = d.getVar('S', True)
+    func = d.getVar('BB_RUNTASK')
+    srcsubdir = d.getVar('S')
 
     patchdir = os.path.join(srcsubdir, 'patches')
     if os.path.exists(patchdir):
@@ -59,12 +59,12 @@ python patch_task_postfunc() {
     # Prefunc for task functions between do_unpack and do_patch
     import oe.patch
     import shutil
-    func = d.getVar('BB_RUNTASK', True)
-    srcsubdir = d.getVar('S', True)
+    func = d.getVar('BB_RUNTASK')
+    srcsubdir = d.getVar('S')
 
     if os.path.exists(srcsubdir):
         if func == 'do_patch':
-            haspatches = (d.getVar('PATCH_HAS_PATCHES_DIR', True) == '1')
+            haspatches = (d.getVar('PATCH_HAS_PATCHES_DIR') == '1')
             patchdir = os.path.join(srcsubdir, 'patches')
             if os.path.exists(patchdir):
                 shutil.rmtree(patchdir)
@@ -99,20 +99,20 @@ python patch_do_patch() {
         "git": oe.patch.GitApplyTree,
     }
 
-    cls = patchsetmap[d.getVar('PATCHTOOL', True) or 'quilt']
+    cls = patchsetmap[d.getVar('PATCHTOOL') or 'quilt']
 
     resolvermap = {
         "noop": oe.patch.NOOPResolver,
         "user": oe.patch.UserResolver,
     }
 
-    rcls = resolvermap[d.getVar('PATCHRESOLVE', True) or 'user']
+    rcls = resolvermap[d.getVar('PATCHRESOLVE') or 'user']
 
     classes = {}
 
-    s = d.getVar('S', True)
+    s = d.getVar('S')
 
-    os.putenv('PATH', d.getVar('PATH', True))
+    os.putenv('PATH', d.getVar('PATH'))
 
     # We must use one TMPDIR per process so that the "patch" processes
     # don't generate the same temp file name.
diff --git a/meta/classes/pixbufcache.bbclass b/meta/classes/pixbufcache.bbclass
index 3f48a0f344..63bf079cf1 100644
--- a/meta/classes/pixbufcache.bbclass
+++ b/meta/classes/pixbufcache.bbclass
@@ -28,20 +28,20 @@ fi
 }
 
 python populate_packages_append() {
-    pixbuf_pkgs = d.getVar('PIXBUF_PACKAGES', True).split()
+    pixbuf_pkgs = d.getVar('PIXBUF_PACKAGES').split()
 
     for pkg in pixbuf_pkgs:
         bb.note("adding pixbuf postinst and postrm scripts to %s" % pkg)
-        postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
+        postinst = d.getVar('pkg_postinst_%s' % pkg) or d.getVar('pkg_postinst')
         if not postinst:
             postinst = '#!/bin/sh\n'
-        postinst += d.getVar('pixbufcache_common', True)
+        postinst += d.getVar('pixbufcache_common')
         d.setVar('pkg_postinst_%s' % pkg, postinst)
 
-        postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True)
+        postrm = d.getVar('pkg_postrm_%s' % pkg) or d.getVar('pkg_postrm')
         if not postrm:
             postrm = '#!/bin/sh\n'
-        postrm += d.getVar('pixbufcache_common', True)
+        postrm += d.getVar('pixbufcache_common')
         d.setVar('pkg_postrm_%s' % pkg, postrm)
 }
 
diff --git a/meta/classes/populate_sdk_base.bbclass b/meta/classes/populate_sdk_base.bbclass
index 220cde6925..48bc3959f5 100644
--- a/meta/classes/populate_sdk_base.bbclass
+++ b/meta/classes/populate_sdk_base.bbclass
@@ -11,7 +11,7 @@ COMPLEMENTARY_GLOB[ptest-pkgs] = '*-ptest'
 def complementary_globs(featurevar, d):
     all_globs = d.getVarFlags('COMPLEMENTARY_GLOB')
     globs = []
-    features = set((d.getVar(featurevar, True) or '').split())
+    features = set((d.getVar(featurevar) or '').split())
     for name, glob in all_globs.items():
         if name in features:
             globs.append(glob)
@@ -57,30 +57,30 @@ SDK_PRE_INSTALL_COMMAND ?= ""
 SDK_POST_INSTALL_COMMAND ?= ""
 SDK_RELOCATE_AFTER_INSTALL ?= "1"
 
-SDKEXTPATH ?= "~/${@d.getVar('DISTRO', True)}_sdk"
-SDK_TITLE ?= "${@d.getVar('DISTRO_NAME', True) or d.getVar('DISTRO', True)} SDK"
+SDKEXTPATH ?= "~/${@d.getVar('DISTRO')}_sdk"
+SDK_TITLE ?= "${@d.getVar('DISTRO_NAME') or d.getVar('DISTRO')} SDK"
 
 SDK_TARGET_MANIFEST = "${SDKDEPLOYDIR}/${TOOLCHAIN_OUTPUTNAME}.target.manifest"
 SDK_HOST_MANIFEST = "${SDKDEPLOYDIR}/${TOOLCHAIN_OUTPUTNAME}.host.manifest"
 python write_target_sdk_manifest () {
     from oe.sdk import sdk_list_installed_packages
     from oe.utils import format_pkg_list
-    sdkmanifestdir = os.path.dirname(d.getVar("SDK_TARGET_MANIFEST", True))
+    sdkmanifestdir = os.path.dirname(d.getVar("SDK_TARGET_MANIFEST"))
     pkgs = sdk_list_installed_packages(d, True)
     if not os.path.exists(sdkmanifestdir):
         bb.utils.mkdirhier(sdkmanifestdir)
-    with open(d.getVar('SDK_TARGET_MANIFEST', True), 'w') as output:
+    with open(d.getVar('SDK_TARGET_MANIFEST'), 'w') as output:
         output.write(format_pkg_list(pkgs, 'ver'))
 }
 
 python write_host_sdk_manifest () {
     from oe.sdk import sdk_list_installed_packages
     from oe.utils import format_pkg_list
-    sdkmanifestdir = os.path.dirname(d.getVar("SDK_HOST_MANIFEST", True))
+    sdkmanifestdir = os.path.dirname(d.getVar("SDK_HOST_MANIFEST"))
     pkgs = sdk_list_installed_packages(d, False)
     if not os.path.exists(sdkmanifestdir):
         bb.utils.mkdirhier(sdkmanifestdir)
-    with open(d.getVar('SDK_HOST_MANIFEST', True), 'w') as output:
+    with open(d.getVar('SDK_HOST_MANIFEST'), 'w') as output:
         output.write(format_pkg_list(pkgs, 'ver'))
 }
 
@@ -93,7 +93,7 @@ def populate_sdk_common(d):
     from oe.sdk import populate_sdk
     from oe.manifest import create_manifest, Manifest
 
-    pn = d.getVar('PN', True)
+    pn = d.getVar('PN')
     runtime_mapping_rename("TOOLCHAIN_TARGET_TASK", pn, d)
     runtime_mapping_rename("TOOLCHAIN_TARGET_TASK_ATTEMPTONLY", pn, d)
 
@@ -101,13 +101,13 @@ def populate_sdk_common(d):
     ld.setVar("PKGDATA_DIR", "${STAGING_DIR}/${SDK_ARCH}-${SDKPKGSUFFIX}${SDK_VENDOR}-${SDK_OS}/pkgdata")
     runtime_mapping_rename("TOOLCHAIN_HOST_TASK", pn, ld)
     runtime_mapping_rename("TOOLCHAIN_HOST_TASK_ATTEMPTONLY", pn, ld)
-    d.setVar("TOOLCHAIN_HOST_TASK", ld.getVar("TOOLCHAIN_HOST_TASK", True))
-    d.setVar("TOOLCHAIN_HOST_TASK_ATTEMPTONLY", ld.getVar("TOOLCHAIN_HOST_TASK_ATTEMPTONLY", True))
+    d.setVar("TOOLCHAIN_HOST_TASK", ld.getVar("TOOLCHAIN_HOST_TASK"))
+    d.setVar("TOOLCHAIN_HOST_TASK_ATTEMPTONLY", ld.getVar("TOOLCHAIN_HOST_TASK_ATTEMPTONLY"))
 
     # create target/host SDK manifests
-    create_manifest(d, manifest_dir=d.getVar('SDK_DIR', True),
+    create_manifest(d, manifest_dir=d.getVar('SDK_DIR'),
                     manifest_type=Manifest.MANIFEST_TYPE_SDK_HOST)
-    create_manifest(d, manifest_dir=d.getVar('SDK_DIR', True),
+    create_manifest(d, manifest_dir=d.getVar('SDK_DIR'),
                     manifest_type=Manifest.MANIFEST_TYPE_SDK_TARGET)
 
     populate_sdk(d)
@@ -134,7 +134,7 @@ fakeroot create_sdk_files() {
 python check_sdk_sysroots() {
     # Fails build if there are broken or dangling symlinks in SDK sysroots
 
-    if d.getVar('CHECK_SDK_SYSROOTS', True) != '1':
+    if d.getVar('CHECK_SDK_SYSROOTS') != '1':
         # disabled, bail out
         return
 
@@ -142,8 +142,8 @@ python check_sdk_sysroots() {
         return os.path.abspath(path)
 
     # Get scan root
-    SCAN_ROOT = norm_path("%s/%s/sysroots/" % (d.getVar('SDK_OUTPUT', True),
-                                               d.getVar('SDKPATH', True)))
+    SCAN_ROOT = norm_path("%s/%s/sysroots/" % (d.getVar('SDK_OUTPUT'),
+                                               d.getVar('SDKPATH')))
 
     bb.note('Checking SDK sysroots at ' + SCAN_ROOT)
 
@@ -218,7 +218,7 @@ EOF
         -e 's#@SDKEXTPATH@#${SDKEXTPATH}#g' \
         -e 's#@OLDEST_KERNEL@#${SDK_OLDEST_KERNEL}#g' \
         -e 's#@REAL_MULTIMACH_TARGET_SYS@#${REAL_MULTIMACH_TARGET_SYS}#g' \
-        -e 's#@SDK_TITLE@#${@d.getVar("SDK_TITLE", True).replace('&', '\&')}#g' \
+        -e 's#@SDK_TITLE@#${@d.getVar("SDK_TITLE").replace('&', '\&')}#g' \
         -e 's#@SDK_VERSION@#${SDK_VERSION}#g' \
         -e '/@SDK_PRE_INSTALL_COMMAND@/d' \
         -e '/@SDK_POST_INSTALL_COMMAND@/d' \
@@ -268,7 +268,7 @@ do_populate_sdk[file-checksums] += "${COREBASE}/meta/files/toolchain-shar-reloca
                                     ${COREBASE}/meta/files/toolchain-shar-extract.sh:True"
 
 do_populate_sdk[dirs] = "${PKGDATA_DIR} ${TOPDIR}"
-do_populate_sdk[depends] += "${@' '.join([x + ':do_populate_sysroot' for x in d.getVar('SDK_DEPENDS', True).split()])} ${@d.getVarFlag('do_rootfs', 'depends', False)}"
-do_populate_sdk[rdepends] = "${@' '.join([x + ':do_populate_sysroot' for x in d.getVar('SDK_RDEPENDS', True).split()])}"
+do_populate_sdk[depends] += "${@' '.join([x + ':do_populate_sysroot' for x in d.getVar('SDK_DEPENDS').split()])} ${@d.getVarFlag('do_rootfs', 'depends', False)}"
+do_populate_sdk[rdepends] = "${@' '.join([x + ':do_populate_sysroot' for x in d.getVar('SDK_RDEPENDS').split()])}"
 do_populate_sdk[recrdeptask] += "do_packagedata do_package_write_rpm do_package_write_ipk do_package_write_deb"
 addtask populate_sdk
diff --git a/meta/classes/populate_sdk_ext.bbclass b/meta/classes/populate_sdk_ext.bbclass
index 1affa9dfaa..be8b6a1f6f 100644
--- a/meta/classes/populate_sdk_ext.bbclass
+++ b/meta/classes/populate_sdk_ext.bbclass
@@ -21,7 +21,7 @@ SDK_EXT_task-populate-sdk-ext = "-ext"
 # Options are full or minimal
 SDK_EXT_TYPE ?= "full"
 SDK_INCLUDE_PKGDATA ?= "0"
-SDK_INCLUDE_TOOLCHAIN ?= "${@'1' if d.getVar('SDK_EXT_TYPE', True) == 'full' else '0'}"
+SDK_INCLUDE_TOOLCHAIN ?= "${@'1' if d.getVar('SDK_EXT_TYPE') == 'full' else '0'}"
 
 SDK_RECRDEP_TASKS ?= ""
 
@@ -43,8 +43,8 @@ SDK_TARGETS ?= "${PN}"
 
 def get_sdk_install_targets(d, images_only=False):
     sdk_install_targets = ''
-    if images_only or d.getVar('SDK_EXT_TYPE', True) != 'minimal':
-        sdk_install_targets = d.getVar('SDK_TARGETS', True)
+    if images_only or d.getVar('SDK_EXT_TYPE') != 'minimal':
+        sdk_install_targets = d.getVar('SDK_TARGETS')
 
     depd = d.getVar('BB_TASKDEPDATA', False)
     for v in depd.values():
@@ -53,9 +53,9 @@ def get_sdk_install_targets(d, images_only=False):
             sdk_install_targets += ' {}'.format(v[0])
 
     if not images_only:
-        if d.getVar('SDK_INCLUDE_PKGDATA', True) == '1':
+        if d.getVar('SDK_INCLUDE_PKGDATA') == '1':
             sdk_install_targets += ' meta-world-pkgdata:do_allpackagedata'
-        if d.getVar('SDK_INCLUDE_TOOLCHAIN', True) == '1':
+        if d.getVar('SDK_INCLUDE_TOOLCHAIN') == '1':
             sdk_install_targets += ' meta-extsdk-toolchain:do_populate_sysroot'
 
     return sdk_install_targets
@@ -83,7 +83,7 @@ TOOLCHAIN_OUTPUTNAME_task-populate-sdk-ext = "${TOOLCHAINEXT_OUTPUTNAME}"
 SDK_EXT_TARGET_MANIFEST = "${SDK_DEPLOY}/${TOOLCHAINEXT_OUTPUTNAME}.target.manifest"
 SDK_EXT_HOST_MANIFEST = "${SDK_DEPLOY}/${TOOLCHAINEXT_OUTPUTNAME}.host.manifest"
 
-SDK_TITLE_task-populate-sdk-ext = "${@d.getVar('DISTRO_NAME', True) or d.getVar('DISTRO', True)} Extensible SDK"
+SDK_TITLE_task-populate-sdk-ext = "${@d.getVar('DISTRO_NAME') or d.getVar('DISTRO')} Extensible SDK"
 
 def clean_esdk_builddir(d, sdkbasepath):
     """Clean up traces of the fake build for create_filtered_tasklist()"""
@@ -110,7 +110,7 @@ def create_filtered_tasklist(d, sdkbasepath, tasklistfile, conf_initpath):
     try:
         with open(sdkbasepath + '/conf/local.conf', 'a') as f:
             # Force the use of sstate from the build system
-            f.write('\nSSTATE_DIR_forcevariable = "%s"\n' % d.getVar('SSTATE_DIR', True))
+            f.write('\nSSTATE_DIR_forcevariable = "%s"\n' % d.getVar('SSTATE_DIR'))
             f.write('SSTATE_MIRRORS_forcevariable = ""\n')
             # Ensure TMPDIR is the default so that clean_esdk_builddir() can delete it
             f.write('TMPDIR_forcevariable = "${TOPDIR}/tmp"\n')
@@ -121,7 +121,7 @@ def create_filtered_tasklist(d, sdkbasepath, tasklistfile, conf_initpath):
 
     # Unfortunately the default SDKPATH (or even a custom value) may contain characters that bitbake
     # will not allow in its COREBASE path, so we need to rename the directory temporarily
-    temp_sdkbasepath = d.getVar('SDK_OUTPUT', True) + '/tmp-renamed-sdk'
+    temp_sdkbasepath = d.getVar('SDK_OUTPUT') + '/tmp-renamed-sdk'
     # Delete any existing temp dir
     try:
         shutil.rmtree(temp_sdkbasepath)
@@ -130,7 +130,7 @@ def create_filtered_tasklist(d, sdkbasepath, tasklistfile, conf_initpath):
     os.rename(sdkbasepath, temp_sdkbasepath)
     try:
         cmdprefix = '. %s .; ' % conf_initpath
-        logfile = d.getVar('WORKDIR', True) + '/tasklist_bb_log.txt'
+        logfile = d.getVar('WORKDIR') + '/tasklist_bb_log.txt'
         try:
             oe.copy_buildsystem.check_sstate_task_list(d, get_sdk_install_targets(d), tasklistfile, cmdprefix=cmdprefix, cwd=temp_sdkbasepath, logfile=logfile)
         except bb.process.ExecutionError as e:
@@ -152,7 +152,7 @@ python copy_buildsystem () {
     import glob
     import oe.copy_buildsystem
 
-    oe_init_env_script = d.getVar('OE_INIT_ENV_SCRIPT', True)
+    oe_init_env_script = d.getVar('OE_INIT_ENV_SCRIPT')
 
     conf_bbpath = ''
     conf_initpath = ''
@@ -160,10 +160,10 @@ python copy_buildsystem () {
 
     # Copy in all metadata layers + bitbake (as repositories)
     buildsystem = oe.copy_buildsystem.BuildSystem('extensible SDK', d)
-    baseoutpath = d.getVar('SDK_OUTPUT', True) + '/' + d.getVar('SDKPATH', True)
+    baseoutpath = d.getVar('SDK_OUTPUT') + '/' + d.getVar('SDKPATH')
 
     # Determine if we're building a derivative extensible SDK (from devtool build-sdk)
-    derivative = (d.getVar('SDK_DERIVATIVE', True) or '') == '1'
+    derivative = (d.getVar('SDK_DERIVATIVE') or '') == '1'
     if derivative:
         workspace_name = 'orig-workspace'
     else:
@@ -171,7 +171,7 @@ python copy_buildsystem () {
     layers_copied = buildsystem.copy_bitbake_and_layers(baseoutpath + '/layers', workspace_name)
 
     sdkbblayers = []
-    corebase = os.path.basename(d.getVar('COREBASE', True))
+    corebase = os.path.basename(d.getVar('COREBASE'))
     for layer in layers_copied:
         if corebase == os.path.basename(layer):
             conf_bbpath = os.path.join('layers', layer, 'bitbake')
@@ -202,8 +202,8 @@ python copy_buildsystem () {
     config.set('General', 'init_path', conf_initpath)
     config.set('General', 'core_meta_subdir', core_meta_subdir)
     config.add_section('SDK')
-    config.set('SDK', 'sdk_targets', d.getVar('SDK_TARGETS', True))
-    updateurl = d.getVar('SDK_UPDATE_URL', True)
+    config.set('SDK', 'sdk_targets', d.getVar('SDK_TARGETS'))
+    updateurl = d.getVar('SDK_UPDATE_URL')
     if updateurl:
         config.set('SDK', 'updateserver', updateurl)
     bb.utils.mkdirhier(os.path.join(baseoutpath, 'conf'))
@@ -215,7 +215,7 @@ python copy_buildsystem () {
         pass
 
     # Create a layer for new recipes / appends
-    bbpath = d.getVar('BBPATH', True)
+    bbpath = d.getVar('BBPATH')
     bb.process.run(['devtool', '--bbpath', bbpath, '--basepath', baseoutpath, 'create-workspace', '--create-only', os.path.join(baseoutpath, 'workspace')])
 
     # Create bblayers.conf
@@ -248,16 +248,16 @@ python copy_buildsystem () {
         bb.utils.mkdirhier(uninative_outdir)
         shutil.copy(uninative_file, uninative_outdir)
 
-    env_whitelist = (d.getVar('BB_ENV_EXTRAWHITE', True) or '').split()
+    env_whitelist = (d.getVar('BB_ENV_EXTRAWHITE') or '').split()
     env_whitelist_values = {}
 
     # Create local.conf
-    builddir = d.getVar('TOPDIR', True)
+    builddir = d.getVar('TOPDIR')
     if derivative:
         shutil.copyfile(builddir + '/conf/local.conf', baseoutpath + '/conf/local.conf')
     else:
-        local_conf_whitelist = (d.getVar('SDK_LOCAL_CONF_WHITELIST', True) or '').split()
-        local_conf_blacklist = (d.getVar('SDK_LOCAL_CONF_BLACKLIST', True) or '').split()
+        local_conf_whitelist = (d.getVar('SDK_LOCAL_CONF_WHITELIST') or '').split()
+        local_conf_blacklist = (d.getVar('SDK_LOCAL_CONF_BLACKLIST') or '').split()
         def handle_var(varname, origvalue, op, newlines):
             if varname in local_conf_blacklist or (origvalue.strip().startswith('/') and not varname in local_conf_whitelist):
                 newlines.append('# Removed original setting of %s\n' % varname)
@@ -285,7 +285,7 @@ python copy_buildsystem () {
             f.write('DL_DIR = "${TOPDIR}/downloads"\n')
 
             f.write('INHERIT += "%s"\n' % 'uninative')
-            f.write('UNINATIVE_CHECKSUM[%s] = "%s"\n\n' % (d.getVar('BUILD_ARCH', True), uninative_checksum))
+            f.write('UNINATIVE_CHECKSUM[%s] = "%s"\n\n' % (d.getVar('BUILD_ARCH'), uninative_checksum))
             f.write('CONF_VERSION = "%s"\n\n' % d.getVar('CONF_VERSION', False))
 
             # Some classes are not suitable for SDK, remove them from INHERIT
@@ -319,7 +319,7 @@ python copy_buildsystem () {
 
         # If you define a sdk_extraconf() function then it can contain additional config
         # (Though this is awkward; sdk-extra.conf should probably be used instead)
-        extraconf = (d.getVar('sdk_extraconf', True) or '').strip()
+        extraconf = (d.getVar('sdk_extraconf') or '').strip()
         if extraconf:
             # Strip off any leading / trailing spaces
             for line in extraconf.splitlines():
@@ -352,7 +352,7 @@ python copy_buildsystem () {
     # BB_ENV_EXTRAWHITE) are set in the SDK's configuration
     extralines = []
     for name, value in env_whitelist_values.items():
-        actualvalue = d.getVar(name, True) or ''
+        actualvalue = d.getVar(name) or ''
         if value != actualvalue:
             extralines.append('%s = "%s"\n' % (name, actualvalue))
     if extralines:
@@ -365,7 +365,7 @@ python copy_buildsystem () {
 
     # Filter the locked signatures file to just the sstate tasks we are interested in
     excluded_targets = get_sdk_install_targets(d, images_only=True)
-    sigfile = d.getVar('WORKDIR', True) + '/locked-sigs.inc'
+    sigfile = d.getVar('WORKDIR') + '/locked-sigs.inc'
     lockedsigs_pruned = baseoutpath + '/conf/locked-sigs.inc'
     oe.copy_buildsystem.prune_lockedsigs([],
                                          excluded_targets.split(),
@@ -378,36 +378,36 @@ python copy_buildsystem () {
     # uninative.bbclass sets NATIVELSBSTRING to 'universal%s' % oe.utils.host_gcc_version(d)
     fixedlsbstring = "universal%s" % oe.utils.host_gcc_version(d)
 
-    sdk_include_toolchain = (d.getVar('SDK_INCLUDE_TOOLCHAIN', True) == '1')
-    sdk_ext_type = d.getVar('SDK_EXT_TYPE', True)
+    sdk_include_toolchain = (d.getVar('SDK_INCLUDE_TOOLCHAIN') == '1')
+    sdk_ext_type = d.getVar('SDK_EXT_TYPE')
     if sdk_ext_type != 'minimal' or sdk_include_toolchain or derivative:
         # Create the filtered task list used to generate the sstate cache shipped with the SDK
-        tasklistfn = d.getVar('WORKDIR', True) + '/tasklist.txt'
+        tasklistfn = d.getVar('WORKDIR') + '/tasklist.txt'
         create_filtered_tasklist(d, baseoutpath, tasklistfn, conf_initpath)
     else:
         tasklistfn = None
 
     # Add packagedata if enabled
-    if d.getVar('SDK_INCLUDE_PKGDATA', True) == '1':
-        lockedsigs_base = d.getVar('WORKDIR', True) + '/locked-sigs-base.inc'
-        lockedsigs_copy = d.getVar('WORKDIR', True) + '/locked-sigs-copy.inc'
+    if d.getVar('SDK_INCLUDE_PKGDATA') == '1':
+        lockedsigs_base = d.getVar('WORKDIR') + '/locked-sigs-base.inc'
+        lockedsigs_copy = d.getVar('WORKDIR') + '/locked-sigs-copy.inc'
         shutil.move(lockedsigs_pruned, lockedsigs_base)
         oe.copy_buildsystem.merge_lockedsigs(['do_packagedata'],
                                              lockedsigs_base,
-                                             d.getVar('STAGING_DIR_HOST', True) + '/world-pkgdata/locked-sigs-pkgdata.inc',
+                                             d.getVar('STAGING_DIR_HOST') + '/world-pkgdata/locked-sigs-pkgdata.inc',
                                              lockedsigs_pruned,
                                              lockedsigs_copy)
 
     if sdk_include_toolchain:
-        lockedsigs_base = d.getVar('WORKDIR', True) + '/locked-sigs-base2.inc'
-        lockedsigs_toolchain = d.getVar('STAGING_DIR_HOST', True) + '/locked-sigs/locked-sigs-extsdk-toolchain.inc'
+        lockedsigs_base = d.getVar('WORKDIR') + '/locked-sigs-base2.inc'
+        lockedsigs_toolchain = d.getVar('STAGING_DIR_HOST') + '/locked-sigs/locked-sigs-extsdk-toolchain.inc'
         shutil.move(lockedsigs_pruned, lockedsigs_base)
         oe.copy_buildsystem.merge_lockedsigs([],
                                              lockedsigs_base,
                                              lockedsigs_toolchain,
                                              lockedsigs_pruned)
         oe.copy_buildsystem.create_locked_sstate_cache(lockedsigs_toolchain,
-                                                       d.getVar('SSTATE_DIR', True),
+                                                       d.getVar('SSTATE_DIR'),
                                                        sstate_out, d,
                                                        fixedlsbstring,
                                                        filterfile=tasklistfn)
@@ -417,22 +417,22 @@ python copy_buildsystem () {
         # Assume the user is not going to set up an additional sstate
         # mirror, thus we need to copy the additional artifacts (from
         # workspace recipes) into the derivative SDK
-        lockedsigs_orig = d.getVar('TOPDIR', True) + '/conf/locked-sigs.inc'
+        lockedsigs_orig = d.getVar('TOPDIR') + '/conf/locked-sigs.inc'
         if os.path.exists(lockedsigs_orig):
-            lockedsigs_extra = d.getVar('WORKDIR', True) + '/locked-sigs-extra.inc'
+            lockedsigs_extra = d.getVar('WORKDIR') + '/locked-sigs-extra.inc'
             oe.copy_buildsystem.merge_lockedsigs(None,
                                                  lockedsigs_orig,
                                                  lockedsigs_pruned,
                                                  None,
                                                  lockedsigs_extra)
             oe.copy_buildsystem.create_locked_sstate_cache(lockedsigs_extra,
-                                                           d.getVar('SSTATE_DIR', True),
+                                                           d.getVar('SSTATE_DIR'),
                                                            sstate_out, d,
                                                            fixedlsbstring,
                                                            filterfile=tasklistfn)
     else:
         oe.copy_buildsystem.create_locked_sstate_cache(lockedsigs_pruned,
-                                                       d.getVar('SSTATE_DIR', True),
+                                                       d.getVar('SSTATE_DIR'),
                                                        sstate_out, d,
                                                        fixedlsbstring,
                                                        filterfile=tasklistfn)
@@ -463,24 +463,24 @@ python copy_buildsystem () {
 def get_current_buildtools(d):
     """Get the file name of the current buildtools installer"""
     import glob
-    btfiles = glob.glob(os.path.join(d.getVar('SDK_DEPLOY', True), '*-buildtools-nativesdk-standalone-*.sh'))
+    btfiles = glob.glob(os.path.join(d.getVar('SDK_DEPLOY'), '*-buildtools-nativesdk-standalone-*.sh'))
     btfiles.sort(key=os.path.getctime)
     return os.path.basename(btfiles[-1])
 
 def get_sdk_required_utilities(buildtools_fn, d):
     """Find required utilities that aren't provided by the buildtools"""
-    sanity_required_utilities = (d.getVar('SANITY_REQUIRED_UTILITIES', True) or '').split()
+    sanity_required_utilities = (d.getVar('SANITY_REQUIRED_UTILITIES') or '').split()
     sanity_required_utilities.append(d.expand('${BUILD_PREFIX}gcc'))
     sanity_required_utilities.append(d.expand('${BUILD_PREFIX}g++'))
-    buildtools_installer = os.path.join(d.getVar('SDK_DEPLOY', True), buildtools_fn)
+    buildtools_installer = os.path.join(d.getVar('SDK_DEPLOY'), buildtools_fn)
     filelist, _ = bb.process.run('%s -l' % buildtools_installer)
     localdata = bb.data.createCopy(d)
     localdata.setVar('SDKPATH', '.')
-    sdkpathnative = localdata.getVar('SDKPATHNATIVE', True)
-    sdkbindirs = [localdata.getVar('bindir_nativesdk', True),
-                  localdata.getVar('sbindir_nativesdk', True),
-                  localdata.getVar('base_bindir_nativesdk', True),
-                  localdata.getVar('base_sbindir_nativesdk', True)]
+    sdkpathnative = localdata.getVar('SDKPATHNATIVE')
+    sdkbindirs = [localdata.getVar('bindir_nativesdk'),
+                  localdata.getVar('sbindir_nativesdk'),
+                  localdata.getVar('base_bindir_nativesdk'),
+                  localdata.getVar('base_sbindir_nativesdk')]
     for line in filelist.splitlines():
         splitline = line.split()
         if len(splitline) > 5:
@@ -509,7 +509,7 @@ install_tools() {
     # (they get populated from sstate on installation)
     unfsd_path="${SDK_OUTPUT}/${SDKPATHNATIVE}${bindir_nativesdk}/unfsd"
     if [ "${SDK_INCLUDE_TOOLCHAIN}" == "1" -a ! -e $unfsd_path ] ; then
-        binrelpath=${@os.path.relpath(d.getVar('STAGING_BINDIR_NATIVE',True), d.getVar('TOPDIR', True))}
+        binrelpath=${@os.path.relpath(d.getVar('STAGING_BINDIR_NATIVE',True), d.getVar('TOPDIR'))}
         lnr ${SDK_OUTPUT}/${SDKPATH}/$binrelpath/unfsd $unfsd_path
     fi
     touch ${SDK_OUTPUT}/${SDKPATH}/.devtoolbase
@@ -611,8 +611,8 @@ SDK_INSTALL_TARGETS = ""
 fakeroot python do_populate_sdk_ext() {
     # FIXME hopefully we can remove this restriction at some point, but uninative
     # currently forces this upon us
-    if d.getVar('SDK_ARCH', True) != d.getVar('BUILD_ARCH', True):
-        bb.fatal('The extensible SDK can currently only be built for the same architecture as the machine being built on - SDK_ARCH is set to %s (likely via setting SDKMACHINE) which is different from the architecture of the build machine (%s). Unable to continue.' % (d.getVar('SDK_ARCH', True), d.getVar('BUILD_ARCH', True)))
+    if d.getVar('SDK_ARCH') != d.getVar('BUILD_ARCH'):
+        bb.fatal('The extensible SDK can currently only be built for the same architecture as the machine being built on - SDK_ARCH is set to %s (likely via setting SDKMACHINE) which is different from the architecture of the build machine (%s). Unable to continue.' % (d.getVar('SDK_ARCH'), d.getVar('BUILD_ARCH')))
 
     d.setVar('SDK_INSTALL_TARGETS', get_sdk_install_targets(d))
     buildtools_fn = get_current_buildtools(d)
@@ -626,7 +626,7 @@ fakeroot python do_populate_sdk_ext() {
 def get_ext_sdk_depends(d):
     # Note: the deps varflag is a list not a string, so we need to specify expand=False
     deps = d.getVarFlag('do_image_complete', 'deps', False)
-    pn = d.getVar('PN', True)
+    pn = d.getVar('PN')
     deplist = ['%s:%s' % (pn, dep) for dep in deps]
     for task in ['do_image_complete', 'do_rootfs', 'do_build']:
         deplist.extend((d.getVarFlag(task, 'depends', True) or '').split())
@@ -637,7 +637,7 @@ python do_sdk_depends() {
     # dependencies we don't need to (e.g. buildtools-tarball) and bringing those
     # into the SDK's sstate-cache
     import oe.copy_buildsystem
-    sigfile = d.getVar('WORKDIR', True) + '/locked-sigs.inc'
+    sigfile = d.getVar('WORKDIR') + '/locked-sigs.inc'
     oe.copy_buildsystem.generate_locked_sigs(sigfile, d)
 }
 addtask sdk_depends
@@ -658,10 +658,10 @@ do_populate_sdk_ext[dirs] = "${@d.getVarFlag('do_populate_sdk', 'dirs', False)}"
 
 do_populate_sdk_ext[depends] = "${@d.getVarFlag('do_populate_sdk', 'depends', False)} \
                                 buildtools-tarball:do_populate_sdk uninative-tarball:do_populate_sdk \
-                                ${@'meta-world-pkgdata:do_collect_packagedata' if d.getVar('SDK_INCLUDE_PKGDATA', True) == '1' else ''} \
-                                ${@'meta-extsdk-toolchain:do_locked_sigs' if d.getVar('SDK_INCLUDE_TOOLCHAIN', True) == '1' else ''}"
+                                ${@'meta-world-pkgdata:do_collect_packagedata' if d.getVar('SDK_INCLUDE_PKGDATA') == '1' else ''} \
+                                ${@'meta-extsdk-toolchain:do_locked_sigs' if d.getVar('SDK_INCLUDE_TOOLCHAIN') == '1' else ''}"
 
-do_populate_sdk_ext[rdepends] += "${@' '.join([x + ':do_build' for x in d.getVar('SDK_TARGETS', True).split()])}"
+do_populate_sdk_ext[rdepends] += "${@' '.join([x + ':do_build' for x in d.getVar('SDK_TARGETS').split()])}"
 
 # Make sure code changes can result in rebuild
 do_populate_sdk_ext[vardeps] += "copy_buildsystem \
diff --git a/meta/classes/prexport.bbclass b/meta/classes/prexport.bbclass
index 809ec1034e..6dcf99e29f 100644
--- a/meta/classes/prexport.bbclass
+++ b/meta/classes/prexport.bbclass
@@ -15,7 +15,7 @@ python prexport_handler () {
     if isinstance(e, bb.event.RecipeParsed):
         import oe.prservice
         #get all PR values for the current PRAUTOINX
-        ver = e.data.getVar('PRSERV_DUMPOPT_VERSION', True)
+        ver = e.data.getVar('PRSERV_DUMPOPT_VERSION')
         ver = ver.replace('%','-')
         retval = oe.prservice.prserv_dump_db(e.data)
         if not retval:
@@ -40,7 +40,7 @@ python prexport_handler () {
         import oe.prservice
         oe.prservice.prserv_check_avail(e.data)
         #remove dumpfile
-        bb.utils.remove(e.data.getVar('PRSERV_DUMPFILE', True))
+        bb.utils.remove(e.data.getVar('PRSERV_DUMPFILE'))
     elif isinstance(e, bb.event.ParseCompleted):
         import oe.prservice
         #dump meta info of tables
diff --git a/meta/classes/ptest.bbclass b/meta/classes/ptest.bbclass
index fa3561e621..798d802974 100644
--- a/meta/classes/ptest.bbclass
+++ b/meta/classes/ptest.bbclass
@@ -61,7 +61,7 @@ python () {
     d.setVarFlag('do_install_ptest_base', 'fakeroot', '1')
 
     # Remove all '*ptest_base' tasks when ptest is not enabled
-    if not(d.getVar('PTEST_ENABLED', True) == "1"):
+    if not(d.getVar('PTEST_ENABLED') == "1"):
         for i in ['do_configure_ptest_base', 'do_compile_ptest_base', 'do_install_ptest_base']:
             bb.build.deltask(i, d)
 }
diff --git a/meta/classes/qemu.bbclass b/meta/classes/qemu.bbclass
index f2d4d1c9e1..616758260c 100644
--- a/meta/classes/qemu.bbclass
+++ b/meta/classes/qemu.bbclass
@@ -4,12 +4,12 @@
 #
 
 def qemu_target_binary(data):
-    package_arch = data.getVar("PACKAGE_ARCH", True)
-    qemu_target_binary = (data.getVar("QEMU_TARGET_BINARY_%s" % package_arch, True) or "")
+    package_arch = data.getVar("PACKAGE_ARCH")
+    qemu_target_binary = (data.getVar("QEMU_TARGET_BINARY_%s" % package_arch) or "")
     if qemu_target_binary:
         return qemu_target_binary
 
-    target_arch = data.getVar("TARGET_ARCH", True)
+    target_arch = data.getVar("TARGET_ARCH")
     if target_arch in ("i486", "i586", "i686"):
         target_arch = "i386"
     elif target_arch == "powerpc":
@@ -26,7 +26,7 @@ def qemu_wrapper_cmdline(data, rootfs_path, library_paths):
     if qemu_binary == "qemu-allarch":
         qemu_binary = "qemuwrapper"
 
-    qemu_options = data.getVar("QEMU_OPTIONS", True)
+    qemu_options = data.getVar("QEMU_OPTIONS")
 
     return "PSEUDO_UNLOAD=1 " + qemu_binary + " " + qemu_options + " -L " + rootfs_path\
             + " -E LD_LIBRARY_PATH=" + ":".join(library_paths) + " "
@@ -52,7 +52,7 @@ def qemu_run_binary(data, rootfs_path, binary):
 # this dance). For others (e.g. arm) a -cpu option is not necessary, since the
 # qemu-arm default CPU supports all required architecture levels.
 
-QEMU_OPTIONS = "-r ${OLDEST_KERNEL} ${@d.getVar("QEMU_EXTRAOPTIONS_%s" % d.getVar('PACKAGE_ARCH', True), True) or ""}"
+QEMU_OPTIONS = "-r ${OLDEST_KERNEL} ${@d.getVar("QEMU_EXTRAOPTIONS_%s" % d.getVar('PACKAGE_ARCH'), True) or ""}"
 QEMU_OPTIONS[vardeps] += "QEMU_EXTRAOPTIONS_${PACKAGE_ARCH}"
 
 QEMU_EXTRAOPTIONS_ppce500v2 = " -cpu e500v2"
diff --git a/meta/classes/qemuboot.bbclass b/meta/classes/qemuboot.bbclass
index 8b1d4d087d..28e8a737f8 100644
--- a/meta/classes/qemuboot.bbclass
+++ b/meta/classes/qemuboot.bbclass
@@ -55,17 +55,17 @@ do_write_qemuboot_conf[vardeps] += "${@' '.join(qemuboot_vars(d))}"
 python do_write_qemuboot_conf() {
     import configparser
 
-    qemuboot = "%s/%s.qemuboot.conf" % (d.getVar('DEPLOY_DIR_IMAGE', True), d.getVar('IMAGE_NAME', True))
-    qemuboot_link = "%s/%s.qemuboot.conf" % (d.getVar('DEPLOY_DIR_IMAGE', True), d.getVar('IMAGE_LINK_NAME', True))
+    qemuboot = "%s/%s.qemuboot.conf" % (d.getVar('DEPLOY_DIR_IMAGE'), d.getVar('IMAGE_NAME'))
+    qemuboot_link = "%s/%s.qemuboot.conf" % (d.getVar('DEPLOY_DIR_IMAGE'), d.getVar('IMAGE_LINK_NAME'))
     cf = configparser.ConfigParser()
     cf.add_section('config_bsp')
     for k in qemuboot_vars(d):
-        cf.set('config_bsp', k, '%s' % d.getVar(k, True))
+        cf.set('config_bsp', k, '%s' % d.getVar(k))
 
     # QB_DEFAULT_KERNEL's value of KERNEL_IMAGETYPE is the name of a symlink
     # to the kernel file, which hinders relocatability of the qb conf.
     # Read the link and replace it with the full filename of the target.
-    kernel_link = os.path.join(d.getVar('DEPLOY_DIR_IMAGE', True), d.getVar('QB_DEFAULT_KERNEL', True))
+    kernel_link = os.path.join(d.getVar('DEPLOY_DIR_IMAGE'), d.getVar('QB_DEFAULT_KERNEL'))
     kernel = os.path.realpath(kernel_link)
     cf.set('config_bsp', 'QB_DEFAULT_KERNEL', kernel)
 
diff --git a/meta/classes/recipe_sanity.bbclass b/meta/classes/recipe_sanity.bbclass
index add34df9d6..3b00b0e521 100644
--- a/meta/classes/recipe_sanity.bbclass
+++ b/meta/classes/recipe_sanity.bbclass
@@ -1,5 +1,5 @@
1def __note(msg, d): 1def __note(msg, d):
2 bb.note("%s: recipe_sanity: %s" % (d.getVar("P", True), msg)) 2 bb.note("%s: recipe_sanity: %s" % (d.getVar("P"), msg))
3 3
4__recipe_sanity_badruntimevars = "RDEPENDS RPROVIDES RRECOMMENDS RCONFLICTS" 4__recipe_sanity_badruntimevars = "RDEPENDS RPROVIDES RRECOMMENDS RCONFLICTS"
5def bad_runtime_vars(cfgdata, d): 5def bad_runtime_vars(cfgdata, d):
@@ -7,7 +7,7 @@ def bad_runtime_vars(cfgdata, d):
7 bb.data.inherits_class("cross", d): 7 bb.data.inherits_class("cross", d):
8 return 8 return
9 9
10 for var in d.getVar("__recipe_sanity_badruntimevars", True).split(): 10 for var in d.getVar("__recipe_sanity_badruntimevars").split():
11 val = d.getVar(var, False) 11 val = d.getVar(var, False)
12 if val and val != cfgdata.get(var): 12 if val and val != cfgdata.get(var):
13 __note("%s should be %s_${PN}" % (var, var), d) 13 __note("%s should be %s_${PN}" % (var, var), d)
@@ -15,11 +15,11 @@ def bad_runtime_vars(cfgdata, d):
15__recipe_sanity_reqvars = "DESCRIPTION" 15__recipe_sanity_reqvars = "DESCRIPTION"
16__recipe_sanity_reqdiffvars = "" 16__recipe_sanity_reqdiffvars = ""
17def req_vars(cfgdata, d): 17def req_vars(cfgdata, d):
18 for var in d.getVar("__recipe_sanity_reqvars", True).split(): 18 for var in d.getVar("__recipe_sanity_reqvars").split():
19 if not d.getVar(var, False): 19 if not d.getVar(var, False):
20 __note("%s should be set" % var, d) 20 __note("%s should be set" % var, d)
21 21
22 for var in d.getVar("__recipe_sanity_reqdiffvars", True).split(): 22 for var in d.getVar("__recipe_sanity_reqdiffvars").split():
23 val = d.getVar(var, False) 23 val = d.getVar(var, False)
24 cfgval = cfgdata.get(var) 24 cfgval = cfgdata.get(var)
25 25
@@ -38,11 +38,11 @@ def var_renames_overwrite(cfgdata, d):
38def incorrect_nonempty_PACKAGES(cfgdata, d): 38def incorrect_nonempty_PACKAGES(cfgdata, d):
39 if bb.data.inherits_class("native", d) or \ 39 if bb.data.inherits_class("native", d) or \
40 bb.data.inherits_class("cross", d): 40 bb.data.inherits_class("cross", d):
41 if d.getVar("PACKAGES", True): 41 if d.getVar("PACKAGES"):
42 return True 42 return True
43 43
44def can_use_autotools_base(cfgdata, d): 44def can_use_autotools_base(cfgdata, d):
45 cfg = d.getVar("do_configure", True) 45 cfg = d.getVar("do_configure")
46 if not bb.data.inherits_class("autotools", d): 46 if not bb.data.inherits_class("autotools", d):
47 return False 47 return False
48 48
@@ -61,7 +61,7 @@ def can_delete_FILESPATH(cfgdata, d):
61 expected = cfgdata.get("FILESPATH") 61 expected = cfgdata.get("FILESPATH")
62 expectedpaths = d.expand(expected) 62 expectedpaths = d.expand(expected)
63 unexpanded = d.getVar("FILESPATH", False) 63 unexpanded = d.getVar("FILESPATH", False)
64 filespath = d.getVar("FILESPATH", True).split(":") 64 filespath = d.getVar("FILESPATH").split(":")
65 filespath = [os.path.normpath(f) for f in filespath if os.path.exists(f)] 65 filespath = [os.path.normpath(f) for f in filespath if os.path.exists(f)]
66 for fp in filespath: 66 for fp in filespath:
67 if not fp in expectedpaths: 67 if not fp in expectedpaths:
@@ -72,13 +72,13 @@ def can_delete_FILESPATH(cfgdata, d):
 
 def can_delete_FILESDIR(cfgdata, d):
     expected = cfgdata.get("FILESDIR")
-    #expected = "${@bb.utils.which(d.getVar('FILESPATH', True), '.')}"
+    #expected = "${@bb.utils.which(d.getVar('FILESPATH'), '.')}"
     unexpanded = d.getVar("FILESDIR", False)
     if unexpanded is None:
         return False
 
-    expanded = os.path.normpath(d.getVar("FILESDIR", True))
-    filespath = d.getVar("FILESPATH", True).split(":")
+    expanded = os.path.normpath(d.getVar("FILESDIR"))
+    filespath = d.getVar("FILESPATH").split(":")
     filespath = [os.path.normpath(f) for f in filespath if os.path.exists(f)]
 
     return unexpanded != expected and \
@@ -96,7 +96,7 @@ def can_delete_others(p, cfgdata, d):
             continue
 
         try:
-            expanded = d.getVar(k, True)
+            expanded = d.getVar(k)
             cfgexpanded = d.expand(cfgunexpanded)
         except bb.fetch.ParameterError:
             continue
@@ -108,8 +108,8 @@ def can_delete_others(p, cfgdata, d):
                         (p, cfgunexpanded, unexpanded, expanded))
 
 python do_recipe_sanity () {
-    p = d.getVar("P", True)
-    p = "%s %s %s" % (d.getVar("PN", True), d.getVar("PV", True), d.getVar("PR", True))
+    p = d.getVar("P")
+    p = "%s %s %s" % (d.getVar("PN"), d.getVar("PV"), d.getVar("PR"))
 
     sanitychecks = [
         (can_delete_FILESDIR, "candidate for removal of FILESDIR"),
diff --git a/meta/classes/report-error.bbclass b/meta/classes/report-error.bbclass
index 5bb231efc1..d6fdd364ad 100644
--- a/meta/classes/report-error.bbclass
+++ b/meta/classes/report-error.bbclass
@@ -10,7 +10,7 @@ ERR_REPORT_DIR ?= "${LOG_DIR}/error-report"
 
 def errorreport_getdata(e):
     import codecs
-    logpath = e.data.getVar('ERR_REPORT_DIR', True)
+    logpath = e.data.getVar('ERR_REPORT_DIR')
     datafile = os.path.join(logpath, "error-report.txt")
     with codecs.open(datafile, 'r', 'utf-8') as f:
         data = f.read()
@@ -19,7 +19,7 @@ def errorreport_getdata(e):
 def errorreport_savedata(e, newdata, file):
     import json
     import codecs
-    logpath = e.data.getVar('ERR_REPORT_DIR', True)
+    logpath = e.data.getVar('ERR_REPORT_DIR')
     datafile = os.path.join(logpath, file)
     with codecs.open(datafile, 'w', 'utf-8') as f:
         json.dump(newdata, f, indent=4, sort_keys=True)
@@ -29,18 +29,18 @@ python errorreport_handler () {
     import json
     import codecs
 
-    logpath = e.data.getVar('ERR_REPORT_DIR', True)
+    logpath = e.data.getVar('ERR_REPORT_DIR')
     datafile = os.path.join(logpath, "error-report.txt")
 
     if isinstance(e, bb.event.BuildStarted):
         bb.utils.mkdirhier(logpath)
         data = {}
-        machine = e.data.getVar("MACHINE", True)
+        machine = e.data.getVar("MACHINE")
         data['machine'] = machine
-        data['build_sys'] = e.data.getVar("BUILD_SYS", True)
-        data['nativelsb'] = e.data.getVar("NATIVELSBSTRING", True)
-        data['distro'] = e.data.getVar("DISTRO", True)
-        data['target_sys'] = e.data.getVar("TARGET_SYS", True)
+        data['build_sys'] = e.data.getVar("BUILD_SYS")
+        data['nativelsb'] = e.data.getVar("NATIVELSBSTRING")
+        data['distro'] = e.data.getVar("DISTRO")
+        data['target_sys'] = e.data.getVar("TARGET_SYS")
         data['failures'] = []
         data['component'] = " ".join(e.getPkgs())
         data['branch_commit'] = str(base_detect_branch(e.data)) + ": " + str(base_detect_revision(e.data))
@@ -51,7 +51,7 @@ python errorreport_handler () {
     elif isinstance(e, bb.build.TaskFailed):
         task = e.task
         taskdata={}
-        log = e.data.getVar('BB_LOGFILE', True)
+        log = e.data.getVar('BB_LOGFILE')
         taskdata['package'] = e.data.expand("${PF}")
         taskdata['task'] = task
         if log:
@@ -61,7 +61,7 @@ python errorreport_handler () {
 
                 # Replace host-specific paths so the logs are cleaner
                 for d in ("TOPDIR", "TMPDIR"):
-                    s = e.data.getVar(d, True)
+                    s = e.data.getVar(d)
                     if s:
                         logdata = logdata.replace(s, d)
 
@@ -92,7 +92,7 @@ python errorreport_handler () {
         bb.utils.unlockfile(lock)
         failures = jsondata['failures']
         if(len(failures) > 0):
-            filename = "error_report_" + e.data.getVar("BUILDNAME", True)+".txt"
+            filename = "error_report_" + e.data.getVar("BUILDNAME")+".txt"
             datafile = errorreport_savedata(e, jsondata, filename)
             bb.note("The errors for this build are stored in %s\nYou can send the errors to a reports server by running:\n  send-error-report %s [-s server]" % (datafile, datafile))
             bb.note("The contents of these logs will be posted in public if you use the above command with the default server. Please ensure you remove any identifying or proprietary information when prompted before sending.")
diff --git a/meta/classes/rm_work.bbclass b/meta/classes/rm_work.bbclass
index 263ad8be9f..3516c7edf8 100644
--- a/meta/classes/rm_work.bbclass
+++ b/meta/classes/rm_work.bbclass
@@ -119,10 +119,10 @@ rm_work_rootfs[cleandirs] = "${WORKDIR}/rootfs"
 
 python () {
     if bb.data.inherits_class('kernel', d):
-        d.appendVar("RM_WORK_EXCLUDE", ' ' + d.getVar("PN", True))
+        d.appendVar("RM_WORK_EXCLUDE", ' ' + d.getVar("PN"))
     # If the recipe name is in the RM_WORK_EXCLUDE, skip the recipe.
-    excludes = (d.getVar("RM_WORK_EXCLUDE", True) or "").split()
-    pn = d.getVar("PN", True)
+    excludes = (d.getVar("RM_WORK_EXCLUDE") or "").split()
+    pn = d.getVar("PN")
     if pn in excludes:
         d.delVarFlag('rm_work_rootfs', 'cleandirs')
         d.delVarFlag('rm_work_populatesdk', 'cleandirs')
diff --git a/meta/classes/rootfs-postcommands.bbclass b/meta/classes/rootfs-postcommands.bbclass
index 0c7ceea542..8d48a2d1d9 100644
--- a/meta/classes/rootfs-postcommands.bbclass
+++ b/meta/classes/rootfs-postcommands.bbclass
@@ -217,9 +217,9 @@ python write_image_manifest () {
     from oe.rootfs import image_list_installed_packages
     from oe.utils import format_pkg_list
 
-    deploy_dir = d.getVar('IMGDEPLOYDIR', True)
-    link_name = d.getVar('IMAGE_LINK_NAME', True)
-    manifest_name = d.getVar('IMAGE_MANIFEST', True)
+    deploy_dir = d.getVar('IMGDEPLOYDIR')
+    link_name = d.getVar('IMAGE_LINK_NAME')
+    manifest_name = d.getVar('IMAGE_MANIFEST')
 
     if not manifest_name:
         return
diff --git a/meta/classes/rootfs_deb.bbclass b/meta/classes/rootfs_deb.bbclass
index f79fca608e..e8facd4368 100644
--- a/meta/classes/rootfs_deb.bbclass
+++ b/meta/classes/rootfs_deb.bbclass
@@ -14,7 +14,7 @@ do_rootfs[lockfiles] += "${DEPLOY_DIR_DEB}/deb.lock"
 do_populate_sdk[lockfiles] += "${DEPLOY_DIR_DEB}/deb.lock"
 
 python rootfs_deb_bad_recommendations() {
-    if d.getVar("BAD_RECOMMENDATIONS", True):
+    if d.getVar("BAD_RECOMMENDATIONS"):
         bb.warn("Debian package install does not support BAD_RECOMMENDATIONS")
 }
 do_rootfs[prefuncs] += "rootfs_deb_bad_recommendations"
@@ -25,7 +25,7 @@ opkglibdir = "${localstatedir}/lib/opkg"
 
 python () {
     # Map TARGET_ARCH to Debian's ideas about architectures
-    darch = d.getVar('SDK_ARCH', True)
+    darch = d.getVar('SDK_ARCH')
     if darch in ["x86", "i486", "i586", "i686", "pentium"]:
         d.setVar('DEB_SDK_ARCH', 'i386')
     elif darch == "x86_64":
diff --git a/meta/classes/rootfs_ipk.bbclass b/meta/classes/rootfs_ipk.bbclass
index d5c38fef74..8c60398085 100644
--- a/meta/classes/rootfs_ipk.bbclass
+++ b/meta/classes/rootfs_ipk.bbclass
@@ -27,7 +27,7 @@ MULTILIBRE_ALLOW_REP = "${OPKGLIBDIR}/opkg|/usr/lib/opkg"
 
 python () {
 
-    if d.getVar('BUILD_IMAGES_FROM_FEEDS', True):
+    if d.getVar('BUILD_IMAGES_FROM_FEEDS'):
         flags = d.getVarFlag('do_rootfs', 'recrdeptask', True)
         flags = flags.replace("do_package_write_ipk", "")
         flags = flags.replace("do_deploy", "")
diff --git a/meta/classes/rootfs_rpm.bbclass b/meta/classes/rootfs_rpm.bbclass
index 37730a7104..20beb7c713 100644
--- a/meta/classes/rootfs_rpm.bbclass
+++ b/meta/classes/rootfs_rpm.bbclass
@@ -25,7 +25,7 @@ do_rootfs[recrdeptask] += "do_package_write_rpm"
 do_rootfs[vardeps] += "PACKAGE_FEED_URIS"
 
 python () {
-    if d.getVar('BUILD_IMAGES_FROM_FEEDS', True):
+    if d.getVar('BUILD_IMAGES_FROM_FEEDS'):
         flags = d.getVarFlag('do_rootfs', 'recrdeptask', True)
         flags = flags.replace("do_package_write_rpm", "")
         flags = flags.replace("do_deploy", "")
diff --git a/meta/classes/sanity.bbclass b/meta/classes/sanity.bbclass
index 921b248b78..9674ae9580 100644
--- a/meta/classes/sanity.bbclass
+++ b/meta/classes/sanity.bbclass
@@ -6,7 +6,7 @@ SANITY_REQUIRED_UTILITIES ?= "patch diffstat makeinfo git bzip2 tar \
     gzip gawk chrpath wget cpio perl file"
 
 def bblayers_conf_file(d):
-    return os.path.join(d.getVar('TOPDIR', True), 'conf/bblayers.conf')
+    return os.path.join(d.getVar('TOPDIR'), 'conf/bblayers.conf')
 
 def sanity_conf_read(fn):
     with open(fn, 'r') as f:
@@ -39,8 +39,8 @@ SANITY_DIFF_TOOL ?= "meld"
 SANITY_LOCALCONF_SAMPLE ?= "${COREBASE}/meta*/conf/local.conf.sample"
 python oecore_update_localconf() {
     # Check we are using a valid local.conf
-    current_conf = d.getVar('CONF_VERSION', True)
-    conf_version = d.getVar('LOCALCONF_VERSION', True)
+    current_conf = d.getVar('CONF_VERSION')
+    conf_version = d.getVar('LOCALCONF_VERSION')
 
     failmsg = """Your version of local.conf was generated from an older/newer version of
 local.conf.sample and there have been updates made to this file. Please compare the two
@@ -59,8 +59,8 @@ is a good way to visualise the changes."""
 SANITY_SITECONF_SAMPLE ?= "${COREBASE}/meta*/conf/site.conf.sample"
 python oecore_update_siteconf() {
     # If we have a site.conf, check it's valid
-    current_sconf = d.getVar('SCONF_VERSION', True)
-    sconf_version = d.getVar('SITE_CONF_VERSION', True)
+    current_sconf = d.getVar('SCONF_VERSION')
+    sconf_version = d.getVar('SITE_CONF_VERSION')
 
     failmsg = """Your version of site.conf was generated from an older version of
 site.conf.sample and there have been updates made to this file. Please compare the two
@@ -80,8 +80,8 @@ SANITY_BBLAYERCONF_SAMPLE ?= "${COREBASE}/meta*/conf/bblayers.conf.sample"
 python oecore_update_bblayers() {
     # bblayers.conf is out of date, so see if we can resolve that
 
-    current_lconf = int(d.getVar('LCONF_VERSION', True))
-    lconf_version = int(d.getVar('LAYER_CONF_VERSION', True))
+    current_lconf = int(d.getVar('LCONF_VERSION'))
+    lconf_version = int(d.getVar('LAYER_CONF_VERSION'))
 
     failmsg = """Your version of bblayers.conf has the wrong LCONF_VERSION (has ${LCONF_VERSION}, expecting ${LAYER_CONF_VERSION}).
 Please compare your file against bblayers.conf.sample and merge any changes before continuing.
@@ -141,7 +141,7 @@ is a good way to visualise the changes."""
     # Handle rename of meta-yocto -> meta-poky
     # This marks the start of separate version numbers but code is needed in OE-Core
     # for the migration, one last time.
-    layers = d.getVar('BBLAYERS', True).split()
+    layers = d.getVar('BBLAYERS').split()
     layers = [ os.path.basename(path) for path in layers ]
     if 'meta-yocto' in layers:
         found = False
@@ -172,7 +172,7 @@ is a good way to visualise the changes."""
 }
 
 def raise_sanity_error(msg, d, network_error=False):
-    if d.getVar("SANITY_USE_EVENTS", True) == "1":
+    if d.getVar("SANITY_USE_EVENTS") == "1":
         try:
             bb.event.fire(bb.event.SanityCheckFailed(msg, network_error), d)
         except TypeError:
@@ -198,7 +198,7 @@ def check_toolchain_tune_args(data, tune, multilib, errs):
     return found_errors
 
 def check_toolchain_args_present(data, tune, multilib, tune_errors, which):
-    args_set = (data.getVar("TUNE_%s" % which, True) or "").split()
+    args_set = (data.getVar("TUNE_%s" % which) or "").split()
     args_wanted = (data.getVar("TUNEABI_REQUIRED_%s_tune-%s" % (which, tune), True) or "").split()
     args_missing = []
 
@@ -228,7 +228,7 @@ def check_toolchain_tune(data, tune, multilib):
     localdata.setVar("OVERRIDES", overrides)
     bb.data.update_data(localdata)
     bb.debug(2, "Sanity-checking tuning '%s' (%s) features:" % (tune, multilib))
-    features = (localdata.getVar("TUNE_FEATURES_tune-%s" % tune, True) or "").split()
+    features = (localdata.getVar("TUNE_FEATURES_tune-%s" % tune) or "").split()
     if not features:
         return "Tuning '%s' has no defined features, and cannot be used." % tune
     valid_tunes = localdata.getVarFlags('TUNEVALID') or {}
@@ -248,9 +248,9 @@ def check_toolchain_tune(data, tune, multilib):
             bb.debug(2, "  %s: %s" % (feature, valid_tunes[feature]))
         else:
             tune_errors.append("Feature '%s' is not defined." % feature)
-    whitelist = localdata.getVar("TUNEABI_WHITELIST", True)
+    whitelist = localdata.getVar("TUNEABI_WHITELIST")
     if whitelist:
-        tuneabi = localdata.getVar("TUNEABI_tune-%s" % tune, True)
+        tuneabi = localdata.getVar("TUNEABI_tune-%s" % tune)
         if not tuneabi:
             tuneabi = tune
         if True not in [x in whitelist.split() for x in tuneabi.split()]:
@@ -264,13 +264,13 @@ def check_toolchain_tune(data, tune, multilib):
 
 def check_toolchain(data):
     tune_error_set = []
-    deftune = data.getVar("DEFAULTTUNE", True)
+    deftune = data.getVar("DEFAULTTUNE")
     tune_errors = check_toolchain_tune(data, deftune, 'default')
     if tune_errors:
         tune_error_set.append(tune_errors)
 
-    multilibs = (data.getVar("MULTILIB_VARIANTS", True) or "").split()
-    global_multilibs = (data.getVar("MULTILIB_GLOBAL_VARIANTS", True) or "").split()
+    multilibs = (data.getVar("MULTILIB_VARIANTS") or "").split()
+    global_multilibs = (data.getVar("MULTILIB_GLOBAL_VARIANTS") or "").split()
 
     if multilibs:
         seen_libs = []
@@ -282,7 +282,7 @@ def check_toolchain(data):
             seen_libs.append(lib)
             if not lib in global_multilibs:
                 tune_error_set.append("Multilib %s is not present in MULTILIB_GLOBAL_VARIANTS" % lib)
-            tune = data.getVar("DEFAULTTUNE_virtclass-multilib-%s" % lib, True)
+            tune = data.getVar("DEFAULTTUNE_virtclass-multilib-%s" % lib)
             if tune in seen_tunes:
                 tune_error_set.append("The tuning '%s' appears in more than one multilib." % tune)
             else:
@@ -360,10 +360,10 @@ def check_connectivity(d):
     # URI's to check can be set in the CONNECTIVITY_CHECK_URIS variable
     # using the same syntax as for SRC_URI. If the variable is not set
     # the check is skipped
-    test_uris = (d.getVar('CONNECTIVITY_CHECK_URIS', True) or "").split()
+    test_uris = (d.getVar('CONNECTIVITY_CHECK_URIS') or "").split()
     retval = ""
 
-    bbn = d.getVar('BB_NO_NETWORK', True)
+    bbn = d.getVar('BB_NO_NETWORK')
     if bbn not in (None, '0', '1'):
         return 'BB_NO_NETWORK should be "0" or "1", but it is "%s"' % bbn
 
@@ -382,7 +382,7 @@ def check_connectivity(d):
         except Exception as err:
             # Allow the message to be configured so that users can be
             # pointed to a support mechanism.
-            msg = data.getVar('CONNECTIVITY_CHECK_MSG', True) or ""
+            msg = data.getVar('CONNECTIVITY_CHECK_MSG') or ""
             if len(msg) == 0:
                 msg = "%s.\n" % err
                 msg += " Please ensure your host's network is configured correctly,\n"
@@ -395,7 +395,7 @@ def check_connectivity(d):
 def check_supported_distro(sanity_data):
     from fnmatch import fnmatch
 
-    tested_distros = sanity_data.getVar('SANITY_TESTED_DISTROS', True)
+    tested_distros = sanity_data.getVar('SANITY_TESTED_DISTROS')
     if not tested_distros:
         return
 
@@ -418,17 +418,17 @@ def check_sanity_validmachine(sanity_data):
     messages = ""
 
     # Check TUNE_ARCH is set
-    if sanity_data.getVar('TUNE_ARCH', True) == 'INVALID':
+    if sanity_data.getVar('TUNE_ARCH') == 'INVALID':
         messages = messages + 'TUNE_ARCH is unset. Please ensure your MACHINE configuration includes a valid tune configuration file which will set this correctly.\n'
 
     # Check TARGET_OS is set
-    if sanity_data.getVar('TARGET_OS', True) == 'INVALID':
+    if sanity_data.getVar('TARGET_OS') == 'INVALID':
         messages = messages + 'Please set TARGET_OS directly, or choose a MACHINE or DISTRO that does so.\n'
 
     # Check that we don't have duplicate entries in PACKAGE_ARCHS & that TUNE_PKGARCH is in PACKAGE_ARCHS
-    pkgarchs = sanity_data.getVar('PACKAGE_ARCHS', True)
-    tunepkg = sanity_data.getVar('TUNE_PKGARCH', True)
-    defaulttune = sanity_data.getVar('DEFAULTTUNE', True)
+    pkgarchs = sanity_data.getVar('PACKAGE_ARCHS')
+    tunepkg = sanity_data.getVar('TUNE_PKGARCH')
+    defaulttune = sanity_data.getVar('DEFAULTTUNE')
     tunefound = False
     seen = {}
     dups = []
@@ -476,7 +476,7 @@ def check_gcc_march(sanity_data):
             result = True;
 
     if not result:
-        build_arch = sanity_data.getVar('BUILD_ARCH', True)
+        build_arch = sanity_data.getVar('BUILD_ARCH')
         status,res = oe.utils.getstatusoutput(sanity_data.expand("${BUILD_CC} -march=%s gcc_test.c -o gcc_test" % build_arch))
         if status == 0:
             message = "BUILD_CFLAGS_append = \" -march=%s\"" % build_arch
@@ -564,11 +564,11 @@ def check_perl_modules(sanity_data):
     return None
 
 def sanity_check_conffiles(d):
-    funcs = d.getVar('BBLAYERS_CONF_UPDATE_FUNCS', True).split()
+    funcs = d.getVar('BBLAYERS_CONF_UPDATE_FUNCS').split()
     for func in funcs:
         conffile, current_version, required_version, func = func.split(":")
-        if check_conf_exists(conffile, d) and d.getVar(current_version, True) is not None and \
-                d.getVar(current_version, True) != d.getVar(required_version, True):
+        if check_conf_exists(conffile, d) and d.getVar(current_version) is not None and \
+                d.getVar(current_version) != d.getVar(required_version):
             try:
                 bb.build.exec_func(func, d, pythonexception=True)
             except NotImplementedError as e:
@@ -581,8 +581,8 @@ def sanity_handle_abichanges(status, d):
     #
     import subprocess
 
-    current_abi = d.getVar('OELAYOUT_ABI', True)
-    abifile = d.getVar('SANITY_ABIFILE', True)
+    current_abi = d.getVar('OELAYOUT_ABI')
+    abifile = d.getVar('SANITY_ABIFILE')
     if os.path.exists(abifile):
         with open(abifile, "r") as f:
             abi = f.read().strip()
@@ -677,12 +677,12 @@ def check_sanity_version_change(status, d):
         missing = missing + "GNU make,"
 
     if not check_app_exists('${BUILD_CC}', d):
-        missing = missing + "C Compiler (%s)," % d.getVar("BUILD_CC", True)
+        missing = missing + "C Compiler (%s)," % d.getVar("BUILD_CC")
 
     if not check_app_exists('${BUILD_CXX}', d):
-        missing = missing + "C++ Compiler (%s)," % d.getVar("BUILD_CXX", True)
+        missing = missing + "C++ Compiler (%s)," % d.getVar("BUILD_CXX")
 
-    required_utilities = d.getVar('SANITY_REQUIRED_UTILITIES', True)
+    required_utilities = d.getVar('SANITY_REQUIRED_UTILITIES')
 
     for util in required_utilities.split():
         if not check_app_exists(util, d):
@@ -692,7 +692,7 @@ def check_sanity_version_change(status, d):
         missing = missing.rstrip(',')
         status.addresult("Please install the following missing utilities: %s\n" % missing)
 
-    assume_provided = d.getVar('ASSUME_PROVIDED', True).split()
+    assume_provided = d.getVar('ASSUME_PROVIDED').split()
     # Check user doesn't have ASSUME_PROVIDED = instead of += in local.conf
     if "diffstat-native" not in assume_provided:
         status.addresult('Please use ASSUME_PROVIDED +=, not ASSUME_PROVIDED = in your local.conf\n')
@@ -715,7 +715,7 @@ def check_sanity_version_change(status, d):
         status.addresult(" __sync_bool_compare_and_swap (&atomic, 2, 3);\n")
 
     # Check that TMPDIR isn't on a filesystem with limited filename length (eg. eCryptFS)
-    tmpdir = d.getVar('TMPDIR', True)
+    tmpdir = d.getVar('TMPDIR')
     status.addresult(check_create_long_filename(tmpdir, "TMPDIR"))
     tmpdirmode = os.stat(tmpdir).st_mode
     if (tmpdirmode & stat.S_ISGID):
@@ -739,7 +739,7 @@ def check_sanity_version_change(status, d):
     if netcheck:
         status.network_error = True
 
-    nolibs = d.getVar('NO32LIBS', True)
+    nolibs = d.getVar('NO32LIBS')
     if not nolibs:
         lib32path = '/lib'
         if os.path.exists('/lib64') and ( os.path.islink('/lib64') or os.path.islink('/lib') ):
@@ -748,7 +748,7 @@ def check_sanity_version_change(status, d):
         if os.path.exists('%s/libc.so.6' % lib32path) and not os.path.exists('/usr/include/gnu/stubs-32.h'):
             status.addresult("You have a 32-bit libc, but no 32-bit headers. You must install the 32-bit libc headers.\n")
 
-    bbpaths = d.getVar('BBPATH', True).split(":")
+    bbpaths = d.getVar('BBPATH').split(":")
     if ("." in bbpaths or "./" in bbpaths or "" in bbpaths):
         status.addresult("BBPATH references the current directory, either through " \
                 "an empty entry, a './' or a '.'.\n\t This is unsafe and means your "\
@@ -758,7 +758,7 @@ def check_sanity_version_change(status, d):
758 "references.\n" \ 758 "references.\n" \
759 "Parsed BBPATH is" + str(bbpaths)); 759 "Parsed BBPATH is" + str(bbpaths));
760 760
761 oes_bb_conf = d.getVar( 'OES_BITBAKE_CONF', True) 761 oes_bb_conf = d.getVar( 'OES_BITBAKE_CONF')
762 if not oes_bb_conf: 762 if not oes_bb_conf:
763 status.addresult('You are not using the OpenEmbedded version of conf/bitbake.conf. This means your environment is misconfigured, in particular check BBPATH.\n') 763 status.addresult('You are not using the OpenEmbedded version of conf/bitbake.conf. This means your environment is misconfigured, in particular check BBPATH.\n')
764 764
@@ -793,26 +793,26 @@ def check_sanity_everybuild(status, d):
 
     # Check the bitbake version meets minimum requirements
     from distutils.version import LooseVersion
-    minversion = d.getVar('BB_MIN_VERSION', True)
+    minversion = d.getVar('BB_MIN_VERSION')
     if (LooseVersion(bb.__version__) < LooseVersion(minversion)):
         status.addresult('Bitbake version %s is required and version %s was found\n' % (minversion, bb.__version__))
 
     sanity_check_locale(d)
 
-    paths = d.getVar('PATH', True).split(":")
+    paths = d.getVar('PATH').split(":")
     if "." in paths or "./" in paths or "" in paths:
         status.addresult("PATH contains '.', './' or '' (empty element), which will break the build, please remove this.\nParsed PATH is " + str(paths) + "\n")
 
     # Check that the DISTRO is valid, if set
     # need to take into account DISTRO renaming DISTRO
-    distro = d.getVar('DISTRO', True)
+    distro = d.getVar('DISTRO')
     if distro and distro != "nodistro":
         if not ( check_conf_exists("conf/distro/${DISTRO}.conf", d) or check_conf_exists("conf/distro/include/${DISTRO}.inc", d) ):
-            status.addresult("DISTRO '%s' not found. Please set a valid DISTRO in your local.conf\n" % d.getVar("DISTRO", True))
+            status.addresult("DISTRO '%s' not found. Please set a valid DISTRO in your local.conf\n" % d.getVar("DISTRO"))
 
     # Check that DL_DIR is set, exists and is writable. In theory, we should never even hit the check if DL_DIR isn't
     # set, since so much relies on it being set.
-    dldir = d.getVar('DL_DIR', True)
+    dldir = d.getVar('DL_DIR')
     if not dldir:
         status.addresult("DL_DIR is not set. Your environment is misconfigured, check that DL_DIR is set, and if the directory exists, that it is writable. \n")
     if os.path.exists(dldir) and not os.access(dldir, os.W_OK):
@@ -821,7 +821,7 @@ def check_sanity_everybuild(status, d):
 
     # Check that the MACHINE is valid, if it is set
     machinevalid = True
-    if d.getVar('MACHINE', True):
+    if d.getVar('MACHINE'):
         if not check_conf_exists("conf/machine/${MACHINE}.conf", d):
             status.addresult('Please set a valid MACHINE in your local.conf or environment\n')
             machinevalid = False
@@ -834,7 +834,7 @@ def check_sanity_everybuild(status, d):
     status.addresult(check_toolchain(d))
 
     # Check that the SDKMACHINE is valid, if it is set
-    if d.getVar('SDKMACHINE', True):
+    if d.getVar('SDKMACHINE'):
         if not check_conf_exists("conf/machine-sdk/${SDKMACHINE}.conf", d):
             status.addresult('Specified SDKMACHINE value is not valid\n')
         elif d.getVar('SDK_ARCH', False) == "${BUILD_ARCH}":
@@ -847,7 +847,7 @@ def check_sanity_everybuild(status, d):
         status.addresult("Please use a umask which allows a+rx and u+rwx\n")
     os.umask(omask)
 
-    if d.getVar('TARGET_ARCH', True) == "arm":
+    if d.getVar('TARGET_ARCH') == "arm":
         # This path is no longer user-readable in modern (very recent) Linux
         try:
             if os.path.exists("/proc/sys/vm/mmap_min_addr"):
@@ -860,7 +860,7 @@ def check_sanity_everybuild(status, d):
         except:
             pass
 
-    oeroot = d.getVar('COREBASE', True)
+    oeroot = d.getVar('COREBASE')
     if oeroot.find('+') != -1:
         status.addresult("Error, you have an invalid character (+) in your COREBASE directory path. Please move the installation to a directory which doesn't include any + characters.")
     if oeroot.find('@') != -1:
@@ -875,7 +875,7 @@ def check_sanity_everybuild(status, d):
                  'git', 'gitsm', 'hg', 'osc', 'p4', 'svn', \
                  'bzr', 'cvs', 'npm', 'sftp', 'ssh']
     for mirror_var in mirror_vars:
-        mirrors = (d.getVar(mirror_var, True) or '').replace('\\n', '\n').split('\n')
+        mirrors = (d.getVar(mirror_var) or '').replace('\\n', '\n').split('\n')
         for mirror_entry in mirrors:
             mirror_entry = mirror_entry.strip()
             if not mirror_entry:
@@ -914,7 +914,7 @@ def check_sanity_everybuild(status, d):
                 check_symlink(mirror_base, d)
 
     # Check that TMPDIR hasn't changed location since the last time we were run
-    tmpdir = d.getVar('TMPDIR', True)
+    tmpdir = d.getVar('TMPDIR')
     checkfile = os.path.join(tmpdir, "saved_tmpdir")
     if os.path.exists(checkfile):
         with open(checkfile, "r") as f:
@@ -951,8 +951,8 @@ def check_sanity(sanity_data):
 
     status = SanityStatus()
 
-    tmpdir = sanity_data.getVar('TMPDIR', True)
-    sstate_dir = sanity_data.getVar('SSTATE_DIR', True)
+    tmpdir = sanity_data.getVar('TMPDIR')
+    sstate_dir = sanity_data.getVar('SSTATE_DIR')
 
     check_symlink(sstate_dir, sanity_data)
 
@@ -976,7 +976,7 @@ def check_sanity(sanity_data):
 
     check_sanity_everybuild(status, sanity_data)
 
-    sanity_version = int(sanity_data.getVar('SANITY_VERSION', True) or 1)
+    sanity_version = int(sanity_data.getVar('SANITY_VERSION') or 1)
     network_error = False
     # NATIVELSBSTRING var may have been overridden with "universal", so
     # get actual host distribution id and version
diff --git a/meta/classes/sign_ipk.bbclass b/meta/classes/sign_ipk.bbclass
index a481f6d9a8..e5057b7799 100644
--- a/meta/classes/sign_ipk.bbclass
+++ b/meta/classes/sign_ipk.bbclass
@@ -29,10 +29,10 @@ IPK_GPG_SIGNATURE_TYPE ?= 'ASC'
 python () {
     # Check configuration
     for var in ('IPK_GPG_NAME', 'IPK_GPG_PASSPHRASE_FILE'):
-        if not d.getVar(var, True):
+        if not d.getVar(var):
             raise_sanity_error("You need to define %s in the config" % var, d)
 
-    sigtype = d.getVar("IPK_GPG_SIGNATURE_TYPE", True)
+    sigtype = d.getVar("IPK_GPG_SIGNATURE_TYPE")
     if sigtype.upper() != "ASC" and sigtype.upper() != "BIN":
         raise_sanity_error("Bad value for IPK_GPG_SIGNATURE_TYPE (%s), use either ASC or BIN" % sigtype)
 }
@@ -42,11 +42,11 @@ def sign_ipk(d, ipk_to_sign):
 
     bb.debug(1, 'Signing ipk: %s' % ipk_to_sign)
 
-    signer = get_signer(d, d.getVar('IPK_GPG_BACKEND', True))
-    sig_type = d.getVar('IPK_GPG_SIGNATURE_TYPE', True)
+    signer = get_signer(d, d.getVar('IPK_GPG_BACKEND'))
+    sig_type = d.getVar('IPK_GPG_SIGNATURE_TYPE')
     is_ascii_sig = (sig_type.upper() != "BIN")
 
     signer.detach_sign(ipk_to_sign,
-                       d.getVar('IPK_GPG_NAME', True),
-                       d.getVar('IPK_GPG_PASSPHRASE_FILE', True),
+                       d.getVar('IPK_GPG_NAME'),
+                       d.getVar('IPK_GPG_PASSPHRASE_FILE'),
                        armor=is_ascii_sig)
diff --git a/meta/classes/sign_package_feed.bbclass b/meta/classes/sign_package_feed.bbclass
index 31a6e9b042..71df03bab3 100644
--- a/meta/classes/sign_package_feed.bbclass
+++ b/meta/classes/sign_package_feed.bbclass
@@ -31,10 +31,10 @@ PACKAGE_FEED_GPG_SIGNATURE_TYPE ?= 'ASC'
 python () {
     # Check sanity of configuration
     for var in ('PACKAGE_FEED_GPG_NAME', 'PACKAGE_FEED_GPG_PASSPHRASE_FILE'):
-        if not d.getVar(var, True):
+        if not d.getVar(var):
             raise_sanity_error("You need to define %s in the config" % var, d)
 
-    sigtype = d.getVar("PACKAGE_FEED_GPG_SIGNATURE_TYPE", True)
+    sigtype = d.getVar("PACKAGE_FEED_GPG_SIGNATURE_TYPE")
     if sigtype.upper() != "ASC" and sigtype.upper() != "BIN":
         raise_sanity_error("Bad value for PACKAGE_FEED_GPG_SIGNATURE_TYPE (%s), use either ASC or BIN" % sigtype)
 }
diff --git a/meta/classes/sign_rpm.bbclass b/meta/classes/sign_rpm.bbclass
index a8ea75faaa..2a08020819 100644
--- a/meta/classes/sign_rpm.bbclass
+++ b/meta/classes/sign_rpm.bbclass
@@ -22,11 +22,11 @@ RPM_GPG_BACKEND ?= 'local'
 
 
 python () {
-    if d.getVar('RPM_GPG_PASSPHRASE_FILE', True):
+    if d.getVar('RPM_GPG_PASSPHRASE_FILE'):
         raise_sanity_error('RPM_GPG_PASSPHRASE_FILE is replaced by RPM_GPG_PASSPHRASE', d)
     # Check configuration
     for var in ('RPM_GPG_NAME', 'RPM_GPG_PASSPHRASE'):
-        if not d.getVar(var, True):
+        if not d.getVar(var):
             raise_sanity_error("You need to define %s in the config" % var, d)
 
     # Set the expected location of the public key
@@ -41,12 +41,12 @@ python sign_rpm () {
     import glob
     from oe.gpg_sign import get_signer
 
-    signer = get_signer(d, d.getVar('RPM_GPG_BACKEND', True))
-    rpms = glob.glob(d.getVar('RPM_PKGWRITEDIR', True) + '/*')
+    signer = get_signer(d, d.getVar('RPM_GPG_BACKEND'))
+    rpms = glob.glob(d.getVar('RPM_PKGWRITEDIR') + '/*')
 
     signer.sign_rpms(rpms,
-                     d.getVar('RPM_GPG_NAME', True),
-                     d.getVar('RPM_GPG_PASSPHRASE', True))
+                     d.getVar('RPM_GPG_NAME'),
+                     d.getVar('RPM_GPG_PASSPHRASE'))
 }
 
 do_package_index[depends] += "signing-keys:do_deploy"
diff --git a/meta/classes/siteconfig.bbclass b/meta/classes/siteconfig.bbclass
index 45dce489de..9d51a0266a 100644
--- a/meta/classes/siteconfig.bbclass
+++ b/meta/classes/siteconfig.bbclass
@@ -2,7 +2,7 @@ python siteconfig_do_siteconfig () {
     shared_state = sstate_state_fromvars(d)
     if shared_state['task'] != 'populate_sysroot':
         return
-    if not os.path.isdir(os.path.join(d.getVar('FILE_DIRNAME', True), 'site_config')):
+    if not os.path.isdir(os.path.join(d.getVar('FILE_DIRNAME'), 'site_config')):
         bb.debug(1, "No site_config directory, skipping do_siteconfig")
         return
     bb.build.exec_func('do_siteconfig_gencache', d)
diff --git a/meta/classes/siteinfo.bbclass b/meta/classes/siteinfo.bbclass
index 6eca004c5e..abb194f0d6 100644
--- a/meta/classes/siteinfo.bbclass
+++ b/meta/classes/siteinfo.bbclass
@@ -113,14 +113,14 @@ def siteinfo_data(d):
 
     # Add in any extra user supplied data which may come from a BSP layer, removing the
     # need to always change this class directly
-    extra_siteinfo = (d.getVar("SITEINFO_EXTRA_DATAFUNCS", True) or "").split()
+    extra_siteinfo = (d.getVar("SITEINFO_EXTRA_DATAFUNCS") or "").split()
     for m in extra_siteinfo:
         call = m + "(archinfo, osinfo, targetinfo, d)"
         locs = { "archinfo" : archinfo, "osinfo" : osinfo, "targetinfo" : targetinfo, "d" : d}
         archinfo, osinfo, targetinfo = bb.utils.better_eval(call, locs)
 
-    hostarch = d.getVar("HOST_ARCH", True)
-    hostos = d.getVar("HOST_OS", True)
+    hostarch = d.getVar("HOST_ARCH")
+    hostos = d.getVar("HOST_OS")
     target = "%s-%s" % (hostarch, hostos)
 
     sitedata = []
@@ -144,7 +144,7 @@ python () {
         d.setVar("SITEINFO_ENDIANNESS", "be")
     else:
         bb.error("Unable to determine endianness for architecture '%s'" %
-                 d.getVar("HOST_ARCH", True))
+                 d.getVar("HOST_ARCH"))
         bb.fatal("Please add your architecture to siteinfo.bbclass")
 
     if "bit-32" in sitedata:
@@ -153,14 +153,14 @@ python () {
         d.setVar("SITEINFO_BITS", "64")
     else:
         bb.error("Unable to determine bit size for architecture '%s'" %
-                 d.getVar("HOST_ARCH", True))
+                 d.getVar("HOST_ARCH"))
         bb.fatal("Please add your architecture to siteinfo.bbclass")
 }
 
 def siteinfo_get_files(d, aclocalcache = False):
     sitedata = siteinfo_data(d)
     sitefiles = ""
-    for path in d.getVar("BBPATH", True).split(":"):
+    for path in d.getVar("BBPATH").split(":"):
         for element in sitedata:
             filename = os.path.join(path, "site", element)
             if os.path.exists(filename):
@@ -177,7 +177,7 @@ def siteinfo_get_files(d, aclocalcache = False):
     # issues and the directory being created/removed whilst this code executes. This can happen
     # when a multilib recipe is parsed along with its base variant which may be running at the time
     # causing rare but nasty failures
-    path_siteconfig = d.getVar('ACLOCALDIR', True)
+    path_siteconfig = d.getVar('ACLOCALDIR')
     if path_siteconfig and os.path.isdir(path_siteconfig):
         for i in os.listdir(path_siteconfig):
             if not i.endswith("_config"):
diff --git a/meta/classes/spdx.bbclass b/meta/classes/spdx.bbclass
index 89394d3a96..c5f544d2a4 100644
--- a/meta/classes/spdx.bbclass
+++ b/meta/classes/spdx.bbclass
@@ -26,20 +26,20 @@ python do_spdx () {
     import json, shutil
 
     info = {}
-    info['workdir'] = d.getVar('WORKDIR', True)
-    info['sourcedir'] = d.getVar('SPDX_S', True)
-    info['pn'] = d.getVar('PN', True)
-    info['pv'] = d.getVar('PV', True)
-    info['spdx_version'] = d.getVar('SPDX_VERSION', True)
-    info['data_license'] = d.getVar('DATA_LICENSE', True)
+    info['workdir'] = d.getVar('WORKDIR')
+    info['sourcedir'] = d.getVar('SPDX_S')
+    info['pn'] = d.getVar('PN')
+    info['pv'] = d.getVar('PV')
+    info['spdx_version'] = d.getVar('SPDX_VERSION')
+    info['data_license'] = d.getVar('DATA_LICENSE')
 
-    sstatedir = d.getVar('SPDXSSTATEDIR', True)
+    sstatedir = d.getVar('SPDXSSTATEDIR')
     sstatefile = os.path.join(sstatedir, info['pn'] + info['pv'] + ".spdx")
 
-    manifest_dir = d.getVar('SPDX_MANIFEST_DIR', True)
+    manifest_dir = d.getVar('SPDX_MANIFEST_DIR')
     info['outfile'] = os.path.join(manifest_dir, info['pn'] + ".spdx" )
 
-    info['spdx_temp_dir'] = d.getVar('SPDX_TEMP_DIR', True)
+    info['spdx_temp_dir'] = d.getVar('SPDX_TEMP_DIR')
     info['tar_file'] = os.path.join(info['workdir'], info['pn'] + ".tar.gz" )
 
     # Make sure important dirs exist
@@ -74,9 +74,9 @@ python do_spdx () {
         foss_license_info = cached_spdx['Licenses']
     else:
         ## setup fossology command
-        foss_server = d.getVar('FOSS_SERVER', True)
-        foss_flags = d.getVar('FOSS_WGET_FLAGS', True)
-        foss_full_spdx = d.getVar('FOSS_FULL_SPDX', True) == "true" or False
+        foss_server = d.getVar('FOSS_SERVER')
+        foss_flags = d.getVar('FOSS_WGET_FLAGS')
+        foss_full_spdx = d.getVar('FOSS_FULL_SPDX') == "true" or False
         foss_command = "wget %s --post-file=%s %s"\
             % (foss_flags, info['tar_file'], foss_server)
 
diff --git a/meta/classes/sstate.bbclass b/meta/classes/sstate.bbclass
index 8643f3d247..546e276d9f 100644
--- a/meta/classes/sstate.bbclass
+++ b/meta/classes/sstate.bbclass
@@ -11,7 +11,7 @@ def generate_sstatefn(spec, hash, d):
 SSTATE_PKGARCH = "${PACKAGE_ARCH}"
 SSTATE_PKGSPEC = "sstate:${PN}:${PACKAGE_ARCH}${TARGET_VENDOR}-${TARGET_OS}:${PV}:${PR}:${SSTATE_PKGARCH}:${SSTATE_VERSION}:"
 SSTATE_SWSPEC = "sstate:${PN}::${PV}:${PR}::${SSTATE_VERSION}:"
-SSTATE_PKGNAME = "${SSTATE_EXTRAPATH}${@generate_sstatefn(d.getVar('SSTATE_PKGSPEC', True), d.getVar('BB_TASKHASH', True), d)}"
+SSTATE_PKGNAME = "${SSTATE_EXTRAPATH}${@generate_sstatefn(d.getVar('SSTATE_PKGSPEC'), d.getVar('BB_TASKHASH'), d)}"
 SSTATE_PKG = "${SSTATE_DIR}/${SSTATE_PKGNAME}"
 SSTATE_EXTRAPATH = ""
 SSTATE_EXTRAPATHWILDCARD = ""
@@ -34,7 +34,7 @@ SSTATE_DUPWHITELIST += "${DEPLOY_DIR_SRC}"
 SSTATE_DUPWHITELIST += "${DEPLOY_DIR}/sdk/README_-_DO_NOT_DELETE_FILES_IN_THIS_DIRECTORY.txt"
 
 SSTATE_SCAN_FILES ?= "*.la *-config *_config"
-SSTATE_SCAN_CMD ?= 'find ${SSTATE_BUILDDIR} \( -name "${@"\" -o -name \"".join(d.getVar("SSTATE_SCAN_FILES", True).split())}" \) -type f'
+SSTATE_SCAN_CMD ?= 'find ${SSTATE_BUILDDIR} \( -name "${@"\" -o -name \"".join(d.getVar("SSTATE_SCAN_FILES").split())}" \) -type f'
 
 BB_HASHFILENAME = "False ${SSTATE_PKGSPEC} ${SSTATE_SWSPEC}"
 
@@ -84,7 +84,7 @@ python () {
         d.setVar('SSTATE_PKGARCH', d.expand("${SDK_ARCH}_${SDK_OS}"))
     elif bb.data.inherits_class('cross-canadian', d):
         d.setVar('SSTATE_PKGARCH', d.expand("${SDK_ARCH}_${PACKAGE_ARCH}"))
-    elif bb.data.inherits_class('allarch', d) and d.getVar("PACKAGE_ARCH", True) == "all":
+    elif bb.data.inherits_class('allarch', d) and d.getVar("PACKAGE_ARCH") == "all":
         d.setVar('SSTATE_PKGARCH', "allarch")
     else:
         d.setVar('SSTATE_MANMACH', d.expand("${PACKAGE_ARCH}"))
@@ -100,7 +100,7 @@ python () {
         scan_cmd = "grep -Irl ${STAGING_DIR} ${SSTATE_BUILDDIR}"
         d.setVar('SSTATE_SCAN_CMD', scan_cmd)
 
-    unique_tasks = sorted(set((d.getVar('SSTATETASKS', True) or "").split()))
+    unique_tasks = sorted(set((d.getVar('SSTATETASKS') or "").split()))
     d.setVar('SSTATETASKS', " ".join(unique_tasks))
     for task in unique_tasks:
         d.prependVarFlag(task, 'prefuncs', "sstate_task_prefunc ")
@@ -118,7 +118,7 @@ def sstate_init(task, d):
 
 def sstate_state_fromvars(d, task = None):
     if task is None:
-        task = d.getVar('BB_CURRENTTASK', True)
+        task = d.getVar('BB_CURRENTTASK')
         if not task:
             bb.fatal("sstate code running without task context?!")
         task = task.replace("_setscene", "")
@@ -200,7 +200,7 @@ def sstate_install(ss, d):
             shareddirs.append(dstdir)
 
     # Check the file list for conflicts against files which already exist
-    whitelist = (d.getVar("SSTATE_DUPWHITELIST", True) or "").split()
+    whitelist = (d.getVar("SSTATE_DUPWHITELIST") or "").split()
     match = []
     for f in sharedfiles:
         if os.path.exists(f):
@@ -239,7 +239,7 @@ def sstate_install(ss, d):
239 "things (e.g. bluez 4 and bluez 5 and the correct solution for that would " \ 239 "things (e.g. bluez 4 and bluez 5 and the correct solution for that would " \
240 "be to resolve the conflict. If in doubt, please ask on the mailing list, " \ 240 "be to resolve the conflict. If in doubt, please ask on the mailing list, " \
241 "sharing the error and filelist above." % \ 241 "sharing the error and filelist above." % \
242 (d.getVar('PN', True), "\n ".join(match))) 242 (d.getVar('PN'), "\n ".join(match)))
243 bb.fatal("If the above message is too much, the simpler version is you're advised to wipe out tmp and rebuild (reusing sstate is fine). That will likely fix things in most (but not all) cases.") 243 bb.fatal("If the above message is too much, the simpler version is you're advised to wipe out tmp and rebuild (reusing sstate is fine). That will likely fix things in most (but not all) cases.")
244 244
245 # Write out the manifest 245 # Write out the manifest
@@ -260,7 +260,7 @@ def sstate_install(ss, d):
 
     i = d2.expand("${SSTATE_MANIFESTS}/index-${SSTATE_MANMACH}")
     l = bb.utils.lockfile(i + ".lock")
-    filedata = d.getVar("STAMP", True) + " " + d2.getVar("SSTATE_MANFILEPREFIX", True) + " " + d.getVar("WORKDIR", True) + "\n"
+    filedata = d.getVar("STAMP") + " " + d2.getVar("SSTATE_MANFILEPREFIX") + " " + d.getVar("WORKDIR") + "\n"
     manifests = []
     if os.path.exists(i):
         with open(i, "r") as f:
@@ -275,7 +275,7 @@ def sstate_install(ss, d):
         if os.path.exists(state[1]):
             oe.path.copyhardlinktree(state[1], state[2])
 
-    for postinst in (d.getVar('SSTATEPOSTINSTFUNCS', True) or '').split():
+    for postinst in (d.getVar('SSTATEPOSTINSTFUNCS') or '').split():
         # All hooks should run in the SSTATE_INSTDIR
         bb.build.exec_func(postinst, d, (sstateinst,))
 
@@ -298,8 +298,8 @@ def sstate_installpkg(ss, d):
             oe.path.remove(dir)
 
     sstateinst = d.expand("${WORKDIR}/sstate-install-%s/" % ss['task'])
-    sstatefetch = d.getVar('SSTATE_PKGNAME', True) + '_' + ss['task'] + ".tgz"
-    sstatepkg = d.getVar('SSTATE_PKG', True) + '_' + ss['task'] + ".tgz"
+    sstatefetch = d.getVar('SSTATE_PKGNAME') + '_' + ss['task'] + ".tgz"
+    sstatepkg = d.getVar('SSTATE_PKG') + '_' + ss['task'] + ".tgz"
 
     if not os.path.exists(sstatepkg):
         pstaging_fetch(sstatefetch, sstatepkg, d)
@@ -313,12 +313,12 @@ def sstate_installpkg(ss, d):
313 d.setVar('SSTATE_INSTDIR', sstateinst) 313 d.setVar('SSTATE_INSTDIR', sstateinst)
314 d.setVar('SSTATE_PKG', sstatepkg) 314 d.setVar('SSTATE_PKG', sstatepkg)
315 315
316 if bb.utils.to_boolean(d.getVar("SSTATE_VERIFY_SIG", True), False): 316 if bb.utils.to_boolean(d.getVar("SSTATE_VERIFY_SIG"), False):
317 signer = get_signer(d, 'local') 317 signer = get_signer(d, 'local')
318 if not signer.verify(sstatepkg + '.sig'): 318 if not signer.verify(sstatepkg + '.sig'):
319 bb.warn("Cannot verify signature on sstate package %s" % sstatepkg) 319 bb.warn("Cannot verify signature on sstate package %s" % sstatepkg)
320 320
321 for f in (d.getVar('SSTATEPREINSTFUNCS', True) or '').split() + ['sstate_unpack_package'] + (d.getVar('SSTATEPOSTUNPACKFUNCS', True) or '').split(): 321 for f in (d.getVar('SSTATEPREINSTFUNCS') or '').split() + ['sstate_unpack_package'] + (d.getVar('SSTATEPOSTUNPACKFUNCS') or '').split():
322 # All hooks should run in the SSTATE_INSTDIR 322 # All hooks should run in the SSTATE_INSTDIR
323 bb.build.exec_func(f, d, (sstateinst,)) 323 bb.build.exec_func(f, d, (sstateinst,))
324 324
@@ -328,7 +328,7 @@ def sstate_installpkg(ss, d):
328 sstate_install(ss, d) 328 sstate_install(ss, d)
329 329
330 for plain in ss['plaindirs']: 330 for plain in ss['plaindirs']:
331 workdir = d.getVar('WORKDIR', True) 331 workdir = d.getVar('WORKDIR')
332 src = sstateinst + "/" + plain.replace(workdir, '') 332 src = sstateinst + "/" + plain.replace(workdir, '')
333 dest = plain 333 dest = plain
334 bb.utils.mkdirhier(src) 334 bb.utils.mkdirhier(src)
@@ -344,12 +344,12 @@ python sstate_hardcode_path_unpack () {
344 # sstate_hardcode_path(d) 344 # sstate_hardcode_path(d)
345 import subprocess 345 import subprocess
346 346
347 sstateinst = d.getVar('SSTATE_INSTDIR', True) 347 sstateinst = d.getVar('SSTATE_INSTDIR')
348 fixmefn = sstateinst + "fixmepath" 348 fixmefn = sstateinst + "fixmepath"
349 if os.path.isfile(fixmefn): 349 if os.path.isfile(fixmefn):
350 staging = d.getVar('STAGING_DIR', True) 350 staging = d.getVar('STAGING_DIR')
351 staging_target = d.getVar('STAGING_DIR_TARGET', True) 351 staging_target = d.getVar('STAGING_DIR_TARGET')
352 staging_host = d.getVar('STAGING_DIR_HOST', True) 352 staging_host = d.getVar('STAGING_DIR_HOST')
353 353
354 if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d): 354 if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d):
355 sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIR:%s:g'" % (staging) 355 sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIR:%s:g'" % (staging)
@@ -358,9 +358,9 @@ python sstate_hardcode_path_unpack () {
358 else: 358 else:
359 sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRHOST:%s:g'" % (staging_host) 359 sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRHOST:%s:g'" % (staging_host)
360 360
361 extra_staging_fixmes = d.getVar('EXTRA_STAGING_FIXMES', True) or '' 361 extra_staging_fixmes = d.getVar('EXTRA_STAGING_FIXMES') or ''
362 for fixmevar in extra_staging_fixmes.split(): 362 for fixmevar in extra_staging_fixmes.split():
363 fixme_path = d.getVar(fixmevar, True) 363 fixme_path = d.getVar(fixmevar)
364 sstate_sed_cmd += " -e 's:FIXME_%s:%s:g'" % (fixmevar, fixme_path) 364 sstate_sed_cmd += " -e 's:FIXME_%s:%s:g'" % (fixmevar, fixme_path)
365 365
366 # Add sstateinst to each filename in fixmepath, use xargs to efficiently call sed 366 # Add sstateinst to each filename in fixmepath, use xargs to efficiently call sed
@@ -377,12 +377,12 @@ python sstate_hardcode_path_unpack () {
377def sstate_clean_cachefile(ss, d): 377def sstate_clean_cachefile(ss, d):
378 import oe.path 378 import oe.path
379 379
380 sstatepkgfile = d.getVar('SSTATE_PATHSPEC', True) + "*_" + ss['task'] + ".tgz*" 380 sstatepkgfile = d.getVar('SSTATE_PATHSPEC') + "*_" + ss['task'] + ".tgz*"
381 bb.note("Removing %s" % sstatepkgfile) 381 bb.note("Removing %s" % sstatepkgfile)
382 oe.path.remove(sstatepkgfile) 382 oe.path.remove(sstatepkgfile)
383 383
384def sstate_clean_cachefiles(d): 384def sstate_clean_cachefiles(d):
385 for task in (d.getVar('SSTATETASKS', True) or "").split(): 385 for task in (d.getVar('SSTATETASKS') or "").split():
386 ld = d.createCopy() 386 ld = d.createCopy()
387 ss = sstate_state_fromvars(ld, task) 387 ss = sstate_state_fromvars(ld, task)
388 sstate_clean_cachefile(ss, ld) 388 sstate_clean_cachefile(ss, ld)
@@ -424,7 +424,7 @@ def sstate_clean(ss, d):
424 import glob 424 import glob
425 425
426 d2 = d.createCopy() 426 d2 = d.createCopy()
427 stamp_clean = d.getVar("STAMPCLEAN", True) 427 stamp_clean = d.getVar("STAMPCLEAN")
428 extrainf = d.getVarFlag("do_" + ss['task'], 'stamp-extra-info', True) 428 extrainf = d.getVarFlag("do_" + ss['task'], 'stamp-extra-info', True)
429 if extrainf: 429 if extrainf:
430 d2.setVar("SSTATE_MANMACH", extrainf) 430 d2.setVar("SSTATE_MANMACH", extrainf)
@@ -467,7 +467,7 @@ def sstate_clean(ss, d):
467 oe.path.remove(stfile) 467 oe.path.remove(stfile)
468 468
469 # Removes the users/groups created by the package 469 # Removes the users/groups created by the package
470 for cleanfunc in (d.getVar('SSTATECLEANFUNCS', True) or '').split(): 470 for cleanfunc in (d.getVar('SSTATECLEANFUNCS') or '').split():
471 bb.build.exec_func(cleanfunc, d) 471 bb.build.exec_func(cleanfunc, d)
472 472
473sstate_clean[vardepsexclude] = "SSTATE_MANFILEPREFIX" 473sstate_clean[vardepsexclude] = "SSTATE_MANFILEPREFIX"
@@ -475,13 +475,13 @@ sstate_clean[vardepsexclude] = "SSTATE_MANFILEPREFIX"
475CLEANFUNCS += "sstate_cleanall" 475CLEANFUNCS += "sstate_cleanall"
476 476
477python sstate_cleanall() { 477python sstate_cleanall() {
478 bb.note("Removing shared state for package %s" % d.getVar('PN', True)) 478 bb.note("Removing shared state for package %s" % d.getVar('PN'))
479 479
480 manifest_dir = d.getVar('SSTATE_MANIFESTS', True) 480 manifest_dir = d.getVar('SSTATE_MANIFESTS')
481 if not os.path.exists(manifest_dir): 481 if not os.path.exists(manifest_dir):
482 return 482 return
483 483
484 tasks = d.getVar('SSTATETASKS', True).split() 484 tasks = d.getVar('SSTATETASKS').split()
485 for name in tasks: 485 for name in tasks:
486 ld = d.createCopy() 486 ld = d.createCopy()
487 shared_state = sstate_state_fromvars(ld, name) 487 shared_state = sstate_state_fromvars(ld, name)
@@ -497,10 +497,10 @@ python sstate_hardcode_path () {
497 # Note: the logic in this function needs to match the reverse logic 497 # Note: the logic in this function needs to match the reverse logic
498 # in sstate_installpkg(ss, d) 498 # in sstate_installpkg(ss, d)
499 499
500 staging = d.getVar('STAGING_DIR', True) 500 staging = d.getVar('STAGING_DIR')
501 staging_target = d.getVar('STAGING_DIR_TARGET', True) 501 staging_target = d.getVar('STAGING_DIR_TARGET')
502 staging_host = d.getVar('STAGING_DIR_HOST', True) 502 staging_host = d.getVar('STAGING_DIR_HOST')
503 sstate_builddir = d.getVar('SSTATE_BUILDDIR', True) 503 sstate_builddir = d.getVar('SSTATE_BUILDDIR')
504 504
505 if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d): 505 if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d):
506 sstate_grep_cmd = "grep -l -e '%s'" % (staging) 506 sstate_grep_cmd = "grep -l -e '%s'" % (staging)
@@ -512,14 +512,14 @@ python sstate_hardcode_path () {
512 sstate_grep_cmd = "grep -l -e '%s'" % (staging_host) 512 sstate_grep_cmd = "grep -l -e '%s'" % (staging_host)
513 sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRHOST:g'" % (staging_host) 513 sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRHOST:g'" % (staging_host)
514 514
515 extra_staging_fixmes = d.getVar('EXTRA_STAGING_FIXMES', True) or '' 515 extra_staging_fixmes = d.getVar('EXTRA_STAGING_FIXMES') or ''
516 for fixmevar in extra_staging_fixmes.split(): 516 for fixmevar in extra_staging_fixmes.split():
517 fixme_path = d.getVar(fixmevar, True) 517 fixme_path = d.getVar(fixmevar)
518 sstate_sed_cmd += " -e 's:%s:FIXME_%s:g'" % (fixme_path, fixmevar) 518 sstate_sed_cmd += " -e 's:%s:FIXME_%s:g'" % (fixme_path, fixmevar)
519 519
520 fixmefn = sstate_builddir + "fixmepath" 520 fixmefn = sstate_builddir + "fixmepath"
521 521
522 sstate_scan_cmd = d.getVar('SSTATE_SCAN_CMD', True) 522 sstate_scan_cmd = d.getVar('SSTATE_SCAN_CMD')
523 sstate_filelist_cmd = "tee %s" % (fixmefn) 523 sstate_filelist_cmd = "tee %s" % (fixmefn)
524 524
525 # fixmepath file needs relative paths, drop sstate_builddir prefix 525 # fixmepath file needs relative paths, drop sstate_builddir prefix
@@ -568,17 +568,17 @@ def sstate_package(ss, d):
568 os.remove(path) 568 os.remove(path)
569 os.symlink(base, path) 569 os.symlink(base, path)
570 570
571 tmpdir = d.getVar('TMPDIR', True) 571 tmpdir = d.getVar('TMPDIR')
572 572
573 sstatebuild = d.expand("${WORKDIR}/sstate-build-%s/" % ss['task']) 573 sstatebuild = d.expand("${WORKDIR}/sstate-build-%s/" % ss['task'])
574 sstatepkg = d.getVar('SSTATE_PKG', True) + '_'+ ss['task'] + ".tgz" 574 sstatepkg = d.getVar('SSTATE_PKG') + '_'+ ss['task'] + ".tgz"
575 bb.utils.remove(sstatebuild, recurse=True) 575 bb.utils.remove(sstatebuild, recurse=True)
576 bb.utils.mkdirhier(sstatebuild) 576 bb.utils.mkdirhier(sstatebuild)
577 bb.utils.mkdirhier(os.path.dirname(sstatepkg)) 577 bb.utils.mkdirhier(os.path.dirname(sstatepkg))
578 for state in ss['dirs']: 578 for state in ss['dirs']:
579 if not os.path.exists(state[1]): 579 if not os.path.exists(state[1]):
580 continue 580 continue
581 if d.getVar('SSTATE_SKIP_CREATION', True) == '1': 581 if d.getVar('SSTATE_SKIP_CREATION') == '1':
582 continue 582 continue
583 srcbase = state[0].rstrip("/").rsplit('/', 1)[0] 583 srcbase = state[0].rstrip("/").rsplit('/', 1)[0]
584 for walkroot, dirs, files in os.walk(state[1]): 584 for walkroot, dirs, files in os.walk(state[1]):
@@ -593,7 +593,7 @@ def sstate_package(ss, d):
593 bb.debug(2, "Preparing tree %s for packaging at %s" % (state[1], sstatebuild + state[0])) 593 bb.debug(2, "Preparing tree %s for packaging at %s" % (state[1], sstatebuild + state[0]))
594 oe.path.copyhardlinktree(state[1], sstatebuild + state[0]) 594 oe.path.copyhardlinktree(state[1], sstatebuild + state[0])
595 595
596 workdir = d.getVar('WORKDIR', True) 596 workdir = d.getVar('WORKDIR')
597 for plain in ss['plaindirs']: 597 for plain in ss['plaindirs']:
598 pdir = plain.replace(workdir, sstatebuild) 598 pdir = plain.replace(workdir, sstatebuild)
599 bb.utils.mkdirhier(plain) 599 bb.utils.mkdirhier(plain)
@@ -603,9 +603,9 @@ def sstate_package(ss, d):
603 d.setVar('SSTATE_BUILDDIR', sstatebuild) 603 d.setVar('SSTATE_BUILDDIR', sstatebuild)
604 d.setVar('SSTATE_PKG', sstatepkg) 604 d.setVar('SSTATE_PKG', sstatepkg)
605 605
606 for f in (d.getVar('SSTATECREATEFUNCS', True) or '').split() + \ 606 for f in (d.getVar('SSTATECREATEFUNCS') or '').split() + \
607 ['sstate_create_package', 'sstate_sign_package'] + \ 607 ['sstate_create_package', 'sstate_sign_package'] + \
608 (d.getVar('SSTATEPOSTCREATEFUNCS', True) or '').split(): 608 (d.getVar('SSTATEPOSTCREATEFUNCS') or '').split():
609 # All hooks should run in SSTATE_BUILDDIR. 609 # All hooks should run in SSTATE_BUILDDIR.
610 bb.build.exec_func(f, d, (sstatebuild,)) 610 bb.build.exec_func(f, d, (sstatebuild,))
611 611
@@ -617,7 +617,7 @@ def pstaging_fetch(sstatefetch, sstatepkg, d):
617 import bb.fetch2 617 import bb.fetch2
618 618
619 # Only try and fetch if the user has configured a mirror 619 # Only try and fetch if the user has configured a mirror
620 mirrors = d.getVar('SSTATE_MIRRORS', True) 620 mirrors = d.getVar('SSTATE_MIRRORS')
621 if not mirrors: 621 if not mirrors:
622 return 622 return
623 623
@@ -635,14 +635,14 @@ def pstaging_fetch(sstatefetch, sstatepkg, d):
635 635
636 # if BB_NO_NETWORK is set but we also have SSTATE_MIRROR_ALLOW_NETWORK, 636 # if BB_NO_NETWORK is set but we also have SSTATE_MIRROR_ALLOW_NETWORK,
637 # we'll want to allow network access for the current set of fetches. 637 # we'll want to allow network access for the current set of fetches.
638 if localdata.getVar('BB_NO_NETWORK', True) == "1" and localdata.getVar('SSTATE_MIRROR_ALLOW_NETWORK', True) == "1": 638 if localdata.getVar('BB_NO_NETWORK') == "1" and localdata.getVar('SSTATE_MIRROR_ALLOW_NETWORK') == "1":
639 localdata.delVar('BB_NO_NETWORK') 639 localdata.delVar('BB_NO_NETWORK')
640 640
641 # Try a fetch from the sstate mirror, if it fails just return and 641 # Try a fetch from the sstate mirror, if it fails just return and
642 # we will build the package 642 # we will build the package
643 uris = ['file://{0};downloadfilename={0}'.format(sstatefetch), 643 uris = ['file://{0};downloadfilename={0}'.format(sstatefetch),
644 'file://{0}.siginfo;downloadfilename={0}.siginfo'.format(sstatefetch)] 644 'file://{0}.siginfo;downloadfilename={0}.siginfo'.format(sstatefetch)]
645 if bb.utils.to_boolean(d.getVar("SSTATE_VERIFY_SIG", True), False): 645 if bb.utils.to_boolean(d.getVar("SSTATE_VERIFY_SIG"), False):
646 uris += ['file://{0}.sig;downloadfilename={0}.sig'.format(sstatefetch)] 646 uris += ['file://{0}.sig;downloadfilename={0}.sig'.format(sstatefetch)]
647 647
648 for srcuri in uris: 648 for srcuri in uris:
@@ -671,7 +671,7 @@ python sstate_task_postfunc () {
671 671
672 sstate_install(shared_state, d) 672 sstate_install(shared_state, d)
673 for intercept in shared_state['interceptfuncs']: 673 for intercept in shared_state['interceptfuncs']:
674 bb.build.exec_func(intercept, d, (d.getVar("WORKDIR", True),)) 674 bb.build.exec_func(intercept, d, (d.getVar("WORKDIR"),))
675 omask = os.umask(0o002) 675 omask = os.umask(0o002)
676 if omask != 0o002: 676 if omask != 0o002:
677 bb.note("Using umask 0o002 (not %0o) for sstate packaging" % omask) 677 bb.note("Using umask 0o002 (not %0o) for sstate packaging" % omask)
@@ -709,13 +709,13 @@ sstate_create_package () {
709python sstate_sign_package () { 709python sstate_sign_package () {
710 from oe.gpg_sign import get_signer 710 from oe.gpg_sign import get_signer
711 711
712 if d.getVar('SSTATE_SIG_KEY', True): 712 if d.getVar('SSTATE_SIG_KEY'):
713 signer = get_signer(d, 'local') 713 signer = get_signer(d, 'local')
714 sstate_pkg = d.getVar('SSTATE_PKG', True) 714 sstate_pkg = d.getVar('SSTATE_PKG')
715 if os.path.exists(sstate_pkg + '.sig'): 715 if os.path.exists(sstate_pkg + '.sig'):
716 os.unlink(sstate_pkg + '.sig') 716 os.unlink(sstate_pkg + '.sig')
717 signer.detach_sign(sstate_pkg, d.getVar('SSTATE_SIG_KEY', False), None, 717 signer.detach_sign(sstate_pkg, d.getVar('SSTATE_SIG_KEY', False), None,
718 d.getVar('SSTATE_SIG_PASSPHRASE', True), armor=False) 718 d.getVar('SSTATE_SIG_PASSPHRASE'), armor=False)
719} 719}
720 720
721# 721#
@@ -746,7 +746,7 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=False):
746 splithashfn = sq_hashfn[task].split(" ") 746 splithashfn = sq_hashfn[task].split(" ")
747 spec = splithashfn[1] 747 spec = splithashfn[1]
748 if splithashfn[0] == "True": 748 if splithashfn[0] == "True":
749 extrapath = d.getVar("NATIVELSBSTRING", True) + "/" 749 extrapath = d.getVar("NATIVELSBSTRING") + "/"
750 else: 750 else:
751 extrapath = "" 751 extrapath = ""
752 752
@@ -785,7 +785,7 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=False):
785 missed.append(task) 785 missed.append(task)
786 bb.debug(2, "SState: Looked for but didn't find file %s" % sstatefile) 786 bb.debug(2, "SState: Looked for but didn't find file %s" % sstatefile)
787 787
788 mirrors = d.getVar("SSTATE_MIRRORS", True) 788 mirrors = d.getVar("SSTATE_MIRRORS")
789 if mirrors: 789 if mirrors:
790 # Copy the data object and override DL_DIR and SRC_URI 790 # Copy the data object and override DL_DIR and SRC_URI
791 localdata = bb.data.createCopy(d) 791 localdata = bb.data.createCopy(d)
@@ -801,7 +801,7 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=False):
801 801
802 # if BB_NO_NETWORK is set but we also have SSTATE_MIRROR_ALLOW_NETWORK, 802 # if BB_NO_NETWORK is set but we also have SSTATE_MIRROR_ALLOW_NETWORK,
803 # we'll want to allow network access for the current set of fetches. 803 # we'll want to allow network access for the current set of fetches.
804 if localdata.getVar('BB_NO_NETWORK', True) == "1" and localdata.getVar('SSTATE_MIRROR_ALLOW_NETWORK', True) == "1": 804 if localdata.getVar('BB_NO_NETWORK') == "1" and localdata.getVar('SSTATE_MIRROR_ALLOW_NETWORK') == "1":
805 localdata.delVar('BB_NO_NETWORK') 805 localdata.delVar('BB_NO_NETWORK')
806 806
807 whitelist = bb.runqueue.get_setscene_enforce_whitelist(d) 807 whitelist = bb.runqueue.get_setscene_enforce_whitelist(d)
@@ -868,7 +868,7 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=False):
868 if whitelist and missing: 868 if whitelist and missing:
869 bb.fatal('Required artifacts were unavailable - exiting') 869 bb.fatal('Required artifacts were unavailable - exiting')
870 870
871 inheritlist = d.getVar("INHERIT", True) 871 inheritlist = d.getVar("INHERIT")
872 if "toaster" in inheritlist: 872 if "toaster" in inheritlist:
873 evdata = {'missed': [], 'found': []}; 873 evdata = {'missed': [], 'found': []};
874 for task in missed: 874 for task in missed:
@@ -977,15 +977,15 @@ sstate_eventhandler[eventmask] = "bb.build.TaskSucceeded"
977python sstate_eventhandler() { 977python sstate_eventhandler() {
978 d = e.data 978 d = e.data
979 # When we write an sstate package we rewrite the SSTATE_PKG 979 # When we write an sstate package we rewrite the SSTATE_PKG
980 spkg = d.getVar('SSTATE_PKG', True) 980 spkg = d.getVar('SSTATE_PKG')
981 if not spkg.endswith(".tgz"): 981 if not spkg.endswith(".tgz"):
982 taskname = d.getVar("BB_RUNTASK", True)[3:] 982 taskname = d.getVar("BB_RUNTASK")[3:]
983 spec = d.getVar('SSTATE_PKGSPEC', True) 983 spec = d.getVar('SSTATE_PKGSPEC')
984 swspec = d.getVar('SSTATE_SWSPEC', True) 984 swspec = d.getVar('SSTATE_SWSPEC')
985 if taskname in ["fetch", "unpack", "patch", "populate_lic", "preconfigure"] and swspec: 985 if taskname in ["fetch", "unpack", "patch", "populate_lic", "preconfigure"] and swspec:
986 d.setVar("SSTATE_PKGSPEC", "${SSTATE_SWSPEC}") 986 d.setVar("SSTATE_PKGSPEC", "${SSTATE_SWSPEC}")
987 d.setVar("SSTATE_EXTRAPATH", "") 987 d.setVar("SSTATE_EXTRAPATH", "")
988 sstatepkg = d.getVar('SSTATE_PKG', True) 988 sstatepkg = d.getVar('SSTATE_PKG')
989 bb.siggen.dump_this_task(sstatepkg + '_' + taskname + ".tgz" ".siginfo", d) 989 bb.siggen.dump_this_task(sstatepkg + '_' + taskname + ".tgz" ".siginfo", d)
990} 990}
991 991
@@ -1004,7 +1004,7 @@ python sstate_eventhandler2() {
1004 stamps = e.stamps.values() 1004 stamps = e.stamps.values()
1005 removeworkdir = (d.getVar("SSTATE_PRUNE_OBSOLETEWORKDIR", False) == "1") 1005 removeworkdir = (d.getVar("SSTATE_PRUNE_OBSOLETEWORKDIR", False) == "1")
1006 seen = [] 1006 seen = []
1007 for a in d.getVar("SSTATE_ARCHS", True).split(): 1007 for a in d.getVar("SSTATE_ARCHS").split():
1008 toremove = [] 1008 toremove = []
1009 i = d.expand("${SSTATE_MANIFESTS}/index-" + a) 1009 i = d.expand("${SSTATE_MANIFESTS}/index-" + a)
1010 if not os.path.exists(i): 1010 if not os.path.exists(i):
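The sstate hunks above are typical of the whole cleanup. With expansion now the default, d.getVar('X', True) and d.getVar('X') are interchangeable, while unexpanded reads keep an explicit False, which is why d.getVar('SSTATE_SIG_KEY', False) in sstate_sign_package is untouched. A minimal sketch of the semantics, assuming a BitBake new enough that expansion is the default and that its bb module is importable standalone; the DataSmart store here is illustrative, not a real build session:

    from bb.data_smart import DataSmart

    d = DataSmart()
    d.setVar('PN', 'zlib')
    d.setVar('SSTATE_PKGNAME', 'sstate:${PN}')

    # Expansion is now the default, so the second argument is redundant.
    assert d.getVar('SSTATE_PKGNAME') == 'sstate:zlib'

    # Unexpanded access still takes an explicit False.
    assert d.getVar('SSTATE_PKGNAME', False) == 'sstate:${PN}'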
diff --git a/meta/classes/staging.bbclass b/meta/classes/staging.bbclass
index bfabd06f3f..72a7421ba6 100644
--- a/meta/classes/staging.bbclass
+++ b/meta/classes/staging.bbclass
@@ -69,8 +69,8 @@ sysroot_stage_all() {
 python sysroot_strip () {
     import stat, errno
 
-    dvar = d.getVar('SYSROOT_DESTDIR', True)
-    pn = d.getVar('PN', True)
+    dvar = d.getVar('SYSROOT_DESTDIR')
+    pn = d.getVar('PN')
 
     os.chdir(dvar)
 
@@ -103,9 +103,9 @@ python sysroot_strip () {
 
     elffiles = {}
     inodes = {}
-    libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir", True))
-    baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir", True))
-    if (d.getVar('INHIBIT_SYSROOT_STRIP', True) != '1'):
+    libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
+    baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir"))
+    if (d.getVar('INHIBIT_SYSROOT_STRIP') != '1'):
         #
         # First lets figure out all of the files we may have to process
         #
@@ -136,7 +136,7 @@ python sysroot_strip () {
                 elf_file = isELF(file)
                 if elf_file & 1:
                     if elf_file & 2:
-                        if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn, True) or "").split():
+                        if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn) or "").split():
                             bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
                         else:
                             bb.warn("File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn))
@@ -154,7 +154,7 @@ python sysroot_strip () {
         #
         # Now strip them (in parallel)
         #
-        strip = d.getVar("STRIP", True)
+        strip = d.getVar("STRIP")
         sfiles = []
         for file in elffiles:
             elf_file = int(elffiles[file])
@@ -211,13 +211,13 @@ def sysroot_checkhashes(covered, tasknames, fnids, fns, d, invalidtasks = None):
 python do_populate_sysroot () {
     bb.build.exec_func("sysroot_stage_all", d)
     bb.build.exec_func("sysroot_strip", d)
-    for f in (d.getVar('SYSROOT_PREPROCESS_FUNCS', True) or '').split():
+    for f in (d.getVar('SYSROOT_PREPROCESS_FUNCS') or '').split():
         bb.build.exec_func(f, d)
-    pn = d.getVar("PN", True)
-    multiprov = d.getVar("MULTI_PROVIDER_WHITELIST", True).split()
+    pn = d.getVar("PN")
+    multiprov = d.getVar("MULTI_PROVIDER_WHITELIST").split()
     provdir = d.expand("${SYSROOT_DESTDIR}${base_prefix}/sysroot-providers/")
     bb.utils.mkdirhier(provdir)
-    for p in d.getVar("PROVIDES", True).split():
+    for p in d.getVar("PROVIDES").split():
         if p in multiprov:
             continue
         p = p.replace("/", "_")
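A recurring idiom in these hunks is (d.getVar('X') or '').split(). getVar returns None when a variable is unset, so the or-fallback keeps split() from raising on None. A short sketch under the same assumptions as the previous example:

    # Safe iteration over a possibly-unset, space-separated list variable;
    # SYSROOT_PREPROCESS_FUNCS may legitimately be unset for many recipes.
    for f in (d.getVar('SYSROOT_PREPROCESS_FUNCS') or '').split():
        bb.build.exec_func(f, d)

Without the fallback, an unset variable would surface as AttributeError: 'NoneType' object has no attribute 'split'.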
diff --git a/meta/classes/syslinux.bbclass b/meta/classes/syslinux.bbclass
index 7778fd708f..d8859b35bd 100644
--- a/meta/classes/syslinux.bbclass
+++ b/meta/classes/syslinux.bbclass
@@ -84,12 +84,12 @@ python build_syslinux_cfg () {
     import copy
     import sys
 
-    workdir = d.getVar('WORKDIR', True)
+    workdir = d.getVar('WORKDIR')
     if not workdir:
         bb.error("WORKDIR not defined, unable to package")
         return
 
-    labels = d.getVar('LABELS', True)
+    labels = d.getVar('LABELS')
     if not labels:
         bb.debug(1, "LABELS not defined, nothing to do")
         return
@@ -98,7 +98,7 @@ python build_syslinux_cfg () {
         bb.debug(1, "No labels, nothing to do")
         return
 
-    cfile = d.getVar('SYSLINUX_CFG', True)
+    cfile = d.getVar('SYSLINUX_CFG')
     if not cfile:
         bb.fatal('Unable to read SYSLINUX_CFG')
 
@@ -109,39 +109,39 @@ python build_syslinux_cfg () {
 
     cfgfile.write('# Automatically created by OE\n')
 
-    opts = d.getVar('SYSLINUX_OPTS', True)
+    opts = d.getVar('SYSLINUX_OPTS')
 
     if opts:
         for opt in opts.split(';'):
             cfgfile.write('%s\n' % opt)
 
-    allowoptions = d.getVar('SYSLINUX_ALLOWOPTIONS', True)
+    allowoptions = d.getVar('SYSLINUX_ALLOWOPTIONS')
     if allowoptions:
         cfgfile.write('ALLOWOPTIONS %s\n' % allowoptions)
     else:
         cfgfile.write('ALLOWOPTIONS 1\n')
 
-    syslinux_default_console = d.getVar('SYSLINUX_DEFAULT_CONSOLE', True)
-    syslinux_serial_tty = d.getVar('SYSLINUX_SERIAL_TTY', True)
-    syslinux_serial = d.getVar('SYSLINUX_SERIAL', True)
+    syslinux_default_console = d.getVar('SYSLINUX_DEFAULT_CONSOLE')
+    syslinux_serial_tty = d.getVar('SYSLINUX_SERIAL_TTY')
+    syslinux_serial = d.getVar('SYSLINUX_SERIAL')
     if syslinux_serial:
         cfgfile.write('SERIAL %s\n' % syslinux_serial)
 
-    menu = (d.getVar('AUTO_SYSLINUXMENU', True) == "1")
+    menu = (d.getVar('AUTO_SYSLINUXMENU') == "1")
 
     if menu and syslinux_serial:
         cfgfile.write('DEFAULT Graphics console %s\n' % (labels.split()[0]))
     else:
         cfgfile.write('DEFAULT %s\n' % (labels.split()[0]))
 
-    timeout = d.getVar('SYSLINUX_TIMEOUT', True)
+    timeout = d.getVar('SYSLINUX_TIMEOUT')
 
     if timeout:
         cfgfile.write('TIMEOUT %s\n' % timeout)
     else:
         cfgfile.write('TIMEOUT 50\n')
 
-    prompt = d.getVar('SYSLINUX_PROMPT', True)
+    prompt = d.getVar('SYSLINUX_PROMPT')
     if prompt:
         cfgfile.write('PROMPT %s\n' % prompt)
     else:
@@ -151,14 +151,14 @@ python build_syslinux_cfg () {
         cfgfile.write('ui vesamenu.c32\n')
         cfgfile.write('menu title Select kernel options and boot kernel\n')
         cfgfile.write('menu tabmsg Press [Tab] to edit, [Return] to select\n')
-        splash = d.getVar('SYSLINUX_SPLASH', True)
+        splash = d.getVar('SYSLINUX_SPLASH')
         if splash:
             cfgfile.write('menu background splash.lss\n')
 
     for label in labels.split():
         localdata = bb.data.createCopy(d)
 
-        overrides = localdata.getVar('OVERRIDES', True)
+        overrides = localdata.getVar('OVERRIDES')
         if not overrides:
             bb.fatal('OVERRIDES not defined')
 
@@ -170,19 +170,19 @@ python build_syslinux_cfg () {
         btypes = [ [ "Graphics console ", syslinux_default_console ],
                    [ "Serial console ", syslinux_serial_tty ] ]
 
-        root= d.getVar('SYSLINUX_ROOT', True)
+        root= d.getVar('SYSLINUX_ROOT')
         if not root:
             bb.fatal('SYSLINUX_ROOT not defined')
 
         for btype in btypes:
             cfgfile.write('LABEL %s%s\nKERNEL /vmlinuz\n' % (btype[0], label))
 
-            exargs = d.getVar('SYSLINUX_KERNEL_ARGS', True)
+            exargs = d.getVar('SYSLINUX_KERNEL_ARGS')
             if exargs:
                 btype[1] += " " + exargs
 
-            append = localdata.getVar('APPEND', True)
-            initrd = localdata.getVar('INITRD', True)
+            append = localdata.getVar('APPEND')
+            initrd = localdata.getVar('INITRD')
 
             append = root + " " + append
             cfgfile.write('APPEND ')
diff --git a/meta/classes/systemd-boot.bbclass b/meta/classes/systemd-boot.bbclass
index 05244c7e50..60729a756c 100644
--- a/meta/classes/systemd-boot.bbclass
+++ b/meta/classes/systemd-boot.bbclass
@@ -63,8 +63,8 @@ efi_hddimg_populate() {
 }
 
 python build_efi_cfg() {
-    s = d.getVar("S", True)
-    labels = d.getVar('LABELS', True)
+    s = d.getVar("S")
+    labels = d.getVar('LABELS')
     if not labels:
         bb.debug(1, "LABELS not defined, nothing to do")
         return
@@ -73,7 +73,7 @@ python build_efi_cfg() {
         bb.debug(1, "No labels, nothing to do")
         return
 
-    cfile = d.getVar('SYSTEMD_BOOT_CFG', True)
+    cfile = d.getVar('SYSTEMD_BOOT_CFG')
     try:
         cfgfile = open(cfile, 'w')
     except OSError:
@@ -81,7 +81,7 @@ python build_efi_cfg() {
 
     cfgfile.write('# Automatically created by OE\n')
     cfgfile.write('default %s\n' % (labels.split()[0]))
-    timeout = d.getVar('SYSTEMD_BOOT_TIMEOUT', True)
+    timeout = d.getVar('SYSTEMD_BOOT_TIMEOUT')
     if timeout:
         cfgfile.write('timeout %s\n' % timeout)
     else:
@@ -91,7 +91,7 @@ python build_efi_cfg() {
     for label in labels.split():
         localdata = d.createCopy()
 
-        overrides = localdata.getVar('OVERRIDES', True)
+        overrides = localdata.getVar('OVERRIDES')
         if not overrides:
             bb.fatal('OVERRIDES not defined')
 
@@ -107,8 +107,8 @@ python build_efi_cfg() {
         entrycfg.write('title %s\n' % label)
         entrycfg.write('linux /vmlinuz\n')
 
-        append = localdata.getVar('APPEND', True)
-        initrd = localdata.getVar('INITRD', True)
+        append = localdata.getVar('APPEND')
+        initrd = localdata.getVar('INITRD')
 
         if initrd:
             entrycfg.write('initrd /initrd\n')
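Both boot-loader classes use the same per-label pattern: copy the datastore, prepend the label to OVERRIDES, then read APPEND and INITRD from the copy so each boot entry can pick up label-specific values. A hedged sketch of that pattern; the label name is illustrative:

    localdata = d.createCopy()
    overrides = localdata.getVar('OVERRIDES')
    # Prepending the label activates conditional values such as
    # APPEND_mylabel in this copy only, leaving d itself untouched.
    localdata.setVar('OVERRIDES', 'mylabel' + ':' + overrides)
    append = localdata.getVar('APPEND')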
diff --git a/meta/classes/systemd.bbclass b/meta/classes/systemd.bbclass
index 99a08a09a4..34bfefad40 100644
--- a/meta/classes/systemd.bbclass
+++ b/meta/classes/systemd.bbclass
@@ -71,12 +71,12 @@ python systemd_populate_packages() {
     def get_package_var(d, var, pkg):
         val = (d.getVar('%s_%s' % (var, pkg), True) or "").strip()
         if val == "":
-            val = (d.getVar(var, True) or "").strip()
+            val = (d.getVar(var) or "").strip()
         return val
 
     # Check if systemd-packages already included in PACKAGES
     def systemd_check_package(pkg_systemd):
-        packages = d.getVar('PACKAGES', True)
+        packages = d.getVar('PACKAGES')
         if not pkg_systemd in packages.split():
             bb.error('%s does not appear in package list, please add it' % pkg_systemd)
 
@@ -90,23 +90,23 @@ python systemd_populate_packages() {
         localdata.prependVar("OVERRIDES", pkg + ":")
         bb.data.update_data(localdata)
 
-        postinst = d.getVar('pkg_postinst_%s' % pkg, True)
+        postinst = d.getVar('pkg_postinst_%s' % pkg)
         if not postinst:
             postinst = '#!/bin/sh\n'
-        postinst += localdata.getVar('systemd_postinst', True)
+        postinst += localdata.getVar('systemd_postinst')
         d.setVar('pkg_postinst_%s' % pkg, postinst)
 
-        prerm = d.getVar('pkg_prerm_%s' % pkg, True)
+        prerm = d.getVar('pkg_prerm_%s' % pkg)
         if not prerm:
             prerm = '#!/bin/sh\n'
-        prerm += localdata.getVar('systemd_prerm', True)
+        prerm += localdata.getVar('systemd_prerm')
         d.setVar('pkg_prerm_%s' % pkg, prerm)
 
 
     # Add files to FILES_*-systemd if existent and not already done
     def systemd_append_file(pkg_systemd, file_append):
         appended = False
-        if os.path.exists(oe.path.join(d.getVar("D", True), file_append)):
+        if os.path.exists(oe.path.join(d.getVar("D"), file_append)):
             var_name = "FILES_" + pkg_systemd
             files = d.getVar(var_name, False) or ""
             if file_append not in files.split():
@@ -118,7 +118,7 @@ python systemd_populate_packages() {
     def systemd_add_files_and_parse(pkg_systemd, path, service, keys):
         # avoid infinite recursion
         if systemd_append_file(pkg_systemd, oe.path.join(path, service)):
-            fullpath = oe.path.join(d.getVar("D", True), path, service)
+            fullpath = oe.path.join(d.getVar("D"), path, service)
             if service.find('.service') != -1:
                 # for *.service add *@.service
                 service_base = service.replace('.service', '')
@@ -141,9 +141,9 @@ python systemd_populate_packages() {
 
     # Check service-files and call systemd_add_files_and_parse for each entry
     def systemd_check_services():
-        searchpaths = [oe.path.join(d.getVar("sysconfdir", True), "systemd", "system"),]
-        searchpaths.append(d.getVar("systemd_system_unitdir", True))
-        systemd_packages = d.getVar('SYSTEMD_PACKAGES', True)
+        searchpaths = [oe.path.join(d.getVar("sysconfdir"), "systemd", "system"),]
+        searchpaths.append(d.getVar("systemd_system_unitdir"))
+        systemd_packages = d.getVar('SYSTEMD_PACKAGES')
 
         keys = 'Also'
         # scan for all in SYSTEMD_SERVICE[]
@@ -158,11 +158,11 @@ python systemd_populate_packages() {
                 base = re.sub('@[^.]+.', '@.', service)
 
             for path in searchpaths:
-                if os.path.exists(oe.path.join(d.getVar("D", True), path, service)):
+                if os.path.exists(oe.path.join(d.getVar("D"), path, service)):
                     path_found = path
                     break
                 elif base is not None:
-                    if os.path.exists(oe.path.join(d.getVar("D", True), path, base)):
+                    if os.path.exists(oe.path.join(d.getVar("D"), path, base)):
                         path_found = path
                         break
 
@@ -172,10 +172,10 @@ python systemd_populate_packages() {
                 bb.fatal("SYSTEMD_SERVICE_%s value %s does not exist" % (pkg_systemd, service))
 
     # Run all modifications once when creating package
-    if os.path.exists(d.getVar("D", True)):
-        for pkg in d.getVar('SYSTEMD_PACKAGES', True).split():
+    if os.path.exists(d.getVar("D")):
+        for pkg in d.getVar('SYSTEMD_PACKAGES').split():
             systemd_check_package(pkg)
-            if d.getVar('SYSTEMD_SERVICE_' + pkg, True):
+            if d.getVar('SYSTEMD_SERVICE_' + pkg):
                 systemd_generate_package_scripts(pkg)
         systemd_check_services()
 }
@@ -185,7 +185,7 @@ PACKAGESPLITFUNCS_prepend = "systemd_populate_packages "
 python rm_systemd_unitdir (){
     import shutil
     if not bb.utils.contains('DISTRO_FEATURES', 'systemd', True, False, d):
-        systemd_unitdir = oe.path.join(d.getVar("D", True), d.getVar('systemd_unitdir', True))
+        systemd_unitdir = oe.path.join(d.getVar("D"), d.getVar('systemd_unitdir'))
         if os.path.exists(systemd_unitdir):
             shutil.rmtree(systemd_unitdir)
         systemd_libdir = os.path.dirname(systemd_unitdir)
@@ -196,12 +196,12 @@ do_install[postfuncs] += "rm_systemd_unitdir "
 
 python rm_sysvinit_initddir (){
     import shutil
-    sysv_initddir = oe.path.join(d.getVar("D", True), (d.getVar('INIT_D_DIR', True) or "/etc/init.d"))
+    sysv_initddir = oe.path.join(d.getVar("D"), (d.getVar('INIT_D_DIR') or "/etc/init.d"))
 
     if bb.utils.contains('DISTRO_FEATURES', 'systemd', True, False, d) and \
         not bb.utils.contains('DISTRO_FEATURES', 'sysvinit', True, False, d) and \
        os.path.exists(sysv_initddir):
-        systemd_system_unitdir = oe.path.join(d.getVar("D", True), d.getVar('systemd_system_unitdir', True))
+        systemd_system_unitdir = oe.path.join(d.getVar("D"), d.getVar('systemd_system_unitdir'))
 
         # If systemd_system_unitdir contains anything, delete sysv_initddir
         if (os.path.exists(systemd_system_unitdir) and os.listdir(systemd_system_unitdir)):
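One detail worth noting in the systemd hunk: d.getVar('%s_%s' % (var, pkg), True) inside get_package_var keeps its True on both sides of the diff. The mechanical rewrite only caught calls with a simple first argument, so first arguments that themselves contain parentheses or commas were presumably deferred to a later pass. Leaving them is harmless, since True now merely restates the default:

    # Both forms are equivalent under the new default; the package name
    # 'mypkg' is purely illustrative.
    val = (d.getVar('%s_%s' % ('SYSTEMD_SERVICE', 'mypkg'), True) or '').strip()
    val = (d.getVar('SYSTEMD_SERVICE_mypkg') or '').strip()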
diff --git a/meta/classes/terminal.bbclass b/meta/classes/terminal.bbclass
index cd8d124507..5db013f4dc 100644
--- a/meta/classes/terminal.bbclass
+++ b/meta/classes/terminal.bbclass
@@ -19,9 +19,9 @@ def emit_terminal_func(command, envdata, d):
     envdata.setVar(cmd_func, 'exec ' + command)
     envdata.setVarFlag(cmd_func, 'func', '1')
 
-    runfmt = d.getVar('BB_RUNFMT', True) or "run.{func}.{pid}"
+    runfmt = d.getVar('BB_RUNFMT') or "run.{func}.{pid}"
     runfile = runfmt.format(func=cmd_func, task=cmd_func, taskfunc=cmd_func, pid=os.getpid())
-    runfile = os.path.join(d.getVar('T', True), runfile)
+    runfile = os.path.join(d.getVar('T'), runfile)
     bb.utils.mkdirhier(os.path.dirname(runfile))
 
     with open(runfile, 'w') as script:
@@ -44,7 +44,7 @@ def oe_terminal(command, title, d):
         envdata.setVarFlag(v, 'export', '1')
 
     for export in oe.data.typed_value('OE_TERMINAL_EXPORTS', d):
-        value = d.getVar(export, True)
+        value = d.getVar(export)
         if value is not None:
             os.environ[export] = str(value)
             envdata.setVar(export, str(value))
@@ -60,7 +60,7 @@ def oe_terminal(command, title, d):
     for key in origbbenv:
         if key in envdata:
             continue
-        value = origbbenv.getVar(key, True)
+        value = origbbenv.getVar(key)
         if value is not None:
             os.environ[key] = str(value)
             envdata.setVar(key, str(value))
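The terminal hunks keep their explicit "is not None" checks even though the calls are shorter: getVar returns None for unset variables and os.environ only accepts strings, so unset exports have to be filtered rather than coerced. In sketch form:

    for export in oe.data.typed_value('OE_TERMINAL_EXPORTS', d):
        value = d.getVar(export)
        if value is not None:
            # Assigning None into os.environ would raise; str() guards
            # values that are not already strings.
            os.environ[export] = str(value)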
diff --git a/meta/classes/testexport.bbclass b/meta/classes/testexport.bbclass
index 5147020820..3f7b2de71d 100644
--- a/meta/classes/testexport.bbclass
+++ b/meta/classes/testexport.bbclass
@@ -49,19 +49,19 @@ def exportTests(d,tc):
     import re
     import oe.path
 
-    exportpath = d.getVar("TEST_EXPORT_DIR", True)
+    exportpath = d.getVar("TEST_EXPORT_DIR")
 
     savedata = {}
     savedata["d"] = {}
     savedata["target"] = {}
-    savedata["target"]["ip"] = tc.target.ip or d.getVar("TEST_TARGET_IP", True)
-    savedata["target"]["server_ip"] = tc.target.server_ip or d.getVar("TEST_SERVER_IP", True)
+    savedata["target"]["ip"] = tc.target.ip or d.getVar("TEST_TARGET_IP")
+    savedata["target"]["server_ip"] = tc.target.server_ip or d.getVar("TEST_SERVER_IP")
 
     keys = [ key for key in d.keys() if not key.startswith("_") and not key.startswith("BB") \
             and not key.startswith("B_pn") and not key.startswith("do_") and not d.getVarFlag(key, "func", True)]
     for key in keys:
         try:
-            savedata["d"][key] = d.getVar(key, True)
+            savedata["d"][key] = d.getVar(key)
         except bb.data_smart.ExpansionError:
             # we don't care about those anyway
             pass
@@ -71,7 +71,7 @@ def exportTests(d,tc):
         json.dump(savedata, f, skipkeys=True, indent=4, sort_keys=True)
 
     # Replace absolute path with relative in the file
-    exclude_path = os.path.join(d.getVar("COREBASE", True),'meta','lib','oeqa')
+    exclude_path = os.path.join(d.getVar("COREBASE"),'meta','lib','oeqa')
     f1 = open(json_file,'r').read()
     f2 = open(json_file,'w')
     m = f1.replace(exclude_path,'oeqa')
@@ -90,7 +90,7 @@ def exportTests(d,tc):
     bb.utils.mkdirhier(os.path.join(exportpath, "oeqa/runtime/files"))
     bb.utils.mkdirhier(os.path.join(exportpath, "oeqa/utils"))
     # copy test modules, this should cover tests in other layers too
-    bbpath = d.getVar("BBPATH", True).split(':')
+    bbpath = d.getVar("BBPATH").split(':')
     for t in tc.testslist:
         isfolder = False
         if re.search("\w+\.\w+\.test_\S+", t):
@@ -111,7 +111,7 @@ def exportTests(d,tc):
     if os.path.isfile(json_file):
         shutil.copy2(json_file, os.path.join(exportpath, "oeqa/runtime"))
     # Get meta layer
-    for layer in d.getVar("BBLAYERS", True).split():
+    for layer in d.getVar("BBLAYERS").split():
         if os.path.basename(layer) == "meta":
             meta_layer = layer
             break
@@ -130,28 +130,28 @@ def exportTests(d,tc):
                 shutil.copy2(os.path.join(root, f), os.path.join(exportpath, "oeqa/runtime/files"))
 
     # Create tar file for common parts of testexport
-    create_tarball(d, "testexport.tar.gz", d.getVar("TEST_EXPORT_DIR", True))
+    create_tarball(d, "testexport.tar.gz", d.getVar("TEST_EXPORT_DIR"))
 
     # Copy packages needed for runtime testing
-    test_pkg_dir = d.getVar("TEST_NEEDED_PACKAGES_DIR", True)
+    test_pkg_dir = d.getVar("TEST_NEEDED_PACKAGES_DIR")
     if os.listdir(test_pkg_dir):
-        export_pkg_dir = os.path.join(d.getVar("TEST_EXPORT_DIR", True), "packages")
+        export_pkg_dir = os.path.join(d.getVar("TEST_EXPORT_DIR"), "packages")
         oe.path.copytree(test_pkg_dir, export_pkg_dir)
         # Create tar file for packages needed by the DUT
-        create_tarball(d, "testexport_packages_%s.tar.gz" % d.getVar("MACHINE", True), export_pkg_dir)
+        create_tarball(d, "testexport_packages_%s.tar.gz" % d.getVar("MACHINE"), export_pkg_dir)
 
     # Copy SDK
-    if d.getVar("TEST_EXPORT_SDK_ENABLED", True) == "1":
-        sdk_deploy = d.getVar("SDK_DEPLOY", True)
-        tarball_name = "%s.sh" % d.getVar("TEST_EXPORT_SDK_NAME", True)
+    if d.getVar("TEST_EXPORT_SDK_ENABLED") == "1":
+        sdk_deploy = d.getVar("SDK_DEPLOY")
+        tarball_name = "%s.sh" % d.getVar("TEST_EXPORT_SDK_NAME")
         tarball_path = os.path.join(sdk_deploy, tarball_name)
-        export_sdk_dir = os.path.join(d.getVar("TEST_EXPORT_DIR", True),
-                                      d.getVar("TEST_EXPORT_SDK_DIR", True))
+        export_sdk_dir = os.path.join(d.getVar("TEST_EXPORT_DIR"),
                                      d.getVar("TEST_EXPORT_SDK_DIR"))
         bb.utils.mkdirhier(export_sdk_dir)
         shutil.copy2(tarball_path, export_sdk_dir)
 
         # Create tar file for the sdk
-        create_tarball(d, "testexport_sdk_%s.tar.gz" % d.getVar("SDK_ARCH", True), export_sdk_dir)
+        create_tarball(d, "testexport_sdk_%s.tar.gz" % d.getVar("SDK_ARCH"), export_sdk_dir)
 
     bb.plain("Exported tests to: %s" % exportpath)
 
@@ -161,8 +161,8 @@ def testexport_main(d):
     from oeqa.utils.dump import get_host_dumper
 
     test_create_extract_dirs(d)
-    export_dir = d.getVar("TEST_EXPORT_DIR", True)
-    bb.utils.mkdirhier(d.getVar("TEST_LOG_DIR", True))
+    export_dir = d.getVar("TEST_EXPORT_DIR")
+    bb.utils.mkdirhier(d.getVar("TEST_LOG_DIR"))
     bb.utils.remove(export_dir, recurse=True)
     bb.utils.mkdirhier(export_dir)
 
@@ -188,7 +188,7 @@ def create_tarball(d, tar_name, src_dir):
 
     import tarfile
 
-    tar_path = os.path.join(d.getVar("TEST_EXPORT_DIR", True), tar_name)
+    tar_path = os.path.join(d.getVar("TEST_EXPORT_DIR"), tar_name)
     current_dir = os.getcwd()
     src_dir = src_dir.rstrip('/')
     dir_name = os.path.dirname(src_dir)
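Note also that flag reads such as d.getVarFlag(key, "func", True) in exportTests keep their third argument: this commit rewrites getVar calls only, and the matching getVarFlag cleanup presumably lands separately. The flag form mirrors the variable form:

    # The trailing argument controls expansion of the flag value, just as
    # it used to for getVar.
    is_func = d.getVarFlag('do_build', 'func', True)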
diff --git a/meta/classes/testimage.bbclass b/meta/classes/testimage.bbclass
index 6b6781d860..770ec801c2 100644
--- a/meta/classes/testimage.bbclass
+++ b/meta/classes/testimage.bbclass
@@ -130,8 +130,8 @@ def testimage_main(d):
     from oeqa.targetcontrol import get_target_controller
     from oeqa.utils.dump import get_host_dumper
 
-    pn = d.getVar("PN", True)
-    bb.utils.mkdirhier(d.getVar("TEST_LOG_DIR", True))
+    pn = d.getVar("PN")
+    bb.utils.mkdirhier(d.getVar("TEST_LOG_DIR"))
     test_create_extract_dirs(d)
 
     # we need the host dumper in test context
@@ -176,10 +176,10 @@ def testimage_main(d):
         target.stop()
 
 def test_create_extract_dirs(d):
-    install_path = d.getVar("TEST_INSTALL_TMP_DIR", True)
-    package_path = d.getVar("TEST_PACKAGED_DIR", True)
-    extracted_path = d.getVar("TEST_EXTRACTED_DIR", True)
-    bb.utils.mkdirhier(d.getVar("TEST_LOG_DIR", True))
+    install_path = d.getVar("TEST_INSTALL_TMP_DIR")
+    package_path = d.getVar("TEST_PACKAGED_DIR")
+    extracted_path = d.getVar("TEST_EXTRACTED_DIR")
+    bb.utils.mkdirhier(d.getVar("TEST_LOG_DIR"))
     bb.utils.remove(package_path, recurse=True)
     bb.utils.mkdirhier(install_path)
     bb.utils.mkdirhier(package_path)
diff --git a/meta/classes/testsdk.bbclass b/meta/classes/testsdk.bbclass
index 06b4c5034f..063b9080a5 100644
--- a/meta/classes/testsdk.bbclass
+++ b/meta/classes/testsdk.bbclass
@@ -54,8 +54,8 @@ def testsdk_main(d):
     import subprocess
     from oeqa.oetest import SDKTestContext
 
-    pn = d.getVar("PN", True)
-    bb.utils.mkdirhier(d.getVar("TEST_LOG_DIR", True))
+    pn = d.getVar("PN")
+    bb.utils.mkdirhier(d.getVar("TEST_LOG_DIR"))
 
     tcname = d.expand("${SDK_DEPLOY}/${TOOLCHAIN_OUTPUTNAME}.sh")
     if not os.path.exists(tcname):
@@ -100,12 +100,12 @@ def testsdkext_main(d):
 
     # extensible sdk can be contaminated if native programs are
     # in PATH, i.e. use perl-native instead of eSDK one.
-    paths_to_avoid = [d.getVar('STAGING_DIR', True),
-                      d.getVar('BASE_WORKDIR', True)]
+    paths_to_avoid = [d.getVar('STAGING_DIR'),
                      d.getVar('BASE_WORKDIR')]
     os.environ['PATH'] = avoid_paths_in_environ(paths_to_avoid)
 
-    pn = d.getVar("PN", True)
-    bb.utils.mkdirhier(d.getVar("TEST_LOG_SDKEXT_DIR", True))
+    pn = d.getVar("PN")
+    bb.utils.mkdirhier(d.getVar("TEST_LOG_SDKEXT_DIR"))
 
     tcname = d.expand("${SDK_DEPLOY}/${TOOLCHAINEXT_OUTPUTNAME}.sh")
     if not os.path.exists(tcname):
diff --git a/meta/classes/tinderclient.bbclass b/meta/classes/tinderclient.bbclass
index 917b74d887..706a392d7f 100644
--- a/meta/classes/tinderclient.bbclass
+++ b/meta/classes/tinderclient.bbclass
@@ -55,22 +55,22 @@ def tinder_format_http_post(d,status,log):
 
     # the variables we will need to send on this form post
     variables = {
-        "tree" : d.getVar('TINDER_TREE', True),
-        "machine_name" : d.getVar('TINDER_MACHINE', True),
+        "tree" : d.getVar('TINDER_TREE'),
+        "machine_name" : d.getVar('TINDER_MACHINE'),
         "os" : os.uname()[0],
         "os_version" : os.uname()[2],
         "compiler" : "gcc",
-        "clobber" : d.getVar('TINDER_CLOBBER', True) or "0",
-        "srcdate" : d.getVar('SRCDATE', True),
-        "PN" : d.getVar('PN', True),
-        "PV" : d.getVar('PV', True),
-        "PR" : d.getVar('PR', True),
-        "FILE" : d.getVar('FILE', True) or "N/A",
-        "TARGETARCH" : d.getVar('TARGET_ARCH', True),
-        "TARGETFPU" : d.getVar('TARGET_FPU', True) or "Unknown",
-        "TARGETOS" : d.getVar('TARGET_OS', True) or "Unknown",
-        "MACHINE" : d.getVar('MACHINE', True) or "Unknown",
-        "DISTRO" : d.getVar('DISTRO', True) or "Unknown",
+        "clobber" : d.getVar('TINDER_CLOBBER') or "0",
+        "srcdate" : d.getVar('SRCDATE'),
+        "PN" : d.getVar('PN'),
+        "PV" : d.getVar('PV'),
+        "PR" : d.getVar('PR'),
+        "FILE" : d.getVar('FILE') or "N/A",
+        "TARGETARCH" : d.getVar('TARGET_ARCH'),
+        "TARGETFPU" : d.getVar('TARGET_FPU') or "Unknown",
+        "TARGETOS" : d.getVar('TARGET_OS') or "Unknown",
+        "MACHINE" : d.getVar('MACHINE') or "Unknown",
+        "DISTRO" : d.getVar('DISTRO') or "Unknown",
         "zecke-rocks" : "sure",
     }
 
@@ -127,7 +127,7 @@ def tinder_build_start(d):
 
     # now we will need to save the machine number
     # we will override any previous numbers
-    f = open(d.getVar('TMPDIR', True)+"/tinder-machine.id", 'w')
+    f = open(d.getVar('TMPDIR')+"/tinder-machine.id", 'w')
     f.write(report)
 
 
@@ -137,8 +137,8 @@ def tinder_send_http(d, status, _log):
     """
 
     # get the body and type
-    server = d.getVar('TINDER_HOST', True)
-    url = d.getVar('TINDER_URL', True)
+    server = d.getVar('TINDER_HOST')
+    url = d.getVar('TINDER_URL')
 
     selector = url + "/xml/build_status.pl"
 
@@ -278,7 +278,7 @@ def tinder_do_tinder_report(event):
 
     try:
         # truncate the tinder log file
-        f = open(event.data.getVar('TINDER_LOG', True), 'w')
+        f = open(event.data.getVar('TINDER_LOG'), 'w')
         f.write("")
         f.close()
     except:
@@ -287,7 +287,7 @@ def tinder_do_tinder_report(event):
     try:
         # write a status to the file. This is needed for the -k option
         # of BitBake
-        g = open(event.data.getVar('TMPDIR', True)+"/tinder-status", 'w')
+        g = open(event.data.getVar('TMPDIR')+"/tinder-status", 'w')
         g.write("")
         g.close()
     except IOError:
@@ -296,10 +296,10 @@ def tinder_do_tinder_report(event):
     # Append the Task-Log (compile,configure...) to the log file
     # we will send to the server
     if name == "TaskSucceeded" or name == "TaskFailed":
-        log_file = glob.glob("%s/log.%s.*" % (event.data.getVar('T', True), event.task))
+        log_file = glob.glob("%s/log.%s.*" % (event.data.getVar('T'), event.task))
 
         if len(log_file) != 0:
-            to_file = event.data.getVar('TINDER_LOG', True)
+            to_file = event.data.getVar('TINDER_LOG')
             log += "".join(open(log_file[0], 'r').readlines())
 
     # set the right 'HEADER'/Summary for the TinderBox
@@ -310,16 +310,16 @@ def tinder_do_tinder_report(event):
     elif name == "TaskFailed":
         log += "<--- TINDERBOX Task %s failed (FAILURE)\n" % event.task
     elif name == "PkgStarted":
-        log += "---> TINDERBOX Package %s started\n" % event.data.getVar('PF', True)
+        log += "---> TINDERBOX Package %s started\n" % event.data.getVar('PF')
     elif name == "PkgSucceeded":
-        log += "<--- TINDERBOX Package %s done (SUCCESS)\n" % event.data.getVar('PF', True)
+        log += "<--- TINDERBOX Package %s done (SUCCESS)\n" % event.data.getVar('PF')
     elif name == "PkgFailed":
-        if not event.data.getVar('TINDER_AUTOBUILD', True) == "0":
+        if not event.data.getVar('TINDER_AUTOBUILD') == "0":
             build.exec_task('do_clean', event.data)
-        log += "<--- TINDERBOX Package %s failed (FAILURE)\n" % event.data.getVar('PF', True)
+        log += "<--- TINDERBOX Package %s failed (FAILURE)\n" % event.data.getVar('PF')
         status = 200
         # remember the failure for the -k case
-        h = open(event.data.getVar('TMPDIR', True)+"/tinder-status", 'w')
+        h = open(event.data.getVar('TMPDIR')+"/tinder-status", 'w')
         h.write("200")
     elif name == "BuildCompleted":
         log += "Build Completed\n"
@@ -342,7 +342,7 @@ def tinder_do_tinder_report(event):
         log += "Error:Was Runtime: %d\n" % event.isRuntime()
         status = 200
         # remember the failure for the -k case
-        h = open(event.data.getVar('TMPDIR', True)+"/tinder-status", 'w')
+        h = open(event.data.getVar('TMPDIR')+"/tinder-status", 'w')
         h.write("200")
 
     # now post the log
@@ -360,7 +360,7 @@ python tinderclient_eventhandler() {
     if e.data is None or bb.event.getName(e) == "MsgNote":
         return
 
-    do_tinder_report = e.data.getVar('TINDER_REPORT', True)
+    do_tinder_report = e.data.getVar('TINDER_REPORT')
     if do_tinder_report and do_tinder_report == "1":
         tinder_do_tinder_report(e)
 
diff --git a/meta/classes/toaster.bbclass b/meta/classes/toaster.bbclass
index 4bddf34e9c..4ea20567a3 100644
--- a/meta/classes/toaster.bbclass
+++ b/meta/classes/toaster.bbclass
@@ -80,7 +80,7 @@ python toaster_layerinfo_dumpdata() {
         return layer_info


-    bblayers = e.data.getVar("BBLAYERS", True)
+    bblayers = e.data.getVar("BBLAYERS")

     llayerinfo = {}

@@ -119,10 +119,10 @@ python toaster_package_dumpdata() {
     """

     # No need to try and dumpdata if the recipe isn't generating packages
-    if not d.getVar('PACKAGES', True):
+    if not d.getVar('PACKAGES'):
         return

-    pkgdatadir = d.getVar('PKGDESTWORK', True)
+    pkgdatadir = d.getVar('PKGDESTWORK')
     lpkgdata = {}
     datadir = os.path.join(pkgdatadir, 'runtime')

@@ -142,7 +142,7 @@ python toaster_artifact_dumpdata() {
     """

     event_data = {
-        "TOOLCHAIN_OUTPUTNAME": d.getVar("TOOLCHAIN_OUTPUTNAME", True)
+        "TOOLCHAIN_OUTPUTNAME": d.getVar("TOOLCHAIN_OUTPUTNAME")
     }

     bb.event.fire(bb.event.MetadataEvent("SDKArtifactInfo", event_data), d)
@@ -157,9 +157,9 @@ python toaster_collect_task_stats() {
     import bb.utils
     import os

-    toaster_statlist_file = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), "toasterstatlist")
+    toaster_statlist_file = os.path.join(e.data.getVar('BUILDSTATS_BASE'), "toasterstatlist")

-    if not e.data.getVar('BUILDSTATS_BASE', True):
+    if not e.data.getVar('BUILDSTATS_BASE'):
         return # if we don't have buildstats, we cannot collect stats

     def stat_to_float(value):
@@ -246,7 +246,7 @@ python toaster_buildhistory_dump() {
     import re
     BUILDHISTORY_DIR = e.data.expand("${TOPDIR}/buildhistory")
     BUILDHISTORY_DIR_IMAGE_BASE = e.data.expand("%s/images/${MACHINE_ARCH}/${TCLIBC}/"% BUILDHISTORY_DIR)
-    pkgdata_dir = e.data.getVar("PKGDATA_DIR", True)
+    pkgdata_dir = e.data.getVar("PKGDATA_DIR")


     # scan the build targets for this build
diff --git a/meta/classes/toolchain-scripts.bbclass b/meta/classes/toolchain-scripts.bbclass
index 0e11f2d7a0..44b4e24255 100644
--- a/meta/classes/toolchain-scripts.bbclass
+++ b/meta/classes/toolchain-scripts.bbclass
@@ -139,9 +139,9 @@ toolchain_create_sdk_siteconfig[vardepsexclude] = "TOOLCHAIN_CONFIGSITE_SYSROOTC
 python __anonymous () {
     import oe.classextend
     deps = ""
-    for dep in (d.getVar('TOOLCHAIN_NEED_CONFIGSITE_CACHE', True) or "").split():
+    for dep in (d.getVar('TOOLCHAIN_NEED_CONFIGSITE_CACHE') or "").split():
         deps += " %s:do_populate_sysroot" % dep
-        for variant in (d.getVar('MULTILIB_VARIANTS', True) or "").split():
+        for variant in (d.getVar('MULTILIB_VARIANTS') or "").split():
             clsextend = oe.classextend.ClassExtender(variant, d)
             newdep = clsextend.extend_name(dep)
             deps += " %s:do_populate_sysroot" % newdep
diff --git a/meta/classes/uboot-config.bbclass b/meta/classes/uboot-config.bbclass
index 3f760f2fbe..10013b7d49 100644
--- a/meta/classes/uboot-config.bbclass
+++ b/meta/classes/uboot-config.bbclass
@@ -14,19 +14,19 @@
 UBOOT_BINARY ?= "u-boot.${UBOOT_SUFFIX}"

 python () {
-    ubootmachine = d.getVar("UBOOT_MACHINE", True)
+    ubootmachine = d.getVar("UBOOT_MACHINE")
     ubootconfigflags = d.getVarFlags('UBOOT_CONFIG')
-    ubootbinary = d.getVar('UBOOT_BINARY', True)
-    ubootbinaries = d.getVar('UBOOT_BINARIES', True)
+    ubootbinary = d.getVar('UBOOT_BINARY')
+    ubootbinaries = d.getVar('UBOOT_BINARIES')
     # The "doc" varflag is special, we don't want to see it here
     ubootconfigflags.pop('doc', None)

     if not ubootmachine and not ubootconfigflags:
-        PN = d.getVar("PN", True)
-        FILE = os.path.basename(d.getVar("FILE", True))
+        PN = d.getVar("PN")
+        FILE = os.path.basename(d.getVar("FILE"))
         bb.debug(1, "To build %s, see %s for instructions on \
                  setting up your machine config" % (PN, FILE))
-        raise bb.parse.SkipPackage("Either UBOOT_MACHINE or UBOOT_CONFIG must be set in the %s machine configuration." % d.getVar("MACHINE", True))
+        raise bb.parse.SkipPackage("Either UBOOT_MACHINE or UBOOT_CONFIG must be set in the %s machine configuration." % d.getVar("MACHINE"))

     if ubootmachine and ubootconfigflags:
         raise bb.parse.SkipPackage("You cannot use UBOOT_MACHINE and UBOOT_CONFIG at the same time.")
@@ -37,7 +37,7 @@ python () {
     if not ubootconfigflags:
         return

-    ubootconfig = (d.getVar('UBOOT_CONFIG', True) or "").split()
+    ubootconfig = (d.getVar('UBOOT_CONFIG') or "").split()
     if len(ubootconfig) > 0:
         for config in ubootconfig:
             for f, v in ubootconfigflags.items():
diff --git a/meta/classes/uboot-extlinux-config.bbclass b/meta/classes/uboot-extlinux-config.bbclass
index df91386c00..ec5fffb7bb 100644
--- a/meta/classes/uboot-extlinux-config.bbclass
+++ b/meta/classes/uboot-extlinux-config.bbclass
@@ -58,20 +58,20 @@ UBOOT_EXTLINUX_MENU_DESCRIPTION_linux ??= "${DISTRO_NAME}"
 UBOOT_EXTLINUX_CONFIG = "${B}/extlinux.conf"

 python create_extlinux_config() {
-    if d.getVar("UBOOT_EXTLINUX", True) != "1":
+    if d.getVar("UBOOT_EXTLINUX") != "1":
         return

-    if not d.getVar('WORKDIR', True):
+    if not d.getVar('WORKDIR'):
         bb.error("WORKDIR not defined, unable to package")

-    labels = d.getVar('UBOOT_EXTLINUX_LABELS', True)
+    labels = d.getVar('UBOOT_EXTLINUX_LABELS')
     if not labels:
         bb.fatal("UBOOT_EXTLINUX_LABELS not defined, nothing to do")

     if not labels.strip():
         bb.fatal("No labels, nothing to do")

-    cfile = d.getVar('UBOOT_EXTLINUX_CONFIG', True)
+    cfile = d.getVar('UBOOT_EXTLINUX_CONFIG')
     if not cfile:
         bb.fatal('Unable to read UBOOT_EXTLINUX_CONFIG')

@@ -85,34 +85,34 @@ python create_extlinux_config() {
     for label in labels.split():
         localdata = bb.data.createCopy(d)

-        overrides = localdata.getVar('OVERRIDES', True)
+        overrides = localdata.getVar('OVERRIDES')
         if not overrides:
             bb.fatal('OVERRIDES not defined')

         localdata.setVar('OVERRIDES', label + ':' + overrides)
         bb.data.update_data(localdata)

-        extlinux_console = localdata.getVar('UBOOT_EXTLINUX_CONSOLE', True)
+        extlinux_console = localdata.getVar('UBOOT_EXTLINUX_CONSOLE')

-        menu_description = localdata.getVar('UBOOT_EXTLINUX_MENU_DESCRIPTION', True)
+        menu_description = localdata.getVar('UBOOT_EXTLINUX_MENU_DESCRIPTION')
         if not menu_description:
             menu_description = label

-        root = localdata.getVar('UBOOT_EXTLINUX_ROOT', True)
+        root = localdata.getVar('UBOOT_EXTLINUX_ROOT')
         if not root:
             bb.fatal('UBOOT_EXTLINUX_ROOT not defined')

-        kernel_image = localdata.getVar('UBOOT_EXTLINUX_KERNEL_IMAGE', True)
-        fdtdir = localdata.getVar('UBOOT_EXTLINUX_FDTDIR', True)
+        kernel_image = localdata.getVar('UBOOT_EXTLINUX_KERNEL_IMAGE')
+        fdtdir = localdata.getVar('UBOOT_EXTLINUX_FDTDIR')
         if fdtdir:
             cfgfile.write('LABEL %s\n\tKERNEL %s\n\tFDTDIR %s\n' %
                           (menu_description, kernel_image, fdtdir))
         else:
             cfgfile.write('LABEL %s\n\tKERNEL %s\n' % (menu_description, kernel_image))

-        kernel_args = localdata.getVar('UBOOT_EXTLINUX_KERNEL_ARGS', True)
+        kernel_args = localdata.getVar('UBOOT_EXTLINUX_KERNEL_ARGS')

-        initrd = localdata.getVar('UBOOT_EXTLINUX_INITRD', True)
+        initrd = localdata.getVar('UBOOT_EXTLINUX_INITRD')
         if initrd:
             cfgfile.write('\tINITRD %s\n'% initrd)

diff --git a/meta/classes/uboot-sign.bbclass b/meta/classes/uboot-sign.bbclass
index cef26b19be..65a8c49935 100644
--- a/meta/classes/uboot-sign.bbclass
+++ b/meta/classes/uboot-sign.bbclass
@@ -80,9 +80,9 @@ do_concat_dtb () {
 }

 python () {
-    uboot_pn = d.getVar('PREFERRED_PROVIDER_u-boot', True) or 'u-boot'
-    if d.getVar('UBOOT_SIGN_ENABLE', True) == '1' and d.getVar('PN', True) == uboot_pn:
-        kernel_pn = d.getVar('PREFERRED_PROVIDER_virtual/kernel', True)
+    uboot_pn = d.getVar('PREFERRED_PROVIDER_u-boot') or 'u-boot'
+    if d.getVar('UBOOT_SIGN_ENABLE') == '1' and d.getVar('PN') == uboot_pn:
+        kernel_pn = d.getVar('PREFERRED_PROVIDER_virtual/kernel')

         # u-boot.dtb and u-boot-nodtb.bin are deployed _before_ do_deploy
         # Thus, do_deploy_setscene will also populate them in DEPLOY_IMAGE_DIR
diff --git a/meta/classes/uninative.bbclass b/meta/classes/uninative.bbclass
index 11cbf9be80..177af73247 100644
--- a/meta/classes/uninative.bbclass
+++ b/meta/classes/uninative.bbclass
@@ -19,11 +19,11 @@ python uninative_event_fetchloader() {
     loader isn't already present.
     """

-    chksum = d.getVarFlag("UNINATIVE_CHECKSUM", d.getVar("BUILD_ARCH", True), True)
+    chksum = d.getVarFlag("UNINATIVE_CHECKSUM", d.getVar("BUILD_ARCH"), True)
     if not chksum:
-        bb.fatal("Uninative selected but not configured correctly, please set UNINATIVE_CHECKSUM[%s]" % d.getVar("BUILD_ARCH", True))
+        bb.fatal("Uninative selected but not configured correctly, please set UNINATIVE_CHECKSUM[%s]" % d.getVar("BUILD_ARCH"))

-    loader = d.getVar("UNINATIVE_LOADER", True)
+    loader = d.getVar("UNINATIVE_LOADER")
     loaderchksum = loader + ".chksum"
     if os.path.exists(loader) and os.path.exists(loaderchksum):
         with open(loaderchksum, "r") as f:
@@ -36,13 +36,13 @@ python uninative_event_fetchloader() {
         # Save and restore cwd as Fetch.download() does a chdir()
         olddir = os.getcwd()

-        tarball = d.getVar("UNINATIVE_TARBALL", True)
-        tarballdir = os.path.join(d.getVar("UNINATIVE_DLDIR", True), chksum)
+        tarball = d.getVar("UNINATIVE_TARBALL")
+        tarballdir = os.path.join(d.getVar("UNINATIVE_DLDIR"), chksum)
         tarballpath = os.path.join(tarballdir, tarball)

         if not os.path.exists(tarballpath):
             bb.utils.mkdirhier(tarballdir)
-            if d.getVar("UNINATIVE_URL", True) == "unset":
+            if d.getVar("UNINATIVE_URL") == "unset":
                 bb.fatal("Uninative selected but not configured, please set UNINATIVE_URL")

             localdata = bb.data.createCopy(d)
@@ -85,7 +85,7 @@ python uninative_event_enable() {
 }

 def enable_uninative(d):
-    loader = d.getVar("UNINATIVE_LOADER", True)
+    loader = d.getVar("UNINATIVE_LOADER")
     if os.path.exists(loader):
         bb.debug(2, "Enabling uninative")
         d.setVar("NATIVELSBSTRING", "universal%s" % oe.utils.host_gcc_version(d))
@@ -100,7 +100,7 @@ python uninative_changeinterp () {
     if not (bb.data.inherits_class('native', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross', d)):
         return

-    sstateinst = d.getVar('SSTATE_INSTDIR', True)
+    sstateinst = d.getVar('SSTATE_INSTDIR')
     for walkroot, dirs, files in os.walk(sstateinst):
         for file in files:
             if file.endswith(".so") or ".so." in file:
@@ -121,7 +121,7 @@ python uninative_changeinterp () {

             try:
                 subprocess.check_output(("patchelf-uninative", "--set-interpreter",
-                                         d.getVar("UNINATIVE_LOADER", True), f),
+                                         d.getVar("UNINATIVE_LOADER"), f),
                                         stderr=subprocess.STDOUT)
             except subprocess.CalledProcessError as e:
                 bb.fatal("'%s' failed with exit code %d and the following output:\n%s" %
diff --git a/meta/classes/update-alternatives.bbclass b/meta/classes/update-alternatives.bbclass
index 1fdd681315..65da9dadbe 100644
--- a/meta/classes/update-alternatives.bbclass
+++ b/meta/classes/update-alternatives.bbclass
@@ -66,8 +66,8 @@ ALTERNATIVE_PRIORITY = "10"
 UPDALTVARS = "ALTERNATIVE ALTERNATIVE_LINK_NAME ALTERNATIVE_TARGET ALTERNATIVE_PRIORITY"

 def gen_updatealternativesvardeps(d):
-    pkgs = (d.getVar("PACKAGES", True) or "").split()
-    vars = (d.getVar("UPDALTVARS", True) or "").split()
+    pkgs = (d.getVar("PACKAGES") or "").split()
+    vars = (d.getVar("UPDALTVARS") or "").split()

     # First compute them for non_pkg versions
     for v in vars:
@@ -84,7 +84,7 @@ def gen_updatealternativesvardeps(d):
                 d.appendVar('%s_VARDEPS_%s' % (v,p), ' %s:%s' % (flag, d.getVarFlag('%s_%s' % (v,p), flag, False)))

 def ua_extend_depends(d):
-    if not 'virtual/update-alternatives' in d.getVar('PROVIDES', True):
+    if not 'virtual/update-alternatives' in d.getVar('PROVIDES'):
         d.appendVar('DEPENDS', ' virtual/${MLPREFIX}update-alternatives')

 python __anonymous() {
@@ -103,8 +103,8 @@ python __anonymous() {

 def gen_updatealternativesvars(d):
     ret = []
-    pkgs = (d.getVar("PACKAGES", True) or "").split()
-    vars = (d.getVar("UPDALTVARS", True) or "").split()
+    pkgs = (d.getVar("PACKAGES") or "").split()
+    vars = (d.getVar("UPDALTVARS") or "").split()

     for v in vars:
         ret.append(v + "_VARDEPS")
@@ -123,23 +123,23 @@ populate_packages[vardeps] += "${UPDALTVARS} ${@gen_updatealternativesvars(d)}"
 # place.
 python perform_packagecopy_append () {
     # Check for deprecated usage...
-    pn = d.getVar('BPN', True)
-    if d.getVar('ALTERNATIVE_LINKS', True) != None:
+    pn = d.getVar('BPN')
+    if d.getVar('ALTERNATIVE_LINKS') != None:
         bb.fatal('%s: Use of ALTERNATIVE_LINKS/ALTERNATIVE_PATH/ALTERNATIVE_NAME is no longer supported, please convert to the updated syntax, see update-alternatives.bbclass for more info.' % pn)

     # Do actual update alternatives processing
-    pkgdest = d.getVar('PKGD', True)
-    for pkg in (d.getVar('PACKAGES', True) or "").split():
+    pkgdest = d.getVar('PKGD')
+    for pkg in (d.getVar('PACKAGES') or "").split():
         # If the src == dest, we know we need to rename the dest by appending ${BPN}
         link_rename = {}
-        for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split():
+        for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg) or "").split():
             alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True)
             if not alt_link:
-                alt_link = "%s/%s" % (d.getVar('bindir', True), alt_name)
+                alt_link = "%s/%s" % (d.getVar('bindir'), alt_name)
                 d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link)

             alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True)
-            alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or d.getVar('ALTERNATIVE_TARGET', True) or alt_link
+            alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg) or d.getVar('ALTERNATIVE_TARGET') or alt_link
             # Sometimes alt_target is specified as relative to the link name.
             alt_target = os.path.join(os.path.dirname(alt_link), alt_target)

@@ -189,23 +189,23 @@ python perform_packagecopy_append () {
 PACKAGESPLITFUNCS_prepend = "populate_packages_updatealternatives "

 python populate_packages_updatealternatives () {
-    pn = d.getVar('BPN', True)
+    pn = d.getVar('BPN')

     # Do actual update alternatives processing
-    pkgdest = d.getVar('PKGD', True)
-    for pkg in (d.getVar('PACKAGES', True) or "").split():
+    pkgdest = d.getVar('PKGD')
+    for pkg in (d.getVar('PACKAGES') or "").split():
         # Create post install/removal scripts
         alt_setup_links = ""
         alt_remove_links = ""
-        for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split():
+        for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg) or "").split():
             alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True)
             alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True)
-            alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or d.getVar('ALTERNATIVE_TARGET', True) or alt_link
+            alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg) or d.getVar('ALTERNATIVE_TARGET') or alt_link
             # Sometimes alt_target is specified as relative to the link name.
             alt_target = os.path.join(os.path.dirname(alt_link), alt_target)

             alt_priority = d.getVarFlag('ALTERNATIVE_PRIORITY_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_PRIORITY', alt_name, True)
-            alt_priority = alt_priority or d.getVar('ALTERNATIVE_PRIORITY_%s' % pkg, True) or d.getVar('ALTERNATIVE_PRIORITY', True)
+            alt_priority = alt_priority or d.getVar('ALTERNATIVE_PRIORITY_%s' % pkg) or d.getVar('ALTERNATIVE_PRIORITY')

             # This shouldn't trigger, as it should have been resolved earlier!
             if alt_link == alt_target:
@@ -224,32 +224,32 @@ python populate_packages_updatealternatives () {

         if alt_setup_links:
             # RDEPENDS setup
-            provider = d.getVar('VIRTUAL-RUNTIME_update-alternatives', True)
+            provider = d.getVar('VIRTUAL-RUNTIME_update-alternatives')
             if provider:
                 #bb.note('adding runtime requirement for update-alternatives for %s' % pkg)
                 d.appendVar('RDEPENDS_%s' % pkg, ' ' + d.getVar('MLPREFIX', False) + provider)

             bb.note('adding update-alternatives calls to postinst/prerm for %s' % pkg)
             bb.note('%s' % alt_setup_links)
-            postinst = d.getVar('pkg_postinst_%s' % pkg, True) or '#!/bin/sh\n'
+            postinst = d.getVar('pkg_postinst_%s' % pkg) or '#!/bin/sh\n'
             postinst += alt_setup_links
             d.setVar('pkg_postinst_%s' % pkg, postinst)

             bb.note('%s' % alt_remove_links)
-            prerm = d.getVar('pkg_prerm_%s' % pkg, True) or '#!/bin/sh\n'
+            prerm = d.getVar('pkg_prerm_%s' % pkg) or '#!/bin/sh\n'
             prerm += alt_remove_links
             d.setVar('pkg_prerm_%s' % pkg, prerm)
 }

 python package_do_filedeps_append () {
-    pn = d.getVar('BPN', True)
-    pkgdest = d.getVar('PKGDEST', True)
+    pn = d.getVar('BPN')
+    pkgdest = d.getVar('PKGDEST')

     for pkg in packages.split():
-        for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split():
+        for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg) or "").split():
             alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True)
             alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True)
-            alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or d.getVar('ALTERNATIVE_TARGET', True) or alt_link
+            alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg) or d.getVar('ALTERNATIVE_TARGET') or alt_link

             if alt_link == alt_target:
                 bb.warn('%s: alt_link == alt_target: %s == %s' % (pn, alt_link, alt_target))
@@ -261,7 +261,7 @@ python package_do_filedeps_append () {
         # Add file provide
         trans_target = oe.package.file_translate(alt_target)
         d.appendVar('FILERPROVIDES_%s_%s' % (trans_target, pkg), " " + alt_link)
-        if not trans_target in (d.getVar('FILERPROVIDESFLIST_%s' % pkg, True) or ""):
+        if not trans_target in (d.getVar('FILERPROVIDESFLIST_%s' % pkg) or ""):
             d.appendVar('FILERPROVIDESFLIST_%s' % pkg, " " + trans_target)
 }

diff --git a/meta/classes/update-rc.d.bbclass b/meta/classes/update-rc.d.bbclass
index 2c3ef9edd1..2746c360fe 100644
--- a/meta/classes/update-rc.d.bbclass
+++ b/meta/classes/update-rc.d.bbclass
@@ -89,52 +89,52 @@ python populate_packages_updatercd () {
             return
         statement = "grep -q -w '/etc/init.d/functions' %s" % path
         if subprocess.call(statement, shell=True) == 0:
-            mlprefix = d.getVar('MLPREFIX', True) or ""
+            mlprefix = d.getVar('MLPREFIX') or ""
             d.appendVar('RDEPENDS_' + pkg, ' %sinitscripts-functions' % (mlprefix))

     def update_rcd_package(pkg):
         bb.debug(1, 'adding update-rc.d calls to preinst/postinst/prerm/postrm for %s' % pkg)

         localdata = bb.data.createCopy(d)
-        overrides = localdata.getVar("OVERRIDES", True)
+        overrides = localdata.getVar("OVERRIDES")
         localdata.setVar("OVERRIDES", "%s:%s" % (pkg, overrides))
         bb.data.update_data(localdata)

         update_rcd_auto_depend(pkg)

-        preinst = d.getVar('pkg_preinst_%s' % pkg, True)
+        preinst = d.getVar('pkg_preinst_%s' % pkg)
         if not preinst:
             preinst = '#!/bin/sh\n'
-        preinst += localdata.getVar('updatercd_preinst', True)
+        preinst += localdata.getVar('updatercd_preinst')
         d.setVar('pkg_preinst_%s' % pkg, preinst)

-        postinst = d.getVar('pkg_postinst_%s' % pkg, True)
+        postinst = d.getVar('pkg_postinst_%s' % pkg)
         if not postinst:
             postinst = '#!/bin/sh\n'
-        postinst += localdata.getVar('updatercd_postinst', True)
+        postinst += localdata.getVar('updatercd_postinst')
         d.setVar('pkg_postinst_%s' % pkg, postinst)

-        prerm = d.getVar('pkg_prerm_%s' % pkg, True)
+        prerm = d.getVar('pkg_prerm_%s' % pkg)
         if not prerm:
             prerm = '#!/bin/sh\n'
-        prerm += localdata.getVar('updatercd_prerm', True)
+        prerm += localdata.getVar('updatercd_prerm')
         d.setVar('pkg_prerm_%s' % pkg, prerm)

-        postrm = d.getVar('pkg_postrm_%s' % pkg, True)
+        postrm = d.getVar('pkg_postrm_%s' % pkg)
         if not postrm:
             postrm = '#!/bin/sh\n'
-        postrm += localdata.getVar('updatercd_postrm', True)
+        postrm += localdata.getVar('updatercd_postrm')
         d.setVar('pkg_postrm_%s' % pkg, postrm)

         d.appendVar('RRECOMMENDS_' + pkg, " ${MLPREFIX}${UPDATERCD}")

     # Check that this class isn't being inhibited (generally, by
     # systemd.bbclass) before doing any work.
-    if not d.getVar("INHIBIT_UPDATERCD_BBCLASS", True):
-        pkgs = d.getVar('INITSCRIPT_PACKAGES', True)
+    if not d.getVar("INHIBIT_UPDATERCD_BBCLASS"):
+        pkgs = d.getVar('INITSCRIPT_PACKAGES')
         if pkgs == None:
-            pkgs = d.getVar('UPDATERCPN', True)
-            packages = (d.getVar('PACKAGES', True) or "").split()
+            pkgs = d.getVar('UPDATERCPN')
+            packages = (d.getVar('PACKAGES') or "").split()
             if not pkgs in packages and packages != []:
                 pkgs = packages[0]
         for pkg in pkgs.split():
diff --git a/meta/classes/useradd-staticids.bbclass b/meta/classes/useradd-staticids.bbclass
index 4162774e9c..94bcbaa3d7 100644
--- a/meta/classes/useradd-staticids.bbclass
+++ b/meta/classes/useradd-staticids.bbclass
@@ -8,11 +8,11 @@ def update_useradd_static_config(d):

     class myArgumentParser( argparse.ArgumentParser ):
         def _print_message(self, message, file=None):
-            bb.warn("%s - %s: %s" % (d.getVar('PN', True), pkg, message))
+            bb.warn("%s - %s: %s" % (d.getVar('PN'), pkg, message))

         # This should never be called...
         def exit(self, status=0, message=None):
-            message = message or ("%s - %s: useradd.bbclass: Argument parsing exited" % (d.getVar('PN', True), pkg))
+            message = message or ("%s - %s: useradd.bbclass: Argument parsing exited" % (d.getVar('PN'), pkg))
             error(message)

         def error(self, message):
@@ -52,10 +52,10 @@ def update_useradd_static_config(d):

     def handle_missing_id(id, type, pkg):
         # For backwards compatibility we accept "1" in addition to "error"
-        if d.getVar('USERADD_ERROR_DYNAMIC', True) == 'error' or d.getVar('USERADD_ERROR_DYNAMIC', True) == '1':
-            raise NotImplementedError("%s - %s: %sname %s does not have a static ID defined. Skipping it." % (d.getVar('PN', True), pkg, type, id))
-        elif d.getVar('USERADD_ERROR_DYNAMIC', True) == 'warn':
-            bb.warn("%s - %s: %sname %s does not have a static ID defined." % (d.getVar('PN', True), pkg, type, id))
+        if d.getVar('USERADD_ERROR_DYNAMIC') == 'error' or d.getVar('USERADD_ERROR_DYNAMIC') == '1':
+            raise NotImplementedError("%s - %s: %sname %s does not have a static ID defined. Skipping it." % (d.getVar('PN'), pkg, type, id))
+        elif d.getVar('USERADD_ERROR_DYNAMIC') == 'warn':
+            bb.warn("%s - %s: %sname %s does not have a static ID defined." % (d.getVar('PN'), pkg, type, id))

     # We parse and rewrite the useradd components
     def rewrite_useradd(params):
@@ -89,8 +89,8 @@ def update_useradd_static_config(d):
         # paths are resolved via BBPATH
         def get_passwd_list(d):
             str = ""
-            bbpath = d.getVar('BBPATH', True)
-            passwd_tables = d.getVar('USERADD_UID_TABLES', True)
+            bbpath = d.getVar('BBPATH')
+            passwd_tables = d.getVar('USERADD_UID_TABLES')
             if not passwd_tables:
                 passwd_tables = 'files/passwd'
             for conf_file in passwd_tables.split():
@@ -106,7 +106,7 @@ def update_useradd_static_config(d):
         try:
             uaargs = parser.parse_args(re.split('''[ \t]+(?=(?:[^'"]|'[^']*'|"[^"]*")*$)''', param))
         except:
-            bb.fatal("%s: Unable to parse arguments for USERADD_PARAM_%s: '%s'" % (d.getVar('PN', True), pkg, param))
+            bb.fatal("%s: Unable to parse arguments for USERADD_PARAM_%s: '%s'" % (d.getVar('PN'), pkg, param))

         # Read all passwd files specified in USERADD_UID_TABLES or files/passwd
         # Use the standard passwd layout:
@@ -130,7 +130,7 @@ def update_useradd_static_config(d):
             field = users[uaargs.LOGIN]

             if uaargs.uid and field[2] and (uaargs.uid != field[2]):
-                bb.warn("%s: Changing username %s's uid from (%s) to (%s), verify configuration files!" % (d.getVar('PN', True), uaargs.LOGIN, uaargs.uid, field[2]))
+                bb.warn("%s: Changing username %s's uid from (%s) to (%s), verify configuration files!" % (d.getVar('PN'), uaargs.LOGIN, uaargs.uid, field[2]))
             uaargs.uid = field[2] or uaargs.uid

             # Determine the possible groupname
@@ -158,12 +158,12 @@ def update_useradd_static_config(d):
                 # We want to add a group, but we don't know it's name... so we can't add the group...
                 # We have to assume the group has previously been added or we'll fail on the adduser...
                 # Note: specifying the actual gid is very rare in OE, usually the group name is specified.
-                bb.warn("%s: Changing gid for login %s to %s, verify configuration files!" % (d.getVar('PN', True), uaargs.LOGIN, uaargs.groupid))
+                bb.warn("%s: Changing gid for login %s to %s, verify configuration files!" % (d.getVar('PN'), uaargs.LOGIN, uaargs.groupid))

             uaargs.gid = uaargs.groupid
             uaargs.user_group = None
             if newgroup:
-                groupadd = d.getVar("GROUPADD_PARAM_%s" % pkg, True)
+                groupadd = d.getVar("GROUPADD_PARAM_%s" % pkg)
                 if groupadd:
                     d.setVar("GROUPADD_PARAM_%s" % pkg, "%s; %s" % (groupadd, newgroup))
                 else:
@@ -223,8 +223,8 @@ def update_useradd_static_config(d):
         # paths are resolved via BBPATH
         def get_group_list(d):
             str = ""
-            bbpath = d.getVar('BBPATH', True)
-            group_tables = d.getVar('USERADD_GID_TABLES', True)
+            bbpath = d.getVar('BBPATH')
+            group_tables = d.getVar('USERADD_GID_TABLES')
             if not group_tables:
                 group_tables = 'files/group'
             for conf_file in group_tables.split():
@@ -241,7 +241,7 @@ def update_useradd_static_config(d):
             # If we're processing multiple lines, we could have left over values here...
             gaargs = parser.parse_args(re.split('''[ \t]+(?=(?:[^'"]|'[^']*'|"[^"]*")*$)''', param))
         except:
-            bb.fatal("%s: Unable to parse arguments for GROUPADD_PARAM_%s: '%s'" % (d.getVar('PN', True), pkg, param))
+            bb.fatal("%s: Unable to parse arguments for GROUPADD_PARAM_%s: '%s'" % (d.getVar('PN'), pkg, param))

         # Read all group files specified in USERADD_GID_TABLES or files/group
         # Use the standard group layout:
@@ -264,7 +264,7 @@ def update_useradd_static_config(d):

             if field[2]:
                 if gaargs.gid and (gaargs.gid != field[2]):
-                    bb.warn("%s: Changing groupname %s's gid from (%s) to (%s), verify configuration files!" % (d.getVar('PN', True), gaargs.GROUP, gaargs.gid, field[2]))
+                    bb.warn("%s: Changing groupname %s's gid from (%s) to (%s), verify configuration files!" % (d.getVar('PN'), gaargs.GROUP, gaargs.gid, field[2]))
                 gaargs.gid = field[2]

             if not gaargs.gid or not gaargs.gid.isdigit():
@@ -288,32 +288,32 @@ def update_useradd_static_config(d):
     # the files listed in USERADD_UID/GID_TABLES. We need to tell bitbake
     # about that explicitly to trigger re-parsing and thus re-execution of
    # this code when the files change.
-    bbpath = d.getVar('BBPATH', True)
+    bbpath = d.getVar('BBPATH')
     for varname, default in (('USERADD_UID_TABLES', 'files/passwd'),
                              ('USERADD_GID_TABLES', 'files/group')):
-        tables = d.getVar(varname, True)
+        tables = d.getVar(varname)
         if not tables:
             tables = default
         for conf_file in tables.split():
             bb.parse.mark_dependency(d, bb.utils.which(bbpath, conf_file))

     # Load and process the users and groups, rewriting the adduser/addgroup params
-    useradd_packages = d.getVar('USERADD_PACKAGES', True)
+    useradd_packages = d.getVar('USERADD_PACKAGES')

     for pkg in useradd_packages.split():
         # Groupmems doesn't have anything we might want to change, so simply validating
         # is a bit of a waste -- only process useradd/groupadd
-        useradd_param = d.getVar('USERADD_PARAM_%s' % pkg, True)
+        useradd_param = d.getVar('USERADD_PARAM_%s' % pkg)
         if useradd_param:
             #bb.warn("Before: 'USERADD_PARAM_%s' - '%s'" % (pkg, useradd_param))
             d.setVar('USERADD_PARAM_%s' % pkg, rewrite_useradd(useradd_param))
-            #bb.warn("After: 'USERADD_PARAM_%s' - '%s'" % (pkg, d.getVar('USERADD_PARAM_%s' % pkg, True)))
+            #bb.warn("After: 'USERADD_PARAM_%s' - '%s'" % (pkg, d.getVar('USERADD_PARAM_%s' % pkg)))

-        groupadd_param = d.getVar('GROUPADD_PARAM_%s' % pkg, True)
+        groupadd_param = d.getVar('GROUPADD_PARAM_%s' % pkg)
         if groupadd_param:
             #bb.warn("Before: 'GROUPADD_PARAM_%s' - '%s'" % (pkg, groupadd_param))
             d.setVar('GROUPADD_PARAM_%s' % pkg, rewrite_groupadd(groupadd_param))
-            #bb.warn("After: 'GROUPADD_PARAM_%s' - '%s'" % (pkg, d.getVar('GROUPADD_PARAM_%s' % pkg, True)))
+            #bb.warn("After: 'GROUPADD_PARAM_%s' - '%s'" % (pkg, d.getVar('GROUPADD_PARAM_%s' % pkg)))



@@ -323,6 +323,6 @@ python __anonymous() {
     try:
         update_useradd_static_config(d)
     except NotImplementedError as f:
-        bb.debug(1, "Skipping recipe %s: %s" % (d.getVar('PN', True), f))
+        bb.debug(1, "Skipping recipe %s: %s" % (d.getVar('PN'), f))
         raise bb.parse.SkipPackage(f)
 }
diff --git a/meta/classes/useradd.bbclass b/meta/classes/useradd.bbclass
index 3cff08e00d..fd59969986 100644
--- a/meta/classes/useradd.bbclass
+++ b/meta/classes/useradd.bbclass
@@ -168,13 +168,13 @@ USERADDSETSCENEDEPS = ""

 # Recipe parse-time sanity checks
 def update_useradd_after_parse(d):
-    useradd_packages = d.getVar('USERADD_PACKAGES', True)
+    useradd_packages = d.getVar('USERADD_PACKAGES')

     if not useradd_packages:
         bb.fatal("%s inherits useradd but doesn't set USERADD_PACKAGES" % d.getVar('FILE', False))

     for pkg in useradd_packages.split():
-        if not d.getVar('USERADD_PARAM_%s' % pkg, True) and not d.getVar('GROUPADD_PARAM_%s' % pkg, True) and not d.getVar('GROUPMEMS_PARAM_%s' % pkg, True):
+        if not d.getVar('USERADD_PARAM_%s' % pkg) and not d.getVar('GROUPADD_PARAM_%s' % pkg) and not d.getVar('GROUPMEMS_PARAM_%s' % pkg):
             bb.fatal("%s inherits useradd but doesn't set USERADD_PARAM, GROUPADD_PARAM or GROUPMEMS_PARAM for package %s" % (d.getVar('FILE', False), pkg))

 python __anonymous() {
@@ -191,9 +191,9 @@ def get_all_cmd_params(d, cmd_type):
     param_type = cmd_type.upper() + "_PARAM_%s"
     params = []

-    useradd_packages = d.getVar('USERADD_PACKAGES', True) or ""
+    useradd_packages = d.getVar('USERADD_PACKAGES') or ""
     for pkg in useradd_packages.split():
-        param = d.getVar(param_type % pkg, True)
+        param = d.getVar(param_type % pkg)
         if param:
             params.append(param.rstrip(" ;"))

@@ -209,20 +209,20 @@ fakeroot python populate_packages_prepend () {
         required to execute on the target. Not doing so may cause
         useradd preinst to be invoked twice, causing unwanted warnings.
         """
-        preinst = d.getVar('pkg_preinst_%s' % pkg, True) or d.getVar('pkg_preinst', True)
+        preinst = d.getVar('pkg_preinst_%s' % pkg) or d.getVar('pkg_preinst')
         if not preinst:
             preinst = '#!/bin/sh\n'
         preinst += 'bbnote () {\n\techo "NOTE: $*"\n}\n'
         preinst += 'bbwarn () {\n\techo "WARNING: $*"\n}\n'
         preinst += 'bbfatal () {\n\techo "ERROR: $*"\n\texit 1\n}\n'
-        preinst += 'perform_groupadd () {\n%s}\n' % d.getVar('perform_groupadd', True)
-        preinst += 'perform_useradd () {\n%s}\n' % d.getVar('perform_useradd', True)
-        preinst += 'perform_groupmems () {\n%s}\n' % d.getVar('perform_groupmems', True)
-        preinst += d.getVar('useradd_preinst', True)
+        preinst += 'perform_groupadd () {\n%s}\n' % d.getVar('perform_groupadd')
+        preinst += 'perform_useradd () {\n%s}\n' % d.getVar('perform_useradd')
+        preinst += 'perform_groupmems () {\n%s}\n' % d.getVar('perform_groupmems')
+        preinst += d.getVar('useradd_preinst')
         d.setVar('pkg_preinst_%s' % pkg, preinst)

         # RDEPENDS setup
-        rdepends = d.getVar("RDEPENDS_%s" % pkg, True) or ""
+        rdepends = d.getVar("RDEPENDS_%s" % pkg) or ""
         rdepends += ' ' + d.getVar('MLPREFIX', False) + 'base-passwd'
         rdepends += ' ' + d.getVar('MLPREFIX', False) + 'shadow'
         # base-files is where the default /etc/skel is packaged
@@ -233,7 +233,7 @@ fakeroot python populate_packages_prepend () {
     # to packages specified by USERADD_PACKAGES
     if not bb.data.inherits_class('nativesdk', d) \
         and not bb.data.inherits_class('native', d):
-        useradd_packages = d.getVar('USERADD_PACKAGES', True) or ""
+        useradd_packages = d.getVar('USERADD_PACKAGES') or ""
         for pkg in useradd_packages.split():
             update_useradd_package(pkg)
 }
diff --git a/meta/classes/utility-tasks.bbclass b/meta/classes/utility-tasks.bbclass
index 7ba56e28ae..68e8217135 100644
--- a/meta/classes/utility-tasks.bbclass
+++ b/meta/classes/utility-tasks.bbclass
@@ -32,14 +32,14 @@ python do_clean() {
         bb.note("Removing " + dir)
         oe.path.remove(dir)

-    for f in (d.getVar('CLEANFUNCS', True) or '').split():
+    for f in (d.getVar('CLEANFUNCS') or '').split():
         bb.build.exec_func(f, d)
 }

 addtask checkuri
 do_checkuri[nostamp] = "1"
 python do_checkuri() {
-    src_uri = (d.getVar('SRC_URI', True) or "").split()
+    src_uri = (d.getVar('SRC_URI') or "").split()
     if len(src_uri) == 0:
         return

diff --git a/meta/classes/utils.bbclass b/meta/classes/utils.bbclass
index dbb5e4cbbc..640daed4a8 100644
--- a/meta/classes/utils.bbclass
+++ b/meta/classes/utils.bbclass
@@ -41,9 +41,9 @@ def oe_filter_out(f, str, d):

 def machine_paths(d):
     """List any existing machine specific filespath directories"""
-    machine = d.getVar("MACHINE", True)
-    filespathpkg = d.getVar("FILESPATHPKG", True).split(":")
-    for basepath in d.getVar("FILESPATHBASE", True).split(":"):
+    machine = d.getVar("MACHINE")
+    filespathpkg = d.getVar("FILESPATHPKG").split(":")
+    for basepath in d.getVar("FILESPATHBASE").split(":"):
         for pkgpath in filespathpkg:
             machinepath = os.path.join(basepath, pkgpath, machine)
             if os.path.isdir(machinepath):
@@ -52,7 +52,7 @@ def machine_paths(d):
 def is_machine_specific(d):
     """Determine whether the current recipe is machine specific"""
     machinepaths = set(machine_paths(d))
-    srcuri = d.getVar("SRC_URI", True).split()
+    srcuri = d.getVar("SRC_URI").split()
     for url in srcuri:
         fetcher = bb.fetch2.Fetch([srcuri], d)
         if url.startswith("file://"):
@@ -315,14 +315,14 @@ def explode_deps(s):

 def base_set_filespath(path, d):
     filespath = []
-    extrapaths = (d.getVar("FILESEXTRAPATHS", True) or "")
+    extrapaths = (d.getVar("FILESEXTRAPATHS") or "")
     # Remove default flag which was used for checking
     extrapaths = extrapaths.replace("__default:", "")
     # Don't prepend empty strings to the path list
     if extrapaths != "":
         path = extrapaths.split(":") + path
     # The ":" ensures we have an 'empty' override
-    overrides = (":" + (d.getVar("FILESOVERRIDES", True) or "")).split(":")
+    overrides = (":" + (d.getVar("FILESOVERRIDES") or "")).split(":")
     overrides.reverse()
     for o in overrides:
         for p in path:
@@ -333,7 +333,7 @@ def base_set_filespath(path, d):
 def extend_variants(d, var, extend, delim=':'):
     """Return a string of all bb class extend variants for the given extend"""
     variants = []
-    whole = d.getVar(var, True) or ""
+    whole = d.getVar(var) or ""
     for ext in whole.split():
         eext = ext.split(delim)
         if len(eext) > 1 and eext[0] == extend:
@@ -341,7 +341,7 @@ def extend_variants(d, var, extend, delim=':'):
     return " ".join(variants)

 def multilib_pkg_extend(d, pkg):
-    variants = (d.getVar("MULTILIB_VARIANTS", True) or "").split()
+    variants = (d.getVar("MULTILIB_VARIANTS") or "").split()
     if not variants:
         return pkg
     pkgs = pkg
@@ -352,21 +352,21 @@ def multilib_pkg_extend(d, pkg):
 def all_multilib_tune_values(d, var, unique = True, need_split = True, delim = ' '):
     """Return a string of all ${var} in all multilib tune configuration"""
     values = []
-    value = d.getVar(var, True) or ""
+    value = d.getVar(var) or ""
     if value != "":
         if need_split:
             for item in value.split(delim):
                 values.append(item)
         else:
             values.append(value)
-    variants = d.getVar("MULTILIB_VARIANTS", True) or ""
+    variants = d.getVar("MULTILIB_VARIANTS") or ""
     for item in variants.split():
         localdata = bb.data.createCopy(d)
         overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
         localdata.setVar("OVERRIDES", overrides)
         localdata.setVar("MLPREFIX", item + "-")
         bb.data.update_data(localdata)
-        value = localdata.getVar(var, True) or ""
+        value = localdata.getVar(var) or ""
         if value != "":
             if need_split:
                 for item in value.split(delim):
@@ -402,21 +402,21 @@ def all_multilib_tune_list(vars, d):
             newoverrides.append(o)
     localdata.setVar("OVERRIDES", ":".join(newoverrides))
     localdata.setVar("MLPREFIX", "")
-    origdefault = localdata.getVar("DEFAULTTUNE_MULTILIB_ORIGINAL", True)
+    origdefault = localdata.getVar("DEFAULTTUNE_MULTILIB_ORIGINAL")
     if origdefault:
         localdata.setVar("DEFAULTTUNE", origdefault)
     bb.data.update_data(localdata)
     values['ml'] = ['']
     for v in vars:
-        values[v].append(localdata.getVar(v, True))
-    variants = d.getVar("MULTILIB_VARIANTS", True) or ""
+        values[v].append(localdata.getVar(v))
+    variants = d.getVar("MULTILIB_VARIANTS") or ""
     for item in variants.split():
         localdata = bb.data.createCopy(d)
         overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
         localdata.setVar("OVERRIDES", overrides)
         localdata.setVar("MLPREFIX", item + "-")
         bb.data.update_data(localdata)
-        values[v].append(localdata.getVar(v, True))
+        values[v].append(localdata.getVar(v))
         values['ml'].append(item)
     return values

diff --git a/meta/classes/waf.bbclass b/meta/classes/waf.bbclass
index 5e55833ca4..95f524012a 100644
--- a/meta/classes/waf.bbclass
+++ b/meta/classes/waf.bbclass
@@ -2,7 +2,7 @@
 DISABLE_STATIC = ""

 def get_waf_parallel_make(d):
-    pm = d.getVar('PARALLEL_MAKE', True)
+    pm = d.getVar('PARALLEL_MAKE')
     if pm:
         # look for '-j' and throw other options (e.g. '-l') away
         # because they might have different meaning in bjam
diff --git a/meta/conf/bitbake.conf b/meta/conf/bitbake.conf
index 1472e8f847..aee9919bad 100644
--- a/meta/conf/bitbake.conf
+++ b/meta/conf/bitbake.conf
@@ -123,7 +123,7 @@ SDKUSE_NLS ??= "yes"
 TARGET_ARCH = "${TUNE_ARCH}"
 TARGET_OS = "linux${LIBCEXTENSION}${ABIEXTENSION}"
 TARGET_VENDOR = "-oe"
-TARGET_SYS = "${TARGET_ARCH}${TARGET_VENDOR}${@['-' + d.getVar('TARGET_OS', True), ''][d.getVar('TARGET_OS', True) == ('' or 'custom')]}"
+TARGET_SYS = "${TARGET_ARCH}${TARGET_VENDOR}${@['-' + d.getVar('TARGET_OS'), ''][d.getVar('TARGET_OS') == ('' or 'custom')]}"
 TARGET_PREFIX = "${TARGET_SYS}-"
 TARGET_CC_ARCH = "${TUNE_CCARGS}"
 TARGET_LD_ARCH = "${TUNE_LDARGS}"
@@ -132,7 +132,7 @@ TARGET_AS_ARCH = "${TUNE_ASARGS}"
 SDKMACHINE ??= "x86_64"
 SDK_OS = "${BUILD_OS}"
 SDK_VENDOR = "-oesdk"
-SDK_SYS = "${SDK_ARCH}${SDK_VENDOR}${@['-' + d.getVar('SDK_OS', True), ''][d.getVar('SDK_OS', True) == ('' or 'custom')]}"
+SDK_SYS = "${SDK_ARCH}${SDK_VENDOR}${@['-' + d.getVar('SDK_OS'), ''][d.getVar('SDK_OS') == ('' or 'custom')]}"
 SDK_PREFIX = "${SDK_SYS}-"
 SDK_CC_ARCH = "${BUILD_CC_ARCH}"
 SDKPKGSUFFIX = "nativesdk"
@@ -142,7 +142,7 @@ SDK_AS_ARCH = "${BUILD_AS_ARCH}"
 
 TUNE_PKGARCH ??= ""
 PACKAGE_ARCH ??= "${TUNE_PKGARCH}"
-MACHINE_ARCH = "${@[d.getVar('TUNE_PKGARCH', True), d.getVar('MACHINE', True)][bool(d.getVar('MACHINE', True))].replace('-', '_')}"
+MACHINE_ARCH = "${@[d.getVar('TUNE_PKGARCH'), d.getVar('MACHINE')][bool(d.getVar('MACHINE'))].replace('-', '_')}"
 PACKAGE_EXTRA_ARCHS ??= "${PACKAGE_EXTRA_ARCHS_tune-${DEFAULTTUNE}}"
 PACKAGE_ARCHS = "all any noarch ${PACKAGE_EXTRA_ARCHS} ${MACHINE_ARCH}"
 # MACHINE_ARCH shouldn't be included here as a variable dependency
@@ -197,24 +197,24 @@ PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0
 PR = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[2] or 'r0'}"
 PE = ""
 PF = "${PN}-${EXTENDPE}${PV}-${PR}"
-EXTENDPE = "${@['','${PE}_'][int(d.getVar('PE', True) or 0) > 0]}"
+EXTENDPE = "${@['','${PE}_'][int(d.getVar('PE') or 0) > 0]}"
 P = "${PN}-${PV}"
 
 PRAUTO = ""
-EXTENDPRAUTO = "${@['.${PRAUTO}', ''][not d.getVar('PRAUTO', True)]}"
+EXTENDPRAUTO = "${@['.${PRAUTO}', ''][not d.getVar('PRAUTO')]}"
 PRAUTOINX = "${PF}"
 
 PKGV ?= "${PV}"
 PKGR ?= "${PR}${EXTENDPRAUTO}"
-PKGE ?= "${@['','${PE}'][int(d.getVar('PE', True) or 0) > 0]}"
-EXTENDPKGEVER = "${@['','${PKGE}:'][d.getVar('PKGE', True).strip() != '']}"
+PKGE ?= "${@['','${PE}'][int(d.getVar('PE') or 0) > 0]}"
+EXTENDPKGEVER = "${@['','${PKGE}:'][d.getVar('PKGE').strip() != '']}"
 EXTENDPKGV ?= "${EXTENDPKGEVER}${PKGV}-${PKGR}"
 
 # Base package name
 # Automatically derives "foo" from "foo-native", "foo-cross" or "foo-initial"
 # otherwise it is the same as PN and P
 SPECIAL_PKGSUFFIX = "-native -cross -initial -intermediate -crosssdk -cross-canadian"
-BPN = "${@base_prune_suffix(d.getVar('PN', True), d.getVar('SPECIAL_PKGSUFFIX', True).split(), d)}"
+BPN = "${@base_prune_suffix(d.getVar('PN'), d.getVar('SPECIAL_PKGSUFFIX').split(), d)}"
 BP = "${BPN}-${PV}"
 
 # Package info.
@@ -330,7 +330,7 @@ FILESEXTRAPATHS ?= "__default:"
 ##################################################################
 
 TMPDIR ?= "${TOPDIR}/tmp"
-CACHE = "${TMPDIR}/cache${@['', '/' + str(d.getVar('MACHINE', True))][bool(d.getVar('MACHINE', True))]}${@['', '/' + str(d.getVar('SDKMACHINE', True))][bool(d.getVar('SDKMACHINE', True))]}"
+CACHE = "${TMPDIR}/cache${@['', '/' + str(d.getVar('MACHINE'))][bool(d.getVar('MACHINE'))]}${@['', '/' + str(d.getVar('SDKMACHINE'))][bool(d.getVar('SDKMACHINE'))]}"
 # The persistent cache should be shared by all builds
 PERSISTENT_DIR = "${TOPDIR}/cache"
 LOG_DIR = "${TMPDIR}/log"
@@ -455,7 +455,7 @@ export PATH
 CCACHE ??= ""
 # Disable ccache explicitly if CCACHE is null since gcc may be a symlink
 # of ccache some distributions (e.g., Fedora 17).
-export CCACHE_DISABLE ??= "${@[0,1][d.getVar('CCACHE', True) == '']}"
+export CCACHE_DISABLE ??= "${@[0,1][d.getVar('CCACHE') == '']}"
 # ccache < 3.1.10 will create CCACHE_DIR on startup even if disabled, and
 # autogen sets HOME=/dev/null so in certain situations builds can fail.
 # Explicitly export CCACHE_DIR until we can assume ccache >3.1.10 on the host.
@@ -535,7 +535,7 @@ LINKER_HASH_STYLE ??= "gnu"
 # mips does not support GNU hash style therefore we override
 LINKER_HASH_STYLE_mipsarch = "sysv"
 
-TARGET_LINK_HASH_STYLE ?= "${@['-Wl,--hash-style=gnu',''][d.getVar('LINKER_HASH_STYLE', True) != 'gnu']}"
+TARGET_LINK_HASH_STYLE ?= "${@['-Wl,--hash-style=gnu',''][d.getVar('LINKER_HASH_STYLE') != 'gnu']}"
 
 export LDFLAGS = "${TARGET_LDFLAGS}"
 export TARGET_LDFLAGS = "-Wl,-O1 ${TARGET_LINK_HASH_STYLE}"
@@ -561,7 +561,7 @@ DEBUG_FLAGS ?= "-g -feliminate-unused-debug-types ${DEBUG_PREFIX_MAP}"
 # Disabled until the option works properly -feliminate-dwarf2-dups
 FULL_OPTIMIZATION = "-O2 -pipe ${DEBUG_FLAGS}"
 DEBUG_OPTIMIZATION = "-O -fno-omit-frame-pointer ${DEBUG_FLAGS} -pipe"
-SELECTED_OPTIMIZATION = "${@d.getVar(['FULL_OPTIMIZATION', 'DEBUG_OPTIMIZATION'][d.getVar('DEBUG_BUILD', True) == '1'], True)}"
+SELECTED_OPTIMIZATION = "${@d.getVar(['FULL_OPTIMIZATION', 'DEBUG_OPTIMIZATION'][d.getVar('DEBUG_BUILD') == '1'], True)}"
 SELECTED_OPTIMIZATION[vardeps] += "FULL_OPTIMIZATION DEBUG_OPTIMIZATION"
 BUILD_OPTIMIZATION = "-O2 -pipe"
 
@@ -687,7 +687,7 @@ DISTRO_NAME ??= "OpenEmbedded"
 OVERRIDES = "${TARGET_OS}:${TRANSLATED_TARGET_ARCH}:build-${BUILD_OS}:pn-${PN}:${MACHINEOVERRIDES}:${DISTROOVERRIDES}:${CLASSOVERRIDE}:forcevariable"
 OVERRIDES[vardepsexclude] = "MACHINEOVERRIDES"
 CLASSOVERRIDE ?= "class-target"
-DISTROOVERRIDES ?= "${@d.getVar('DISTRO', True) or ''}"
+DISTROOVERRIDES ?= "${@d.getVar('DISTRO') or ''}"
 MACHINEOVERRIDES ?= "${MACHINE}"
 MACHINEOVERRIDES[vardepsexclude] = "MACHINE"
 
@@ -773,7 +773,7 @@ COMBINED_FEATURES = "${@oe.utils.set_intersect('DISTRO_FEATURES', 'MACHINE_FEATU
 COMBINED_FEATURES[vardeps] += "DISTRO_FEATURES MACHINE_FEATURES"
 
 SERIAL_CONSOLE ??= ""
-SERIAL_CONSOLES ??= "${@d.getVar('SERIAL_CONSOLE', True).replace(' ', ';')}"
+SERIAL_CONSOLES ??= "${@d.getVar('SERIAL_CONSOLE').replace(' ', ';')}"
 
 NO_RECOMMENDATIONS ?= ""
 BAD_RECOMMENDATIONS ?= ""
@@ -797,7 +797,7 @@ DISTRO[unexport] = "1"
 SHELL[unexport] = "1"
 
 # Used by canadian-cross to handle string conversions on TARGET_ARCH where needed
-TRANSLATED_TARGET_ARCH ??= "${@d.getVar('TARGET_ARCH', True).replace("_", "-")}"
+TRANSLATED_TARGET_ARCH ??= "${@d.getVar('TARGET_ARCH').replace("_", "-")}"
 
 # Complete output from bitbake
 BB_CONSOLELOG ?= "${LOG_DIR}/cooker/${MACHINE}/${DATETIME}.log"
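Several of the bitbake.conf assignments above (TARGET_SYS, MACHINE_ARCH, CACHE, EXTENDPE and friends) use inline Python that selects between two values by indexing a two-element list with a boolean, since False and True index as 0 and 1. A minimal sketch of the idiom, with illustrative values rather than a real datastore:

    # [value_if_false, value_if_true][condition] is a pre-ternary conditional idiom.
    target_os = 'linux'
    # ('' or 'custom') always evaluates to 'custom', so the test below is
    # effectively target_os == 'custom'.
    suffix = ['-' + target_os, ''][target_os == ('' or 'custom')]
    assert suffix == '-linux'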
diff --git a/meta/conf/distro/defaultsetup.conf b/meta/conf/distro/defaultsetup.conf
index aa21345a1c..1055b9bb93 100644
--- a/meta/conf/distro/defaultsetup.conf
+++ b/meta/conf/distro/defaultsetup.conf
@@ -15,7 +15,7 @@ require conf/distro/include/uninative-flags.inc
 TCLIBCAPPEND ?= "-${TCLIBC}"
 TMPDIR .= "${TCLIBCAPPEND}"
 
-CACHE = "${TMPDIR}/cache/${TCMODE}-${TCLIBC}${@['', '/' + str(d.getVar('MACHINE', True))][bool(d.getVar('MACHINE', True))]}${@['', '/' + str(d.getVar('SDKMACHINE', True))][bool(d.getVar('SDKMACHINE', True))]}"
+CACHE = "${TMPDIR}/cache/${TCMODE}-${TCLIBC}${@['', '/' + str(d.getVar('MACHINE'))][bool(d.getVar('MACHINE'))]}${@['', '/' + str(d.getVar('SDKMACHINE'))][bool(d.getVar('SDKMACHINE'))]}"
 
 USER_CLASSES ?= ""
 PACKAGE_CLASSES ?= "package_ipk"
diff --git a/meta/conf/distro/include/tclibc-glibc.inc b/meta/conf/distro/include/tclibc-glibc.inc
index 649918fd2b..ad8000f5de 100644
--- a/meta/conf/distro/include/tclibc-glibc.inc
+++ b/meta/conf/distro/include/tclibc-glibc.inc
@@ -2,7 +2,7 @@
 # glibc specific configuration
 #
 
-LIBCEXTENSION = "${@['', '-gnu'][(d.getVar('ABIEXTENSION', True) or '') != '']}"
+LIBCEXTENSION = "${@['', '-gnu'][(d.getVar('ABIEXTENSION') or '') != '']}"
 
 # Add glibc overrides to the overrides for glibc.
 LIBCOVERRIDE = ":libc-glibc"
@@ -34,7 +34,7 @@ LIBC_LOCALE_DEPENDENCIES = "\
     glibc-gconv-iso8859-15"
 
 def get_libc_locales_dependencies(d):
-    if 'libc-locales' in (d.getVar('DISTRO_FEATURES', True) or '').split() :
-        return d.getVar('LIBC_LOCALE_DEPENDENCIES', True) or ''
+    if 'libc-locales' in (d.getVar('DISTRO_FEATURES') or '').split() :
+        return d.getVar('LIBC_LOCALE_DEPENDENCIES') or ''
     else:
         return ''
diff --git a/meta/conf/machine/include/arm/arch-arm.inc b/meta/conf/machine/include/arm/arch-arm.inc
index 2e3127c799..99625d8417 100644
--- a/meta/conf/machine/include/arm/arch-arm.inc
+++ b/meta/conf/machine/include/arm/arch-arm.inc
@@ -13,4 +13,4 @@ TUNE_PKGARCH = "${ARMPKGARCH}${ARMPKGSFX_THUMB}${ARMPKGSFX_DSP}${ARMPKGSFX_EABI}
 
 ABIEXTENSION = "eabi"
 
-TARGET_FPU = "${@d.getVar('TUNE_CCARGS_MFLOAT', True) or 'soft'}"
+TARGET_FPU = "${@d.getVar('TUNE_CCARGS_MFLOAT') or 'soft'}"
diff --git a/meta/conf/machine/include/arm/arch-arm64.inc b/meta/conf/machine/include/arm/arch-arm64.inc
index 9eeffac812..5f90763f7f 100644
--- a/meta/conf/machine/include/arm/arch-arm64.inc
+++ b/meta/conf/machine/include/arm/arch-arm64.inc
@@ -28,7 +28,7 @@ TARGET_FPU_64 = ""
 TUNE_ARCH_32 = "${@bb.utils.contains('TUNE_FEATURES', 'bigendian', 'armeb', 'arm', d)}"
 TUNE_PKGARCH_32 = "${ARMPKGARCH}${ARMPKGSFX_THUMB}${ARMPKGSFX_DSP}${ARMPKGSFX_EABI}${ARMPKGSFX_ENDIAN}${ARMPKGSFX_FPU}"
 ABIEXTENSION_32 = "eabi"
-TARGET_FPU_32 = "${@d.getVar('TUNE_CCARGS_MFLOAT', True) or 'soft'}"
+TARGET_FPU_32 = "${@d.getVar('TUNE_CCARGS_MFLOAT') or 'soft'}"
 
 TUNE_ARCH = "${@bb.utils.contains('TUNE_FEATURES', 'aarch64', '${TUNE_ARCH_64}', '${TUNE_ARCH_32}' ,d)}"
 TUNE_PKGARCH = "${@bb.utils.contains('TUNE_FEATURES', 'aarch64', '${TUNE_PKGARCH_64}', '${TUNE_PKGARCH_32}' ,d)}"
diff --git a/meta/conf/machine/include/arm/feature-arm-thumb.inc b/meta/conf/machine/include/arm/feature-arm-thumb.inc
index 1faebf7c26..6d4747b21a 100644
--- a/meta/conf/machine/include/arm/feature-arm-thumb.inc
+++ b/meta/conf/machine/include/arm/feature-arm-thumb.inc
@@ -1,5 +1,5 @@
 TUNEVALID[thumb] = "Use thumb instructions instead of ARM"
-ARM_THUMB_OPT = "${@['arm', 'thumb'][d.getVar('ARM_INSTRUCTION_SET', True) == 'thumb']}"
+ARM_THUMB_OPT = "${@['arm', 'thumb'][d.getVar('ARM_INSTRUCTION_SET') == 'thumb']}"
 ARM_THUMB_SUFFIX .= "${@bb.utils.contains('TUNE_FEATURES', 'armv4', 't', '', d)}"
 ARM_THUMB_SUFFIX .= "${@bb.utils.contains('TUNE_FEATURES', 'armv5', 't', '', d)}"
 ARM_THUMB_SUFFIX .= "${@bb.utils.contains('TUNE_FEATURES', 'armv6', 't', '', d)}"
@@ -15,19 +15,19 @@ ARM_M_OPT = "${@bb.utils.contains('TUNE_FEATURES', 'arm', '${ARM_THUMB_OPT}', 't
 python () {
     if bb.utils.contains('TUNE_FEATURES', 'thumb', False, True, d):
         return
-    selected = d.getVar('ARM_INSTRUCTION_SET', True)
+    selected = d.getVar('ARM_INSTRUCTION_SET')
     if selected == None:
         return
-    used = d.getVar('ARM_M_OPT', True)
+    used = d.getVar('ARM_M_OPT')
     if selected != used:
-        pn = d.getVar('PN', True)
+        pn = d.getVar('PN')
         bb.warn("Recipe '%s' selects ARM_INSTRUCTION_SET to be '%s', but tune configuration overrides it to '%s'" % (pn, selected, used))
 }
 
 TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'thumb', ' -m${ARM_M_OPT}', '', d)}"
 
 # Add suffix from ARM_THUMB_SUFFIX only if after all this we still set ARM_M_OPT to thumb
-ARMPKGSFX_THUMB .= "${@bb.utils.contains('TUNE_FEATURES', 'thumb', '${ARM_THUMB_SUFFIX}', '', d) if d.getVar('ARM_M_OPT', True) == 'thumb' else ''}"
+ARMPKGSFX_THUMB .= "${@bb.utils.contains('TUNE_FEATURES', 'thumb', '${ARM_THUMB_SUFFIX}', '', d) if d.getVar('ARM_M_OPT') == 'thumb' else ''}"
 
 # what about armv7m devices which don't support -marm (e.g. Cortex-M3)?
 TARGET_CC_KERNEL_ARCH += "${@bb.utils.contains('TUNE_FEATURES', 'thumb', '-mno-thumb-interwork -marm', '', d)}"
diff --git a/meta/conf/machine/include/arm/feature-arm-vfp.inc b/meta/conf/machine/include/arm/feature-arm-vfp.inc
index 9ef31e70e2..667b60910a 100644
--- a/meta/conf/machine/include/arm/feature-arm-vfp.inc
+++ b/meta/conf/machine/include/arm/feature-arm-vfp.inc
@@ -5,10 +5,10 @@
 TUNEVALID[vfp] = "Enable Vector Floating Point (vfp) unit."
 TUNE_CCARGS_MFPU .= "${@bb.utils.contains('TUNE_FEATURES', 'vfp', ' vfp', '', d)}"
 
-TUNE_CCARGS .= "${@ (' -mfpu=%s ' % d.getVar('TUNE_CCARGS_MFPU', True).split()[-1]) if (d.getVar('TUNE_CCARGS_MFPU', True) != '') else ''}"
-ARMPKGSFX_FPU = "${@ ('-%s' % d.getVar('TUNE_CCARGS_MFPU', True).split()[-1].replace('vfpv3-d16', 'vfpv3d16')) if (d.getVar('TUNE_CCARGS_MFPU', True) != '') else ''}"
+TUNE_CCARGS .= "${@ (' -mfpu=%s ' % d.getVar('TUNE_CCARGS_MFPU').split()[-1]) if (d.getVar('TUNE_CCARGS_MFPU') != '') else ''}"
+ARMPKGSFX_FPU = "${@ ('-%s' % d.getVar('TUNE_CCARGS_MFPU').split()[-1].replace('vfpv3-d16', 'vfpv3d16')) if (d.getVar('TUNE_CCARGS_MFPU') != '') else ''}"
 
 TUNEVALID[callconvention-hard] = "Enable EABI hard float call convention, requires VFP."
-TUNE_CCARGS_MFLOAT = "${@ bb.utils.contains('TUNE_FEATURES', 'callconvention-hard', 'hard', 'softfp', d) if (d.getVar('TUNE_CCARGS_MFPU', True) != '') else '' }"
-TUNE_CCARGS .= "${@ ' -mfloat-abi=${TUNE_CCARGS_MFLOAT}' if (d.getVar('TUNE_CCARGS_MFLOAT', True) != '') else ''}"
-ARMPKGSFX_EABI = "${@ 'hf' if (d.getVar('TUNE_CCARGS_MFLOAT', True) == 'hard') else ''}"
+TUNE_CCARGS_MFLOAT = "${@ bb.utils.contains('TUNE_FEATURES', 'callconvention-hard', 'hard', 'softfp', d) if (d.getVar('TUNE_CCARGS_MFPU') != '') else '' }"
+TUNE_CCARGS .= "${@ ' -mfloat-abi=${TUNE_CCARGS_MFLOAT}' if (d.getVar('TUNE_CCARGS_MFLOAT') != '') else ''}"
+ARMPKGSFX_EABI = "${@ 'hf' if (d.getVar('TUNE_CCARGS_MFLOAT') == 'hard') else ''}"
diff --git a/meta/conf/machine/include/mips/feature-mips-mips16e.inc b/meta/conf/machine/include/mips/feature-mips-mips16e.inc
index 05011dec41..101d5331bc 100644
--- a/meta/conf/machine/include/mips/feature-mips-mips16e.inc
+++ b/meta/conf/machine/include/mips/feature-mips-mips16e.inc
@@ -1,8 +1,8 @@
 TUNEVALID[mips16e] = "Build target packages with MIPS16e ASE instructions"
-MIPS_MIPS16E_OPT = "${@['mno-mips16', 'mips16'][d.getVar('MIPS_INSTRUCTION_SET', True) == 'mips16e']}"
+MIPS_MIPS16E_OPT = "${@['mno-mips16', 'mips16'][d.getVar('MIPS_INSTRUCTION_SET') == 'mips16e']}"
 TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'mips16e', ' -${MIPS_MIPS16E_OPT}', '', d)}"
 
-MIPSPKGSFX_MIPS16E .= "${@bb.utils.contains('TUNE_FEATURES', 'mips16e', '-m16', '', d) if d.getVar('MIPS_MIPS16E_OPT', True) == 'mips16' else ''}"
+MIPSPKGSFX_MIPS16E .= "${@bb.utils.contains('TUNE_FEATURES', 'mips16e', '-m16', '', d) if d.getVar('MIPS_MIPS16E_OPT') == 'mips16' else ''}"
 
 # Whether to compile with code to allow interworking between the two
 # instruction sets. This allows mips16e code to be executed on a primarily
@@ -14,4 +14,4 @@ TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'mips16e', ' ${MIPS16_TUNE
 OVERRIDES .= "${@bb.utils.contains('TUNE_FEATURES', 'mips16e', ':mips16e', '', d)}"
 
 # show status (if compiling in MIPS16e mode)
-BUILDCFG_VARS += "${@['', 'MIPS_INSTRUCTION_SET'][d.getVar('MIPS_INSTRUCTION_SET', True) == 'mips16e']}"
+BUILDCFG_VARS += "${@['', 'MIPS_INSTRUCTION_SET'][d.getVar('MIPS_INSTRUCTION_SET') == 'mips16e']}"
diff --git a/meta/conf/multilib.conf b/meta/conf/multilib.conf
index 1403a034a6..ce97d5d014 100644
--- a/meta/conf/multilib.conf
+++ b/meta/conf/multilib.conf
@@ -1,5 +1,5 @@
 
-baselib = "${@d.getVar('BASE_LIB_tune-' + (d.getVar('DEFAULTTUNE', True) or 'INVALID'), True) or d.getVar('BASELIB', True)}"
+baselib = "${@d.getVar('BASE_LIB_tune-' + (d.getVar('DEFAULTTUNE') or 'INVALID'), True) or d.getVar('BASELIB')}"
 
 MULTILIB_VARIANTS = "${@extend_variants(d,'MULTILIBS','multilib')}"
 MULTILIB_SAVE_VARNAME = "DEFAULTTUNE TARGET_ARCH TARGET_SYS TARGET_VENDOR"
diff --git a/meta/lib/buildstats.py b/meta/lib/buildstats.py
index 854c38721f..c5d4c73cf5 100644
--- a/meta/lib/buildstats.py
+++ b/meta/lib/buildstats.py
@@ -8,8 +8,8 @@ import bb.event
 
 class SystemStats:
     def __init__(self, d):
-        bn = d.getVar('BUILDNAME', True)
-        bsdir = os.path.join(d.getVar('BUILDSTATS_BASE', True), bn)
+        bn = d.getVar('BUILDNAME')
+        bsdir = os.path.join(d.getVar('BUILDSTATS_BASE'), bn)
         bb.utils.mkdirhier(bsdir)
 
         self.proc_files = []
diff --git a/meta/lib/oe/classextend.py b/meta/lib/oe/classextend.py
index 4c8a00070c..d2eeaf0e5c 100644
--- a/meta/lib/oe/classextend.py
+++ b/meta/lib/oe/classextend.py
@@ -25,7 +25,7 @@ class ClassExtender(object):
             return name
 
     def map_variable(self, varname, setvar = True):
-        var = self.d.getVar(varname, True)
+        var = self.d.getVar(varname)
         if not var:
             return ""
         var = var.split()
@@ -38,7 +38,7 @@ class ClassExtender(object):
         return newdata
 
     def map_regexp_variable(self, varname, setvar = True):
-        var = self.d.getVar(varname, True)
+        var = self.d.getVar(varname)
         if not var:
             return ""
         var = var.split()
@@ -60,7 +60,7 @@ class ClassExtender(object):
             return dep
         else:
             # Do not extend for that already have multilib prefix
-            var = self.d.getVar("MULTILIB_VARIANTS", True)
+            var = self.d.getVar("MULTILIB_VARIANTS")
             if var:
                 var = var.split()
                 for v in var:
@@ -74,7 +74,7 @@ class ClassExtender(object):
             varname = varname + "_" + suffix
         orig = self.d.getVar("EXTENDPKGV", False)
         self.d.setVar("EXTENDPKGV", "EXTENDPKGV")
-        deps = self.d.getVar(varname, True)
+        deps = self.d.getVar(varname)
         if not deps:
             self.d.setVar("EXTENDPKGV", orig)
             return
@@ -87,7 +87,7 @@ class ClassExtender(object):
         self.d.setVar("EXTENDPKGV", orig)
 
     def map_packagevars(self):
-        for pkg in (self.d.getVar("PACKAGES", True).split() + [""]):
+        for pkg in (self.d.getVar("PACKAGES").split() + [""]):
             self.map_depends_variable("RDEPENDS", pkg)
             self.map_depends_variable("RRECOMMENDS", pkg)
             self.map_depends_variable("RSUGGESTS", pkg)
@@ -97,7 +97,7 @@ class ClassExtender(object):
             self.map_depends_variable("PKG", pkg)
 
     def rename_packages(self):
-        for pkg in (self.d.getVar("PACKAGES", True) or "").split():
+        for pkg in (self.d.getVar("PACKAGES") or "").split():
             if pkg.startswith(self.extname):
                 self.pkgs_mapping.append([pkg.split(self.extname + "-")[1], pkg])
                 continue
diff --git a/meta/lib/oe/copy_buildsystem.py b/meta/lib/oe/copy_buildsystem.py
index 29ac6d418f..a372904183 100644
--- a/meta/lib/oe/copy_buildsystem.py
+++ b/meta/lib/oe/copy_buildsystem.py
@@ -21,8 +21,8 @@ class BuildSystem(object):
     def __init__(self, context, d):
         self.d = d
         self.context = context
-        self.layerdirs = [os.path.abspath(pth) for pth in d.getVar('BBLAYERS', True).split()]
-        self.layers_exclude = (d.getVar('SDK_LAYERS_EXCLUDE', True) or "").split()
+        self.layerdirs = [os.path.abspath(pth) for pth in d.getVar('BBLAYERS').split()]
+        self.layers_exclude = (d.getVar('SDK_LAYERS_EXCLUDE') or "").split()
 
     def copy_bitbake_and_layers(self, destdir, workspace_name=None):
         # Copy in all metadata layers + bitbake (as repositories)
@@ -30,7 +30,7 @@ class BuildSystem(object):
         bb.utils.mkdirhier(destdir)
         layers = list(self.layerdirs)
 
-        corebase = os.path.abspath(self.d.getVar('COREBASE', True))
+        corebase = os.path.abspath(self.d.getVar('COREBASE'))
         layers.append(corebase)
 
         # Exclude layers
@@ -46,7 +46,7 @@ class BuildSystem(object):
                 extranum += 1
                 workspace_newname = '%s-%d' % (workspace_name, extranum)
 
-        corebase_files = self.d.getVar('COREBASE_FILES', True).split()
+        corebase_files = self.d.getVar('COREBASE_FILES').split()
         corebase_files = [corebase + '/' +x for x in corebase_files]
         # Make sure bitbake goes in
         bitbake_dir = bb.__file__.rsplit('/', 3)[0]
@@ -100,7 +100,7 @@ class BuildSystem(object):
                 # Drop all bbappends except the one for the image the SDK is being built for
                 # (because of externalsrc, the workspace bbappends will interfere with the
                 # locked signatures if present, and we don't need them anyway)
-                image_bbappend = os.path.splitext(os.path.basename(self.d.getVar('FILE', True)))[0] + '.bbappend'
+                image_bbappend = os.path.splitext(os.path.basename(self.d.getVar('FILE')))[0] + '.bbappend'
                 appenddir = os.path.join(layerdestpath, 'appends')
                 if os.path.isdir(appenddir):
                     for fn in os.listdir(appenddir):
@@ -208,7 +208,7 @@ def create_locked_sstate_cache(lockedsigs, input_sstate_cache, output_sstate_cac
     import shutil
     bb.note('Generating sstate-cache...')
 
-    nativelsbstring = d.getVar('NATIVELSBSTRING', True)
+    nativelsbstring = d.getVar('NATIVELSBSTRING')
     bb.process.run("gen-lockedsig-cache %s %s %s %s %s" % (lockedsigs, input_sstate_cache, output_sstate_cache, nativelsbstring, filterfile or ''))
     if fixedlsbstring and nativelsbstring != fixedlsbstring:
         nativedir = output_sstate_cache + '/' + nativelsbstring
diff --git a/meta/lib/oe/data.py b/meta/lib/oe/data.py
index ee48950a82..032f68a847 100644
--- a/meta/lib/oe/data.py
+++ b/meta/lib/oe/data.py
@@ -12,6 +12,6 @@ def typed_value(key, d):
     flags = {}
 
     try:
-        return oe.maketype.create(d.getVar(key, True) or '', var_type, **flags)
+        return oe.maketype.create(d.getVar(key) or '', var_type, **flags)
     except (TypeError, ValueError) as exc:
         bb.msg.fatal("Data", "%s: %s" % (key, str(exc)))
diff --git a/meta/lib/oe/distro_check.py b/meta/lib/oe/distro_check.py
index c666ddc257..f54f4bb67d 100644
--- a/meta/lib/oe/distro_check.py
+++ b/meta/lib/oe/distro_check.py
@@ -224,37 +224,37 @@ def compare_in_distro_packages_list(distro_check_dir, d):
     localdata = bb.data.createCopy(d)
     pkglst_dir = os.path.join(distro_check_dir, "package_lists")
     matching_distros = []
-    pn = recipe_name = d.getVar('PN', True)
+    pn = recipe_name = d.getVar('PN')
     bb.note("Checking: %s" % pn)
 
     if pn.find("-native") != -1:
         pnstripped = pn.split("-native")
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)
         recipe_name = pnstripped[0]
 
     if pn.startswith("nativesdk-"):
         pnstripped = pn.split("nativesdk-")
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped[1] + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[1] + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)
         recipe_name = pnstripped[1]
 
     if pn.find("-cross") != -1:
         pnstripped = pn.split("-cross")
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)
         recipe_name = pnstripped[0]
 
     if pn.find("-initial") != -1:
         pnstripped = pn.split("-initial")
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)
         recipe_name = pnstripped[0]
 
     bb.note("Recipe: %s" % recipe_name)
 
     distro_exceptions = dict({"OE-Core":'OE-Core', "OpenedHand":'OpenedHand', "Intel":'Intel', "Upstream":'Upstream', "Windriver":'Windriver', "OSPDT":'OSPDT Approved', "Poky":'poky'})
-    tmp = localdata.getVar('DISTRO_PN_ALIAS', True) or ""
+    tmp = localdata.getVar('DISTRO_PN_ALIAS') or ""
     for str in tmp.split():
         if str and str.find("=") == -1 and distro_exceptions[str]:
             matching_distros.append(str)
@@ -286,10 +286,10 @@ def compare_in_distro_packages_list(distro_check_dir, d):
     return matching_distros
 
 def create_log_file(d, logname):
-    logpath = d.getVar('LOG_DIR', True)
+    logpath = d.getVar('LOG_DIR')
     bb.utils.mkdirhier(logpath)
     logfn, logsuffix = os.path.splitext(logname)
-    logfile = os.path.join(logpath, "%s.%s%s" % (logfn, d.getVar('DATETIME', True), logsuffix))
+    logfile = os.path.join(logpath, "%s.%s%s" % (logfn, d.getVar('DATETIME'), logsuffix))
     if not os.path.exists(logfile):
         slogfile = os.path.join(logpath, logname)
         if os.path.exists(slogfile):
@@ -301,8 +301,8 @@ def create_log_file(d, logname):
 
 
 def save_distro_check_result(result, datetime, result_file, d):
-    pn = d.getVar('PN', True)
-    logdir = d.getVar('LOG_DIR', True)
+    pn = d.getVar('PN')
+    logdir = d.getVar('LOG_DIR')
     if not logdir:
         bb.error("LOG_DIR variable is not defined, can't write the distro_check results")
         return
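The distro_check.py hunks above repeat one pattern worth spelling out: copy the datastore, prepend a pn- override for the stripped recipe name, and re-read variables under that override. A minimal sketch, assuming a BitBake datastore d and the bb module, with "foo" standing in for a recipe name whose -native/-cross style suffix has been stripped:

    localdata = bb.data.createCopy(d)                   # work on a copy, not the global store
    overrides = "pn-" + "foo" + ":" + d.getVar('OVERRIDES')
    localdata.setVar('OVERRIDES', overrides)            # make pn-foo overrides take effect
    bb.data.update_data(localdata)                      # refresh override-derived values
    alias = localdata.getVar('DISTRO_PN_ALIAS') or ""   # read under the new override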
diff --git a/meta/lib/oe/gpg_sign.py b/meta/lib/oe/gpg_sign.py
index 38eb0cb137..dcd1990930 100644
--- a/meta/lib/oe/gpg_sign.py
+++ b/meta/lib/oe/gpg_sign.py
@@ -7,9 +7,9 @@ import oe.utils
 class LocalSigner(object):
     """Class for handling local (on the build host) signing"""
     def __init__(self, d):
-        self.gpg_bin = d.getVar('GPG_BIN', True) or \
+        self.gpg_bin = d.getVar('GPG_BIN') or \
                 bb.utils.which(os.getenv('PATH'), 'gpg')
-        self.gpg_path = d.getVar('GPG_PATH', True)
+        self.gpg_path = d.getVar('GPG_PATH')
         self.rpm_bin = bb.utils.which(os.getenv('PATH'), "rpm")
 
     def export_pubkey(self, output_file, keyid, armor=True):
diff --git a/meta/lib/oe/manifest.py b/meta/lib/oe/manifest.py
index 95f8eb2df3..6ec9b1af8b 100644
--- a/meta/lib/oe/manifest.py
+++ b/meta/lib/oe/manifest.py
@@ -59,9 +59,9 @@ class Manifest(object, metaclass=ABCMeta):
 
         if manifest_dir is None:
             if manifest_type != self.MANIFEST_TYPE_IMAGE:
-                self.manifest_dir = self.d.getVar('SDK_DIR', True)
+                self.manifest_dir = self.d.getVar('SDK_DIR')
             else:
-                self.manifest_dir = self.d.getVar('WORKDIR', True)
+                self.manifest_dir = self.d.getVar('WORKDIR')
         else:
             self.manifest_dir = manifest_dir
 
@@ -82,7 +82,7 @@ class Manifest(object, metaclass=ABCMeta):
     This will be used for testing until the class is implemented properly!
     """
     def _create_dummy_initial(self):
-        image_rootfs = self.d.getVar('IMAGE_ROOTFS', True)
+        image_rootfs = self.d.getVar('IMAGE_ROOTFS')
         pkg_list = dict()
         if image_rootfs.find("core-image-sato-sdk") > 0:
             pkg_list[self.PKG_TYPE_MUST_INSTALL] = \
@@ -195,7 +195,7 @@ class RpmManifest(Manifest):
         for pkg in pkg_list.split():
             pkg_type = self.PKG_TYPE_MUST_INSTALL
 
-            ml_variants = self.d.getVar('MULTILIB_VARIANTS', True).split()
+            ml_variants = self.d.getVar('MULTILIB_VARIANTS').split()
 
             for ml_variant in ml_variants:
                 if pkg.startswith(ml_variant + '-'):
@@ -216,13 +216,13 @@ class RpmManifest(Manifest):
 
         for var in self.var_maps[self.manifest_type]:
             if var in self.vars_to_split:
-                split_pkgs = self._split_multilib(self.d.getVar(var, True))
+                split_pkgs = self._split_multilib(self.d.getVar(var))
                 if split_pkgs is not None:
                     pkgs = dict(list(pkgs.items()) + list(split_pkgs.items()))
             else:
-                pkg_list = self.d.getVar(var, True)
+                pkg_list = self.d.getVar(var)
                 if pkg_list is not None:
-                    pkgs[self.var_maps[self.manifest_type][var]] = self.d.getVar(var, True)
+                    pkgs[self.var_maps[self.manifest_type][var]] = self.d.getVar(var)
 
         for pkg_type in pkgs:
             for pkg in pkgs[pkg_type].split():
@@ -245,7 +245,7 @@ class OpkgManifest(Manifest):
         for pkg in pkg_list.split():
             pkg_type = self.PKG_TYPE_MUST_INSTALL
 
-            ml_variants = self.d.getVar('MULTILIB_VARIANTS', True).split()
+            ml_variants = self.d.getVar('MULTILIB_VARIANTS').split()
 
             for ml_variant in ml_variants:
                 if pkg.startswith(ml_variant + '-'):
@@ -266,13 +266,13 @@ class OpkgManifest(Manifest):
 
         for var in self.var_maps[self.manifest_type]:
             if var in self.vars_to_split:
-                split_pkgs = self._split_multilib(self.d.getVar(var, True))
+                split_pkgs = self._split_multilib(self.d.getVar(var))
                 if split_pkgs is not None:
                     pkgs = dict(list(pkgs.items()) + list(split_pkgs.items()))
             else:
-                pkg_list = self.d.getVar(var, True)
+                pkg_list = self.d.getVar(var)
                 if pkg_list is not None:
-                    pkgs[self.var_maps[self.manifest_type][var]] = self.d.getVar(var, True)
+                    pkgs[self.var_maps[self.manifest_type][var]] = self.d.getVar(var)
 
         for pkg_type in pkgs:
             for pkg in pkgs[pkg_type].split():
@@ -310,7 +310,7 @@ class DpkgManifest(Manifest):
             manifest.write(self.initial_manifest_file_header)
 
             for var in self.var_maps[self.manifest_type]:
-                pkg_list = self.d.getVar(var, True)
+                pkg_list = self.d.getVar(var)
 
                 if pkg_list is None:
                     continue
@@ -332,7 +332,7 @@ def create_manifest(d, final_manifest=False, manifest_dir=None,
                     'ipk': OpkgManifest,
                     'deb': DpkgManifest}
 
-    manifest = manifest_map[d.getVar('IMAGE_PKGTYPE', True)](d, manifest_dir, manifest_type)
+    manifest = manifest_map[d.getVar('IMAGE_PKGTYPE')](d, manifest_dir, manifest_type)
 
     if final_manifest:
         manifest.create_final()
diff --git a/meta/lib/oe/package.py b/meta/lib/oe/package.py
index ae60a5843e..795389517f 100644
--- a/meta/lib/oe/package.py
+++ b/meta/lib/oe/package.py
@@ -104,7 +104,7 @@ def read_shlib_providers(d):
     import re
 
     shlib_provider = {}
-    shlibs_dirs = d.getVar('SHLIBSDIRS', True).split()
+    shlibs_dirs = d.getVar('SHLIBSDIRS').split()
     list_re = re.compile('^(.*)\.list$')
     # Go from least to most specific since the last one found wins
     for dir in reversed(shlibs_dirs):
diff --git a/meta/lib/oe/package_manager.py b/meta/lib/oe/package_manager.py
index e5e3c3b679..bb458691e3 100644
--- a/meta/lib/oe/package_manager.py
+++ b/meta/lib/oe/package_manager.py
@@ -107,16 +107,16 @@ class RpmIndexer(Indexer):
         target_os = collections.OrderedDict()
 
         if arch_var is not None and os_var is not None:
-            package_archs['default'] = self.d.getVar(arch_var, True).split()
+            package_archs['default'] = self.d.getVar(arch_var).split()
             package_archs['default'].reverse()
-            target_os['default'] = self.d.getVar(os_var, True).strip()
+            target_os['default'] = self.d.getVar(os_var).strip()
         else:
-            package_archs['default'] = self.d.getVar("PACKAGE_ARCHS", True).split()
+            package_archs['default'] = self.d.getVar("PACKAGE_ARCHS").split()
             # arch order is reversed. This ensures the -best- match is
             # listed first!
             package_archs['default'].reverse()
-            target_os['default'] = self.d.getVar("TARGET_OS", True).strip()
-        multilibs = self.d.getVar('MULTILIBS', True) or ""
+            target_os['default'] = self.d.getVar("TARGET_OS").strip()
+        multilibs = self.d.getVar('MULTILIBS') or ""
         for ext in multilibs.split():
             eext = ext.split(':')
             if len(eext) > 1 and eext[0] == 'multilib':
@@ -150,8 +150,8 @@ class RpmIndexer(Indexer):
         return (ml_prefix_list, target_os)
 
     def write_index(self):
-        sdk_pkg_archs = (self.d.getVar('SDK_PACKAGE_ARCHS', True) or "").replace('-', '_').split()
-        all_mlb_pkg_archs = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS', True) or "").replace('-', '_').split()
+        sdk_pkg_archs = (self.d.getVar('SDK_PACKAGE_ARCHS') or "").replace('-', '_').split()
+        all_mlb_pkg_archs = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS') or "").replace('-', '_').split()
 
         mlb_prefix_list = self.get_ml_prefix_and_os_list()[0]
 
@@ -165,15 +165,15 @@ class RpmIndexer(Indexer):
             archs = archs.union(set(sdk_pkg_archs))
 
         rpm_createrepo = bb.utils.which(os.getenv('PATH'), "createrepo")
-        if self.d.getVar('PACKAGE_FEED_SIGN', True) == '1':
-            signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND', True))
+        if self.d.getVar('PACKAGE_FEED_SIGN') == '1':
+            signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND'))
         else:
             signer = None
         index_cmds = []
         repomd_files = []
         rpm_dirs_found = False
         for arch in archs:
-            dbpath = os.path.join(self.d.getVar('WORKDIR', True), 'rpmdb', arch)
+            dbpath = os.path.join(self.d.getVar('WORKDIR'), 'rpmdb', arch)
             if os.path.exists(dbpath):
                 bb.utils.remove(dbpath, True)
             arch_dir = os.path.join(self.deploy_dir, arch)
@@ -197,11 +197,11 @@ class RpmIndexer(Indexer):
         # Sign repomd
         if signer:
             for repomd in repomd_files:
-                feed_sig_type = self.d.getVar('PACKAGE_FEED_GPG_SIGNATURE_TYPE', True)
+                feed_sig_type = self.d.getVar('PACKAGE_FEED_GPG_SIGNATURE_TYPE')
                 is_ascii_sig = (feed_sig_type.upper() != "BIN")
                 signer.detach_sign(repomd,
-                                   self.d.getVar('PACKAGE_FEED_GPG_NAME', True),
-                                   self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE', True),
+                                   self.d.getVar('PACKAGE_FEED_GPG_NAME'),
+                                   self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE'),
                                    armor=is_ascii_sig)
 
 
@@ -212,8 +212,8 @@ class OpkgIndexer(Indexer):
212 "MULTILIB_ARCHS"] 212 "MULTILIB_ARCHS"]
213 213
214 opkg_index_cmd = bb.utils.which(os.getenv('PATH'), "opkg-make-index") 214 opkg_index_cmd = bb.utils.which(os.getenv('PATH'), "opkg-make-index")
215 if self.d.getVar('PACKAGE_FEED_SIGN', True) == '1': 215 if self.d.getVar('PACKAGE_FEED_SIGN') == '1':
216 signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND', True)) 216 signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND'))
217 else: 217 else:
218 signer = None 218 signer = None
219 219
@@ -223,7 +223,7 @@ class OpkgIndexer(Indexer):
         index_cmds = set()
         index_sign_files = set()
         for arch_var in arch_vars:
-            archs = self.d.getVar(arch_var, True)
+            archs = self.d.getVar(arch_var)
             if archs is None:
                 continue
 
@@ -251,12 +251,12 @@ class OpkgIndexer(Indexer):
             bb.fatal('%s' % ('\n'.join(result)))
 
         if signer:
-            feed_sig_type = self.d.getVar('PACKAGE_FEED_GPG_SIGNATURE_TYPE', True)
+            feed_sig_type = self.d.getVar('PACKAGE_FEED_GPG_SIGNATURE_TYPE')
             is_ascii_sig = (feed_sig_type.upper() != "BIN")
             for f in index_sign_files:
                 signer.detach_sign(f,
-                                   self.d.getVar('PACKAGE_FEED_GPG_NAME', True),
-                                   self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE', True),
+                                   self.d.getVar('PACKAGE_FEED_GPG_NAME'),
+                                   self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE'),
                                    armor=is_ascii_sig)
 
 
@@ -290,16 +290,16 @@ class DpkgIndexer(Indexer):
 
         os.environ['APT_CONFIG'] = self.apt_conf_file
 
-        pkg_archs = self.d.getVar('PACKAGE_ARCHS', True)
+        pkg_archs = self.d.getVar('PACKAGE_ARCHS')
         if pkg_archs is not None:
             arch_list = pkg_archs.split()
-        sdk_pkg_archs = self.d.getVar('SDK_PACKAGE_ARCHS', True)
+        sdk_pkg_archs = self.d.getVar('SDK_PACKAGE_ARCHS')
         if sdk_pkg_archs is not None:
             for a in sdk_pkg_archs.split():
                 if a not in pkg_archs:
                     arch_list.append(a)
 
-        all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS', True) or "").split()
+        all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS') or "").split()
         arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in arch_list)
 
         apt_ftparchive = bb.utils.which(os.getenv('PATH'), "apt-ftparchive")
@@ -332,7 +332,7 @@ class DpkgIndexer(Indexer):
         result = oe.utils.multiprocess_exec(index_cmds, create_index)
         if result:
             bb.fatal('%s' % ('\n'.join(result)))
-        if self.d.getVar('PACKAGE_FEED_SIGN', True) == '1':
+        if self.d.getVar('PACKAGE_FEED_SIGN') == '1':
             raise NotImplementedError('Package feed signing not implementd for dpkg')
 
 
@@ -386,7 +386,7 @@ class RpmPkgsList(PkgsList):
 
             # Workaround for bug 3565. Simply look to see if we
             # know of a package with that name, if not try again!
-            filename = os.path.join(self.d.getVar('PKGDATA_DIR', True),
+            filename = os.path.join(self.d.getVar('PKGDATA_DIR'),
                                     'runtime-reverse',
                                     new_pkg)
             if os.path.exists(filename):
@@ -464,7 +464,7 @@ class OpkgPkgsList(PkgsList):
 
         self.opkg_cmd = bb.utils.which(os.getenv('PATH'), "opkg")
         self.opkg_args = "-f %s -o %s " % (config_file, rootfs_dir)
-        self.opkg_args += self.d.getVar("OPKG_ARGS", True)
+        self.opkg_args += self.d.getVar("OPKG_ARGS")
 
     def list_pkgs(self, format=None):
         cmd = "%s %s status" % (self.opkg_cmd, self.opkg_args)
@@ -512,9 +512,9 @@ class PackageManager(object, metaclass=ABCMeta):
         self.d = d
         self.deploy_dir = None
         self.deploy_lock = None
-        self.feed_uris = self.d.getVar('PACKAGE_FEED_URIS', True) or ""
-        self.feed_base_paths = self.d.getVar('PACKAGE_FEED_BASE_PATHS', True) or ""
-        self.feed_archs = self.d.getVar('PACKAGE_FEED_ARCHS', True)
+        self.feed_uris = self.d.getVar('PACKAGE_FEED_URIS') or ""
+        self.feed_base_paths = self.d.getVar('PACKAGE_FEED_BASE_PATHS') or ""
+        self.feed_archs = self.d.getVar('PACKAGE_FEED_ARCHS')
 
     """
     Update the package manager package database.
@@ -568,7 +568,7 @@ class PackageManager(object, metaclass=ABCMeta):
     def install_complementary(self, globs=None):
         # we need to write the list of installed packages to a file because the
         # oe-pkgdata-util reads it from a file
-        installed_pkgs_file = os.path.join(self.d.getVar('WORKDIR', True),
+        installed_pkgs_file = os.path.join(self.d.getVar('WORKDIR'),
                                            "installed_pkgs.txt")
         with open(installed_pkgs_file, "w+") as installed_pkgs:
             pkgs = self.list_installed()
@@ -576,10 +576,10 @@ class PackageManager(object, metaclass=ABCMeta):
             installed_pkgs.write(output)
 
         if globs is None:
-            globs = self.d.getVar('IMAGE_INSTALL_COMPLEMENTARY', True)
+            globs = self.d.getVar('IMAGE_INSTALL_COMPLEMENTARY')
             split_linguas = set()
 
-            for translation in self.d.getVar('IMAGE_LINGUAS', True).split():
+            for translation in self.d.getVar('IMAGE_LINGUAS').split():
                 split_linguas.add(translation)
                 split_linguas.add(translation.split('-')[0])
 
@@ -592,9 +592,9 @@ class PackageManager(object, metaclass=ABCMeta):
             return
 
         cmd = [bb.utils.which(os.getenv('PATH'), "oe-pkgdata-util"),
-               "-p", self.d.getVar('PKGDATA_DIR', True), "glob", installed_pkgs_file,
+               "-p", self.d.getVar('PKGDATA_DIR'), "glob", installed_pkgs_file,
                globs]
-        exclude = self.d.getVar('PACKAGE_EXCLUDE_COMPLEMENTARY', True)
+        exclude = self.d.getVar('PACKAGE_EXCLUDE_COMPLEMENTARY')
         if exclude:
             cmd.extend(['--exclude=' + '|'.join(exclude.split())])
         try:
@@ -659,7 +659,7 @@ class RpmPM(PackageManager):
         self.task_name = task_name
         self.providename = providename
         self.fullpkglist = list()
-        self.deploy_dir = self.d.getVar('DEPLOY_DIR_RPM', True)
+        self.deploy_dir = self.d.getVar('DEPLOY_DIR_RPM')
         self.etcrpm_dir = os.path.join(self.target_rootfs, "etc/rpm")
         self.install_dir_name = "oe_install"
         self.install_dir_path = os.path.join(self.target_rootfs, self.install_dir_name)
@@ -669,7 +669,7 @@ class RpmPM(PackageManager):
         # 1 = --log-level=info (includes information about executing scriptlets and their output)
         # 2 = --log-level=debug
         # 3 = --log-level=debug plus dumps of scriplet content and command invocation
-        self.debug_level = int(d.getVar('ROOTFS_RPM_DEBUG', True) or "0")
+        self.debug_level = int(d.getVar('ROOTFS_RPM_DEBUG') or "0")
         self.smart_opt = ["--log-level=%s" %
                           ("warning" if self.debug_level == 0 else
                            "info" if self.debug_level == 1 else
@@ -684,7 +684,7 @@ class RpmPM(PackageManager):
         if not os.path.exists(self.d.expand('${T}/saved')):
             bb.utils.mkdirhier(self.d.expand('${T}/saved'))
 
-        packageindex_dir = os.path.join(self.d.getVar('WORKDIR', True), 'rpms')
+        packageindex_dir = os.path.join(self.d.getVar('WORKDIR'), 'rpms')
         self.indexer = RpmIndexer(self.d, packageindex_dir)
         self.pkgs_list = RpmPkgsList(self.d, self.target_rootfs, arch_var, os_var)
 
@@ -702,7 +702,7 @@ class RpmPM(PackageManager):
         # List must be prefered to least preferred order
         default_platform_extra = list()
         platform_extra = list()
-        bbextendvariant = self.d.getVar('BBEXTENDVARIANT', True) or ""
+        bbextendvariant = self.d.getVar('BBEXTENDVARIANT') or ""
         for mlib in self.ml_os_list:
             for arch in self.ml_prefix_list[mlib]:
                 plt = arch.replace('-', '_') + '-.*-' + self.ml_os_list[mlib]
@@ -750,7 +750,7 @@ class RpmPM(PackageManager):
     Create configs for rpm and smart, and multilib is supported
     '''
     def create_configs(self):
-        target_arch = self.d.getVar('TARGET_ARCH', True)
+        target_arch = self.d.getVar('TARGET_ARCH')
         platform = '%s%s-%s' % (target_arch.replace('-', '_'),
                                 self.target_vendor,
                                 self.ml_os_list['default'])
@@ -758,7 +758,7 @@ class RpmPM(PackageManager):
         # List must be prefered to least preferred order
         default_platform_extra = list()
         platform_extra = list()
-        bbextendvariant = self.d.getVar('BBEXTENDVARIANT', True) or ""
+        bbextendvariant = self.d.getVar('BBEXTENDVARIANT') or ""
         for mlib in self.ml_os_list:
             for arch in self.ml_prefix_list[mlib]:
                 plt = arch.replace('-', '_') + '-.*-' + self.ml_os_list[mlib]
@@ -841,7 +841,7 @@ class RpmPM(PackageManager):
                 if not new_pkg:
                     # Failed to translate, package not found!
                     err_msg = '%s not found in the %s feeds (%s) in %s.' % \
-                              (pkg, mlib, " ".join(feed_archs), self.d.getVar('DEPLOY_DIR_RPM', True))
+                              (pkg, mlib, " ".join(feed_archs), self.d.getVar('DEPLOY_DIR_RPM'))
                     if not attempt_only:
                         bb.error(err_msg)
                         bb.fatal("This is often caused by an empty package declared " \
@@ -860,7 +860,7 @@ class RpmPM(PackageManager):
860 new_pkg = self._search_pkg_name_in_feeds(pkg, default_archs) 860 new_pkg = self._search_pkg_name_in_feeds(pkg, default_archs)
861 if not new_pkg: 861 if not new_pkg:
862 err_msg = '%s not found in the feeds (%s) in %s.' % \ 862 err_msg = '%s not found in the feeds (%s) in %s.' % \
863 (pkg, " ".join(default_archs), self.d.getVar('DEPLOY_DIR_RPM', True)) 863 (pkg, " ".join(default_archs), self.d.getVar('DEPLOY_DIR_RPM'))
864 if not attempt_only: 864 if not attempt_only:
865 bb.error(err_msg) 865 bb.error(err_msg)
866 bb.fatal("This is often caused by an empty package declared " \ 866 bb.fatal("This is often caused by an empty package declared " \
@@ -887,7 +887,7 @@ class RpmPM(PackageManager):
887 887
888 channel_priority = 5 888 channel_priority = 5
889 platform_dir = os.path.join(self.etcrpm_dir, "platform") 889 platform_dir = os.path.join(self.etcrpm_dir, "platform")
890 sdkos = self.d.getVar("SDK_OS", True) 890 sdkos = self.d.getVar("SDK_OS")
891 with open(platform_dir, "w+") as platform_fd: 891 with open(platform_dir, "w+") as platform_fd:
892 platform_fd.write(platform + '\n') 892 platform_fd.write(platform + '\n')
893 for pt in platform_extra: 893 for pt in platform_extra:
@@ -957,8 +957,8 @@ class RpmPM(PackageManager):
957 bb.fatal("Create rpm database failed. Command '%s' " 957 bb.fatal("Create rpm database failed. Command '%s' "
958 "returned %d:\n%s" % (' '.join(cmd), e.returncode, e.output.decode("utf-8"))) 958 "returned %d:\n%s" % (' '.join(cmd), e.returncode, e.output.decode("utf-8")))
959 # Import GPG key to RPM database of the target system 959 # Import GPG key to RPM database of the target system
960 if self.d.getVar('RPM_SIGN_PACKAGES', True) == '1': 960 if self.d.getVar('RPM_SIGN_PACKAGES') == '1':
961 pubkey_path = self.d.getVar('RPM_GPG_PUBKEY', True) 961 pubkey_path = self.d.getVar('RPM_GPG_PUBKEY')
962 cmd = [self.rpm_cmd, '--root', self.target_rootfs, '--dbpath', '/var/lib/rpm', '--import', pubkey_path] 962 cmd = [self.rpm_cmd, '--root', self.target_rootfs, '--dbpath', '/var/lib/rpm', '--import', pubkey_path]
963 try: 963 try:
964 subprocess.check_output(cmd, stderr=subprocess.STDOUT) 964 subprocess.check_output(cmd, stderr=subprocess.STDOUT)
@@ -974,10 +974,10 @@ class RpmPM(PackageManager):
974 self._invoke_smart(['config', '--set', 'rpm-root=%s' % self.target_rootfs]) 974 self._invoke_smart(['config', '--set', 'rpm-root=%s' % self.target_rootfs])
975 self._invoke_smart(['config', '--set', 'rpm-dbpath=/var/lib/rpm']) 975 self._invoke_smart(['config', '--set', 'rpm-dbpath=/var/lib/rpm'])
976 self._invoke_smart(['config', '--set', 'rpm-extra-macros._var=%s' % 976 self._invoke_smart(['config', '--set', 'rpm-extra-macros._var=%s' %
977 self.d.getVar('localstatedir', True)]) 977 self.d.getVar('localstatedir')])
978 cmd = ["config", "--set", "rpm-extra-macros._tmppath=/%s/tmp" % self.install_dir_name] 978 cmd = ["config", "--set", "rpm-extra-macros._tmppath=/%s/tmp" % self.install_dir_name]
979 979
980 prefer_color = self.d.getVar('RPM_PREFER_ELF_ARCH', True) 980 prefer_color = self.d.getVar('RPM_PREFER_ELF_ARCH')
981 if prefer_color: 981 if prefer_color:
982 if prefer_color not in ['0', '1', '2', '4']: 982 if prefer_color not in ['0', '1', '2', '4']:
983 bb.fatal("Invalid RPM_PREFER_ELF_ARCH: %s, it should be one of:\n" 983 bb.fatal("Invalid RPM_PREFER_ELF_ARCH: %s, it should be one of:\n"
@@ -985,7 +985,7 @@ class RpmPM(PackageManager):
985 "\t2: ELF64 wins\n" 985 "\t2: ELF64 wins\n"
986 "\t4: ELF64 N32 wins (mips64 or mips64el only)" % 986 "\t4: ELF64 N32 wins (mips64 or mips64el only)" %
987 prefer_color) 987 prefer_color)
988 if prefer_color == "4" and self.d.getVar("TUNE_ARCH", True) not in \ 988 if prefer_color == "4" and self.d.getVar("TUNE_ARCH") not in \
989 ['mips64', 'mips64el']: 989 ['mips64', 'mips64el']:
990 bb.fatal("RPM_PREFER_ELF_ARCH = \"4\" is for mips64 or mips64el " 990 bb.fatal("RPM_PREFER_ELF_ARCH = \"4\" is for mips64 or mips64el "
991 "only.") 991 "only.")
@@ -998,17 +998,17 @@ class RpmPM(PackageManager):
998 # Write common configuration for host and target usage 998 # Write common configuration for host and target usage
999 self._invoke_smart(['config', '--set', 'rpm-nolinktos=1']) 999 self._invoke_smart(['config', '--set', 'rpm-nolinktos=1'])
1000 self._invoke_smart(['config', '--set', 'rpm-noparentdirs=1']) 1000 self._invoke_smart(['config', '--set', 'rpm-noparentdirs=1'])
1001 check_signature = self.d.getVar('RPM_CHECK_SIGNATURES', True) 1001 check_signature = self.d.getVar('RPM_CHECK_SIGNATURES')
1002 if check_signature and check_signature.strip() == "0": 1002 if check_signature and check_signature.strip() == "0":
1003 self._invoke_smart(['config', '--set rpm-check-signatures=false']) 1003 self._invoke_smart(['config', '--set rpm-check-signatures=false'])
1004 for i in self.d.getVar('BAD_RECOMMENDATIONS', True).split(): 1004 for i in self.d.getVar('BAD_RECOMMENDATIONS').split():
1005 self._invoke_smart(['flag', '--set', 'ignore-recommends', i]) 1005 self._invoke_smart(['flag', '--set', 'ignore-recommends', i])
1006 1006
1007 # Do the following configurations here, to avoid them being 1007 # Do the following configurations here, to avoid them being
1008 # saved for field upgrade 1008 # saved for field upgrade
1009 if self.d.getVar('NO_RECOMMENDATIONS', True).strip() == "1": 1009 if self.d.getVar('NO_RECOMMENDATIONS').strip() == "1":
1010 self._invoke_smart(['config', '--set', 'ignore-all-recommends=1']) 1010 self._invoke_smart(['config', '--set', 'ignore-all-recommends=1'])
1011 pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE', True) or "" 1011 pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE') or ""
1012 for i in pkg_exclude.split(): 1012 for i in pkg_exclude.split():
1013 self._invoke_smart(['flag', '--set', 'exclude-packages', i]) 1013 self._invoke_smart(['flag', '--set', 'exclude-packages', i])
1014 1014
@@ -1019,13 +1019,13 @@ class RpmPM(PackageManager):
1019 ch_already_added = [] 1019 ch_already_added = []
1020 for canonical_arch in platform_extra: 1020 for canonical_arch in platform_extra:
1021 arch = canonical_arch.split('-')[0] 1021 arch = canonical_arch.split('-')[0]
1022 arch_channel = os.path.join(self.d.getVar('WORKDIR', True), 'rpms', arch) 1022 arch_channel = os.path.join(self.d.getVar('WORKDIR'), 'rpms', arch)
1023 oe.path.remove(arch_channel) 1023 oe.path.remove(arch_channel)
1024 deploy_arch_dir = os.path.join(self.deploy_dir, arch) 1024 deploy_arch_dir = os.path.join(self.deploy_dir, arch)
1025 if not os.path.exists(deploy_arch_dir): 1025 if not os.path.exists(deploy_arch_dir):
1026 continue 1026 continue
1027 1027
1028 lockfilename = self.d.getVar('DEPLOY_DIR_RPM', True) + "/rpm.lock" 1028 lockfilename = self.d.getVar('DEPLOY_DIR_RPM') + "/rpm.lock"
1029 lf = bb.utils.lockfile(lockfilename, False) 1029 lf = bb.utils.lockfile(lockfilename, False)
1030 oe.path.copyhardlinktree(deploy_arch_dir, arch_channel) 1030 oe.path.copyhardlinktree(deploy_arch_dir, arch_channel)
1031 bb.utils.unlockfile(lf) 1031 bb.utils.unlockfile(lf)
@@ -1096,7 +1096,7 @@ class RpmPM(PackageManager):
1096 "fi\n" 1096 "fi\n"
1097 1097
1098 intercept_dir = self.d.expand('${WORKDIR}/intercept_scripts') 1098 intercept_dir = self.d.expand('${WORKDIR}/intercept_scripts')
1099 native_root = self.d.getVar('STAGING_DIR_NATIVE', True) 1099 native_root = self.d.getVar('STAGING_DIR_NATIVE')
1100 scriptlet_content = SCRIPTLET_FORMAT % (os.environ['PATH'], 1100 scriptlet_content = SCRIPTLET_FORMAT % (os.environ['PATH'],
1101 self.target_rootfs, 1101 self.target_rootfs,
1102 intercept_dir, 1102 intercept_dir,
@@ -1170,7 +1170,7 @@ class RpmPM(PackageManager):
1170 ml_pkgs = [] 1170 ml_pkgs = []
1171 non_ml_pkgs = pkgs[:] 1171 non_ml_pkgs = pkgs[:]
1172 for pkg in pkgs: 1172 for pkg in pkgs:
1173 for mlib in (self.d.getVar("MULTILIB_VARIANTS", True) or "").split(): 1173 for mlib in (self.d.getVar("MULTILIB_VARIANTS") or "").split():
1174 if pkg.startswith(mlib + '-'): 1174 if pkg.startswith(mlib + '-'):
1175 ml_pkgs.append(pkg) 1175 ml_pkgs.append(pkg)
1176 non_ml_pkgs.remove(pkg) 1176 non_ml_pkgs.remove(pkg)
@@ -1184,7 +1184,7 @@ class RpmPM(PackageManager):
1184 # correctly. 1184 # correctly.
1185 pkgs_new = [] 1185 pkgs_new = []
1186 for pkg in non_ml_pkgs: 1186 for pkg in non_ml_pkgs:
1187 for mlib in (self.d.getVar("MULTILIB_VARIANTS", True) or "").split(): 1187 for mlib in (self.d.getVar("MULTILIB_VARIANTS") or "").split():
1188 mlib_pkg = mlib + "-" + pkg 1188 mlib_pkg = mlib + "-" + pkg
1189 if mlib_pkg in ml_pkgs: 1189 if mlib_pkg in ml_pkgs:
1190 pkgs_new.append(pkg) 1190 pkgs_new.append(pkg)
@@ -1401,7 +1401,7 @@ class RpmPM(PackageManager):
1401 1401
1402 self._invoke_smart(['config', '--set', 'rpm-nolinktos=1']) 1402 self._invoke_smart(['config', '--set', 'rpm-nolinktos=1'])
1403 self._invoke_smart(['config', '--set', 'rpm-noparentdirs=1']) 1403 self._invoke_smart(['config', '--set', 'rpm-noparentdirs=1'])
1404 for i in self.d.getVar('BAD_RECOMMENDATIONS', True).split(): 1404 for i in self.d.getVar('BAD_RECOMMENDATIONS').split():
1405 self._invoke_smart(['flag', '--set', 'ignore-recommends', i]) 1405 self._invoke_smart(['flag', '--set', 'ignore-recommends', i])
1406 self._invoke_smart(['channel', '--add', 'rpmsys', 'type=rpm-sys', '-y']) 1406 self._invoke_smart(['channel', '--add', 'rpmsys', 'type=rpm-sys', '-y'])
1407 1407
@@ -1575,13 +1575,13 @@ class OpkgPM(OpkgDpkgPM):
1575 self.pkg_archs = archs 1575 self.pkg_archs = archs
1576 self.task_name = task_name 1576 self.task_name = task_name
1577 1577
1578 self.deploy_dir = self.d.getVar("DEPLOY_DIR_IPK", True) 1578 self.deploy_dir = self.d.getVar("DEPLOY_DIR_IPK")
1579 self.deploy_lock_file = os.path.join(self.deploy_dir, "deploy.lock") 1579 self.deploy_lock_file = os.path.join(self.deploy_dir, "deploy.lock")
1580 self.opkg_cmd = bb.utils.which(os.getenv('PATH'), "opkg") 1580 self.opkg_cmd = bb.utils.which(os.getenv('PATH'), "opkg")
1581 self.opkg_args = "--volatile-cache -f %s -t %s -o %s " % (self.config_file, self.d.expand('${T}/ipktemp/') ,target_rootfs) 1581 self.opkg_args = "--volatile-cache -f %s -t %s -o %s " % (self.config_file, self.d.expand('${T}/ipktemp/') ,target_rootfs)
1582 self.opkg_args += self.d.getVar("OPKG_ARGS", True) 1582 self.opkg_args += self.d.getVar("OPKG_ARGS")
1583 1583
1584 opkg_lib_dir = self.d.getVar('OPKGLIBDIR', True) 1584 opkg_lib_dir = self.d.getVar('OPKGLIBDIR')
1585 if opkg_lib_dir[0] == "/": 1585 if opkg_lib_dir[0] == "/":
1586 opkg_lib_dir = opkg_lib_dir[1:] 1586 opkg_lib_dir = opkg_lib_dir[1:]
1587 1587
@@ -1593,7 +1593,7 @@ class OpkgPM(OpkgDpkgPM):
1593 if not os.path.exists(self.d.expand('${T}/saved')): 1593 if not os.path.exists(self.d.expand('${T}/saved')):
1594 bb.utils.mkdirhier(self.d.expand('${T}/saved')) 1594 bb.utils.mkdirhier(self.d.expand('${T}/saved'))
1595 1595
1596 self.from_feeds = (self.d.getVar('BUILD_IMAGES_FROM_FEEDS', True) or "") == "1" 1596 self.from_feeds = (self.d.getVar('BUILD_IMAGES_FROM_FEEDS') or "") == "1"
1597 if self.from_feeds: 1597 if self.from_feeds:
1598 self._create_custom_config() 1598 self._create_custom_config()
1599 else: 1599 else:
@@ -1638,7 +1638,7 @@ class OpkgPM(OpkgDpkgPM):
1638 config_file.write("arch %s %d\n" % (arch, priority)) 1638 config_file.write("arch %s %d\n" % (arch, priority))
1639 priority += 5 1639 priority += 5
1640 1640
1641 for line in (self.d.getVar('IPK_FEED_URIS', True) or "").split(): 1641 for line in (self.d.getVar('IPK_FEED_URIS') or "").split():
1642 feed_match = re.match("^[ \t]*(.*)##([^ \t]*)[ \t]*$", line) 1642 feed_match = re.match("^[ \t]*(.*)##([^ \t]*)[ \t]*$", line)
1643 1643
1644 if feed_match is not None: 1644 if feed_match is not None:
@@ -1655,17 +1655,17 @@ class OpkgPM(OpkgDpkgPM):
1655 specified as compatible for the current machine. 1655 specified as compatible for the current machine.
1656 NOTE: Development-helper feature, NOT a full-fledged feed. 1656 NOTE: Development-helper feature, NOT a full-fledged feed.
1657 """ 1657 """
1658 if (self.d.getVar('FEED_DEPLOYDIR_BASE_URI', True) or "") != "": 1658 if (self.d.getVar('FEED_DEPLOYDIR_BASE_URI') or "") != "":
1659 for arch in self.pkg_archs.split(): 1659 for arch in self.pkg_archs.split():
1660 cfg_file_name = os.path.join(self.target_rootfs, 1660 cfg_file_name = os.path.join(self.target_rootfs,
1661 self.d.getVar("sysconfdir", True), 1661 self.d.getVar("sysconfdir"),
1662 "opkg", 1662 "opkg",
1663 "local-%s-feed.conf" % arch) 1663 "local-%s-feed.conf" % arch)
1664 1664
1665 with open(cfg_file_name, "w+") as cfg_file: 1665 with open(cfg_file_name, "w+") as cfg_file:
1666 cfg_file.write("src/gz local-%s %s/%s" % 1666 cfg_file.write("src/gz local-%s %s/%s" %
1667 (arch, 1667 (arch,
1668 self.d.getVar('FEED_DEPLOYDIR_BASE_URI', True), 1668 self.d.getVar('FEED_DEPLOYDIR_BASE_URI'),
1669 arch)) 1669 arch))
1670 1670
1671 if self.opkg_dir != '/var/lib/opkg': 1671 if self.opkg_dir != '/var/lib/opkg':
@@ -1674,8 +1674,8 @@ class OpkgPM(OpkgDpkgPM):
1674 # the default value of "/var/lib" as defined in opkg: 1674 # the default value of "/var/lib" as defined in opkg:
1675 # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_INFO_DIR "/var/lib/opkg/info" 1675 # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_INFO_DIR "/var/lib/opkg/info"
1676 # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_STATUS_FILE "/var/lib/opkg/status" 1676 # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_STATUS_FILE "/var/lib/opkg/status"
1677 cfg_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'info')) 1677 cfg_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'info'))
1678 cfg_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'status')) 1678 cfg_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'status'))
1679 1679
1680 1680
1681 def _create_config(self): 1681 def _create_config(self):
@@ -1699,8 +1699,8 @@ class OpkgPM(OpkgDpkgPM):
1699 # the default value of "/var/lib" as defined in opkg: 1699 # the default value of "/var/lib" as defined in opkg:
1700 # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_INFO_DIR "/var/lib/opkg/info" 1700 # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_INFO_DIR "/var/lib/opkg/info"
1701 # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_STATUS_FILE "/var/lib/opkg/status" 1701 # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_STATUS_FILE "/var/lib/opkg/status"
1702 config_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'info')) 1702 config_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'info'))
1703 config_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'status')) 1703 config_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'status'))
1704 1704
1705 def insert_feeds_uris(self): 1705 def insert_feeds_uris(self):
1706 if self.feed_uris == "": 1706 if self.feed_uris == "":
@@ -1755,9 +1755,9 @@ class OpkgPM(OpkgDpkgPM):
1755 os.environ['OFFLINE_ROOT'] = self.target_rootfs 1755 os.environ['OFFLINE_ROOT'] = self.target_rootfs
1756 os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs 1756 os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs
1757 os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs 1757 os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs
1758 os.environ['INTERCEPT_DIR'] = os.path.join(self.d.getVar('WORKDIR', True), 1758 os.environ['INTERCEPT_DIR'] = os.path.join(self.d.getVar('WORKDIR'),
1759 "intercept_scripts") 1759 "intercept_scripts")
1760 os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE', True) 1760 os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE')
1761 1761
1762 try: 1762 try:
1763 bb.note("Installing the following packages: %s" % ' '.join(pkgs)) 1763 bb.note("Installing the following packages: %s" % ' '.join(pkgs))
@@ -1808,7 +1808,7 @@ class OpkgPM(OpkgDpkgPM):
1808 return OpkgPkgsList(self.d, self.target_rootfs, self.config_file).list_pkgs() 1808 return OpkgPkgsList(self.d, self.target_rootfs, self.config_file).list_pkgs()
1809 1809
1810 def handle_bad_recommendations(self): 1810 def handle_bad_recommendations(self):
1811 bad_recommendations = self.d.getVar("BAD_RECOMMENDATIONS", True) or "" 1811 bad_recommendations = self.d.getVar("BAD_RECOMMENDATIONS") or ""
1812 if bad_recommendations.strip() == "": 1812 if bad_recommendations.strip() == "":
1813 return 1813 return
1814 1814
@@ -1859,7 +1859,7 @@ class OpkgPM(OpkgDpkgPM):
1859 bb.utils.mkdirhier(temp_opkg_dir) 1859 bb.utils.mkdirhier(temp_opkg_dir)
1860 1860
1861 opkg_args = "-f %s -o %s " % (self.config_file, temp_rootfs) 1861 opkg_args = "-f %s -o %s " % (self.config_file, temp_rootfs)
1862 opkg_args += self.d.getVar("OPKG_ARGS", True) 1862 opkg_args += self.d.getVar("OPKG_ARGS")
1863 1863
1864 cmd = "%s %s update" % (self.opkg_cmd, opkg_args) 1864 cmd = "%s %s update" % (self.opkg_cmd, opkg_args)
1865 try: 1865 try:
@@ -1935,7 +1935,7 @@ class DpkgPM(OpkgDpkgPM):
1935 def __init__(self, d, target_rootfs, archs, base_archs, apt_conf_dir=None): 1935 def __init__(self, d, target_rootfs, archs, base_archs, apt_conf_dir=None):
1936 super(DpkgPM, self).__init__(d) 1936 super(DpkgPM, self).__init__(d)
1937 self.target_rootfs = target_rootfs 1937 self.target_rootfs = target_rootfs
1938 self.deploy_dir = self.d.getVar('DEPLOY_DIR_DEB', True) 1938 self.deploy_dir = self.d.getVar('DEPLOY_DIR_DEB')
1939 if apt_conf_dir is None: 1939 if apt_conf_dir is None:
1940 self.apt_conf_dir = self.d.expand("${APTCONF_TARGET}/apt") 1940 self.apt_conf_dir = self.d.expand("${APTCONF_TARGET}/apt")
1941 else: 1941 else:
@@ -1944,10 +1944,10 @@ class DpkgPM(OpkgDpkgPM):
1944 self.apt_get_cmd = bb.utils.which(os.getenv('PATH'), "apt-get") 1944 self.apt_get_cmd = bb.utils.which(os.getenv('PATH'), "apt-get")
1945 self.apt_cache_cmd = bb.utils.which(os.getenv('PATH'), "apt-cache") 1945 self.apt_cache_cmd = bb.utils.which(os.getenv('PATH'), "apt-cache")
1946 1946
1947 self.apt_args = d.getVar("APT_ARGS", True) 1947 self.apt_args = d.getVar("APT_ARGS")
1948 1948
1949 self.all_arch_list = archs.split() 1949 self.all_arch_list = archs.split()
1950 all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS', True) or "").split() 1950 all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS') or "").split()
1951 self.all_arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in self.all_arch_list) 1951 self.all_arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in self.all_arch_list)
1952 1952
1953 self._create_configs(archs, base_archs) 1953 self._create_configs(archs, base_archs)
@@ -2008,9 +2008,9 @@ class DpkgPM(OpkgDpkgPM):
2008 os.environ['OFFLINE_ROOT'] = self.target_rootfs 2008 os.environ['OFFLINE_ROOT'] = self.target_rootfs
2009 os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs 2009 os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs
2010 os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs 2010 os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs
2011 os.environ['INTERCEPT_DIR'] = os.path.join(self.d.getVar('WORKDIR', True), 2011 os.environ['INTERCEPT_DIR'] = os.path.join(self.d.getVar('WORKDIR'),
2012 "intercept_scripts") 2012 "intercept_scripts")
2013 os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE', True) 2013 os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE')
2014 2014
2015 failed_pkgs = [] 2015 failed_pkgs = []
2016 for pkg_name in installed_pkgs: 2016 for pkg_name in installed_pkgs:
@@ -2161,7 +2161,7 @@ class DpkgPM(OpkgDpkgPM):
2161 2161
2162 priority += 5 2162 priority += 5
2163 2163
2164 pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE', True) or "" 2164 pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE') or ""
2165 for pkg in pkg_exclude.split(): 2165 for pkg in pkg_exclude.split():
2166 prefs_file.write( 2166 prefs_file.write(
2167 "Package: %s\n" 2167 "Package: %s\n"
@@ -2176,14 +2176,14 @@ class DpkgPM(OpkgDpkgPM):
2176 os.path.join(self.deploy_dir, arch)) 2176 os.path.join(self.deploy_dir, arch))
2177 2177
2178 base_arch_list = base_archs.split() 2178 base_arch_list = base_archs.split()
2179 multilib_variants = self.d.getVar("MULTILIB_VARIANTS", True); 2179 multilib_variants = self.d.getVar("MULTILIB_VARIANTS");
2180 for variant in multilib_variants.split(): 2180 for variant in multilib_variants.split():
2181 localdata = bb.data.createCopy(self.d) 2181 localdata = bb.data.createCopy(self.d)
2182 variant_tune = localdata.getVar("DEFAULTTUNE_virtclass-multilib-" + variant, False) 2182 variant_tune = localdata.getVar("DEFAULTTUNE_virtclass-multilib-" + variant, False)
2183 orig_arch = localdata.getVar("DPKG_ARCH", True) 2183 orig_arch = localdata.getVar("DPKG_ARCH")
2184 localdata.setVar("DEFAULTTUNE", variant_tune) 2184 localdata.setVar("DEFAULTTUNE", variant_tune)
2185 bb.data.update_data(localdata) 2185 bb.data.update_data(localdata)
2186 variant_arch = localdata.getVar("DPKG_ARCH", True) 2186 variant_arch = localdata.getVar("DPKG_ARCH")
2187 if variant_arch not in base_arch_list: 2187 if variant_arch not in base_arch_list:
2188 base_arch_list.append(variant_arch) 2188 base_arch_list.append(variant_arch)
2189 2189
@@ -2214,7 +2214,7 @@ class DpkgPM(OpkgDpkgPM):
2214 2214
2215 def remove_packaging_data(self): 2215 def remove_packaging_data(self):
2216 bb.utils.remove(os.path.join(self.target_rootfs, 2216 bb.utils.remove(os.path.join(self.target_rootfs,
2217 self.d.getVar('opkglibdir', True)), True) 2217 self.d.getVar('opkglibdir')), True)
2218 bb.utils.remove(self.target_rootfs + "/var/lib/dpkg/", True) 2218 bb.utils.remove(self.target_rootfs + "/var/lib/dpkg/", True)
2219 2219
2220 def fix_broken_dependencies(self): 2220 def fix_broken_dependencies(self):
@@ -2262,12 +2262,12 @@ class DpkgPM(OpkgDpkgPM):
2262 return tmp_dir 2262 return tmp_dir
2263 2263
2264def generate_index_files(d): 2264def generate_index_files(d):
2265 classes = d.getVar('PACKAGE_CLASSES', True).replace("package_", "").split() 2265 classes = d.getVar('PACKAGE_CLASSES').replace("package_", "").split()
2266 2266
2267 indexer_map = { 2267 indexer_map = {
2268 "rpm": (RpmIndexer, d.getVar('DEPLOY_DIR_RPM', True)), 2268 "rpm": (RpmIndexer, d.getVar('DEPLOY_DIR_RPM')),
2269 "ipk": (OpkgIndexer, d.getVar('DEPLOY_DIR_IPK', True)), 2269 "ipk": (OpkgIndexer, d.getVar('DEPLOY_DIR_IPK')),
2270 "deb": (DpkgIndexer, d.getVar('DEPLOY_DIR_DEB', True)) 2270 "deb": (DpkgIndexer, d.getVar('DEPLOY_DIR_DEB'))
2271 } 2271 }
2272 2272
2273 result = None 2273 result = None
diff --git a/meta/lib/oe/packagedata.py b/meta/lib/oe/packagedata.py
index 21d4de914f..32e5c82a94 100644
--- a/meta/lib/oe/packagedata.py
+++ b/meta/lib/oe/packagedata.py
@@ -57,7 +57,7 @@ def read_subpkgdata_dict(pkg, d):
 def _pkgmap(d):
     """Return a dictionary mapping package to recipe name."""

-    pkgdatadir = d.getVar("PKGDATA_DIR", True)
+    pkgdatadir = d.getVar("PKGDATA_DIR")

     pkgmap = {}
     try:
diff --git a/meta/lib/oe/packagegroup.py b/meta/lib/oe/packagegroup.py
index 97819279b7..d68e5d322b 100644
--- a/meta/lib/oe/packagegroup.py
+++ b/meta/lib/oe/packagegroup.py
@@ -1,7 +1,7 @@
 import itertools

 def is_optional(feature, d):
-    packages = d.getVar("FEATURE_PACKAGES_%s" % feature, True)
+    packages = d.getVar("FEATURE_PACKAGES_%s" % feature)
     if packages:
         return bool(d.getVarFlag("FEATURE_PACKAGES_%s" % feature, "optional", True))
     else:
@@ -9,9 +9,9 @@ def is_optional(feature, d):

 def packages(features, d):
     for feature in features:
-        packages = d.getVar("FEATURE_PACKAGES_%s" % feature, True)
+        packages = d.getVar("FEATURE_PACKAGES_%s" % feature)
         if not packages:
-            packages = d.getVar("PACKAGE_GROUP_%s" % feature, True)
+            packages = d.getVar("PACKAGE_GROUP_%s" % feature)
         for pkg in (packages or "").split():
             yield pkg

diff --git a/meta/lib/oe/patch.py b/meta/lib/oe/patch.py
index 456ee70f7d..95674b3706 100644
--- a/meta/lib/oe/patch.py
+++ b/meta/lib/oe/patch.py
@@ -281,8 +281,8 @@ class GitApplyTree(PatchTree):

     def __init__(self, dir, d):
         PatchTree.__init__(self, dir, d)
-        self.commituser = d.getVar('PATCH_GIT_USER_NAME', True)
-        self.commitemail = d.getVar('PATCH_GIT_USER_EMAIL', True)
+        self.commituser = d.getVar('PATCH_GIT_USER_NAME')
+        self.commitemail = d.getVar('PATCH_GIT_USER_EMAIL')

     @staticmethod
     def extractPatchHeader(patchfile):
@@ -371,8 +371,8 @@ class GitApplyTree(PatchTree):
     @staticmethod
     def gitCommandUserOptions(cmd, commituser=None, commitemail=None, d=None):
         if d:
-            commituser = d.getVar('PATCH_GIT_USER_NAME', True)
-            commitemail = d.getVar('PATCH_GIT_USER_EMAIL', True)
+            commituser = d.getVar('PATCH_GIT_USER_NAME')
+            commitemail = d.getVar('PATCH_GIT_USER_EMAIL')
         if commituser:
             cmd += ['-c', 'user.name="%s"' % commituser]
         if commitemail:
@@ -551,7 +551,7 @@ class GitApplyTree(PatchTree):

 class QuiltTree(PatchSet):
     def _runcmd(self, args, run = True):
-        quiltrc = self.d.getVar('QUILTRCFILE', True)
+        quiltrc = self.d.getVar('QUILTRCFILE')
         if not run:
             return ["quilt"] + ["--quiltrc"] + [quiltrc] + args
         runcmd(["quilt"] + ["--quiltrc"] + [quiltrc] + args, self.dir)
@@ -727,7 +727,7 @@ class UserResolver(Resolver):
         # Patch application failed
         patchcmd = self.patchset.Push(True, False, False)

-        t = self.patchset.d.getVar('T', True)
+        t = self.patchset.d.getVar('T')
         if not t:
             bb.msg.fatal("Build", "T not set")
         bb.utils.mkdirhier(t)
@@ -792,7 +792,7 @@ def patch_path(url, fetch, workdir, expand=True):
     return local

 def src_patches(d, all=False, expand=True):
-    workdir = d.getVar('WORKDIR', True)
+    workdir = d.getVar('WORKDIR')
     fetch = bb.fetch2.Fetch([], d)
     patches = []
     sources = []
@@ -839,13 +839,13 @@

 def should_apply(parm, d):
     if "mindate" in parm or "maxdate" in parm:
-        pn = d.getVar('PN', True)
-        srcdate = d.getVar('SRCDATE_%s' % pn, True)
+        pn = d.getVar('PN')
+        srcdate = d.getVar('SRCDATE_%s' % pn)
         if not srcdate:
-            srcdate = d.getVar('SRCDATE', True)
+            srcdate = d.getVar('SRCDATE')

         if srcdate == "now":
-            srcdate = d.getVar('DATE', True)
+            srcdate = d.getVar('DATE')

         if "maxdate" in parm and parm["maxdate"] < srcdate:
             return False, 'is outdated'
@@ -855,22 +855,22 @@ def should_apply(parm, d):


     if "minrev" in parm:
-        srcrev = d.getVar('SRCREV', True)
+        srcrev = d.getVar('SRCREV')
         if srcrev and srcrev < parm["minrev"]:
             return False, 'applies to later revisions'

     if "maxrev" in parm:
-        srcrev = d.getVar('SRCREV', True)
+        srcrev = d.getVar('SRCREV')
         if srcrev and srcrev > parm["maxrev"]:
             return False, 'applies to earlier revisions'

     if "rev" in parm:
-        srcrev = d.getVar('SRCREV', True)
+        srcrev = d.getVar('SRCREV')
         if srcrev and parm["rev"] not in srcrev:
             return False, "doesn't apply to revision"

     if "notrev" in parm:
-        srcrev = d.getVar('SRCREV', True)
+        srcrev = d.getVar('SRCREV')
         if srcrev and parm["notrev"] in srcrev:
             return False, "doesn't apply to revision"

diff --git a/meta/lib/oe/path.py b/meta/lib/oe/path.py
index f73fd4ac07..804ecd5fea 100644
--- a/meta/lib/oe/path.py
+++ b/meta/lib/oe/path.py
@@ -52,7 +52,7 @@ def make_relative_symlink(path):

 def format_display(path, metadata):
     """ Prepare a path for display to the user. """
-    rel = relative(metadata.getVar("TOPDIR", True), path)
+    rel = relative(metadata.getVar("TOPDIR"), path)
     if len(rel) > len(path):
         return path
     else:
diff --git a/meta/lib/oe/prservice.py b/meta/lib/oe/prservice.py
index 0054f954cc..32dfc15e88 100644
--- a/meta/lib/oe/prservice.py
+++ b/meta/lib/oe/prservice.py
@@ -1,7 +1,7 @@

 def prserv_make_conn(d, check = False):
     import prserv.serv
-    host_params = list([_f for _f in (d.getVar("PRSERV_HOST", True) or '').split(':') if _f])
+    host_params = list([_f for _f in (d.getVar("PRSERV_HOST") or '').split(':') if _f])
     try:
         conn = None
         conn = prserv.serv.PRServerConnection(host_params[0], int(host_params[1]))
@@ -15,11 +15,11 @@ def prserv_make_conn(d, check = False):
     return conn

 def prserv_dump_db(d):
-    if not d.getVar('PRSERV_HOST', True):
+    if not d.getVar('PRSERV_HOST'):
         bb.error("Not using network based PR service")
         return None

-    conn = d.getVar("__PRSERV_CONN", True)
+    conn = d.getVar("__PRSERV_CONN")
     if conn is None:
         conn = prserv_make_conn(d)
         if conn is None:
@@ -27,18 +27,18 @@ def prserv_dump_db(d):
             return None

     #dump db
-    opt_version = d.getVar('PRSERV_DUMPOPT_VERSION', True)
-    opt_pkgarch = d.getVar('PRSERV_DUMPOPT_PKGARCH', True)
-    opt_checksum = d.getVar('PRSERV_DUMPOPT_CHECKSUM', True)
-    opt_col = ("1" == d.getVar('PRSERV_DUMPOPT_COL', True))
+    opt_version = d.getVar('PRSERV_DUMPOPT_VERSION')
+    opt_pkgarch = d.getVar('PRSERV_DUMPOPT_PKGARCH')
+    opt_checksum = d.getVar('PRSERV_DUMPOPT_CHECKSUM')
+    opt_col = ("1" == d.getVar('PRSERV_DUMPOPT_COL'))
     return conn.export(opt_version, opt_pkgarch, opt_checksum, opt_col)

 def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksum=None):
-    if not d.getVar('PRSERV_HOST', True):
+    if not d.getVar('PRSERV_HOST'):
         bb.error("Not using network based PR service")
         return None

-    conn = d.getVar("__PRSERV_CONN", True)
+    conn = d.getVar("__PRSERV_CONN")
     if conn is None:
         conn = prserv_make_conn(d)
         if conn is None:
@@ -58,7 +58,7 @@ def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksu
            (filter_checksum and filter_checksum != checksum):
             continue
         try:
-            value = int(d.getVar(remain + '$' + version + '$' + pkgarch + '$' + checksum, True))
+            value = int(d.getVar(remain + '$' + version + '$' + pkgarch + '$' + checksum))
         except BaseException as exc:
             bb.debug("Not valid value of %s:%s" % (v,str(exc)))
             continue
@@ -72,8 +72,8 @@ def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksu
 def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False):
     import bb.utils
     #initilize the output file
-    bb.utils.mkdirhier(d.getVar('PRSERV_DUMPDIR', True))
-    df = d.getVar('PRSERV_DUMPFILE', True)
+    bb.utils.mkdirhier(d.getVar('PRSERV_DUMPDIR'))
+    df = d.getVar('PRSERV_DUMPFILE')
     #write data
     lf = bb.utils.lockfile("%s.lock" % df)
     f = open(df, "a")
@@ -114,7 +114,7 @@ def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False):
     bb.utils.unlockfile(lf)

 def prserv_check_avail(d):
-    host_params = list([_f for _f in (d.getVar("PRSERV_HOST", True) or '').split(':') if _f])
+    host_params = list([_f for _f in (d.getVar("PRSERV_HOST") or '').split(':') if _f])
     try:
         if len(host_params) != 2:
             raise TypeError
diff --git a/meta/lib/oe/qa.py b/meta/lib/oe/qa.py
index 22d76dcbcd..3231e60cea 100644
--- a/meta/lib/oe/qa.py
+++ b/meta/lib/oe/qa.py
@@ -129,11 +129,11 @@ class ELFFile:
         if cmd in self.objdump_output:
             return self.objdump_output[cmd]

-        objdump = d.getVar('OBJDUMP', True)
+        objdump = d.getVar('OBJDUMP')

         env = os.environ.copy()
         env["LC_ALL"] = "C"
-        env["PATH"] = d.getVar('PATH', True)
+        env["PATH"] = d.getVar('PATH')

         try:
             bb.note("%s %s %s" % (objdump, cmd, self.name))
diff --git a/meta/lib/oe/recipeutils.py b/meta/lib/oe/recipeutils.py
index 26c926f214..a7fdd36e40 100644
--- a/meta/lib/oe/recipeutils.py
+++ b/meta/lib/oe/recipeutils.py
@@ -328,16 +328,16 @@ def copy_recipe_files(d, tgt_dir, whole_dir=False, download=True):

     # FIXME need a warning if the unexpanded SRC_URI value contains variable references

-    uris = (d.getVar('SRC_URI', True) or "").split()
+    uris = (d.getVar('SRC_URI') or "").split()
     fetch = bb.fetch2.Fetch(uris, d)
     if download:
         fetch.download()

     # Copy local files to target directory and gather any remote files
-    bb_dir = os.path.dirname(d.getVar('FILE', True)) + os.sep
+    bb_dir = os.path.dirname(d.getVar('FILE')) + os.sep
     remotes = []
     copied = []
-    includes = [path for path in d.getVar('BBINCLUDED', True).split() if
+    includes = [path for path in d.getVar('BBINCLUDED').split() if
                 path.startswith(bb_dir) and os.path.exists(path)]
     for path in fetch.localpaths() + includes:
         # Only import files that are under the meta directory
@@ -361,7 +361,7 @@ def copy_recipe_files(d, tgt_dir, whole_dir=False, download=True):
 def get_recipe_local_files(d, patches=False, archives=False):
     """Get a list of local files in SRC_URI within a recipe."""
     import oe.patch
-    uris = (d.getVar('SRC_URI', True) or "").split()
+    uris = (d.getVar('SRC_URI') or "").split()
     fetch = bb.fetch2.Fetch(uris, d)
     # FIXME this list should be factored out somewhere else (such as the
     # fetcher) though note that this only encompasses actual container formats
@@ -421,7 +421,7 @@ def get_recipe_patched_files(d):
     for patch in patches:
         _, _, patchfile, _, _, parm = bb.fetch.decodeurl(patch)
         striplevel = int(parm['striplevel'])
-        patchedfiles[patchfile] = oe.patch.PatchSet.getPatchedFiles(patchfile, striplevel, os.path.join(d.getVar('S', True), parm.get('patchdir', '')))
+        patchedfiles[patchfile] = oe.patch.PatchSet.getPatchedFiles(patchfile, striplevel, os.path.join(d.getVar('S'), parm.get('patchdir', '')))
     return patchedfiles


@@ -459,9 +459,9 @@ def get_bbfile_path(d, destdir, extrapathhint=None):
         confdata.setVar('LAYERDIR', destlayerdir)
         destlayerconf = os.path.join(destlayerdir, "conf", "layer.conf")
         confdata = bb.cookerdata.parse_config_file(destlayerconf, confdata)
-        pn = d.getVar('PN', True)
+        pn = d.getVar('PN')

-        bbfilespecs = (confdata.getVar('BBFILES', True) or '').split()
+        bbfilespecs = (confdata.getVar('BBFILES') or '').split()
         if destdir == destlayerdir:
             for bbfilespec in bbfilespecs:
                 if not bbfilespec.endswith('.bbappend'):
@@ -474,8 +474,8 @@ def get_bbfile_path(d, destdir, extrapathhint=None):

     # Try to make up a path that matches BBFILES
     # this is a little crude, but better than nothing
-    bpn = d.getVar('BPN', True)
-    recipefn = os.path.basename(d.getVar('FILE', True))
+    bpn = d.getVar('BPN')
+    recipefn = os.path.basename(d.getVar('FILE'))
     pathoptions = [destdir]
     if extrapathhint:
         pathoptions.append(os.path.join(destdir, extrapathhint))
@@ -499,7 +499,7 @@ def get_bbappend_path(d, destlayerdir, wildcardver=False):
     import bb.cookerdata

     destlayerdir = os.path.abspath(destlayerdir)
-    recipefile = d.getVar('FILE', True)
+    recipefile = d.getVar('FILE')
     recipefn = os.path.splitext(os.path.basename(recipefile))[0]
     if wildcardver and '_' in recipefn:
         recipefn = recipefn.split('_', 1)[0] + '_%'
@@ -519,7 +519,7 @@ def get_bbappend_path(d, destlayerdir, wildcardver=False):
         appendpath = os.path.join(destlayerdir, os.path.relpath(os.path.dirname(recipefile), origlayerdir), appendfn)
         closepath = ''
         pathok = True
-        for bbfilespec in confdata.getVar('BBFILES', True).split():
+        for bbfilespec in confdata.getVar('BBFILES').split():
             if fnmatch.fnmatchcase(appendpath, bbfilespec):
                 # Our append path works, we're done
                 break
@@ -592,7 +592,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,

     # FIXME check if the bbappend doesn't get overridden by a higher priority layer?

-    layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS', True).split()]
+    layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS').split()]
     if not os.path.abspath(destlayerdir) in layerdirs:
         bb.warn('Specified layer is not currently enabled in bblayers.conf, you will need to add it before this bbappend will be active')

@@ -628,7 +628,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
         else:
             bbappendlines.append((varname, op, value))

-    destsubdir = rd.getVar('PN', True)
+    destsubdir = rd.getVar('PN')
     if srcfiles:
         bbappendlines.append(('FILESEXTRAPATHS_prepend', ':=', '${THISDIR}/${PN}:'))

@@ -647,7 +647,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
             srcurientry = 'file://%s' % srcfile
             # Double-check it's not there already
             # FIXME do we care if the entry is added by another bbappend that might go away?
-            if not srcurientry in rd.getVar('SRC_URI', True).split():
+            if not srcurientry in rd.getVar('SRC_URI').split():
                 if machine:
                     appendline('SRC_URI_append%s' % appendoverride, '=', ' ' + srcurientry)
                 else:
@@ -796,7 +796,7 @@ def replace_dir_vars(path, d):
     # Sort by length so we get the variables we're interested in first
     for var in sorted(list(d.keys()), key=len):
         if var.endswith('dir') and var.lower() == var:
-            value = d.getVar(var, True)
+            value = d.getVar(var)
             if value.startswith('/') and not '\n' in value and value not in dirvars:
                 dirvars[value] = var
     for dirpath in sorted(list(dirvars.keys()), reverse=True):
@@ -850,12 +850,12 @@ def get_recipe_upstream_version(rd):
     ru['type'] = 'U'
     ru['datetime'] = ''

-    pv = rd.getVar('PV', True)
+    pv = rd.getVar('PV')

     # XXX: If don't have SRC_URI means that don't have upstream sources so
     # returns the current recipe version, so that upstream version check
     # declares a match.
-    src_uris = rd.getVar('SRC_URI', True)
+    src_uris = rd.getVar('SRC_URI')
     if not src_uris:
         ru['version'] = pv
         ru['type'] = 'M'
@@ -866,13 +866,13 @@ def get_recipe_upstream_version(rd):
     src_uri = src_uris.split()[0]
     uri_type, _, _, _, _, _ = decodeurl(src_uri)

-    manual_upstream_version = rd.getVar("RECIPE_UPSTREAM_VERSION", True)
+    manual_upstream_version = rd.getVar("RECIPE_UPSTREAM_VERSION")
     if manual_upstream_version:
         # manual tracking of upstream version.
         ru['version'] = manual_upstream_version
         ru['type'] = 'M'

-        manual_upstream_date = rd.getVar("CHECK_DATE", True)
+        manual_upstream_date = rd.getVar("CHECK_DATE")
         if manual_upstream_date:
             date = datetime.strptime(manual_upstream_date, "%b %d, %Y")
         else:
diff --git a/meta/lib/oe/rootfs.py b/meta/lib/oe/rootfs.py
index 74fc3bd256..d9a473006a 100644
--- a/meta/lib/oe/rootfs.py
+++ b/meta/lib/oe/rootfs.py
@@ -18,8 +18,8 @@ class Rootfs(object, metaclass=ABCMeta):
     def __init__(self, d, progress_reporter=None, logcatcher=None):
         self.d = d
         self.pm = None
-        self.image_rootfs = self.d.getVar('IMAGE_ROOTFS', True)
-        self.deploydir = self.d.getVar('IMGDEPLOYDIR', True)
+        self.image_rootfs = self.d.getVar('IMAGE_ROOTFS')
+        self.deploydir = self.d.getVar('IMGDEPLOYDIR')
         self.progress_reporter = progress_reporter
         self.logcatcher = logcatcher

@@ -72,7 +72,7 @@ class Rootfs(object, metaclass=ABCMeta):
         else:
             msg = '%d %s messages' % (len(messages), type)
         msg = '[log_check] %s: found %s in the logfile:\n%s' % \
-            (self.d.getVar('PN', True), msg, ''.join(messages))
+            (self.d.getVar('PN'), msg, ''.join(messages))
         if type == 'error':
             bb.fatal(msg)
         else:
@@ -103,7 +103,7 @@ class Rootfs(object, metaclass=ABCMeta):
             pass

     def _setup_dbg_rootfs(self, dirs):
-        gen_debugfs = self.d.getVar('IMAGE_GEN_DEBUGFS', True) or '0'
+        gen_debugfs = self.d.getVar('IMAGE_GEN_DEBUGFS') or '0'
         if gen_debugfs != '1':
             return

@@ -156,7 +156,7 @@ class Rootfs(object, metaclass=ABCMeta):
         os.rename(self.image_rootfs + '-orig', self.image_rootfs)

     def _exec_shell_cmd(self, cmd):
-        fakerootcmd = self.d.getVar('FAKEROOT', True)
+        fakerootcmd = self.d.getVar('FAKEROOT')
         if fakerootcmd is not None:
             exec_cmd = [fakerootcmd, cmd]
         else:
@@ -171,14 +171,14 @@ class Rootfs(object, metaclass=ABCMeta):

     def create(self):
         bb.note("###### Generate rootfs #######")
-        pre_process_cmds = self.d.getVar("ROOTFS_PREPROCESS_COMMAND", True)
-        post_process_cmds = self.d.getVar("ROOTFS_POSTPROCESS_COMMAND", True)
-        rootfs_post_install_cmds = self.d.getVar('ROOTFS_POSTINSTALL_COMMAND', True)
+        pre_process_cmds = self.d.getVar("ROOTFS_PREPROCESS_COMMAND")
+        post_process_cmds = self.d.getVar("ROOTFS_POSTPROCESS_COMMAND")
+        rootfs_post_install_cmds = self.d.getVar('ROOTFS_POSTINSTALL_COMMAND')

-        postinst_intercepts_dir = self.d.getVar("POSTINST_INTERCEPTS_DIR", True)
+        postinst_intercepts_dir = self.d.getVar("POSTINST_INTERCEPTS_DIR")
         if not postinst_intercepts_dir:
             postinst_intercepts_dir = self.d.expand("${COREBASE}/scripts/postinst-intercepts")
-        intercepts_dir = os.path.join(self.d.getVar('WORKDIR', True),
+        intercepts_dir = os.path.join(self.d.getVar('WORKDIR'),
                                       "intercept_scripts")

         bb.utils.remove(intercepts_dir, True)
@@ -201,10 +201,10 @@ class Rootfs(object, metaclass=ABCMeta):
         # call the package manager dependent create method
         self._create()

-        sysconfdir = self.image_rootfs + self.d.getVar('sysconfdir', True)
+        sysconfdir = self.image_rootfs + self.d.getVar('sysconfdir')
         bb.utils.mkdirhier(sysconfdir)
         with open(sysconfdir + "/version", "w+") as ver:
-            ver.write(self.d.getVar('BUILDNAME', True) + "\n")
+            ver.write(self.d.getVar('BUILDNAME') + "\n")

         execute_pre_post_process(self.d, rootfs_post_install_cmds)

@@ -223,7 +223,7 @@ class Rootfs(object, metaclass=ABCMeta):
                            "offline and rootfs is read-only: %s" %
                            delayed_postinsts)

-        if self.d.getVar('USE_DEVFS', True) != "1":
+        if self.d.getVar('USE_DEVFS') != "1":
             self._create_devfs()

         self._uninstall_unneeded()
@@ -235,7 +235,7 @@ class Rootfs(object, metaclass=ABCMeta):

         self._run_ldconfig()

-        if self.d.getVar('USE_DEPMOD', True) != "0":
+        if self.d.getVar('USE_DEPMOD') != "0":
             self._generate_kernel_module_deps()

         self._cleanup()
@@ -251,16 +251,16 @@ class Rootfs(object, metaclass=ABCMeta):
         if delayed_postinsts is None:
             if os.path.exists(self.d.expand("${IMAGE_ROOTFS}${sysconfdir}/init.d/run-postinsts")):
                 self._exec_shell_cmd(["update-rc.d", "-f", "-r",
-                                      self.d.getVar('IMAGE_ROOTFS', True),
+                                      self.d.getVar('IMAGE_ROOTFS'),
                                       "run-postinsts", "remove"])

         image_rorfs = bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs",
                                         True, False, self.d)
-        image_rorfs_force = self.d.getVar('FORCE_RO_REMOVE', True)
+        image_rorfs_force = self.d.getVar('FORCE_RO_REMOVE')

         if image_rorfs or image_rorfs_force == "1":
             # Remove components that we don't need if it's a read-only rootfs
-            unneeded_pkgs = self.d.getVar("ROOTFS_RO_UNNEEDED", True).split()
+            unneeded_pkgs = self.d.getVar("ROOTFS_RO_UNNEEDED").split()
             pkgs_installed = image_list_installed_packages(self.d)
             pkgs_to_remove = [pkg for pkg in pkgs_installed if pkg in unneeded_pkgs]

@@ -273,7 +273,7 @@ class Rootfs(object, metaclass=ABCMeta):
                 bb.warn("There are post install scripts "
                         "in a read-only rootfs")

-        post_uninstall_cmds = self.d.getVar("ROOTFS_POSTUNINSTALL_COMMAND", True)
+        post_uninstall_cmds = self.d.getVar("ROOTFS_POSTUNINSTALL_COMMAND")
         execute_pre_post_process(self.d, post_uninstall_cmds)

         runtime_pkgmanage = bb.utils.contains("IMAGE_FEATURES", "package-management",
@@ -283,12 +283,12 @@ class Rootfs(object, metaclass=ABCMeta):
             self.pm.remove_packaging_data()

     def _run_intercepts(self):
-        intercepts_dir = os.path.join(self.d.getVar('WORKDIR', True),
+        intercepts_dir = os.path.join(self.d.getVar('WORKDIR'),
                                       "intercept_scripts")

         bb.note("Running intercept scripts:")
         os.environ['D'] = self.image_rootfs
-        os.environ['STAGING_DIR_NATIVE'] = self.d.getVar('STAGING_DIR_NATIVE', True)
+        os.environ['STAGING_DIR_NATIVE'] = self.d.getVar('STAGING_DIR_NATIVE')
         for script in os.listdir(intercepts_dir):
             script_full = os.path.join(intercepts_dir, script)

@@ -320,7 +320,7 @@ class Rootfs(object, metaclass=ABCMeta):
             self._handle_intercept_failure(registered_pkgs)

     def _run_ldconfig(self):
-        if self.d.getVar('LDCONFIGDEPEND', True):
+        if self.d.getVar('LDCONFIGDEPEND'):
             bb.note("Executing: ldconfig -r" + self.image_rootfs + "-c new -v")
             self._exec_shell_cmd(['ldconfig', '-r', self.image_rootfs, '-c',
                                   'new', '-v'])
@@ -340,7 +340,7 @@ class Rootfs(object, metaclass=ABCMeta):
             bb.note("No Kernel Modules found, not running depmod")
             return

-        kernel_abi_ver_file = oe.path.join(self.d.getVar('PKGDATA_DIR', True), "kernel-depmod",
+        kernel_abi_ver_file = oe.path.join(self.d.getVar('PKGDATA_DIR'), "kernel-depmod",
                                            'kernel-abiversion')
         if not os.path.exists(kernel_abi_ver_file):
             bb.fatal("No kernel-abiversion file found (%s), cannot run depmod, aborting" % kernel_abi_ver_file)
@@ -362,15 +362,15 @@ class Rootfs(object, metaclass=ABCMeta):
     """
     def _create_devfs(self):
         devtable_list = []
-        devtable = self.d.getVar('IMAGE_DEVICE_TABLE', True)
+        devtable = self.d.getVar('IMAGE_DEVICE_TABLE')
         if devtable is not None:
             devtable_list.append(devtable)
         else:
-            devtables = self.d.getVar('IMAGE_DEVICE_TABLES', True)
+            devtables = self.d.getVar('IMAGE_DEVICE_TABLES')
             if devtables is None:
                 devtables = 'files/device_table-minimal.txt'
             for devtable in devtables.split():
-                devtable_list.append("%s" % bb.utils.which(self.d.getVar('BBPATH', True), devtable))
+                devtable_list.append("%s" % bb.utils.which(self.d.getVar('BBPATH'), devtable))

         for devtable in devtable_list:
             self._exec_shell_cmd(["makedevs", "-r",
@@ -386,16 +386,16 @@ class RpmRootfs(Rootfs):
         self.manifest = RpmManifest(d, manifest_dir)

         self.pm = RpmPM(d,
-                        d.getVar('IMAGE_ROOTFS', True),
-                        self.d.getVar('TARGET_VENDOR', True)
+                        d.getVar('IMAGE_ROOTFS'),
+                        self.d.getVar('TARGET_VENDOR')
                         )

-        self.inc_rpm_image_gen = self.d.getVar('INC_RPM_IMAGE_GEN', True)
+        self.inc_rpm_image_gen = self.d.getVar('INC_RPM_IMAGE_GEN')
         if self.inc_rpm_image_gen != "1":
             bb.utils.remove(self.image_rootfs, True)
         else:
             self.pm.recovery_packaging_data()
-        bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS', True), True)
+        bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS'), True)

         self.pm.create_configs()

@@ -429,8 +429,8 @@ class RpmRootfs(Rootfs):

     def _create(self):
         pkgs_to_install = self.manifest.parse_initial_manifest()
-        rpm_pre_process_cmds = self.d.getVar('RPM_PREPROCESS_COMMANDS', True)
-        rpm_post_process_cmds = self.d.getVar('RPM_POSTPROCESS_COMMANDS', True)
+        rpm_pre_process_cmds = self.d.getVar('RPM_PREPROCESS_COMMANDS')
+        rpm_post_process_cmds = self.d.getVar('RPM_POSTPROCESS_COMMANDS')

         # update PM index files
         self.pm.write_index()
@@ -601,7 +601,7 @@ class DpkgOpkgRootfs(Rootfs):
         pkg_list = []

         pkgs = None
-        if not self.d.getVar('PACKAGE_INSTALL', True).strip():
+        if not self.d.getVar('PACKAGE_INSTALL').strip():
             bb.note("Building empty image")
         else:
             pkgs = self._get_pkgs_postinsts(status_file)
@@ -637,17 +637,17 @@ class DpkgRootfs(DpkgOpkgRootfs):
         ]

         bb.utils.remove(self.image_rootfs, True)
-        bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS', True), True)
+        bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS'), True)
         self.manifest = DpkgManifest(d, manifest_dir)
-        self.pm = DpkgPM(d, d.getVar('IMAGE_ROOTFS', True),
-                         d.getVar('PACKAGE_ARCHS', True),
-                         d.getVar('DPKG_ARCH', True))
+        self.pm = DpkgPM(d, d.getVar('IMAGE_ROOTFS'),
+                         d.getVar('PACKAGE_ARCHS'),
+                         d.getVar('DPKG_ARCH'))


     def _create(self):
         pkgs_to_install = self.manifest.parse_initial_manifest()
-        deb_pre_process_cmds = self.d.getVar('DEB_PREPROCESS_COMMANDS', True)
-        deb_post_process_cmds = self.d.getVar('DEB_POSTPROCESS_COMMANDS', True)
+        deb_pre_process_cmds = self.d.getVar('DEB_PREPROCESS_COMMANDS')
+        deb_post_process_cmds = self.d.getVar('DEB_POSTPROCESS_COMMANDS')

         alt_dir = self.d.expand("${IMAGE_ROOTFS}/var/lib/dpkg/alternatives")
         bb.utils.mkdirhier(alt_dir)
@@ -725,10 +725,10 @@ class OpkgRootfs(DpkgOpkgRootfs):
         self.log_check_regex = '(exit 1|Collected errors)'

         self.manifest = OpkgManifest(d, manifest_dir)
-        self.opkg_conf = self.d.getVar("IPKGCONF_TARGET", True)
-        self.pkg_archs = self.d.getVar("ALL_MULTILIB_PACKAGE_ARCHS", True)
+        self.opkg_conf = self.d.getVar("IPKGCONF_TARGET")
+        self.pkg_archs = self.d.getVar("ALL_MULTILIB_PACKAGE_ARCHS")

-        self.inc_opkg_image_gen = self.d.getVar('INC_IPK_IMAGE_GEN', True) or ""
+        self.inc_opkg_image_gen = self.d.getVar('INC_IPK_IMAGE_GEN') or ""
         if self._remove_old_rootfs():
             bb.utils.remove(self.image_rootfs, True)
         self.pm = OpkgPM(d,
@@ -742,7 +742,7 @@ class OpkgRootfs(DpkgOpkgRootfs):
742 self.pkg_archs) 742 self.pkg_archs)
743 self.pm.recover_packaging_data() 743 self.pm.recover_packaging_data()
744 744
745 bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS', True), True) 745 bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS'), True)
746 746
747 def _prelink_file(self, root_dir, filename): 747 def _prelink_file(self, root_dir, filename):
748 bb.note('prelink %s in %s' % (filename, root_dir)) 748 bb.note('prelink %s in %s' % (filename, root_dir))
@@ -797,7 +797,7 @@ class OpkgRootfs(DpkgOpkgRootfs):
797 """ 797 """
798 def _multilib_sanity_test(self, dirs): 798 def _multilib_sanity_test(self, dirs):
799 799
800 allow_replace = self.d.getVar("MULTILIBRE_ALLOW_REP", True) 800 allow_replace = self.d.getVar("MULTILIBRE_ALLOW_REP")
801 if allow_replace is None: 801 if allow_replace is None:
802 allow_replace = "" 802 allow_replace = ""
803 803
@@ -829,12 +829,12 @@ class OpkgRootfs(DpkgOpkgRootfs):
829 files[key] = item 829 files[key] = item
830 830
831 def _multilib_test_install(self, pkgs): 831 def _multilib_test_install(self, pkgs):
832 ml_temp = self.d.getVar("MULTILIB_TEMP_ROOTFS", True) 832 ml_temp = self.d.getVar("MULTILIB_TEMP_ROOTFS")
833 bb.utils.mkdirhier(ml_temp) 833 bb.utils.mkdirhier(ml_temp)
834 834
835 dirs = [self.image_rootfs] 835 dirs = [self.image_rootfs]
836 836
837 for variant in self.d.getVar("MULTILIB_VARIANTS", True).split(): 837 for variant in self.d.getVar("MULTILIB_VARIANTS").split():
838 ml_target_rootfs = os.path.join(ml_temp, variant) 838 ml_target_rootfs = os.path.join(ml_temp, variant)
839 839
840 bb.utils.remove(ml_target_rootfs, True) 840 bb.utils.remove(ml_target_rootfs, True)
@@ -894,9 +894,9 @@ class OpkgRootfs(DpkgOpkgRootfs):
894 old_vars_list = open(vars_list_file, 'r+').read() 894 old_vars_list = open(vars_list_file, 'r+').read()
895 895
896 new_vars_list = '%s:%s:%s\n' % \ 896 new_vars_list = '%s:%s:%s\n' % \
897 ((self.d.getVar('BAD_RECOMMENDATIONS', True) or '').strip(), 897 ((self.d.getVar('BAD_RECOMMENDATIONS') or '').strip(),
898 (self.d.getVar('NO_RECOMMENDATIONS', True) or '').strip(), 898 (self.d.getVar('NO_RECOMMENDATIONS') or '').strip(),
899 (self.d.getVar('PACKAGE_EXCLUDE', True) or '').strip()) 899 (self.d.getVar('PACKAGE_EXCLUDE') or '').strip())
900 open(vars_list_file, 'w+').write(new_vars_list) 900 open(vars_list_file, 'w+').write(new_vars_list)
901 901
902 if old_vars_list != new_vars_list: 902 if old_vars_list != new_vars_list:
@@ -906,11 +906,11 @@ class OpkgRootfs(DpkgOpkgRootfs):
906 906
907 def _create(self): 907 def _create(self):
908 pkgs_to_install = self.manifest.parse_initial_manifest() 908 pkgs_to_install = self.manifest.parse_initial_manifest()
909 opkg_pre_process_cmds = self.d.getVar('OPKG_PREPROCESS_COMMANDS', True) 909 opkg_pre_process_cmds = self.d.getVar('OPKG_PREPROCESS_COMMANDS')
910 opkg_post_process_cmds = self.d.getVar('OPKG_POSTPROCESS_COMMANDS', True) 910 opkg_post_process_cmds = self.d.getVar('OPKG_POSTPROCESS_COMMANDS')
911 911
912 # update PM index files, unless users provide their own feeds 912 # update PM index files, unless users provide their own feeds
913 if (self.d.getVar('BUILD_IMAGES_FROM_FEEDS', True) or "") != "1": 913 if (self.d.getVar('BUILD_IMAGES_FROM_FEEDS') or "") != "1":
914 self.pm.write_index() 914 self.pm.write_index()
915 915
916 execute_pre_post_process(self.d, opkg_pre_process_cmds) 916 execute_pre_post_process(self.d, opkg_pre_process_cmds)
@@ -968,7 +968,7 @@ class OpkgRootfs(DpkgOpkgRootfs):
968 968
969 def _get_delayed_postinsts(self): 969 def _get_delayed_postinsts(self):
970 status_file = os.path.join(self.image_rootfs, 970 status_file = os.path.join(self.image_rootfs,
971 self.d.getVar('OPKGLIBDIR', True).strip('/'), 971 self.d.getVar('OPKGLIBDIR').strip('/'),
972 "opkg", "status") 972 "opkg", "status")
973 return self._get_delayed_postinsts_common(status_file) 973 return self._get_delayed_postinsts_common(status_file)
974 974
@@ -993,14 +993,14 @@ def get_class_for_type(imgtype):
993 "deb": DpkgRootfs}[imgtype] 993 "deb": DpkgRootfs}[imgtype]
994 994
995def variable_depends(d, manifest_dir=None): 995def variable_depends(d, manifest_dir=None):
996 img_type = d.getVar('IMAGE_PKGTYPE', True) 996 img_type = d.getVar('IMAGE_PKGTYPE')
997 cls = get_class_for_type(img_type) 997 cls = get_class_for_type(img_type)
998 return cls._depends_list() 998 return cls._depends_list()
999 999
1000def create_rootfs(d, manifest_dir=None, progress_reporter=None, logcatcher=None): 1000def create_rootfs(d, manifest_dir=None, progress_reporter=None, logcatcher=None):
1001 env_bkp = os.environ.copy() 1001 env_bkp = os.environ.copy()
1002 1002
1003 img_type = d.getVar('IMAGE_PKGTYPE', True) 1003 img_type = d.getVar('IMAGE_PKGTYPE')
1004 if img_type == "rpm": 1004 if img_type == "rpm":
1005 RpmRootfs(d, manifest_dir, progress_reporter, logcatcher).create() 1005 RpmRootfs(d, manifest_dir, progress_reporter, logcatcher).create()
1006 elif img_type == "ipk": 1006 elif img_type == "ipk":
@@ -1014,13 +1014,13 @@ def create_rootfs(d, manifest_dir=None, progress_reporter=None, logcatcher=None)
1014 1014
1015def image_list_installed_packages(d, rootfs_dir=None): 1015def image_list_installed_packages(d, rootfs_dir=None):
1016 if not rootfs_dir: 1016 if not rootfs_dir:
1017 rootfs_dir = d.getVar('IMAGE_ROOTFS', True) 1017 rootfs_dir = d.getVar('IMAGE_ROOTFS')
1018 1018
1019 img_type = d.getVar('IMAGE_PKGTYPE', True) 1019 img_type = d.getVar('IMAGE_PKGTYPE')
1020 if img_type == "rpm": 1020 if img_type == "rpm":
1021 return RpmPkgsList(d, rootfs_dir).list_pkgs() 1021 return RpmPkgsList(d, rootfs_dir).list_pkgs()
1022 elif img_type == "ipk": 1022 elif img_type == "ipk":
1023 return OpkgPkgsList(d, rootfs_dir, d.getVar("IPKGCONF_TARGET", True)).list_pkgs() 1023 return OpkgPkgsList(d, rootfs_dir, d.getVar("IPKGCONF_TARGET")).list_pkgs()
1024 elif img_type == "deb": 1024 elif img_type == "deb":
1025 return DpkgPkgsList(d, rootfs_dir).list_pkgs() 1025 return DpkgPkgsList(d, rootfs_dir).list_pkgs()
1026 1026
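The rootfs.py hunks above are all the same mechanical change: getVar() now expands by default, so the trailing True argument is redundant. A minimal toy sketch of the equivalence (illustration only; MiniData is hypothetical and much simpler than BitBake's actual data_smart datastore):

# Toy stand-in for BitBake's datastore; illustration only.
class MiniData:
    def __init__(self):
        self._vars = {}

    def setVar(self, name, value):
        self._vars[name] = value

    def getVar(self, name, expand=True):
        # expand now defaults to True, so getVar('X') == getVar('X', True).
        value = self._vars.get(name)
        if expand and value is not None:
            # Naive single-pass ${VAR} substitution, enough for the sketch.
            for key, val in self._vars.items():
                value = value.replace('${%s}' % key, val)
        return value

d = MiniData()
d.setVar('TMPDIR', '/build/tmp')
d.setVar('IMAGE_ROOTFS', '${TMPDIR}/rootfs')
assert d.getVar('IMAGE_ROOTFS') == d.getVar('IMAGE_ROOTFS', True) == '/build/tmp/rootfs'
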
diff --git a/meta/lib/oe/sdk.py b/meta/lib/oe/sdk.py
index c74525f929..fef02d0777 100644
--- a/meta/lib/oe/sdk.py
+++ b/meta/lib/oe/sdk.py
@@ -11,16 +11,16 @@ import traceback
 class Sdk(object, metaclass=ABCMeta):
     def __init__(self, d, manifest_dir):
         self.d = d
-        self.sdk_output = self.d.getVar('SDK_OUTPUT', True)
-        self.sdk_native_path = self.d.getVar('SDKPATHNATIVE', True).strip('/')
-        self.target_path = self.d.getVar('SDKTARGETSYSROOT', True).strip('/')
-        self.sysconfdir = self.d.getVar('sysconfdir', True).strip('/')
+        self.sdk_output = self.d.getVar('SDK_OUTPUT')
+        self.sdk_native_path = self.d.getVar('SDKPATHNATIVE').strip('/')
+        self.target_path = self.d.getVar('SDKTARGETSYSROOT').strip('/')
+        self.sysconfdir = self.d.getVar('sysconfdir').strip('/')

         self.sdk_target_sysroot = os.path.join(self.sdk_output, self.target_path)
         self.sdk_host_sysroot = self.sdk_output

         if manifest_dir is None:
-            self.manifest_dir = self.d.getVar("SDK_DIR", True)
+            self.manifest_dir = self.d.getVar("SDK_DIR")
         else:
             self.manifest_dir = manifest_dir

@@ -40,12 +40,12 @@ class Sdk(object, metaclass=ABCMeta):

         # Don't ship any libGL in the SDK
         self.remove(os.path.join(self.sdk_output, self.sdk_native_path,
-                                 self.d.getVar('libdir_nativesdk', True).strip('/'),
+                                 self.d.getVar('libdir_nativesdk').strip('/'),
                                  "libGL*"))

         # Fix or remove broken .la files
         self.remove(os.path.join(self.sdk_output, self.sdk_native_path,
-                                 self.d.getVar('libdir_nativesdk', True).strip('/'),
+                                 self.d.getVar('libdir_nativesdk').strip('/'),
                                  "*.la"))

         # Link the ld.so.cache file into the hosts filesystem
@@ -54,7 +54,7 @@ class Sdk(object, metaclass=ABCMeta):
         self.mkdirhier(os.path.dirname(link_name))
         os.symlink("/etc/ld.so.cache", link_name)

-        execute_pre_post_process(self.d, self.d.getVar('SDK_POSTPROCESS_COMMAND', True))
+        execute_pre_post_process(self.d, self.d.getVar('SDK_POSTPROCESS_COMMAND'))

     def movefile(self, sourcefile, destdir):
         try:
@@ -102,7 +102,7 @@ class RpmSdk(Sdk):

         self.target_pm = RpmPM(d,
                                self.sdk_target_sysroot,
-                               self.d.getVar('TARGET_VENDOR', True),
+                               self.d.getVar('TARGET_VENDOR'),
                                'target',
                                target_providename
                                )
@@ -118,7 +118,7 @@ class RpmSdk(Sdk):

         self.host_pm = RpmPM(d,
                              self.sdk_host_sysroot,
-                             self.d.getVar('SDK_VENDOR', True),
+                             self.d.getVar('SDK_VENDOR'),
                              'host',
                              sdk_providename,
                              "SDK_PACKAGE_ARCHS",
@@ -149,9 +149,9 @@ class RpmSdk(Sdk):
         bb.note("Installing TARGET packages")
         self._populate_sysroot(self.target_pm, self.target_manifest)

-        self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY', True))
+        self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY'))

-        execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND", True))
+        execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND"))

         if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d):
             self.target_pm.remove_packaging_data()
@@ -159,7 +159,7 @@ class RpmSdk(Sdk):
         bb.note("Installing NATIVESDK packages")
         self._populate_sysroot(self.host_pm, self.host_manifest)

-        execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND", True))
+        execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND"))

         if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d):
             self.host_pm.remove_packaging_data()
@@ -167,7 +167,7 @@ class RpmSdk(Sdk):
         # Move host RPM library data
         native_rpm_state_dir = os.path.join(self.sdk_output,
                                             self.sdk_native_path,
-                                            self.d.getVar('localstatedir_nativesdk', True).strip('/'),
+                                            self.d.getVar('localstatedir_nativesdk').strip('/'),
                                             "lib",
                                             "rpm"
                                             )
@@ -197,8 +197,8 @@ class OpkgSdk(Sdk):
     def __init__(self, d, manifest_dir=None):
         super(OpkgSdk, self).__init__(d, manifest_dir)

-        self.target_conf = self.d.getVar("IPKGCONF_TARGET", True)
-        self.host_conf = self.d.getVar("IPKGCONF_SDK", True)
+        self.target_conf = self.d.getVar("IPKGCONF_TARGET")
+        self.host_conf = self.d.getVar("IPKGCONF_SDK")

         self.target_manifest = OpkgManifest(d, self.manifest_dir,
                                             Manifest.MANIFEST_TYPE_SDK_TARGET)
@@ -206,15 +206,15 @@ class OpkgSdk(Sdk):
                                           Manifest.MANIFEST_TYPE_SDK_HOST)

         self.target_pm = OpkgPM(d, self.sdk_target_sysroot, self.target_conf,
-                                self.d.getVar("ALL_MULTILIB_PACKAGE_ARCHS", True))
+                                self.d.getVar("ALL_MULTILIB_PACKAGE_ARCHS"))

         self.host_pm = OpkgPM(d, self.sdk_host_sysroot, self.host_conf,
-                              self.d.getVar("SDK_PACKAGE_ARCHS", True))
+                              self.d.getVar("SDK_PACKAGE_ARCHS"))

     def _populate_sysroot(self, pm, manifest):
         pkgs_to_install = manifest.parse_initial_manifest()

-        if (self.d.getVar('BUILD_IMAGES_FROM_FEEDS', True) or "") != "1":
+        if (self.d.getVar('BUILD_IMAGES_FROM_FEEDS') or "") != "1":
             pm.write_index()

         pm.update()
@@ -228,9 +228,9 @@ class OpkgSdk(Sdk):
         bb.note("Installing TARGET packages")
         self._populate_sysroot(self.target_pm, self.target_manifest)

-        self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY', True))
+        self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY'))

-        execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND", True))
+        execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND"))

         if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d):
             self.target_pm.remove_packaging_data()
@@ -238,7 +238,7 @@ class OpkgSdk(Sdk):
         bb.note("Installing NATIVESDK packages")
         self._populate_sysroot(self.host_pm, self.host_manifest)

-        execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND", True))
+        execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND"))

         if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d):
             self.host_pm.remove_packaging_data()
@@ -257,7 +257,7 @@ class OpkgSdk(Sdk):
                                    os.path.basename(self.host_conf)), 0o644)

         native_opkg_state_dir = os.path.join(self.sdk_output, self.sdk_native_path,
-                                             self.d.getVar('localstatedir_nativesdk', True).strip('/'),
+                                             self.d.getVar('localstatedir_nativesdk').strip('/'),
                                              "lib", "opkg")
         self.mkdirhier(native_opkg_state_dir)
         for f in glob.glob(os.path.join(self.sdk_output, "var", "lib", "opkg", "*")):
@@ -270,8 +270,8 @@ class DpkgSdk(Sdk):
     def __init__(self, d, manifest_dir=None):
         super(DpkgSdk, self).__init__(d, manifest_dir)

-        self.target_conf_dir = os.path.join(self.d.getVar("APTCONF_TARGET", True), "apt")
-        self.host_conf_dir = os.path.join(self.d.getVar("APTCONF_TARGET", True), "apt-sdk")
+        self.target_conf_dir = os.path.join(self.d.getVar("APTCONF_TARGET"), "apt")
+        self.host_conf_dir = os.path.join(self.d.getVar("APTCONF_TARGET"), "apt-sdk")

         self.target_manifest = DpkgManifest(d, self.manifest_dir,
                                             Manifest.MANIFEST_TYPE_SDK_TARGET)
@@ -279,17 +279,17 @@ class DpkgSdk(Sdk):
                                           Manifest.MANIFEST_TYPE_SDK_HOST)

         self.target_pm = DpkgPM(d, self.sdk_target_sysroot,
-                                self.d.getVar("PACKAGE_ARCHS", True),
-                                self.d.getVar("DPKG_ARCH", True),
+                                self.d.getVar("PACKAGE_ARCHS"),
+                                self.d.getVar("DPKG_ARCH"),
                                 self.target_conf_dir)

         self.host_pm = DpkgPM(d, self.sdk_host_sysroot,
-                              self.d.getVar("SDK_PACKAGE_ARCHS", True),
-                              self.d.getVar("DEB_SDK_ARCH", True),
+                              self.d.getVar("SDK_PACKAGE_ARCHS"),
+                              self.d.getVar("DEB_SDK_ARCH"),
                               self.host_conf_dir)

     def _copy_apt_dir_to(self, dst_dir):
-        staging_etcdir_native = self.d.getVar("STAGING_ETCDIR_NATIVE", True)
+        staging_etcdir_native = self.d.getVar("STAGING_ETCDIR_NATIVE")

         self.remove(dst_dir, True)

@@ -310,9 +310,9 @@ class DpkgSdk(Sdk):
         bb.note("Installing TARGET packages")
         self._populate_sysroot(self.target_pm, self.target_manifest)

-        self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY', True))
+        self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY'))

-        execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND", True))
+        execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND"))

         self._copy_apt_dir_to(os.path.join(self.sdk_target_sysroot, "etc", "apt"))

@@ -322,7 +322,7 @@ class DpkgSdk(Sdk):
         bb.note("Installing NATIVESDK packages")
         self._populate_sysroot(self.host_pm, self.host_manifest)

-        execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND", True))
+        execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND"))

         self._copy_apt_dir_to(os.path.join(self.sdk_output, self.sdk_native_path,
                                            "etc", "apt"))
@@ -341,26 +341,26 @@ class DpkgSdk(Sdk):

 def sdk_list_installed_packages(d, target, rootfs_dir=None):
     if rootfs_dir is None:
-        sdk_output = d.getVar('SDK_OUTPUT', True)
-        target_path = d.getVar('SDKTARGETSYSROOT', True).strip('/')
+        sdk_output = d.getVar('SDK_OUTPUT')
+        target_path = d.getVar('SDKTARGETSYSROOT').strip('/')

         rootfs_dir = [sdk_output, os.path.join(sdk_output, target_path)][target is True]

-    img_type = d.getVar('IMAGE_PKGTYPE', True)
+    img_type = d.getVar('IMAGE_PKGTYPE')
     if img_type == "rpm":
         arch_var = ["SDK_PACKAGE_ARCHS", None][target is True]
         os_var = ["SDK_OS", None][target is True]
         return RpmPkgsList(d, rootfs_dir, arch_var, os_var).list_pkgs()
     elif img_type == "ipk":
         conf_file_var = ["IPKGCONF_SDK", "IPKGCONF_TARGET"][target is True]
-        return OpkgPkgsList(d, rootfs_dir, d.getVar(conf_file_var, True)).list_pkgs()
+        return OpkgPkgsList(d, rootfs_dir, d.getVar(conf_file_var)).list_pkgs()
     elif img_type == "deb":
         return DpkgPkgsList(d, rootfs_dir).list_pkgs()

 def populate_sdk(d, manifest_dir=None):
     env_bkp = os.environ.copy()

-    img_type = d.getVar('IMAGE_PKGTYPE', True)
+    img_type = d.getVar('IMAGE_PKGTYPE')
     if img_type == "rpm":
         RpmSdk(d, manifest_dir).populate()
     elif img_type == "ipk":
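One behaviour the conversion does not change: getVar() still returns None for unset variables, which is why call sites such as the BUILD_IMAGES_FROM_FEEDS checks above keep their `or ""` guards. A small sketch with a hypothetical dict-backed stub, not the real datastore:

class StubData:
    # Hypothetical stand-in for the datastore, for illustration only.
    def __init__(self, values):
        self._values = values
    def getVar(self, name, expand=True):
        return self._values.get(name)  # None when unset, as with BitBake

def feeds_enabled(d):
    # getVar() returns None for unset variables, hence the `or ""` guard.
    return (d.getVar('BUILD_IMAGES_FROM_FEEDS') or "") == "1"

assert feeds_enabled(StubData({})) is False
assert feeds_enabled(StubData({'BUILD_IMAGES_FROM_FEEDS': '1'})) is True
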
diff --git a/meta/lib/oe/sstatesig.py b/meta/lib/oe/sstatesig.py
index 8224e3a12e..e053c37e96 100644
--- a/meta/lib/oe/sstatesig.py
+++ b/meta/lib/oe/sstatesig.py
@@ -63,10 +63,10 @@ def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCache):

 def sstate_lockedsigs(d):
     sigs = {}
-    types = (d.getVar("SIGGEN_LOCKEDSIGS_TYPES", True) or "").split()
+    types = (d.getVar("SIGGEN_LOCKEDSIGS_TYPES") or "").split()
     for t in types:
         siggen_lockedsigs_var = "SIGGEN_LOCKEDSIGS_%s" % t
-        lockedsigs = (d.getVar(siggen_lockedsigs_var, True) or "").split()
+        lockedsigs = (d.getVar(siggen_lockedsigs_var) or "").split()
         for ls in lockedsigs:
             pn, task, h = ls.split(":", 2)
             if pn not in sigs:
@@ -77,8 +77,8 @@ def sstate_lockedsigs(d):
 class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic):
     name = "OEBasic"
     def init_rundepcheck(self, data):
-        self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE", True) or "").split()
-        self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", True) or "").split()
+        self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
+        self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
         pass
     def rundep_check(self, fn, recipename, task, dep, depname, dataCache = None):
         return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCache)
@@ -86,15 +86,15 @@ class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic):
 class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash):
     name = "OEBasicHash"
     def init_rundepcheck(self, data):
-        self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE", True) or "").split()
-        self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", True) or "").split()
+        self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
+        self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
         self.lockedsigs = sstate_lockedsigs(data)
         self.lockedhashes = {}
         self.lockedpnmap = {}
         self.lockedhashfn = {}
-        self.machine = data.getVar("MACHINE", True)
+        self.machine = data.getVar("MACHINE")
         self.mismatch_msgs = []
-        self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES", True) or
+        self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES") or
                                 "").split()
         self.unlockedrecipes = { k: "" for k in self.unlockedrecipes }
         pass
@@ -224,13 +224,13 @@ class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash):
                     sstate_missing_msgs.append("Locked sig is set for %s:%s (%s) yet not in sstate cache?"
                                                % (pn, sq_task[task], sq_hash[task]))

-        checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK", True)
+        checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK")
         if checklevel == 'warn':
             warn_msgs += self.mismatch_msgs
         elif checklevel == 'error':
             error_msgs += self.mismatch_msgs

-        checklevel = d.getVar("SIGGEN_LOCKEDSIGS_SSTATE_EXISTS_CHECK", True)
+        checklevel = d.getVar("SIGGEN_LOCKEDSIGS_SSTATE_EXISTS_CHECK")
         if checklevel == 'warn':
             warn_msgs += sstate_missing_msgs
         elif checklevel == 'error':
@@ -274,7 +274,7 @@ def find_siginfo(pn, taskname, taskhashlist, d):
         localdata.setVar('PV', '*')
         localdata.setVar('PR', '*')
         localdata.setVar('EXTENDPE', '')
-        stamp = localdata.getVar('STAMP', True)
+        stamp = localdata.getVar('STAMP')
         if pn.startswith("gcc-source"):
             # gcc-source shared workdir is a special case :(
             stamp = localdata.expand("${STAMPS_DIR}/work-shared/gcc-${PV}-${PR}")
@@ -309,18 +309,18 @@ def find_siginfo(pn, taskname, taskhashlist, d):
         localdata.setVar('PV', '*')
         localdata.setVar('PR', '*')
         localdata.setVar('BB_TASKHASH', hashval)
-        swspec = localdata.getVar('SSTATE_SWSPEC', True)
+        swspec = localdata.getVar('SSTATE_SWSPEC')
         if taskname in ['do_fetch', 'do_unpack', 'do_patch', 'do_populate_lic', 'do_preconfigure'] and swspec:
             localdata.setVar('SSTATE_PKGSPEC', '${SSTATE_SWSPEC}')
         elif pn.endswith('-native') or "-cross-" in pn or "-crosssdk-" in pn:
             localdata.setVar('SSTATE_EXTRAPATH', "${NATIVELSBSTRING}/")
         sstatename = taskname[3:]
-        filespec = '%s_%s.*.siginfo' % (localdata.getVar('SSTATE_PKG', True), sstatename)
+        filespec = '%s_%s.*.siginfo' % (localdata.getVar('SSTATE_PKG'), sstatename)

         if hashval != '*':
-            sstatedir = "%s/%s" % (d.getVar('SSTATE_DIR', True), hashval[:2])
+            sstatedir = "%s/%s" % (d.getVar('SSTATE_DIR'), hashval[:2])
         else:
-            sstatedir = d.getVar('SSTATE_DIR', True)
+            sstatedir = d.getVar('SSTATE_DIR')

         for root, dirs, files in os.walk(sstatedir):
             for fn in files:
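For reference, the locked-signature entries handled above are plain pn:task:hash triples; splitting on the first two colons keeps any colons inside the hash intact. A standalone sketch of that parsing:

def parse_locked_sigs(lines):
    # Mirrors sstate_lockedsigs() above: split "pn:task:hash" on the first
    # two colons only, so the hash itself may contain colons.
    sigs = {}
    for ls in lines:
        pn, task, h = ls.split(":", 2)
        sigs.setdefault(pn, {})[task] = h
    return sigs

assert parse_locked_sigs(["zlib:do_compile:abc123"]) == {"zlib": {"do_compile": "abc123"}}
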
diff --git a/meta/lib/oe/terminal.py b/meta/lib/oe/terminal.py
index a89fa45691..0426e15834 100644
--- a/meta/lib/oe/terminal.py
+++ b/meta/lib/oe/terminal.py
@@ -196,7 +196,7 @@ class Custom(Terminal):
     priority = 3

     def __init__(self, sh_cmd, title=None, env=None, d=None):
-        self.command = d and d.getVar('OE_TERMINAL_CUSTOMCMD', True)
+        self.command = d and d.getVar('OE_TERMINAL_CUSTOMCMD')
         if self.command:
             if not '{command}' in self.command:
                 self.command += ' {command}'
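For context, the surrounding logic guarantees the custom terminal command carries a {command} placeholder so the shell command can be substituted in later. A standalone sketch of that normalisation:

def normalise_custom_cmd(command):
    # The OE_TERMINAL_CUSTOMCMD handling above: append a {command}
    # placeholder when the user's template lacks one.
    if command and '{command}' not in command:
        command += ' {command}'
    return command

assert normalise_custom_cmd('xterm -e') == 'xterm -e {command}'
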
diff --git a/meta/lib/oe/utils.py b/meta/lib/oe/utils.py
index 2b095f1f0a..bb3f0e5d75 100644
--- a/meta/lib/oe/utils.py
+++ b/meta/lib/oe/utils.py
@@ -23,13 +23,13 @@ def ifelse(condition, iftrue = True, iffalse = False):
         return iffalse

 def conditional(variable, checkvalue, truevalue, falsevalue, d):
-    if d.getVar(variable, True) == checkvalue:
+    if d.getVar(variable) == checkvalue:
         return truevalue
     else:
         return falsevalue

 def less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
-    if float(d.getVar(variable, True)) <= float(checkvalue):
+    if float(d.getVar(variable)) <= float(checkvalue):
         return truevalue
     else:
         return falsevalue
@@ -42,8 +42,8 @@ def version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
         return falsevalue

 def both_contain(variable1, variable2, checkvalue, d):
-    val1 = d.getVar(variable1, True)
-    val2 = d.getVar(variable2, True)
+    val1 = d.getVar(variable1)
+    val2 = d.getVar(variable2)
     val1 = set(val1.split())
     val2 = set(val2.split())
     if isinstance(checkvalue, str):
@@ -66,8 +66,8 @@ def set_intersect(variable1, variable2, d):
     s3 = set_intersect(s1, s2)
     => s3 = "b c"
     """
-    val1 = set(d.getVar(variable1, True).split())
-    val2 = set(d.getVar(variable2, True).split())
+    val1 = set(d.getVar(variable1).split())
+    val2 = set(d.getVar(variable2).split())
     return " ".join(val1 & val2)

 def prune_suffix(var, suffixes, d):
@@ -77,7 +77,7 @@ def prune_suffix(var, suffixes, d):
         if var.endswith(suffix):
             var = var.replace(suffix, "")

-    prefix = d.getVar("MLPREFIX", True)
+    prefix = d.getVar("MLPREFIX")
     if prefix and var.startswith(prefix):
         var = var.replace(prefix, "")

@@ -115,9 +115,9 @@ def features_backfill(var,d):
     # disturbing distributions that have already set DISTRO_FEATURES.
     # Distributions wanting to elide a value in DISTRO_FEATURES_BACKFILL should
     # add the feature to DISTRO_FEATURES_BACKFILL_CONSIDERED
-    features = (d.getVar(var, True) or "").split()
-    backfill = (d.getVar(var+"_BACKFILL", True) or "").split()
-    considered = (d.getVar(var+"_BACKFILL_CONSIDERED", True) or "").split()
+    features = (d.getVar(var) or "").split()
+    backfill = (d.getVar(var+"_BACKFILL") or "").split()
+    considered = (d.getVar(var+"_BACKFILL_CONSIDERED") or "").split()

     addfeatures = []
     for feature in backfill:
@@ -133,12 +133,12 @@ def packages_filter_out_system(d):
     Return a list of packages from PACKAGES with the "system" packages such as
     PN-dbg PN-doc PN-locale-eb-gb removed.
     """
-    pn = d.getVar('PN', True)
+    pn = d.getVar('PN')
     blacklist = [pn + suffix for suffix in ('', '-dbg', '-dev', '-doc', '-locale', '-staticdev')]
     localepkg = pn + "-locale-"
     pkgs = []

-    for pkg in d.getVar('PACKAGES', True).split():
+    for pkg in d.getVar('PACKAGES').split():
         if pkg not in blacklist and localepkg not in pkg:
             pkgs.append(pkg)
     return pkgs
@@ -231,7 +231,7 @@ def format_pkg_list(pkg_dict, ret_format=None):
     return '\n'.join(output)

 def host_gcc_version(d):
-    compiler = d.getVar("BUILD_CC", True)
+    compiler = d.getVar("BUILD_CC")
     retval, output = getstatusoutput("%s --version" % compiler)
     if retval:
         bb.fatal("Error running %s --version: %s" % (compiler, output))
@@ -316,8 +316,8 @@ def write_ld_so_conf(d):
         bb.utils.remove(ldsoconf)
     bb.utils.mkdirhier(os.path.dirname(ldsoconf))
     with open(ldsoconf, "w") as f:
-        f.write(d.getVar("base_libdir", True) + '\n')
-        f.write(d.getVar("libdir", True) + '\n')
+        f.write(d.getVar("base_libdir") + '\n')
+        f.write(d.getVar("libdir") + '\n')

 class ImageQAFailed(bb.build.FuncFailed):
     def __init__(self, description, name=None, logfile=None):
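The features_backfill() hunk above only touches the getVar calls; the backfill rule itself is unchanged. The same rule on plain lists, as an illustrative sketch (the feature names are examples only):

def backfill(features, backfill_list, considered):
    # Same rule as features_backfill() above: add each backfill feature
    # unless it is present already or explicitly elided via *_CONSIDERED.
    return features + [f for f in backfill_list
                       if f not in features and f not in considered]

assert backfill(["x11"], ["pulseaudio", "ldconfig"], ["pulseaudio"]) == ["x11", "ldconfig"]
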
diff --git a/meta/lib/oeqa/controllers/masterimage.py b/meta/lib/oeqa/controllers/masterimage.py
index 9ce3bf803d..d796fc3c30 100644
--- a/meta/lib/oeqa/controllers/masterimage.py
+++ b/meta/lib/oeqa/controllers/masterimage.py
@@ -32,14 +32,14 @@ class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta
         super(MasterImageHardwareTarget, self).__init__(d)

         # target ip
-        addr = d.getVar("TEST_TARGET_IP", True) or bb.fatal('Please set TEST_TARGET_IP with the IP address of the machine you want to run the tests on.')
+        addr = d.getVar("TEST_TARGET_IP") or bb.fatal('Please set TEST_TARGET_IP with the IP address of the machine you want to run the tests on.')
         self.ip = addr.split(":")[0]
         try:
             self.port = addr.split(":")[1]
         except IndexError:
             self.port = None
         bb.note("Target IP: %s" % self.ip)
-        self.server_ip = d.getVar("TEST_SERVER_IP", True)
+        self.server_ip = d.getVar("TEST_SERVER_IP")
         if not self.server_ip:
             try:
                 self.server_ip = subprocess.check_output(['ip', 'route', 'get', self.ip ]).split("\n")[0].split()[-1]
@@ -49,8 +49,8 @@ class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta

         # test rootfs + kernel
         self.image_fstype = self.get_image_fstype(d)
-        self.rootfs = os.path.join(d.getVar("DEPLOY_DIR_IMAGE", True), d.getVar("IMAGE_LINK_NAME", True) + '.' + self.image_fstype)
-        self.kernel = os.path.join(d.getVar("DEPLOY_DIR_IMAGE", True), d.getVar("KERNEL_IMAGETYPE", False) + '-' + d.getVar('MACHINE', False) + '.bin')
+        self.rootfs = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), d.getVar("IMAGE_LINK_NAME") + '.' + self.image_fstype)
+        self.kernel = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), d.getVar("KERNEL_IMAGETYPE", False) + '-' + d.getVar('MACHINE', False) + '.bin')
         if not os.path.isfile(self.rootfs):
             # we could've checked that IMAGE_FSTYPES contains tar.gz but the config for running testimage might not be
             # the same as the config with which the image was build, ie
@@ -64,16 +64,16 @@ class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta
         # master ssh connection
         self.master = None
         # if the user knows what they are doing, then by all means...
-        self.user_cmds = d.getVar("TEST_DEPLOY_CMDS", True)
+        self.user_cmds = d.getVar("TEST_DEPLOY_CMDS")
         self.deploy_cmds = None

         # this is the name of the command that controls the power for a board
         # e.g: TEST_POWERCONTROL_CMD = "/home/user/myscripts/powercontrol.py ${MACHINE} what-ever-other-args-the-script-wants"
         # the command should take as the last argument "off" and "on" and "cycle" (off, on)
-        self.powercontrol_cmd = d.getVar("TEST_POWERCONTROL_CMD", True) or None
+        self.powercontrol_cmd = d.getVar("TEST_POWERCONTROL_CMD") or None
         self.powercontrol_args = d.getVar("TEST_POWERCONTROL_EXTRA_ARGS", False) or ""

-        self.serialcontrol_cmd = d.getVar("TEST_SERIALCONTROL_CMD", True) or None
+        self.serialcontrol_cmd = d.getVar("TEST_SERIALCONTROL_CMD") or None
         self.serialcontrol_args = d.getVar("TEST_SERIALCONTROL_EXTRA_ARGS", False) or ""

         self.origenv = os.environ
@@ -82,7 +82,7 @@ class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta
         # ssh + keys means we need the original user env
         bborigenv = d.getVar("BB_ORIGENV", False) or {}
         for key in bborigenv:
-            val = bborigenv.getVar(key, True)
+            val = bborigenv.getVar(key)
             if val is not None:
                 self.origenv[key] = str(val)

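The TEST_TARGET_IP value read above is an "ip[:port]" string; the constructor splits it exactly as in this standalone sketch:

def split_target_addr(addr):
    # Split a TEST_TARGET_IP-style "ip[:port]" value as the constructor
    # above does; port is None when absent.
    ip = addr.split(":")[0]
    try:
        port = addr.split(":")[1]
    except IndexError:
        port = None
    return ip, port

assert split_target_addr("192.168.7.2:2222") == ("192.168.7.2", "2222")
assert split_target_addr("192.168.7.2") == ("192.168.7.2", None)
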
diff --git a/meta/lib/oeqa/oetest.py b/meta/lib/oeqa/oetest.py
index 95d3bf72fc..d1aef967e4 100644
--- a/meta/lib/oeqa/oetest.py
+++ b/meta/lib/oeqa/oetest.py
@@ -221,15 +221,15 @@ class TestContext(object):
             path = [os.path.dirname(os.path.abspath(__file__))]
             extrapath = ""
         else:
-            path = d.getVar("BBPATH", True).split(':')
+            path = d.getVar("BBPATH").split(':')
             extrapath = "lib/oeqa"

         self.testslist = self._get_tests_list(path, extrapath)
         self.testsrequired = self._get_test_suites_required()

         self.filesdir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "runtime/files")
-        self.imagefeatures = d.getVar("IMAGE_FEATURES", True).split()
-        self.distrofeatures = d.getVar("DISTRO_FEATURES", True).split()
+        self.imagefeatures = d.getVar("IMAGE_FEATURES").split()
+        self.distrofeatures = d.getVar("DISTRO_FEATURES").split()

         # get testcase list from specified file
         # if path is a relative path, then relative to build/conf/
@@ -406,9 +406,9 @@ class RuntimeTestContext(TestContext):
         self.target = target

         self.pkgmanifest = {}
-        manifest = os.path.join(d.getVar("DEPLOY_DIR_IMAGE", True),
-                d.getVar("IMAGE_LINK_NAME", True) + ".manifest")
-        nomanifest = d.getVar("IMAGE_NO_MANIFEST", True)
+        manifest = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"),
+                d.getVar("IMAGE_LINK_NAME") + ".manifest")
+        nomanifest = d.getVar("IMAGE_NO_MANIFEST")
         if nomanifest is None or nomanifest != "1":
             try:
                 with open(manifest) as f:
@@ -424,19 +424,19 @@ class RuntimeTestContext(TestContext):
     def _get_test_suites(self):
         testsuites = []

-        manifests = (self.d.getVar("TEST_SUITES_MANIFEST", True) or '').split()
+        manifests = (self.d.getVar("TEST_SUITES_MANIFEST") or '').split()
         if manifests:
             for manifest in manifests:
                 testsuites.extend(self._read_testlist(manifest,
-                                  self.d.getVar("TOPDIR", True)).split())
+                                  self.d.getVar("TOPDIR")).split())

         else:
-            testsuites = self.d.getVar("TEST_SUITES", True).split()
+            testsuites = self.d.getVar("TEST_SUITES").split()

         return testsuites

     def _get_test_suites_required(self):
-        return [t for t in self.d.getVar("TEST_SUITES", True).split() if t != "auto"]
+        return [t for t in self.d.getVar("TEST_SUITES").split() if t != "auto"]

     def loadTests(self):
         super(RuntimeTestContext, self).loadTests()
@@ -449,10 +449,10 @@ class RuntimeTestContext(TestContext):
         """

         modules = self.getTestModules()
-        bbpaths = self.d.getVar("BBPATH", True).split(":")
+        bbpaths = self.d.getVar("BBPATH").split(":")

-        shutil.rmtree(self.d.getVar("TEST_EXTRACTED_DIR", True))
-        shutil.rmtree(self.d.getVar("TEST_PACKAGED_DIR", True))
+        shutil.rmtree(self.d.getVar("TEST_EXTRACTED_DIR"))
+        shutil.rmtree(self.d.getVar("TEST_PACKAGED_DIR"))
         for module in modules:
             json_file = self._getJsonFile(module)
             if json_file:
@@ -466,8 +466,8 @@ class RuntimeTestContext(TestContext):

         import oe.path

-        extracted_path = self.d.getVar("TEST_EXTRACTED_DIR", True)
-        packaged_path = self.d.getVar("TEST_PACKAGED_DIR", True)
+        extracted_path = self.d.getVar("TEST_EXTRACTED_DIR")
+        packaged_path = self.d.getVar("TEST_PACKAGED_DIR")

         for key,value in needed_packages.items():
             packages = ()
@@ -548,7 +548,7 @@ class RuntimeTestContext(TestContext):

         from oeqa.utils.package_manager import get_package_manager

-        pkg_path = os.path.join(self.d.getVar("TEST_INSTALL_TMP_DIR", True), pkg)
+        pkg_path = os.path.join(self.d.getVar("TEST_INSTALL_TMP_DIR"), pkg)
         pm = get_package_manager(self.d, pkg_path)
         extract_dir = pm.extract(pkg)
         shutil.rmtree(pkg_path)
@@ -562,8 +562,8 @@ class RuntimeTestContext(TestContext):

         from oeqa.utils.package_manager import get_package_manager

-        pkg_path = os.path.join(self.d.getVar("TEST_INSTALL_TMP_DIR", True), pkg)
-        dst_dir = self.d.getVar("TEST_PACKAGED_DIR", True)
+        pkg_path = os.path.join(self.d.getVar("TEST_INSTALL_TMP_DIR"), pkg)
+        dst_dir = self.d.getVar("TEST_PACKAGED_DIR")
         pm = get_package_manager(self.d, pkg_path)
         pkg_info = pm.package_info(pkg)
         file_path = pkg_info[pkg]["filepath"]
@@ -611,7 +611,7 @@ class ImageTestContext(RuntimeTestContext):
     def __init__(self, d, target, host_dumper):
         super(ImageTestContext, self).__init__(d, target)

-        self.tagexp = d.getVar("TEST_SUITES_TAGS", True)
+        self.tagexp = d.getVar("TEST_SUITES_TAGS")

         self.host_dumper = host_dumper

@@ -629,7 +629,7 @@ class ImageTestContext(RuntimeTestContext):
         Check if the test requires a package and Install/Unistall it in the DUT
         """

-        pkg_dir = self.d.getVar("TEST_EXTRACTED_DIR", True)
+        pkg_dir = self.d.getVar("TEST_EXTRACTED_DIR")
         super(ImageTestContext, self).install_uninstall_packages(test_id, pkg_dir, install)

 class ExportTestContext(RuntimeTestContext):
@@ -643,7 +643,7 @@ class ExportTestContext(RuntimeTestContext):
         super(ExportTestContext, self).__init__(d, target, exported)

         tag = parsedArgs.get("tag", None)
-        self.tagexp = tag if tag != None else d.getVar("TEST_SUITES_TAGS", True)
+        self.tagexp = tag if tag != None else d.getVar("TEST_SUITES_TAGS")

         self.sigterm = None

@@ -653,7 +653,7 @@ class ExportTestContext(RuntimeTestContext):
         """

         export_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
-        extracted_dir = self.d.getVar("TEST_EXPORT_EXTRACTED_DIR", True)
+        extracted_dir = self.d.getVar("TEST_EXPORT_EXTRACTED_DIR")
         pkg_dir = os.path.join(export_dir, extracted_dir)
         super(ExportTestContext, self).install_uninstall_packages(test_id, pkg_dir, install)

@@ -666,7 +666,7 @@ class SDKTestContext(TestContext):
         self.tcname = tcname

         if not hasattr(self, 'target_manifest'):
-            self.target_manifest = d.getVar("SDK_TARGET_MANIFEST", True)
+            self.target_manifest = d.getVar("SDK_TARGET_MANIFEST")
         try:
             self.pkgmanifest = {}
             with open(self.target_manifest) as f:
@@ -677,7 +677,7 @@ class SDKTestContext(TestContext):
             bb.fatal("No package manifest file found. Did you build the sdk image?\n%s" % e)

         if not hasattr(self, 'host_manifest'):
-            self.host_manifest = d.getVar("SDK_HOST_MANIFEST", True)
+            self.host_manifest = d.getVar("SDK_HOST_MANIFEST")
         try:
             with open(self.host_manifest) as f:
                 self.hostpkgmanifest = f.read()
@@ -688,16 +688,16 @@ class SDKTestContext(TestContext):
         return "sdk"

     def _get_test_suites(self):
-        return (self.d.getVar("TEST_SUITES_SDK", True) or "auto").split()
+        return (self.d.getVar("TEST_SUITES_SDK") or "auto").split()

     def _get_test_suites_required(self):
-        return [t for t in (self.d.getVar("TEST_SUITES_SDK", True) or \
+        return [t for t in (self.d.getVar("TEST_SUITES_SDK") or \
                 "auto").split() if t != "auto"]

 class SDKExtTestContext(SDKTestContext):
     def __init__(self, d, sdktestdir, sdkenv, tcname, *args):
-        self.target_manifest = d.getVar("SDK_EXT_TARGET_MANIFEST", True)
-        self.host_manifest = d.getVar("SDK_EXT_HOST_MANIFEST", True)
+        self.target_manifest = d.getVar("SDK_EXT_TARGET_MANIFEST")
+        self.host_manifest = d.getVar("SDK_EXT_HOST_MANIFEST")
         if args:
             self.cm = args[0] # Compatibility mode for run SDK tests
         else:
@@ -715,8 +715,8 @@ class SDKExtTestContext(SDKTestContext):
         return "sdkext"

     def _get_test_suites(self):
-        return (self.d.getVar("TEST_SUITES_SDK_EXT", True) or "auto").split()
+        return (self.d.getVar("TEST_SUITES_SDK_EXT") or "auto").split()

     def _get_test_suites_required(self):
-        return [t for t in (self.d.getVar("TEST_SUITES_SDK_EXT", True) or \
+        return [t for t in (self.d.getVar("TEST_SUITES_SDK_EXT") or \
                 "auto").split() if t != "auto"]
diff --git a/meta/lib/oeqa/runexported.py b/meta/lib/oeqa/runexported.py
index 7e245c4120..9cfea0f7ab 100755
--- a/meta/lib/oeqa/runexported.py
+++ b/meta/lib/oeqa/runexported.py
@@ -43,8 +43,8 @@ class FakeTarget(object):
         self.ip = None
         self.server_ip = None
         self.datetime = time.strftime('%Y%m%d%H%M%S',time.gmtime())
-        self.testdir = d.getVar("TEST_LOG_DIR", True)
-        self.pn = d.getVar("PN", True)
+        self.testdir = d.getVar("TEST_LOG_DIR")
+        self.pn = d.getVar("PN")

     def exportStart(self):
         self.sshlog = os.path.join(self.testdir, "ssh_target_log.%s" % self.datetime)
@@ -130,8 +130,8 @@ def extract_sdk(d):
     """

     export_dir = os.path.dirname(os.path.realpath(__file__))
-    tools_dir = d.getVar("TEST_EXPORT_SDK_DIR", True)
-    tarball_name = "%s.sh" % d.getVar("TEST_EXPORT_SDK_NAME", True)
+    tools_dir = d.getVar("TEST_EXPORT_SDK_DIR")
+    tarball_name = "%s.sh" % d.getVar("TEST_EXPORT_SDK_NAME")
     tarball_path = os.path.join(export_dir, tools_dir, tarball_name)
     extract_path = os.path.join(export_dir, "sysroot")
     if os.path.isfile(tarball_path):
diff --git a/meta/lib/oeqa/runtime/_ptest.py b/meta/lib/oeqa/runtime/_ptest.py
index 71324d3da2..cfb4041f18 100644
--- a/meta/lib/oeqa/runtime/_ptest.py
+++ b/meta/lib/oeqa/runtime/_ptest.py
@@ -13,7 +13,7 @@ def setUpModule():
13 skipModule("Image doesn't have package management feature") 13 skipModule("Image doesn't have package management feature")
14 if not oeRuntimeTest.hasPackage("smartpm"): 14 if not oeRuntimeTest.hasPackage("smartpm"):
15 skipModule("Image doesn't have smart installed") 15 skipModule("Image doesn't have smart installed")
16 if "package_rpm" != oeRuntimeTest.tc.d.getVar("PACKAGE_CLASSES", True).split()[0]: 16 if "package_rpm" != oeRuntimeTest.tc.d.getVar("PACKAGE_CLASSES").split()[0]:
17 skipModule("Rpm is not the primary package manager") 17 skipModule("Rpm is not the primary package manager")
18 18
19class PtestRunnerTest(oeRuntimeTest): 19class PtestRunnerTest(oeRuntimeTest):
@@ -57,7 +57,7 @@ class PtestRunnerTest(oeRuntimeTest):
57# (status, result) = oeRuntimeTest.tc.target.run('smart channel --show | grep "\["', 0) 57# (status, result) = oeRuntimeTest.tc.target.run('smart channel --show | grep "\["', 0)
58# for x in result.split("\n"): 58# for x in result.split("\n"):
59# self.existingchannels.add(x) 59# self.existingchannels.add(x)
60 self.repo_server = HTTPService(oeRuntimeTest.tc.d.getVar('DEPLOY_DIR', True), oeRuntimeTest.tc.target.server_ip) 60 self.repo_server = HTTPService(oeRuntimeTest.tc.d.getVar('DEPLOY_DIR'), oeRuntimeTest.tc.target.server_ip)
61 self.repo_server.start() 61 self.repo_server.start()
62 62
63 @classmethod 63 @classmethod
@@ -70,23 +70,23 @@ class PtestRunnerTest(oeRuntimeTest):
 #            oeRuntimeTest.tc.target.run('smart channel --remove '+x[1:-1]+' -y', 0)

     def add_smart_channel(self):
-        image_pkgtype = self.tc.d.getVar('IMAGE_PKGTYPE', True)
+        image_pkgtype = self.tc.d.getVar('IMAGE_PKGTYPE')
         deploy_url = 'http://%s:%s/%s' %(self.target.server_ip, self.repo_server.port, image_pkgtype)
-        pkgarchs = self.tc.d.getVar('PACKAGE_ARCHS', True).replace("-","_").split()
+        pkgarchs = self.tc.d.getVar('PACKAGE_ARCHS').replace("-","_").split()
         for arch in os.listdir('%s/%s' % (self.repo_server.root_dir, image_pkgtype)):
             if arch in pkgarchs:
                 self.target.run('smart channel -y --add {a} type=rpm-md baseurl={u}/{a}'.format(a=arch, u=deploy_url), 0)
         self.target.run('smart update', 0)

     def install_complementary(self, globs=None):
-        installed_pkgs_file = os.path.join(oeRuntimeTest.tc.d.getVar('WORKDIR', True),
+        installed_pkgs_file = os.path.join(oeRuntimeTest.tc.d.getVar('WORKDIR'),
                                            "installed_pkgs.txt")
-        self.pkgs_list = RpmPkgsList(oeRuntimeTest.tc.d, oeRuntimeTest.tc.d.getVar('IMAGE_ROOTFS', True), oeRuntimeTest.tc.d.getVar('arch_var', True), oeRuntimeTest.tc.d.getVar('os_var', True))
+        self.pkgs_list = RpmPkgsList(oeRuntimeTest.tc.d, oeRuntimeTest.tc.d.getVar('IMAGE_ROOTFS'), oeRuntimeTest.tc.d.getVar('arch_var'), oeRuntimeTest.tc.d.getVar('os_var'))
         with open(installed_pkgs_file, "w+") as installed_pkgs:
             installed_pkgs.write(self.pkgs_list.list("arch"))

         cmd = [bb.utils.which(os.getenv('PATH'), "oe-pkgdata-util"),
-               "-p", oeRuntimeTest.tc.d.getVar('PKGDATA_DIR', True), "glob", installed_pkgs_file,
+               "-p", oeRuntimeTest.tc.d.getVar('PKGDATA_DIR'), "glob", installed_pkgs_file,
                globs]
         try:
             bb.note("Installing complementary packages ...")
@@ -99,7 +99,7 @@ class PtestRunnerTest(oeRuntimeTest):
         return complementary_pkgs.split()

     def setUpLocal(self):
-        self.ptest_log = os.path.join(oeRuntimeTest.tc.d.getVar("TEST_LOG_DIR",True), "ptest-%s.log" % oeRuntimeTest.tc.d.getVar('DATETIME', True))
+        self.ptest_log = os.path.join(oeRuntimeTest.tc.d.getVar("TEST_LOG_DIR",True), "ptest-%s.log" % oeRuntimeTest.tc.d.getVar('DATETIME'))

     @skipUnlessPassed('test_ssh')
     def test_ptestrunner(self):
diff --git a/meta/lib/oeqa/runtime/date.py b/meta/lib/oeqa/runtime/date.py
index 447987e075..6f3516a92f 100644
--- a/meta/lib/oeqa/runtime/date.py
+++ b/meta/lib/oeqa/runtime/date.py
@@ -5,11 +5,11 @@ import re
 class DateTest(oeRuntimeTest):

     def setUpLocal(self):
-        if oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager", True) == "systemd":
+        if oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager") == "systemd":
             self.target.run('systemctl stop systemd-timesyncd')

     def tearDownLocal(self):
-        if oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager", True) == "systemd":
+        if oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager") == "systemd":
             self.target.run('systemctl start systemd-timesyncd')

     @testcase(211)
diff --git a/meta/lib/oeqa/runtime/multilib.py b/meta/lib/oeqa/runtime/multilib.py
index 593d385021..5cce24f5f4 100644
--- a/meta/lib/oeqa/runtime/multilib.py
+++ b/meta/lib/oeqa/runtime/multilib.py
@@ -3,7 +3,7 @@ from oeqa.oetest import oeRuntimeTest, skipModule
 from oeqa.utils.decorators import *

 def setUpModule():
-    multilibs = oeRuntimeTest.tc.d.getVar("MULTILIBS", True) or ""
+    multilibs = oeRuntimeTest.tc.d.getVar("MULTILIBS") or ""
     if "multilib:lib32" not in multilibs:
         skipModule("this isn't a multilib:lib32 image")

diff --git a/meta/lib/oeqa/runtime/parselogs.py b/meta/lib/oeqa/runtime/parselogs.py
index 3e1c7d0c30..cc2d0617f5 100644
--- a/meta/lib/oeqa/runtime/parselogs.py
+++ b/meta/lib/oeqa/runtime/parselogs.py
@@ -193,10 +193,10 @@ class ParseLogsTest(oeRuntimeTest):
         self.ignore_errors[machine] = self.ignore_errors[machine] + video_related

     def getMachine(self):
-        return oeRuntimeTest.tc.d.getVar("MACHINE", True)
+        return oeRuntimeTest.tc.d.getVar("MACHINE")

     def getWorkdir(self):
-        return oeRuntimeTest.tc.d.getVar("WORKDIR", True)
+        return oeRuntimeTest.tc.d.getVar("WORKDIR")

     #get some information on the CPU of the machine to display at the beginning of the output. This info might be useful in some cases.
     def getHardwareInfo(self):
diff --git a/meta/lib/oeqa/runtime/rpm.py b/meta/lib/oeqa/runtime/rpm.py
index 7f514ca00c..f1c4763fc0 100644
--- a/meta/lib/oeqa/runtime/rpm.py
+++ b/meta/lib/oeqa/runtime/rpm.py
@@ -7,7 +7,7 @@ from oeqa.utils.decorators import *
 def setUpModule():
     if not oeRuntimeTest.hasFeature("package-management"):
         skipModule("rpm module skipped: target doesn't have package-management in IMAGE_FEATURES")
-    if "package_rpm" != oeRuntimeTest.tc.d.getVar("PACKAGE_CLASSES", True).split()[0]:
+    if "package_rpm" != oeRuntimeTest.tc.d.getVar("PACKAGE_CLASSES").split()[0]:
         skipModule("rpm module skipped: target doesn't have rpm as primary package manager")


@@ -29,8 +29,8 @@ class RpmInstallRemoveTest(oeRuntimeTest):

     @classmethod
     def setUpClass(self):
-        pkgarch = oeRuntimeTest.tc.d.getVar('TUNE_PKGARCH', True).replace("-", "_")
-        rpmdir = os.path.join(oeRuntimeTest.tc.d.getVar('DEPLOY_DIR', True), "rpm", pkgarch)
+        pkgarch = oeRuntimeTest.tc.d.getVar('TUNE_PKGARCH').replace("-", "_")
+        rpmdir = os.path.join(oeRuntimeTest.tc.d.getVar('DEPLOY_DIR'), "rpm", pkgarch)
         # pick rpm-doc as a test file to get installed, because it's small and it will always be built for standard targets
         for f in fnmatch.filter(os.listdir(rpmdir), "rpm-doc-*.%s.rpm" % pkgarch):
             testrpmfile = f
diff --git a/meta/lib/oeqa/runtime/scp.py b/meta/lib/oeqa/runtime/scp.py
index 48e87d2d0b..cf36cfa5d5 100644
--- a/meta/lib/oeqa/runtime/scp.py
+++ b/meta/lib/oeqa/runtime/scp.py
@@ -11,7 +11,7 @@ class ScpTest(oeRuntimeTest):
     @testcase(220)
     @skipUnlessPassed('test_ssh')
     def test_scp_file(self):
-        test_log_dir = oeRuntimeTest.tc.d.getVar("TEST_LOG_DIR", True)
+        test_log_dir = oeRuntimeTest.tc.d.getVar("TEST_LOG_DIR")
         test_file_path = os.path.join(test_log_dir, 'test_scp_file')
         with open(test_file_path, 'w') as test_scp_file:
             test_scp_file.seek(2 ** 22 - 1)
diff --git a/meta/lib/oeqa/runtime/smart.py b/meta/lib/oeqa/runtime/smart.py
index 6cdb10d631..dde1c4d792 100644
--- a/meta/lib/oeqa/runtime/smart.py
+++ b/meta/lib/oeqa/runtime/smart.py
@@ -11,7 +11,7 @@ def setUpModule():
         skipModule("Image doesn't have package management feature")
     if not oeRuntimeTest.hasPackage("smartpm"):
         skipModule("Image doesn't have smart installed")
-    if "package_rpm" != oeRuntimeTest.tc.d.getVar("PACKAGE_CLASSES", True).split()[0]:
+    if "package_rpm" != oeRuntimeTest.tc.d.getVar("PACKAGE_CLASSES").split()[0]:
         skipModule("Rpm is not the primary package manager")

 class SmartTest(oeRuntimeTest):
@@ -75,16 +75,16 @@ class SmartRepoTest(SmartTest):
         rpm_createrepo = bb.utils.which(os.getenv('PATH'), "createrepo")
         index_cmds = []
         rpm_dirs_found = False
-        archs = (oeRuntimeTest.tc.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS', True) or "").replace('-', '_').split()
+        archs = (oeRuntimeTest.tc.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS') or "").replace('-', '_').split()
         for arch in archs:
-            rpm_dir = os.path.join(oeRuntimeTest.tc.d.getVar('DEPLOY_DIR_RPM', True), arch)
-            idx_path = os.path.join(oeRuntimeTest.tc.d.getVar('WORKDIR', True), 'rpm', arch)
-            db_path = os.path.join(oeRuntimeTest.tc.d.getVar('WORKDIR', True), 'rpmdb', arch)
+            rpm_dir = os.path.join(oeRuntimeTest.tc.d.getVar('DEPLOY_DIR_RPM'), arch)
+            idx_path = os.path.join(oeRuntimeTest.tc.d.getVar('WORKDIR'), 'rpm', arch)
+            db_path = os.path.join(oeRuntimeTest.tc.d.getVar('WORKDIR'), 'rpmdb', arch)
             if not os.path.isdir(rpm_dir):
                 continue
             if os.path.exists(db_path):
                 bb.utils.remove(dbpath, True)
-            lockfilename = oeRuntimeTest.tc.d.getVar('DEPLOY_DIR_RPM', True) + "/rpm.lock"
+            lockfilename = oeRuntimeTest.tc.d.getVar('DEPLOY_DIR_RPM') + "/rpm.lock"
             lf = bb.utils.lockfile(lockfilename, False)
             oe.path.copyhardlinktree(rpm_dir, idx_path)
             # Full indexes overload a 256MB image so reduce the number of rpms
@@ -98,7 +98,7 @@ class SmartRepoTest(SmartTest):
         result = oe.utils.multiprocess_exec(index_cmds, self.create_index)
         if result:
             bb.fatal('%s' % ('\n'.join(result)))
-        self.repo_server = HTTPService(oeRuntimeTest.tc.d.getVar('WORKDIR', True), oeRuntimeTest.tc.target.server_ip)
+        self.repo_server = HTTPService(oeRuntimeTest.tc.d.getVar('WORKDIR'), oeRuntimeTest.tc.target.server_ip)
         self.repo_server.start()

     @classmethod
@@ -113,9 +113,9 @@ class SmartRepoTest(SmartTest):

     @testcase(719)
     def test_smart_channel_add(self):
-        image_pkgtype = self.tc.d.getVar('IMAGE_PKGTYPE', True)
+        image_pkgtype = self.tc.d.getVar('IMAGE_PKGTYPE')
         deploy_url = 'http://%s:%s/%s' %(self.target.server_ip, self.repo_server.port, image_pkgtype)
-        pkgarchs = self.tc.d.getVar('PACKAGE_ARCHS', True).replace("-","_").split()
+        pkgarchs = self.tc.d.getVar('PACKAGE_ARCHS').replace("-","_").split()
         for arch in os.listdir('%s/%s' % (self.repo_server.root_dir, image_pkgtype)):
             if arch in pkgarchs:
                 self.smart('channel -y --add {a} type=rpm-md baseurl={u}/{a}'.format(a=arch, u=deploy_url))
diff --git a/meta/lib/oeqa/runtime/systemd.py b/meta/lib/oeqa/runtime/systemd.py
index 8de799cd63..52feb1b31e 100644
--- a/meta/lib/oeqa/runtime/systemd.py
+++ b/meta/lib/oeqa/runtime/systemd.py
@@ -6,7 +6,7 @@ from oeqa.utils.decorators import *
 def setUpModule():
     if not oeRuntimeTest.hasFeature("systemd"):
         skipModule("target doesn't have systemd in DISTRO_FEATURES")
-    if "systemd" != oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager", True):
+    if "systemd" != oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager"):
         skipModule("systemd is not the init manager for this image")


diff --git a/meta/lib/oeqa/runtime/x32lib.py b/meta/lib/oeqa/runtime/x32lib.py
index ce5e214035..2f98dbf71e 100644
--- a/meta/lib/oeqa/runtime/x32lib.py
+++ b/meta/lib/oeqa/runtime/x32lib.py
@@ -4,7 +4,7 @@ from oeqa.utils.decorators import *

 def setUpModule():
     #check if DEFAULTTUNE is set and it's value is: x86-64-x32
-    defaulttune = oeRuntimeTest.tc.d.getVar("DEFAULTTUNE", True)
+    defaulttune = oeRuntimeTest.tc.d.getVar("DEFAULTTUNE")
     if "x86-64-x32" not in defaulttune:
         skipModule("DEFAULTTUNE is not set to x86-64-x32")

diff --git a/meta/lib/oeqa/sdk/gcc.py b/meta/lib/oeqa/sdk/gcc.py
index 8395b9b908..f3f4341a20 100644
--- a/meta/lib/oeqa/sdk/gcc.py
+++ b/meta/lib/oeqa/sdk/gcc.py
@@ -5,7 +5,7 @@ from oeqa.oetest import oeSDKTest, skipModule
 from oeqa.utils.decorators import *

 def setUpModule():
-    machine = oeSDKTest.tc.d.getVar("MACHINE", True)
+    machine = oeSDKTest.tc.d.getVar("MACHINE")
     if not oeSDKTest.hasHostPackage("packagegroup-cross-canadian-" + machine):
         skipModule("SDK doesn't contain a cross-canadian toolchain")

diff --git a/meta/lib/oeqa/selftest/tinfoil.py b/meta/lib/oeqa/selftest/tinfoil.py
index 4f70e0d2f7..c8d635cd05 100644
--- a/meta/lib/oeqa/selftest/tinfoil.py
+++ b/meta/lib/oeqa/selftest/tinfoil.py
@@ -13,7 +13,7 @@ class TinfoilTests(oeSelfTest):
     def test_getvar(self):
         with bb.tinfoil.Tinfoil() as tinfoil:
             tinfoil.prepare(True)
-            machine = tinfoil.config_data.getVar('MACHINE', True)
+            machine = tinfoil.config_data.getVar('MACHINE')
             if not machine:
                 self.fail('Unable to get MACHINE value - returned %s' % machine)

@@ -41,7 +41,7 @@ class TinfoilTests(oeSelfTest):
             if not best:
                 self.fail('Unable to find recipe providing %s' % testrecipe)
             rd = tinfoil.parse_recipe_file(best[3])
-            self.assertEqual(testrecipe, rd.getVar('PN', True))
+            self.assertEqual(testrecipe, rd.getVar('PN'))

     def test_parse_recipe_copy_expand(self):
         with bb.tinfoil.Tinfoil() as tinfoil:
@@ -52,14 +52,14 @@ class TinfoilTests(oeSelfTest):
                 self.fail('Unable to find recipe providing %s' % testrecipe)
             rd = tinfoil.parse_recipe_file(best[3])
             # Check we can get variable values
-            self.assertEqual(testrecipe, rd.getVar('PN', True))
+            self.assertEqual(testrecipe, rd.getVar('PN'))
             # Check that expanding a value that includes a variable reference works
-            self.assertEqual(testrecipe, rd.getVar('BPN', True))
+            self.assertEqual(testrecipe, rd.getVar('BPN'))
             # Now check that changing the referenced variable's value in a copy gives that
             # value when expanding
             localdata = bb.data.createCopy(rd)
             localdata.setVar('PN', 'hello')
-            self.assertEqual('hello', localdata.getVar('BPN', True))
+            self.assertEqual('hello', localdata.getVar('BPN'))

     def test_parse_recipe_initial_datastore(self):
         with bb.tinfoil.Tinfoil() as tinfoil:
@@ -72,7 +72,7 @@ class TinfoilTests(oeSelfTest):
             dcopy.setVar('MYVARIABLE', 'somevalue')
             rd = tinfoil.parse_recipe_file(best[3], config_data=dcopy)
             # Check we can get variable values
-            self.assertEqual('somevalue', rd.getVar('MYVARIABLE', True))
+            self.assertEqual('somevalue', rd.getVar('MYVARIABLE'))

     def test_list_recipes(self):
         with bb.tinfoil.Tinfoil() as tinfoil:
@@ -127,7 +127,7 @@ class TinfoilTests(oeSelfTest):
         with bb.tinfoil.Tinfoil() as tinfoil:
             tinfoil.prepare(config_only=True)
             tinfoil.run_command('setVariable', 'TESTVAR', 'specialvalue')
-            self.assertEqual(tinfoil.config_data.getVar('TESTVAR', True), 'specialvalue', 'Value set using setVariable is not reflected in client-side getVar()')
+            self.assertEqual(tinfoil.config_data.getVar('TESTVAR'), 'specialvalue', 'Value set using setVariable is not reflected in client-side getVar()')

         # Now check that the setVariable's effects are no longer present
         # (this may legitimately break in future if we stop reinitialising
@@ -135,7 +135,7 @@ class TinfoilTests(oeSelfTest):
         # setVariable entirely)
         with bb.tinfoil.Tinfoil() as tinfoil:
             tinfoil.prepare(config_only=True)
-            self.assertNotEqual(tinfoil.config_data.getVar('TESTVAR', True), 'specialvalue', 'Value set using setVariable is still present!')
+            self.assertNotEqual(tinfoil.config_data.getVar('TESTVAR'), 'specialvalue', 'Value set using setVariable is still present!')

         # Now check that setVar on the main datastore works (uses setVariable internally)
         with bb.tinfoil.Tinfoil() as tinfoil:
diff --git a/meta/lib/oeqa/targetcontrol.py b/meta/lib/oeqa/targetcontrol.py
index 24669f461d..d1f441f841 100644
--- a/meta/lib/oeqa/targetcontrol.py
+++ b/meta/lib/oeqa/targetcontrol.py
@@ -19,7 +19,7 @@ from oeqa.controllers.testtargetloader import TestTargetLoader
 from abc import ABCMeta, abstractmethod

 def get_target_controller(d):
-    testtarget = d.getVar("TEST_TARGET", True)
+    testtarget = d.getVar("TEST_TARGET")
     # old, simple names
     if testtarget == "qemu":
         return QemuTarget(d)
@@ -33,7 +33,7 @@ def get_target_controller(d):
     except AttributeError:
         # nope, perhaps a layer defined one
         try:
-            bbpath = d.getVar("BBPATH", True).split(':')
+            bbpath = d.getVar("BBPATH").split(':')
             testtargetloader = TestTargetLoader()
             controller = testtargetloader.get_controller_module(testtarget, bbpath)
         except ImportError as e:
@@ -51,9 +51,9 @@ class BaseTarget(object, metaclass=ABCMeta):
         self.connection = None
         self.ip = None
         self.server_ip = None
-        self.datetime = d.getVar('DATETIME', True)
-        self.testdir = d.getVar("TEST_LOG_DIR", True)
-        self.pn = d.getVar("PN", True)
+        self.datetime = d.getVar('DATETIME')
+        self.testdir = d.getVar("TEST_LOG_DIR")
+        self.pn = d.getVar("PN")

     @abstractmethod
     def deploy(self):
@@ -80,7 +80,7 @@ class BaseTarget(object, metaclass=ABCMeta):
     @classmethod
     def match_image_fstype(self, d, image_fstypes=None):
         if not image_fstypes:
-            image_fstypes = d.getVar('IMAGE_FSTYPES', True).split(' ')
+            image_fstypes = d.getVar('IMAGE_FSTYPES').split(' ')
         possible_image_fstypes = [fstype for fstype in self.supported_image_fstypes if fstype in image_fstypes]
         if possible_image_fstypes:
             return possible_image_fstypes[0]
@@ -119,14 +119,14 @@ class QemuTarget(BaseTarget):

         self.image_fstype = self.get_image_fstype(d)
         self.qemulog = os.path.join(self.testdir, "qemu_boot_log.%s" % self.datetime)
-        self.rootfs = os.path.join(d.getVar("DEPLOY_DIR_IMAGE", True), d.getVar("IMAGE_LINK_NAME", True) + '.' + self.image_fstype)
-        self.kernel = os.path.join(d.getVar("DEPLOY_DIR_IMAGE", True), d.getVar("KERNEL_IMAGETYPE", False) + '-' + d.getVar('MACHINE', False) + '.bin')
-        dump_target_cmds = d.getVar("testimage_dump_target", True)
-        dump_host_cmds = d.getVar("testimage_dump_host", True)
-        dump_dir = d.getVar("TESTIMAGE_DUMP_DIR", True)
+        self.rootfs = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), d.getVar("IMAGE_LINK_NAME") + '.' + self.image_fstype)
+        self.kernel = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), d.getVar("KERNEL_IMAGETYPE", False) + '-' + d.getVar('MACHINE', False) + '.bin')
+        dump_target_cmds = d.getVar("testimage_dump_target")
+        dump_host_cmds = d.getVar("testimage_dump_host")
+        dump_dir = d.getVar("TESTIMAGE_DUMP_DIR")
         if d.getVar("QEMU_USE_KVM", False) is not None \
            and d.getVar("QEMU_USE_KVM", False) == "True" \
-           and "x86" in d.getVar("MACHINE", True):
+           and "x86" in d.getVar("MACHINE"):
             use_kvm = True
         else:
             use_kvm = False
@@ -141,26 +141,26 @@ class QemuTarget(BaseTarget):
             logger.addHandler(loggerhandler)
             oe.path.symlink(os.path.basename(self.qemurunnerlog), os.path.join(self.testdir, 'qemurunner_log'), force=True)

-        if d.getVar("DISTRO", True) == "poky-tiny":
-            self.runner = QemuTinyRunner(machine=d.getVar("MACHINE", True),
+        if d.getVar("DISTRO") == "poky-tiny":
+            self.runner = QemuTinyRunner(machine=d.getVar("MACHINE"),
                             rootfs=self.rootfs,
-                            tmpdir = d.getVar("TMPDIR", True),
-                            deploy_dir_image = d.getVar("DEPLOY_DIR_IMAGE", True),
-                            display = d.getVar("BB_ORIGENV", False).getVar("DISPLAY", True),
+                            tmpdir = d.getVar("TMPDIR"),
+                            deploy_dir_image = d.getVar("DEPLOY_DIR_IMAGE"),
+                            display = d.getVar("BB_ORIGENV", False).getVar("DISPLAY"),
                             logfile = self.qemulog,
                             kernel = self.kernel,
-                            boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT", True)))
+                            boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT")))
         else:
-            self.runner = QemuRunner(machine=d.getVar("MACHINE", True),
+            self.runner = QemuRunner(machine=d.getVar("MACHINE"),
                             rootfs=self.rootfs,
-                            tmpdir = d.getVar("TMPDIR", True),
-                            deploy_dir_image = d.getVar("DEPLOY_DIR_IMAGE", True),
-                            display = d.getVar("BB_ORIGENV", False).getVar("DISPLAY", True),
+                            tmpdir = d.getVar("TMPDIR"),
+                            deploy_dir_image = d.getVar("DEPLOY_DIR_IMAGE"),
+                            display = d.getVar("BB_ORIGENV", False).getVar("DISPLAY"),
                             logfile = self.qemulog,
-                            boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT", True)),
+                            boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT")),
                             use_kvm = use_kvm,
                             dump_dir = dump_dir,
-                            dump_host_cmds = d.getVar("testimage_dump_host", True))
+                            dump_host_cmds = d.getVar("testimage_dump_host"))

         self.target_dumper = TargetDumper(dump_target_cmds, dump_dir, self.runner)

@@ -214,14 +214,14 @@ class SimpleRemoteTarget(BaseTarget):

     def __init__(self, d):
         super(SimpleRemoteTarget, self).__init__(d)
-        addr = d.getVar("TEST_TARGET_IP", True) or bb.fatal('Please set TEST_TARGET_IP with the IP address of the machine you want to run the tests on.')
+        addr = d.getVar("TEST_TARGET_IP") or bb.fatal('Please set TEST_TARGET_IP with the IP address of the machine you want to run the tests on.')
         self.ip = addr.split(":")[0]
         try:
             self.port = addr.split(":")[1]
         except IndexError:
             self.port = None
         bb.note("Target IP: %s" % self.ip)
-        self.server_ip = d.getVar("TEST_SERVER_IP", True)
+        self.server_ip = d.getVar("TEST_SERVER_IP")
         if not self.server_ip:
             try:
                 self.server_ip = subprocess.check_output(['ip', 'route', 'get', self.ip ]).split("\n")[0].split()[-1]
diff --git a/meta/lib/oeqa/utils/commands.py b/meta/lib/oeqa/utils/commands.py
index aecf8cf5a8..3a68b001b7 100644
--- a/meta/lib/oeqa/utils/commands.py
+++ b/meta/lib/oeqa/utils/commands.py
@@ -231,7 +231,7 @@ def runqemu(pn, ssh=True):
         logger = logging.getLogger('BitBake.QemuRunner')
         logger.setLevel(logging.DEBUG)
         logger.propagate = False
-        logdir = recipedata.getVar("TEST_LOG_DIR", True)
+        logdir = recipedata.getVar("TEST_LOG_DIR")

         qemu = oeqa.targetcontrol.QemuTarget(recipedata)
     finally:
diff --git a/meta/lib/oeqa/utils/dump.py b/meta/lib/oeqa/utils/dump.py
index 71422a9aea..44037a989d 100644
--- a/meta/lib/oeqa/utils/dump.py
+++ b/meta/lib/oeqa/utils/dump.py
@@ -6,8 +6,8 @@ import itertools
 from .commands import runCmd

 def get_host_dumper(d):
-    cmds = d.getVar("testimage_dump_host", True)
-    parent_dir = d.getVar("TESTIMAGE_DUMP_DIR", True)
+    cmds = d.getVar("testimage_dump_host")
+    parent_dir = d.getVar("TESTIMAGE_DUMP_DIR")
     return HostDumper(cmds, parent_dir)


diff --git a/meta/lib/oeqa/utils/package_manager.py b/meta/lib/oeqa/utils/package_manager.py
index 099ecc9728..0f6bdbc542 100644
--- a/meta/lib/oeqa/utils/package_manager.py
+++ b/meta/lib/oeqa/utils/package_manager.py
@@ -4,24 +4,24 @@ def get_package_manager(d, root_path):
4 """ 4 """
5 from oe.package_manager import RpmPM, OpkgPM, DpkgPM 5 from oe.package_manager import RpmPM, OpkgPM, DpkgPM
6 6
7 pkg_class = d.getVar("IMAGE_PKGTYPE", True) 7 pkg_class = d.getVar("IMAGE_PKGTYPE")
8 if pkg_class == "rpm": 8 if pkg_class == "rpm":
9 pm = RpmPM(d, 9 pm = RpmPM(d,
10 root_path, 10 root_path,
11 d.getVar('TARGET_VENDOR', True)) 11 d.getVar('TARGET_VENDOR'))
12 pm.create_configs() 12 pm.create_configs()
13 13
14 elif pkg_class == "ipk": 14 elif pkg_class == "ipk":
15 pm = OpkgPM(d, 15 pm = OpkgPM(d,
16 root_path, 16 root_path,
17 d.getVar("IPKGCONF_TARGET", True), 17 d.getVar("IPKGCONF_TARGET"),
18 d.getVar("ALL_MULTILIB_PACKAGE_ARCHS", True)) 18 d.getVar("ALL_MULTILIB_PACKAGE_ARCHS"))
19 19
20 elif pkg_class == "deb": 20 elif pkg_class == "deb":
21 pm = DpkgPM(d, 21 pm = DpkgPM(d,
22 root_path, 22 root_path,
23 d.getVar('PACKAGE_ARCHS', True), 23 d.getVar('PACKAGE_ARCHS'),
24 d.getVar('DPKG_ARCH', True)) 24 d.getVar('DPKG_ARCH'))
25 25
26 pm.write_index() 26 pm.write_index()
27 pm.update() 27 pm.update()
diff --git a/meta/lib/oeqa/utils/targetbuild.py b/meta/lib/oeqa/utils/targetbuild.py
index 59593f5ef3..c001602b54 100644
--- a/meta/lib/oeqa/utils/targetbuild.py
+++ b/meta/lib/oeqa/utils/targetbuild.py
@@ -25,7 +25,7 @@ class BuildProject(metaclass=ABCMeta):
     # Download self.archive to self.localarchive
     def _download_archive(self):

-        dl_dir = self.d.getVar("DL_DIR", True)
+        dl_dir = self.d.getVar("DL_DIR")
         if dl_dir and os.path.exists(os.path.join(dl_dir, self.archive)):
             bb.utils.copyfile(os.path.join(dl_dir, self.archive), self.localarchive)
             return
@@ -40,7 +40,7 @@ class BuildProject(metaclass=ABCMeta):

         cmd = ''
         for var in exportvars:
-            val = self.d.getVar(var, True)
+            val = self.d.getVar(var)
             if val:
                 cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)

@@ -103,8 +103,8 @@ class SDKBuildProject(BuildProject):
         self.testdir = testpath
         self.targetdir = testpath
         bb.utils.mkdirhier(testpath)
-        self.datetime = d.getVar('DATETIME', True)
-        self.testlogdir = d.getVar("TEST_LOG_DIR", True)
+        self.datetime = d.getVar('DATETIME')
+        self.testlogdir = d.getVar("TEST_LOG_DIR")
         bb.utils.mkdirhier(self.testlogdir)
         self.logfile = os.path.join(self.testlogdir, "sdk_target_log.%s" % self.datetime)
         BuildProject.__init__(self, d, uri, foldername, tmpdir=testpath)
diff --git a/meta/lib/oeqa/utils/testexport.py b/meta/lib/oeqa/utils/testexport.py
index 57be2ca449..be2a2110fc 100644
--- a/meta/lib/oeqa/utils/testexport.py
+++ b/meta/lib/oeqa/utils/testexport.py
@@ -72,9 +72,9 @@ def process_binaries(d, params):
         return extract_bin_command

     if determine_if_poky_env(): # machine with poky environment
-        exportpath = d.getVar("TEST_EXPORT_DIR", True) if export_env else d.getVar("DEPLOY_DIR", True)
-        rpm_deploy_dir = d.getVar("DEPLOY_DIR_RPM", True)
-        arch = get_dest_folder(d.getVar("TUNE_FEATURES", True), os.listdir(rpm_deploy_dir))
+        exportpath = d.getVar("TEST_EXPORT_DIR") if export_env else d.getVar("DEPLOY_DIR")
+        rpm_deploy_dir = d.getVar("DEPLOY_DIR_RPM")
+        arch = get_dest_folder(d.getVar("TUNE_FEATURES"), os.listdir(rpm_deploy_dir))
         arch_rpm_dir = os.path.join(rpm_deploy_dir, arch)
         extracted_bin_dir = os.path.join(exportpath,"binaries", arch, "extracted_binaries")
         packaged_bin_dir = os.path.join(exportpath,"binaries", arch, "packaged_binaries")
@@ -92,7 +92,7 @@ def process_binaries(d, params):
                 return ""
             for item in native_rpm_file_list:# will copy all versions of package. Used version will be selected on remote machine
                 bb.plain("Copying native package file: %s" % item)
-                sh.copy(os.path.join(rpm_deploy_dir, native_rpm_dir, item), os.path.join(d.getVar("TEST_EXPORT_DIR", True), "binaries", "native"))
+                sh.copy(os.path.join(rpm_deploy_dir, native_rpm_dir, item), os.path.join(d.getVar("TEST_EXPORT_DIR"), "binaries", "native"))
         else: # nothing to do here; running tests under bitbake, so we asume native binaries are in sysroots dir.
             if param_list[1] or param_list[4]:
                 bb.warn("Native binary %s %s%s. Running tests under bitbake environment. Version can't be checked except when the test itself does it"
@@ -148,7 +148,7 @@ def process_binaries(d, params):
     else: # this is for target device
         if param_list[2] == "rpm":
             return "No need to extract, this is an .rpm file"
-        arch = get_dest_folder(d.getVar("TUNE_FEATURES", True), os.listdir(binaries_path))
+        arch = get_dest_folder(d.getVar("TUNE_FEATURES"), os.listdir(binaries_path))
         extracted_bin_path = os.path.join(binaries_path, arch, "extracted_binaries")
         extracted_bin_list = [item for item in os.listdir(extracted_bin_path)]
         packaged_bin_path = os.path.join(binaries_path, arch, "packaged_binaries")
@@ -206,9 +206,9 @@ def send_bin_to_DUT(d,params):
     from oeqa.oetest import oeRuntimeTest
     param_list = params
     cleanup_list = list()
-    bins_dir = os.path.join(d.getVar("TEST_EXPORT_DIR", True), "binaries") if determine_if_poky_env() \
+    bins_dir = os.path.join(d.getVar("TEST_EXPORT_DIR"), "binaries") if determine_if_poky_env() \
                     else os.getenv("bin_dir")
-    arch = get_dest_folder(d.getVar("TUNE_FEATURES", True), os.listdir(bins_dir))
+    arch = get_dest_folder(d.getVar("TUNE_FEATURES"), os.listdir(bins_dir))
     arch_rpms_dir = os.path.join(bins_dir, arch, "packaged_binaries")
     extracted_bin_dir = os.path.join(bins_dir, arch, "extracted_binaries", param_list[0])

diff --git a/meta/recipes-bsp/gnu-efi/gnu-efi_3.0.4.bb b/meta/recipes-bsp/gnu-efi/gnu-efi_3.0.4.bb
index e0d8ee76dd..9ddc4ba716 100644
--- a/meta/recipes-bsp/gnu-efi/gnu-efi_3.0.4.bb
+++ b/meta/recipes-bsp/gnu-efi/gnu-efi_3.0.4.bb
@@ -27,7 +27,7 @@ COMPATIBLE_HOST_armv4 = 'null'

 def gnu_efi_arch(d):
     import re
-    tarch = d.getVar("TARGET_ARCH", True)
+    tarch = d.getVar("TARGET_ARCH")
     if re.match("i[3456789]86", tarch):
         return "ia32"
     return tarch
diff --git a/meta/recipes-bsp/grub/grub-efi_2.00.bb b/meta/recipes-bsp/grub/grub-efi_2.00.bb
index 5a0dc954a3..531482b915 100644
--- a/meta/recipes-bsp/grub/grub-efi_2.00.bb
+++ b/meta/recipes-bsp/grub/grub-efi_2.00.bb
@@ -13,7 +13,7 @@ S = "${WORKDIR}/grub-${PV}"
 # Determine the target arch for the grub modules
 python __anonymous () {
     import re
-    target = d.getVar('TARGET_ARCH', True)
+    target = d.getVar('TARGET_ARCH')
     if target == "x86_64":
         grubtarget = 'x86_64'
         grubimage = "bootx64.efi"
diff --git a/meta/recipes-bsp/u-boot/u-boot.inc b/meta/recipes-bsp/u-boot/u-boot.inc
index afe3fe91f2..4a55fd6b4a 100644
--- a/meta/recipes-bsp/u-boot/u-boot.inc
+++ b/meta/recipes-bsp/u-boot/u-boot.inc
@@ -42,7 +42,7 @@ UBOOT_ELF_SYMLINK ?= "u-boot-${MACHINE}.${UBOOT_ELF_SUFFIX}"
 # deploy directory. For those versions they can set the following variables
 # to allow packaging the SPL.
 SPL_BINARY ?= ""
-SPL_BINARYNAME ?= "${@os.path.basename(d.getVar("SPL_BINARY", True))}"
+SPL_BINARYNAME ?= "${@os.path.basename(d.getVar("SPL_BINARY"))}"
 SPL_IMAGE ?= "${SPL_BINARYNAME}-${MACHINE}-${PV}-${PR}"
 SPL_SYMLINK ?= "${SPL_BINARYNAME}-${MACHINE}"

diff --git a/meta/recipes-connectivity/bluez5/bluez5.inc b/meta/recipes-connectivity/bluez5/bluez5.inc
index ecefb7b593..55a5055ff3 100644
--- a/meta/recipes-connectivity/bluez5/bluez5.inc
+++ b/meta/recipes-connectivity/bluez5/bluez5.inc
@@ -95,13 +95,13 @@ FILES_${PN}-testtools = "${libdir}/bluez/test/*"

 def get_noinst_tools_paths (d, bb, tools):
     s = list()
-    bindir = d.getVar("bindir", True)
+    bindir = d.getVar("bindir")
     for bdp in tools.split():
         f = os.path.basename(bdp)
         s.append("%s/%s" % (bindir, f))
     return "\n".join(s)

-FILES_${PN}-noinst-tools = "${@get_noinst_tools_paths(d, bb, d.getVar('NOINST_TOOLS', True))}"
+FILES_${PN}-noinst-tools = "${@get_noinst_tools_paths(d, bb, d.getVar('NOINST_TOOLS'))}"

 RDEPENDS_${PN}-testtools += "python3 python3-dbus python3-pygobject"

diff --git a/meta/recipes-connectivity/connman/connman.inc b/meta/recipes-connectivity/connman/connman.inc
index 35a7eed0a9..7fa9a7b93c 100644
--- a/meta/recipes-connectivity/connman/connman.inc
+++ b/meta/recipes-connectivity/connman/connman.inc
@@ -58,7 +58,7 @@ INITSCRIPT_PARAMS = "start 05 5 2 3 . stop 22 0 1 6 ."

 python __anonymous () {
     systemd_packages = "${PN}"
-    pkgconfig = d.getVar('PACKAGECONFIG', True)
+    pkgconfig = d.getVar('PACKAGECONFIG')
     if ('openvpn' or 'vpnc' or 'l2tp' or 'pptp') in pkgconfig.split():
         systemd_packages += " ${PN}-vpn"
     d.setVar('SYSTEMD_PACKAGES', systemd_packages)
@@ -116,7 +116,7 @@ def add_rdepends(bb, d, file, pkg, depmap, multilib_prefix, add_insane_skip):

 python populate_packages_prepend() {
     depmap = dict(pppd="ppp")
-    multilib_prefix = (d.getVar("MLPREFIX", True) or "")
+    multilib_prefix = (d.getVar("MLPREFIX") or "")

     hook = lambda file,pkg,x,y,z: \
         add_rdepends(bb, d, file, pkg, depmap, multilib_prefix, False)
diff --git a/meta/recipes-core/base-files/base-files_3.0.14.bb b/meta/recipes-core/base-files/base-files_3.0.14.bb
index 533311061c..d138005311 100644
--- a/meta/recipes-core/base-files/base-files_3.0.14.bb
+++ b/meta/recipes-core/base-files/base-files_3.0.14.bb
@@ -173,5 +173,5 @@ FILES_${PN}-doc = "${docdir} ${datadir}/common-licenses"

 PACKAGE_ARCH = "${MACHINE_ARCH}"

-CONFFILES_${PN} = "${sysconfdir}/fstab ${@['', '${sysconfdir}/hostname'][(d.getVar('hostname', True) != '')]} ${sysconfdir}/shells"
+CONFFILES_${PN} = "${sysconfdir}/fstab ${@['', '${sysconfdir}/hostname'][(d.getVar('hostname') != '')]} ${sysconfdir}/shells"
 CONFFILES_${PN} += "${sysconfdir}/motd ${sysconfdir}/nsswitch.conf ${sysconfdir}/profile"
diff --git a/meta/recipes-core/busybox/busybox.inc b/meta/recipes-core/busybox/busybox.inc
index b2f1960226..1f4a48c8cf 100644
--- a/meta/recipes-core/busybox/busybox.inc
+++ b/meta/recipes-core/busybox/busybox.inc
@@ -331,21 +331,21 @@ ALTERNATIVE_LINK_NAME[syslog-conf] = "${sysconfdir}/syslog.conf"

 python () {
     if bb.utils.contains('DISTRO_FEATURES', 'sysvinit', True, False, d):
-        pn = d.getVar('PN', True)
+        pn = d.getVar('PN')
         d.appendVar('ALTERNATIVE_%s-syslog' % (pn), ' syslog-init')
-        d.setVarFlag('ALTERNATIVE_LINK_NAME', 'syslog-init', '%s/init.d/syslog' % (d.getVar('sysconfdir', True)))
-        d.setVarFlag('ALTERNATIVE_TARGET', 'syslog-init', '%s/init.d/syslog.%s' % (d.getVar('sysconfdir', True), d.getVar('BPN', True)))
+        d.setVarFlag('ALTERNATIVE_LINK_NAME', 'syslog-init', '%s/init.d/syslog' % (d.getVar('sysconfdir')))
+        d.setVarFlag('ALTERNATIVE_TARGET', 'syslog-init', '%s/init.d/syslog.%s' % (d.getVar('sysconfdir'), d.getVar('BPN')))
         d.appendVar('ALTERNATIVE_%s-syslog' % (pn), ' syslog-startup-conf')
-        d.setVarFlag('ALTERNATIVE_LINK_NAME', 'syslog-startup-conf', '%s/syslog-startup.conf' % (d.getVar('sysconfdir', True)))
-        d.setVarFlag('ALTERNATIVE_TARGET', 'syslog-startup-conf', '%s/syslog-startup.conf.%s' % (d.getVar('sysconfdir', True), d.getVar('BPN', True)))
+        d.setVarFlag('ALTERNATIVE_LINK_NAME', 'syslog-startup-conf', '%s/syslog-startup.conf' % (d.getVar('sysconfdir')))
+        d.setVarFlag('ALTERNATIVE_TARGET', 'syslog-startup-conf', '%s/syslog-startup.conf.%s' % (d.getVar('sysconfdir'), d.getVar('BPN')))
 }

 python do_package_prepend () {
     # We need to load the full set of busybox provides from the /etc/busybox.links
     # Use this to see the update-alternatives with the right information

-    dvar = d.getVar('D', True)
-    pn = d.getVar('PN', True)
+    dvar = d.getVar('D')
+    pn = d.getVar('PN')
     def set_alternative_vars(links, target):
         links = d.expand(links)
         target = d.expand(target)
diff --git a/meta/recipes-core/busybox/busybox_1.24.1.bb b/meta/recipes-core/busybox/busybox_1.24.1.bb
index c35cba3222..afb69d13e6 100644
--- a/meta/recipes-core/busybox/busybox_1.24.1.bb
+++ b/meta/recipes-core/busybox/busybox_1.24.1.bb
@@ -38,8 +38,8 @@ SRC_URI = "http://www.busybox.net/downloads/busybox-${PV}.tar.bz2;name=tarball \
            file://sha256sum.cfg \
            file://getopts.cfg \
            file://resize.cfg \
-           ${@["", "file://init.cfg"][(d.getVar('VIRTUAL-RUNTIME_init_manager', True) == 'busybox')]} \
-           ${@["", "file://mdev.cfg"][(d.getVar('VIRTUAL-RUNTIME_dev_manager', True) == 'busybox-mdev')]} \
+           ${@["", "file://init.cfg"][(d.getVar('VIRTUAL-RUNTIME_init_manager') == 'busybox')]} \
+           ${@["", "file://mdev.cfg"][(d.getVar('VIRTUAL-RUNTIME_dev_manager') == 'busybox-mdev')]} \
            file://inittab \
            file://rcS \
            file://rcK \
diff --git a/meta/recipes-core/busybox/busybox_git.bb b/meta/recipes-core/busybox/busybox_git.bb
index c29b894349..c2ee3e6c4b 100644
--- a/meta/recipes-core/busybox/busybox_git.bb
+++ b/meta/recipes-core/busybox/busybox_git.bb
@@ -40,8 +40,8 @@ SRC_URI = "git://busybox.net/busybox.git \
            file://sha256sum.cfg \
            file://getopts.cfg \
            file://resize.cfg \
-           ${@["", "file://init.cfg"][(d.getVar('VIRTUAL-RUNTIME_init_manager', True) == 'busybox')]} \
-           ${@["", "file://mdev.cfg"][(d.getVar('VIRTUAL-RUNTIME_dev_manager', True) == 'busybox-mdev')]} \
+           ${@["", "file://init.cfg"][(d.getVar('VIRTUAL-RUNTIME_init_manager') == 'busybox')]} \
+           ${@["", "file://mdev.cfg"][(d.getVar('VIRTUAL-RUNTIME_dev_manager') == 'busybox-mdev')]} \
            file://inittab \
            file://rcS \
            file://rcK \
diff --git a/meta/recipes-core/coreutils/coreutils_6.9.bb b/meta/recipes-core/coreutils/coreutils_6.9.bb
index 35700a32fd..9bfbeabc04 100644
--- a/meta/recipes-core/coreutils/coreutils_6.9.bb
+++ b/meta/recipes-core/coreutils/coreutils_6.9.bb
@@ -99,9 +99,9 @@ ALTERNATIVE_LINK_NAME[lbracket] = "${bindir}/["
 ALTERNATIVE_TARGET[lbracket] = "${bindir}/lbracket.${BPN}"

 python __anonymous() {
-    for prog in d.getVar('base_bindir_progs', True).split():
-        d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('base_bindir', True), prog))
+    for prog in d.getVar('base_bindir_progs').split():
+        d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('base_bindir'), prog))

-    for prog in d.getVar('sbindir_progs', True).split():
-        d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('sbindir', True), prog))
+    for prog in d.getVar('sbindir_progs').split():
+        d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('sbindir'), prog))
 }
diff --git a/meta/recipes-core/coreutils/coreutils_8.25.bb b/meta/recipes-core/coreutils/coreutils_8.25.bb
index df6ad380f3..6d6f7bc84e 100644
--- a/meta/recipes-core/coreutils/coreutils_8.25.bb
+++ b/meta/recipes-core/coreutils/coreutils_8.25.bb
@@ -132,11 +132,11 @@ ALTERNATIVE_LINK_NAME[kill.1] = "${mandir}/man1/kill.1"
 ALTERNATIVE_LINK_NAME[stat.1] = "${mandir}/man1/stat.1"

 python __anonymous() {
-    for prog in d.getVar('base_bindir_progs', True).split():
-        d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('base_bindir', True), prog))
+    for prog in d.getVar('base_bindir_progs').split():
+        d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('base_bindir'), prog))

-    for prog in d.getVar('sbindir_progs', True).split():
-        d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('sbindir', True), prog))
+    for prog in d.getVar('sbindir_progs').split():
+        d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('sbindir'), prog))
 }

 BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-core/glibc/glibc-ld.inc b/meta/recipes-core/glibc/glibc-ld.inc
index e2e24741f3..547c235eee 100644
--- a/meta/recipes-core/glibc/glibc-ld.inc
+++ b/meta/recipes-core/glibc/glibc-ld.inc
@@ -1,6 +1,6 @@
 def ld_append_if_tune_exists(d, infos, dict):
-    tune = d.getVar("DEFAULTTUNE", True) or ""
-    libdir = d.getVar("base_libdir", True) or ""
+    tune = d.getVar("DEFAULTTUNE") or ""
+    libdir = d.getVar("base_libdir") or ""
     if tune in dict:
         infos['ldconfig'].add('{"' + libdir + '/' + dict[tune][0] + '",' + dict[tune][1] + ' }')
         infos['lddrewrite'].add(libdir+'/'+dict[tune][0])
@@ -32,7 +32,7 @@ def glibc_dl_info(d):
     localdata.setVar("DEFAULTTUNE", original_tune)
     ld_append_if_tune_exists(localdata, infos, ld_info_all)

-    variants = d.getVar("MULTILIB_VARIANTS", True) or ""
+    variants = d.getVar("MULTILIB_VARIANTS") or ""
     for item in variants.split():
         localdata = bb.data.createCopy(d)
         overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
diff --git a/meta/recipes-core/glibc/glibc-locale.inc b/meta/recipes-core/glibc/glibc-locale.inc
index 0a7adfcc83..11bd612fff 100644
--- a/meta/recipes-core/glibc/glibc-locale.inc
+++ b/meta/recipes-core/glibc/glibc-locale.inc
@@ -41,22 +41,22 @@ PACKAGES_DYNAMIC = "^locale-base-.* \
 # Create a glibc-binaries package
 ALLOW_EMPTY_${BPN}-binaries = "1"
 PACKAGES += "${BPN}-binaries"
-RRECOMMENDS_${BPN}-binaries = "${@" ".join([p for p in d.getVar('PACKAGES', True).split() if p.find("glibc-binary") != -1])}"
+RRECOMMENDS_${BPN}-binaries = "${@" ".join([p for p in d.getVar('PACKAGES').split() if p.find("glibc-binary") != -1])}"

 # Create a glibc-charmaps package
 ALLOW_EMPTY_${BPN}-charmaps = "1"
 PACKAGES += "${BPN}-charmaps"
-RRECOMMENDS_${BPN}-charmaps = "${@" ".join([p for p in d.getVar('PACKAGES', True).split() if p.find("glibc-charmap") != -1])}"
+RRECOMMENDS_${BPN}-charmaps = "${@" ".join([p for p in d.getVar('PACKAGES').split() if p.find("glibc-charmap") != -1])}"

 # Create a glibc-gconvs package
 ALLOW_EMPTY_${BPN}-gconvs = "1"
 PACKAGES += "${BPN}-gconvs"
-RRECOMMENDS_${BPN}-gconvs = "${@" ".join([p for p in d.getVar('PACKAGES', True).split() if p.find("glibc-gconv") != -1])}"
+RRECOMMENDS_${BPN}-gconvs = "${@" ".join([p for p in d.getVar('PACKAGES').split() if p.find("glibc-gconv") != -1])}"

 # Create a glibc-localedatas package
 ALLOW_EMPTY_${BPN}-localedatas = "1"
 PACKAGES += "${BPN}-localedatas"
-RRECOMMENDS_${BPN}-localedatas = "${@" ".join([p for p in d.getVar('PACKAGES', True).split() if p.find("glibc-localedata") != -1])}"
+RRECOMMENDS_${BPN}-localedatas = "${@" ".join([p for p in d.getVar('PACKAGES').split() if p.find("glibc-localedata") != -1])}"

 DESCRIPTION_localedef = "glibc: compile locale definition files"

diff --git a/meta/recipes-core/glibc/glibc-package.inc b/meta/recipes-core/glibc/glibc-package.inc
index bad642449a..481a00e125 100644
--- a/meta/recipes-core/glibc/glibc-package.inc
+++ b/meta/recipes-core/glibc/glibc-package.inc
@@ -8,10 +8,10 @@

 python __anonymous () {
     import bb, re
-    uc_os = (re.match('.*uclibc*', d.getVar('TARGET_OS', True)) != None)
+    uc_os = (re.match('.*uclibc*', d.getVar('TARGET_OS')) != None)
     if uc_os:
         raise bb.parse.SkipPackage("incompatible with target %s" %
-                                   d.getVar('TARGET_OS', True))
+                                   d.getVar('TARGET_OS'))
 }

 # Set this to zero if you don't want ldconfig in the output package
diff --git a/meta/recipes-core/glibc/glibc.inc b/meta/recipes-core/glibc/glibc.inc
index 7bae0e9554..823e60e771 100644
--- a/meta/recipes-core/glibc/glibc.inc
+++ b/meta/recipes-core/glibc/glibc.inc
@@ -10,13 +10,13 @@ TOOLCHAIN_OPTIONS = " --sysroot=${STAGING_DIR_TCBOOTSTRAP}"

 python () {
     opt_effective = "-O"
-    for opt in d.getVar('SELECTED_OPTIMIZATION', True).split():
+    for opt in d.getVar('SELECTED_OPTIMIZATION').split():
         if opt in ("-O0", "-O", "-O1", "-O2", "-O3", "-Os"):
             opt_effective = opt
     if opt_effective == "-O0":
-        bb.fatal("%s can't be built with %s, try -O1 instead" % (d.getVar('PN', True), opt_effective))
+        bb.fatal("%s can't be built with %s, try -O1 instead" % (d.getVar('PN'), opt_effective))
     if opt_effective in ("-O", "-O1", "-Os"):
-        bb.note("%s doesn't build cleanly with %s, adding -Wno-error to SELECTED_OPTIMIZATION" % (d.getVar('PN', True), opt_effective))
+        bb.note("%s doesn't build cleanly with %s, adding -Wno-error to SELECTED_OPTIMIZATION" % (d.getVar('PN'), opt_effective))
         d.appendVar("SELECTED_OPTIMIZATION", " -Wno-error")
 }

diff --git a/meta/recipes-core/libxml/libxml2_2.9.4.bb b/meta/recipes-core/libxml/libxml2_2.9.4.bb
index ba08c9c994..0287a0a7c4 100644
--- a/meta/recipes-core/libxml/libxml2_2.9.4.bb
+++ b/meta/recipes-core/libxml/libxml2_2.9.4.bb
@@ -56,7 +56,7 @@ EXTRA_OECONF_linuxstdbase = "--with-debug --with-legacy --with-docbook --with-c1

 python populate_packages_prepend () {
     # autonamer would call this libxml2-2, but we don't want that
-    if d.getVar('DEBIAN_NAMES', True):
+    if d.getVar('DEBIAN_NAMES'):
         d.setVar('PKG_libxml2', '${MLPREFIX}libxml2')
 }

diff --git a/meta/recipes-core/meta/meta-environment-extsdk.bb b/meta/recipes-core/meta/meta-environment-extsdk.bb
index d9e596143f..52012cc4fb 100644
--- a/meta/recipes-core/meta/meta-environment-extsdk.bb
+++ b/meta/recipes-core/meta/meta-environment-extsdk.bb
@@ -5,8 +5,8 @@ require meta-environment.bb
5PN = "meta-environment-extsdk-${MACHINE}" 5PN = "meta-environment-extsdk-${MACHINE}"
6 6
7create_sdk_files_append() { 7create_sdk_files_append() {
8 local sysroot=${SDKPATH}/${@os.path.relpath(d.getVar('STAGING_DIR_TARGET', True), d.getVar('TOPDIR', True))} 8 local sysroot=${SDKPATH}/${@os.path.relpath(d.getVar('STAGING_DIR_TARGET'), d.getVar('TOPDIR'))}
9 local sdkpathnative=${SDKPATH}/${@os.path.relpath(d.getVar('STAGING_DIR_NATIVE',True), d.getVar('TOPDIR', True))} 9 local sdkpathnative=${SDKPATH}/${@os.path.relpath(d.getVar('STAGING_DIR_NATIVE',True), d.getVar('TOPDIR'))}
10 10
11 toolchain_create_sdk_env_script '' '' $sysroot '' ${bindir_native} ${prefix_native} $sdkpathnative 11 toolchain_create_sdk_env_script '' '' $sysroot '' ${bindir_native} ${prefix_native} $sdkpathnative
12} 12}
diff --git a/meta/recipes-core/meta/meta-environment.bb b/meta/recipes-core/meta/meta-environment.bb
index 71e0c23186..a250cc89fe 100644
--- a/meta/recipes-core/meta/meta-environment.bb
+++ b/meta/recipes-core/meta/meta-environment.bb
@@ -26,16 +26,16 @@ python do_generate_content() {
     localdata = bb.data.createCopy(d)
 
     # make sure we only use the WORKDIR value from 'd', or it can change
-    localdata.setVar('WORKDIR', d.getVar('WORKDIR', True))
+    localdata.setVar('WORKDIR', d.getVar('WORKDIR'))
 
     # make sure we only use the SDKTARGETSYSROOT value from 'd'
-    localdata.setVar('SDKTARGETSYSROOT', d.getVar('SDKTARGETSYSROOT', True))
+    localdata.setVar('SDKTARGETSYSROOT', d.getVar('SDKTARGETSYSROOT'))
     localdata.setVar('libdir', d.getVar('target_libdir', False))
 
     # Process DEFAULTTUNE
     bb.build.exec_func("create_sdk_files", localdata)
 
-    variants = d.getVar("MULTILIB_VARIANTS", True) or ""
+    variants = d.getVar("MULTILIB_VARIANTS") or ""
     for item in variants.split():
         # Load overrides from 'd' to avoid having to reset the value...
         overrides = d.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
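
The lines left untouched above show the other half of the API: an explicit False still requests the raw, unexpanded value, which is exactly why the target_libdir and OVERRIDES reads keep their second argument. Schematically, with illustrative values (not taken from this recipe):

    # Illustrative datastore contents:
    d.setVar('TOPDIR', '/build')
    d.setVar('WORKDIR', '${TOPDIR}/work')

    d.getVar('WORKDIR')         # '/build/work'    -- expansion is the default
    d.getVar('WORKDIR', False)  # '${TOPDIR}/work' -- raw, unexpanded value
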
diff --git a/meta/recipes-core/meta/meta-extsdk-toolchain.bb b/meta/recipes-core/meta/meta-extsdk-toolchain.bb
index 22de0f357b..fc6d6de99a 100644
--- a/meta/recipes-core/meta/meta-extsdk-toolchain.bb
+++ b/meta/recipes-core/meta/meta-extsdk-toolchain.bb
@@ -19,7 +19,7 @@ do_locked_sigs[sstate-outputdirs] = "${STAGING_DIR_HOST}/locked-sigs"
 
 python do_locked_sigs() {
     import oe.copy_buildsystem
-    outdir = os.path.join(d.getVar('LOCKED_SIGS_INDIR', True))
+    outdir = os.path.join(d.getVar('LOCKED_SIGS_INDIR'))
     bb.utils.mkdirhier(outdir)
     sigfile = os.path.join(outdir, 'locked-sigs-extsdk-toolchain.inc')
     oe.copy_buildsystem.generate_locked_sigs(sigfile, d)
diff --git a/meta/recipes-core/meta/meta-world-pkgdata.bb b/meta/recipes-core/meta/meta-world-pkgdata.bb
index 81c8647fa6..0ce378ea2b 100644
--- a/meta/recipes-core/meta/meta-world-pkgdata.bb
+++ b/meta/recipes-core/meta/meta-world-pkgdata.bb
@@ -19,7 +19,7 @@ do_collect_packagedata[sstate-outputdirs] = "${STAGING_DIR_HOST}/world-pkgdata"
 
 python do_collect_packagedata() {
     import oe.copy_buildsystem
-    outdir = os.path.join(d.getVar('WORLD_PKGDATADIR', True))
+    outdir = os.path.join(d.getVar('WORLD_PKGDATADIR'))
     bb.utils.mkdirhier(outdir)
     sigfile = os.path.join(outdir, 'locked-sigs-pkgdata.inc')
     oe.copy_buildsystem.generate_locked_sigs(sigfile, d)
diff --git a/meta/recipes-core/meta/signing-keys.bb b/meta/recipes-core/meta/signing-keys.bb
index d088c0c88c..aaa01d0c34 100644
--- a/meta/recipes-core/meta/signing-keys.bb
+++ b/meta/recipes-core/meta/signing-keys.bb
@@ -21,23 +21,23 @@ FILES_${PN}-packagefeed = "${sysconfdir}/pki/packagefeed-gpg"
 python do_get_public_keys () {
     from oe.gpg_sign import get_signer
 
-    if d.getVar("RPM_SIGN_PACKAGES", True):
+    if d.getVar("RPM_SIGN_PACKAGES"):
         # Export public key of the rpm signing key
-        signer = get_signer(d, d.getVar('RPM_GPG_BACKEND', True))
+        signer = get_signer(d, d.getVar('RPM_GPG_BACKEND'))
         signer.export_pubkey(os.path.join(d.expand('${B}'), 'rpm-key'),
-                             d.getVar('RPM_GPG_NAME', True))
+                             d.getVar('RPM_GPG_NAME'))
 
-    if d.getVar("IPK_SIGN_PACKAGES", True):
+    if d.getVar("IPK_SIGN_PACKAGES"):
         # Export public key of the ipk signing key
-        signer = get_signer(d, d.getVar('IPK_GPG_BACKEND', True))
+        signer = get_signer(d, d.getVar('IPK_GPG_BACKEND'))
         signer.export_pubkey(os.path.join(d.expand('${B}'), 'ipk-key'),
-                             d.getVar('IPK_GPG_NAME', True))
+                             d.getVar('IPK_GPG_NAME'))
 
-    if d.getVar('PACKAGE_FEED_SIGN', True) == '1':
+    if d.getVar('PACKAGE_FEED_SIGN') == '1':
         # Export public key of the feed signing key
-        signer = get_signer(d, d.getVar('PACKAGE_FEED_GPG_BACKEND', True))
+        signer = get_signer(d, d.getVar('PACKAGE_FEED_GPG_BACKEND'))
         signer.export_pubkey(os.path.join(d.expand('${B}'), 'pf-key'),
-                             d.getVar('PACKAGE_FEED_GPG_NAME', True))
+                             d.getVar('PACKAGE_FEED_GPG_NAME'))
 }
 do_get_public_keys[cleandirs] = "${B}"
 addtask get_public_keys before do_install
diff --git a/meta/recipes-core/os-release/os-release.bb b/meta/recipes-core/os-release/os-release.bb
index fac0bacf6a..f988704756 100644
--- a/meta/recipes-core/os-release/os-release.bb
+++ b/meta/recipes-core/os-release/os-release.bb
@@ -32,8 +32,8 @@ def sanitise_version(ver):
 python do_compile () {
     import shutil
     with open(d.expand('${B}/os-release'), 'w') as f:
-        for field in d.getVar('OS_RELEASE_FIELDS', True).split():
-            value = d.getVar(field, True)
+        for field in d.getVar('OS_RELEASE_FIELDS').split():
+            value = d.getVar(field)
             if value and field == 'VERSION_ID':
                 value = sanitise_version(value)
             if value:
diff --git a/meta/recipes-core/packagegroups/packagegroup-base.bb b/meta/recipes-core/packagegroups/packagegroup-base.bb
index 2e94fdefb5..0069e3e0f7 100644
--- a/meta/recipes-core/packagegroups/packagegroup-base.bb
+++ b/meta/recipes-core/packagegroups/packagegroup-base.bb
@@ -110,8 +110,8 @@ python __anonymous () {
     # If Distro want wifi and machine feature wifi/pci/pcmcia/usbhost (one of them)
     # then include packagegroup-base-wifi in packagegroup-base
 
-    distro_features = set(d.getVar("DISTRO_FEATURES", True).split())
-    machine_features= set(d.getVar("MACHINE_FEATURES", True).split())
+    distro_features = set(d.getVar("DISTRO_FEATURES").split())
+    machine_features= set(d.getVar("MACHINE_FEATURES").split())
 
     if "bluetooth" in distro_features and not "bluetooth" in machine_features and ("pcmcia" in machine_features or "pci" in machine_features or "usbhost" in machine_features):
         d.setVar("ADD_BT", "packagegroup-base-bluetooth")
diff --git a/meta/recipes-core/packagegroups/packagegroup-core-sdk.bb b/meta/recipes-core/packagegroups/packagegroup-core-sdk.bb
index 327cd8e865..7d6d41473a 100644
--- a/meta/recipes-core/packagegroups/packagegroup-core-sdk.bb
+++ b/meta/recipes-core/packagegroups/packagegroup-core-sdk.bb
@@ -42,7 +42,7 @@ RRECOMMENDS_packagegroup-core-sdk = "\
 
 #python generate_sdk_pkgs () {
 #    poky_pkgs = read_pkgdata('packagegroup-core', d)['PACKAGES']
-#    pkgs = d.getVar('PACKAGES', True).split()
+#    pkgs = d.getVar('PACKAGES').split()
 #    for pkg in poky_pkgs.split():
 #        newpkg = pkg.replace('packagegroup-core', 'packagegroup-core-sdk')
 #
diff --git a/meta/recipes-core/psplash/psplash_git.bb b/meta/recipes-core/psplash/psplash_git.bb
index 0537426513..b0d6bb4070 100644
--- a/meta/recipes-core/psplash/psplash_git.bb
+++ b/meta/recipes-core/psplash/psplash_git.bb
@@ -16,8 +16,8 @@ SRC_URI = "git://git.yoctoproject.org/${BPN} \
 SPLASH_IMAGES = "file://psplash-poky-img.h;outsuffix=default"
 
 python __anonymous() {
-    oldpkgs = d.getVar("PACKAGES", True).split()
-    splashfiles = d.getVar('SPLASH_IMAGES', True).split()
+    oldpkgs = d.getVar("PACKAGES").split()
+    splashfiles = d.getVar('SPLASH_IMAGES').split()
     pkgs = []
     localpaths = []
     haspng = False
@@ -50,8 +50,8 @@ python __anonymous() {
         d.appendVar("DEPENDS", " gdk-pixbuf-native")
 
     d.prependVar("PACKAGES", "%s " % (" ".join(pkgs)))
-    mlprefix = d.getVar('MLPREFIX', True) or ''
-    pn = d.getVar('PN', True) or ''
+    mlprefix = d.getVar('MLPREFIX') or ''
+    pn = d.getVar('PN') or ''
     for p in pkgs:
         ep = '%s%s' % (mlprefix, p)
         epsplash = '%s%s' % (mlprefix, 'psplash')
@@ -74,11 +74,11 @@ python do_compile () {
     import shutil
 
     # Build a separate executable for each splash image
-    workdir = d.getVar('WORKDIR', True)
-    convertscript = "%s/make-image-header.sh" % d.getVar('S', True)
-    destfile = "%s/psplash-poky-img.h" % d.getVar('S', True)
-    localfiles = d.getVar('SPLASH_LOCALPATHS', True).split()
-    outputfiles = d.getVar('SPLASH_INSTALL', True).split()
+    workdir = d.getVar('WORKDIR')
+    convertscript = "%s/make-image-header.sh" % d.getVar('S')
+    destfile = "%s/psplash-poky-img.h" % d.getVar('S')
+    localfiles = d.getVar('SPLASH_LOCALPATHS').split()
+    outputfiles = d.getVar('SPLASH_INSTALL').split()
     for localfile, outputfile in zip(localfiles, outputfiles):
         if localfile.endswith(".png"):
             outp = oe.utils.getstatusoutput('%s %s POKY' % (convertscript, os.path.join(workdir, localfile)))
@@ -89,7 +89,7 @@ python do_compile () {
             shutil.copyfile(os.path.join(workdir, localfile), destfile)
         # For some reason just updating the header is not enough, we have to touch the .c
         # file in order to get it to rebuild
-        os.utime("%s/psplash.c" % d.getVar('S', True), None)
+        os.utime("%s/psplash.c" % d.getVar('S'), None)
         bb.build.exec_func("oe_runmake", d)
         shutil.copyfile("psplash", outputfile)
 }
diff --git a/meta/recipes-core/systemd/systemd_232.bb b/meta/recipes-core/systemd/systemd_232.bb
index 3391c0118b..5b45eb221a 100644
--- a/meta/recipes-core/systemd/systemd_232.bb
+++ b/meta/recipes-core/systemd/systemd_232.bb
@@ -273,7 +273,7 @@ do_install_ptest () {
 }
 
 python populate_packages_prepend (){
-    systemdlibdir = d.getVar("rootlibdir", True)
+    systemdlibdir = d.getVar("rootlibdir")
     do_split_packages(d, systemdlibdir, '^lib(.*)\.so\.*', 'lib%s', 'Systemd %s library', extra_depends='', allow_links=True)
 }
 PACKAGES_DYNAMIC += "^lib(udev|systemd|nss).*"
@@ -591,6 +591,6 @@ python () {
         raise bb.parse.SkipPackage("'systemd' not in DISTRO_FEATURES")
 
     import re
-    if re.match('.*musl*', d.getVar('TARGET_OS', True)) != None:
+    if re.match('.*musl*', d.getVar('TARGET_OS')) != None:
         raise bb.parse.SkipPackage("Not _yet_ supported on musl based targets")
 }
diff --git a/meta/recipes-core/util-linux/util-linux.inc b/meta/recipes-core/util-linux/util-linux.inc
index 70cba6b592..21815fb1ec 100644
--- a/meta/recipes-core/util-linux/util-linux.inc
+++ b/meta/recipes-core/util-linux/util-linux.inc
@@ -270,7 +270,7 @@ ALTERNATIVE_LINK_NAME[mountpoint] = "${base_bindir}/mountpoint"
 BBCLASSEXTEND = "native nativesdk"
 
 python do_package_prepend () {
-    if '--enable-su' in d.getVar('EXTRA_OECONF', True).split():
+    if '--enable-su' in d.getVar('EXTRA_OECONF').split():
         d.appendVar(d.expand('ALTERNATIVE_${PN}'), ' su')
         d.appendVar(d.expand('ALTERNATIVE_${PN}-doc'), ' su.1')
 
diff --git a/meta/recipes-core/volatile-binds/volatile-binds.bb b/meta/recipes-core/volatile-binds/volatile-binds.bb
index fee7275e32..f07458acc0 100644
--- a/meta/recipes-core/volatile-binds/volatile-binds.bb
+++ b/meta/recipes-core/volatile-binds/volatile-binds.bb
@@ -46,7 +46,7 @@ do_compile () {
46 -e "s#@whatparent@#${spec%/*}#g; s#@whereparent@#${mountpoint%/*}#g" \ 46 -e "s#@whatparent@#${spec%/*}#g; s#@whereparent@#${mountpoint%/*}#g" \
47 volatile-binds.service.in >$servicefile 47 volatile-binds.service.in >$servicefile
48 done <<END 48 done <<END
49${@d.getVar('VOLATILE_BINDS', True).replace("\\n", "\n")} 49${@d.getVar('VOLATILE_BINDS').replace("\\n", "\n")}
50END 50END
51 51
52 if [ -e var-volatile-lib.service ]; then 52 if [ -e var-volatile-lib.service ]; then
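
The heredoc above feeds one bind specification per line to the surrounding while/read loop: VOLATILE_BINDS is conventionally written with literal \n separators, and the inline Python turns them into real newlines before the shell sees the text. Roughly (the value below is illustrative):

    # Illustrative value in the style of the variable's usual setting;
    # the literal backslash-n separators become real newlines.
    volatile_binds = "/var/volatile/lib /var/lib\\n/var/volatile/cache /var/cache"
    print(volatile_binds.replace("\\n", "\n"))
    # /var/volatile/lib /var/lib
    # /var/volatile/cache /var/cache
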
diff --git a/meta/recipes-devtools/apt/apt-native.inc b/meta/recipes-devtools/apt/apt-native.inc
index 9fa206efe1..68f1b3ce2c 100644
--- a/meta/recipes-devtools/apt/apt-native.inc
+++ b/meta/recipes-devtools/apt/apt-native.inc
@@ -17,14 +17,14 @@ python do_install () {
 }
 
 python do_install_config () {
-    indir = os.path.dirname(d.getVar('FILE', True))
+    indir = os.path.dirname(d.getVar('FILE'))
     infile = open(oe.path.join(indir, 'files', 'apt.conf'), 'r')
     data = infile.read()
     infile.close()
 
     data = d.expand(data)
 
-    outdir = oe.path.join(d.getVar('D', True), d.getVar('sysconfdir', True), 'apt')
+    outdir = oe.path.join(d.getVar('D'), d.getVar('sysconfdir'), 'apt')
     if not os.path.exists(outdir):
         os.makedirs(outdir)
 
diff --git a/meta/recipes-devtools/apt/apt-package.inc b/meta/recipes-devtools/apt/apt-package.inc
index a553aa21fe..6e8cb84a5b 100644
--- a/meta/recipes-devtools/apt/apt-package.inc
+++ b/meta/recipes-devtools/apt/apt-package.inc
@@ -34,9 +34,9 @@ FILES_${PN} = "${bindir}/apt-cdrom ${bindir}/apt-get \
34 ${localstatedir} ${sysconfdir} \ 34 ${localstatedir} ${sysconfdir} \
35 ${libdir}/dpkg" 35 ${libdir}/dpkg"
36FILES_${PN}-utils = "${bindir}/apt-sortpkgs ${bindir}/apt-extracttemplates" 36FILES_${PN}-utils = "${bindir}/apt-sortpkgs ${bindir}/apt-extracttemplates"
37FILES_${PN}-doc = "${@get_files_apt_doc(d, bb, d.getVar('apt-manpages', True))} \ 37FILES_${PN}-doc = "${@get_files_apt_doc(d, bb, d.getVar('apt-manpages'))} \
38 ${docdir}/apt" 38 ${docdir}/apt"
39FILES_${PN}-utils-doc = "${@get_files_apt_doc(d, bb, d.getVar('apt-utils-manpages', True))}" 39FILES_${PN}-utils-doc = "${@get_files_apt_doc(d, bb, d.getVar('apt-utils-manpages'))}"
40FILES_${PN}-dev = "${libdir}/libapt*.so ${includedir}" 40FILES_${PN}-dev = "${libdir}/libapt*.so ${includedir}"
41 41
42do_install () { 42do_install () {
diff --git a/meta/recipes-devtools/automake/automake.inc b/meta/recipes-devtools/automake/automake.inc
index 4f9b5f7740..87cedc9838 100644
--- a/meta/recipes-devtools/automake/automake.inc
+++ b/meta/recipes-devtools/automake/automake.inc
@@ -16,6 +16,6 @@ do_configure() {
     oe_runconf
 }
 
-export AUTOMAKE = "${@bb.utils.which('automake', d.getVar('PATH', True))}"
+export AUTOMAKE = "${@bb.utils.which('automake', d.getVar('PATH'))}"
 
 FILES_${PN} += "${datadir}/automake* ${datadir}/aclocal*"
diff --git a/meta/recipes-devtools/binutils/binutils-2.27.inc b/meta/recipes-devtools/binutils/binutils-2.27.inc
index 75180eaf50..df8864cbfa 100644
--- a/meta/recipes-devtools/binutils/binutils-2.27.inc
+++ b/meta/recipes-devtools/binutils/binutils-2.27.inc
@@ -11,7 +11,7 @@ LIC_FILES_CHKSUM="\
11 " 11 "
12 12
13def binutils_branch_version(d): 13def binutils_branch_version(d):
14 pvsplit = d.getVar('PV', True).split('.') 14 pvsplit = d.getVar('PV').split('.')
15 return pvsplit[0] + "_" + pvsplit[1] 15 return pvsplit[0] + "_" + pvsplit[1]
16 16
17BINUPV = "${@binutils_branch_version(d)}" 17BINUPV = "${@binutils_branch_version(d)}"
diff --git a/meta/recipes-devtools/binutils/binutils.inc b/meta/recipes-devtools/binutils/binutils.inc
index 75d190f802..9b1192b6e7 100644
--- a/meta/recipes-devtools/binutils/binutils.inc
+++ b/meta/recipes-devtools/binutils/binutils.inc
@@ -59,9 +59,9 @@ USE_ALTERNATIVES_FOR = " \
59" 59"
60 60
61python do_package_prepend() { 61python do_package_prepend() {
62 make_alts = d.getVar("USE_ALTERNATIVES_FOR", True) or "" 62 make_alts = d.getVar("USE_ALTERNATIVES_FOR") or ""
63 prefix = d.getVar("TARGET_PREFIX", True) 63 prefix = d.getVar("TARGET_PREFIX")
64 bindir = d.getVar("bindir", True) 64 bindir = d.getVar("bindir")
65 for alt in make_alts.split(): 65 for alt in make_alts.split():
66 d.setVarFlag('ALTERNATIVE_TARGET', alt, bindir + "/" + prefix + alt) 66 d.setVarFlag('ALTERNATIVE_TARGET', alt, bindir + "/" + prefix + alt)
67 d.setVarFlag('ALTERNATIVE_LINK_NAME', alt, bindir + "/" + alt) 67 d.setVarFlag('ALTERNATIVE_LINK_NAME', alt, bindir + "/" + alt)
diff --git a/meta/recipes-devtools/cmake/cmake.inc b/meta/recipes-devtools/cmake/cmake.inc
index 9ae28900a6..40255d9da3 100644
--- a/meta/recipes-devtools/cmake/cmake.inc
+++ b/meta/recipes-devtools/cmake/cmake.inc
@@ -9,7 +9,7 @@ LICENSE = "BSD"
 LIC_FILES_CHKSUM = "file://Copyright.txt;md5=7a64bc564202bf7401d9a8ef33c9564d \
                     file://Source/cmake.h;beginline=1;endline=3;md5=4494dee184212fc89c469c3acd555a14"
 
-CMAKE_MAJOR_VERSION = "${@'.'.join(d.getVar('PV', True).split('.')[0:2])}"
+CMAKE_MAJOR_VERSION = "${@'.'.join(d.getVar('PV').split('.')[0:2])}"
 
 SRC_URI = "https://cmake.org/files/v${CMAKE_MAJOR_VERSION}/cmake-${PV}.tar.gz \
            file://support-oe-qt4-tools-names.patch \
diff --git a/meta/recipes-devtools/cmake/cmake_3.7.0.bb b/meta/recipes-devtools/cmake/cmake_3.7.0.bb
index 58c6615680..787f33c02b 100644
--- a/meta/recipes-devtools/cmake/cmake_3.7.0.bb
+++ b/meta/recipes-devtools/cmake/cmake_3.7.0.bb
@@ -10,8 +10,8 @@ SRC_URI_append_class-nativesdk = " \
 
 # Strip ${prefix} from ${docdir}, set result into docdir_stripped
 python () {
-    prefix=d.getVar("prefix", True)
-    docdir=d.getVar("docdir", True)
+    prefix=d.getVar("prefix")
+    docdir=d.getVar("docdir")
 
     if not docdir.startswith(prefix):
         bb.fatal('docdir must contain prefix as its prefix')
diff --git a/meta/recipes-devtools/dpkg/dpkg.inc b/meta/recipes-devtools/dpkg/dpkg.inc
index e8d8a9b4f7..ff883183dc 100644
--- a/meta/recipes-devtools/dpkg/dpkg.inc
+++ b/meta/recipes-devtools/dpkg/dpkg.inc
@@ -13,7 +13,7 @@ inherit autotools gettext perlnative pkgconfig systemd
 
 python () {
     if not bb.utils.contains('DISTRO_FEATURES', 'sysvinit', True, False, d):
-        pn = d.getVar('PN', True)
+        pn = d.getVar('PN')
         d.setVar('SYSTEMD_SERVICE_%s' % (pn), 'dpkg-configure.service')
 }
 
diff --git a/meta/recipes-devtools/gcc/gcc-common.inc b/meta/recipes-devtools/gcc/gcc-common.inc
index f540b4d965..d17ba29d1d 100644
--- a/meta/recipes-devtools/gcc/gcc-common.inc
+++ b/meta/recipes-devtools/gcc/gcc-common.inc
@@ -10,35 +10,35 @@ inherit autotools gettext texinfo
10BPN = "gcc" 10BPN = "gcc"
11 11
12def get_gcc_float_setting(bb, d): 12def get_gcc_float_setting(bb, d):
13 if d.getVar('ARMPKGSFX_EABI', True) == "hf" and d.getVar('TRANSLATED_TARGET_ARCH', True) == "arm": 13 if d.getVar('ARMPKGSFX_EABI') == "hf" and d.getVar('TRANSLATED_TARGET_ARCH') == "arm":
14 return "--with-float=hard" 14 return "--with-float=hard"
15 if d.getVar('TARGET_FPU', True) in [ 'soft' ]: 15 if d.getVar('TARGET_FPU') in [ 'soft' ]:
16 return "--with-float=soft" 16 return "--with-float=soft"
17 if d.getVar('TARGET_FPU', True) in [ 'ppc-efd' ]: 17 if d.getVar('TARGET_FPU') in [ 'ppc-efd' ]:
18 return "--enable-e500_double" 18 return "--enable-e500_double"
19 return "" 19 return ""
20 20
21get_gcc_float_setting[vardepvalue] = "${@get_gcc_float_setting(bb, d)}" 21get_gcc_float_setting[vardepvalue] = "${@get_gcc_float_setting(bb, d)}"
22 22
23def get_gcc_mips_plt_setting(bb, d): 23def get_gcc_mips_plt_setting(bb, d):
24 if d.getVar('TRANSLATED_TARGET_ARCH', True) in [ 'mips', 'mipsel' ] and bb.utils.contains('DISTRO_FEATURES', 'mplt', True, False, d): 24 if d.getVar('TRANSLATED_TARGET_ARCH') in [ 'mips', 'mipsel' ] and bb.utils.contains('DISTRO_FEATURES', 'mplt', True, False, d):
25 return "--with-mips-plt" 25 return "--with-mips-plt"
26 return "" 26 return ""
27 27
28def get_gcc_ppc_plt_settings(bb, d): 28def get_gcc_ppc_plt_settings(bb, d):
29 if d.getVar('TRANSLATED_TARGET_ARCH', True) in [ 'powerpc' ] and not bb.utils.contains('DISTRO_FEATURES', 'bssplt', True, False, d): 29 if d.getVar('TRANSLATED_TARGET_ARCH') in [ 'powerpc' ] and not bb.utils.contains('DISTRO_FEATURES', 'bssplt', True, False, d):
30 return "--enable-secureplt" 30 return "--enable-secureplt"
31 return "" 31 return ""
32 32
33def get_long_double_setting(bb, d): 33def get_long_double_setting(bb, d):
34 if d.getVar('TRANSLATED_TARGET_ARCH', True) in [ 'powerpc', 'powerpc64' ] and d.getVar('TCLIBC', True) in [ 'uclibc', 'glibc' ]: 34 if d.getVar('TRANSLATED_TARGET_ARCH') in [ 'powerpc', 'powerpc64' ] and d.getVar('TCLIBC') in [ 'uclibc', 'glibc' ]:
35 return "--with-long-double-128" 35 return "--with-long-double-128"
36 else: 36 else:
37 return "--without-long-double-128" 37 return "--without-long-double-128"
38 return "" 38 return ""
39 39
40def get_gcc_multiarch_setting(bb, d): 40def get_gcc_multiarch_setting(bb, d):
41 target_arch = d.getVar('TRANSLATED_TARGET_ARCH', True) 41 target_arch = d.getVar('TRANSLATED_TARGET_ARCH')
42 multiarch_options = { 42 multiarch_options = {
43 "i586": "--enable-targets=all", 43 "i586": "--enable-targets=all",
44 "i686": "--enable-targets=all", 44 "i686": "--enable-targets=all",
@@ -54,7 +54,7 @@ def get_gcc_multiarch_setting(bb, d):
54 54
55# this is used by the multilib setup of gcc 55# this is used by the multilib setup of gcc
56def get_tune_parameters(tune, d): 56def get_tune_parameters(tune, d):
57 availtunes = d.getVar('AVAILTUNES', True) 57 availtunes = d.getVar('AVAILTUNES')
58 if tune not in availtunes.split(): 58 if tune not in availtunes.split():
59 bb.error('The tune: %s is not one of the available tunes: %s' % (tune or None, availtunes)) 59 bb.error('The tune: %s is not one of the available tunes: %s' % (tune or None, availtunes))
60 60
@@ -65,15 +65,15 @@ def get_tune_parameters(tune, d):
65 65
66 retdict = {} 66 retdict = {}
67 retdict['tune'] = tune 67 retdict['tune'] = tune
68 retdict['ccargs'] = localdata.getVar('TUNE_CCARGS', True) 68 retdict['ccargs'] = localdata.getVar('TUNE_CCARGS')
69 retdict['features'] = localdata.getVar('TUNE_FEATURES', True) 69 retdict['features'] = localdata.getVar('TUNE_FEATURES')
70 # BASELIB is used by the multilib code to change library paths 70 # BASELIB is used by the multilib code to change library paths
71 retdict['baselib'] = localdata.getVar('BASE_LIB', True) or localdata.getVar('BASELIB', True) 71 retdict['baselib'] = localdata.getVar('BASE_LIB') or localdata.getVar('BASELIB')
72 retdict['arch'] = localdata.getVar('TUNE_ARCH', True) 72 retdict['arch'] = localdata.getVar('TUNE_ARCH')
73 retdict['abiextension'] = localdata.getVar('ABIEXTENSION', True) 73 retdict['abiextension'] = localdata.getVar('ABIEXTENSION')
74 retdict['target_fpu'] = localdata.getVar('TARGET_FPU', True) 74 retdict['target_fpu'] = localdata.getVar('TARGET_FPU')
75 retdict['pkgarch'] = localdata.getVar('TUNE_PKGARCH', True) 75 retdict['pkgarch'] = localdata.getVar('TUNE_PKGARCH')
76 retdict['package_extra_archs'] = localdata.getVar('PACKAGE_EXTRA_ARCHS', True) 76 retdict['package_extra_archs'] = localdata.getVar('PACKAGE_EXTRA_ARCHS')
77 return retdict 77 return retdict
78 78
79get_tune_parameters[vardepsexclude] = "AVAILTUNES TUNE_CCARGS OVERRIDES TUNE_FEATURES BASE_LIB BASELIB TUNE_ARCH ABIEXTENSION TARGET_FPU TUNE_PKGARCH PACKAGE_EXTRA_ARCHS" 79get_tune_parameters[vardepsexclude] = "AVAILTUNES TUNE_CCARGS OVERRIDES TUNE_FEATURES BASE_LIB BASELIB TUNE_ARCH ABIEXTENSION TARGET_FPU TUNE_PKGARCH PACKAGE_EXTRA_ARCHS"
diff --git a/meta/recipes-devtools/gcc/gcc-configure-common.inc b/meta/recipes-devtools/gcc/gcc-configure-common.inc
index ddebbb8410..00ef89ec57 100644
--- a/meta/recipes-devtools/gcc/gcc-configure-common.inc
+++ b/meta/recipes-devtools/gcc/gcc-configure-common.inc
@@ -23,7 +23,7 @@ GCCMULTILIB ?= "--disable-multilib"
 GCCTHREADS ?= "posix"
 
 EXTRA_OECONF = "\
-    ${@['--enable-clocale=generic', ''][d.getVar('USE_NLS', True) != 'no']} \
+    ${@['--enable-clocale=generic', ''][d.getVar('USE_NLS') != 'no']} \
     --with-gnu-ld \
     --enable-shared \
     --enable-languages=${LANGUAGES} \
diff --git a/meta/recipes-devtools/gcc/gcc-cross.inc b/meta/recipes-devtools/gcc/gcc-cross.inc
index cc465a2796..c4f7084ba0 100644
--- a/meta/recipes-devtools/gcc/gcc-cross.inc
+++ b/meta/recipes-devtools/gcc/gcc-cross.inc
@@ -5,7 +5,7 @@ EXTRADEPENDS = ""
 DEPENDS = "virtual/${TARGET_PREFIX}binutils virtual/${TARGET_PREFIX}libc-for-gcc ${EXTRADEPENDS} ${NATIVEDEPS}"
 PROVIDES = "virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}g++"
 python () {
-    if d.getVar("TARGET_OS", True).startswith("linux"):
+    if d.getVar("TARGET_OS").startswith("linux"):
         d.setVar("EXTRADEPENDS", "linux-libc-headers")
 }
 
diff --git a/meta/recipes-devtools/gcc/gcc-multilib-config.inc b/meta/recipes-devtools/gcc/gcc-multilib-config.inc
index a0a2ac09a7..31b8619be3 100644
--- a/meta/recipes-devtools/gcc/gcc-multilib-config.inc
+++ b/meta/recipes-devtools/gcc/gcc-multilib-config.inc
@@ -21,8 +21,8 @@ python gcc_multilib_setup() {
     import shutil
     import glob
 
-    srcdir = d.getVar('S', True)
-    builddir = d.getVar('B', True)
+    srcdir = d.getVar('S')
+    builddir = d.getVar('B')
     src_conf_dir = '%s/gcc/config' % srcdir
     build_conf_dir = '%s/gcc/config' % builddir
 
@@ -43,12 +43,12 @@ python gcc_multilib_setup() {
         bb.utils.mkdirhier('%s/%s' % (build_conf_dir, parent_dir))
         bb.utils.copyfile(fn, '%s/%s' % (build_conf_dir, rel_path))
 
-    pn = d.getVar('PN', True)
-    multilibs = (d.getVar('MULTILIB_VARIANTS', True) or '').split()
+    pn = d.getVar('PN')
+    multilibs = (d.getVar('MULTILIB_VARIANTS') or '').split()
     if not multilibs and pn != "nativesdk-gcc":
         return
 
-    mlprefix = d.getVar('MLPREFIX', True)
+    mlprefix = d.getVar('MLPREFIX')
 
     if ('%sgcc' % mlprefix) != pn and (not pn.startswith('gcc-cross-canadian')) and pn != "nativesdk-gcc":
         return
@@ -155,10 +155,10 @@ python gcc_multilib_setup() {
         libdirn32 = 'SYSTEMLIBS_DIR'
 
 
-    target_arch = (d.getVar('TARGET_ARCH_MULTILIB_ORIGINAL', True) if mlprefix
-                   else d.getVar('TARGET_ARCH', True))
+    target_arch = (d.getVar('TARGET_ARCH_MULTILIB_ORIGINAL') if mlprefix
+                   else d.getVar('TARGET_ARCH'))
     if pn == "nativesdk-gcc":
-        header_config_files = gcc_header_config_files[d.getVar("SDK_ARCH", True)]
+        header_config_files = gcc_header_config_files[d.getVar("SDK_ARCH")]
         write_headers(builddir, header_config_files, libdir32, libdir64, libdirx32, libdirn32)
         return
 
@@ -188,7 +188,7 @@ python gcc_multilib_setup() {
     optsets = []
 
     for ml in ml_list:
-        tune = d.getVar(ml, True)
+        tune = d.getVar(ml)
         if not tune:
             bb.warn("%s doesn't have a corresponding tune. Skipping..." % ml)
             continue
@@ -212,7 +212,7 @@ python gcc_multilib_setup() {
 
     # take out '-' mcpu='s and march='s from parameters
     opts = []
-    whitelist = (d.getVar("MULTILIB_OPTION_WHITELIST", True) or "").split()
+    whitelist = (d.getVar("MULTILIB_OPTION_WHITELIST") or "").split()
     for i in d.expand(tune_parameters['ccargs']).split():
         if i in whitelist:
             # Need to strip '-' from option
diff --git a/meta/recipes-devtools/gcc/libgcc-common.inc b/meta/recipes-devtools/gcc/libgcc-common.inc
index 8a13f542c1..74e9faa528 100644
--- a/meta/recipes-devtools/gcc/libgcc-common.inc
+++ b/meta/recipes-devtools/gcc/libgcc-common.inc
@@ -64,18 +64,18 @@ addtask multilib_install after do_install before do_package do_populate_sysroot
 fakeroot python do_multilib_install() {
     import re
 
-    multilibs = d.getVar('MULTILIB_VARIANTS', True)
+    multilibs = d.getVar('MULTILIB_VARIANTS')
     if not multilibs or bb.data.inherits_class('nativesdk', d):
         return
 
-    binv = d.getVar('BINV', True)
+    binv = d.getVar('BINV')
 
-    mlprefix = d.getVar('MLPREFIX', True)
-    if ('%slibgcc' % mlprefix) != d.getVar('PN', True):
+    mlprefix = d.getVar('MLPREFIX')
+    if ('%slibgcc' % mlprefix) != d.getVar('PN'):
         return
 
     if mlprefix:
-        orig_tune = d.getVar('DEFAULTTUNE_MULTILIB_ORIGINAL', True)
+        orig_tune = d.getVar('DEFAULTTUNE_MULTILIB_ORIGINAL')
         orig_tune_params = get_tune_parameters(orig_tune, d)
         orig_tune_baselib = orig_tune_params['baselib']
         orig_tune_bitness = orig_tune_baselib.replace('lib', '')
@@ -83,10 +83,10 @@ fakeroot python do_multilib_install() {
             orig_tune_bitness = '32'
 
         src = '../../../' + orig_tune_baselib + '/' + \
-            d.getVar('TARGET_SYS_MULTILIB_ORIGINAL', True) + '/' + binv + '/'
+            d.getVar('TARGET_SYS_MULTILIB_ORIGINAL') + '/' + binv + '/'
 
-        dest = d.getVar('D', True) + d.getVar('libdir', True) + '/' + \
-            d.getVar('TARGET_SYS', True) + '/' + binv + '/' + orig_tune_bitness
+        dest = d.getVar('D') + d.getVar('libdir') + '/' + \
+            d.getVar('TARGET_SYS') + '/' + binv + '/' + orig_tune_bitness
 
         if os.path.lexists(dest):
             os.unlink(dest)
@@ -95,7 +95,7 @@ fakeroot python do_multilib_install() {
 
 
     for ml in multilibs.split():
-        tune = d.getVar('DEFAULTTUNE_virtclass-multilib-' + ml, True)
+        tune = d.getVar('DEFAULTTUNE_virtclass-multilib-' + ml)
         if not tune:
             bb.warn('DEFAULTTUNE_virtclass-multilib-%s is not defined. Skipping...' % ml)
             continue
@@ -118,11 +118,11 @@ fakeroot python do_multilib_install() {
             libcextension = ''
 
         src = '../../../' + tune_baselib + '/' + \
-            tune_arch + d.getVar('TARGET_VENDOR', True) + 'ml' + ml + \
-            '-' + d.getVar('TARGET_OS', True) + libcextension + '/' + binv + '/'
+            tune_arch + d.getVar('TARGET_VENDOR') + 'ml' + ml + \
+            '-' + d.getVar('TARGET_OS') + libcextension + '/' + binv + '/'
 
-        dest = d.getVar('D', True) + d.getVar('libdir', True) + '/' + \
-            d.getVar('TARGET_SYS', True) + '/' + binv + '/' + tune_bitness
+        dest = d.getVar('D') + d.getVar('libdir') + '/' + \
+            d.getVar('TARGET_SYS') + '/' + binv + '/' + tune_bitness
 
         if os.path.lexists(dest):
             os.unlink(dest)
@@ -131,7 +131,7 @@ fakeroot python do_multilib_install() {
 
 def get_original_os(d):
     vendoros = d.expand('${TARGET_ARCH}${ORIG_TARGET_VENDOR}-${TARGET_OS}')
-    for suffix in [d.getVar('ABIEXTENSION', True), d.getVar('LIBCEXTENSION', True)]:
+    for suffix in [d.getVar('ABIEXTENSION'), d.getVar('LIBCEXTENSION')]:
         if suffix and vendoros.endswith(suffix):
             vendoros = vendoros[:-len(suffix)]
     # Arm must use linux-gnueabi not linux as only the former is accepted by gcc
@@ -144,11 +144,11 @@ BASETARGET_SYS = "${@get_original_os(d)}"
 
 addtask extra_symlinks after do_multilib_install before do_package do_populate_sysroot
 fakeroot python do_extra_symlinks() {
-    targetsys = d.getVar('BASETARGET_SYS', True)
+    targetsys = d.getVar('BASETARGET_SYS')
 
-    if targetsys != d.getVar('TARGET_SYS', True):
-        dest = d.getVar('D', True) + d.getVar('libdir', True) + '/' + targetsys
-        src = d.getVar('TARGET_SYS', True)
-        if not os.path.lexists(dest) and os.path.lexists(d.getVar('D', True) + d.getVar('libdir', True)):
+    if targetsys != d.getVar('TARGET_SYS'):
+        dest = d.getVar('D') + d.getVar('libdir') + '/' + targetsys
+        src = d.getVar('TARGET_SYS')
+        if not os.path.lexists(dest) and os.path.lexists(d.getVar('D') + d.getVar('libdir')):
             os.symlink(src, dest)
 }
diff --git a/meta/recipes-devtools/gcc/libgfortran.inc b/meta/recipes-devtools/gcc/libgfortran.inc
index 58ceb2e073..194363543e 100644
--- a/meta/recipes-devtools/gcc/libgfortran.inc
+++ b/meta/recipes-devtools/gcc/libgfortran.inc
@@ -69,7 +69,7 @@ do_package_write_deb[depends] += "virtual/${MLPREFIX}libc:do_packagedata"
 do_package_write_rpm[depends] += "virtual/${MLPREFIX}libc:do_packagedata"
 
 python __anonymous () {
-    f = d.getVar("FORTRAN", True)
+    f = d.getVar("FORTRAN")
     if "fortran" not in f:
         raise bb.parse.SkipPackage("libgfortran needs fortran support to be enabled in the compiler")
 }
diff --git a/meta/recipes-devtools/git/git.inc b/meta/recipes-devtools/git/git.inc
index 753b0472a5..ade99ecfe0 100644
--- a/meta/recipes-devtools/git/git.inc
+++ b/meta/recipes-devtools/git/git.inc
@@ -46,7 +46,7 @@ do_install () {
 perl_native_fixup () {
     sed -i -e 's#${STAGING_BINDIR_NATIVE}/perl-native/#${bindir}/#' \
            -e 's#${libdir}/perl-native/#${libdir}/#' \
-        ${@d.getVar("PERLTOOLS", True).replace(' /',d.getVar('D', True) + '/')}
+        ${@d.getVar("PERLTOOLS").replace(' /',d.getVar('D') + '/')}
 
     # ${libdir} is not applicable here, perl-native files are always
     # installed to /usr/lib on both 32/64 bits targets.
diff --git a/meta/recipes-devtools/kconfig-frontends/kconfig-frontends_3.12.0.0.bb b/meta/recipes-devtools/kconfig-frontends/kconfig-frontends_3.12.0.0.bb
index 9b65a991d4..09e10c9e0e 100644
--- a/meta/recipes-devtools/kconfig-frontends/kconfig-frontends_3.12.0.0.bb
+++ b/meta/recipes-devtools/kconfig-frontends/kconfig-frontends_3.12.0.0.bb
@@ -32,7 +32,7 @@ EXTRA_OECONF += "--disable-gconf --disable-qconf"
 
 # Some packages have the version preceeding the .so instead properly
 # versioned .so.<version>, so we need to reorder and repackage.
-SOLIBS = "-${@d.getVar('PV', True)[:-2]}.so"
+SOLIBS = "-${@d.getVar('PV')[:-2]}.so"
 FILES_SOLIBSDEV = "${libdir}/libkconfig-parser.so"
 
 BBCLASSEXTEND = "native"
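
The slice in SOLIBS is plain string manipulation on the expanded version string. Worked through with this recipe's version:

    # PV is "3.12.0.0"; dropping the last two characters gives the version
    # that actually precedes ".so" in the shipped library name.
    pv = "3.12.0.0"
    print("-%s.so" % pv[:-2])   # -3.12.0.so
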
diff --git a/meta/recipes-devtools/perl/perl_5.24.0.bb b/meta/recipes-devtools/perl/perl_5.24.0.bb
index ab50d29d33..6804341170 100644
--- a/meta/recipes-devtools/perl/perl_5.24.0.bb
+++ b/meta/recipes-devtools/perl/perl_5.24.0.bb
@@ -356,7 +356,7 @@ python split_perl_packages () {
     # perl-modules should recommend every perl module, and only the
     # modules. Don't attempt to use the result of do_split_packages() as some
     # modules are manually split (eg. perl-module-unicore).
-    packages = filter(lambda p: 'perl-module-' in p, d.getVar('PACKAGES', True).split())
+    packages = filter(lambda p: 'perl-module-' in p, d.getVar('PACKAGES').split())
     d.setVar(d.expand("RRECOMMENDS_${PN}-modules"), ' '.join(packages))
 }
 
diff --git a/meta/recipes-devtools/prelink/prelink_git.bb b/meta/recipes-devtools/prelink/prelink_git.bb
index 0856b1c667..4529dbfcfb 100644
--- a/meta/recipes-devtools/prelink/prelink_git.bb
+++ b/meta/recipes-devtools/prelink/prelink_git.bb
@@ -132,7 +132,7 @@ python do_linkerpaths () {
 }
 
 python () {
-    overrides = d.getVar("OVERRIDES", True).split(":")
+    overrides = d.getVar("OVERRIDES").split(":")
     if "class-target" in overrides:
         bb.build.addtask('do_linkerpaths', 'do_configure', 'do_patch', d)
 }
diff --git a/meta/recipes-devtools/python/python-smartpm_git.bb b/meta/recipes-devtools/python/python-smartpm_git.bb
index 861910cc29..6daabf49a6 100644
--- a/meta/recipes-devtools/python/python-smartpm_git.bb
+++ b/meta/recipes-devtools/python/python-smartpm_git.bb
@@ -97,9 +97,9 @@ do_install_append() {
 
 add_native_wrapper() {
     create_wrapper ${D}/${bindir}/smart \
-        RPM_USRLIBRPM='`dirname $''realpath`'/${@os.path.relpath(d.getVar('libdir', True), d.getVar('bindir', True))}/rpm \
-        RPM_ETCRPM='$'{RPM_ETCRPM-'`dirname $''realpath`'/${@os.path.relpath(d.getVar('sysconfdir', True), d.getVar('bindir', True))}/rpm} \
-        RPM_LOCALEDIRRPM='`dirname $''realpath`'/${@os.path.relpath(d.getVar('datadir', True), d.getVar('bindir', True))}/locale
+        RPM_USRLIBRPM='`dirname $''realpath`'/${@os.path.relpath(d.getVar('libdir'), d.getVar('bindir'))}/rpm \
+        RPM_ETCRPM='$'{RPM_ETCRPM-'`dirname $''realpath`'/${@os.path.relpath(d.getVar('sysconfdir'), d.getVar('bindir'))}/rpm} \
+        RPM_LOCALEDIRRPM='`dirname $''realpath`'/${@os.path.relpath(d.getVar('datadir'), d.getVar('bindir'))}/locale
 }
 
 do_install_append_class-native() {
diff --git a/meta/recipes-devtools/qemu/qemu-targets.inc b/meta/recipes-devtools/qemu/qemu-targets.inc
index a3e46a894e..66dd675ed4 100644
--- a/meta/recipes-devtools/qemu/qemu-targets.inc
+++ b/meta/recipes-devtools/qemu/qemu-targets.inc
@@ -4,8 +4,8 @@
 
 def get_qemu_target_list(d):
     import bb
-    archs = d.getVar('QEMU_TARGETS', True).split()
-    tos = d.getVar('HOST_OS', True)
+    archs = d.getVar('QEMU_TARGETS').split()
+    tos = d.getVar('HOST_OS')
     softmmuonly = ""
     for arch in ['ppcemb']:
         if arch in archs:
diff --git a/meta/recipes-devtools/rpm/rpm_5.4.16.bb b/meta/recipes-devtools/rpm/rpm_5.4.16.bb
index 182818ee09..3df4d1ff1d 100644
--- a/meta/recipes-devtools/rpm/rpm_5.4.16.bb
+++ b/meta/recipes-devtools/rpm/rpm_5.4.16.bb
@@ -626,13 +626,13 @@ def multilib_rpmmacros(d):
     localdata.delVar('TOOLCHAIN_OPTIONS')
 
     # Set 'localdata' values to be consistent with 'd' values.
-    distromacrodirVal = d.getVar('distromacrodir', True)
-    workdirVal = d.getVar('WORKDIR', True)
-    dval = d.getVar('D', True)
+    distromacrodirVal = d.getVar('distromacrodir')
+    workdirVal = d.getVar('WORKDIR')
+    dval = d.getVar('D')
 
     ret = gen_arch_macro(localdata)
 
-    variants = d.getVar("MULTILIB_VARIANTS", True) or ""
+    variants = d.getVar("MULTILIB_VARIANTS") or ""
     for item in variants.split():
         # Load overrides from 'd' to avoid having to reset the value...
         localdata = d.createCopy()
@@ -656,17 +656,17 @@ def gen_arch_macro(d):
     val += "\n"
     val += "# Toolchain configuration\n"
    val += "%TOOLCHAIN_OPTIONS %{nil}\n"
-    val += "%__ar ${@d.getVar('AR', True).replace('$','%')}\n"
-    val += "%__as ${@d.getVar('AS', True).replace('$','%')}\n"
-    val += "%__cc ${@d.getVar('CC', True).replace('$','%')}\n"
-    val += "%__cpp ${@d.getVar('CPP', True).replace('$','%')}\n"
-    val += "%__cxx ${@d.getVar('CXX', True).replace('$','%')}\n"
-    val += "%__ld ${@d.getVar('LD', True).replace('$','%')}\n"
-    val += "%__nm ${@d.getVar('NM', True).replace('$','%')}\n"
-    val += "%__objcopy ${@d.getVar('OBJCOPY', True).replace('$','%')}\n"
-    val += "%__objdump ${@d.getVar('OBJDUMP', True).replace('$','%')}\n"
-    val += "%__ranlib ${@d.getVar('RANLIB', True).replace('$','%')}\n"
-    val += "%__strip ${@d.getVar('STRIP', True).replace('$','%')}\n"
+    val += "%__ar ${@d.getVar('AR').replace('$','%')}\n"
+    val += "%__as ${@d.getVar('AS').replace('$','%')}\n"
+    val += "%__cc ${@d.getVar('CC').replace('$','%')}\n"
+    val += "%__cpp ${@d.getVar('CPP').replace('$','%')}\n"
+    val += "%__cxx ${@d.getVar('CXX').replace('$','%')}\n"
+    val += "%__ld ${@d.getVar('LD').replace('$','%')}\n"
+    val += "%__nm ${@d.getVar('NM').replace('$','%')}\n"
+    val += "%__objcopy ${@d.getVar('OBJCOPY').replace('$','%')}\n"
+    val += "%__objdump ${@d.getVar('OBJDUMP').replace('$','%')}\n"
+    val += "%__ranlib ${@d.getVar('RANLIB').replace('$','%')}\n"
+    val += "%__strip ${@d.getVar('STRIP').replace('$','%')}\n"
     val += "EOF\n"
     val += "\n"
     return d.expand(val)
@@ -674,30 +674,30 @@ def gen_arch_macro(d):
 
 add_native_wrapper() {
     create_wrapper ${D}/${bindir}/rpm \
-        RPM_USRLIBRPM='`dirname $''realpath`'/${@os.path.relpath(d.getVar('libdir', True), d.getVar('bindir', True))}/rpm \
-        RPM_ETCRPM='$'{RPM_ETCRPM-'`dirname $''realpath`'/${@os.path.relpath(d.getVar('sysconfdir', True), d.getVar('bindir', True))}/rpm} \
-        RPM_LOCALEDIRRPM='`dirname $''realpath`'/${@os.path.relpath(d.getVar('datadir', True), d.getVar('bindir', True))}/locale
+        RPM_USRLIBRPM='`dirname $''realpath`'/${@os.path.relpath(d.getVar('libdir'), d.getVar('bindir'))}/rpm \
+        RPM_ETCRPM='$'{RPM_ETCRPM-'`dirname $''realpath`'/${@os.path.relpath(d.getVar('sysconfdir'), d.getVar('bindir'))}/rpm} \
+        RPM_LOCALEDIRRPM='`dirname $''realpath`'/${@os.path.relpath(d.getVar('datadir'), d.getVar('bindir'))}/locale
 
     create_wrapper ${D}/${bindir}/rpm2cpio \
-        RPM_USRLIBRPM='`dirname $''realpath`'/${@os.path.relpath(d.getVar('libdir', True), d.getVar('bindir', True))}/rpm \
-        RPM_ETCRPM='$'{RPM_ETCRPM-'`dirname $''realpath`'/${@os.path.relpath(d.getVar('sysconfdir', True), d.getVar('bindir', True))}/rpm} \
-        RPM_LOCALEDIRRPM='`dirname $''realpath`'/${@os.path.relpath(d.getVar('datadir', True), d.getVar('bindir', True))}/locale
+        RPM_USRLIBRPM='`dirname $''realpath`'/${@os.path.relpath(d.getVar('libdir'), d.getVar('bindir'))}/rpm \
+        RPM_ETCRPM='$'{RPM_ETCRPM-'`dirname $''realpath`'/${@os.path.relpath(d.getVar('sysconfdir'), d.getVar('bindir'))}/rpm} \
+        RPM_LOCALEDIRRPM='`dirname $''realpath`'/${@os.path.relpath(d.getVar('datadir'), d.getVar('bindir'))}/locale
 
     create_wrapper ${D}/${bindir}/rpmbuild \
-        RPM_USRLIBRPM='`dirname $''realpath`'/${@os.path.relpath(d.getVar('libdir', True), d.getVar('bindir', True))}/rpm \
-        RPM_ETCRPM='$'{RPM_ETCRPM-'`dirname $''realpath`'/${@os.path.relpath(d.getVar('sysconfdir', True), d.getVar('bindir', True))}/rpm} \
-        RPM_LOCALEDIRRPM='`dirname $''realpath`'/${@os.path.relpath(d.getVar('datadir', True), d.getVar('bindir', True))}/locale
+        RPM_USRLIBRPM='`dirname $''realpath`'/${@os.path.relpath(d.getVar('libdir'), d.getVar('bindir'))}/rpm \
+        RPM_ETCRPM='$'{RPM_ETCRPM-'`dirname $''realpath`'/${@os.path.relpath(d.getVar('sysconfdir'), d.getVar('bindir'))}/rpm} \
+        RPM_LOCALEDIRRPM='`dirname $''realpath`'/${@os.path.relpath(d.getVar('datadir'), d.getVar('bindir'))}/locale
 
     create_wrapper ${D}/${bindir}/rpmconstant \
-        RPM_USRLIBRPM='`dirname $''realpath`'/${@os.path.relpath(d.getVar('libdir', True), d.getVar('bindir', True))}/rpm \
-        RPM_ETCRPM='$'{RPM_ETCRPM-'`dirname $''realpath`'/${@os.path.relpath(d.getVar('sysconfdir', True), d.getVar('bindir', True))}/rpm} \
-        RPM_LOCALEDIRRPM='`dirname $''realpath`'/${@os.path.relpath(d.getVar('datadir', True), d.getVar('bindir', True))}/locale
+        RPM_USRLIBRPM='`dirname $''realpath`'/${@os.path.relpath(d.getVar('libdir'), d.getVar('bindir'))}/rpm \
+        RPM_ETCRPM='$'{RPM_ETCRPM-'`dirname $''realpath`'/${@os.path.relpath(d.getVar('sysconfdir'), d.getVar('bindir'))}/rpm} \
+        RPM_LOCALEDIRRPM='`dirname $''realpath`'/${@os.path.relpath(d.getVar('datadir'), d.getVar('bindir'))}/locale
 
     for rpm_binary in ${D}/${libdir}/rpm/bin/rpm* ${D}/${libdir}/rpm/bin/debugedit; do
         create_wrapper $rpm_binary \
-            RPM_USRLIBRPM='`dirname $''realpath`'/${@os.path.relpath(d.getVar('libdir', True), d.getVar('bindir', True))}/rpm \
-            RPM_ETCRPM='$'{RPM_ETCRPM-'`dirname $''realpath`'/${@os.path.relpath(d.getVar('sysconfdir', True), d.getVar('bindir', True))}/rpm} \
-            RPM_LOCALEDIRRPM='`dirname $''realpath`'/${@os.path.relpath(d.getVar('datadir', True), d.getVar('bindir', True))}/locale
+            RPM_USRLIBRPM='`dirname $''realpath`'/${@os.path.relpath(d.getVar('libdir'), d.getVar('bindir'))}/rpm \
+            RPM_ETCRPM='$'{RPM_ETCRPM-'`dirname $''realpath`'/${@os.path.relpath(d.getVar('sysconfdir'), d.getVar('bindir'))}/rpm} \
+            RPM_LOCALEDIRRPM='`dirname $''realpath`'/${@os.path.relpath(d.getVar('datadir'), d.getVar('bindir'))}/locale
    done
 }
 
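
The $-to-% substitution above leans on a BitBake expansion detail: localdata has TOOLCHAIN_OPTIONS deleted, and references to unset variables are left in place literally, so the otherwise fully expanded tool strings can still contain ${TOOLCHAIN_OPTIONS}. Rewriting $ as % turns that leftover into an rpm macro, which the "%TOOLCHAIN_OPTIONS %{nil}" line defined earlier resolves to nothing. Schematically (the CC value below is an assumption):

    # Illustrative expanded value still carrying an unresolvable reference:
    cc = "arm-poky-linux-gnueabi-gcc ${TOOLCHAIN_OPTIONS}"
    print("%__cc " + cc.replace('$', '%'))
    # %__cc arm-poky-linux-gnueabi-gcc %{TOOLCHAIN_OPTIONS}
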
diff --git a/meta/recipes-devtools/swig/swig.inc b/meta/recipes-devtools/swig/swig.inc
index 704bb27b92..8cb6fbd48b 100644
--- a/meta/recipes-devtools/swig/swig.inc
+++ b/meta/recipes-devtools/swig/swig.inc
@@ -54,8 +54,8 @@ do_install_append_class-nativesdk() {
 }
 
 def swiglib_relpath(d):
-    swiglib = d.getVar('datadir', True) + "/" + d.getVar('BPN', True) + "/" + d.getVar('PV', True)
-    return os.path.relpath(swiglib, d.getVar('bindir', True))
+    swiglib = d.getVar('datadir') + "/" + d.getVar('BPN') + "/" + d.getVar('PV')
+    return os.path.relpath(swiglib, d.getVar('bindir'))
 
 do_install_append_class-native() {
     create_wrapper ${D}${bindir}/swig SWIG_LIB='`dirname $''realpath`'/${@swiglib_relpath(d)}
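
swiglib_relpath() computes where the SWIG library directory sits relative to the wrapper binary, so the wrapper keeps working wherever the native sysroot is relocated. With illustrative values (actual paths depend on the build configuration):

    import os.path

    # Assumed values: datadir=/usr/share, BPN=swig, PV=3.0.10, bindir=/usr/bin
    swiglib = "/usr/share" + "/" + "swig" + "/" + "3.0.10"
    print(os.path.relpath(swiglib, "/usr/bin"))   # ../share/swig/3.0.10
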
diff --git a/meta/recipes-devtools/vala/vala.inc b/meta/recipes-devtools/vala/vala.inc
index afa09ec12e..b338ea097a 100644
--- a/meta/recipes-devtools/vala/vala.inc
+++ b/meta/recipes-devtools/vala/vala.inc
@@ -11,7 +11,7 @@ HOMEPAGE = "http://vala-project.org"
 LICENSE = "LGPLv2.1"
 LIC_FILES_CHKSUM = "file://COPYING;md5=fbc093901857fcd118f065f900982c24"
 
-SHRT_VER = "${@d.getVar('PV', True).split('.')[0]}.${@d.getVar('PV', True).split('.')[1]}"
+SHRT_VER = "${@d.getVar('PV').split('.')[0]}.${@d.getVar('PV').split('.')[1]}"
 
 SRC_URI = "http://ftp.gnome.org/pub/GNOME/sources/${BPN}/${SHRT_VER}/${BP}.tar.xz"
 inherit autotools pkgconfig upstream-version-is-even
diff --git a/meta/recipes-devtools/valgrind/valgrind_3.12.0.bb b/meta/recipes-devtools/valgrind/valgrind_3.12.0.bb
index 9b28d3701e..3385ef70c0 100644
--- a/meta/recipes-devtools/valgrind/valgrind_3.12.0.bb
+++ b/meta/recipes-devtools/valgrind/valgrind_3.12.0.bb
@@ -46,7 +46,7 @@ COMPATIBLE_HOST_mipsarchr6 = 'null'
 inherit autotools ptest
 
 EXTRA_OECONF = "--enable-tls --without-mpicc"
-EXTRA_OECONF += "${@['--enable-only32bit','--enable-only64bit'][d.getVar('SITEINFO_BITS', True) != '32']}"
+EXTRA_OECONF += "${@['--enable-only32bit','--enable-only64bit'][d.getVar('SITEINFO_BITS') != '32']}"
 
 # valgrind checks host_cpu "armv7*)", so we need to over-ride the autotools.bbclass default --host option
 EXTRA_OECONF_append_arm = " --host=armv7${HOST_VENDOR}-${HOST_OS}"
diff --git a/meta/recipes-extended/cups/cups.inc b/meta/recipes-extended/cups/cups.inc
index e18980bcca..2ce9c7a179 100644
--- a/meta/recipes-extended/cups/cups.inc
+++ b/meta/recipes-extended/cups/cups.inc
@@ -83,7 +83,7 @@ do_install () {
 python do_package_append() {
     import subprocess
     # Change permissions back the way they were, they probably had a reason...
-    workdir = d.getVar('WORKDIR', True)
+    workdir = d.getVar('WORKDIR')
     subprocess.call('chmod 0511 %s/install/cups/var/run/cups/certs' % workdir, shell=True)
 }
 
diff --git a/meta/recipes-extended/man/man_1.6g.bb b/meta/recipes-extended/man/man_1.6g.bb
index e78aa18cbc..ff603f842c 100644
--- a/meta/recipes-extended/man/man_1.6g.bb
+++ b/meta/recipes-extended/man/man_1.6g.bb
@@ -10,8 +10,8 @@ PR = "r1"
 DEPENDS = "groff less"
 
 def compress_pkg(d):
-    if "compress_doc" in (d.getVar("INHERIT", True) or "").split():
-        compress = d.getVar("DOC_COMPRESS", True)
+    if "compress_doc" in (d.getVar("INHERIT") or "").split():
+        compress = d.getVar("DOC_COMPRESS")
         if compress == "gz":
             return "gzip"
         elif compress == "bz2":
diff --git a/meta/recipes-extended/net-tools/net-tools_1.60-26.bb b/meta/recipes-extended/net-tools/net-tools_1.60-26.bb
index 47a68a59ac..49a65f873a 100644
--- a/meta/recipes-extended/net-tools/net-tools_1.60-26.bb
+++ b/meta/recipes-extended/net-tools/net-tools_1.60-26.bb
@@ -106,10 +106,10 @@ ALTERNATIVE_LINK_NAME[hostname.1] = "${mandir}/man1/hostname.1"
 ALTERNATIVE_PRIORITY[hostname.1] = "10"
 
 python __anonymous() {
-    for prog in d.getVar('base_sbindir_progs', True).split():
-        d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('base_sbindir', True), prog))
-    for prog in d.getVar('base_bindir_progs', True).split():
-        d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('base_bindir', True), prog))
+    for prog in d.getVar('base_sbindir_progs').split():
+        d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('base_sbindir'), prog))
+    for prog in d.getVar('base_bindir_progs').split():
+        d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('base_bindir'), prog))
 }
 ALTERNATIVE_PRIORITY = "100"
 
diff --git a/meta/recipes-extended/packagegroups/packagegroup-core-full-cmdline.bb b/meta/recipes-extended/packagegroups/packagegroup-core-full-cmdline.bb
index a60a68cbf2..d8975f2157 100644
--- a/meta/recipes-extended/packagegroups/packagegroup-core-full-cmdline.bb
+++ b/meta/recipes-extended/packagegroups/packagegroup-core-full-cmdline.bb
@@ -31,7 +31,7 @@ python __anonymous () {
     namemap["packagegroup-core-full-cmdline-initscripts"] = "packagegroup-core-initscripts"
     namemap["packagegroup-core-full-cmdline-sys-services"] = "packagegroup-core-sys-services"
 
-    packages = d.getVar("PACKAGES", True).split()
+    packages = d.getVar("PACKAGES").split()
     for pkg in packages:
         if pkg.endswith('-dev'):
             mapped = namemap.get(pkg[:-4], None)
diff --git a/meta/recipes-extended/packagegroups/packagegroup-core-lsb.bb b/meta/recipes-extended/packagegroups/packagegroup-core-lsb.bb
index 29564e0c9d..734dda1d79 100644
--- a/meta/recipes-extended/packagegroups/packagegroup-core-lsb.bb
+++ b/meta/recipes-extended/packagegroups/packagegroup-core-lsb.bb
@@ -15,9 +15,9 @@ REQUIRED_DISTRO_FEATURES = "x11"
 # We will skip parsing this packagegeoup for non-glibc systems
 #
 python __anonymous () {
-    if d.getVar('TCLIBC', True) != "glibc":
+    if d.getVar('TCLIBC') != "glibc":
         raise bb.parse.SkipPackage("incompatible with %s C library" %
-                                   d.getVar('TCLIBC', True))
+                                   d.getVar('TCLIBC'))
 }
 
 PACKAGES = "\
diff --git a/meta/recipes-extended/pam/libpam_1.3.0.bb b/meta/recipes-extended/pam/libpam_1.3.0.bb
index bb5a4bd8d3..67b9d53d33 100644
--- a/meta/recipes-extended/pam/libpam_1.3.0.bb
+++ b/meta/recipes-extended/pam/libpam_1.3.0.bb
@@ -62,7 +62,7 @@ FILES_${PN}-xtests = "${datadir}/Linux-PAM/xtests"
 PACKAGES_DYNAMIC += "^${MLPREFIX}pam-plugin-.*"
 
 def get_multilib_bit(d):
-    baselib = d.getVar('baselib', True) or ''
+    baselib = d.getVar('baselib') or ''
     return baselib.replace('lib', '')
 
 libpam_suffix = "suffix${@get_multilib_bit(d)}"
@@ -92,30 +92,30 @@ RRECOMMENDS_${PN}_class-native = ""
 python populate_packages_prepend () {
     def pam_plugin_append_file(pn, dir, file):
         nf = os.path.join(dir, file)
-        of = d.getVar('FILES_' + pn, True)
+        of = d.getVar('FILES_' + pn)
         if of:
             nf = of + " " + nf
         d.setVar('FILES_' + pn, nf)
 
     def pam_plugin_hook(file, pkg, pattern, format, basename):
-        pn = d.getVar('PN', True)
-        libpam_suffix = d.getVar('libpam_suffix', True)
+        pn = d.getVar('PN')
+        libpam_suffix = d.getVar('libpam_suffix')
 
-        rdeps = d.getVar('RDEPENDS_' + pkg, True)
+        rdeps = d.getVar('RDEPENDS_' + pkg)
         if rdeps:
             rdeps = rdeps + " " + pn + "-" + libpam_suffix
         else:
             rdeps = pn + "-" + libpam_suffix
         d.setVar('RDEPENDS_' + pkg, rdeps)
 
-        provides = d.getVar('RPROVIDES_' + pkg, True)
+        provides = d.getVar('RPROVIDES_' + pkg)
         if provides:
             provides = provides + " " + pkg + "-" + libpam_suffix
         else:
             provides = pkg + "-" + libpam_suffix
         d.setVar('RPROVIDES_' + pkg, provides)
 
-    mlprefix = d.getVar('MLPREFIX', True) or ''
+    mlprefix = d.getVar('MLPREFIX') or ''
     dvar = bb.data.expand('${WORKDIR}/package', d, True)
     pam_libdir = d.expand('${base_libdir}/security')
     pam_sbindir = d.expand('${sbindir}')
diff --git a/meta/recipes-extended/procps/procps_3.3.12.bb b/meta/recipes-extended/procps/procps_3.3.12.bb
index 4ca1a5023b..1f793b9cac 100644
--- a/meta/recipes-extended/procps/procps_3.3.12.bb
+++ b/meta/recipes-extended/procps/procps_3.3.12.bb
@@ -57,10 +57,10 @@ ALTERNATIVE_LINK_NAME[kill.1] = "${mandir}/man1/kill.1"
 ALTERNATIVE_LINK_NAME[uptime.1] = "${mandir}/man1/uptime.1"
 
 python __anonymous() {
-    for prog in d.getVar('base_bindir_progs', True).split():
-        d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('base_bindir', True), prog))
+    for prog in d.getVar('base_bindir_progs').split():
+        d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('base_bindir'), prog))
 
-    for prog in d.getVar('base_sbindir_progs', True).split():
-        d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('base_sbindir', True), prog))
+    for prog in d.getVar('base_sbindir_progs').split():
+        d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('base_sbindir'), prog))
 }
 
diff --git a/meta/recipes-extended/texinfo/texinfo_6.3.bb b/meta/recipes-extended/texinfo/texinfo_6.3.bb
index 51199be758..d82731e11e 100644
--- a/meta/recipes-extended/texinfo/texinfo_6.3.bb
+++ b/meta/recipes-extended/texinfo/texinfo_6.3.bb
@@ -11,7 +11,7 @@ PROVIDES_append_class-native = " texinfo-replacement-native"
 
 def compress_pkg(d):
     if bb.data.inherits_class('compress_doc', d):
-        compress = d.getVar("DOC_COMPRESS", True)
+        compress = d.getVar("DOC_COMPRESS")
         if compress == "gz":
             return "gzip"
         elif compress == "bz2":
diff --git a/meta/recipes-extended/tzdata/tzdata_2016i.bb b/meta/recipes-extended/tzdata/tzdata_2016i.bb
index 3801a3fced..46488cda58 100644
--- a/meta/recipes-extended/tzdata/tzdata_2016i.bb
+++ b/meta/recipes-extended/tzdata/tzdata_2016i.bb
@@ -209,5 +209,5 @@ FILES_${PN} += "${datadir}/zoneinfo/Pacific/Honolulu \
                 ${datadir}/zoneinfo/iso3166.tab \
                 ${datadir}/zoneinfo/Etc/*"
 
-CONFFILES_${PN} += "${@ "${sysconfdir}/timezone" if bb.utils.to_boolean(d.getVar('INSTALL_TIMEZONE_FILE', True)) else "" }"
+CONFFILES_${PN} += "${@ "${sysconfdir}/timezone" if bb.utils.to_boolean(d.getVar('INSTALL_TIMEZONE_FILE')) else "" }"
 CONFFILES_${PN} += "${sysconfdir}/localtime"
diff --git a/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.36.0.bb b/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.36.0.bb
index a5ebecaff3..9efe374c0d 100644
--- a/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.36.0.bb
+++ b/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.36.0.bb
@@ -59,7 +59,7 @@ PACKAGES_DYNAMIC += "^gdk-pixbuf-loader-.*"
 PACKAGES_DYNAMIC_class-native = ""
 
 python populate_packages_prepend () {
-    postinst_pixbufloader = d.getVar("postinst_pixbufloader", True)
+    postinst_pixbufloader = d.getVar("postinst_pixbufloader")
 
     loaders_root = d.expand('${libdir}/gdk-pixbuf-2.0/${LIBV}/loaders')
 
diff --git a/meta/recipes-gnome/gobject-introspection/gobject-introspection_1.50.0.bb b/meta/recipes-gnome/gobject-introspection/gobject-introspection_1.50.0.bb
index b6d296aa09..0c29a3b377 100644
--- a/meta/recipes-gnome/gobject-introspection/gobject-introspection_1.50.0.bb
+++ b/meta/recipes-gnome/gobject-introspection/gobject-introspection_1.50.0.bb
@@ -168,6 +168,6 @@ python gobject_introspection_preconfigure () {
 
 SSTATEPOSTINSTFUNCS += "gobject_introspection_postinst"
 python gobject_introspection_postinst () {
-    if d.getVar("BB_CURRENTTASK", True).startswith("populate_sysroot"):
+    if d.getVar("BB_CURRENTTASK").startswith("populate_sysroot"):
         oe.utils.write_ld_so_conf(d)
 }
diff --git a/meta/recipes-gnome/gtk+/gtk+3.inc b/meta/recipes-gnome/gtk+/gtk+3.inc
index 745c6fa5de..6499ea6c8d 100644
--- a/meta/recipes-gnome/gtk+/gtk+3.inc
+++ b/meta/recipes-gnome/gtk+/gtk+3.inc
@@ -125,6 +125,6 @@ python populate_packages_prepend () {
 
     do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk3-printbackend-%s', 'GTK printbackend module for %s')
 
-    if (d.getVar('DEBIAN_NAMES', True)):
+    if (d.getVar('DEBIAN_NAMES')):
         d.setVar(d.expand('PKG_${PN}'), '${MLPREFIX}libgtk-3.0')
 }
diff --git a/meta/recipes-gnome/gtk+/gtk+_2.24.31.bb b/meta/recipes-gnome/gtk+/gtk+_2.24.31.bb
index 84d0b4d980..cc31bab6d1 100644
--- a/meta/recipes-gnome/gtk+/gtk+_2.24.31.bb
+++ b/meta/recipes-gnome/gtk+/gtk+_2.24.31.bb
@@ -29,6 +29,6 @@ python populate_packages_prepend () {
     d.setVar('GTKIMMODULES_PACKAGES', ' '.join(do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s')))
     do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s')
 
-    if (d.getVar('DEBIAN_NAMES', True)):
+    if (d.getVar('DEBIAN_NAMES')):
         d.setVar(d.expand('PKG_${PN}'), '${MLPREFIX}libgtk-2.0')
 }
diff --git a/meta/recipes-graphics/cairo/cairo-fpu.inc b/meta/recipes-graphics/cairo/cairo-fpu.inc
index ef72e8904b..fe7c53337a 100644
--- a/meta/recipes-graphics/cairo/cairo-fpu.inc
+++ b/meta/recipes-graphics/cairo/cairo-fpu.inc
@@ -1,6 +1,6 @@
 
 def get_cairo_fpu_setting(bb, d):
-    if d.getVar('TARGET_FPU', True) in [ 'soft' ]:
+    if d.getVar('TARGET_FPU') in [ 'soft' ]:
         return "--disable-some-floating-point"
     return ""
 
diff --git a/meta/recipes-graphics/mesa/mesa.inc b/meta/recipes-graphics/mesa/mesa.inc
index 1f9fbcab7d..5d882b1001 100644
--- a/meta/recipes-graphics/mesa/mesa.inc
+++ b/meta/recipes-graphics/mesa/mesa.inc
@@ -114,7 +114,7 @@ do_install_append () {
 # they don't get Debian-renamed (which would remove the -mesa suffix), and
 # RPROVIDEs/RCONFLICTs on the generic libgl name.
 python __anonymous() {
-    pkgconfig = (d.getVar('PACKAGECONFIG', True) or "").split()
+    pkgconfig = (d.getVar('PACKAGECONFIG') or "").split()
     for p in (("egl", "libegl", "libegl1"),
               ("dri", "libgl", "libgl1"),
               ("gles", "libgles1", "libglesv1-cm1"),
@@ -148,7 +148,7 @@ python mesa_populate_packages() {
         d.setVar("RREPLACES_%s" % pkg, pkg.replace("mesa", "mesa-dri", 1))
 
     import re
-    dri_drivers_root = oe.path.join(d.getVar('PKGD', True), d.getVar('libdir', True), "dri")
+    dri_drivers_root = oe.path.join(d.getVar('PKGD'), d.getVar('libdir'), "dri")
     if os.path.isdir(dri_drivers_root):
         dri_pkgs = os.listdir(dri_drivers_root)
         lib_name = d.expand("${MLPREFIX}mesa-megadriver")
@@ -160,7 +160,7 @@ python mesa_populate_packages() {
             d.appendVar("RCONFLICTS_%s" % lib_name, pkg_name)
             d.appendVar("RREPLACES_%s" % lib_name, pkg_name)
 
-    pipe_drivers_root = os.path.join(d.getVar('libdir', True), "gallium-pipe")
+    pipe_drivers_root = os.path.join(d.getVar('libdir'), "gallium-pipe")
     do_split_packages(d, pipe_drivers_root, '^pipe_(.*)\.so$', 'mesa-driver-pipe-%s', 'Mesa %s pipe driver', extra_depends='')
 }
 
diff --git a/meta/recipes-graphics/xorg-driver/xorg-driver-common.inc b/meta/recipes-graphics/xorg-driver/xorg-driver-common.inc
index 57a4c827dd..fe42f11b68 100644
--- a/meta/recipes-graphics/xorg-driver/xorg-driver-common.inc
+++ b/meta/recipes-graphics/xorg-driver/xorg-driver-common.inc
@@ -33,8 +33,8 @@ def _add_xorg_abi_depends(d, name):
     }
 
     output = os.popen("pkg-config xorg-server --variable=%s" % abis[name]).read()
-    mlprefix = d.getVar('MLPREFIX', True) or ''
+    mlprefix = d.getVar('MLPREFIX') or ''
     abi = "%sxorg-abi-%s-%s" % (mlprefix, name, output.split(".")[0])
 
-    pn = d.getVar("PN", True)
+    pn = d.getVar("PN")
     d.appendVar('RDEPENDS_' + pn, ' ' + abi)
diff --git a/meta/recipes-graphics/xorg-lib/libxft_2.3.2.bb b/meta/recipes-graphics/xorg-lib/libxft_2.3.2.bb
index a967cdc585..b4547819f2 100644
--- a/meta/recipes-graphics/xorg-lib/libxft_2.3.2.bb
+++ b/meta/recipes-graphics/xorg-lib/libxft_2.3.2.bb
@@ -28,6 +28,6 @@ XORG_PN = "libXft"
 BBCLASSEXTEND = "native"
 
 python () {
-    if d.getVar('DEBIAN_NAMES', True):
+    if d.getVar('DEBIAN_NAMES'):
         d.setVar('PKG_${PN}', '${MLPREFIX}libxft2')
 }
diff --git a/meta/recipes-graphics/xorg-xserver/xserver-xorg.inc b/meta/recipes-graphics/xorg-xserver/xserver-xorg.inc
index 4a5f9a6a1e..6a13380c98 100644
--- a/meta/recipes-graphics/xorg-xserver/xserver-xorg.inc
+++ b/meta/recipes-graphics/xorg-xserver/xserver-xorg.inc
@@ -172,10 +172,10 @@ python populate_packages_prepend() {
                              shell=True, env=newenv, stdout=subprocess.PIPE)
         stdout, stderr = p.communicate()
         output = stdout.decode("utf-8").split(".")[0]
-        mlprefix = d.getVar('MLPREFIX', True) or ''
+        mlprefix = d.getVar('MLPREFIX') or ''
         return "%sxorg-abi-%s-%s" % (mlprefix, name, output)
 
-    pn = d.getVar("PN", True)
+    pn = d.getVar("PN")
     d.appendVar("RPROVIDES_" + pn, " " + get_abi("input"))
     d.appendVar("RPROVIDES_" + pn, " " + get_abi("video"))
 }
diff --git a/meta/recipes-kernel/linux/linux-yocto-rt_4.1.bb b/meta/recipes-kernel/linux/linux-yocto-rt_4.1.bb
index de3582a7aa..70658832a9 100644
--- a/meta/recipes-kernel/linux/linux-yocto-rt_4.1.bb
+++ b/meta/recipes-kernel/linux/linux-yocto-rt_4.1.bb
@@ -7,7 +7,7 @@ require recipes-kernel/linux/linux-yocto.inc
 # to build multiple virtual/kernel providers, e.g. as dependency of
 # core-image-rt-sdk, core-image-rt.
 python () {
-    if d.getVar("PREFERRED_PROVIDER_virtual/kernel", True) != "linux-yocto-rt":
+    if d.getVar("PREFERRED_PROVIDER_virtual/kernel") != "linux-yocto-rt":
         raise bb.parse.SkipPackage("Set PREFERRED_PROVIDER_virtual/kernel to linux-yocto-rt to enable it")
 }
 
diff --git a/meta/recipes-kernel/linux/linux-yocto-rt_4.4.bb b/meta/recipes-kernel/linux/linux-yocto-rt_4.4.bb
index d6e9626c00..4487c06bf3 100644
--- a/meta/recipes-kernel/linux/linux-yocto-rt_4.4.bb
+++ b/meta/recipes-kernel/linux/linux-yocto-rt_4.4.bb
@@ -7,7 +7,7 @@ require recipes-kernel/linux/linux-yocto.inc
 # to build multiple virtual/kernel providers, e.g. as dependency of
 # core-image-rt-sdk, core-image-rt.
 python () {
-    if d.getVar("PREFERRED_PROVIDER_virtual/kernel", True) != "linux-yocto-rt":
+    if d.getVar("PREFERRED_PROVIDER_virtual/kernel") != "linux-yocto-rt":
         raise bb.parse.SkipPackage("Set PREFERRED_PROVIDER_virtual/kernel to linux-yocto-rt to enable it")
 }
 
diff --git a/meta/recipes-kernel/linux/linux-yocto-rt_4.8.bb b/meta/recipes-kernel/linux/linux-yocto-rt_4.8.bb
index d392a6de5e..6079225cbc 100644
--- a/meta/recipes-kernel/linux/linux-yocto-rt_4.8.bb
+++ b/meta/recipes-kernel/linux/linux-yocto-rt_4.8.bb
@@ -7,7 +7,7 @@ require recipes-kernel/linux/linux-yocto.inc
 # to build multiple virtual/kernel providers, e.g. as dependency of
 # core-image-rt-sdk, core-image-rt.
 python () {
-    if d.getVar("PREFERRED_PROVIDER_virtual/kernel", True) != "linux-yocto-rt":
+    if d.getVar("PREFERRED_PROVIDER_virtual/kernel") != "linux-yocto-rt":
         raise bb.parse.SkipPackage("Set PREFERRED_PROVIDER_virtual/kernel to linux-yocto-rt to enable it")
 }
 
diff --git a/meta/recipes-kernel/linux/linux-yocto.inc b/meta/recipes-kernel/linux/linux-yocto.inc
index 09cd1f178a..556546f1e7 100644
--- a/meta/recipes-kernel/linux/linux-yocto.inc
+++ b/meta/recipes-kernel/linux/linux-yocto.inc
@@ -12,9 +12,9 @@ INC_PR = "r4"
 # PREFERRED_PROVIDER for virtual/kernel. This avoids network access required
 # by the use of AUTOREV SRCREVs, which are the default for this recipe.
 python () {
-    if d.getVar("PREFERRED_PROVIDER_virtual/kernel", True) != d.getVar("PN", True):
+    if d.getVar("PREFERRED_PROVIDER_virtual/kernel") != d.getVar("PN"):
         d.delVar("BB_DONT_CACHE")
-        raise bb.parse.SkipPackage("Set PREFERRED_PROVIDER_virtual/kernel to %s to enable it" % (d.getVar("PN", True)))
+        raise bb.parse.SkipPackage("Set PREFERRED_PROVIDER_virtual/kernel to %s to enable it" % (d.getVar("PN")))
 }
 
 DEPENDS += "xz-native bc-native"
diff --git a/meta/recipes-kernel/lttng/lttng-modules_git.bb b/meta/recipes-kernel/lttng/lttng-modules_git.bb
index 70145413a3..8a287131d0 100644
--- a/meta/recipes-kernel/lttng/lttng-modules_git.bb
+++ b/meta/recipes-kernel/lttng/lttng-modules_git.bb
@@ -28,7 +28,7 @@ do_install_append() {
 }
 
 python do_package_prepend() {
-    if not os.path.exists(os.path.join(d.getVar('D', True), 'lib/modules')):
-        bb.warn("%s: no modules were created; this may be due to CONFIG_TRACEPOINTS not being enabled in your kernel." % d.getVar('PN', True))
+    if not os.path.exists(os.path.join(d.getVar('D'), 'lib/modules')):
+        bb.warn("%s: no modules were created; this may be due to CONFIG_TRACEPOINTS not being enabled in your kernel." % d.getVar('PN'))
 }
 
diff --git a/meta/recipes-kernel/perf/perf-features.inc b/meta/recipes-kernel/perf/perf-features.inc
index b8859ab7d5..a4402cc50b 100644
--- a/meta/recipes-kernel/perf/perf-features.inc
+++ b/meta/recipes-kernel/perf/perf-features.inc
@@ -16,7 +16,7 @@ def perf_feature_enabled(feature, trueval, falseval, d):
        perf-tui: enable support for the perf TUI (via libnewt)
 
     """
-    enabled_features = d.getVar("PERF_FEATURES_ENABLE", True) or ""
+    enabled_features = d.getVar("PERF_FEATURES_ENABLE") or ""
     if feature in enabled_features:
         return trueval
     return falseval
diff --git a/meta/recipes-kernel/perf/perf.bb b/meta/recipes-kernel/perf/perf.bb
index 6cdaa68b57..cfbe628345 100644
--- a/meta/recipes-kernel/perf/perf.bb
+++ b/meta/recipes-kernel/perf/perf.bb
@@ -210,7 +210,7 @@ do_configure_prepend () {
 }
 
 python do_package_prepend() {
-    d.setVar('PKGV', d.getVar("KERNEL_VERSION", True).split("-")[0])
+    d.setVar('PKGV', d.getVar("KERNEL_VERSION").split("-")[0])
 }
 
 PACKAGE_ARCH = "${MACHINE_ARCH}"
diff --git a/meta/recipes-multimedia/alsa/alsa-fpu.inc b/meta/recipes-multimedia/alsa/alsa-fpu.inc
index 50402307c5..1ca31e7498 100644
--- a/meta/recipes-multimedia/alsa/alsa-fpu.inc
+++ b/meta/recipes-multimedia/alsa/alsa-fpu.inc
@@ -1,6 +1,6 @@
 
 def get_alsa_fpu_setting(bb, d):
-    if d.getVar('TARGET_FPU', True) in [ 'soft' ]:
+    if d.getVar('TARGET_FPU') in [ 'soft' ]:
         return "--with-softfloat"
     return ""
 
diff --git a/meta/recipes-multimedia/gstreamer/gst-plugins-package.inc b/meta/recipes-multimedia/gstreamer/gst-plugins-package.inc
index c24493e1e8..3fdb10e404 100644
--- a/meta/recipes-multimedia/gstreamer/gst-plugins-package.inc
+++ b/meta/recipes-multimedia/gstreamer/gst-plugins-package.inc
@@ -3,8 +3,8 @@ PACKAGESPLITFUNCS_append = " set_metapkg_rdepends "
 
 python split_gstreamer10_packages () {
     gst_libdir = d.expand('${libdir}/gstreamer-${LIBV}')
-    postinst = d.getVar('plugin_postinst', True)
-    glibdir = d.getVar('libdir', True)
+    postinst = d.getVar('plugin_postinst')
+    glibdir = d.getVar('libdir')
 
     do_split_packages(d, glibdir, '^lib(.*)\.so\.*', 'lib%s', 'gstreamer %s library', extra_depends='', allow_links=True)
     do_split_packages(d, gst_libdir, 'libgst(.*)\.so$', d.expand('${PN}-%s'), 'GStreamer plugin for %s', postinst=postinst, extra_depends='')
@@ -16,14 +16,14 @@ python split_gstreamer10_packages () {
 python set_metapkg_rdepends () {
     import os
 
-    pn = d.getVar('PN', True)
+    pn = d.getVar('PN')
     metapkg = pn + '-meta'
     d.setVar('ALLOW_EMPTY_' + metapkg, "1")
     d.setVar('FILES_' + metapkg, "")
     blacklist = [ pn, pn + '-locale', pn + '-dev', pn + '-dbg', pn + '-doc', pn + '-meta' ]
     metapkg_rdepends = []
-    packages = d.getVar('PACKAGES', True).split()
-    pkgdest = d.getVar('PKGDEST', True)
+    packages = d.getVar('PACKAGES').split()
+    pkgdest = d.getVar('PKGDEST')
     for pkg in packages[1:]:
         if not pkg in blacklist and not pkg in metapkg_rdepends and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.count('locale') and not pkg.count('-staticdev'):
             # See if the package is empty by looking at the contents of its PKGDEST subdirectory.
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx.inc b/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx.inc
index 07e5b7db5e..05562b11a7 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx.inc
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx.inc
@@ -18,7 +18,7 @@ GSTREAMER_1_0_OMX_CORE_NAME ?= "${libdir}/libomxil-bellagio.so.0"
 EXTRA_OECONF += "--disable-valgrind --with-omx-target=${GSTREAMER_1_0_OMX_TARGET}"
 
 python __anonymous () {
-    omx_target = d.getVar("GSTREAMER_1_0_OMX_TARGET", True)
+    omx_target = d.getVar("GSTREAMER_1_0_OMX_TARGET")
     if omx_target in ['generic', 'bellagio']:
         # Bellagio headers are incomplete (they are missing the OMX_VERSION_MAJOR,#
         # OMX_VERSION_MINOR, OMX_VERSION_REVISION, and OMX_VERSION_STEP macros);
@@ -26,7 +26,7 @@ python __anonymous () {
         d.appendVar("CFLAGS", " -I${S}/omx/openmax")
     elif omx_target == "rpi":
         # Dedicated Raspberry Pi OpenMAX IL support makes this package machine specific
-        d.setVar("PACKAGE_ARCH", d.getVar("MACHINE_ARCH", True))
+        d.setVar("PACKAGE_ARCH", d.getVar("MACHINE_ARCH"))
 }
 
 set_omx_core_name() {
diff --git a/meta/recipes-rt/images/core-image-rt-sdk.bb b/meta/recipes-rt/images/core-image-rt-sdk.bb
index 7ddf671bca..7c1e5bf261 100644
--- a/meta/recipes-rt/images/core-image-rt-sdk.bb
+++ b/meta/recipes-rt/images/core-image-rt-sdk.bb
@@ -4,7 +4,7 @@ require recipes-core/images/core-image-minimal.bb
 # PREFERRED_PROVIDER for virtual/kernel. This avoids errors when trying
 # to build multiple virtual/kernel providers.
 python () {
-    if d.getVar("PREFERRED_PROVIDER_virtual/kernel", True) != "linux-yocto-rt":
+    if d.getVar("PREFERRED_PROVIDER_virtual/kernel") != "linux-yocto-rt":
         raise bb.parse.SkipPackage("Set PREFERRED_PROVIDER_virtual/kernel to linux-yocto-rt to enable it")
 }
 
diff --git a/meta/recipes-rt/images/core-image-rt.bb b/meta/recipes-rt/images/core-image-rt.bb
index 9c86f25335..d8bb04aa1e 100644
--- a/meta/recipes-rt/images/core-image-rt.bb
+++ b/meta/recipes-rt/images/core-image-rt.bb
@@ -4,7 +4,7 @@ require recipes-core/images/core-image-minimal.bb
 # PREFERRED_PROVIDER for virtual/kernel. This avoids errors when trying
 # to build multiple virtual/kernel providers.
 python () {
-    if d.getVar("PREFERRED_PROVIDER_virtual/kernel", True) != "linux-yocto-rt":
+    if d.getVar("PREFERRED_PROVIDER_virtual/kernel") != "linux-yocto-rt":
         raise bb.parse.SkipPackage("Set PREFERRED_PROVIDER_virtual/kernel to linux-yocto-rt to enable it")
 }
 
diff --git a/meta/recipes-support/attr/ea-acl.inc b/meta/recipes-support/attr/ea-acl.inc
index 583ca1f84c..8750c3bc64 100644
--- a/meta/recipes-support/attr/ea-acl.inc
+++ b/meta/recipes-support/attr/ea-acl.inc
@@ -21,8 +21,8 @@ do_install_append_class-native () {
 	if test "${libdir}" = "${base_libdir}" ; then
 		return
 	fi
-	librelpath=${@os.path.relpath(d.getVar('libdir',True), d.getVar('base_libdir', True))}
-	baselibrelpath=${@os.path.relpath(d.getVar('base_libdir',True), d.getVar('libdir', True))}
+	librelpath=${@os.path.relpath(d.getVar('libdir',True), d.getVar('base_libdir'))}
+	baselibrelpath=${@os.path.relpath(d.getVar('base_libdir',True), d.getVar('libdir'))}
 
 	# Remove bad symlinks & create the correct symlinks
 	if test -L ${D}${libdir}/lib${BPN}.so ; then
@@ -45,5 +45,5 @@ FILES_lib${BPN} = "${base_libdir}/lib*${SOLIBS}"
 
 BBCLASSEXTEND = "native"
 # Only append ldflags for target recipe and if USE_NLS is enabled
-LDFLAGS_append_libc-uclibc_class-target = "${@['', ' -lintl '][(d.getVar('USE_NLS', True) == 'yes')]}"
-EXTRA_OECONF_append_libc-uclibc_class-target = "${@['', ' --disable-gettext '][(d.getVar('USE_NLS', True) == 'no')]}"
+LDFLAGS_append_libc-uclibc_class-target = "${@['', ' -lintl '][(d.getVar('USE_NLS') == 'yes')]}"
+EXTRA_OECONF_append_libc-uclibc_class-target = "${@['', ' --disable-gettext '][(d.getVar('USE_NLS') == 'no')]}"
diff --git a/meta/recipes-support/boost/boost-1.62.0.inc b/meta/recipes-support/boost/boost-1.62.0.inc
index 1e555e19bf..1138de61f1 100644
--- a/meta/recipes-support/boost/boost-1.62.0.inc
+++ b/meta/recipes-support/boost/boost-1.62.0.inc
@@ -7,8 +7,8 @@ HOMEPAGE = "http://www.boost.org/"
 LICENSE = "BSL-1.0 & MIT & Python-2.0"
 LIC_FILES_CHKSUM = "file://LICENSE_1_0.txt;md5=e4224ccaecb14d942c71d31bef20d78c"
 
-BOOST_VER = "${@"_".join(d.getVar("PV", True).split("."))}"
-BOOST_MAJ = "${@"_".join(d.getVar("PV", True).split(".")[0:2])}"
+BOOST_VER = "${@"_".join(d.getVar("PV").split("."))}"
+BOOST_MAJ = "${@"_".join(d.getVar("PV").split(".")[0:2])}"
 BOOST_P = "boost_${BOOST_VER}"
 
 SRC_URI = "${SOURCEFORGE_MIRROR}/project/boost/boost/${PV}/${BOOST_P}.tar.bz2"
diff --git a/meta/recipes-support/boost/boost.inc b/meta/recipes-support/boost/boost.inc
index 1966d3d807..a1a6a90f9c 100644
--- a/meta/recipes-support/boost/boost.inc
+++ b/meta/recipes-support/boost/boost.inc
@@ -59,7 +59,7 @@ PACKAGES = "${PN}-dbg ${BOOST_PACKAGES}"
 python __anonymous () {
     packages = []
     extras = []
-    for lib in d.getVar('BOOST_LIBS', True).split( ):
+    for lib in d.getVar('BOOST_LIBS').split( ):
         # BJAM does not know '--with-python3' (only --with-python)
         if lib != "python3":
             extras.append("--with-%s" % lib)
@@ -67,10 +67,10 @@ python __anonymous () {
         packages.append(pkg)
         if lib == "python":
             # special: python*.so matches python3.so !!
-            if not d.getVar("FILES_%s" % pkg, True):
+            if not d.getVar("FILES_%s" % pkg):
                 d.setVar("FILES_%s" % pkg, "${libdir}/libboost_%s.so.*" % lib)
         else:
-            if not d.getVar("FILES_%s" % pkg, True):
+            if not d.getVar("FILES_%s" % pkg):
                 d.setVar("FILES_%s" % pkg, "${libdir}/libboost_%s*.so.*" % lib)
     d.setVar("BOOST_PACKAGES", " ".join(packages))
     d.setVar("BJAM_EXTRA", " ".join(extras))
@@ -145,7 +145,7 @@ BJAM_TOOLS = "--ignore-site-config \
 # use PARALLEL_MAKE to speed up the build, but limit it by -j 64, greater parallelism causes bjam to segfault or to ignore -j
 # https://svn.boost.org/trac/boost/ticket/7634
 def get_boost_parallel_make(d):
-    pm = d.getVar('PARALLEL_MAKE', True)
+    pm = d.getVar('PARALLEL_MAKE')
     if pm:
         # look for '-j' and throw other options (e.g. '-l') away
         # because they might have different meaning in bjam
diff --git a/meta/recipes-support/gnutls/gnutls.inc b/meta/recipes-support/gnutls/gnutls.inc
index 51b9d2b191..4a5c3dfebf 100644
--- a/meta/recipes-support/gnutls/gnutls.inc
+++ b/meta/recipes-support/gnutls/gnutls.inc
@@ -15,7 +15,7 @@ LIC_FILES_CHKSUM = "file://LICENSE;md5=71391c8e0c1cfe68077e7fce3b586283 \
 DEPENDS = "nettle gmp virtual/libiconv"
 DEPENDS_append_libc-musl = " argp-standalone"
 
-SHRT_VER = "${@d.getVar('PV', True).split('.')[0]}.${@d.getVar('PV', True).split('.')[1]}"
+SHRT_VER = "${@d.getVar('PV').split('.')[0]}.${@d.getVar('PV').split('.')[1]}"
 
 SRC_URI = "ftp://ftp.gnutls.org/gcrypt/gnutls/v${SHRT_VER}/gnutls-${PV}.tar.xz"
 
diff --git a/meta/recipes-support/icu/icu_58.1.bb b/meta/recipes-support/icu/icu_58.1.bb
index cc7c94707b..9243a6a173 100644
--- a/meta/recipes-support/icu/icu_58.1.bb
+++ b/meta/recipes-support/icu/icu_58.1.bb
@@ -3,7 +3,7 @@ require icu.inc
 LIC_FILES_CHKSUM = "file://../LICENSE;md5=1b3b75c1777cd49ad5c6a24cd338cfc9"
 
 def icu_download_version(d):
-    pvsplit = d.getVar('PV', True).split('.')
+    pvsplit = d.getVar('PV').split('.')
     return pvsplit[0] + "_" + pvsplit[1]
 
 ICU_PV = "${@icu_download_version(d)}"
diff --git a/meta/recipes-support/libiconv/libiconv_1.11.1.bb b/meta/recipes-support/libiconv/libiconv_1.11.1.bb
index 6ce4b96e33..f28e64ae2e 100644
--- a/meta/recipes-support/libiconv/libiconv_1.11.1.bb
+++ b/meta/recipes-support/libiconv/libiconv_1.11.1.bb
@@ -23,7 +23,7 @@ S = "${WORKDIR}/libiconv-${PV}"
 inherit autotools pkgconfig gettext
 
 python __anonymous() {
-    if d.getVar("TCLIBC", True) == "glibc":
+    if d.getVar("TCLIBC") == "glibc":
         raise bb.parse.SkipPackage("libiconv is provided for use with uClibc only - glibc already provides iconv")
 }
 
diff --git a/meta/recipes-support/libiconv/libiconv_1.14.bb b/meta/recipes-support/libiconv/libiconv_1.14.bb
index 1b6fe09bb7..9fd5114ac8 100644
--- a/meta/recipes-support/libiconv/libiconv_1.14.bb
+++ b/meta/recipes-support/libiconv/libiconv_1.14.bb
@@ -23,9 +23,9 @@ S = "${WORKDIR}/libiconv-${PV}"
 inherit autotools pkgconfig gettext
 
 python __anonymous() {
-    if d.getVar("TARGET_OS", True) != "linux":
+    if d.getVar("TARGET_OS") != "linux":
         return
-    if d.getVar("TCLIBC", True) == "glibc":
+    if d.getVar("TCLIBC") == "glibc":
         raise bb.parse.SkipPackage("libiconv is provided for use with uClibc only - glibc already provides iconv")
 }
 
diff --git a/meta/recipes-support/libnl/libnl_3.2.28.bb b/meta/recipes-support/libnl/libnl_3.2.28.bb
index 26982f3efb..04e2d18f79 100644
--- a/meta/recipes-support/libnl/libnl_3.2.28.bb
+++ b/meta/recipes-support/libnl/libnl_3.2.28.bb
@@ -9,7 +9,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
 
 DEPENDS = "flex-native bison-native"
 
-SRC_URI = "https://github.com/thom311/${BPN}/releases/download/${BPN}${@d.getVar('PV', True).replace('.','_')}/${BP}.tar.gz \
+SRC_URI = "https://github.com/thom311/${BPN}/releases/download/${BPN}${@d.getVar('PV').replace('.','_')}/${BP}.tar.gz \
            file://fix-pktloc_syntax_h-race.patch \
            file://fix-pc-file.patch \
            file://0001-lib-add-utility-function-nl_strerror_l.patch \
diff --git a/meta/recipes-support/libsoup/libsoup-2.4_2.56.0.bb b/meta/recipes-support/libsoup/libsoup-2.4_2.56.0.bb
index 16c6068109..a1f294efbf 100644
--- a/meta/recipes-support/libsoup/libsoup-2.4_2.56.0.bb
+++ b/meta/recipes-support/libsoup/libsoup-2.4_2.56.0.bb
@@ -7,7 +7,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=5f30f0716dfdd0d91eb439ebec522ec2"
 
 DEPENDS = "glib-2.0 glib-2.0-native libxml2 sqlite3 intltool-native"
 
-SHRT_VER = "${@d.getVar('PV', True).split('.')[0]}.${@d.getVar('PV', True).split('.')[1]}"
+SHRT_VER = "${@d.getVar('PV').split('.')[0]}.${@d.getVar('PV').split('.')[1]}"
 
 SRC_URI = "${GNOME_MIRROR}/libsoup/${SHRT_VER}/libsoup-${PV}.tar.xz"
 
diff --git a/meta/recipes-support/sqlite/sqlite3.inc b/meta/recipes-support/sqlite/sqlite3.inc
index 80424c9cd6..42b534d8ef 100644
--- a/meta/recipes-support/sqlite/sqlite3.inc
+++ b/meta/recipes-support/sqlite/sqlite3.inc
@@ -5,7 +5,7 @@ SECTION = "libs"
 PE = "3"
 
 def sqlite_download_version(d):
-    pvsplit = d.getVar('PV', True).split('.')
+    pvsplit = d.getVar('PV').split('.')
     if len(pvsplit) < 4:
         pvsplit.append('0')
     return pvsplit[0] + ''.join([part.rjust(2,'0') for part in pvsplit[1:]])
diff --git a/meta/recipes-support/taglib/taglib_1.11.1.bb b/meta/recipes-support/taglib/taglib_1.11.1.bb
index ee1b5234bb..54a92d94ec 100644
--- a/meta/recipes-support/taglib/taglib_1.11.1.bb
+++ b/meta/recipes-support/taglib/taglib_1.11.1.bb
@@ -26,7 +26,7 @@ EXTRA_OECMAKE = "-DBUILD_SHARED_LIBS=ON \
                  -DHAVE_BOOST_BYTESWAP=FALSE \
                  -DCMAKE_CXX_STANDARD=11 \
                  -DCMAKE_CXX_STANDARD_REQUIRED=OFF \
-                 -DLIB_SUFFIX=${@d.getVar('baselib', True).replace('lib', '')} \
+                 -DLIB_SUFFIX=${@d.getVar('baselib').replace('lib', '')} \
 "
 CXXFLAGS += "-std=c++11"
 