Diffstat (limited to 'meta/classes')
-rw-r--r--  meta/classes/allarch.bbclass                |   4
-rw-r--r--  meta/classes/archiver.bbclass               |  74
-rw-r--r--  meta/classes/autotools.bbclass              |  12
-rw-r--r--  meta/classes/base.bbclass                   | 126
-rw-r--r--  meta/classes/binconfig.bbclass              |   2
-rw-r--r--  meta/classes/blacklist.bbclass              |   4
-rw-r--r--  meta/classes/bugzilla.bbclass               |  24
-rw-r--r--  meta/classes/buildhistory.bbclass           |  62
-rw-r--r--  meta/classes/buildstats-summary.bbclass     |   2
-rw-r--r--  meta/classes/buildstats.bbclass             |  14
-rw-r--r--  meta/classes/ccache.bbclass                 |   2
-rw-r--r--  meta/classes/chrpath.bbclass                |   4
-rw-r--r--  meta/classes/cmake.bbclass                  |  16
-rw-r--r--  meta/classes/cml1.bbclass                   |   4
-rw-r--r--  meta/classes/compress_doc.bbclass           |  26
-rw-r--r--  meta/classes/copyleft_compliance.bbclass    |  10
-rw-r--r--  meta/classes/copyleft_filter.bbclass        |  10
-rw-r--r--  meta/classes/cross-canadian.bbclass         |  34
-rw-r--r--  meta/classes/crosssdk.bbclass               |   2
-rw-r--r--  meta/classes/cve-check.bbclass              |  46
-rw-r--r--  meta/classes/debian.bbclass                 |  24
-rw-r--r--  meta/classes/devshell.bbclass               |   6
-rw-r--r--  meta/classes/distro_features_check.bbclass  |   8
-rw-r--r--  meta/classes/distrodata.bbclass             | 132
-rw-r--r--  meta/classes/distutils-base.bbclass         |   2
-rw-r--r--  meta/classes/distutils3-base.bbclass        |   2
-rw-r--r--  meta/classes/externalsrc.bbclass            |  38
-rw-r--r--  meta/classes/extrausers.bbclass             |   2
-rw-r--r--  meta/classes/fontcache.bbclass              |  14
-rw-r--r--  meta/classes/fs-uuid.bbclass                |   2
-rw-r--r--  meta/classes/gconf.bbclass                  |  12
-rw-r--r--  meta/classes/gettext.bbclass                |   8
-rw-r--r--  meta/classes/gio-module-cache.bbclass       |  10
-rw-r--r--  meta/classes/grub-efi.bbclass               |  22
-rw-r--r--  meta/classes/gsettings.bbclass              |  10
-rw-r--r--  meta/classes/gtk-icon-cache.bbclass         |  14
-rw-r--r--  meta/classes/gtk-immodules-cache.bbclass    |  10
-rw-r--r--  meta/classes/icecc.bbclass                  |   4
-rw-r--r--  meta/classes/image-buildinfo.bbclass        |  10
-rw-r--r--  meta/classes/image-live.bbclass             |   8
-rw-r--r--  meta/classes/image-vm.bbclass               |  12
-rw-r--r--  meta/classes/image.bbclass                  |  96
-rw-r--r--  meta/classes/image_types.bbclass            |  24
-rw-r--r--  meta/classes/insane.bbclass                 | 194
-rw-r--r--  meta/classes/kernel-arch.bbclass            |   6
-rw-r--r--  meta/classes/kernel-fitimage.bbclass        |  14
-rw-r--r--  meta/classes/kernel-grub.bbclass            |   2
-rw-r--r--  meta/classes/kernel-module-split.bbclass    |  38
-rw-r--r--  meta/classes/kernel-uimage.bbclass          |   8
-rw-r--r--  meta/classes/kernel-yocto.bbclass           |   2
-rw-r--r--  meta/classes/kernel.bbclass                 |  40
-rw-r--r--  meta/classes/libc-common.bbclass            |  10
-rw-r--r--  meta/classes/libc-package.bbclass           |  60
-rw-r--r--  meta/classes/license.bbclass                |  96
-rw-r--r--  meta/classes/live-vm-common.bbclass         |   8
-rw-r--r--  meta/classes/metadata_scm.bbclass           |   2
-rw-r--r--  meta/classes/migrate_localcount.bbclass     |  12
-rw-r--r--  meta/classes/mime.bbclass                   |  12
-rw-r--r--  meta/classes/module.bbclass                 |   2
-rw-r--r--  meta/classes/multilib.bbclass               |  32
-rw-r--r--  meta/classes/multilib_global.bbclass        |  22
-rw-r--r--  meta/classes/native.bbclass                 |  10
-rw-r--r--  meta/classes/nativesdk.bbclass              |   8
-rw-r--r--  meta/classes/npm.bbclass                    |   4
-rw-r--r--  meta/classes/oelint.bbclass                 |   4
-rw-r--r--  meta/classes/package.bbclass                | 282
-rw-r--r--  meta/classes/package_deb.bbclass            |  54
-rw-r--r--  meta/classes/package_ipk.bbclass            |  60
-rw-r--r--  meta/classes/package_rpm.bbclass            | 136
-rw-r--r--  meta/classes/package_tar.bbclass            |  14
-rw-r--r--  meta/classes/packagedata.bbclass            |  10
-rw-r--r--  meta/classes/packagefeed-stability.bbclass  |  14
-rw-r--r--  meta/classes/packagegroup.bbclass           |   8
-rw-r--r--  meta/classes/patch.bbclass                  |  20
-rw-r--r--  meta/classes/pixbufcache.bbclass            |  10
-rw-r--r--  meta/classes/populate_sdk_base.bbclass      |  36
-rw-r--r--  meta/classes/populate_sdk_ext.bbclass       | 108
-rw-r--r--  meta/classes/prexport.bbclass               |   4
-rw-r--r--  meta/classes/ptest.bbclass                  |   2
-rw-r--r--  meta/classes/qemu.bbclass                   |  10
-rw-r--r--  meta/classes/qemuboot.bbclass               |   8
-rw-r--r--  meta/classes/recipe_sanity.bbclass          |  26
-rw-r--r--  meta/classes/report-error.bbclass           |  22
-rw-r--r--  meta/classes/rm_work.bbclass                |   6
-rw-r--r--  meta/classes/rootfs-postcommands.bbclass    |   6
-rw-r--r--  meta/classes/rootfs_deb.bbclass             |   4
-rw-r--r--  meta/classes/rootfs_ipk.bbclass             |   2
-rw-r--r--  meta/classes/rootfs_rpm.bbclass             |   2
-rw-r--r--  meta/classes/sanity.bbclass                 | 108
-rw-r--r--  meta/classes/sign_ipk.bbclass               |  12
-rw-r--r--  meta/classes/sign_package_feed.bbclass      |   4
-rw-r--r--  meta/classes/sign_rpm.bbclass               |  12
-rw-r--r--  meta/classes/siteconfig.bbclass             |   2
-rw-r--r--  meta/classes/siteinfo.bbclass               |  14
-rw-r--r--  meta/classes/spdx.bbclass                   |  26
-rw-r--r--  meta/classes/sstate.bbclass                 | 114
-rw-r--r--  meta/classes/staging.bbclass                |  22
-rw-r--r--  meta/classes/syslinux.bbclass               |  34
-rw-r--r--  meta/classes/systemd-boot.bbclass           |  14
-rw-r--r--  meta/classes/systemd.bbclass                |  38
-rw-r--r--  meta/classes/terminal.bbclass               |   8
-rw-r--r--  meta/classes/testexport.bbclass             |  40
-rw-r--r--  meta/classes/testimage.bbclass              |  12
-rw-r--r--  meta/classes/testsdk.bbclass                |  12
-rw-r--r--  meta/classes/tinderclient.bbclass           |  54
-rw-r--r--  meta/classes/toaster.bbclass                |  14
-rw-r--r--  meta/classes/toolchain-scripts.bbclass      |   4
-rw-r--r--  meta/classes/uboot-config.bbclass           |  14
-rw-r--r--  meta/classes/uboot-extlinux-config.bbclass  |  24
-rw-r--r--  meta/classes/uboot-sign.bbclass             |   6
-rw-r--r--  meta/classes/uninative.bbclass              |  18
-rw-r--r--  meta/classes/update-alternatives.bbclass    |  52
-rw-r--r--  meta/classes/update-rc.d.bbclass            |  28
-rw-r--r--  meta/classes/useradd-staticids.bbclass      |  48
-rw-r--r--  meta/classes/useradd.bbclass                |  22
-rw-r--r--  meta/classes/utility-tasks.bbclass          |   4
-rw-r--r--  meta/classes/utils.bbclass                  |  30
-rw-r--r--  meta/classes/waf.bbclass                    |   2
118 files changed, 1585 insertions(+), 1585 deletions(-)
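
Every hunk below is the same mechanical substitution: BitBake's datastore
getVar() now expands variables by default, so the explicit expand argument
(True) is redundant and is dropped throughout. A minimal sketch of the old
and new calling conventions, assuming the post-change datastore API (the
variable name is illustrative, not taken from this commit):

    # Old style: expand defaulted to False, so callers passed True
    # explicitly to receive the expanded value.
    workdir = d.getVar('WORKDIR', True)

    # New style: getVar() expands by default.
    workdir = d.getVar('WORKDIR')

    # Passing False still returns the raw, unexpanded value when needed.
    raw_workdir = d.getVar('WORKDIR', False)

Note that getVarFlag() calls such as d.getVarFlag('ARCHIVER_MODE', 'src', True)
keep their explicit True in these hunks; this pass only converts getVar().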
diff --git a/meta/classes/allarch.bbclass b/meta/classes/allarch.bbclass
index ddc2a85050..9dce49879a 100644
--- a/meta/classes/allarch.bbclass
+++ b/meta/classes/allarch.bbclass
@@ -11,7 +11,7 @@ PACKAGE_ARCH = "all"
 python () {
     # Allow this class to be included but overridden - only set
     # the values if we're still "all" package arch.
-    if d.getVar("PACKAGE_ARCH", True) == "all":
+    if d.getVar("PACKAGE_ARCH") == "all":
         # No need for virtual/libc or a cross compiler
         d.setVar("INHIBIT_DEFAULT_DEPS","1")
 
@@ -47,6 +47,6 @@ python () {
         d.setVarFlag("emit_pkgdata", "vardepsexclude", "MULTILIB_VARIANTS")
         d.setVarFlag("write_specfile", "vardepsexclude", "MULTILIBS")
     elif bb.data.inherits_class('packagegroup', d) and not bb.data.inherits_class('nativesdk', d):
-        bb.error("Please ensure recipe %s sets PACKAGE_ARCH before inherit packagegroup" % d.getVar("FILE", True))
+        bb.error("Please ensure recipe %s sets PACKAGE_ARCH before inherit packagegroup" % d.getVar("FILE"))
 }
 
diff --git a/meta/classes/archiver.bbclass b/meta/classes/archiver.bbclass
index 3543ca9c58..52959776c3 100644
--- a/meta/classes/archiver.bbclass
+++ b/meta/classes/archiver.bbclass
@@ -52,10 +52,10 @@ do_deploy_all_archives[dirs] = "${WORKDIR}"
 
 
 python () {
-    pn = d.getVar('PN', True)
-    assume_provided = (d.getVar("ASSUME_PROVIDED", True) or "").split()
+    pn = d.getVar('PN')
+    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
     if pn in assume_provided:
-        for p in d.getVar("PROVIDES", True).split():
+        for p in d.getVar("PROVIDES").split():
             if p != pn:
                 pn = p
                 break
@@ -68,7 +68,7 @@ python () {
         bb.debug(1, 'archiver: %s is included: %s' % (pn, reason))
 
     # We just archive gcc-source for all the gcc related recipes
-    if d.getVar('BPN', True) in ['gcc', 'libgcc'] \
+    if d.getVar('BPN') in ['gcc', 'libgcc'] \
             and not pn.startswith('gcc-source'):
         bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
         return
@@ -106,7 +106,7 @@ python () {
     # Output the srpm package
     ar_srpm = d.getVarFlag('ARCHIVER_MODE', 'srpm', True)
     if ar_srpm == "1":
-        if d.getVar('PACKAGES', True) != '' and d.getVar('IMAGE_PKGTYPE', True) == 'rpm':
+        if d.getVar('PACKAGES') != '' and d.getVar('IMAGE_PKGTYPE') == 'rpm':
             d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn)
             if ar_dumpdata == "1":
                 d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn)
@@ -130,9 +130,9 @@ python do_ar_original() {
     if d.getVarFlag('ARCHIVER_MODE', 'src', True) != "original":
         return
 
-    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
+    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
     bb.note('Archiving the original source...')
-    urls = d.getVar("SRC_URI", True).split()
+    urls = d.getVar("SRC_URI").split()
     # destsuffix (git fetcher) and subdir (everything else) are allowed to be
     # absolute paths (for example, destsuffix=${S}/foobar).
     # That messes with unpacking inside our tmpdir below, because the fetchers
@@ -157,7 +157,7 @@ python do_ar_original() {
         if os.path.isfile(local):
             shutil.copy(local, ar_outdir)
         elif os.path.isdir(local):
-            tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR', True))
+            tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR'))
             fetch.unpack(tmpdir, (url,))
             # To handle recipes with more than one source, we add the "name"
             # URL parameter as suffix. We treat it as an error when
@@ -195,24 +195,24 @@ python do_ar_patched() {
         return
 
     # Get the ARCHIVER_OUTDIR before we reset the WORKDIR
-    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
-    ar_workdir = d.getVar('ARCHIVER_WORKDIR', True)
+    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
+    ar_workdir = d.getVar('ARCHIVER_WORKDIR')
     bb.note('Archiving the patched source...')
     d.setVar('WORKDIR', ar_workdir)
-    create_tarball(d, d.getVar('S', True), 'patched', ar_outdir)
+    create_tarball(d, d.getVar('S'), 'patched', ar_outdir)
 }
 
 python do_ar_configured() {
     import shutil
 
-    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
+    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
     if d.getVarFlag('ARCHIVER_MODE', 'src', True) == 'configured':
         bb.note('Archiving the configured source...')
-        pn = d.getVar('PN', True)
+        pn = d.getVar('PN')
         # "gcc-source-${PV}" recipes don't have "do_configure"
         # task, so we need to run "do_preconfigure" instead
         if pn.startswith("gcc-source-"):
-            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True))
+            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))
             bb.build.exec_func('do_preconfigure', d)
 
         # The libtool-native's do_configure will remove the
@@ -221,7 +221,7 @@ python do_ar_configured() {
         # instead of.
         elif pn != 'libtool-native':
             # Change the WORKDIR to make do_configure run in another dir.
-            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True))
+            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))
             if bb.data.inherits_class('kernel-yocto', d):
                 bb.build.exec_func('do_kernel_configme', d)
             if bb.data.inherits_class('cmake', d):
@@ -235,12 +235,12 @@ python do_ar_configured() {
             for func in (postfuncs or '').split():
                 if func != "do_qa_configure":
                     bb.build.exec_func(func, d)
-        srcdir = d.getVar('S', True)
-        builddir = d.getVar('B', True)
+        srcdir = d.getVar('S')
+        builddir = d.getVar('B')
         if srcdir != builddir:
             if os.path.exists(builddir):
                 oe.path.copytree(builddir, os.path.join(srcdir, \
-                    'build.%s.ar_configured' % d.getVar('PF', True)))
+                    'build.%s.ar_configured' % d.getVar('PF')))
         create_tarball(d, srcdir, 'configured', ar_outdir)
 }
 
@@ -251,14 +251,14 @@ def create_tarball(d, srcdir, suffix, ar_outdir):
     import tarfile
 
     # Make sure we are only creating a single tarball for gcc sources
-    if (d.getVar('SRC_URI', True) == ""):
+    if (d.getVar('SRC_URI') == ""):
         return
 
     bb.utils.mkdirhier(ar_outdir)
     if suffix:
-        filename = '%s-%s.tar.gz' % (d.getVar('PF', True), suffix)
+        filename = '%s-%s.tar.gz' % (d.getVar('PF'), suffix)
     else:
-        filename = '%s.tar.gz' % d.getVar('PF', True)
+        filename = '%s.tar.gz' % d.getVar('PF')
     tarname = os.path.join(ar_outdir, filename)
 
     bb.note('Creating %s' % tarname)
@@ -286,7 +286,7 @@ def create_diff_gz(d, src_orig, src, ar_outdir):
     dirname = os.path.dirname(src)
     basename = os.path.basename(src)
     os.chdir(dirname)
-    out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF', True))
+    out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF'))
     diff_cmd = 'diff -Naur %s.orig %s.patched | gzip -c > %s' % (basename, basename, out_file)
     subprocess.call(diff_cmd, shell=True)
     bb.utils.remove(src_patched, recurse=True)
@@ -297,9 +297,9 @@ python do_unpack_and_patch() {
             [ 'patched', 'configured'] and \
             d.getVarFlag('ARCHIVER_MODE', 'diff', True) != '1':
         return
-    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
-    ar_workdir = d.getVar('ARCHIVER_WORKDIR', True)
-    pn = d.getVar('PN', True)
+    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
+    ar_workdir = d.getVar('ARCHIVER_WORKDIR')
+    pn = d.getVar('PN')
 
     # The kernel class functions require it to be on work-shared, so we dont change WORKDIR
     if not (bb.data.inherits_class('kernel-yocto', d) or pn.startswith('gcc-source')):
@@ -309,18 +309,18 @@ python do_unpack_and_patch() {
         # The changed 'WORKDIR' also caused 'B' changed, create dir 'B' for the
         # possibly requiring of the following tasks (such as some recipes's
         # do_patch required 'B' existed).
-        bb.utils.mkdirhier(d.getVar('B', True))
+        bb.utils.mkdirhier(d.getVar('B'))
 
     bb.build.exec_func('do_unpack', d)
 
     # Save the original source for creating the patches
     if d.getVarFlag('ARCHIVER_MODE', 'diff', True) == '1':
-        src = d.getVar('S', True).rstrip('/')
+        src = d.getVar('S').rstrip('/')
         src_orig = '%s.orig' % src
         oe.path.copytree(src, src_orig)
 
     # Make sure gcc and kernel sources are patched only once
-    if not (d.getVar('SRC_URI', True) == "" or (bb.data.inherits_class('kernel-yocto', d) or pn.startswith('gcc-source'))):
+    if not (d.getVar('SRC_URI') == "" or (bb.data.inherits_class('kernel-yocto', d) or pn.startswith('gcc-source'))):
         bb.build.exec_func('do_patch', d)
 
     # Create the patches
@@ -339,14 +339,14 @@ python do_ar_recipe () {
 
     require_re = re.compile( r"require\s+(.+)" )
     include_re = re.compile( r"include\s+(.+)" )
-    bbfile = d.getVar('FILE', True)
-    outdir = os.path.join(d.getVar('WORKDIR', True), \
-            '%s-recipe' % d.getVar('PF', True))
+    bbfile = d.getVar('FILE')
+    outdir = os.path.join(d.getVar('WORKDIR'), \
+            '%s-recipe' % d.getVar('PF'))
     bb.utils.mkdirhier(outdir)
     shutil.copy(bbfile, outdir)
 
-    pn = d.getVar('PN', True)
-    bbappend_files = d.getVar('BBINCLUDED', True).split()
+    pn = d.getVar('PN')
+    bbappend_files = d.getVar('BBINCLUDED').split()
     # If recipe name is aa, we need to match files like aa.bbappend and aa_1.1.bbappend
     # Files like aa1.bbappend or aa1_1.1.bbappend must be excluded.
     bbappend_re = re.compile( r".*/%s_[^/]*\.bbappend$" %pn)
@@ -356,7 +356,7 @@ python do_ar_recipe () {
             shutil.copy(file, outdir)
 
     dirname = os.path.dirname(bbfile)
-    bbpath = '%s:%s' % (dirname, d.getVar('BBPATH', True))
+    bbpath = '%s:%s' % (dirname, d.getVar('BBPATH'))
     f = open(bbfile, 'r')
     for line in f.readlines():
         incfile = None
@@ -370,7 +370,7 @@ python do_ar_recipe () {
         if incfile:
             shutil.copy(incfile, outdir)
 
-    create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR', True))
+    create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR'))
     bb.utils.remove(outdir, recurse=True)
 }
 
@@ -379,8 +379,8 @@ python do_dumpdata () {
     dump environment data to ${PF}-showdata.dump
     """
 
-    dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR', True), \
-        '%s-showdata.dump' % d.getVar('PF', True))
+    dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR'), \
+        '%s-showdata.dump' % d.getVar('PF'))
     bb.note('Dumping metadata into %s' % dumpfile)
     with open(dumpfile, "w") as f:
         # emit variables and shell functions
diff --git a/meta/classes/autotools.bbclass b/meta/classes/autotools.bbclass
index c43ea9a7ef..c43531b050 100644
--- a/meta/classes/autotools.bbclass
+++ b/meta/classes/autotools.bbclass
@@ -1,8 +1,8 @@
 def autotools_dep_prepend(d):
-    if d.getVar('INHIBIT_AUTOTOOLS_DEPS', True):
+    if d.getVar('INHIBIT_AUTOTOOLS_DEPS'):
         return ''
 
-    pn = d.getVar('PN', True)
+    pn = d.getVar('PN')
     deps = ''
 
     if pn in ['autoconf-native', 'automake-native', 'help2man-native']:
@@ -14,7 +14,7 @@ def autotools_dep_prepend(d):
     if not bb.data.inherits_class('native', d) \
             and not bb.data.inherits_class('nativesdk', d) \
             and not bb.data.inherits_class('cross', d) \
-            and not d.getVar('INHIBIT_DEFAULT_DEPS', True):
+            and not d.getVar('INHIBIT_DEFAULT_DEPS'):
         deps += 'libtool-cross '
 
     return deps + 'gnu-config-native '
@@ -139,15 +139,15 @@ ACLOCALDIR = "${WORKDIR}/aclocal-copy"
 python autotools_copy_aclocals () {
     import copy
 
-    s = d.getVar("AUTOTOOLS_SCRIPT_PATH", True)
+    s = d.getVar("AUTOTOOLS_SCRIPT_PATH")
     if not os.path.exists(s + "/configure.in") and not os.path.exists(s + "/configure.ac"):
         if not d.getVar("AUTOTOOLS_COPYACLOCAL", False):
             return
 
     taskdepdata = d.getVar("BB_TASKDEPDATA", False)
     #bb.warn(str(taskdepdata))
-    pn = d.getVar("PN", True)
-    aclocaldir = d.getVar("ACLOCALDIR", True)
+    pn = d.getVar("PN")
+    aclocaldir = d.getVar("ACLOCALDIR")
     oe.path.remove(aclocaldir)
     bb.utils.mkdirhier(aclocaldir)
     start = None
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass
index 19673e6913..2765ebf61b 100644
--- a/meta/classes/base.bbclass
+++ b/meta/classes/base.bbclass
@@ -16,7 +16,7 @@ OE_IMPORTS[type] = "list"
 def oe_import(d):
     import sys
 
-    bbpath = d.getVar("BBPATH", True).split(":")
+    bbpath = d.getVar("BBPATH").split(":")
     sys.path[0:0] = [os.path.join(dir, "lib") for dir in bbpath]
 
     def inject(name, value):
@@ -37,7 +37,7 @@ def oe_import(d):
 OE_IMPORTED := "${@oe_import(d)}"
 
 def lsb_distro_identifier(d):
-    adjust = d.getVar('LSB_DISTRO_ADJUST', True)
+    adjust = d.getVar('LSB_DISTRO_ADJUST')
     adjust_func = None
     if adjust:
         try:
@@ -72,7 +72,7 @@ def base_dep_prepend(d):
     # we need that built is the responsibility of the patch function / class, not
     # the application.
     if not d.getVar('INHIBIT_DEFAULT_DEPS', False):
-        if (d.getVar('HOST_SYS', True) != d.getVar('BUILD_SYS', True)):
+        if (d.getVar('HOST_SYS') != d.getVar('BUILD_SYS')):
             deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc "
     return deps
 
@@ -83,11 +83,11 @@ DEPENDS_prepend="${BASEDEPENDS} "
 FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
 # THISDIR only works properly with imediate expansion as it has to run
 # in the context of the location its used (:=)
-THISDIR = "${@os.path.dirname(d.getVar('FILE', True))}"
+THISDIR = "${@os.path.dirname(d.getVar('FILE'))}"
 
 def extra_path_elements(d):
     path = ""
-    elements = (d.getVar('EXTRANATIVEPATH', True) or "").split()
+    elements = (d.getVar('EXTRANATIVEPATH') or "").split()
     for e in elements:
         path = path + "${STAGING_BINDIR_NATIVE}/" + e + ":"
     return path
@@ -96,11 +96,11 @@ PATH_prepend = "${@extra_path_elements(d)}"
 
 def get_lic_checksum_file_list(d):
     filelist = []
-    lic_files = d.getVar("LIC_FILES_CHKSUM", True) or ''
-    tmpdir = d.getVar("TMPDIR", True)
-    s = d.getVar("S", True)
-    b = d.getVar("B", True)
-    workdir = d.getVar("WORKDIR", True)
+    lic_files = d.getVar("LIC_FILES_CHKSUM") or ''
+    tmpdir = d.getVar("TMPDIR")
+    s = d.getVar("S")
+    b = d.getVar("B")
+    workdir = d.getVar("WORKDIR")
 
     urls = lic_files.split()
     for url in urls:
@@ -116,7 +116,7 @@ def get_lic_checksum_file_list(d):
                     continue
             filelist.append(path + ":" + str(os.path.exists(path)))
         except bb.fetch.MalformedUrl:
-            bb.fatal(d.getVar('PN', True) + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
+            bb.fatal(d.getVar('PN') + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
     return " ".join(filelist)
 
 addtask fetch
@@ -126,7 +126,7 @@ do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
 do_fetch[vardeps] += "SRCREV"
 python base_do_fetch() {
 
-    src_uri = (d.getVar('SRC_URI', True) or "").split()
+    src_uri = (d.getVar('SRC_URI') or "").split()
     if len(src_uri) == 0:
         return
 
@@ -141,31 +141,31 @@ addtask unpack after do_fetch
 do_unpack[dirs] = "${WORKDIR}"
 
 python () {
-    if d.getVar('S', True) != d.getVar('WORKDIR', True):
+    if d.getVar('S') != d.getVar('WORKDIR'):
         d.setVarFlag('do_unpack', 'cleandirs', '${S}')
     else:
         d.setVarFlag('do_unpack', 'cleandirs', os.path.join('${S}', 'patches'))
 }
 python base_do_unpack() {
-    src_uri = (d.getVar('SRC_URI', True) or "").split()
+    src_uri = (d.getVar('SRC_URI') or "").split()
     if len(src_uri) == 0:
         return
 
     try:
         fetcher = bb.fetch2.Fetch(src_uri, d)
-        fetcher.unpack(d.getVar('WORKDIR', True))
+        fetcher.unpack(d.getVar('WORKDIR'))
     except bb.fetch2.BBFetchException as e:
         bb.fatal(str(e))
 }
 
 def pkgarch_mapping(d):
     # Compatibility mappings of TUNE_PKGARCH (opt in)
-    if d.getVar("PKGARCHCOMPAT_ARMV7A", True):
-        if d.getVar("TUNE_PKGARCH", True) == "armv7a-vfp-neon":
+    if d.getVar("PKGARCHCOMPAT_ARMV7A"):
+        if d.getVar("TUNE_PKGARCH") == "armv7a-vfp-neon":
             d.setVar("TUNE_PKGARCH", "armv7a")
 
 def get_layers_branch_rev(d):
-    layers = (d.getVar("BBLAYERS", True) or "").split()
+    layers = (d.getVar("BBLAYERS") or "").split()
     layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \
         base_get_metadata_git_branch(i, None).strip(), \
         base_get_metadata_git_revision(i, None)) \
@@ -192,7 +192,7 @@ BUILDCFG_FUNCS[type] = "list"
 def buildcfg_vars(d):
     statusvars = oe.data.typed_value('BUILDCFG_VARS', d)
     for var in statusvars:
-        value = d.getVar(var, True)
+        value = d.getVar(var)
         if value is not None:
             yield '%-17s = "%s"' % (var, value)
 
@@ -200,7 +200,7 @@ def buildcfg_neededvars(d):
     needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
     pesteruser = []
     for v in needed_vars:
-        val = d.getVar(v, True)
+        val = d.getVar(v)
         if not val or val == 'INVALID':
             pesteruser.append(v)
 
@@ -233,7 +233,7 @@ python base_eventhandler() {
         if flines:
             statuslines.extend(flines)
 
-        statusheader = e.data.getVar('BUILDCFG_HEADER', True)
+        statusheader = e.data.getVar('BUILDCFG_HEADER')
         if statusheader:
             bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))
 
@@ -241,7 +241,7 @@ python base_eventhandler() {
     # target ones and we'd see dulpicate key names overwriting each other
     # for various PREFERRED_PROVIDERS
     if isinstance(e, bb.event.RecipePreFinalise):
-        if e.data.getVar("TARGET_PREFIX", True) == e.data.getVar("SDK_PREFIX", True):
+        if e.data.getVar("TARGET_PREFIX") == e.data.getVar("SDK_PREFIX"):
             e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils")
             e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc-initial")
             e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc")
@@ -267,14 +267,14 @@ python base_eventhandler() {
     # sysroot since they're now "unreachable". This makes switching virtual/kernel work in
     # particular.
     #
-    pn = d.getVar('PN', True)
+    pn = d.getVar('PN')
     source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
     if not source_mirror_fetch:
-        provs = (d.getVar("PROVIDES", True) or "").split()
-        multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST", True) or "").split()
+        provs = (d.getVar("PROVIDES") or "").split()
+        multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST") or "").split()
         for p in provs:
             if p.startswith("virtual/") and p not in multiwhitelist:
-                profprov = d.getVar("PREFERRED_PROVIDER_" + p, True)
+                profprov = d.getVar("PREFERRED_PROVIDER_" + p)
                 if profprov and pn != profprov:
                     raise bb.parse.SkipPackage("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn))
 }
@@ -336,9 +336,9 @@ def set_packagetriplet(d):
     tos = []
     tvs = []
 
-    archs.append(d.getVar("PACKAGE_ARCHS", True).split())
-    tos.append(d.getVar("TARGET_OS", True))
-    tvs.append(d.getVar("TARGET_VENDOR", True))
+    archs.append(d.getVar("PACKAGE_ARCHS").split())
+    tos.append(d.getVar("TARGET_OS"))
+    tvs.append(d.getVar("TARGET_VENDOR"))
 
     def settriplet(d, varname, archs, tos, tvs):
         triplets = []
@@ -350,16 +350,16 @@ def set_packagetriplet(d):
 
     settriplet(d, "PKGTRIPLETS", archs, tos, tvs)
 
-    variants = d.getVar("MULTILIB_VARIANTS", True) or ""
+    variants = d.getVar("MULTILIB_VARIANTS") or ""
     for item in variants.split():
         localdata = bb.data.createCopy(d)
         overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
         localdata.setVar("OVERRIDES", overrides)
         bb.data.update_data(localdata)
 
-        archs.append(localdata.getVar("PACKAGE_ARCHS", True).split())
-        tos.append(localdata.getVar("TARGET_OS", True))
-        tvs.append(localdata.getVar("TARGET_VENDOR", True))
+        archs.append(localdata.getVar("PACKAGE_ARCHS").split())
+        tos.append(localdata.getVar("TARGET_OS"))
+        tvs.append(localdata.getVar("TARGET_VENDOR"))
 
     settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs)
 
@@ -374,10 +374,10 @@ python () {
     # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends"
     pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
     if pkgconfigflags:
-        pkgconfig = (d.getVar('PACKAGECONFIG', True) or "").split()
-        pn = d.getVar("PN", True)
+        pkgconfig = (d.getVar('PACKAGECONFIG') or "").split()
+        pn = d.getVar("PN")
 
-        mlprefix = d.getVar("MLPREFIX", True)
+        mlprefix = d.getVar("MLPREFIX")
 
         def expandFilter(appends, extension, prefix):
             appends = bb.utils.explode_deps(d.expand(" ".join(appends)))
@@ -419,7 +419,7 @@ python () {
             num = len(items)
             if num > 4:
                 bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend can be specified!"
-                    % (d.getVar('PN', True), flag))
+                    % (d.getVar('PN'), flag))
 
             if flag in pkgconfig:
                 if num >= 3 and items[2]:
@@ -434,8 +434,8 @@ python () {
         appendVar('RDEPENDS_${PN}', extrardeps)
         appendVar('PACKAGECONFIG_CONFARGS', extraconf)
 
-    pn = d.getVar('PN', True)
-    license = d.getVar('LICENSE', True)
+    pn = d.getVar('PN')
+    license = d.getVar('LICENSE')
     if license == "INVALID":
         bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)
 
@@ -465,26 +465,26 @@ python () {
         d.setVarFlag('do_devshell', 'fakeroot', '1')
         d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
 
-    need_machine = d.getVar('COMPATIBLE_MACHINE', True)
+    need_machine = d.getVar('COMPATIBLE_MACHINE')
     if need_machine:
         import re
-        compat_machines = (d.getVar('MACHINEOVERRIDES', True) or "").split(":")
+        compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":")
         for m in compat_machines:
             if re.match(need_machine, m):
                 break
         else:
-            raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE', True))
+            raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE'))
 
     source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
     if not source_mirror_fetch:
-        need_host = d.getVar('COMPATIBLE_HOST', True)
+        need_host = d.getVar('COMPATIBLE_HOST')
         if need_host:
             import re
-            this_host = d.getVar('HOST_SYS', True)
+            this_host = d.getVar('HOST_SYS')
             if not re.match(need_host, this_host):
                 raise bb.parse.SkipPackage("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)
 
-    bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE', True) or "").split()
+    bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()
 
     check_license = False if pn.startswith("nativesdk-") else True
     for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}",
@@ -503,21 +503,21 @@ python () {
         for lic in bad_licenses:
             spdx_license = return_spdx(d, lic)
             for w in ["LGPLv2_WHITELIST_", "WHITELIST_"]:
-                whitelist.extend((d.getVar(w + lic, True) or "").split())
+                whitelist.extend((d.getVar(w + lic) or "").split())
                 if spdx_license:
-                    whitelist.extend((d.getVar(w + spdx_license, True) or "").split())
+                    whitelist.extend((d.getVar(w + spdx_license) or "").split())
                 '''
                 We need to track what we are whitelisting and why. If pn is
                 incompatible we need to be able to note that the image that
                 is created may infact contain incompatible licenses despite
                 INCOMPATIBLE_LICENSE being set.
                 '''
-                incompatwl.extend((d.getVar(w + lic, True) or "").split())
+                incompatwl.extend((d.getVar(w + lic) or "").split())
                 if spdx_license:
-                    incompatwl.extend((d.getVar(w + spdx_license, True) or "").split())
+                    incompatwl.extend((d.getVar(w + spdx_license) or "").split())
 
         if not pn in whitelist:
-            pkgs = d.getVar('PACKAGES', True).split()
+            pkgs = d.getVar('PACKAGES').split()
             skipped_pkgs = []
             unskipped_pkgs = []
             for pkg in pkgs:
@@ -529,7 +529,7 @@ python () {
             if unskipped_pkgs:
                 for pkg in skipped_pkgs:
                     bb.debug(1, "SKIPPING the package " + pkg + " at do_rootfs because it's " + license)
-                    mlprefix = d.getVar('MLPREFIX', True)
+                    mlprefix = d.getVar('MLPREFIX')
                     d.setVar('LICENSE_EXCLUSION-' + mlprefix + pkg, 1)
                 for pkg in unskipped_pkgs:
                     bb.debug(1, "INCLUDING the package " + pkg)
@@ -545,8 +545,8 @@ python () {
         # matching of license expressions - just check that all license strings
         # in LICENSE_<pkg> are found in LICENSE.
         license_set = oe.license.list_licenses(license)
-        for pkg in d.getVar('PACKAGES', True).split():
-            pkg_license = d.getVar('LICENSE_' + pkg, True)
+        for pkg in d.getVar('PACKAGES').split():
+            pkg_license = d.getVar('LICENSE_' + pkg)
             if pkg_license:
                 unlisted = oe.license.list_licenses(pkg_license) - license_set
                 if unlisted:
@@ -554,7 +554,7 @@ python () {
554 "listed in LICENSE" % (pkg, ' '.join(unlisted))) 554 "listed in LICENSE" % (pkg, ' '.join(unlisted)))
555 555
556 needsrcrev = False 556 needsrcrev = False
557 srcuri = d.getVar('SRC_URI', True) 557 srcuri = d.getVar('SRC_URI')
558 for uri in srcuri.split(): 558 for uri in srcuri.split():
559 (scheme, _ , path) = bb.fetch.decodeurl(uri)[:3] 559 (scheme, _ , path) = bb.fetch.decodeurl(uri)[:3]
560 560
@@ -614,8 +614,8 @@ python () {
     set_packagetriplet(d)
 
     # 'multimachine' handling
-    mach_arch = d.getVar('MACHINE_ARCH', True)
-    pkg_arch = d.getVar('PACKAGE_ARCH', True)
+    mach_arch = d.getVar('MACHINE_ARCH')
+    pkg_arch = d.getVar('PACKAGE_ARCH')
 
     if (pkg_arch == mach_arch):
         # Already machine specific - nothing further to do
@@ -625,11 +625,11 @@ python () {
     # We always try to scan SRC_URI for urls with machine overrides
     # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
     #
-    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', True)
+    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH')
     if override != '0':
         paths = []
-        fpaths = (d.getVar('FILESPATH', True) or '').split(':')
-        machine = d.getVar('MACHINE', True)
+        fpaths = (d.getVar('FILESPATH') or '').split(':')
+        machine = d.getVar('MACHINE')
         for p in fpaths:
             if os.path.basename(p) == machine and os.path.isdir(p):
                 paths.append(p)
@@ -646,16 +646,16 @@ python () {
             d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
             return
 
-    packages = d.getVar('PACKAGES', True).split()
+    packages = d.getVar('PACKAGES').split()
     for pkg in packages:
-        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg, True)
+        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg)
 
         # We could look for != PACKAGE_ARCH here but how to choose
         # if multiple differences are present?
         # Look through PACKAGE_ARCHS for the priority order?
         if pkgarch and pkgarch == mach_arch:
             d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
-            bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN", True))
+            bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN"))
 }
 
 addtask cleansstate after do_clean
@@ -666,7 +666,7 @@ addtask cleanall after do_cleansstate
 do_cleansstate[nostamp] = "1"
 
 python do_cleanall() {
-    src_uri = (d.getVar('SRC_URI', True) or "").split()
+    src_uri = (d.getVar('SRC_URI') or "").split()
     if len(src_uri) == 0:
         return
 
diff --git a/meta/classes/binconfig.bbclass b/meta/classes/binconfig.bbclass
index 8591308aa7..5372294142 100644
--- a/meta/classes/binconfig.bbclass
+++ b/meta/classes/binconfig.bbclass
@@ -22,7 +22,7 @@ def get_binconfig_mangle(d):
         s += " -e 's:-I${WORKDIR}:-I${STAGING_INCDIR}:'"
         s += " -e 's:-L${WORKDIR}:-L${STAGING_LIBDIR}:'"
     if d.getVar("OE_BINCONFIG_EXTRA_MANGLE", False):
-        s += d.getVar("OE_BINCONFIG_EXTRA_MANGLE", True)
+        s += d.getVar("OE_BINCONFIG_EXTRA_MANGLE")
 
     return s
 
diff --git a/meta/classes/blacklist.bbclass b/meta/classes/blacklist.bbclass
index a0141a82c0..c6f422398c 100644
--- a/meta/classes/blacklist.bbclass
+++ b/meta/classes/blacklist.bbclass
@@ -16,7 +16,7 @@
 addhandler blacklist_multilib_eventhandler
 blacklist_multilib_eventhandler[eventmask] = "bb.event.ConfigParsed"
 python blacklist_multilib_eventhandler() {
-    multilibs = e.data.getVar('MULTILIBS', True)
+    multilibs = e.data.getVar('MULTILIBS')
     if not multilibs:
         return
 
@@ -38,7 +38,7 @@ python blacklist_multilib_eventhandler() {
 }
 
 python () {
-    blacklist = d.getVarFlag('PNBLACKLIST', d.getVar('PN', True), True)
+    blacklist = d.getVarFlag('PNBLACKLIST', d.getVar('PN'), True)
 
     if blacklist:
         raise bb.parse.SkipPackage("Recipe is blacklisted: %s" % (blacklist))
diff --git a/meta/classes/bugzilla.bbclass b/meta/classes/bugzilla.bbclass
index 3fc8956428..6d9a8211f0 100644
--- a/meta/classes/bugzilla.bbclass
+++ b/meta/classes/bugzilla.bbclass
@@ -110,12 +110,12 @@ python bugzilla_eventhandler() {
         return
 
     if name == "TaskFailed":
-        xmlrpc = data.getVar("BUGZILLA_XMLRPC", True)
-        user = data.getVar("BUGZILLA_USER", True)
-        passw = data.getVar("BUGZILLA_PASS", True)
-        product = data.getVar("BUGZILLA_PRODUCT", True)
-        compon = data.getVar("BUGZILLA_COMPONENT", True)
-        version = data.getVar("BUGZILLA_VERSION", True)
+        xmlrpc = data.getVar("BUGZILLA_XMLRPC")
+        user = data.getVar("BUGZILLA_USER")
+        passw = data.getVar("BUGZILLA_PASS")
+        product = data.getVar("BUGZILLA_PRODUCT")
+        compon = data.getVar("BUGZILLA_COMPONENT")
+        version = data.getVar("BUGZILLA_VERSION")
 
         proxy = data.getVar('http_proxy', True )
         if (proxy):
@@ -133,14 +133,14 @@ python bugzilla_eventhandler() {
                 'component': compon}
 
         # evil hack to figure out what is going on
-        debug_file = open(os.path.join(data.getVar("TMPDIR", True),"..","bugzilla-log"),"a")
+        debug_file = open(os.path.join(data.getVar("TMPDIR"),"..","bugzilla-log"),"a")
 
         file = None
-        bugname = "%(package)s-%(pv)s-autobuild" % { "package" : data.getVar("PN", True),
-                                                     "pv"      : data.getVar("PV", True),
+        bugname = "%(package)s-%(pv)s-autobuild" % { "package" : data.getVar("PN"),
+                                                     "pv"      : data.getVar("PV"),
                                                    }
-        log_file = glob.glob("%s/log.%s.*" % (event.data.getVar('T', True), event.task))
-        text = "The %s step in %s failed at %s for machine %s" % (e.task, data.getVar("PN", True), data.getVar('DATETIME', True), data.getVar( 'MACHINE', True ) )
+        log_file = glob.glob("%s/log.%s.*" % (event.data.getVar('T'), event.task))
+        text = "The %s step in %s failed at %s for machine %s" % (e.task, data.getVar("PN"), data.getVar('DATETIME'), data.getVar( 'MACHINE', True ) )
         if len(log_file) != 0:
             print >> debug_file, "Adding log file %s" % log_file[0]
             file = open(log_file[0], 'r')
@@ -168,7 +168,7 @@ python bugzilla_eventhandler() {
 
         if bug_number and log:
             print >> debug_file, "The bug is known as '%s'" % bug_number
-            desc = "Build log for machine %s" % (data.getVar('MACHINE', True))
+            desc = "Build log for machine %s" % (data.getVar('MACHINE'))
             if not bugzilla_create_attachment(debug_file, server, args.copy(), bug_number, text, log_file[0], log, desc):
                 print >> debug_file, "Failed to attach the build log for bug #%s" % bug_number
             else:
diff --git a/meta/classes/buildhistory.bbclass b/meta/classes/buildhistory.bbclass
index d82e9bb55c..73cd88669a 100644
--- a/meta/classes/buildhistory.bbclass
+++ b/meta/classes/buildhistory.bbclass
@@ -64,18 +64,18 @@ PATCH_GIT_USER_NAME ?= "OpenEmbedded"
 # Write out metadata about this package for comparison when writing future packages
 #
 python buildhistory_emit_pkghistory() {
-    if not d.getVar('BB_CURRENTTASK', True) in ['packagedata', 'packagedata_setscene']:
+    if not d.getVar('BB_CURRENTTASK') in ['packagedata', 'packagedata_setscene']:
         return 0
 
-    if not "package" in (d.getVar('BUILDHISTORY_FEATURES', True) or "").split():
+    if not "package" in (d.getVar('BUILDHISTORY_FEATURES') or "").split():
         return 0
 
     import re
     import json
     import errno
 
-    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True)
-    oldpkghistdir = d.getVar('BUILDHISTORY_OLD_DIR_PACKAGE', True)
+    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE')
+    oldpkghistdir = d.getVar('BUILDHISTORY_OLD_DIR_PACKAGE')
 
     class RecipeInfo:
         def __init__(self, name):
@@ -182,12 +182,12 @@ python buildhistory_emit_pkghistory() {
         items.sort()
         return ' '.join(items)
 
-    pn = d.getVar('PN', True)
-    pe = d.getVar('PE', True) or "0"
-    pv = d.getVar('PV', True)
-    pr = d.getVar('PR', True)
+    pn = d.getVar('PN')
+    pe = d.getVar('PE') or "0"
+    pv = d.getVar('PV')
+    pr = d.getVar('PR')
 
-    pkgdata_dir = d.getVar('PKGDATA_DIR', True)
+    pkgdata_dir = d.getVar('PKGDATA_DIR')
     packages = ""
     try:
         with open(os.path.join(pkgdata_dir, pn)) as f:
@@ -203,7 +203,7 @@ python buildhistory_emit_pkghistory() {
             raise
 
     packagelist = packages.split()
-    preserve = d.getVar('BUILDHISTORY_PRESERVE', True).split()
+    preserve = d.getVar('BUILDHISTORY_PRESERVE').split()
     if not os.path.exists(pkghistdir):
         bb.utils.mkdirhier(pkghistdir)
     else:
@@ -223,11 +223,11 @@ python buildhistory_emit_pkghistory() {
     rcpinfo.pe = pe
     rcpinfo.pv = pv
     rcpinfo.pr = pr
-    rcpinfo.depends = sortlist(oe.utils.squashspaces(d.getVar('DEPENDS', True) or ""))
+    rcpinfo.depends = sortlist(oe.utils.squashspaces(d.getVar('DEPENDS') or ""))
     rcpinfo.packages = packages
     write_recipehistory(rcpinfo, d)
 
-    pkgdest = d.getVar('PKGDEST', True)
+    pkgdest = d.getVar('PKGDEST')
     for pkg in packagelist:
         pkgdata = {}
         with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f:
@@ -293,7 +293,7 @@ python buildhistory_emit_pkghistory() {
 def write_recipehistory(rcpinfo, d):
     bb.debug(2, "Writing recipe history")
 
-    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True)
+    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE')
 
     infofile = os.path.join(pkghistdir, "latest")
     with open(infofile, "w") as f:
@@ -308,7 +308,7 @@ def write_recipehistory(rcpinfo, d):
 def write_pkghistory(pkginfo, d):
     bb.debug(2, "Writing package history for package %s" % pkginfo.name)
 
-    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True)
+    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE')
 
     pkgpath = os.path.join(pkghistdir, pkginfo.name)
     if not os.path.exists(pkgpath):
@@ -369,7 +369,7 @@ def buildhistory_list_installed(d, rootfs_type="image"):
         pkgs = sdk_list_installed_packages(d, rootfs_type == "sdk_target")
 
     for output_type, output_file in process_list:
-        output_file_full = os.path.join(d.getVar('WORKDIR', True), output_file)
+        output_file_full = os.path.join(d.getVar('WORKDIR'), output_file)
 
         with open(output_file_full, 'w') as output:
             output.write(format_pkg_list(pkgs, output_type))
@@ -550,7 +550,7 @@ END
 python buildhistory_get_extra_sdkinfo() {
     import operator
     import math
-    if d.getVar('BB_CURRENTTASK', True) == 'populate_sdk_ext':
+    if d.getVar('BB_CURRENTTASK') == 'populate_sdk_ext':
         tasksizes = {}
         filesizes = {}
         for root, _, files in os.walk(d.expand('${SDK_OUTPUT}/${SDKPATH}/sstate-cache')):
@@ -591,7 +591,7 @@ SDK_POSTPROCESS_COMMAND_append = " buildhistory_get_sdkinfo ; buildhistory_get_e
 SDK_POSTPROCESS_COMMAND[vardepvalueexclude] .= "| buildhistory_get_sdkinfo ; buildhistory_get_extra_sdkinfo; "
 
 def buildhistory_get_build_id(d):
-    if d.getVar('BB_WORKERCONTEXT', True) != '1':
+    if d.getVar('BB_WORKERCONTEXT') != '1':
         return ""
     localdata = bb.data.createCopy(d)
     bb.data.update_data(localdata)
@@ -605,12 +605,12 @@ def buildhistory_get_build_id(d):
         if flines:
             statuslines.extend(flines)
 
-    statusheader = d.getVar('BUILDCFG_HEADER', True)
+    statusheader = d.getVar('BUILDCFG_HEADER')
     return('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))
 
 def buildhistory_get_metadata_revs(d):
     # We want an easily machine-readable format here, so get_layers_branch_rev isn't quite what we want
-    layers = (d.getVar("BBLAYERS", True) or "").split()
+    layers = (d.getVar("BBLAYERS") or "").split()
     medadata_revs = ["%-17s = %s:%s" % (os.path.basename(i), \
         base_get_metadata_git_branch(i, None).strip(), \
         base_get_metadata_git_revision(i, None)) \
@@ -622,7 +622,7 @@ def outputvars(vars, listvars, d):
     listvars = listvars.split()
     ret = ""
     for var in vars:
-        value = d.getVar(var, True) or ""
+        value = d.getVar(var) or ""
         if var in listvars:
             # Squash out spaces
             value = oe.utils.squashspaces(value)
@@ -630,17 +630,17 @@ def outputvars(vars, listvars, d):
     return ret.rstrip('\n')
 
 def buildhistory_get_imagevars(d):
-    if d.getVar('BB_WORKERCONTEXT', True) != '1':
+    if d.getVar('BB_WORKERCONTEXT') != '1':
         return ""
     imagevars = "DISTRO DISTRO_VERSION USER_CLASSES IMAGE_CLASSES IMAGE_FEATURES IMAGE_LINGUAS IMAGE_INSTALL BAD_RECOMMENDATIONS NO_RECOMMENDATIONS PACKAGE_EXCLUDE ROOTFS_POSTPROCESS_COMMAND IMAGE_POSTPROCESS_COMMAND"
     listvars = "USER_CLASSES IMAGE_CLASSES IMAGE_FEATURES IMAGE_LINGUAS IMAGE_INSTALL BAD_RECOMMENDATIONS PACKAGE_EXCLUDE"
     return outputvars(imagevars, listvars, d)
 
 def buildhistory_get_sdkvars(d):
-    if d.getVar('BB_WORKERCONTEXT', True) != '1':
+    if d.getVar('BB_WORKERCONTEXT') != '1':
         return ""
     sdkvars = "DISTRO DISTRO_VERSION SDK_NAME SDK_VERSION SDKMACHINE SDKIMAGE_FEATURES BAD_RECOMMENDATIONS NO_RECOMMENDATIONS PACKAGE_EXCLUDE"
-    if d.getVar('BB_CURRENTTASK', True) == 'populate_sdk_ext':
+    if d.getVar('BB_CURRENTTASK') == 'populate_sdk_ext':
         # Extensible SDK uses some additional variables
         sdkvars += " SDK_LOCAL_CONF_WHITELIST SDK_LOCAL_CONF_BLACKLIST SDK_INHERIT_BLACKLIST SDK_UPDATE_URL SDK_EXT_TYPE SDK_RECRDEP_TASKS SDK_INCLUDE_PKGDATA SDK_INCLUDE_TOOLCHAIN"
     listvars = "SDKIMAGE_FEATURES BAD_RECOMMENDATIONS PACKAGE_EXCLUDE SDK_LOCAL_CONF_WHITELIST SDK_LOCAL_CONF_BLACKLIST SDK_INHERIT_BLACKLIST"
@@ -735,16 +735,16 @@ END
 }
 
 python buildhistory_eventhandler() {
-    if e.data.getVar('BUILDHISTORY_FEATURES', True).strip():
-        reset = e.data.getVar("BUILDHISTORY_RESET", True)
-        olddir = e.data.getVar("BUILDHISTORY_OLD_DIR", True)
+    if e.data.getVar('BUILDHISTORY_FEATURES').strip():
+        reset = e.data.getVar("BUILDHISTORY_RESET")
+        olddir = e.data.getVar("BUILDHISTORY_OLD_DIR")
         if isinstance(e, bb.event.BuildStarted):
             if reset:
                 import shutil
                 # Clean up after potentially interrupted build.
                 if os.path.isdir(olddir):
                     shutil.rmtree(olddir)
-                rootdir = e.data.getVar("BUILDHISTORY_DIR", True)
+                rootdir = e.data.getVar("BUILDHISTORY_DIR")
                 entries = [ x for x in os.listdir(rootdir) if not x.startswith('.') ]
                 bb.utils.mkdirhier(olddir)
                 for entry in entries:
@@ -754,7 +754,7 @@ python buildhistory_eventhandler() {
             if reset:
                 import shutil
                 shutil.rmtree(olddir)
-            if e.data.getVar("BUILDHISTORY_COMMIT", True) == "1":
+            if e.data.getVar("BUILDHISTORY_COMMIT") == "1":
                 bb.note("Writing buildhistory")
                 localdata = bb.data.createCopy(e.data)
                 localdata.setVar('BUILDHISTORY_BUILD_FAILURES', str(e._failures))
@@ -774,7 +774,7 @@ def _get_srcrev_values(d):
774 """ 774 """
775 775
776 scms = [] 776 scms = []
777 fetcher = bb.fetch.Fetch(d.getVar('SRC_URI', True).split(), d) 777 fetcher = bb.fetch.Fetch(d.getVar('SRC_URI').split(), d)
778 urldata = fetcher.ud 778 urldata = fetcher.ud
779 for u in urldata: 779 for u in urldata:
780 if urldata[u].method.supports_srcrev(): 780 if urldata[u].method.supports_srcrev():
@@ -806,7 +806,7 @@ def _get_srcrev_values(d):
 do_fetch[postfuncs] += "write_srcrev"
 do_fetch[vardepsexclude] += "write_srcrev"
 python write_srcrev() {
-    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True)
+    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE')
     srcrevfile = os.path.join(pkghistdir, 'latest_srcrev')
 
     srcrevs, tag_srcrevs = _get_srcrev_values(d)
@@ -838,7 +838,7 @@ python write_srcrev() {
             for name, srcrev in tag_srcrevs.items():
                 f.write('# tag_%s = "%s"\n' % (name, srcrev))
                 if name in old_tag_srcrevs and old_tag_srcrevs[name] != srcrev:
-                    pkg = d.getVar('PN', True)
+                    pkg = d.getVar('PN')
                     bb.warn("Revision for tag %s in package %s was changed since last build (from %s to %s)" % (name, pkg, old_tag_srcrevs[name], srcrev))
 
     else:
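Every hunk in this commit is the same mechanical cleanup: BitBake's datastore getVar() now expands variable references by default, so the explicit True (expand) argument is redundant and is dropped. A minimal sketch of the equivalence, assuming a BitBake checkout on sys.path (bb.data.init() returns a DataSmart store):

    import bb.data

    d = bb.data.init()
    d.setVar('PN', 'busybox')
    d.setVar('BPN', '${PN}')

    assert d.getVar('BPN', True) == 'busybox'        # old idiom: explicit expand flag
    assert d.getVar('BPN') == 'busybox'              # new idiom: expand defaults to True
    assert d.getVar('BPN', expand=False) == '${PN}'  # raw value still available on request

Note that getVarFlag() calls keep their True argument in this commit; only the getVar() expand flag is removed.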
diff --git a/meta/classes/buildstats-summary.bbclass b/meta/classes/buildstats-summary.bbclass
index b86abcc3f1..f9b241b6c5 100644
--- a/meta/classes/buildstats-summary.bbclass
+++ b/meta/classes/buildstats-summary.bbclass
@@ -7,7 +7,7 @@ python buildstats_summary () {
     if not os.path.exists(bsdir):
         return
 
-    sstatetasks = (e.data.getVar('SSTATETASKS', True) or '').split()
+    sstatetasks = (e.data.getVar('SSTATETASKS') or '').split()
     built = collections.defaultdict(lambda: [set(), set()])
     for pf in os.listdir(bsdir):
         taskdir = os.path.join(bsdir, pf)
diff --git a/meta/classes/buildstats.bbclass b/meta/classes/buildstats.bbclass
index c6b77e6a2a..8703cb2b33 100644
--- a/meta/classes/buildstats.bbclass
+++ b/meta/classes/buildstats.bbclass
@@ -75,8 +75,8 @@ def get_buildtimedata(var, d):
     return timediff, cpuperc
 
 def write_task_data(status, logfile, e, d):
-    bn = d.getVar('BUILDNAME', True)
-    bsdir = os.path.join(d.getVar('BUILDSTATS_BASE', True), bn)
+    bn = d.getVar('BUILDNAME')
+    bsdir = os.path.join(d.getVar('BUILDSTATS_BASE'), bn)
     with open(os.path.join(logfile), "a") as f:
         elapsedtime = get_timedata("__timedata_task", d, e.time)
         if elapsedtime:
@@ -106,9 +106,9 @@ python run_buildstats () {
     import bb.event
     import time, subprocess, platform
 
-    bn = d.getVar('BUILDNAME', True)
-    bsdir = os.path.join(d.getVar('BUILDSTATS_BASE', True), bn)
-    taskdir = os.path.join(bsdir, d.getVar('PF', True))
+    bn = d.getVar('BUILDNAME')
+    bsdir = os.path.join(d.getVar('BUILDSTATS_BASE'), bn)
+    taskdir = os.path.join(bsdir, d.getVar('PF'))
 
     if isinstance(e, bb.event.BuildStarted):
         ########################################################################
@@ -162,7 +162,7 @@ python run_buildstats () {
         if e.task == "do_rootfs":
             bs = os.path.join(bsdir, "build_stats")
             with open(bs, "a") as f:
-                rootfs = d.getVar('IMAGE_ROOTFS', True)
+                rootfs = d.getVar('IMAGE_ROOTFS')
                 if os.path.isdir(rootfs):
                     try:
                         rootfs_size = subprocess.check_output(["du", "-sh", rootfs],
@@ -197,7 +197,7 @@ python runqueue_stats () {
     # are available that we need to find the output directory.
     # The persistent SystemStats is stored in the datastore and
     # closed when the build is done.
-    system_stats = d.getVar('_buildstats_system_stats', True)
+    system_stats = d.getVar('_buildstats_system_stats')
     if not system_stats and isinstance(e, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted)):
         system_stats = buildstats.SystemStats(d)
         d.setVar('_buildstats_system_stats', system_stats)
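In the do_rootfs branch above, run_buildstats samples the finished image size with du. A stand-alone sketch of that measurement (the directory path is an example, standing in for IMAGE_ROOTFS):

    import subprocess

    rootfs = '/tmp'  # stand-in for IMAGE_ROOTFS
    try:
        output = subprocess.check_output(["du", "-sh", rootfs],
                                         stderr=subprocess.STDOUT).decode('utf-8')
        print("Uncompressed Rootfs size: %s" % output.split()[0])
    except subprocess.CalledProcessError as err:
        print("du failed with: %s" % err.output.decode('utf-8'))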
diff --git a/meta/classes/ccache.bbclass b/meta/classes/ccache.bbclass
index 2e9837cf07..93fcacaf1a 100644
--- a/meta/classes/ccache.bbclass
+++ b/meta/classes/ccache.bbclass
@@ -1,4 +1,4 @@
-CCACHE = "${@bb.utils.which(d.getVar('PATH', True), 'ccache') and 'ccache '}"
+CCACHE = "${@bb.utils.which(d.getVar('PATH'), 'ccache') and 'ccache '}"
 export CCACHE_DIR ?= "${TMPDIR}/ccache/${MULTIMACH_HOST_SYS}/${PN}"
 CCACHE_DISABLE[unexport] = "1"
 
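The CCACHE assignment is inline Python evaluated at parse time: it yields "ccache " when a ccache binary is found on PATH and "" otherwise, because the and-expression returns its right operand only on a hit. The same logic in plain Python, with shutil.which standing in for bb.utils.which:

    import shutil

    # bb.utils.which returns '' on a miss; shutil.which returns None, hence the `or ''`.
    CCACHE = (shutil.which('ccache') and 'ccache ') or ''
    print(repr(CCACHE))  # "'ccache '" if installed, "''" otherwise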
diff --git a/meta/classes/chrpath.bbclass b/meta/classes/chrpath.bbclass
index f183b4aeeb..ad3c3975a5 100644
--- a/meta/classes/chrpath.bbclass
+++ b/meta/classes/chrpath.bbclass
@@ -44,7 +44,7 @@ def process_file_linux(cmd, fpath, rootdir, baseprefix, tmpdir, d):
     p = sub.Popen([cmd, '-r', args, fpath],stdout=sub.PIPE,stderr=sub.PIPE)
     out, err = p.communicate()
     if p.returncode != 0:
-        bb.fatal("%s: chrpath command failed with exit code %d:\n%s%s" % (d.getVar('PN', True), p.returncode, out, err))
+        bb.fatal("%s: chrpath command failed with exit code %d:\n%s%s" % (d.getVar('PN'), p.returncode, out, err))
 
 def process_file_darwin(cmd, fpath, rootdir, baseprefix, tmpdir, d):
     import subprocess as sub
@@ -72,7 +72,7 @@ def process_dir (rootdir, directory, d):
     cmd = d.expand('${CHRPATH_BIN}')
     tmpdir = os.path.normpath(d.getVar('TMPDIR', False))
     baseprefix = os.path.normpath(d.expand('${base_prefix}'))
-    hostos = d.getVar("HOST_OS", True)
+    hostos = d.getVar("HOST_OS")
 
     #bb.debug("Checking %s for binaries to process" % directory)
     if not os.path.exists(directory):
diff --git a/meta/classes/cmake.bbclass b/meta/classes/cmake.bbclass
index fad0baa519..9e74599296 100644
--- a/meta/classes/cmake.bbclass
+++ b/meta/classes/cmake.bbclass
@@ -46,7 +46,7 @@ cmake_do_generate_toolchain_file() {
 # CMake system name must be something like "Linux".
 # This is important for cross-compiling.
 set( CMAKE_SYSTEM_NAME `echo ${TARGET_OS} | sed -e 's/^./\u&/' -e 's/^\(Linux\).*/\1/'` )
-set( CMAKE_SYSTEM_PROCESSOR ${@map_target_arch_to_uname_arch(d.getVar('TARGET_ARCH', True))} )
+set( CMAKE_SYSTEM_PROCESSOR ${@map_target_arch_to_uname_arch(d.getVar('TARGET_ARCH'))} )
 set( CMAKE_C_COMPILER ${OECMAKE_C_COMPILER} )
 set( CMAKE_CXX_COMPILER ${OECMAKE_CXX_COMPILER} )
 set( CMAKE_ASM_COMPILER ${OECMAKE_C_COMPILER} )
@@ -112,15 +112,15 @@ cmake_do_configure() {
       ${OECMAKE_SITEFILE} \
       ${OECMAKE_SOURCEPATH} \
       -DCMAKE_INSTALL_PREFIX:PATH=${prefix} \
-      -DCMAKE_INSTALL_BINDIR:PATH=${@os.path.relpath(d.getVar('bindir', True), d.getVar('prefix', True))} \
-      -DCMAKE_INSTALL_SBINDIR:PATH=${@os.path.relpath(d.getVar('sbindir', True), d.getVar('prefix', True))} \
-      -DCMAKE_INSTALL_LIBEXECDIR:PATH=${@os.path.relpath(d.getVar('libexecdir', True), d.getVar('prefix', True))} \
+      -DCMAKE_INSTALL_BINDIR:PATH=${@os.path.relpath(d.getVar('bindir'), d.getVar('prefix'))} \
+      -DCMAKE_INSTALL_SBINDIR:PATH=${@os.path.relpath(d.getVar('sbindir'), d.getVar('prefix'))} \
+      -DCMAKE_INSTALL_LIBEXECDIR:PATH=${@os.path.relpath(d.getVar('libexecdir'), d.getVar('prefix'))} \
       -DCMAKE_INSTALL_SYSCONFDIR:PATH=${sysconfdir} \
-      -DCMAKE_INSTALL_SHAREDSTATEDIR:PATH=${@os.path.relpath(d.getVar('sharedstatedir', True), d. getVar('prefix', True))} \
+      -DCMAKE_INSTALL_SHAREDSTATEDIR:PATH=${@os.path.relpath(d.getVar('sharedstatedir'), d. getVar('prefix'))} \
       -DCMAKE_INSTALL_LOCALSTATEDIR:PATH=${localstatedir} \
-      -DCMAKE_INSTALL_LIBDIR:PATH=${@os.path.relpath(d.getVar('libdir', True), d.getVar('prefix', True))} \
-      -DCMAKE_INSTALL_INCLUDEDIR:PATH=${@os.path.relpath(d.getVar('includedir', True), d.getVar('prefix', True))} \
-      -DCMAKE_INSTALL_DATAROOTDIR:PATH=${@os.path.relpath(d.getVar('datadir', True), d.getVar('prefix', True))} \
+      -DCMAKE_INSTALL_LIBDIR:PATH=${@os.path.relpath(d.getVar('libdir'), d.getVar('prefix'))} \
+      -DCMAKE_INSTALL_INCLUDEDIR:PATH=${@os.path.relpath(d.getVar('includedir'), d.getVar('prefix'))} \
+      -DCMAKE_INSTALL_DATAROOTDIR:PATH=${@os.path.relpath(d.getVar('datadir'), d.getVar('prefix'))} \
       -DCMAKE_INSTALL_SO_NO_EXE=0 \
       -DCMAKE_TOOLCHAIN_FILE=${WORKDIR}/toolchain.cmake \
       -DCMAKE_VERBOSE_MAKEFILE=1 \
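The CMAKE_INSTALL_* switches above hand CMake its GNUInstallDirs entries relative to the prefix, which the inline ${@...} expressions compute with os.path.relpath. A plain-Python illustration (paths are examples):

    import os.path

    prefix = '/usr'
    for path in ('/usr/bin', '/usr/lib64', '/usr/share'):
        print(os.path.relpath(path, prefix))  # bin, lib64, share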
diff --git a/meta/classes/cml1.bbclass b/meta/classes/cml1.bbclass
index 5834806269..187d407d98 100644
--- a/meta/classes/cml1.bbclass
+++ b/meta/classes/cml1.bbclass
@@ -26,7 +26,7 @@ python do_menuconfig() {
     except OSError:
         mtime = 0
 
-    oe_terminal("${SHELL} -c \"make %s; if [ \$? -ne 0 ]; then echo 'Command failed.'; printf 'Press any key to continue... '; read r; fi\"" % d.getVar('KCONFIG_CONFIG_COMMAND', True),
+    oe_terminal("${SHELL} -c \"make %s; if [ \$? -ne 0 ]; then echo 'Command failed.'; printf 'Press any key to continue... '; read r; fi\"" % d.getVar('KCONFIG_CONFIG_COMMAND'),
                 d.getVar('PN', True ) + ' Configuration', d)
 
     # FIXME this check can be removed when the minimum bitbake version has been bumped
@@ -49,7 +49,7 @@ python do_diffconfig() {
     import shutil
     import subprocess
 
-    workdir = d.getVar('WORKDIR', True)
+    workdir = d.getVar('WORKDIR')
     fragment = workdir + '/fragment.cfg'
     configorig = '.config.orig'
     config = '.config'
diff --git a/meta/classes/compress_doc.bbclass b/meta/classes/compress_doc.bbclass
index 8073c173e5..069db1997b 100644
--- a/meta/classes/compress_doc.bbclass
+++ b/meta/classes/compress_doc.bbclass
@@ -31,25 +31,25 @@ DOC_DECOMPRESS_CMD[xz] ?= "unxz -v"
 
 PACKAGE_PREPROCESS_FUNCS += "package_do_compress_doc compress_doc_updatealternatives"
 python package_do_compress_doc() {
-    compress_mode = d.getVar('DOC_COMPRESS', True)
-    compress_list = (d.getVar('DOC_COMPRESS_LIST', True) or '').split()
+    compress_mode = d.getVar('DOC_COMPRESS')
+    compress_list = (d.getVar('DOC_COMPRESS_LIST') or '').split()
     if compress_mode not in compress_list:
         bb.fatal('Compression policy %s not supported (not listed in %s)\n' % (compress_mode, compress_list))
 
-    dvar = d.getVar('PKGD', True)
+    dvar = d.getVar('PKGD')
     compress_cmds = {}
     decompress_cmds = {}
     for mode in compress_list:
         compress_cmds[mode] = d.getVarFlag('DOC_COMPRESS_CMD', mode, True)
         decompress_cmds[mode] = d.getVarFlag('DOC_DECOMPRESS_CMD', mode, True)
 
-    mandir = os.path.abspath(dvar + os.sep + d.getVar("mandir", True))
+    mandir = os.path.abspath(dvar + os.sep + d.getVar("mandir"))
     if os.path.exists(mandir):
         # Decompress doc files which format is not compress_mode
         decompress_doc(mandir, compress_mode, decompress_cmds)
         compress_doc(mandir, compress_mode, compress_cmds)
 
-    infodir = os.path.abspath(dvar + os.sep + d.getVar("infodir", True))
+    infodir = os.path.abspath(dvar + os.sep + d.getVar("infodir"))
     if os.path.exists(infodir):
         # Decompress doc files which format is not compress_mode
         decompress_doc(infodir, compress_mode, decompress_cmds)
@@ -218,18 +218,18 @@ python compress_doc_updatealternatives () {
     if not bb.data.inherits_class('update-alternatives', d):
         return
 
-    mandir = d.getVar("mandir", True)
-    infodir = d.getVar("infodir", True)
-    compress_mode = d.getVar('DOC_COMPRESS', True)
-    for pkg in (d.getVar('PACKAGES', True) or "").split():
-        old_names = (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split()
+    mandir = d.getVar("mandir")
+    infodir = d.getVar("infodir")
+    compress_mode = d.getVar('DOC_COMPRESS')
+    for pkg in (d.getVar('PACKAGES') or "").split():
+        old_names = (d.getVar('ALTERNATIVE_%s' % pkg) or "").split()
         new_names = []
         for old_name in old_names:
             old_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', old_name, True)
             old_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, old_name, True) or \
                 d.getVarFlag('ALTERNATIVE_TARGET', old_name, True) or \
-                d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or \
-                d.getVar('ALTERNATIVE_TARGET', True) or \
+                d.getVar('ALTERNATIVE_TARGET_%s' % pkg) or \
+                d.getVar('ALTERNATIVE_TARGET') or \
                 old_link
             # Sometimes old_target is specified as relative to the link name.
             old_target = os.path.join(os.path.dirname(old_link), old_target)
@@ -247,7 +247,7 @@ python compress_doc_updatealternatives () {
         elif d.getVarFlag('ALTERNATIVE_TARGET', old_name, True):
             d.delVarFlag('ALTERNATIVE_TARGET', old_name)
             d.setVarFlag('ALTERNATIVE_TARGET', new_name, new_target)
-        elif d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True):
+        elif d.getVar('ALTERNATIVE_TARGET_%s' % pkg):
             d.setVar('ALTERNATIVE_TARGET_%s' % pkg, new_target)
         elif d.getVar('ALTERNATIVE_TARGET', old_name, True):
             d.setVar('ALTERNATIVE_TARGET', new_target)
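For context, package_do_compress_doc() normalizes every man and info page to the one configured format: files compressed in any other supported format are decompressed first, then recompressed with the DOC_COMPRESS command. A small sketch of that per-file decision (the mode and format set mirror the class defaults and are illustrative):

    import os

    compress_mode = 'gz'             # stand-in for DOC_COMPRESS
    supported = {'gz', 'bz2', 'xz'}  # stand-in for DOC_COMPRESS_LIST

    def plan(docfile):
        ext = os.path.splitext(docfile)[1].lstrip('.')
        if ext in supported and ext != compress_mode:
            return 'decompress %s, then recompress as %s' % (ext, compress_mode)
        if ext not in supported:
            return 'compress as %s' % compress_mode
        return 'already in target format'

    print(plan('ls.1.bz2'))  # decompress bz2, then recompress as gz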
diff --git a/meta/classes/copyleft_compliance.bbclass b/meta/classes/copyleft_compliance.bbclass
index 907c1836b3..eabf12ce7a 100644
--- a/meta/classes/copyleft_compliance.bbclass
+++ b/meta/classes/copyleft_compliance.bbclass
@@ -13,7 +13,7 @@ python do_prepare_copyleft_sources () {
     import os.path
     import shutil
 
-    p = d.getVar('P', True)
+    p = d.getVar('P')
     included, reason = copyleft_should_include(d)
     if not included:
         bb.debug(1, 'copyleft: %s is excluded: %s' % (p, reason))
@@ -21,13 +21,13 @@ python do_prepare_copyleft_sources () {
     else:
         bb.debug(1, 'copyleft: %s is included: %s' % (p, reason))
 
-    sources_dir = d.getVar('COPYLEFT_SOURCES_DIR', True)
-    dl_dir = d.getVar('DL_DIR', True)
-    src_uri = d.getVar('SRC_URI', True).split()
+    sources_dir = d.getVar('COPYLEFT_SOURCES_DIR')
+    dl_dir = d.getVar('DL_DIR')
+    src_uri = d.getVar('SRC_URI').split()
     fetch = bb.fetch2.Fetch(src_uri, d)
     ud = fetch.ud
 
-    pf = d.getVar('PF', True)
+    pf = d.getVar('PF')
     dest = os.path.join(sources_dir, pf)
     shutil.rmtree(dest, ignore_errors=True)
     bb.utils.mkdirhier(dest)
diff --git a/meta/classes/copyleft_filter.bbclass b/meta/classes/copyleft_filter.bbclass
index 46be7f7d2f..426956f08f 100644
--- a/meta/classes/copyleft_filter.bbclass
+++ b/meta/classes/copyleft_filter.bbclass
@@ -49,7 +49,7 @@ def copyleft_should_include(d):
 
     included, motive = False, 'recipe did not match anything'
 
-    recipe_type = d.getVar('COPYLEFT_RECIPE_TYPE', True)
+    recipe_type = d.getVar('COPYLEFT_RECIPE_TYPE')
     if recipe_type not in oe.data.typed_value('COPYLEFT_RECIPE_TYPES', d):
         include, motive = False, 'recipe type "%s" is excluded' % recipe_type
 
@@ -57,9 +57,9 @@ def copyleft_should_include(d):
     exclude = oe.data.typed_value('COPYLEFT_LICENSE_EXCLUDE', d)
 
     try:
-        is_included, reason = oe.license.is_included(d.getVar('LICENSE', True), include, exclude)
+        is_included, reason = oe.license.is_included(d.getVar('LICENSE'), include, exclude)
     except oe.license.LicenseError as exc:
-        bb.fatal('%s: %s' % (d.getVar('PF', True), exc))
+        bb.fatal('%s: %s' % (d.getVar('PF'), exc))
     else:
         if is_included:
             if reason:
@@ -69,10 +69,10 @@ def copyleft_should_include(d):
         else:
             included, motive = False, 'recipe has excluded licenses: %s' % ', '.join(reason)
 
-    if any(fnmatch(d.getVar('PN', True), name) \
+    if any(fnmatch(d.getVar('PN'), name) \
            for name in oe.data.typed_value('COPYLEFT_PN_INCLUDE', d)):
         included, motive = True, 'recipe included by name'
-    if any(fnmatch(d.getVar('PN', True), name) \
+    if any(fnmatch(d.getVar('PN'), name) \
            for name in oe.data.typed_value('COPYLEFT_PN_EXCLUDE', d)):
         included, motive = False, 'recipe excluded by name'
 
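copyleft_should_include() resolves inclusion in stages: recipe type, then license include/exclude sets, then name-based overrides, where COPYLEFT_PN_INCLUDE and COPYLEFT_PN_EXCLUDE patterns are fnmatch-matched against PN and the last match wins. A compact sketch of that final stage (pattern lists are examples):

    from fnmatch import fnmatch

    pn = 'linux-yocto'
    included, motive = False, 'recipe did not match anything'
    if any(fnmatch(pn, name) for name in ['linux-*']):   # stand-in for COPYLEFT_PN_INCLUDE
        included, motive = True, 'recipe included by name'
    if any(fnmatch(pn, name) for name in ['*-native']):  # stand-in for COPYLEFT_PN_EXCLUDE
        included, motive = False, 'recipe excluded by name'
    print(included, motive)  # True recipe included by name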
diff --git a/meta/classes/cross-canadian.bbclass b/meta/classes/cross-canadian.bbclass
index 21921b3dd0..64db1134f5 100644
--- a/meta/classes/cross-canadian.bbclass
+++ b/meta/classes/cross-canadian.bbclass
@@ -20,25 +20,25 @@ CANADIANEXTRAOS = "${BASECANADIANEXTRAOS}"
 CANADIANEXTRAVENDOR = ""
 MODIFYTOS ??= "1"
 python () {
-    archs = d.getVar('PACKAGE_ARCHS', True).split()
+    archs = d.getVar('PACKAGE_ARCHS').split()
     sdkarchs = []
     for arch in archs:
         sdkarchs.append(arch + '-${SDKPKGSUFFIX}')
     d.setVar('PACKAGE_ARCHS', " ".join(sdkarchs))
 
     # Allow the following code segment to be disabled, e.g. meta-environment
-    if d.getVar("MODIFYTOS", True) != "1":
+    if d.getVar("MODIFYTOS") != "1":
         return
 
-    if d.getVar("TCLIBC", True) == "baremetal":
+    if d.getVar("TCLIBC") == "baremetal":
         return
 
-    tos = d.getVar("TARGET_OS", True)
+    tos = d.getVar("TARGET_OS")
     whitelist = []
     extralibcs = [""]
-    if "uclibc" in d.getVar("BASECANADIANEXTRAOS", True):
+    if "uclibc" in d.getVar("BASECANADIANEXTRAOS"):
         extralibcs.append("uclibc")
-    if "musl" in d.getVar("BASECANADIANEXTRAOS", True):
+    if "musl" in d.getVar("BASECANADIANEXTRAOS"):
         extralibcs.append("musl")
     for variant in ["", "spe", "x32", "eabi", "n32"]:
         for libc in extralibcs:
@@ -51,33 +51,33 @@ python () {
                     entry = entry + "-" + libc
                 whitelist.append(entry)
     if tos not in whitelist:
-        bb.fatal("Building cross-candian for an unknown TARGET_SYS (%s), please update cross-canadian.bbclass" % d.getVar("TARGET_SYS", True))
+        bb.fatal("Building cross-candian for an unknown TARGET_SYS (%s), please update cross-canadian.bbclass" % d.getVar("TARGET_SYS"))
 
     for n in ["PROVIDES", "DEPENDS"]:
-        d.setVar(n, d.getVar(n, True))
-    d.setVar("STAGING_BINDIR_TOOLCHAIN", d.getVar("STAGING_BINDIR_TOOLCHAIN", True))
+        d.setVar(n, d.getVar(n))
+    d.setVar("STAGING_BINDIR_TOOLCHAIN", d.getVar("STAGING_BINDIR_TOOLCHAIN"))
     for prefix in ["AR", "AS", "DLLTOOL", "CC", "CXX", "GCC", "LD", "LIPO", "NM", "OBJDUMP", "RANLIB", "STRIP", "WINDRES"]:
         n = prefix + "_FOR_TARGET"
-        d.setVar(n, d.getVar(n, True))
+        d.setVar(n, d.getVar(n))
     # This is a bit ugly. We need to zero LIBC/ABI extension which will change TARGET_OS
     # however we need the old value in some variables. We expand those here first.
-    tarch = d.getVar("TARGET_ARCH", True)
+    tarch = d.getVar("TARGET_ARCH")
     if tarch == "x86_64":
         d.setVar("LIBCEXTENSION", "")
         d.setVar("ABIEXTENSION", "")
         d.appendVar("CANADIANEXTRAOS", " linux-gnux32")
-        for extraos in d.getVar("BASECANADIANEXTRAOS", True).split():
+        for extraos in d.getVar("BASECANADIANEXTRAOS").split():
             d.appendVar("CANADIANEXTRAOS", " " + extraos + "x32")
     elif tarch == "powerpc":
         # PowerPC can build "linux" and "linux-gnuspe"
         d.setVar("LIBCEXTENSION", "")
         d.setVar("ABIEXTENSION", "")
         d.appendVar("CANADIANEXTRAOS", " linux-gnuspe")
-        for extraos in d.getVar("BASECANADIANEXTRAOS", True).split():
+        for extraos in d.getVar("BASECANADIANEXTRAOS").split():
             d.appendVar("CANADIANEXTRAOS", " " + extraos + "spe")
     elif tarch == "mips64":
         d.appendVar("CANADIANEXTRAOS", " linux-gnun32")
-        for extraos in d.getVar("BASECANADIANEXTRAOS", True).split():
+        for extraos in d.getVar("BASECANADIANEXTRAOS").split():
             d.appendVar("CANADIANEXTRAOS", " " + extraos + "n32")
     if tarch == "arm" or tarch == "armeb":
         d.appendVar("CANADIANEXTRAOS", " linux-gnueabi linux-musleabi linux-uclibceabi")
@@ -86,10 +86,10 @@ python () {
         d.setVar("TARGET_OS", "linux")
 
     # Also need to handle multilib target vendors
-    vendors = d.getVar("CANADIANEXTRAVENDOR", True)
+    vendors = d.getVar("CANADIANEXTRAVENDOR")
     if not vendors:
         vendors = all_multilib_tune_values(d, 'TARGET_VENDOR')
-    origvendor = d.getVar("TARGET_VENDOR_MULTILIB_ORIGINAL", True)
+    origvendor = d.getVar("TARGET_VENDOR_MULTILIB_ORIGINAL")
     if origvendor:
         d.setVar("TARGET_VENDOR", origvendor)
         if origvendor not in vendors.split():
@@ -116,7 +116,7 @@ HOST_LD_ARCH = "${SDK_LD_ARCH}"
 HOST_AS_ARCH = "${SDK_AS_ARCH}"
 
 #assign DPKG_ARCH
-DPKG_ARCH = "${@debian_arch_map(d.getVar('SDK_ARCH', True), '')}"
+DPKG_ARCH = "${@debian_arch_map(d.getVar('SDK_ARCH'), '')}"
 
 CPPFLAGS = "${BUILDSDK_CPPFLAGS}"
 CFLAGS = "${BUILDSDK_CFLAGS}"
diff --git a/meta/classes/crosssdk.bbclass b/meta/classes/crosssdk.bbclass
index 7315c38f13..eaf2beb94d 100644
--- a/meta/classes/crosssdk.bbclass
+++ b/meta/classes/crosssdk.bbclass
@@ -5,7 +5,7 @@ MACHINEOVERRIDES = ""
 PACKAGE_ARCH = "${SDK_ARCH}"
 python () {
     # set TUNE_PKGARCH to SDK_ARCH
-    d.setVar('TUNE_PKGARCH', d.getVar('SDK_ARCH', True))
+    d.setVar('TUNE_PKGARCH', d.getVar('SDK_ARCH'))
 }
 
 STAGING_DIR_TARGET = "${STAGING_DIR}/${SDK_ARCH}-${SDKPKGSUFFIX}${SDK_VENDOR}-${SDK_OS}"
diff --git a/meta/classes/cve-check.bbclass b/meta/classes/cve-check.bbclass
index 75b8fa9ab9..aad0573ee9 100644
--- a/meta/classes/cve-check.bbclass
+++ b/meta/classes/cve-check.bbclass
@@ -51,7 +51,7 @@ python do_cve_check () {
     Check recipe for patched and unpatched CVEs
     """
 
-    if os.path.exists(d.getVar("CVE_CHECK_TMP_FILE", True)):
+    if os.path.exists(d.getVar("CVE_CHECK_TMP_FILE")):
         patched_cves = get_patches_cves(d)
         patched, unpatched = check_cves(d, patched_cves)
         if patched or unpatched:
@@ -70,7 +70,7 @@ python cve_check_cleanup () {
     Delete the file used to gather all the CVE information.
     """
 
-    bb.utils.remove(e.data.getVar("CVE_CHECK_TMP_FILE", True))
+    bb.utils.remove(e.data.getVar("CVE_CHECK_TMP_FILE"))
 }
 
 addhandler cve_check_cleanup
@@ -83,12 +83,12 @@ python cve_check_write_rootfs_manifest () {
 
     import shutil
 
-    if os.path.exists(d.getVar("CVE_CHECK_TMP_FILE", True)):
+    if os.path.exists(d.getVar("CVE_CHECK_TMP_FILE")):
         bb.note("Writing rootfs CVE manifest")
-        deploy_dir = d.getVar("DEPLOY_DIR_IMAGE", True)
-        link_name = d.getVar("IMAGE_LINK_NAME", True)
-        manifest_name = d.getVar("CVE_CHECK_MANIFEST", True)
-        cve_tmp_file = d.getVar("CVE_CHECK_TMP_FILE", True)
+        deploy_dir = d.getVar("DEPLOY_DIR_IMAGE")
+        link_name = d.getVar("IMAGE_LINK_NAME")
+        manifest_name = d.getVar("CVE_CHECK_MANIFEST")
+        cve_tmp_file = d.getVar("CVE_CHECK_TMP_FILE")
 
         shutil.copyfile(cve_tmp_file, manifest_name)
 
@@ -101,7 +101,7 @@ python cve_check_write_rootfs_manifest () {
         bb.plain("Image CVE report stored in: %s" % manifest_name)
 }
 
-ROOTFS_POSTPROCESS_COMMAND_prepend = "${@'cve_check_write_rootfs_manifest; ' if d.getVar('CVE_CHECK_CREATE_MANIFEST', True) == '1' else ''}"
+ROOTFS_POSTPROCESS_COMMAND_prepend = "${@'cve_check_write_rootfs_manifest; ' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"
 
 def get_patches_cves(d):
     """
@@ -110,7 +110,7 @@ def get_patches_cves(d):
 
     import re
 
-    pn = d.getVar("PN", True)
+    pn = d.getVar("PN")
     cve_match = re.compile("CVE:( CVE\-\d{4}\-\d+)+")
     patched_cves = set()
     bb.debug(2, "Looking for patches that solves CVEs for %s" % pn)
@@ -149,15 +149,15 @@ def check_cves(d, patched_cves):
     cves_patched = []
     cves_unpatched = []
     bpn = d.getVar("CVE_PRODUCT")
-    pv = d.getVar("PV", True).split("git+")[0]
+    pv = d.getVar("PV").split("git+")[0]
     cves = " ".join(patched_cves)
-    cve_db_dir = d.getVar("CVE_CHECK_DB_DIR", True)
-    cve_whitelist = ast.literal_eval(d.getVar("CVE_CHECK_CVE_WHITELIST", True))
+    cve_db_dir = d.getVar("CVE_CHECK_DB_DIR")
+    cve_whitelist = ast.literal_eval(d.getVar("CVE_CHECK_CVE_WHITELIST"))
     cve_cmd = "cve-check-tool"
     cmd = [cve_cmd, "--no-html", "--csv", "--not-affected", "-t", "faux", "-d", cve_db_dir]
 
     # If the recipe has been whitlisted we return empty lists
-    if d.getVar("PN", True) in d.getVar("CVE_CHECK_PN_WHITELIST", True).split():
+    if d.getVar("PN") in d.getVar("CVE_CHECK_PN_WHITELIST").split():
         bb.note("Recipe has been whitelisted, skipping check")
         return ([], [])
 
@@ -210,7 +210,7 @@ def get_cve_info(d, cves):
         from pysqlite2 import dbapi2 as sqlite3
 
     cve_data = {}
-    db_file = d.getVar("CVE_CHECK_DB_FILE", True)
+    db_file = d.getVar("CVE_CHECK_DB_FILE")
     placeholder = ",".join("?" * len(cves))
     query = "SELECT * FROM NVD WHERE id IN (%s)" % placeholder
     conn = sqlite3.connect(db_file)
@@ -231,15 +231,15 @@ def cve_write_data(d, patched, unpatched, cve_data):
     CVE manifest if enabled.
     """
 
-    cve_file = d.getVar("CVE_CHECK_LOCAL_FILE", True)
+    cve_file = d.getVar("CVE_CHECK_LOCAL_FILE")
     nvd_link = "https://web.nvd.nist.gov/view/vuln/detail?vulnId="
     write_string = ""
     first_alert = True
-    bb.utils.mkdirhier(d.getVar("CVE_CHECK_LOCAL_DIR", True))
+    bb.utils.mkdirhier(d.getVar("CVE_CHECK_LOCAL_DIR"))
 
     for cve in sorted(cve_data):
-        write_string += "PACKAGE NAME: %s\n" % d.getVar("PN", True)
-        write_string += "PACKAGE VERSION: %s\n" % d.getVar("PV", True)
+        write_string += "PACKAGE NAME: %s\n" % d.getVar("PN")
+        write_string += "PACKAGE VERSION: %s\n" % d.getVar("PV")
         write_string += "CVE: %s\n" % cve
         if cve in patched:
             write_string += "CVE STATUS: Patched\n"
@@ -257,13 +257,13 @@ def cve_write_data(d, patched, unpatched, cve_data):
         bb.note("Writing file %s with CVE information" % cve_file)
         f.write(write_string)
 
-    if d.getVar("CVE_CHECK_COPY_FILES", True) == "1":
-        cve_dir = d.getVar("CVE_CHECK_DIR", True)
+    if d.getVar("CVE_CHECK_COPY_FILES") == "1":
+        cve_dir = d.getVar("CVE_CHECK_DIR")
         bb.utils.mkdirhier(cve_dir)
-        deploy_file = os.path.join(cve_dir, d.getVar("PN", True))
+        deploy_file = os.path.join(cve_dir, d.getVar("PN"))
         with open(deploy_file, "w") as f:
             f.write(write_string)
 
-    if d.getVar("CVE_CHECK_CREATE_MANIFEST", True) == "1":
-        with open(d.getVar("CVE_CHECK_TMP_FILE", True), "a") as f:
+    if d.getVar("CVE_CHECK_CREATE_MANIFEST") == "1":
+        with open(d.getVar("CVE_CHECK_TMP_FILE"), "a") as f:
             f.write("%s" % write_string)
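check_cves() above parses CVE_CHECK_CVE_WHITELIST with ast.literal_eval, so the metadata value must be a Python dict literal mapping CVE IDs to the versions they are waived for. An illustrative parse, with a made-up whitelist entry:

    import ast

    whitelist = ast.literal_eval("{'CVE-2014-2524': ('6.3',)}")  # hypothetical value
    pv = '6.3'
    if pv in whitelist.get('CVE-2014-2524', ()):
        print('CVE-2014-2524 is whitelisted for version %s' % pv)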
diff --git a/meta/classes/debian.bbclass b/meta/classes/debian.bbclass
index be7cacca98..8124558b81 100644
--- a/meta/classes/debian.bbclass
+++ b/meta/classes/debian.bbclass
@@ -20,17 +20,17 @@ do_package_write_tar[rdeptask] = "${DEBIANRDEP}"
 do_package_write_rpm[rdeptask] = "${DEBIANRDEP}"
 
 python () {
-    if not d.getVar("PACKAGES", True):
+    if not d.getVar("PACKAGES"):
         d.setVar("DEBIANRDEP", "")
 }
 
 python debian_package_name_hook () {
     import glob, copy, stat, errno, re
 
-    pkgdest = d.getVar('PKGDEST', True)
-    packages = d.getVar('PACKAGES', True)
-    bin_re = re.compile(".*/s?" + os.path.basename(d.getVar("bindir", True)) + "$")
-    lib_re = re.compile(".*/" + os.path.basename(d.getVar("libdir", True)) + "$")
+    pkgdest = d.getVar('PKGDEST')
+    packages = d.getVar('PACKAGES')
+    bin_re = re.compile(".*/s?" + os.path.basename(d.getVar("bindir")) + "$")
+    lib_re = re.compile(".*/" + os.path.basename(d.getVar("libdir")) + "$")
     so_re = re.compile("lib.*\.so")
 
     def socrunch(s):
@@ -53,11 +53,11 @@ python debian_package_name_hook () {
         return (s[stat.ST_MODE] & stat.S_IEXEC)
 
     def add_rprovides(pkg, d):
-        newpkg = d.getVar('PKG_' + pkg, True)
+        newpkg = d.getVar('PKG_' + pkg)
         if newpkg and newpkg != pkg:
-            provs = (d.getVar('RPROVIDES_' + pkg, True) or "").split()
+            provs = (d.getVar('RPROVIDES_' + pkg) or "").split()
             if pkg not in provs:
-                d.appendVar('RPROVIDES_' + pkg, " " + pkg + " (=" + d.getVar("PKGV", True) + ")")
+                d.appendVar('RPROVIDES_' + pkg, " " + pkg + " (=" + d.getVar("PKGV") + ")")
 
     def auto_libname(packages, orig_pkg):
         sonames = []
@@ -70,7 +70,7 @@ python debian_package_name_hook () {
                 if lib_re.match(root):
                     has_libs = 1
                 if so_re.match(os.path.basename(file)):
-                    cmd = (d.getVar('TARGET_PREFIX', True) or "") + "objdump -p " + file + " 2>/dev/null"
+                    cmd = (d.getVar('TARGET_PREFIX') or "") + "objdump -p " + file + " 2>/dev/null"
                     fd = os.popen(cmd)
                     lines = fd.readlines()
                     fd.close()
@@ -84,7 +84,7 @@ python debian_package_name_hook () {
         if len(sonames) == 1:
             soname = sonames[0]
         elif len(sonames) > 1:
-            lead = d.getVar('LEAD_SONAME', True)
+            lead = d.getVar('LEAD_SONAME')
             if lead:
                 r = re.compile(lead)
                 filtered = []
@@ -115,7 +115,7 @@ python debian_package_name_hook () {
                 newpkg = pkgname
             else:
                 newpkg = pkg.replace(orig_pkg, devname, 1)
-            mlpre=d.getVar('MLPREFIX', True)
+            mlpre=d.getVar('MLPREFIX')
             if mlpre:
                 if not newpkg.find(mlpre) == 0:
                     newpkg = mlpre + newpkg
@@ -131,7 +131,7 @@ python debian_package_name_hook () {
     # and later
     # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libticw orig_pkg ncurses-libtic debian_pn None newpkg libticw
     # so we need to handle ncurses-libticw->libticw5 before ncurses-libtic->libtic5
-    for pkg in sorted((d.getVar('AUTO_LIBNAME_PKGS', True) or "").split(), reverse=True):
+    for pkg in sorted((d.getVar('AUTO_LIBNAME_PKGS') or "").split(), reverse=True):
         auto_libname(packages, pkg)
 }
 
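debian_package_name_hook's auto_libname() derives Debian-style library package names from the SONAME recorded in each shared object, scraped from objdump -p output. The core of that scan as a stand-alone sketch (the library path is an example; the cross TARGET_PREFIX is omitted):

    import re
    import subprocess

    out = subprocess.run(['objdump', '-p', '/lib/x86_64-linux-gnu/libc.so.6'],
                         capture_output=True, text=True).stdout
    for line in out.splitlines():
        m = re.match(r'\s+SONAME\s+(\S+)', line)
        if m:
            print(m.group(1))  # e.g. libc.so.6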
diff --git a/meta/classes/devshell.bbclass b/meta/classes/devshell.bbclass
index be71aff35f..864ace4cb4 100644
--- a/meta/classes/devshell.bbclass
+++ b/meta/classes/devshell.bbclass
@@ -5,14 +5,14 @@ DEVSHELL = "${SHELL}"
 python do_devshell () {
     if d.getVarFlag("do_devshell", "manualfakeroot", True):
         d.prependVar("DEVSHELL", "pseudo ")
-        fakeenv = d.getVar("FAKEROOTENV", True).split()
+        fakeenv = d.getVar("FAKEROOTENV").split()
         for f in fakeenv:
             k = f.split("=")
             d.setVar(k[0], k[1])
             d.appendVar("OE_TERMINAL_EXPORTS", " " + k[0])
         d.delVarFlag("do_devshell", "fakeroot")
 
-    oe_terminal(d.getVar('DEVSHELL', True), 'OpenEmbedded Developer Shell', d)
+    oe_terminal(d.getVar('DEVSHELL'), 'OpenEmbedded Developer Shell', d)
 }
 
 addtask devshell after do_patch
@@ -82,7 +82,7 @@ def devpyshell(d):
                     more = False
 
     i = code.InteractiveInterpreter(locals=_context)
-    print("OE PyShell (PN = %s)\n" % d.getVar("PN", True))
+    print("OE PyShell (PN = %s)\n" % d.getVar("PN"))
 
     def prompt(more):
         if more:
diff --git a/meta/classes/distro_features_check.bbclass b/meta/classes/distro_features_check.bbclass
index 7e91dbcf4a..e74d3c04ba 100644
--- a/meta/classes/distro_features_check.bbclass
+++ b/meta/classes/distro_features_check.bbclass
@@ -11,15 +11,15 @@
 
 python () {
     # Assume at least one var is set.
-    distro_features = (d.getVar('DISTRO_FEATURES', True) or "").split()
+    distro_features = (d.getVar('DISTRO_FEATURES') or "").split()
 
-    any_of_distro_features = d.getVar('ANY_OF_DISTRO_FEATURES', True)
+    any_of_distro_features = d.getVar('ANY_OF_DISTRO_FEATURES')
     if any_of_distro_features:
         any_of_distro_features = any_of_distro_features.split()
         if set.isdisjoint(set(any_of_distro_features),set(distro_features)):
             raise bb.parse.SkipPackage("one of '%s' needs to be in DISTRO_FEATURES" % any_of_distro_features)
 
-    required_distro_features = d.getVar('REQUIRED_DISTRO_FEATURES', True)
+    required_distro_features = d.getVar('REQUIRED_DISTRO_FEATURES')
     if required_distro_features:
         required_distro_features = required_distro_features.split()
         for f in required_distro_features:
@@ -28,7 +28,7 @@ python () {
             else:
                 raise bb.parse.SkipPackage("missing required distro feature '%s' (not in DISTRO_FEATURES)" % f)
 
-    conflict_distro_features = d.getVar('CONFLICT_DISTRO_FEATURES', True)
+    conflict_distro_features = d.getVar('CONFLICT_DISTRO_FEATURES')
     if conflict_distro_features:
         conflict_distro_features = conflict_distro_features.split()
         for f in conflict_distro_features:
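The three checks in this class reduce to simple set algebra over DISTRO_FEATURES: ANY_OF needs a non-empty intersection, REQUIRED needs full containment, CONFLICT needs an empty intersection. Restated in plain Python (feature lists are examples):

    distro_features = set('systemd x11 wayland'.split())

    any_of   = set('x11 wayland'.split())
    required = set('systemd'.split())
    conflict = set('sysvinit'.split())

    assert not any_of.isdisjoint(distro_features)  # at least one present
    assert required <= distro_features             # all present
    assert conflict.isdisjoint(distro_features)    # none present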
diff --git a/meta/classes/distrodata.bbclass b/meta/classes/distrodata.bbclass
index fbb7402e0c..5b3a3e0f1c 100644
--- a/meta/classes/distrodata.bbclass
+++ b/meta/classes/distrodata.bbclass
@@ -25,75 +25,75 @@ addtask distrodata_np
 do_distrodata_np[nostamp] = "1"
 python do_distrodata_np() {
     localdata = bb.data.createCopy(d)
-    pn = d.getVar("PN", True)
+    pn = d.getVar("PN")
     bb.note("Package Name: %s" % pn)
 
     import oe.distro_check as dist_check
-    tmpdir = d.getVar('TMPDIR', True)
+    tmpdir = d.getVar('TMPDIR')
     distro_check_dir = os.path.join(tmpdir, "distro_check")
-    datetime = localdata.getVar('DATETIME', True)
+    datetime = localdata.getVar('DATETIME')
     dist_check.update_distro_data(distro_check_dir, datetime, localdata)
 
     if pn.find("-native") != -1:
         pnstripped = pn.split("-native")
         bb.note("Native Split: %s" % pnstripped)
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)
 
     if pn.find("-cross") != -1:
         pnstripped = pn.split("-cross")
         bb.note("cross Split: %s" % pnstripped)
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)
 
     if pn.find("-crosssdk") != -1:
         pnstripped = pn.split("-crosssdk")
         bb.note("cross Split: %s" % pnstripped)
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)
 
     if pn.startswith("nativesdk-"):
         pnstripped = pn.replace("nativesdk-", "")
         bb.note("NativeSDK Split: %s" % pnstripped)
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)
 
 
     if pn.find("-initial") != -1:
         pnstripped = pn.split("-initial")
         bb.note("initial Split: %s" % pnstripped)
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)
 
     """generate package information from .bb file"""
-    pname = localdata.getVar('PN', True)
-    pcurver = localdata.getVar('PV', True)
-    pdesc = localdata.getVar('DESCRIPTION', True)
+    pname = localdata.getVar('PN')
+    pcurver = localdata.getVar('PV')
+    pdesc = localdata.getVar('DESCRIPTION')
     if pdesc is not None:
         pdesc = pdesc.replace(',','')
         pdesc = pdesc.replace('\n','')
 
-    pgrp = localdata.getVar('SECTION', True)
-    plicense = localdata.getVar('LICENSE', True).replace(',','_')
+    pgrp = localdata.getVar('SECTION')
+    plicense = localdata.getVar('LICENSE').replace(',','_')
 
-    rstatus = localdata.getVar('RECIPE_COLOR', True)
+    rstatus = localdata.getVar('RECIPE_COLOR')
     if rstatus is not None:
         rstatus = rstatus.replace(',','')
 
-    pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION', True)
+    pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION')
     if pcurver == pupver:
         vermatch="1"
     else:
         vermatch="0"
-    noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON', True)
+    noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON')
     if noupdate_reason is None:
         noupdate="0"
     else:
         noupdate="1"
         noupdate_reason = noupdate_reason.replace(',','')
 
-    maintainer = localdata.getVar('RECIPE_MAINTAINER', True)
-    rlrd = localdata.getVar('RECIPE_UPSTREAM_DATE', True)
+    maintainer = localdata.getVar('RECIPE_MAINTAINER')
+    rlrd = localdata.getVar('RECIPE_UPSTREAM_DATE')
     result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata)
 
     bb.note("DISTRO: %s,%s,%s,%s,%s,%s,%s,%s,%s\n" % \
@@ -109,80 +109,80 @@ addtask distrodata
 do_distrodata[nostamp] = "1"
 python do_distrodata() {
     import csv
-    logpath = d.getVar('LOG_DIR', True)
+    logpath = d.getVar('LOG_DIR')
     bb.utils.mkdirhier(logpath)
     logfile = os.path.join(logpath, "distrodata.csv")
 
     import oe.distro_check as dist_check
     localdata = bb.data.createCopy(d)
-    tmpdir = d.getVar('TMPDIR', True)
+    tmpdir = d.getVar('TMPDIR')
     distro_check_dir = os.path.join(tmpdir, "distro_check")
-    datetime = localdata.getVar('DATETIME', True)
+    datetime = localdata.getVar('DATETIME')
     dist_check.update_distro_data(distro_check_dir, datetime, localdata)
 
-    pn = d.getVar("PN", True)
+    pn = d.getVar("PN")
     bb.note("Package Name: %s" % pn)
 
     if pn.find("-native") != -1:
         pnstripped = pn.split("-native")
         bb.note("Native Split: %s" % pnstripped)
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)
 
     if pn.startswith("nativesdk-"):
         pnstripped = pn.replace("nativesdk-", "")
         bb.note("NativeSDK Split: %s" % pnstripped)
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)
 
     if pn.find("-cross") != -1:
         pnstripped = pn.split("-cross")
         bb.note("cross Split: %s" % pnstripped)
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)
 
     if pn.find("-crosssdk") != -1:
         pnstripped = pn.split("-crosssdk")
         bb.note("cross Split: %s" % pnstripped)
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)
 
     if pn.find("-initial") != -1:
         pnstripped = pn.split("-initial")
         bb.note("initial Split: %s" % pnstripped)
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)
 
     """generate package information from .bb file"""
-    pname = localdata.getVar('PN', True)
-    pcurver = localdata.getVar('PV', True)
-    pdesc = localdata.getVar('DESCRIPTION', True)
+    pname = localdata.getVar('PN')
+    pcurver = localdata.getVar('PV')
+    pdesc = localdata.getVar('DESCRIPTION')
     if pdesc is not None:
         pdesc = pdesc.replace(',','')
         pdesc = pdesc.replace('\n','')
 
-    pgrp = localdata.getVar('SECTION', True)
-    plicense = localdata.getVar('LICENSE', True).replace(',','_')
+    pgrp = localdata.getVar('SECTION')
+    plicense = localdata.getVar('LICENSE').replace(',','_')
 
-    rstatus = localdata.getVar('RECIPE_COLOR', True)
+    rstatus = localdata.getVar('RECIPE_COLOR')
     if rstatus is not None:
         rstatus = rstatus.replace(',','')
 
-    pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION', True)
+    pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION')
     if pcurver == pupver:
         vermatch="1"
     else:
         vermatch="0"
 
-    noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON', True)
+    noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON')
     if noupdate_reason is None:
         noupdate="0"
     else:
         noupdate="1"
         noupdate_reason = noupdate_reason.replace(',','')
 
-    maintainer = localdata.getVar('RECIPE_MAINTAINER', True)
-    rlrd = localdata.getVar('RECIPE_UPSTREAM_DATE', True)
+    maintainer = localdata.getVar('RECIPE_MAINTAINER')
+    rlrd = localdata.getVar('RECIPE_UPSTREAM_DATE')
     # do the comparison
     result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata)
 
@@ -272,60 +272,60 @@ python do_checkpkg() {
272 from bb.fetch2 import FetchError, NoMethodError, decodeurl 272 from bb.fetch2 import FetchError, NoMethodError, decodeurl
273 273
274 """first check whether a uri is provided""" 274 """first check whether a uri is provided"""
275 src_uri = (d.getVar('SRC_URI', True) or '').split() 275 src_uri = (d.getVar('SRC_URI') or '').split()
276 if src_uri: 276 if src_uri:
277 uri_type, _, _, _, _, _ = decodeurl(src_uri[0]) 277 uri_type, _, _, _, _, _ = decodeurl(src_uri[0])
278 else: 278 else:
279 uri_type = "none" 279 uri_type = "none"
280 280
281 """initialize log files.""" 281 """initialize log files."""
282 logpath = d.getVar('LOG_DIR', True) 282 logpath = d.getVar('LOG_DIR')
283 bb.utils.mkdirhier(logpath) 283 bb.utils.mkdirhier(logpath)
284 logfile = os.path.join(logpath, "checkpkg.csv") 284 logfile = os.path.join(logpath, "checkpkg.csv")
285 285
286 """generate package information from .bb file""" 286 """generate package information from .bb file"""
287 pname = d.getVar('PN', True) 287 pname = d.getVar('PN')
288 288
289 if pname.find("-native") != -1: 289 if pname.find("-native") != -1:
290 if d.getVar('BBCLASSEXTEND', True): 290 if d.getVar('BBCLASSEXTEND'):
291 return 291 return
292 pnstripped = pname.split("-native") 292 pnstripped = pname.split("-native")
293 bb.note("Native Split: %s" % pnstripped) 293 bb.note("Native Split: %s" % pnstripped)
294 localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) 294 localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
295 bb.data.update_data(localdata) 295 bb.data.update_data(localdata)
296 296
297 if pname.startswith("nativesdk-"): 297 if pname.startswith("nativesdk-"):
298 if d.getVar('BBCLASSEXTEND', True): 298 if d.getVar('BBCLASSEXTEND'):
299 return 299 return
300 pnstripped = pname.replace("nativesdk-", "") 300 pnstripped = pname.replace("nativesdk-", "")
301 bb.note("NativeSDK Split: %s" % pnstripped) 301 bb.note("NativeSDK Split: %s" % pnstripped)
302 localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES', True)) 302 localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES'))
303 bb.data.update_data(localdata) 303 bb.data.update_data(localdata)
304 304
305 if pname.find("-cross") != -1: 305 if pname.find("-cross") != -1:
306 pnstripped = pname.split("-cross") 306 pnstripped = pname.split("-cross")
307 bb.note("cross Split: %s" % pnstripped) 307 bb.note("cross Split: %s" % pnstripped)
308 localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) 308 localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
309 bb.data.update_data(localdata) 309 bb.data.update_data(localdata)
310 310
311 if pname.find("-initial") != -1: 311 if pname.find("-initial") != -1:
312 pnstripped = pname.split("-initial") 312 pnstripped = pname.split("-initial")
313 bb.note("initial Split: %s" % pnstripped) 313 bb.note("initial Split: %s" % pnstripped)
314 localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) 314 localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
315 bb.data.update_data(localdata) 315 bb.data.update_data(localdata)
316 316
317 pdesc = localdata.getVar('DESCRIPTION', True) 317 pdesc = localdata.getVar('DESCRIPTION')
318 pgrp = localdata.getVar('SECTION', True) 318 pgrp = localdata.getVar('SECTION')
319 pversion = localdata.getVar('PV', True) 319 pversion = localdata.getVar('PV')
320 plicense = localdata.getVar('LICENSE', True) 320 plicense = localdata.getVar('LICENSE')
321 psection = localdata.getVar('SECTION', True) 321 psection = localdata.getVar('SECTION')
322 phome = localdata.getVar('HOMEPAGE', True) 322 phome = localdata.getVar('HOMEPAGE')
323 prelease = localdata.getVar('PR', True) 323 prelease = localdata.getVar('PR')
324 pdepends = localdata.getVar('DEPENDS', True) 324 pdepends = localdata.getVar('DEPENDS')
325 pbugtracker = localdata.getVar('BUGTRACKER', True) 325 pbugtracker = localdata.getVar('BUGTRACKER')
326 ppe = localdata.getVar('PE', True) 326 ppe = localdata.getVar('PE')
327 psrcuri = localdata.getVar('SRC_URI', True) 327 psrcuri = localdata.getVar('SRC_URI')
328 maintainer = localdata.getVar('RECIPE_MAINTAINER', True) 328 maintainer = localdata.getVar('RECIPE_MAINTAINER')
329 329
330 """ Get upstream version version """ 330 """ Get upstream version version """
331 pupver = "" 331 pupver = ""
@@ -362,7 +362,7 @@ python do_checkpkg() {
362 psrcuri = "none" 362 psrcuri = "none"
363 pdepends = "".join(pdepends.split("\t")) 363 pdepends = "".join(pdepends.split("\t"))
364 pdesc = "".join(pdesc.split("\t")) 364 pdesc = "".join(pdesc.split("\t"))
365 no_upgr_reason = d.getVar('RECIPE_NO_UPDATE_REASON', True) 365 no_upgr_reason = d.getVar('RECIPE_NO_UPDATE_REASON')
366 lf = bb.utils.lockfile("%s.lock" % logfile) 366 lf = bb.utils.lockfile("%s.lock" % logfile)
367 with open(logfile, "a") as f: 367 with open(logfile, "a") as f:
368 writer = csv.writer(f, delimiter='\t') 368 writer = csv.writer(f, delimiter='\t')
@@ -401,12 +401,12 @@ python do_distro_check() {
401 401
402 localdata = bb.data.createCopy(d) 402 localdata = bb.data.createCopy(d)
403 bb.data.update_data(localdata) 403 bb.data.update_data(localdata)
404 tmpdir = d.getVar('TMPDIR', True) 404 tmpdir = d.getVar('TMPDIR')
405 distro_check_dir = os.path.join(tmpdir, "distro_check") 405 distro_check_dir = os.path.join(tmpdir, "distro_check")
406 logpath = d.getVar('LOG_DIR', True) 406 logpath = d.getVar('LOG_DIR')
407 bb.utils.mkdirhier(logpath) 407 bb.utils.mkdirhier(logpath)
408 result_file = os.path.join(logpath, "distrocheck.csv") 408 result_file = os.path.join(logpath, "distrocheck.csv")
409 datetime = localdata.getVar('DATETIME', True) 409 datetime = localdata.getVar('DATETIME')
410 dc.update_distro_data(distro_check_dir, datetime, localdata) 410 dc.update_distro_data(distro_check_dir, datetime, localdata)
411 411
412 # do the comparison 412 # do the comparison
@@ -449,12 +449,12 @@ do_checklicense[nostamp] = "1"
449python do_checklicense() { 449python do_checklicense() {
450 import csv 450 import csv
451 import shutil 451 import shutil
452 logpath = d.getVar('LOG_DIR', True) 452 logpath = d.getVar('LOG_DIR')
453 bb.utils.mkdirhier(logpath) 453 bb.utils.mkdirhier(logpath)
454 pn = d.getVar('PN', True) 454 pn = d.getVar('PN')
455 logfile = os.path.join(logpath, "missinglicense.csv") 455 logfile = os.path.join(logpath, "missinglicense.csv")
456 generic_directory = d.getVar('COMMON_LICENSE_DIR', True) 456 generic_directory = d.getVar('COMMON_LICENSE_DIR')
457 license_types = d.getVar('LICENSE', True) 457 license_types = d.getVar('LICENSE')
458 for license_type in ((license_types.replace('+', '').replace('|', '&') 458 for license_type in ((license_types.replace('+', '').replace('|', '&')
459 .replace('(', '').replace(')', '').replace(';', '') 459 .replace('(', '').replace(')', '').replace(';', '')
460 .replace(',', '').replace(" ", "").split("&"))): 460 .replace(',', '').replace(" ", "").split("&"))):
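
The change running through all of these hunks is the same one: the datastore's getVar() now defaults its expand argument to True, so the ubiquitous d.getVar('FOO', True) spelling loses the redundant second argument, while d.getVar('FOO', False) call sites stay untouched because they still want the raw, unexpanded value. A minimal toy sketch of the semantics in plain Python (a stand-in written for this note, not BitBake's real DataSmart class):

    import re

    class MockData:
        """Toy stand-in for the BitBake datastore, just to show the default."""
        def __init__(self):
            self._vars = {}

        def setVar(self, name, value):
            self._vars[name] = value

        def expand(self, value):
            # Crude ${VAR} substitution, enough for the demonstration.
            return re.sub(r'\$\{(\w+)\}',
                          lambda m: self._vars.get(m.group(1), ''), value)

        def getVar(self, name, expand=True):   # expand used to default to False
            value = self._vars.get(name)
            if expand and isinstance(value, str):
                value = self.expand(value)
            return value

    d = MockData()
    d.setVar('PN', 'busybox')
    d.setVar('B', '${PN}-build')
    assert d.getVar('B') == 'busybox-build'       # expanded by default now
    assert d.getVar('B', False) == '${PN}-build'  # raw value still reachable
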
diff --git a/meta/classes/distutils-base.bbclass b/meta/classes/distutils-base.bbclass
index aa18e8b292..9f398d7051 100644
--- a/meta/classes/distutils-base.bbclass
+++ b/meta/classes/distutils-base.bbclass
@@ -1,4 +1,4 @@
-DEPENDS += "${@["${PYTHON_PN}-native ${PYTHON_PN}", ""][(d.getVar('PACKAGES', True) == '')]}"
+DEPENDS += "${@["${PYTHON_PN}-native ${PYTHON_PN}", ""][(d.getVar('PACKAGES') == '')]}"
 RDEPENDS_${PN} += "${@['', '${PYTHON_PN}-core']['${CLASSOVERRIDE}' == 'class-target']}"

 inherit distutils-common-base pythonnative
diff --git a/meta/classes/distutils3-base.bbclass b/meta/classes/distutils3-base.bbclass
index 82ab6a3d1c..7dbf07ac4b 100644
--- a/meta/classes/distutils3-base.bbclass
+++ b/meta/classes/distutils3-base.bbclass
@@ -1,4 +1,4 @@
-DEPENDS += "${@["${PYTHON_PN}-native ${PYTHON_PN}", ""][(d.getVar('PACKAGES', True) == '')]}"
+DEPENDS += "${@["${PYTHON_PN}-native ${PYTHON_PN}", ""][(d.getVar('PACKAGES') == '')]}"
 RDEPENDS_${PN} += "${@['', '${PYTHON_PN}-core']['${CLASSOVERRIDE}' == 'class-target']}"

 inherit distutils-common-base python3native
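
Both distutils variants pick their DEPENDS with BitBake's inline-Python ${@...} form, indexing a two-element list with a boolean, where True selects element 1. The idiom reduces to plain Python as follows (values illustrative):

    packages = ''                # stands in for d.getVar('PACKAGES')
    deps = ["python3-native python3", ""][packages == '']
    assert deps == ""            # empty PACKAGES disables the dependencies

    packages = 'python3-foo'
    deps = ["python3-native python3", ""][packages == '']
    assert deps == "python3-native python3"
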
diff --git a/meta/classes/externalsrc.bbclass b/meta/classes/externalsrc.bbclass
index 31908c3ca2..5ba6c3472b 100644
--- a/meta/classes/externalsrc.bbclass
+++ b/meta/classes/externalsrc.bbclass
@@ -28,34 +28,34 @@ SRCTREECOVEREDTASKS ?= "do_patch do_unpack do_fetch"
 EXTERNALSRC_SYMLINKS ?= "oe-workdir:${WORKDIR} oe-logs:${T}"

 python () {
-    externalsrc = d.getVar('EXTERNALSRC', True)
+    externalsrc = d.getVar('EXTERNALSRC')

     # If this is the base recipe and EXTERNALSRC is set for it or any of its
     # derivatives, then enable BB_DONT_CACHE to force the recipe to always be
     # re-parsed so that the file-checksums function for do_compile is run every
     # time.
-    bpn = d.getVar('BPN', True)
-    if bpn == d.getVar('PN', True):
-        classextend = (d.getVar('BBCLASSEXTEND', True) or '').split()
+    bpn = d.getVar('BPN')
+    if bpn == d.getVar('PN'):
+        classextend = (d.getVar('BBCLASSEXTEND') or '').split()
         if (externalsrc or
             ('native' in classextend and
-             d.getVar('EXTERNALSRC_pn-%s-native' % bpn, True)) or
+             d.getVar('EXTERNALSRC_pn-%s-native' % bpn)) or
             ('nativesdk' in classextend and
-             d.getVar('EXTERNALSRC_pn-nativesdk-%s' % bpn, True)) or
+             d.getVar('EXTERNALSRC_pn-nativesdk-%s' % bpn)) or
             ('cross' in classextend and
-             d.getVar('EXTERNALSRC_pn-%s-cross' % bpn, True))):
+             d.getVar('EXTERNALSRC_pn-%s-cross' % bpn))):
             d.setVar('BB_DONT_CACHE', '1')

     if externalsrc:
         d.setVar('S', externalsrc)
-        externalsrcbuild = d.getVar('EXTERNALSRC_BUILD', True)
+        externalsrcbuild = d.getVar('EXTERNALSRC_BUILD')
         if externalsrcbuild:
             d.setVar('B', externalsrcbuild)
         else:
             d.setVar('B', '${WORKDIR}/${BPN}-${PV}/')

         local_srcuri = []
-        fetch = bb.fetch2.Fetch((d.getVar('SRC_URI', True) or '').split(), d)
+        fetch = bb.fetch2.Fetch((d.getVar('SRC_URI') or '').split(), d)
         for url in fetch.urls:
             url_data = fetch.ud[url]
             parm = url_data.parm
@@ -94,7 +94,7 @@ python () {
         # Note that we cannot use d.appendVarFlag() here because deps is expected to be a list object, not a string
         d.setVarFlag('do_configure', 'deps', (d.getVarFlag('do_configure', 'deps', False) or []) + ['do_unpack'])

-        for task in d.getVar("SRCTREECOVEREDTASKS", True).split():
+        for task in d.getVar("SRCTREECOVEREDTASKS").split():
             if local_srcuri and task in fetch_tasks:
                 continue
             bb.build.deltask(task, d)
@@ -106,13 +106,13 @@ python () {
         d.setVarFlag('do_configure', 'file-checksums', '${@srctree_configure_hash_files(d)}')

         # We don't want the workdir to go away
-        d.appendVar('RM_WORK_EXCLUDE', ' ' + d.getVar('PN', True))
+        d.appendVar('RM_WORK_EXCLUDE', ' ' + d.getVar('PN'))

         # If B=S the same builddir is used even for different architectures.
         # Thus, use a shared CONFIGURESTAMPFILE and STAMP directory so that
         # change of do_configure task hash is correctly detected and stamps are
         # invalidated if e.g. MACHINE changes.
-        if d.getVar('S', True) == d.getVar('B', True):
+        if d.getVar('S') == d.getVar('B'):
             configstamp = '${TMPDIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}/configure.sstate'
             d.setVar('CONFIGURESTAMPFILE', configstamp)
             d.setVar('STAMP', '${STAMPS_DIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}')
@@ -120,10 +120,10 @@ python () {

 python externalsrc_configure_prefunc() {
     # Create desired symlinks
-    symlinks = (d.getVar('EXTERNALSRC_SYMLINKS', True) or '').split()
+    symlinks = (d.getVar('EXTERNALSRC_SYMLINKS') or '').split()
     for symlink in symlinks:
         symsplit = symlink.split(':', 1)
-        lnkfile = os.path.join(d.getVar('S', True), symsplit[0])
+        lnkfile = os.path.join(d.getVar('S'), symsplit[0])
         target = d.expand(symsplit[1])
         if len(symsplit) > 1:
             if os.path.islink(lnkfile):
@@ -139,7 +139,7 @@ python externalsrc_configure_prefunc() {

 python externalsrc_compile_prefunc() {
     # Make it obvious that this is happening, since forgetting about it could lead to much confusion
-    bb.plain('NOTE: %s: compiling from external source tree %s' % (d.getVar('PN', True), d.getVar('EXTERNALSRC', True)))
+    bb.plain('NOTE: %s: compiling from external source tree %s' % (d.getVar('PN'), d.getVar('EXTERNALSRC')))
 }

 def srctree_hash_files(d):
@@ -147,7 +147,7 @@ def srctree_hash_files(d):
     import subprocess
     import tempfile

-    s_dir = d.getVar('EXTERNALSRC', True)
+    s_dir = d.getVar('EXTERNALSRC')
     git_dir = os.path.join(s_dir, '.git')
     oe_hash_file = os.path.join(git_dir, 'oe-devtool-tree-sha1')

@@ -165,7 +165,7 @@ def srctree_hash_files(d):
             fobj.write(sha1)
         ret = oe_hash_file + ':True'
     else:
-        ret = d.getVar('EXTERNALSRC', True) + '/*:True'
+        ret = d.getVar('EXTERNALSRC') + '/*:True'
     return ret

 def srctree_configure_hash_files(d):
@@ -173,7 +173,7 @@ def srctree_configure_hash_files(d):
     Get the list of files that should trigger do_configure to re-execute,
     based on the value of CONFIGURE_FILES
     """
-    in_files = (d.getVar('CONFIGURE_FILES', True) or '').split()
+    in_files = (d.getVar('CONFIGURE_FILES') or '').split()
     out_items = []
     search_files = []
     for entry in in_files:
@@ -182,7 +182,7 @@ def srctree_configure_hash_files(d):
         else:
             search_files.append(entry)
     if search_files:
-        s_dir = d.getVar('EXTERNALSRC', True)
+        s_dir = d.getVar('EXTERNALSRC')
         for root, _, files in os.walk(s_dir):
             for f in files:
                 if f in search_files:
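
The file-checksums machinery above rests on srctree_hash_files() hashing the whole external tree. A standalone sketch of the underlying trick, assuming the source directory is a git checkout: route git through a throwaway index so the user's real index is untouched, then take the resulting tree object's SHA-1.

    import os
    import subprocess
    import tempfile

    def tree_sha1(s_dir):
        env = os.environ.copy()
        fd, index_file = tempfile.mkstemp(prefix='oe-externalsrc-')
        os.close(fd)
        os.unlink(index_file)        # let git create the index file itself
        env['GIT_INDEX_FILE'] = index_file
        try:
            # Stage everything into the temporary index, then hash the tree.
            subprocess.check_call(['git', 'add', '-A', '.'], cwd=s_dir, env=env)
            sha1 = subprocess.check_output(['git', 'write-tree'],
                                           cwd=s_dir, env=env)
        finally:
            if os.path.exists(index_file):
                os.unlink(index_file)
        return sha1.decode('utf-8').strip()
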
diff --git a/meta/classes/extrausers.bbclass b/meta/classes/extrausers.bbclass
index 852810e866..402fc7d808 100644
--- a/meta/classes/extrausers.bbclass
+++ b/meta/classes/extrausers.bbclass
@@ -15,7 +15,7 @@

 inherit useradd_base

-PACKAGE_INSTALL_append = " ${@['', 'base-passwd shadow'][bool(d.getVar('EXTRA_USERS_PARAMS', True))]}"
+PACKAGE_INSTALL_append = " ${@['', 'base-passwd shadow'][bool(d.getVar('EXTRA_USERS_PARAMS'))]}"

 # Image level user / group settings
 ROOTFS_POSTPROCESS_COMMAND_append = " set_user_group;"
diff --git a/meta/classes/fontcache.bbclass b/meta/classes/fontcache.bbclass
index 8ebdfc4f5c..d047a79aa6 100644
--- a/meta/classes/fontcache.bbclass
+++ b/meta/classes/fontcache.bbclass
@@ -30,26 +30,26 @@ fi
 }

 python () {
-    font_pkgs = d.getVar('FONT_PACKAGES', True).split()
-    deps = d.getVar("FONT_EXTRA_RDEPENDS", True)
+    font_pkgs = d.getVar('FONT_PACKAGES').split()
+    deps = d.getVar("FONT_EXTRA_RDEPENDS")

     for pkg in font_pkgs:
         if deps: d.appendVar('RDEPENDS_' + pkg, ' '+deps)
 }

 python add_fontcache_postinsts() {
-    for pkg in d.getVar('FONT_PACKAGES', True).split():
+    for pkg in d.getVar('FONT_PACKAGES').split():
         bb.note("adding fonts postinst and postrm scripts to %s" % pkg)
-        postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
+        postinst = d.getVar('pkg_postinst_%s' % pkg) or d.getVar('pkg_postinst')
         if not postinst:
             postinst = '#!/bin/sh\n'
-        postinst += d.getVar('fontcache_common', True)
+        postinst += d.getVar('fontcache_common')
         d.setVar('pkg_postinst_%s' % pkg, postinst)

-        postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True)
+        postrm = d.getVar('pkg_postrm_%s' % pkg) or d.getVar('pkg_postrm')
         if not postrm:
             postrm = '#!/bin/sh\n'
-        postrm += d.getVar('fontcache_common', True)
+        postrm += d.getVar('fontcache_common')
         d.setVar('pkg_postrm_%s' % pkg, postrm)
 }

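
The postinst/postrm wiring in add_fontcache_postinsts() is a pattern that recurs almost verbatim in gconf, gio-module-cache, gsettings and both gtk cache classes below: fetch any existing scriptlet, start one if absent, append the class's shell fragment. A generic sketch with a dict-backed stand-in for the datastore (fragment contents illustrative):

    class Store:
        """Minimal stand-in for the BitBake datastore."""
        def __init__(self):
            self.vars = {}
        def getVar(self, name):
            return self.vars.get(name)
        def setVar(self, name, value):
            self.vars[name] = value

    def add_scriptlet(d, pkg, slot, fragment_var):
        # slot is 'postinst' or 'postrm'; fragment_var names the shell body.
        script = d.getVar('pkg_%s_%s' % (slot, pkg)) or '#!/bin/sh\n'
        d.setVar('pkg_%s_%s' % (slot, pkg), script + d.getVar(fragment_var))

    d = Store()
    d.setVar('fontcache_common', 'fc-cache --system-only\n')
    add_scriptlet(d, 'fontconfig-utils', 'postinst', 'fontcache_common')
    assert d.getVar('pkg_postinst_fontconfig-utils').startswith('#!/bin/sh')
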
diff --git a/meta/classes/fs-uuid.bbclass b/meta/classes/fs-uuid.bbclass
index bd2613cf10..313c5a3597 100644
--- a/meta/classes/fs-uuid.bbclass
+++ b/meta/classes/fs-uuid.bbclass
@@ -3,7 +3,7 @@
 # on ext file systems and depends on tune2fs.
 def get_rootfs_uuid(d):
     import subprocess
-    rootfs = d.getVar('ROOTFS', True)
+    rootfs = d.getVar('ROOTFS')
     output = subprocess.check_output(['tune2fs', '-l', rootfs])
     for line in output.split('\n'):
         if line.startswith('Filesystem UUID:'):
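
get_rootfs_uuid() scrapes the UUID out of tune2fs -l output; the parse can be exercised without a block device by feeding it captured text (sample lines illustrative):

    sample = (
        'tune2fs 1.43 (17-May-2016)\n'
        'Filesystem UUID:          2f7b7d3a-4f21-4af5-9e4d-7f0f7a9e2c11\n'
        'Filesystem magic number:  0xEF53\n'
    )
    uuid = None
    for line in sample.split('\n'):
        if line.startswith('Filesystem UUID:'):
            uuid = line.split()[-1]
    assert uuid == '2f7b7d3a-4f21-4af5-9e4d-7f0f7a9e2c11'
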
diff --git a/meta/classes/gconf.bbclass b/meta/classes/gconf.bbclass
index d7afa7282f..d07beadc95 100644
--- a/meta/classes/gconf.bbclass
+++ b/meta/classes/gconf.bbclass
@@ -42,8 +42,8 @@ done

 python populate_packages_append () {
     import re
-    packages = d.getVar('PACKAGES', True).split()
-    pkgdest = d.getVar('PKGDEST', True)
+    packages = d.getVar('PACKAGES').split()
+    pkgdest = d.getVar('PKGDEST')

     for pkg in packages:
         schema_dir = '%s/%s/etc/gconf/schemas' % (pkgdest, pkg)
@@ -56,15 +56,15 @@ python populate_packages_append () {
         if schemas != []:
             bb.note("adding gconf postinst and prerm scripts to %s" % pkg)
             d.setVar('SCHEMA_FILES', " ".join(schemas))
-            postinst = d.getVar('pkg_postinst_%s' % pkg, True)
+            postinst = d.getVar('pkg_postinst_%s' % pkg)
             if not postinst:
                 postinst = '#!/bin/sh\n'
-            postinst += d.getVar('gconf_postinst', True)
+            postinst += d.getVar('gconf_postinst')
             d.setVar('pkg_postinst_%s' % pkg, postinst)
-            prerm = d.getVar('pkg_prerm_%s' % pkg, True)
+            prerm = d.getVar('pkg_prerm_%s' % pkg)
             if not prerm:
                 prerm = '#!/bin/sh\n'
-            prerm += d.getVar('gconf_prerm', True)
+            prerm += d.getVar('gconf_prerm')
             d.setVar('pkg_prerm_%s' % pkg, prerm)
             d.appendVar("RDEPENDS_%s" % pkg, ' ' + d.getVar('MLPREFIX', False) + 'gconf')
 }
diff --git a/meta/classes/gettext.bbclass b/meta/classes/gettext.bbclass
index 03b89b2455..0be14246bf 100644
--- a/meta/classes/gettext.bbclass
+++ b/meta/classes/gettext.bbclass
@@ -1,15 +1,15 @@
 def gettext_dependencies(d):
-    if d.getVar('INHIBIT_DEFAULT_DEPS', True) and not oe.utils.inherits(d, 'cross-canadian'):
+    if d.getVar('INHIBIT_DEFAULT_DEPS') and not oe.utils.inherits(d, 'cross-canadian'):
         return ""
-    if d.getVar('USE_NLS', True) == 'no':
+    if d.getVar('USE_NLS') == 'no':
         return "gettext-minimal-native"
     return d.getVar('DEPENDS_GETTEXT', False)

 def gettext_oeconf(d):
-    if d.getVar('USE_NLS', True) == 'no':
+    if d.getVar('USE_NLS') == 'no':
         return '--disable-nls'
     # Remove the NLS bits if USE_NLS is no or INHIBIT_DEFAULT_DEPS is set
-    if d.getVar('INHIBIT_DEFAULT_DEPS', True) and not oe.utils.inherits(d, 'cross-canadian'):
+    if d.getVar('INHIBIT_DEFAULT_DEPS') and not oe.utils.inherits(d, 'cross-canadian'):
         return '--disable-nls'
     return "--enable-nls"

diff --git a/meta/classes/gio-module-cache.bbclass b/meta/classes/gio-module-cache.bbclass
index 91461b11e7..39b7bef720 100644
--- a/meta/classes/gio-module-cache.bbclass
+++ b/meta/classes/gio-module-cache.bbclass
@@ -17,21 +17,21 @@ fi
 }

 python populate_packages_append () {
-    packages = d.getVar('GIO_MODULE_PACKAGES', True).split()
+    packages = d.getVar('GIO_MODULE_PACKAGES').split()

     for pkg in packages:
         bb.note("adding gio-module-cache postinst and postrm scripts to %s" % pkg)

-        postinst = d.getVar('pkg_postinst_%s' % pkg, True)
+        postinst = d.getVar('pkg_postinst_%s' % pkg)
         if not postinst:
             postinst = '#!/bin/sh\n'
-        postinst += d.getVar('gio_module_cache_common', True)
+        postinst += d.getVar('gio_module_cache_common')
         d.setVar('pkg_postinst_%s' % pkg, postinst)

-        postrm = d.getVar('pkg_postrm_%s' % pkg, True)
+        postrm = d.getVar('pkg_postrm_%s' % pkg)
         if not postrm:
             postrm = '#!/bin/sh\n'
-        postrm += d.getVar('gio_module_cache_common', True)
+        postrm += d.getVar('gio_module_cache_common')
         d.setVar('pkg_postrm_%s' % pkg, postrm)
 }

diff --git a/meta/classes/grub-efi.bbclass b/meta/classes/grub-efi.bbclass
index 17417ba5d9..3dc9146d90 100644
--- a/meta/classes/grub-efi.bbclass
+++ b/meta/classes/grub-efi.bbclass
@@ -72,14 +72,14 @@ efi_hddimg_populate() {
 python build_efi_cfg() {
     import sys

-    workdir = d.getVar('WORKDIR', True)
+    workdir = d.getVar('WORKDIR')
     if not workdir:
         bb.error("WORKDIR not defined, unable to package")
         return

-    gfxserial = d.getVar('GRUB_GFXSERIAL', True) or ""
+    gfxserial = d.getVar('GRUB_GFXSERIAL') or ""

-    labels = d.getVar('LABELS', True)
+    labels = d.getVar('LABELS')
     if not labels:
         bb.debug(1, "LABELS not defined, nothing to do")
         return
@@ -88,7 +88,7 @@ python build_efi_cfg() {
         bb.debug(1, "No labels, nothing to do")
         return

-    cfile = d.getVar('GRUB_CFG', True)
+    cfile = d.getVar('GRUB_CFG')
     if not cfile:
         bb.fatal('Unable to read GRUB_CFG')

@@ -99,33 +99,33 @@ python build_efi_cfg() {

     cfgfile.write('# Automatically created by OE\n')

-    opts = d.getVar('GRUB_OPTS', True)
+    opts = d.getVar('GRUB_OPTS')
     if opts:
         for opt in opts.split(';'):
             cfgfile.write('%s\n' % opt)

     cfgfile.write('default=%s\n' % (labels.split()[0]))

-    timeout = d.getVar('GRUB_TIMEOUT', True)
+    timeout = d.getVar('GRUB_TIMEOUT')
     if timeout:
         cfgfile.write('timeout=%s\n' % timeout)
     else:
         cfgfile.write('timeout=50\n')

-    root = d.getVar('GRUB_ROOT', True)
+    root = d.getVar('GRUB_ROOT')
     if not root:
         bb.fatal('GRUB_ROOT not defined')

     if gfxserial == "1":
         btypes = [ [ " graphics console", "" ],
-                   [ " serial console", d.getVar('GRUB_SERIAL', True) or "" ] ]
+                   [ " serial console", d.getVar('GRUB_SERIAL') or "" ] ]
     else:
         btypes = [ [ "", "" ] ]

     for label in labels.split():
         localdata = d.createCopy()

-        overrides = localdata.getVar('OVERRIDES', True)
+        overrides = localdata.getVar('OVERRIDES')
         if not overrides:
             bb.fatal('OVERRIDES not defined')

@@ -141,8 +141,8 @@ python build_efi_cfg() {

             cfgfile.write(' %s' % replace_rootfs_uuid(d, root))

-            append = localdata.getVar('APPEND', True)
-            initrd = localdata.getVar('INITRD', True)
+            append = localdata.getVar('APPEND')
+            initrd = localdata.getVar('INITRD')

             if append:
                 append = replace_rootfs_uuid(d, append)
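
build_efi_cfg() ends up writing one menu entry per LABELS item and, when GRUB_GFXSERIAL is enabled, doubles each of them for a graphics and a serial console. A self-contained sketch of just that fan-out, with illustrative values rather than the class's real output:

    labels = 'boot install'
    grub_serial = 'console=ttyS0,115200'     # would come from GRUB_SERIAL
    btypes = [[' graphics console', ''],
              [' serial console', grub_serial]]

    for label in labels.split():
        for btype in btypes:
            print("menuentry '%s%s' {" % (label, btype[0]))
            print("    linux /vmlinuz LABEL=%s %s" % (label, btype[1]))
            print("}")
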
diff --git a/meta/classes/gsettings.bbclass b/meta/classes/gsettings.bbclass
index dec5abc026..e6d1c8a893 100644
--- a/meta/classes/gsettings.bbclass
+++ b/meta/classes/gsettings.bbclass
@@ -18,20 +18,20 @@ gsettings_postinstrm () {
 }

 python populate_packages_append () {
-    pkg = d.getVar('PN', True)
+    pkg = d.getVar('PN')
     bb.note("adding gsettings postinst scripts to %s" % pkg)

-    postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
+    postinst = d.getVar('pkg_postinst_%s' % pkg) or d.getVar('pkg_postinst')
     if not postinst:
         postinst = '#!/bin/sh\n'
-    postinst += d.getVar('gsettings_postinstrm', True)
+    postinst += d.getVar('gsettings_postinstrm')
     d.setVar('pkg_postinst_%s' % pkg, postinst)

     bb.note("adding gsettings postrm scripts to %s" % pkg)

-    postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True)
+    postrm = d.getVar('pkg_postrm_%s' % pkg) or d.getVar('pkg_postrm')
     if not postrm:
         postrm = '#!/bin/sh\n'
-    postrm += d.getVar('gsettings_postinstrm', True)
+    postrm += d.getVar('gsettings_postinstrm')
     d.setVar('pkg_postrm_%s' % pkg, postrm)
 }
diff --git a/meta/classes/gtk-icon-cache.bbclass b/meta/classes/gtk-icon-cache.bbclass
index 0f1052b08c..c5d8d7cfed 100644
--- a/meta/classes/gtk-icon-cache.bbclass
+++ b/meta/classes/gtk-icon-cache.bbclass
@@ -35,11 +35,11 @@ fi
 }

 python populate_packages_append () {
-    packages = d.getVar('PACKAGES', True).split()
-    pkgdest = d.getVar('PKGDEST', True)
+    packages = d.getVar('PACKAGES').split()
+    pkgdest = d.getVar('PKGDEST')

     for pkg in packages:
-        icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, d.getVar('datadir', True))
+        icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, d.getVar('datadir'))
         if not os.path.exists(icon_dir):
             continue

@@ -49,16 +49,16 @@ python populate_packages_append () {

         bb.note("adding gtk-icon-cache postinst and postrm scripts to %s" % pkg)

-        postinst = d.getVar('pkg_postinst_%s' % pkg, True)
+        postinst = d.getVar('pkg_postinst_%s' % pkg)
         if not postinst:
             postinst = '#!/bin/sh\n'
-        postinst += d.getVar('gtk_icon_cache_postinst', True)
+        postinst += d.getVar('gtk_icon_cache_postinst')
         d.setVar('pkg_postinst_%s' % pkg, postinst)

-        postrm = d.getVar('pkg_postrm_%s' % pkg, True)
+        postrm = d.getVar('pkg_postrm_%s' % pkg)
         if not postrm:
             postrm = '#!/bin/sh\n'
-        postrm += d.getVar('gtk_icon_cache_postrm', True)
+        postrm += d.getVar('gtk_icon_cache_postrm')
         d.setVar('pkg_postrm_%s' % pkg, postrm)
 }

diff --git a/meta/classes/gtk-immodules-cache.bbclass b/meta/classes/gtk-immodules-cache.bbclass
index ebbc9dea89..baea95959b 100644
--- a/meta/classes/gtk-immodules-cache.bbclass
+++ b/meta/classes/gtk-immodules-cache.bbclass
@@ -61,21 +61,21 @@ fi
 }

 python populate_packages_append () {
-    gtkimmodules_pkgs = d.getVar('GTKIMMODULES_PACKAGES', True).split()
+    gtkimmodules_pkgs = d.getVar('GTKIMMODULES_PACKAGES').split()

     for pkg in gtkimmodules_pkgs:
         bb.note("adding gtk-immodule-cache postinst and postrm scripts to %s" % pkg)

-        postinst = d.getVar('pkg_postinst_%s' % pkg, True)
+        postinst = d.getVar('pkg_postinst_%s' % pkg)
         if not postinst:
             postinst = '#!/bin/sh\n'
-        postinst += d.getVar('gtk_immodule_cache_postinst', True)
+        postinst += d.getVar('gtk_immodule_cache_postinst')
         d.setVar('pkg_postinst_%s' % pkg, postinst)

-        postrm = d.getVar('pkg_postrm_%s' % pkg, True)
+        postrm = d.getVar('pkg_postrm_%s' % pkg)
         if not postrm:
             postrm = '#!/bin/sh\n'
-        postrm += d.getVar('gtk_immodule_cache_postrm', True)
+        postrm += d.getVar('gtk_immodule_cache_postrm')
         d.setVar('pkg_postrm_%s' % pkg, postrm)
 }

diff --git a/meta/classes/icecc.bbclass b/meta/classes/icecc.bbclass
index a837894150..8a351cf3b8 100644
--- a/meta/classes/icecc.bbclass
+++ b/meta/classes/icecc.bbclass
@@ -100,7 +100,7 @@ def use_icecc(bb,d):
     if icecc_is_allarch(bb, d):
         return "no"

-    pn = d.getVar('PN', True)
+    pn = d.getVar('PN')

     system_class_blacklist = []
     user_class_blacklist = (d.getVar('ICECC_USER_CLASS_BL', False) or "none").split()
@@ -139,7 +139,7 @@ def use_icecc(bb,d):
     return "yes"

 def icecc_is_allarch(bb, d):
-    return d.getVar("PACKAGE_ARCH", True) == "all" or bb.data.inherits_class('allarch', d)
+    return d.getVar("PACKAGE_ARCH") == "all" or bb.data.inherits_class('allarch', d)

 def icecc_is_kernel(bb, d):
     return \
diff --git a/meta/classes/image-buildinfo.bbclass b/meta/classes/image-buildinfo.bbclass
index da1edf7496..3b79de5ad8 100644
--- a/meta/classes/image-buildinfo.bbclass
+++ b/meta/classes/image-buildinfo.bbclass
@@ -18,7 +18,7 @@ def image_buildinfo_outputvars(vars, listvars, d):
     listvars = listvars.split()
     ret = ""
     for var in vars:
-        value = d.getVar(var, True) or ""
+        value = d.getVar(var) or ""
         if (d.getVarFlag(var, 'type', True) == "list"):
             value = oe.utils.squashspaces(value)
         ret += "%s = %s\n" % (var, value)
@@ -42,7 +42,7 @@ def get_layer_git_status(path):

 # Returns layer revisions along with their respective status
 def get_layer_revs(d):
-    layers = (d.getVar("BBLAYERS", True) or "").split()
+    layers = (d.getVar("BBLAYERS") or "").split()
     medadata_revs = ["%-17s = %s:%s %s" % (os.path.basename(i), \
         base_get_metadata_git_branch(i, None).strip(), \
         base_get_metadata_git_revision(i, None), \
@@ -52,11 +52,11 @@ def get_layer_revs(d):

 def buildinfo_target(d):
     # Get context
-    if d.getVar('BB_WORKERCONTEXT', True) != '1':
+    if d.getVar('BB_WORKERCONTEXT') != '1':
         return ""
     # Single and list variables to be read
-    vars = (d.getVar("IMAGE_BUILDINFO_VARS", True) or "")
-    listvars = (d.getVar("IMAGE_BUILDINFO_LVARS", True) or "")
+    vars = (d.getVar("IMAGE_BUILDINFO_VARS") or "")
+    listvars = (d.getVar("IMAGE_BUILDINFO_LVARS") or "")
     return image_buildinfo_outputvars(vars, listvars, d)

 # Write build information to target filesystem
diff --git a/meta/classes/image-live.bbclass b/meta/classes/image-live.bbclass
index 4a634dca96..a3d1b4e567 100644
--- a/meta/classes/image-live.bbclass
+++ b/meta/classes/image-live.bbclass
@@ -51,8 +51,8 @@ IMAGE_TYPEDEP_hddimg = "ext4"
 IMAGE_TYPES_MASKED += "live hddimg iso"

 python() {
-    image_b = d.getVar('IMAGE_BASENAME', True)
-    initrd_i = d.getVar('INITRD_IMAGE_LIVE', True)
+    image_b = d.getVar('IMAGE_BASENAME')
+    initrd_i = d.getVar('INITRD_IMAGE_LIVE')
     if image_b == initrd_i:
         bb.error('INITRD_IMAGE_LIVE %s cannot use image live, hddimg or iso.' % initrd_i)
         bb.fatal('Check IMAGE_FSTYPES and INITRAMFS_FSTYPES settings.')
@@ -264,9 +264,9 @@ build_hddimg() {

 python do_bootimg() {
     set_live_vm_vars(d, 'LIVE')
-    if d.getVar("PCBIOS", True) == "1":
+    if d.getVar("PCBIOS") == "1":
         bb.build.exec_func('build_syslinux_cfg', d)
-    if d.getVar("EFI", True) == "1":
+    if d.getVar("EFI") == "1":
         bb.build.exec_func('build_efi_cfg', d)
     bb.build.exec_func('build_hddimg', d)
     bb.build.exec_func('build_iso', d)
diff --git a/meta/classes/image-vm.bbclass b/meta/classes/image-vm.bbclass
index 2f35d6b4d1..35c9244e9b 100644
--- a/meta/classes/image-vm.bbclass
+++ b/meta/classes/image-vm.bbclass
@@ -112,9 +112,9 @@ build_boot_dd() {
 python do_bootdirectdisk() {
     validate_disk_signature(d)
     set_live_vm_vars(d, 'VM')
-    if d.getVar("PCBIOS", True) == "1":
+    if d.getVar("PCBIOS") == "1":
         bb.build.exec_func('build_syslinux_cfg', d)
-    if d.getVar("EFI", True) == "1":
+    if d.getVar("EFI") == "1":
         bb.build.exec_func('build_efi_cfg', d)
     bb.build.exec_func('build_boot_dd', d)
 }
@@ -132,7 +132,7 @@ def generate_disk_signature():
 def validate_disk_signature(d):
     import re

-    disk_signature = d.getVar("DISK_SIGNATURE", True)
+    disk_signature = d.getVar("DISK_SIGNATURE")

     if not re.match(r'^[0-9a-fA-F]{8}$', disk_signature):
         bb.fatal("DISK_SIGNATURE '%s' must be an 8 digit hex string" % disk_signature)
@@ -158,11 +158,11 @@ create_qcow2_image () {
 }

 python do_vmimg() {
-    if 'vmdk' in d.getVar('IMAGE_FSTYPES', True):
+    if 'vmdk' in d.getVar('IMAGE_FSTYPES'):
         bb.build.exec_func('create_vmdk_image', d)
-    if 'vdi' in d.getVar('IMAGE_FSTYPES', True):
+    if 'vdi' in d.getVar('IMAGE_FSTYPES'):
         bb.build.exec_func('create_vdi_image', d)
-    if 'qcow2' in d.getVar('IMAGE_FSTYPES', True):
+    if 'qcow2' in d.getVar('IMAGE_FSTYPES'):
         bb.build.exec_func('create_qcow2_image', d)
 }

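
validate_disk_signature() accepts exactly eight hex digits and nothing else; a few quick checks against the same pattern:

    import re

    pattern = r'^[0-9a-fA-F]{8}$'
    assert re.match(pattern, 'deadBEEF')         # mixed case is fine
    assert not re.match(pattern, 'deadbeef1')    # nine digits: too long
    assert not re.match(pattern, 'dead-bee')     # '-' is not a hex digit
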
diff --git a/meta/classes/image.bbclass b/meta/classes/image.bbclass
index e63f6a3bfe..28bff9e75a 100644
--- a/meta/classes/image.bbclass
+++ b/meta/classes/image.bbclass
@@ -2,7 +2,7 @@ inherit rootfs_${IMAGE_PKGTYPE}

 # Only Linux SDKs support populate_sdk_ext, fall back to populate_sdk
 # in the non-Linux SDK_OS case, such as mingw32
-SDKEXTCLASS ?= "${@['populate_sdk', 'populate_sdk_ext']['linux' in d.getVar("SDK_OS", True)]}"
+SDKEXTCLASS ?= "${@['populate_sdk', 'populate_sdk_ext']['linux' in d.getVar("SDK_OS")]}"
 inherit ${SDKEXTCLASS}

 TOOLCHAIN_TARGET_TASK += "${PACKAGE_INSTALL}"
@@ -133,7 +133,7 @@ def build_live(d):
     if bb.utils.contains("IMAGE_FSTYPES", "live", "live", "0", d) == "0": # live is not set but hob might set iso or hddimg
         d.setVar('NOISO', bb.utils.contains('IMAGE_FSTYPES', "iso", "0", "1", d))
         d.setVar('NOHDD', bb.utils.contains('IMAGE_FSTYPES', "hddimg", "0", "1", d))
-        if d.getVar('NOISO', True) == "0" or d.getVar('NOHDD', True) == "0":
+        if d.getVar('NOISO') == "0" or d.getVar('NOHDD') == "0":
             return "image-live"
         return ""
     return "image-live"
@@ -145,7 +145,7 @@ IMAGE_TYPE_vm = '${@bb.utils.contains_any("IMAGE_FSTYPES", ["vmdk", "vdi", "qcow
 inherit ${IMAGE_TYPE_vm}

 def build_uboot(d):
-    if 'u-boot' in (d.getVar('IMAGE_FSTYPES', True) or ''):
+    if 'u-boot' in (d.getVar('IMAGE_FSTYPES') or ''):
         return "image_types_uboot"
     else:
         return ""
@@ -158,7 +158,7 @@ python () {
     d.appendVarFlag('do_rootfs', 'depends', deps)

     deps = ""
-    for dep in (d.getVar('EXTRA_IMAGEDEPENDS', True) or "").split():
+    for dep in (d.getVar('EXTRA_IMAGEDEPENDS') or "").split():
         deps += " %s:do_populate_sysroot" % dep
     d.appendVarFlag('do_build', 'depends', deps)

@@ -167,22 +167,22 @@ python () {
     features = set(oe.data.typed_value('IMAGE_FEATURES', d))
     remain_features = features.copy()
     for feature in features:
-        replaces = set((d.getVar("IMAGE_FEATURES_REPLACES_%s" % feature, True) or "").split())
+        replaces = set((d.getVar("IMAGE_FEATURES_REPLACES_%s" % feature) or "").split())
         remain_features -= replaces

     #Check for conflict image features
     for feature in remain_features:
-        conflicts = set((d.getVar("IMAGE_FEATURES_CONFLICTS_%s" % feature, True) or "").split())
+        conflicts = set((d.getVar("IMAGE_FEATURES_CONFLICTS_%s" % feature) or "").split())
         temp = conflicts & remain_features
         if temp:
-            bb.fatal("%s contains conflicting IMAGE_FEATURES %s %s" % (d.getVar('PN', True), feature, ' '.join(list(temp))))
+            bb.fatal("%s contains conflicting IMAGE_FEATURES %s %s" % (d.getVar('PN'), feature, ' '.join(list(temp))))

     d.setVar('IMAGE_FEATURES', ' '.join(sorted(list(remain_features))))

     check_image_features(d)
-    initramfs_image = d.getVar('INITRAMFS_IMAGE', True) or ""
+    initramfs_image = d.getVar('INITRAMFS_IMAGE') or ""
     if initramfs_image != "":
-        d.appendVarFlag('do_build', 'depends', " %s:do_bundle_initramfs" % d.getVar('PN', True))
+        d.appendVarFlag('do_build', 'depends', " %s:do_bundle_initramfs" % d.getVar('PN'))
         d.appendVarFlag('do_bundle_initramfs', 'depends', " %s:do_image_complete" % initramfs_image)
 }

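
The anonymous-python block above resolves IMAGE_FEATURES in two passes: every IMAGE_FEATURES_REPLACES_* entry evicts the features it supersedes, then any IMAGE_FEATURES_CONFLICTS_* pair still co-present aborts the build. A standalone sketch of that resolution with illustrative feature names:

    features = {'debug-tweaks', 'empty-root-password', 'read-only-rootfs'}
    replaces = {'debug-tweaks': {'empty-root-password'}}
    conflicts = {'read-only-rootfs': {'package-management'}}

    remain = features.copy()
    for feature in features:
        remain -= replaces.get(feature, set())

    for feature in remain:
        clash = conflicts.get(feature, set()) & remain
        if clash:
            raise RuntimeError('conflicting IMAGE_FEATURES: %s vs %s'
                               % (feature, ' '.join(clash)))

    print(sorted(remain))    # ['debug-tweaks', 'read-only-rootfs']
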
@@ -194,7 +194,7 @@ IMAGE_POSTPROCESS_COMMAND ?= ""
 # some default locales
 IMAGE_LINGUAS ?= "de-de fr-fr en-gb"

-LINGUAS_INSTALL ?= "${@" ".join(map(lambda s: "locale-base-%s" % s, d.getVar('IMAGE_LINGUAS', True).split()))}"
+LINGUAS_INSTALL ?= "${@" ".join(map(lambda s: "locale-base-%s" % s, d.getVar('IMAGE_LINGUAS').split()))}"

 # Prefer image, but use the fallback files for lookups if the image ones
 # aren't yet available.
@@ -229,20 +229,20 @@ fakeroot python do_rootfs () {
     progress_reporter.next_stage()

     # Handle package exclusions
-    excl_pkgs = d.getVar("PACKAGE_EXCLUDE", True).split()
-    inst_pkgs = d.getVar("PACKAGE_INSTALL", True).split()
-    inst_attempt_pkgs = d.getVar("PACKAGE_INSTALL_ATTEMPTONLY", True).split()
+    excl_pkgs = d.getVar("PACKAGE_EXCLUDE").split()
+    inst_pkgs = d.getVar("PACKAGE_INSTALL").split()
+    inst_attempt_pkgs = d.getVar("PACKAGE_INSTALL_ATTEMPTONLY").split()

     d.setVar('PACKAGE_INSTALL_ORIG', ' '.join(inst_pkgs))
     d.setVar('PACKAGE_INSTALL_ATTEMPTONLY', ' '.join(inst_attempt_pkgs))

     for pkg in excl_pkgs:
         if pkg in inst_pkgs:
-            bb.warn("Package %s, set to be excluded, is in %s PACKAGE_INSTALL (%s). It will be removed from the list." % (pkg, d.getVar('PN', True), inst_pkgs))
+            bb.warn("Package %s, set to be excluded, is in %s PACKAGE_INSTALL (%s). It will be removed from the list." % (pkg, d.getVar('PN'), inst_pkgs))
             inst_pkgs.remove(pkg)

         if pkg in inst_attempt_pkgs:
-            bb.warn("Package %s, set to be excluded, is in %s PACKAGE_INSTALL_ATTEMPTONLY (%s). It will be removed from the list." % (pkg, d.getVar('PN', True), inst_pkgs))
+            bb.warn("Package %s, set to be excluded, is in %s PACKAGE_INSTALL_ATTEMPTONLY (%s). It will be removed from the list." % (pkg, d.getVar('PN'), inst_pkgs))
             inst_attempt_pkgs.remove(pkg)

     d.setVar("PACKAGE_INSTALL", ' '.join(inst_pkgs))
@@ -252,7 +252,7 @@ fakeroot python do_rootfs () {
     # We have to delay the runtime_mapping_rename until just before rootfs runs
     # otherwise, the multilib renaming could step in and squash any fixups that
     # may have occurred.
-    pn = d.getVar('PN', True)
+    pn = d.getVar('PN')
     runtime_mapping_rename("PACKAGE_INSTALL", pn, d)
     runtime_mapping_rename("PACKAGE_INSTALL_ATTEMPTONLY", pn, d)
     runtime_mapping_rename("BAD_RECOMMENDATIONS", pn, d)
@@ -275,7 +275,7 @@ addtask rootfs before do_build
 fakeroot python do_image () {
     from oe.utils import execute_pre_post_process

-    pre_process_cmds = d.getVar("IMAGE_PREPROCESS_COMMAND", True)
+    pre_process_cmds = d.getVar("IMAGE_PREPROCESS_COMMAND")

     execute_pre_post_process(d, pre_process_cmds)
 }
@@ -286,7 +286,7 @@ addtask do_image after do_rootfs before do_build
 fakeroot python do_image_complete () {
     from oe.utils import execute_pre_post_process

-    post_process_cmds = d.getVar("IMAGE_POSTPROCESS_COMMAND", True)
+    post_process_cmds = d.getVar("IMAGE_POSTPROCESS_COMMAND")

     execute_pre_post_process(d, post_process_cmds)
 }
@@ -309,7 +309,7 @@ addtask do_image_complete after do_image before do_build
 fakeroot python do_image_qa () {
     from oe.utils import ImageQAFailed

-    qa_cmds = (d.getVar('IMAGE_QA_COMMANDS', True) or '').split()
+    qa_cmds = (d.getVar('IMAGE_QA_COMMANDS') or '').split()
     qamsg = ""

     for cmd in qa_cmds:
@@ -324,7 +324,7 @@ fakeroot python do_image_qa () {
             qamsg = qamsg + '\n'

     if qamsg:
-        imgname = d.getVar('IMAGE_NAME', True)
+        imgname = d.getVar('IMAGE_NAME')
         bb.fatal("QA errors found whilst validating image: %s\n%s" % (imgname, qamsg))
 }
 addtask do_image_qa after do_image_complete before do_build
@@ -334,17 +334,17 @@ addtask do_image_qa after do_image_complete before do_build
 # to tmp/sysroots/<machine>/imgdata/<image>.env
 #
 python do_rootfs_wicenv () {
-    wicvars = d.getVar('WICVARS', True)
+    wicvars = d.getVar('WICVARS')
     if not wicvars:
         return

-    stdir = d.getVar('STAGING_DIR_TARGET', True)
+    stdir = d.getVar('STAGING_DIR_TARGET')
     outdir = os.path.join(stdir, 'imgdata')
     bb.utils.mkdirhier(outdir)
-    basename = d.getVar('IMAGE_BASENAME', True)
+    basename = d.getVar('IMAGE_BASENAME')
     with open(os.path.join(outdir, basename) + '.env', 'w') as envf:
         for var in wicvars.split():
-            value = d.getVar(var, True)
+            value = d.getVar(var)
             if value:
                 envf.write('%s="%s"\n' % (var, value.strip()))
 }
@@ -357,7 +357,7 @@ def setup_debugfs_variables(d):
     d.appendVar('IMAGE_LINK_NAME', '-dbg')
     d.appendVar('IMAGE_NAME','-dbg')
     d.setVar('IMAGE_BUILDING_DEBUGFS', 'true')
-    debugfs_image_fstypes = d.getVar('IMAGE_FSTYPES_DEBUGFS', True)
+    debugfs_image_fstypes = d.getVar('IMAGE_FSTYPES_DEBUGFS')
     if debugfs_image_fstypes:
         d.setVar('IMAGE_FSTYPES', debugfs_image_fstypes)

@@ -375,7 +375,7 @@ python () {
     #
     # Without de-duplication, gen_conversion_cmds() below
     # would create the same compression command multiple times.
-    ctypes = set(d.getVar('CONVERSIONTYPES', True).split())
+    ctypes = set(d.getVar('CONVERSIONTYPES').split())
     old_overrides = d.getVar('OVERRIDES', False)

     def _image_base_type(type):
@@ -392,11 +392,11 @@ python () {
         return basetype

     basetypes = {}
-    alltypes = d.getVar('IMAGE_FSTYPES', True).split()
+    alltypes = d.getVar('IMAGE_FSTYPES').split()
     typedeps = {}

-    if d.getVar('IMAGE_GEN_DEBUGFS', True) == "1":
-        debugfs_fstypes = d.getVar('IMAGE_FSTYPES_DEBUGFS', True).split()
+    if d.getVar('IMAGE_GEN_DEBUGFS') == "1":
+        debugfs_fstypes = d.getVar('IMAGE_FSTYPES_DEBUGFS').split()
         for t in debugfs_fstypes:
             alltypes.append("debugfs_" + t)

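
When IMAGE_GEN_DEBUGFS is enabled, the block above folds the debugfs types into the same working list under a "debugfs_" prefix, and the loop in the next hunk strips the prefix back off to recover the real type. The round trip in isolation:

    alltypes = ['ext4', 'tar.bz2']
    debugfs_fstypes = ['ext4']               # IMAGE_FSTYPES_DEBUGFS
    alltypes += ['debugfs_' + t for t in debugfs_fstypes]

    for t in alltypes:
        debug = ''
        if t.startswith('debugfs_'):
            t = t[8:]                        # len('debugfs_') == 8
            debug = 'debugfs_'
        print(debug, t)
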
@@ -411,7 +411,7 @@ python () {
         if t.startswith("debugfs_"):
             t = t[8:]
             debug = "debugfs_"
-        deps = (d.getVar('IMAGE_TYPEDEP_' + t, True) or "").split()
+        deps = (d.getVar('IMAGE_TYPEDEP_' + t) or "").split()
         vardeps.add('IMAGE_TYPEDEP_' + t)
         if baset not in typedeps:
             typedeps[baset] = set()
@@ -431,7 +431,7 @@ python () {

     d.appendVarFlag('do_image', 'vardeps', ' '.join(vardeps))

-    maskedtypes = (d.getVar('IMAGE_TYPES_MASKED', True) or "").split()
+    maskedtypes = (d.getVar('IMAGE_TYPES_MASKED') or "").split()
     maskedtypes = [dbg + t for t in maskedtypes for dbg in ("", "debugfs_")]

     for t in basetypes:
@@ -459,7 +459,7 @@ python () {
         localdata.delVar('DATETIME')
         localdata.delVar('TMPDIR')

-        image_cmd = localdata.getVar("IMAGE_CMD", True)
+        image_cmd = localdata.getVar("IMAGE_CMD")
         vardeps.add('IMAGE_CMD_' + realt)
         if image_cmd:
             cmds.append("\t" + image_cmd)
@@ -481,7 +481,7 @@ python () {
             # Create input image first.
             gen_conversion_cmds(type)
             localdata.setVar('type', type)
-            cmd = "\t" + (localdata.getVar("CONVERSION_CMD_" + ctype, True) or localdata.getVar("COMPRESS_CMD_" + ctype, True))
+            cmd = "\t" + (localdata.getVar("CONVERSION_CMD_" + ctype) or localdata.getVar("COMPRESS_CMD_" + ctype))
             if cmd not in cmds:
                 cmds.append(cmd)
             vardeps.add('CONVERSION_CMD_' + ctype)
@@ -532,17 +532,17 @@ python () {
 def get_rootfs_size(d):
     import subprocess
 
-    rootfs_alignment = int(d.getVar('IMAGE_ROOTFS_ALIGNMENT', True))
-    overhead_factor = float(d.getVar('IMAGE_OVERHEAD_FACTOR', True))
-    rootfs_req_size = int(d.getVar('IMAGE_ROOTFS_SIZE', True))
-    rootfs_extra_space = eval(d.getVar('IMAGE_ROOTFS_EXTRA_SPACE', True))
-    rootfs_maxsize = d.getVar('IMAGE_ROOTFS_MAXSIZE', True)
-    image_fstypes = d.getVar('IMAGE_FSTYPES', True) or ''
-    initramfs_fstypes = d.getVar('INITRAMFS_FSTYPES', True) or ''
-    initramfs_maxsize = d.getVar('INITRAMFS_MAXSIZE', True)
+    rootfs_alignment = int(d.getVar('IMAGE_ROOTFS_ALIGNMENT'))
+    overhead_factor = float(d.getVar('IMAGE_OVERHEAD_FACTOR'))
+    rootfs_req_size = int(d.getVar('IMAGE_ROOTFS_SIZE'))
+    rootfs_extra_space = eval(d.getVar('IMAGE_ROOTFS_EXTRA_SPACE'))
+    rootfs_maxsize = d.getVar('IMAGE_ROOTFS_MAXSIZE')
+    image_fstypes = d.getVar('IMAGE_FSTYPES') or ''
+    initramfs_fstypes = d.getVar('INITRAMFS_FSTYPES') or ''
+    initramfs_maxsize = d.getVar('INITRAMFS_MAXSIZE')
 
     output = subprocess.check_output(['du', '-ks',
-                                      d.getVar('IMAGE_ROOTFS', True)])
+                                      d.getVar('IMAGE_ROOTFS')])
     size_kb = int(output.split()[0])
     base_size = size_kb * overhead_factor
     base_size = max(base_size, rootfs_req_size) + rootfs_extra_space
@@ -558,7 +558,7 @@ def get_rootfs_size(d):
     # Do not check image size of the debugfs image. This is not supposed
     # to be deployed, etc. so it doesn't make sense to limit the size
     # of the debug.
-    if (d.getVar('IMAGE_BUILDING_DEBUGFS', True) or "") == "true":
+    if (d.getVar('IMAGE_BUILDING_DEBUGFS') or "") == "true":
         return base_size
 
     # Check the rootfs size against IMAGE_ROOTFS_MAXSIZE (if set)
@@ -589,11 +589,11 @@ python set_image_size () {
 #
 python create_symlinks() {
 
-    deploy_dir = d.getVar('IMGDEPLOYDIR', True)
-    img_name = d.getVar('IMAGE_NAME', True)
-    link_name = d.getVar('IMAGE_LINK_NAME', True)
-    manifest_name = d.getVar('IMAGE_MANIFEST', True)
-    taskname = d.getVar("BB_CURRENTTASK", True)
+    deploy_dir = d.getVar('IMGDEPLOYDIR')
+    img_name = d.getVar('IMAGE_NAME')
+    link_name = d.getVar('IMAGE_LINK_NAME')
+    manifest_name = d.getVar('IMAGE_MANIFEST')
+    taskname = d.getVar("BB_CURRENTTASK")
     subimages = (d.getVarFlag("do_" + taskname, 'subimages', False) or "").split()
     imgsuffix = d.getVarFlag("do_" + taskname, 'imgsuffix', True) or d.expand("${IMAGE_NAME_SUFFIX}.")
 
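
For readers skimming this patch: the one mechanical change repeated throughout is that BitBake's datastore getVar() now expands by default, so the trailing True argument is redundant and is dropped, while call sites that want the raw, unexpanded value keep passing False explicitly. A minimal sketch of the two behaviours, assuming a standalone DataSmart datastore (illustrative only, not part of the patch):

    # Sketch: getVar() expansion default, using bb.data_smart.DataSmart directly.
    import bb.data_smart

    d = bb.data_smart.DataSmart()
    d.setVar('TMPDIR', '/build/tmp')
    d.setVar('PKGDEST', '${TMPDIR}/pkgdest')

    print(d.getVar('PKGDEST'))         # '/build/tmp/pkgdest' (expand defaults to True)
    print(d.getVar('PKGDEST', False))  # '${TMPDIR}/pkgdest'  (unexpanded, still explicit)
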
diff --git a/meta/classes/image_types.bbclass b/meta/classes/image_types.bbclass
index 9d66d7da7a..8f048497a1 100644
--- a/meta/classes/image_types.bbclass
+++ b/meta/classes/image_types.bbclass
@@ -17,20 +17,20 @@ def imagetypes_getdepends(d):
                 d += ":do_populate_sysroot"
             deps.add(d)
 
-    fstypes = set((d.getVar('IMAGE_FSTYPES', True) or "").split())
-    fstypes |= set((d.getVar('IMAGE_FSTYPES_DEBUGFS', True) or "").split())
+    fstypes = set((d.getVar('IMAGE_FSTYPES') or "").split())
+    fstypes |= set((d.getVar('IMAGE_FSTYPES_DEBUGFS') or "").split())
 
     deps = set()
     for typestring in fstypes:
         types = typestring.split(".")
         basetype, resttypes = types[0], types[1:]
 
-        adddep(d.getVar('IMAGE_DEPENDS_%s' % basetype, True) , deps)
-        for typedepends in (d.getVar("IMAGE_TYPEDEP_%s" % basetype, True) or "").split():
-            adddep(d.getVar('IMAGE_DEPENDS_%s' % typedepends, True) , deps)
+        adddep(d.getVar('IMAGE_DEPENDS_%s' % basetype) , deps)
+        for typedepends in (d.getVar("IMAGE_TYPEDEP_%s" % basetype) or "").split():
+            adddep(d.getVar('IMAGE_DEPENDS_%s' % typedepends) , deps)
         for ctype in resttypes:
-            adddep(d.getVar("CONVERSION_DEPENDS_%s" % ctype, True), deps)
-            adddep(d.getVar("COMPRESS_DEPENDS_%s" % ctype, True), deps)
+            adddep(d.getVar("CONVERSION_DEPENDS_%s" % ctype), deps)
+            adddep(d.getVar("COMPRESS_DEPENDS_%s" % ctype), deps)
 
     # Sort the set so that ordering is consistant
     return " ".join(sorted(deps))
@@ -220,7 +220,7 @@ WKS_FILE_CHECKSUM = "${@'${WKS_FULL_PATH}:%s' % os.path.exists('${WKS_FULL_PATH}
 do_image_wic[file-checksums] += "${WKS_FILE_CHECKSUM}"
 
 python () {
-    if d.getVar('USING_WIC', True) and 'do_bootimg' in d:
+    if d.getVar('USING_WIC') and 'do_bootimg' in d:
         bb.build.addtask('do_image_wic', '', 'do_bootimg', d)
 }
 
@@ -228,7 +228,7 @@ python do_write_wks_template () {
228 """Write out expanded template contents to WKS_FULL_PATH.""" 228 """Write out expanded template contents to WKS_FULL_PATH."""
229 import re 229 import re
230 230
231 template_body = d.getVar('_WKS_TEMPLATE', True) 231 template_body = d.getVar('_WKS_TEMPLATE')
232 232
233 # Remove any remnant variable references left behind by the expansion 233 # Remove any remnant variable references left behind by the expansion
234 # due to undefined variables 234 # due to undefined variables
@@ -240,18 +240,18 @@ python do_write_wks_template () {
     else:
         template_body = new_body
 
-    wks_file = d.getVar('WKS_FULL_PATH', True)
+    wks_file = d.getVar('WKS_FULL_PATH')
     with open(wks_file, 'w') as f:
         f.write(template_body)
 }
 
 python () {
-    if d.getVar('USING_WIC', True):
+    if d.getVar('USING_WIC'):
         wks_file_u = d.getVar('WKS_FULL_PATH', False)
         wks_file = d.expand(wks_file_u)
         base, ext = os.path.splitext(wks_file)
         if ext == '.in' and os.path.exists(wks_file):
-            wks_out_file = os.path.join(d.getVar('WORKDIR', True), os.path.basename(base))
+            wks_out_file = os.path.join(d.getVar('WORKDIR'), os.path.basename(base))
             d.setVar('WKS_FULL_PATH', wks_out_file)
             d.setVar('WKS_TEMPLATE_PATH', wks_file_u)
             d.setVar('WKS_FILE_CHECKSUM', '${WKS_TEMPLATE_PATH}:True')
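
The imagetypes_getdepends() hunk above relies on a composite IMAGE_FSTYPES entry such as ext4.gz.sha256sum decomposing into one base type plus trailing conversion types. A rough standalone sketch of that decomposition (hypothetical helper name, not the class's actual code):

    # Sketch: 'ext4.gz.sha256sum' -> base 'ext4', conversions ['gz', 'sha256sum'].
    def split_fstype(typestring):
        types = typestring.split(".")
        return types[0], types[1:]

    basetype, conversions = split_fstype("ext4.gz.sha256sum")
    # Build dependencies then come from IMAGE_DEPENDS_<basetype> plus
    # CONVERSION_DEPENDS_<ctype> (or legacy COMPRESS_DEPENDS_<ctype>) per conversion.
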
diff --git a/meta/classes/insane.bbclass b/meta/classes/insane.bbclass
index 01494e3443..1a742cf6f8 100644
--- a/meta/classes/insane.bbclass
+++ b/meta/classes/insane.bbclass
@@ -179,7 +179,7 @@ def package_qa_get_machine_dict(d):
 
     # Add in any extra user supplied data which may come from a BSP layer, removing the
     # need to always change this class directly
-    extra_machdata = (d.getVar("PACKAGEQA_EXTRA_MACHDEFFUNCS", True) or "").split()
+    extra_machdata = (d.getVar("PACKAGEQA_EXTRA_MACHDEFFUNCS") or "").split()
     for m in extra_machdata:
         call = m + "(machdata, d)"
         locs = { "machdata" : machdata, "d" : d}
@@ -194,23 +194,23 @@ def package_qa_clean_path(path, d, pkg=None):
     TMPDIR is stripped, otherwise PKGDEST/pkg is stripped.
     """
     if pkg:
-        path = path.replace(os.path.join(d.getVar("PKGDEST", True), pkg), "/")
-    return path.replace(d.getVar("TMPDIR", True), "/").replace("//", "/")
+        path = path.replace(os.path.join(d.getVar("PKGDEST"), pkg), "/")
+    return path.replace(d.getVar("TMPDIR"), "/").replace("//", "/")
 
 def package_qa_write_error(type, error, d):
-    logfile = d.getVar('QA_LOGFILE', True)
+    logfile = d.getVar('QA_LOGFILE')
     if logfile:
-        p = d.getVar('P', True)
+        p = d.getVar('P')
         with open(logfile, "a+") as f:
             f.write("%s: %s [%s]\n" % (p, error, type))
 
 def package_qa_handle_error(error_class, error_msg, d):
     package_qa_write_error(error_class, error_msg, d)
-    if error_class in (d.getVar("ERROR_QA", True) or "").split():
+    if error_class in (d.getVar("ERROR_QA") or "").split():
         bb.error("QA Issue: %s [%s]" % (error_msg, error_class))
         d.setVar("QA_SANE", False)
         return False
-    elif error_class in (d.getVar("WARN_QA", True) or "").split():
+    elif error_class in (d.getVar("WARN_QA") or "").split():
         bb.warn("QA Issue: %s [%s]" % (error_msg, error_class))
     else:
         bb.note("QA Issue: %s [%s]" % (error_msg, error_class))
@@ -226,7 +226,7 @@ QAPATHTEST[libexec] = "package_qa_check_libexec"
 def package_qa_check_libexec(path,name, d, elf, messages):
 
     # Skip the case where the default is explicitly /usr/libexec
-    libexec = d.getVar('libexecdir', True)
+    libexec = d.getVar('libexecdir')
     if libexec == "/usr/libexec":
         return True
 
@@ -247,7 +247,7 @@ def package_qa_check_rpath(file,name, d, elf, messages):
     if os.path.islink(file):
         return
 
-    bad_dirs = [d.getVar('BASE_WORKDIR', True), d.getVar('STAGING_DIR_TARGET', True)]
+    bad_dirs = [d.getVar('BASE_WORKDIR'), d.getVar('STAGING_DIR_TARGET')]
 
     phdrs = elf.run_objdump("-p", d)
 
@@ -275,8 +275,8 @@ def package_qa_check_useless_rpaths(file, name, d, elf, messages):
     if os.path.islink(file):
         return
 
-    libdir = d.getVar("libdir", True)
-    base_libdir = d.getVar("base_libdir", True)
+    libdir = d.getVar("libdir")
+    base_libdir = d.getVar("base_libdir")
 
     phdrs = elf.run_objdump("-p", d)
 
@@ -333,11 +333,11 @@ def package_qa_check_libdir(d):
333 """ 333 """
334 import re 334 import re
335 335
336 pkgdest = d.getVar('PKGDEST', True) 336 pkgdest = d.getVar('PKGDEST')
337 base_libdir = d.getVar("base_libdir",True) + os.sep 337 base_libdir = d.getVar("base_libdir",True) + os.sep
338 libdir = d.getVar("libdir", True) + os.sep 338 libdir = d.getVar("libdir") + os.sep
339 libexecdir = d.getVar("libexecdir", True) + os.sep 339 libexecdir = d.getVar("libexecdir") + os.sep
340 exec_prefix = d.getVar("exec_prefix", True) + os.sep 340 exec_prefix = d.getVar("exec_prefix") + os.sep
341 341
342 messages = [] 342 messages = []
343 343
@@ -352,10 +352,10 @@ def package_qa_check_libdir(d):
     # Skip subdirectories for any packages with libdir in INSANE_SKIP
     skippackages = []
     for package in dirs:
-        if 'libdir' in (d.getVar('INSANE_SKIP_' + package, True) or "").split():
+        if 'libdir' in (d.getVar('INSANE_SKIP_' + package) or "").split():
             bb.note("Package %s skipping libdir QA test" % (package))
             skippackages.append(package)
-        elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-file-directory' and package.endswith("-dbg"):
+        elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory' and package.endswith("-dbg"):
             bb.note("Package %s skipping libdir QA test for PACKAGE_DEBUG_SPLIT_STYLE equals debug-file-directory" % (package))
             skippackages.append(package)
     for package in skippackages:
@@ -416,10 +416,10 @@ def package_qa_check_unsafe_references_in_binaries(path, name, d, elf, messages)
 
     if elf:
         import subprocess as sub
-        pn = d.getVar('PN', True)
+        pn = d.getVar('PN')
 
-        exec_prefix = d.getVar('exec_prefix', True)
-        sysroot_path = d.getVar('STAGING_DIR_TARGET', True)
+        exec_prefix = d.getVar('exec_prefix')
+        sysroot_path = d.getVar('STAGING_DIR_TARGET')
         sysroot_path_usr = sysroot_path + exec_prefix
 
         try:
@@ -432,8 +432,8 @@ def package_qa_check_unsafe_references_in_binaries(path, name, d, elf, messages)
         if sysroot_path_usr in ldd_output:
             ldd_output = ldd_output.replace(sysroot_path, "")
 
-            pkgdest = d.getVar('PKGDEST', True)
-            packages = d.getVar('PACKAGES', True)
+            pkgdest = d.getVar('PKGDEST')
+            packages = d.getVar('PACKAGES')
 
             for package in packages.split():
                 short_path = path.replace('%s/%s' % (pkgdest, package), "", 1)
@@ -459,13 +459,13 @@ def package_qa_check_unsafe_references_in_scripts(path, name, d, elf, messages):
     if not elf:
         import stat
         import subprocess
-        pn = d.getVar('PN', True)
+        pn = d.getVar('PN')
 
         # Ensure we're checking an executable script
         statinfo = os.stat(path)
         if bool(statinfo.st_mode & stat.S_IXUSR):
             # grep shell scripts for possible references to /exec_prefix/
-            exec_prefix = d.getVar('exec_prefix', True)
+            exec_prefix = d.getVar('exec_prefix')
             statement = "grep -e '%s/[^ :]\{1,\}/[^ :]\{1,\}' %s > /dev/null" % (exec_prefix, path)
             if subprocess.call(statement, shell=True) == 0:
                 error_msg = pn + ": Found a reference to %s/ in %s" % (exec_prefix, path)
@@ -489,19 +489,19 @@ def unsafe_references_skippable(path, name, d):
         return True
 
     # Skip unusual rootfs layouts which make these tests irrelevant
-    exec_prefix = d.getVar('exec_prefix', True)
+    exec_prefix = d.getVar('exec_prefix')
     if exec_prefix == "":
         return True
 
-    pkgdest = d.getVar('PKGDEST', True)
+    pkgdest = d.getVar('PKGDEST')
     pkgdest = pkgdest + "/" + name
     pkgdest = os.path.abspath(pkgdest)
-    base_bindir = pkgdest + d.getVar('base_bindir', True)
-    base_sbindir = pkgdest + d.getVar('base_sbindir', True)
-    base_libdir = pkgdest + d.getVar('base_libdir', True)
-    bindir = pkgdest + d.getVar('bindir', True)
-    sbindir = pkgdest + d.getVar('sbindir', True)
-    libdir = pkgdest + d.getVar('libdir', True)
+    base_bindir = pkgdest + d.getVar('base_bindir')
+    base_sbindir = pkgdest + d.getVar('base_sbindir')
+    base_libdir = pkgdest + d.getVar('base_libdir')
+    bindir = pkgdest + d.getVar('bindir')
+    sbindir = pkgdest + d.getVar('sbindir')
+    libdir = pkgdest + d.getVar('libdir')
 
     if base_bindir == bindir and base_sbindir == sbindir and base_libdir == libdir:
         return True
@@ -523,13 +523,13 @@ def package_qa_check_arch(path,name,d, elf, messages):
     if not elf:
         return
 
-    target_os = d.getVar('TARGET_OS', True)
-    target_arch = d.getVar('TARGET_ARCH', True)
-    provides = d.getVar('PROVIDES', True)
-    bpn = d.getVar('BPN', True)
+    target_os = d.getVar('TARGET_OS')
+    target_arch = d.getVar('TARGET_ARCH')
+    provides = d.getVar('PROVIDES')
+    bpn = d.getVar('BPN')
 
     if target_arch == "allarch":
-        pn = d.getVar('PN', True)
+        pn = d.getVar('PN')
         package_qa_add_message(messages, "arch", pn + ": Recipe inherits the allarch class, but has packaged architecture-specific binaries")
         return
 
@@ -549,7 +549,7 @@ def package_qa_check_arch(path,name,d, elf, messages):
 
     # Check the architecture and endiannes of the binary
     is_32 = (("virtual/kernel" in provides) or bb.data.inherits_class("module", d)) and \
-            (target_os == "linux-gnux32" or re.match('mips64.*32', d.getVar('DEFAULTTUNE', True)))
+            (target_os == "linux-gnux32" or re.match('mips64.*32', d.getVar('DEFAULTTUNE')))
     if not ((machine == elf.machine()) or is_32):
         package_qa_add_message(messages, "arch", "Architecture did not match (%s, expected %s) on %s" % \
                 (oe.qa.elf_machine_to_string(elf.machine()), oe.qa.elf_machine_to_string(machine), package_qa_clean_path(path,d)))
@@ -608,9 +608,9 @@ def package_qa_hash_style(path, name, d, elf, messages):
     if os.path.islink(path):
         return
 
-    gnu_hash = "--hash-style=gnu" in d.getVar('LDFLAGS', True)
+    gnu_hash = "--hash-style=gnu" in d.getVar('LDFLAGS')
     if not gnu_hash:
-        gnu_hash = "--hash-style=both" in d.getVar('LDFLAGS', True)
+        gnu_hash = "--hash-style=both" in d.getVar('LDFLAGS')
     if not gnu_hash:
         return
 
@@ -649,7 +649,7 @@ def package_qa_check_buildpaths(path, name, d, elf, messages):
     if path.find(name + "/CONTROL/") != -1 or path.find(name + "/DEBIAN/") != -1:
         return
 
-    tmpdir = d.getVar('TMPDIR', True)
+    tmpdir = d.getVar('TMPDIR')
     with open(path, 'rb') as f:
         file_content = f.read().decode('utf-8', errors='ignore')
         if tmpdir in file_content:
@@ -668,8 +668,8 @@ def package_qa_check_xorg_driver_abi(path, name, d, elf, messages):
 
     driverdir = d.expand("${libdir}/xorg/modules/drivers/")
     if driverdir in path and path.endswith(".so"):
-        mlprefix = d.getVar('MLPREFIX', True) or ''
-        for rdep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + name, True) or ""):
+        mlprefix = d.getVar('MLPREFIX') or ''
+        for rdep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + name) or ""):
             if rdep.startswith("%sxorg-abi-" % mlprefix):
                 return
         package_qa_add_message(messages, "xorg-driver-abi", "Package %s contains Xorg driver (%s) but no xorg-abi- dependencies" % (name, os.path.basename(path)))
@@ -692,9 +692,9 @@ def package_qa_check_symlink_to_sysroot(path, name, d, elf, messages):
     if os.path.islink(path):
         target = os.readlink(path)
         if os.path.isabs(target):
-            tmpdir = d.getVar('TMPDIR', True)
+            tmpdir = d.getVar('TMPDIR')
             if target.startswith(tmpdir):
-                trimmed = path.replace(os.path.join (d.getVar("PKGDEST", True), name), "")
+                trimmed = path.replace(os.path.join (d.getVar("PKGDEST"), name), "")
                 package_qa_add_message(messages, "symlink-to-sysroot", "Symlink %s in %s points to TMPDIR" % (trimmed, name))
 
 # Check license variables
@@ -706,17 +706,17 @@ python populate_lic_qa_checksum() {
     import tempfile
     sane = True
 
-    lic_files = d.getVar('LIC_FILES_CHKSUM', True) or ''
-    lic = d.getVar('LICENSE', True)
-    pn = d.getVar('PN', True)
+    lic_files = d.getVar('LIC_FILES_CHKSUM') or ''
+    lic = d.getVar('LICENSE')
+    pn = d.getVar('PN')
 
     if lic == "CLOSED":
         return
 
-    if not lic_files and d.getVar('SRC_URI', True):
+    if not lic_files and d.getVar('SRC_URI'):
         sane = package_qa_handle_error("license-checksum", pn + ": Recipe file fetches files and does not have license file information (LIC_FILES_CHKSUM)", d)
 
-    srcdir = d.getVar('S', True)
+    srcdir = d.getVar('S')
 
     for url in lic_files.split():
         try:
@@ -794,7 +794,7 @@ def package_qa_check_staged(path,d):
794 """ 794 """
795 795
796 sane = True 796 sane = True
797 tmpdir = d.getVar('TMPDIR', True) 797 tmpdir = d.getVar('TMPDIR')
798 workdir = os.path.join(tmpdir, "work") 798 workdir = os.path.join(tmpdir, "work")
799 799
800 if bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d): 800 if bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d):
@@ -845,8 +845,8 @@ def package_qa_walk(warnfuncs, errorfuncs, skip, package, d):
     import oe.qa
 
     #if this will throw an exception, then fix the dict above
-    target_os = d.getVar('TARGET_OS', True)
-    target_arch = d.getVar('TARGET_ARCH', True)
+    target_os = d.getVar('TARGET_OS')
+    target_arch = d.getVar('TARGET_ARCH')
 
     warnings = {}
     errors = {}
@@ -879,7 +879,7 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
     bb.data.update_data(localdata)
 
     # Now check the RDEPENDS
-    rdepends = bb.utils.explode_deps(localdata.getVar('RDEPENDS', True) or "")
+    rdepends = bb.utils.explode_deps(localdata.getVar('RDEPENDS') or "")
 
     # Now do the sanity check!!!
     if "build-deps" not in skip:
@@ -895,7 +895,7 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
             if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
                 continue
             if not rdep_data or not 'PN' in rdep_data:
-                pkgdata_dir = d.getVar("PKGDATA_DIR", True)
+                pkgdata_dir = d.getVar("PKGDATA_DIR")
                 try:
                     possibles = os.listdir("%s/runtime-rprovides/%s/" % (pkgdata_dir, rdepend))
                 except OSError:
@@ -954,7 +954,7 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
             # The python is not a package, but python-core provides it, so
             # skip checking /usr/bin/python if python is in the rdeps, in
             # case there is a RDEPENDS_pkg = "python" in the recipe.
-            for py in [ d.getVar('MLPREFIX', True) + "python", "python" ]:
+            for py in [ d.getVar('MLPREFIX') + "python", "python" ]:
                 if py in done:
                     filerdepends.pop("/usr/bin/python",None)
                     done.remove(py)
@@ -987,7 +987,7 @@ def package_qa_check_deps(pkg, pkgdest, skip, d):
 
     def check_valid_deps(var):
         try:
-            rvar = bb.utils.explode_dep_versions2(localdata.getVar(var, True) or "")
+            rvar = bb.utils.explode_dep_versions2(localdata.getVar(var) or "")
         except ValueError as e:
             bb.fatal("%s_%s: %s" % (var, pkg, e))
         for dep in rvar:
@@ -1010,10 +1010,10 @@ def package_qa_check_expanded_d(package, d, messages):
     variables, warn the user to use it correctly.
     """
     sane = True
-    expanded_d = d.getVar('D', True)
+    expanded_d = d.getVar('D')
 
     for var in 'FILES','pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm':
-        bbvar = d.getVar(var + "_" + package, True) or ""
+        bbvar = d.getVar(var + "_" + package) or ""
         if expanded_d in bbvar:
             if var == 'FILES':
                 package_qa_add_message(messages, "expanded-d", "FILES in %s recipe should not contain the ${D} variable as it references the local build directory not the target filesystem, best solution is to remove the ${D} reference" % package)
@@ -1026,7 +1026,7 @@ def package_qa_check_expanded_d(package, d, messages):
 def package_qa_check_encoding(keys, encode, d):
     def check_encoding(key, enc):
         sane = True
-        value = d.getVar(key, True)
+        value = d.getVar(key)
         if value:
             try:
                 s = value.encode(enc)
@@ -1051,8 +1051,8 @@ def package_qa_check_host_user(path, name, d, elf, messages):
     if not os.path.lexists(path):
         return
 
-    dest = d.getVar('PKGDEST', True)
-    pn = d.getVar('PN', True)
+    dest = d.getVar('PKGDEST')
+    pn = d.getVar('PN')
     home = os.path.join(dest, 'home')
     if path == home or path.startswith(home + os.sep):
         return
@@ -1065,12 +1065,12 @@ def package_qa_check_host_user(path, name, d, elf, messages):
             raise
     else:
         rootfs_path = path[len(dest):]
-        check_uid = int(d.getVar('HOST_USER_UID', True))
+        check_uid = int(d.getVar('HOST_USER_UID'))
         if stat.st_uid == check_uid:
             package_qa_add_message(messages, "host-user-contaminated", "%s: %s is owned by uid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, rootfs_path, check_uid))
             return False
 
-        check_gid = int(d.getVar('HOST_USER_GID', True))
+        check_gid = int(d.getVar('HOST_USER_GID'))
         if stat.st_gid == check_gid:
             package_qa_add_message(messages, "host-user-contaminated", "%s: %s is owned by gid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, rootfs_path, check_gid))
             return False
@@ -1088,8 +1088,8 @@ python do_package_qa () {
     # Check non UTF-8 characters on recipe's metadata
     package_qa_check_encoding(['DESCRIPTION', 'SUMMARY', 'LICENSE', 'SECTION'], 'utf-8', d)
 
-    logdir = d.getVar('T', True)
-    pkg = d.getVar('PN', True)
+    logdir = d.getVar('T')
+    pkg = d.getVar('PN')
 
     # Check the compile log for host contamination
     compilelog = os.path.join(logdir,"log.do_compile")
@@ -1112,8 +1112,8 @@ python do_package_qa () {
             package_qa_handle_error("install-host-path", msg, d)
 
     # Scan the packages...
-    pkgdest = d.getVar('PKGDEST', True)
-    packages = set((d.getVar('PACKAGES', True) or '').split())
+    pkgdest = d.getVar('PKGDEST')
+    packages = set((d.getVar('PACKAGES') or '').split())
 
     cpath = oe.cachedpath.CachedPath()
     global pkgfiles
@@ -1142,7 +1142,7 @@ python do_package_qa () {
         testmatrix = d.getVarFlags(matrix_name) or {}
         g = globals()
         warnchecks = []
-        for w in (d.getVar("WARN_QA", True) or "").split():
+        for w in (d.getVar("WARN_QA") or "").split():
             if w in skip:
                 continue
             if w in testmatrix and testmatrix[w] in g:
@@ -1151,7 +1151,7 @@ python do_package_qa () {
                 oe.utils.write_ld_so_conf(d)
 
         errorchecks = []
-        for e in (d.getVar("ERROR_QA", True) or "").split():
+        for e in (d.getVar("ERROR_QA") or "").split():
             if e in skip:
                 continue
             if e in testmatrix and testmatrix[e] in g:
@@ -1160,7 +1160,7 @@ python do_package_qa () {
                 oe.utils.write_ld_so_conf(d)
         return warnchecks, errorchecks
 
-    skip = (d.getVar('INSANE_SKIP_' + package, True) or "").split()
+    skip = (d.getVar('INSANE_SKIP_' + package) or "").split()
     if skip:
         bb.note("Package %s skipping QA tests: %s" % (package, str(skip)))
 
@@ -1180,10 +1180,10 @@ python do_package_qa () {
         package_qa_check_rdepends(package, pkgdest, skip, taskdeps, packages, d)
         package_qa_check_deps(package, pkgdest, skip, d)
 
-    if 'libdir' in d.getVar("ALL_QA", True).split():
+    if 'libdir' in d.getVar("ALL_QA").split():
         package_qa_check_libdir(d)
 
-    qa_sane = d.getVar("QA_SANE", True)
+    qa_sane = d.getVar("QA_SANE")
     if not qa_sane:
         bb.fatal("QA run found fatal errors. Please consider fixing them.")
     bb.note("DONE with PACKAGE QA")
@@ -1216,7 +1216,7 @@ python do_qa_configure() {
     ###########################################################################
 
     configs = []
-    workdir = d.getVar('WORKDIR', True)
+    workdir = d.getVar('WORKDIR')
 
     if bb.data.inherits_class('autotools', d):
         bb.note("Checking autotools environment for common misconfiguration")
@@ -1237,16 +1237,16 @@ Rerun configure task after fixing this.""")
     # Check gettext configuration and dependencies are correct
     ###########################################################################
 
-    cnf = d.getVar('EXTRA_OECONF', True) or ""
-    if "gettext" not in d.getVar('P', True) and "gcc-runtime" not in d.getVar('P', True) and "--disable-nls" not in cnf:
-        ml = d.getVar("MLPREFIX", True) or ""
+    cnf = d.getVar('EXTRA_OECONF') or ""
+    if "gettext" not in d.getVar('P') and "gcc-runtime" not in d.getVar('P') and "--disable-nls" not in cnf:
+        ml = d.getVar("MLPREFIX") or ""
         if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('nativesdk', d):
             gt = "gettext-native"
         elif bb.data.inherits_class('cross-canadian', d):
             gt = "nativesdk-gettext"
         else:
             gt = "virtual/" + ml + "gettext"
-        deps = bb.utils.explode_deps(d.getVar('DEPENDS', True) or "")
+        deps = bb.utils.explode_deps(d.getVar('DEPENDS') or "")
         if gt not in deps:
             for config in configs:
                 gnu = "grep \"^[[:space:]]*AM_GNU_GETTEXT\" %s >/dev/null" % config
@@ -1261,40 +1261,40 @@ Missing inherit gettext?""" % (gt, config))
     bb.note("Checking configure output for unrecognised options")
     try:
         flag = "WARNING: unrecognized options:"
-        log = os.path.join(d.getVar('B', True), 'config.log')
+        log = os.path.join(d.getVar('B'), 'config.log')
         output = subprocess.check_output(['grep', '-F', flag, log]).decode("utf-8").replace(', ', ' ')
         options = set()
         for line in output.splitlines():
             options |= set(line.partition(flag)[2].split())
-        whitelist = set(d.getVar("UNKNOWN_CONFIGURE_WHITELIST", True).split())
+        whitelist = set(d.getVar("UNKNOWN_CONFIGURE_WHITELIST").split())
         options -= whitelist
         if options:
-            pn = d.getVar('PN', True)
+            pn = d.getVar('PN')
             error_msg = pn + ": configure was passed unrecognised options: " + " ".join(options)
             package_qa_handle_error("unknown-configure-option", error_msg, d)
     except subprocess.CalledProcessError:
         pass
 
     # Check invalid PACKAGECONFIG
-    pkgconfig = (d.getVar("PACKAGECONFIG", True) or "").split()
+    pkgconfig = (d.getVar("PACKAGECONFIG") or "").split()
     if pkgconfig:
         pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
         for pconfig in pkgconfig:
             if pconfig not in pkgconfigflags:
-                pn = d.getVar('PN', True)
+                pn = d.getVar('PN')
                 error_msg = "%s: invalid PACKAGECONFIG: %s" % (pn, pconfig)
                 package_qa_handle_error("invalid-packageconfig", error_msg, d)
 
-    qa_sane = d.getVar("QA_SANE", True)
+    qa_sane = d.getVar("QA_SANE")
     if not qa_sane:
         bb.fatal("Fatal QA errors found, failing task.")
 }
 
 python do_qa_unpack() {
-    src_uri = d.getVar('SRC_URI', True)
-    s_dir = d.getVar('S', True)
+    src_uri = d.getVar('SRC_URI')
+    s_dir = d.getVar('S')
     if src_uri and not os.path.exists(s_dir):
-        bb.warn('%s: the directory %s (%s) pointed to by the S variable doesn\'t exist - please set S within the recipe to point to where the source has been unpacked to' % (d.getVar('PN', True), d.getVar('S', False), s_dir))
+        bb.warn('%s: the directory %s (%s) pointed to by the S variable doesn\'t exist - please set S within the recipe to point to where the source has been unpacked to' % (d.getVar('PN'), d.getVar('S', False), s_dir))
 }
 
 # The Staging Func, to check all staging
@@ -1310,7 +1310,7 @@ do_configure[postfuncs] += "do_qa_configure "
 do_unpack[postfuncs] += "do_qa_unpack"
 
 python () {
-    tests = d.getVar('ALL_QA', True).split()
+    tests = d.getVar('ALL_QA').split()
     if "desktop" in tests:
         d.appendVar("PACKAGE_DEPENDS", " desktop-file-utils-native")
 
@@ -1319,7 +1319,7 @@ python () {
     ###########################################################################
 
     # Checking ${FILESEXTRAPATHS}
-    extrapaths = (d.getVar("FILESEXTRAPATHS", True) or "")
+    extrapaths = (d.getVar("FILESEXTRAPATHS") or "")
     if '__default' not in extrapaths.split(":"):
         msg = "FILESEXTRAPATHS-variable, must always use _prepend (or _append)\n"
         msg += "type of assignment, and don't forget the colon.\n"
@@ -1331,29 +1331,29 @@ python () {
         msg += "%s\n" % extrapaths
         bb.warn(msg)
 
-    overrides = d.getVar('OVERRIDES', True).split(':')
-    pn = d.getVar('PN', True)
+    overrides = d.getVar('OVERRIDES').split(':')
+    pn = d.getVar('PN')
     if pn in overrides:
-        msg = 'Recipe %s has PN of "%s" which is in OVERRIDES, this can result in unexpected behaviour.' % (d.getVar("FILE", True), pn)
+        msg = 'Recipe %s has PN of "%s" which is in OVERRIDES, this can result in unexpected behaviour.' % (d.getVar("FILE"), pn)
         package_qa_handle_error("pn-overrides", msg, d)
 
     issues = []
-    if (d.getVar('PACKAGES', True) or "").split():
-        for dep in (d.getVar('QADEPENDS', True) or "").split():
+    if (d.getVar('PACKAGES') or "").split():
+        for dep in (d.getVar('QADEPENDS') or "").split():
             d.appendVarFlag('do_package_qa', 'depends', " %s:do_populate_sysroot" % dep)
         for var in 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RCONFLICTS', 'RPROVIDES', 'RREPLACES', 'FILES', 'pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm', 'ALLOW_EMPTY':
             if d.getVar(var, False):
                 issues.append(var)
 
-        fakeroot_tests = d.getVar('FAKEROOT_QA', True).split()
+        fakeroot_tests = d.getVar('FAKEROOT_QA').split()
         if set(tests) & set(fakeroot_tests):
             d.setVarFlag('do_package_qa', 'fakeroot', '1')
             d.appendVarFlag('do_package_qa', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
     else:
         d.setVarFlag('do_package_qa', 'rdeptask', '')
     for i in issues:
-        package_qa_handle_error("pkgvarcheck", "%s: Variable %s is set as not being package specific, please fix this." % (d.getVar("FILE", True), i), d)
-    qa_sane = d.getVar("QA_SANE", True)
+        package_qa_handle_error("pkgvarcheck", "%s: Variable %s is set as not being package specific, please fix this." % (d.getVar("FILE"), i), d)
+    qa_sane = d.getVar("QA_SANE")
     if not qa_sane:
         bb.fatal("Fatal QA errors found, failing task.")
 }
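
Across all the insane.bbclass hunks the routing in package_qa_handle_error() is unchanged: an issue class listed in ERROR_QA fails the run, one in WARN_QA warns, and anything else is merely noted. A simplified, self-contained sketch of that dispatch (plain Python with print standing in for bb.error/bb.warn/bb.note; illustrative only):

    # Sketch of the ERROR_QA / WARN_QA dispatch; returns False when the run
    # should be marked not sane, mirroring package_qa_handle_error().
    def route_qa_issue(error_class, error_msg, error_qa, warn_qa):
        if error_class in error_qa.split():
            print("ERROR: QA Issue: %s [%s]" % (error_msg, error_class))
            return False  # caller then sets QA_SANE = False
        elif error_class in warn_qa.split():
            print("WARNING: QA Issue: %s [%s]" % (error_msg, error_class))
        else:
            print("NOTE: QA Issue: %s [%s]" % (error_msg, error_class))
        return True

    route_qa_issue("libdir", "object in wrong directory", "arch libdir", "host-user-contaminated")
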
diff --git a/meta/classes/kernel-arch.bbclass b/meta/classes/kernel-arch.bbclass
index ea976c66b3..e09cdc4184 100644
--- a/meta/classes/kernel-arch.bbclass
+++ b/meta/classes/kernel-arch.bbclass
@@ -19,7 +19,7 @@ valid_archs = "alpha cris ia64 \
 def map_kernel_arch(a, d):
     import re
 
-    valid_archs = d.getVar('valid_archs', True).split()
+    valid_archs = d.getVar('valid_archs').split()
 
     if re.match('(i.86|athlon|x86.64)$', a): return 'x86'
     elif re.match('armeb$', a): return 'arm'
@@ -34,7 +34,7 @@ def map_kernel_arch(a, d):
     else:
         bb.error("cannot map '%s' to a linux kernel architecture" % a)
 
-export ARCH = "${@map_kernel_arch(d.getVar('TARGET_ARCH', True), d)}"
+export ARCH = "${@map_kernel_arch(d.getVar('TARGET_ARCH'), d)}"
 
 def map_uboot_arch(a, d):
     import re
@@ -43,7 +43,7 @@ def map_uboot_arch(a, d):
     elif re.match('i.86$', a): return 'x86'
     return a
 
-export UBOOT_ARCH = "${@map_uboot_arch(d.getVar('ARCH', True), d)}"
+export UBOOT_ARCH = "${@map_uboot_arch(d.getVar('ARCH'), d)}"
 
 # Set TARGET_??_KERNEL_ARCH in the machine .conf to set architecture
 # specific options necessary for building the kernel and modules.
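
The two export lines above are inline-Python expansions: BitBake evaluates the ${@...} expression at parse time with the datastore in scope, so ARCH is derived from TARGET_ARCH via the regexes in map_kernel_arch(). A toy illustration of what that mapping computes (hypothetical inputs, simplified copy of the regex logic, not the class's full table):

    # Sketch: a few TARGET_ARCH values through map_kernel_arch()-style regexes.
    import re

    def map_kernel_arch_sketch(a):
        if re.match('(i.86|athlon|x86.64)$', a): return 'x86'
        if re.match('armeb$', a): return 'arm'
        return a

    for tune in ('i686', 'x86_64', 'armeb', 'mips'):
        print(tune, '->', map_kernel_arch_sketch(tune))  # x86, x86, arm, mips
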
diff --git a/meta/classes/kernel-fitimage.bbclass b/meta/classes/kernel-fitimage.bbclass
index 8580247f82..2934b3a31a 100644
--- a/meta/classes/kernel-fitimage.bbclass
+++ b/meta/classes/kernel-fitimage.bbclass
@@ -1,13 +1,13 @@
 inherit kernel-uboot uboot-sign
 
 python __anonymous () {
-    kerneltypes = d.getVar('KERNEL_IMAGETYPES', True) or ""
+    kerneltypes = d.getVar('KERNEL_IMAGETYPES') or ""
     if 'fitImage' in kerneltypes.split():
-        depends = d.getVar("DEPENDS", True)
+        depends = d.getVar("DEPENDS")
         depends = "%s u-boot-mkimage-native dtc-native" % depends
         d.setVar("DEPENDS", depends)
 
-        if d.getVar("UBOOT_ARCH", True) == "x86":
+        if d.getVar("UBOOT_ARCH") == "x86":
             replacementtype = "bzImage"
         else:
             replacementtype = "zImage"
@@ -15,19 +15,19 @@ python __anonymous () {
         # Override KERNEL_IMAGETYPE_FOR_MAKE variable, which is internal
         # to kernel.bbclass . We have to override it, since we pack zImage
         # (at least for now) into the fitImage .
-        typeformake = d.getVar("KERNEL_IMAGETYPE_FOR_MAKE", True) or ""
+        typeformake = d.getVar("KERNEL_IMAGETYPE_FOR_MAKE") or ""
         if 'fitImage' in typeformake.split():
             d.setVar('KERNEL_IMAGETYPE_FOR_MAKE', typeformake.replace('fitImage', replacementtype))
 
-        image = d.getVar('INITRAMFS_IMAGE', True)
+        image = d.getVar('INITRAMFS_IMAGE')
         if image:
             d.appendVarFlag('do_assemble_fitimage_initramfs', 'depends', ' ${INITRAMFS_IMAGE}:do_image_complete')
 
         # Verified boot will sign the fitImage and append the public key to
         # U-boot dtb. We ensure the U-Boot dtb is deployed before assembling
         # the fitImage:
-        if d.getVar('UBOOT_SIGN_ENABLE', True):
-            uboot_pn = d.getVar('PREFERRED_PROVIDER_u-boot', True) or 'u-boot'
+        if d.getVar('UBOOT_SIGN_ENABLE'):
+            uboot_pn = d.getVar('PREFERRED_PROVIDER_u-boot') or 'u-boot'
             d.appendVarFlag('do_assemble_fitimage', 'depends', ' %s:do_deploy' % uboot_pn)
 }
 
diff --git a/meta/classes/kernel-grub.bbclass b/meta/classes/kernel-grub.bbclass
index f7dcc0715a..5d92f3b636 100644
--- a/meta/classes/kernel-grub.bbclass
+++ b/meta/classes/kernel-grub.bbclass
@@ -92,7 +92,7 @@ python __anonymous () {
     fi
 '''
 
-    imagetypes = d.getVar('KERNEL_IMAGETYPES', True)
+    imagetypes = d.getVar('KERNEL_IMAGETYPES')
     imagetypes = re.sub(r'\.gz$', '', imagetypes)
 
     for type in imagetypes.split():
diff --git a/meta/classes/kernel-module-split.bbclass b/meta/classes/kernel-module-split.bbclass
index 08d226276e..efe1b42656 100644
--- a/meta/classes/kernel-module-split.bbclass
+++ b/meta/classes/kernel-module-split.bbclass
@@ -39,10 +39,10 @@ python split_kernel_module_packages () {
 
     def extract_modinfo(file):
         import tempfile, subprocess
-        tempfile.tempdir = d.getVar("WORKDIR", True)
+        tempfile.tempdir = d.getVar("WORKDIR")
         tf = tempfile.mkstemp()
         tmpfile = tf[1]
-        cmd = "%sobjcopy -j .modinfo -O binary %s %s" % (d.getVar("HOST_PREFIX", True) or "", file, tmpfile)
+        cmd = "%sobjcopy -j .modinfo -O binary %s %s" % (d.getVar("HOST_PREFIX") or "", file, tmpfile)
         subprocess.call(cmd, shell=True)
         f = open(tmpfile)
         l = f.read().split("\000")
@@ -60,12 +60,12 @@ python split_kernel_module_packages () {
     def frob_metadata(file, pkg, pattern, format, basename):
         vals = extract_modinfo(file)
 
-        dvar = d.getVar('PKGD', True)
+        dvar = d.getVar('PKGD')
 
         # If autoloading is requested, output /etc/modules-load.d/<name>.conf and append
         # appropriate modprobe commands to the postinst
-        autoloadlist = (d.getVar("KERNEL_MODULE_AUTOLOAD", True) or "").split()
-        autoload = d.getVar('module_autoload_%s' % basename, True)
+        autoloadlist = (d.getVar("KERNEL_MODULE_AUTOLOAD") or "").split()
+        autoload = d.getVar('module_autoload_%s' % basename)
         if autoload and autoload == basename:
             bb.warn("module_autoload_%s was replaced by KERNEL_MODULE_AUTOLOAD for cases where basename == module name, please drop it" % basename)
         if autoload and basename not in autoloadlist:
@@ -79,15 +79,15 @@ python split_kernel_module_packages () {
             else:
                 f.write('%s\n' % basename)
             f.close()
-        postinst = d.getVar('pkg_postinst_%s' % pkg, True)
+        postinst = d.getVar('pkg_postinst_%s' % pkg)
         if not postinst:
             bb.fatal("pkg_postinst_%s not defined" % pkg)
-        postinst += d.getVar('autoload_postinst_fragment', True) % (autoload or basename)
+        postinst += d.getVar('autoload_postinst_fragment') % (autoload or basename)
         d.setVar('pkg_postinst_%s' % pkg, postinst)
 
         # Write out any modconf fragment
-        modconflist = (d.getVar("KERNEL_MODULE_PROBECONF", True) or "").split()
-        modconf = d.getVar('module_conf_%s' % basename, True)
+        modconflist = (d.getVar("KERNEL_MODULE_PROBECONF") or "").split()
+        modconf = d.getVar('module_conf_%s' % basename)
         if modconf and basename in modconflist:
             name = '%s/etc/modprobe.d/%s.conf' % (dvar, basename)
             f = open(name, 'w')
@@ -96,15 +96,15 @@ python split_kernel_module_packages () {
         elif modconf:
             bb.error("Please ensure module %s is listed in KERNEL_MODULE_PROBECONF since module_conf_%s is set" % (basename, basename))
 
-        files = d.getVar('FILES_%s' % pkg, True)
+        files = d.getVar('FILES_%s' % pkg)
         files = "%s /etc/modules-load.d/%s.conf /etc/modprobe.d/%s.conf" % (files, basename, basename)
         d.setVar('FILES_%s' % pkg, files)
 
         if "description" in vals:
-            old_desc = d.getVar('DESCRIPTION_' + pkg, True) or ""
+            old_desc = d.getVar('DESCRIPTION_' + pkg) or ""
             d.setVar('DESCRIPTION_' + pkg, old_desc + "; " + vals["description"])
 
-        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "")
+        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg) or "")
         modinfo_deps = []
         if "depends" in vals and vals["depends"] != "":
             for dep in vals["depends"].split(","):
@@ -121,24 +121,24 @@ python split_kernel_module_packages () {
 
     module_regex = '^(.*)\.k?o$'
 
-    module_pattern_prefix = d.getVar('KERNEL_MODULE_PACKAGE_PREFIX', True)
+    module_pattern_prefix = d.getVar('KERNEL_MODULE_PACKAGE_PREFIX')
     module_pattern = module_pattern_prefix + 'kernel-module-%s'
 
-    postinst = d.getVar('pkg_postinst_modules', True)
-    postrm = d.getVar('pkg_postrm_modules', True)
+    postinst = d.getVar('pkg_postinst_modules')
+    postrm = d.getVar('pkg_postrm_modules')
 
-    modules = do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='kernel-%s' % (d.getVar("KERNEL_VERSION", True)))
+    modules = do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='kernel-%s' % (d.getVar("KERNEL_VERSION")))
     if modules:
-        metapkg = d.getVar('KERNEL_MODULES_META_PACKAGE', True)
+        metapkg = d.getVar('KERNEL_MODULES_META_PACKAGE')
         d.appendVar('RDEPENDS_' + metapkg, ' '+' '.join(modules))
 
     # If modules-load.d and modprobe.d are empty at this point, remove them to
     # avoid warnings. removedirs only raises an OSError if an empty
     # directory cannot be removed.
-    dvar = d.getVar('PKGD', True)
+    dvar = d.getVar('PKGD')
     for dir in ["%s/etc/modprobe.d" % (dvar), "%s/etc/modules-load.d" % (dvar), "%s/etc" % (dvar)]:
         if len(os.listdir(dir)) == 0:
             os.rmdir(dir)
 }
 
-do_package[vardeps] += '${@" ".join(map(lambda s: "module_conf_" + s, (d.getVar("KERNEL_MODULE_PROBECONF", True) or "").split()))}'
+do_package[vardeps] += '${@" ".join(map(lambda s: "module_conf_" + s, (d.getVar("KERNEL_MODULE_PROBECONF") or "").split()))}'
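
For context on the hunk above: every .ko found under /lib/modules becomes its own package through the kernel-module-%s output pattern, with frob_metadata() run per module. A toy illustration of how the regex and the pattern interact (assumed file names; not the real do_split_packages() implementation):

    # Sketch: module file name -> package name, per module_regex/module_pattern.
    import re

    module_regex = r'^(.*)\.k?o$'
    module_pattern = 'kernel-module-%s'

    for fn in ('8021q.ko', 'nls_cp437.ko'):
        m = re.match(module_regex, fn)
        if m:
            print(module_pattern % m.group(1))  # kernel-module-8021q, kernel-module-nls_cp437
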
diff --git a/meta/classes/kernel-uimage.bbclass b/meta/classes/kernel-uimage.bbclass
index 340503a2d6..7e7185f903 100644
--- a/meta/classes/kernel-uimage.bbclass
+++ b/meta/classes/kernel-uimage.bbclass
@@ -1,8 +1,8 @@
1inherit kernel-uboot 1inherit kernel-uboot
2 2
3python __anonymous () { 3python __anonymous () {
4 if "uImage" in (d.getVar('KERNEL_IMAGETYPES', True) or "").split(): 4 if "uImage" in (d.getVar('KERNEL_IMAGETYPES') or "").split():
5 depends = d.getVar("DEPENDS", True) 5 depends = d.getVar("DEPENDS")
6 depends = "%s u-boot-mkimage-native" % depends 6 depends = "%s u-boot-mkimage-native" % depends
7 d.setVar("DEPENDS", depends) 7 d.setVar("DEPENDS", depends)
8 8
@@ -11,8 +11,8 @@ python __anonymous () {
11 # to build uImage using the kernel build system if and only if 11 # to build uImage using the kernel build system if and only if
12 # KEEPUIMAGE == yes. Otherwise, we pack compressed vmlinux into 12 # KEEPUIMAGE == yes. Otherwise, we pack compressed vmlinux into
13 # the uImage. 13 # the uImage.
14 if d.getVar("KEEPUIMAGE", True) != 'yes': 14 if d.getVar("KEEPUIMAGE") != 'yes':
15 typeformake = d.getVar("KERNEL_IMAGETYPE_FOR_MAKE", True) or "" 15 typeformake = d.getVar("KERNEL_IMAGETYPE_FOR_MAKE") or ""
16 if "uImage" in typeformake.split(): 16 if "uImage" in typeformake.split():
17 d.setVar('KERNEL_IMAGETYPE_FOR_MAKE', typeformake.replace('uImage', 'vmlinux')) 17 d.setVar('KERNEL_IMAGETYPE_FOR_MAKE', typeformake.replace('uImage', 'vmlinux'))
18} 18}
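
The anonymous function above injects a build-time dependency only when uImage is among the requested image types. A standalone sketch of that conditional DEPENDS edit, with a plain dict standing in for bitbake's datastore:

    # Hedged sketch of the dependency injection in kernel-uimage.bbclass;
    # 'd' here is a plain dict, not bitbake's datastore.
    d = {'KERNEL_IMAGETYPES': 'uImage zImage', 'DEPENDS': 'virtual/kernel'}

    if "uImage" in (d.get('KERNEL_IMAGETYPES') or "").split():
        d['DEPENDS'] = "%s u-boot-mkimage-native" % d['DEPENDS']

    assert d['DEPENDS'] == 'virtual/kernel u-boot-mkimage-native'
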
diff --git a/meta/classes/kernel-yocto.bbclass b/meta/classes/kernel-yocto.bbclass
index a60327a07e..c7fff5a512 100644
--- a/meta/classes/kernel-yocto.bbclass
+++ b/meta/classes/kernel-yocto.bbclass
@@ -290,7 +290,7 @@ python do_kernel_configcheck() {
290 if not os.path.exists(kmeta): 290 if not os.path.exists(kmeta):
291 kmeta = "." + kmeta 291 kmeta = "." + kmeta
292 292
293 pathprefix = "export PATH=%s:%s; " % (d.getVar('PATH', True), "${S}/scripts/util/") 293 pathprefix = "export PATH=%s:%s; " % (d.getVar('PATH'), "${S}/scripts/util/")
294 294
295 cmd = d.expand("scc --configs -o ${S}/.kernel-meta") 295 cmd = d.expand("scc --configs -o ${S}/.kernel-meta")
296 ret, configs = oe.utils.getstatusoutput("%s%s" % (pathprefix, cmd)) 296 ret, configs = oe.utils.getstatusoutput("%s%s" % (pathprefix, cmd))
diff --git a/meta/classes/kernel.bbclass b/meta/classes/kernel.bbclass
index b779a767d3..fa598cff7f 100644
--- a/meta/classes/kernel.bbclass
+++ b/meta/classes/kernel.bbclass
@@ -22,18 +22,18 @@ INITRAMFS_IMAGE_BUNDLE ?= ""
22# number and cause kernel to be rebuilt. To avoid this, make 22# number and cause kernel to be rebuilt. To avoid this, make
23# KERNEL_VERSION_NAME and KERNEL_VERSION_PKG_NAME depend on 23# KERNEL_VERSION_NAME and KERNEL_VERSION_PKG_NAME depend on
24# LINUX_VERSION which is a constant. 24# LINUX_VERSION which is a constant.
25KERNEL_VERSION_NAME = "${@d.getVar('KERNEL_VERSION', True) or ""}" 25KERNEL_VERSION_NAME = "${@d.getVar('KERNEL_VERSION') or ""}"
26KERNEL_VERSION_NAME[vardepvalue] = "${LINUX_VERSION}" 26KERNEL_VERSION_NAME[vardepvalue] = "${LINUX_VERSION}"
27KERNEL_VERSION_PKG_NAME = "${@legitimize_package_name(d.getVar('KERNEL_VERSION', True))}" 27KERNEL_VERSION_PKG_NAME = "${@legitimize_package_name(d.getVar('KERNEL_VERSION'))}"
28KERNEL_VERSION_PKG_NAME[vardepvalue] = "${LINUX_VERSION}" 28KERNEL_VERSION_PKG_NAME[vardepvalue] = "${LINUX_VERSION}"
29 29
30python __anonymous () { 30python __anonymous () {
31 import re 31 import re
32 32
33 # Merge KERNEL_IMAGETYPE and KERNEL_ALT_IMAGETYPE into KERNEL_IMAGETYPES 33 # Merge KERNEL_IMAGETYPE and KERNEL_ALT_IMAGETYPE into KERNEL_IMAGETYPES
34 type = d.getVar('KERNEL_IMAGETYPE', True) or "" 34 type = d.getVar('KERNEL_IMAGETYPE') or ""
35 alttype = d.getVar('KERNEL_ALT_IMAGETYPE', True) or "" 35 alttype = d.getVar('KERNEL_ALT_IMAGETYPE') or ""
36 types = d.getVar('KERNEL_IMAGETYPES', True) or "" 36 types = d.getVar('KERNEL_IMAGETYPES') or ""
37 if type not in types.split(): 37 if type not in types.split():
38 types = (type + ' ' + types).strip() 38 types = (type + ' ' + types).strip()
39 if alttype not in types.split(): 39 if alttype not in types.split():
@@ -56,15 +56,15 @@ python __anonymous () {
56 56
57 d.setVar('ALLOW_EMPTY_kernel-image-' + typelower, '1') 57 d.setVar('ALLOW_EMPTY_kernel-image-' + typelower, '1')
58 58
59 imagedest = d.getVar('KERNEL_IMAGEDEST', True) 59 imagedest = d.getVar('KERNEL_IMAGEDEST')
60 priority = d.getVar('KERNEL_PRIORITY', True) 60 priority = d.getVar('KERNEL_PRIORITY')
61 postinst = '#!/bin/sh\n' + 'update-alternatives --install /' + imagedest + '/' + type + ' ' + type + ' ' + '/' + imagedest + '/' + type + '-${KERNEL_VERSION_NAME} ' + priority + ' || true' + '\n' 61 postinst = '#!/bin/sh\n' + 'update-alternatives --install /' + imagedest + '/' + type + ' ' + type + ' ' + '/' + imagedest + '/' + type + '-${KERNEL_VERSION_NAME} ' + priority + ' || true' + '\n'
62 d.setVar('pkg_postinst_kernel-image-' + typelower, postinst) 62 d.setVar('pkg_postinst_kernel-image-' + typelower, postinst)
63 63
64 postrm = '#!/bin/sh\n' + 'update-alternatives --remove' + ' ' + type + ' ' + type + '-${KERNEL_VERSION_NAME} || true' + '\n' 64 postrm = '#!/bin/sh\n' + 'update-alternatives --remove' + ' ' + type + ' ' + type + '-${KERNEL_VERSION_NAME} || true' + '\n'
65 d.setVar('pkg_postrm_kernel-image-' + typelower, postrm) 65 d.setVar('pkg_postrm_kernel-image-' + typelower, postrm)
66 66
67 image = d.getVar('INITRAMFS_IMAGE', True) 67 image = d.getVar('INITRAMFS_IMAGE')
68 if image: 68 if image:
69 d.appendVarFlag('do_bundle_initramfs', 'depends', ' ${INITRAMFS_IMAGE}:do_image_complete') 69 d.appendVarFlag('do_bundle_initramfs', 'depends', ' ${INITRAMFS_IMAGE}:do_image_complete')
70 70
@@ -72,7 +72,7 @@ python __anonymous () {
72 # The preferred method is to set INITRAMFS_IMAGE, because 72 # The preferred method is to set INITRAMFS_IMAGE, because
73 # this INITRAMFS_TASK has circular dependency problems 73 # this INITRAMFS_TASK has circular dependency problems
74 # if the initramfs requires kernel modules 74 # if the initramfs requires kernel modules
75 image_task = d.getVar('INITRAMFS_TASK', True) 75 image_task = d.getVar('INITRAMFS_TASK')
76 if image_task: 76 if image_task:
77 d.appendVarFlag('do_configure', 'depends', ' ${INITRAMFS_TASK}') 77 d.appendVarFlag('do_configure', 'depends', ' ${INITRAMFS_TASK}')
78} 78}
@@ -101,15 +101,15 @@ inherit ${KERNEL_CLASSES}
101do_unpack[cleandirs] += " ${S} ${STAGING_KERNEL_DIR} ${B} ${STAGING_KERNEL_BUILDDIR}" 101do_unpack[cleandirs] += " ${S} ${STAGING_KERNEL_DIR} ${B} ${STAGING_KERNEL_BUILDDIR}"
102do_clean[cleandirs] += " ${S} ${STAGING_KERNEL_DIR} ${B} ${STAGING_KERNEL_BUILDDIR}" 102do_clean[cleandirs] += " ${S} ${STAGING_KERNEL_DIR} ${B} ${STAGING_KERNEL_BUILDDIR}"
103base_do_unpack_append () { 103base_do_unpack_append () {
104 s = d.getVar("S", True) 104 s = d.getVar("S")
105 if s[-1] == '/': 105 if s[-1] == '/':
106 # drop trailing slash, so that os.symlink(kernsrc, s) doesn't use s as directory name and fail 106 # drop trailing slash, so that os.symlink(kernsrc, s) doesn't use s as directory name and fail
107 s=s[:-1] 107 s=s[:-1]
108 kernsrc = d.getVar("STAGING_KERNEL_DIR", True) 108 kernsrc = d.getVar("STAGING_KERNEL_DIR")
109 if s != kernsrc: 109 if s != kernsrc:
110 bb.utils.mkdirhier(kernsrc) 110 bb.utils.mkdirhier(kernsrc)
111 bb.utils.remove(kernsrc, recurse=True) 111 bb.utils.remove(kernsrc, recurse=True)
112 if d.getVar("EXTERNALSRC", True): 112 if d.getVar("EXTERNALSRC"):
113 # With EXTERNALSRC S will not be wiped so we can symlink to it 113 # With EXTERNALSRC S will not be wiped so we can symlink to it
114 os.symlink(s, kernsrc) 114 os.symlink(s, kernsrc)
115 else: 115 else:
@@ -127,9 +127,9 @@ PACKAGES_DYNAMIC += "^kernel-firmware-.*"
127export OS = "${TARGET_OS}" 127export OS = "${TARGET_OS}"
128export CROSS_COMPILE = "${TARGET_PREFIX}" 128export CROSS_COMPILE = "${TARGET_PREFIX}"
129 129
130KERNEL_PRIORITY ?= "${@int(d.getVar('PV', True).split('-')[0].split('+')[0].split('.')[0]) * 10000 + \ 130KERNEL_PRIORITY ?= "${@int(d.getVar('PV').split('-')[0].split('+')[0].split('.')[0]) * 10000 + \
131 int(d.getVar('PV', True).split('-')[0].split('+')[0].split('.')[1]) * 100 + \ 131 int(d.getVar('PV').split('-')[0].split('+')[0].split('.')[1]) * 100 + \
132 int(d.getVar('PV', True).split('-')[0].split('+')[0].split('.')[-1])}" 132 int(d.getVar('PV').split('-')[0].split('+')[0].split('.')[-1])}"
133 133
134KERNEL_RELEASE ?= "${KERNEL_VERSION}" 134KERNEL_RELEASE ?= "${KERNEL_VERSION}"
135 135
@@ -140,7 +140,7 @@ KERNEL_IMAGEDEST = "boot"
140# 140#
141# configuration 141# configuration
142# 142#
143export CMDLINE_CONSOLE = "console=${@d.getVar("KERNEL_CONSOLE", True) or "ttyS0"}" 143export CMDLINE_CONSOLE = "console=${@d.getVar("KERNEL_CONSOLE") or "ttyS0"}"
144 144
145KERNEL_VERSION = "${@get_kernelversion_headers('${B}')}" 145KERNEL_VERSION = "${@get_kernelversion_headers('${B}')}"
146 146
@@ -430,14 +430,14 @@ sysroot_stage_all () {
430KERNEL_CONFIG_COMMAND ?= "oe_runmake_call -C ${S} O=${B} oldnoconfig || yes '' | oe_runmake -C ${S} O=${B} oldconfig" 430KERNEL_CONFIG_COMMAND ?= "oe_runmake_call -C ${S} O=${B} oldnoconfig || yes '' | oe_runmake -C ${S} O=${B} oldconfig"
431 431
432python check_oldest_kernel() { 432python check_oldest_kernel() {
433 oldest_kernel = d.getVar('OLDEST_KERNEL', True) 433 oldest_kernel = d.getVar('OLDEST_KERNEL')
434 kernel_version = d.getVar('KERNEL_VERSION', True) 434 kernel_version = d.getVar('KERNEL_VERSION')
435 tclibc = d.getVar('TCLIBC', True) 435 tclibc = d.getVar('TCLIBC')
436 if tclibc == 'glibc': 436 if tclibc == 'glibc':
437 kernel_version = kernel_version.split('-', 1)[0] 437 kernel_version = kernel_version.split('-', 1)[0]
438 if oldest_kernel and kernel_version: 438 if oldest_kernel and kernel_version:
439 if bb.utils.vercmp_string(kernel_version, oldest_kernel) < 0: 439 if bb.utils.vercmp_string(kernel_version, oldest_kernel) < 0:
440 bb.warn('%s: OLDEST_KERNEL is "%s" but the version of the kernel you are building is "%s" - therefore %s as built may not be compatible with this kernel. Either set OLDEST_KERNEL to an older version, or build a newer kernel.' % (d.getVar('PN', True), oldest_kernel, kernel_version, tclibc)) 440 bb.warn('%s: OLDEST_KERNEL is "%s" but the version of the kernel you are building is "%s" - therefore %s as built may not be compatible with this kernel. Either set OLDEST_KERNEL to an older version, or build a newer kernel.' % (d.getVar('PN'), oldest_kernel, kernel_version, tclibc))
441} 441}
442 442
443check_oldest_kernel[vardepsexclude] += "OLDEST_KERNEL KERNEL_VERSION" 443check_oldest_kernel[vardepsexclude] += "OLDEST_KERNEL KERNEL_VERSION"
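
The KERNEL_PRIORITY default above folds the dotted kernel version into a single integer. A worked example of that expression (the PV value is hypothetical):

    # Worked example of the KERNEL_PRIORITY computation; the suffix
    # stripping mirrors the inline expression in kernel.bbclass.
    def kernel_priority(pv):
        base = pv.split('-')[0].split('+')[0]   # e.g. "4.8.12"
        parts = base.split('.')
        return int(parts[0]) * 10000 + int(parts[1]) * 100 + int(parts[-1])

    assert kernel_priority("4.8.12+gitAUTOINC-r0") == 40812
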
diff --git a/meta/classes/libc-common.bbclass b/meta/classes/libc-common.bbclass
index 11b0065a6d..9ea2c03749 100644
--- a/meta/classes/libc-common.bbclass
+++ b/meta/classes/libc-common.bbclass
@@ -17,15 +17,15 @@ do_install() {
17} 17}
18 18
19def get_libc_fpu_setting(bb, d): 19def get_libc_fpu_setting(bb, d):
20 if d.getVar('TARGET_FPU', True) in [ 'soft', 'ppc-efd' ]: 20 if d.getVar('TARGET_FPU') in [ 'soft', 'ppc-efd' ]:
21 return "--without-fp" 21 return "--without-fp"
22 return "" 22 return ""
23 23
24python populate_packages_prepend () { 24python populate_packages_prepend () {
25 if d.getVar('DEBIAN_NAMES', True): 25 if d.getVar('DEBIAN_NAMES'):
26 pkgs = d.getVar('PACKAGES', True).split() 26 pkgs = d.getVar('PACKAGES').split()
27 bpn = d.getVar('BPN', True) 27 bpn = d.getVar('BPN')
28 prefix = d.getVar('MLPREFIX', True) or "" 28 prefix = d.getVar('MLPREFIX') or ""
29 # Set the base package... 29 # Set the base package...
30 d.setVar('PKG_' + prefix + bpn, prefix + 'libc6') 30 d.setVar('PKG_' + prefix + bpn, prefix + 'libc6')
31 libcprefix = prefix + bpn + '-' 31 libcprefix = prefix + bpn + '-'
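
With DEBIAN_NAMES set, the hunk above republishes the base C library package under its Debian name, multilib prefix included. A small illustration with hypothetical values:

    # Hypothetical values; a PKG_<name> override renames the emitted package.
    prefix, bpn = "lib32-", "glibc"
    pkg_overrides = {'PKG_' + prefix + bpn: prefix + 'libc6'}
    assert pkg_overrides['PKG_lib32-glibc'] == 'lib32-libc6'
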
diff --git a/meta/classes/libc-package.bbclass b/meta/classes/libc-package.bbclass
index 071978b519..739adce694 100644
--- a/meta/classes/libc-package.bbclass
+++ b/meta/classes/libc-package.bbclass
@@ -12,24 +12,24 @@ GLIBC_INTERNAL_USE_BINARY_LOCALE ?= "ondevice"
12GLIBC_SPLIT_LC_PACKAGES ?= "0" 12GLIBC_SPLIT_LC_PACKAGES ?= "0"
13 13
14python __anonymous () { 14python __anonymous () {
15 enabled = d.getVar("ENABLE_BINARY_LOCALE_GENERATION", True) 15 enabled = d.getVar("ENABLE_BINARY_LOCALE_GENERATION")
16 16
17 pn = d.getVar("PN", True) 17 pn = d.getVar("PN")
18 if pn.endswith("-initial"): 18 if pn.endswith("-initial"):
19 enabled = False 19 enabled = False
20 20
21 if enabled and int(enabled): 21 if enabled and int(enabled):
22 import re 22 import re
23 23
24 target_arch = d.getVar("TARGET_ARCH", True) 24 target_arch = d.getVar("TARGET_ARCH")
25 binary_arches = d.getVar("BINARY_LOCALE_ARCHES", True) or "" 25 binary_arches = d.getVar("BINARY_LOCALE_ARCHES") or ""
26 use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", True) or "" 26 use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF") or ""
27 27
28 for regexp in binary_arches.split(" "): 28 for regexp in binary_arches.split(" "):
29 r = re.compile(regexp) 29 r = re.compile(regexp)
30 30
31 if r.match(target_arch): 31 if r.match(target_arch):
32 depends = d.getVar("DEPENDS", True) 32 depends = d.getVar("DEPENDS")
33 if use_cross_localedef == "1" : 33 if use_cross_localedef == "1" :
34 depends = "%s cross-localedef-native" % depends 34 depends = "%s cross-localedef-native" % depends
35 else: 35 else:
@@ -94,21 +94,21 @@ inherit qemu
94 94
95python package_do_split_gconvs () { 95python package_do_split_gconvs () {
96 import re 96 import re
97 if (d.getVar('PACKAGE_NO_GCONV', True) == '1'): 97 if (d.getVar('PACKAGE_NO_GCONV') == '1'):
98 bb.note("package requested not splitting gconvs") 98 bb.note("package requested not splitting gconvs")
99 return 99 return
100 100
101 if not d.getVar('PACKAGES', True): 101 if not d.getVar('PACKAGES'):
102 return 102 return
103 103
104 mlprefix = d.getVar("MLPREFIX", True) or "" 104 mlprefix = d.getVar("MLPREFIX") or ""
105 105
106 bpn = d.getVar('BPN', True) 106 bpn = d.getVar('BPN')
107 libdir = d.getVar('libdir', True) 107 libdir = d.getVar('libdir')
108 if not libdir: 108 if not libdir:
109 bb.error("libdir not defined") 109 bb.error("libdir not defined")
110 return 110 return
111 datadir = d.getVar('datadir', True) 111 datadir = d.getVar('datadir')
112 if not datadir: 112 if not datadir:
113 bb.error("datadir not defined") 113 bb.error("datadir not defined")
114 return 114 return
@@ -116,7 +116,7 @@ python package_do_split_gconvs () {
116 gconv_libdir = base_path_join(libdir, "gconv") 116 gconv_libdir = base_path_join(libdir, "gconv")
117 charmap_dir = base_path_join(datadir, "i18n", "charmaps") 117 charmap_dir = base_path_join(datadir, "i18n", "charmaps")
118 locales_dir = base_path_join(datadir, "i18n", "locales") 118 locales_dir = base_path_join(datadir, "i18n", "locales")
119 binary_locales_dir = d.getVar('localedir', True) 119 binary_locales_dir = d.getVar('localedir')
120 120
121 def calc_gconv_deps(fn, pkg, file_regex, output_pattern, group): 121 def calc_gconv_deps(fn, pkg, file_regex, output_pattern, group):
122 deps = [] 122 deps = []
@@ -183,13 +183,13 @@ python package_do_split_gconvs () {
183 description='locale definition for %s', hook=calc_locale_deps, extra_depends='') 183 description='locale definition for %s', hook=calc_locale_deps, extra_depends='')
184 d.setVar('PACKAGES', d.getVar('PACKAGES', False) + ' ' + d.getVar('MLPREFIX', False) + bpn + '-gconv') 184 d.setVar('PACKAGES', d.getVar('PACKAGES', False) + ' ' + d.getVar('MLPREFIX', False) + bpn + '-gconv')
185 185
186 use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", True) 186 use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE")
187 187
188 dot_re = re.compile("(.*)\.(.*)") 188 dot_re = re.compile("(.*)\.(.*)")
189 189
190 # Read in supported locales and associated encodings 190 # Read in supported locales and associated encodings
191 supported = {} 191 supported = {}
192 with open(base_path_join(d.getVar('WORKDIR', True), "SUPPORTED")) as f: 192 with open(base_path_join(d.getVar('WORKDIR'), "SUPPORTED")) as f:
193 for line in f.readlines(): 193 for line in f.readlines():
194 try: 194 try:
195 locale, charset = line.rstrip().split() 195 locale, charset = line.rstrip().split()
@@ -198,7 +198,7 @@ python package_do_split_gconvs () {
198 supported[locale] = charset 198 supported[locale] = charset
199 199
200 # The GLIBC_GENERATE_LOCALES var specifies which locales are to be generated. Empty or "all" means all locales 200 # The GLIBC_GENERATE_LOCALES var specifies which locales are to be generated. Empty or "all" means all locales
201 to_generate = d.getVar('GLIBC_GENERATE_LOCALES', True) 201 to_generate = d.getVar('GLIBC_GENERATE_LOCALES')
202 if not to_generate or to_generate == 'all': 202 if not to_generate or to_generate == 'all':
203 to_generate = sorted(supported.keys()) 203 to_generate = sorted(supported.keys())
204 else: 204 else:
@@ -215,14 +215,14 @@ python package_do_split_gconvs () {
215 def output_locale_source(name, pkgname, locale, encoding): 215 def output_locale_source(name, pkgname, locale, encoding):
216 d.setVar('RDEPENDS_%s' % pkgname, '%slocaledef %s-localedata-%s %s-charmap-%s' % \ 216 d.setVar('RDEPENDS_%s' % pkgname, '%slocaledef %s-localedata-%s %s-charmap-%s' % \
217 (mlprefix, mlprefix+bpn, legitimize_package_name(locale), mlprefix+bpn, legitimize_package_name(encoding))) 217 (mlprefix, mlprefix+bpn, legitimize_package_name(locale), mlprefix+bpn, legitimize_package_name(encoding)))
218 d.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst', True) \ 218 d.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst') \
219 % (locale, encoding, locale)) 219 % (locale, encoding, locale))
220 d.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm', True) % \ 220 d.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm') % \
221 (locale, encoding, locale)) 221 (locale, encoding, locale))
222 222
223 def output_locale_binary_rdepends(name, pkgname, locale, encoding): 223 def output_locale_binary_rdepends(name, pkgname, locale, encoding):
224 dep = legitimize_package_name('%s-binary-localedata-%s' % (bpn, name)) 224 dep = legitimize_package_name('%s-binary-localedata-%s' % (bpn, name))
225 lcsplit = d.getVar('GLIBC_SPLIT_LC_PACKAGES', True) 225 lcsplit = d.getVar('GLIBC_SPLIT_LC_PACKAGES')
226 if lcsplit and int(lcsplit): 226 if lcsplit and int(lcsplit):
227 d.appendVar('PACKAGES', ' ' + dep) 227 d.appendVar('PACKAGES', ' ' + dep)
228 d.setVar('ALLOW_EMPTY_%s' % dep, '1') 228 d.setVar('ALLOW_EMPTY_%s' % dep, '1')
@@ -231,16 +231,16 @@ python package_do_split_gconvs () {
231 commands = {} 231 commands = {}
232 232
233 def output_locale_binary(name, pkgname, locale, encoding): 233 def output_locale_binary(name, pkgname, locale, encoding):
234 treedir = base_path_join(d.getVar("WORKDIR", True), "locale-tree") 234 treedir = base_path_join(d.getVar("WORKDIR"), "locale-tree")
235 ldlibdir = base_path_join(treedir, d.getVar("base_libdir", True)) 235 ldlibdir = base_path_join(treedir, d.getVar("base_libdir"))
236 path = d.getVar("PATH", True) 236 path = d.getVar("PATH")
237 i18npath = base_path_join(treedir, datadir, "i18n") 237 i18npath = base_path_join(treedir, datadir, "i18n")
238 gconvpath = base_path_join(treedir, "iconvdata") 238 gconvpath = base_path_join(treedir, "iconvdata")
239 outputpath = base_path_join(treedir, binary_locales_dir) 239 outputpath = base_path_join(treedir, binary_locales_dir)
240 240
241 use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", True) or "0" 241 use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF") or "0"
242 if use_cross_localedef == "1": 242 if use_cross_localedef == "1":
243 target_arch = d.getVar('TARGET_ARCH', True) 243 target_arch = d.getVar('TARGET_ARCH')
244 locale_arch_options = { \ 244 locale_arch_options = { \
245 "arm": " --uint32-align=4 --little-endian ", \ 245 "arm": " --uint32-align=4 --little-endian ", \
246 "armeb": " --uint32-align=4 --big-endian ", \ 246 "armeb": " --uint32-align=4 --big-endian ", \
@@ -279,7 +279,7 @@ python package_do_split_gconvs () {
279 --inputfile=%s/i18n/locales/%s --charmap=%s %s" \ 279 --inputfile=%s/i18n/locales/%s --charmap=%s %s" \
280 % (treedir, datadir, locale, encoding, name) 280 % (treedir, datadir, locale, encoding, name)
281 281
282 qemu_options = d.getVar('QEMU_OPTIONS', True) 282 qemu_options = d.getVar('QEMU_OPTIONS')
283 283
284 cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \ 284 cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \
285 -E LD_LIBRARY_PATH=%s %s %s/bin/localedef %s" % \ 285 -E LD_LIBRARY_PATH=%s %s %s/bin/localedef %s" % \
@@ -292,7 +292,7 @@ python package_do_split_gconvs () {
292 def output_locale(name, locale, encoding): 292 def output_locale(name, locale, encoding):
293 pkgname = d.getVar('MLPREFIX', False) + 'locale-base-' + legitimize_package_name(name) 293 pkgname = d.getVar('MLPREFIX', False) + 'locale-base-' + legitimize_package_name(name)
294 d.setVar('ALLOW_EMPTY_%s' % pkgname, '1') 294 d.setVar('ALLOW_EMPTY_%s' % pkgname, '1')
295 d.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES', True))) 295 d.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES')))
296 rprovides = ' %svirtual-locale-%s' % (mlprefix, legitimize_package_name(name)) 296 rprovides = ' %svirtual-locale-%s' % (mlprefix, legitimize_package_name(name))
297 m = re.match("(.*)_(.*)", name) 297 m = re.match("(.*)_(.*)", name)
298 if m: 298 if m:
@@ -311,8 +311,8 @@ python package_do_split_gconvs () {
311 bb.note("preparing tree for binary locale generation") 311 bb.note("preparing tree for binary locale generation")
312 bb.build.exec_func("do_prep_locale_tree", d) 312 bb.build.exec_func("do_prep_locale_tree", d)
313 313
314 utf8_only = int(d.getVar('LOCALE_UTF8_ONLY', True) or 0) 314 utf8_only = int(d.getVar('LOCALE_UTF8_ONLY') or 0)
315 utf8_is_default = int(d.getVar('LOCALE_UTF8_IS_DEFAULT', True) or 0) 315 utf8_is_default = int(d.getVar('LOCALE_UTF8_IS_DEFAULT') or 0)
316 316
317 encodings = {} 317 encodings = {}
318 for locale in to_generate: 318 for locale in to_generate:
@@ -344,7 +344,7 @@ python package_do_split_gconvs () {
344 d.appendVar('RDEPENDS_%s' % metapkg, ' ' + pkg) 344 d.appendVar('RDEPENDS_%s' % metapkg, ' ' + pkg)
345 345
346 if use_bin == "compile": 346 if use_bin == "compile":
347 makefile = base_path_join(d.getVar("WORKDIR", True), "locale-tree", "Makefile") 347 makefile = base_path_join(d.getVar("WORKDIR"), "locale-tree", "Makefile")
348 m = open(makefile, "w") 348 m = open(makefile, "w")
349 m.write("all: %s\n\n" % " ".join(commands.keys())) 349 m.write("all: %s\n\n" % " ".join(commands.keys()))
350 for cmd in commands: 350 for cmd in commands:
@@ -358,7 +358,7 @@ python package_do_split_gconvs () {
358 bb.build.exec_func("do_collect_bins_from_locale_tree", d) 358 bb.build.exec_func("do_collect_bins_from_locale_tree", d)
359 359
360 if use_bin in ('compile', 'precompiled'): 360 if use_bin in ('compile', 'precompiled'):
361 lcsplit = d.getVar('GLIBC_SPLIT_LC_PACKAGES', True) 361 lcsplit = d.getVar('GLIBC_SPLIT_LC_PACKAGES')
362 if lcsplit and int(lcsplit): 362 if lcsplit and int(lcsplit):
363 do_split_packages(d, binary_locales_dir, file_regex='^(.*/LC_\w+)', \ 363 do_split_packages(d, binary_locales_dir, file_regex='^(.*/LC_\w+)', \
364 output_pattern=bpn+'-binary-localedata-%s', \ 364 output_pattern=bpn+'-binary-localedata-%s', \
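
package_do_split_gconvs() above drives locale generation from the glibc SUPPORTED file, two whitespace-separated fields per line. A sketch of that parse, including the tolerance for malformed lines:

    # Input lines are hypothetical samples of the SUPPORTED file format.
    supported = {}
    for line in ["en_US.UTF-8 UTF-8", "de_DE ISO-8859-1", "malformed-line"]:
        try:
            locale, charset = line.rstrip().split()
        except ValueError:
            continue
        supported[locale] = charset

    assert supported == {"en_US.UTF-8": "UTF-8", "de_DE": "ISO-8859-1"}
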
diff --git a/meta/classes/license.bbclass b/meta/classes/license.bbclass
index afcfbfc038..c5d1204ca1 100644
--- a/meta/classes/license.bbclass
+++ b/meta/classes/license.bbclass
@@ -37,13 +37,13 @@ python license_create_manifest() {
37 import oe.packagedata 37 import oe.packagedata
38 from oe.rootfs import image_list_installed_packages 38 from oe.rootfs import image_list_installed_packages
39 39
40 build_images_from_feeds = d.getVar('BUILD_IMAGES_FROM_FEEDS', True) 40 build_images_from_feeds = d.getVar('BUILD_IMAGES_FROM_FEEDS')
41 if build_images_from_feeds == "1": 41 if build_images_from_feeds == "1":
42 return 0 42 return 0
43 43
44 pkg_dic = {} 44 pkg_dic = {}
45 for pkg in sorted(image_list_installed_packages(d)): 45 for pkg in sorted(image_list_installed_packages(d)):
46 pkg_info = os.path.join(d.getVar('PKGDATA_DIR', True), 46 pkg_info = os.path.join(d.getVar('PKGDATA_DIR'),
47 'runtime-reverse', pkg) 47 'runtime-reverse', pkg)
48 pkg_name = os.path.basename(os.readlink(pkg_info)) 48 pkg_name = os.path.basename(os.readlink(pkg_info))
49 49
@@ -52,15 +52,15 @@ python license_create_manifest() {
52 pkg_lic_name = "LICENSE_" + pkg_name 52 pkg_lic_name = "LICENSE_" + pkg_name
53 pkg_dic[pkg_name]["LICENSE"] = pkg_dic[pkg_name][pkg_lic_name] 53 pkg_dic[pkg_name]["LICENSE"] = pkg_dic[pkg_name][pkg_lic_name]
54 54
55 rootfs_license_manifest = os.path.join(d.getVar('LICENSE_DIRECTORY', True), 55 rootfs_license_manifest = os.path.join(d.getVar('LICENSE_DIRECTORY'),
56 d.getVar('IMAGE_NAME', True), 'license.manifest') 56 d.getVar('IMAGE_NAME'), 'license.manifest')
57 write_license_files(d, rootfs_license_manifest, pkg_dic) 57 write_license_files(d, rootfs_license_manifest, pkg_dic)
58} 58}
59 59
60def write_license_files(d, license_manifest, pkg_dic): 60def write_license_files(d, license_manifest, pkg_dic):
61 import re 61 import re
62 62
63 bad_licenses = (d.getVar("INCOMPATIBLE_LICENSE", True) or "").split() 63 bad_licenses = (d.getVar("INCOMPATIBLE_LICENSE") or "").split()
64 bad_licenses = map(lambda l: canonical_license(d, l), bad_licenses) 64 bad_licenses = map(lambda l: canonical_license(d, l), bad_licenses)
65 bad_licenses = expand_wildcard_licenses(d, bad_licenses) 65 bad_licenses = expand_wildcard_licenses(d, bad_licenses)
66 66
@@ -72,7 +72,7 @@ def write_license_files(d, license_manifest, pkg_dic):
72 oe.license.manifest_licenses(pkg_dic[pkg]["LICENSE"], 72 oe.license.manifest_licenses(pkg_dic[pkg]["LICENSE"],
73 bad_licenses, canonical_license, d) 73 bad_licenses, canonical_license, d)
74 except oe.license.LicenseError as exc: 74 except oe.license.LicenseError as exc:
75 bb.fatal('%s: %s' % (d.getVar('P', True), exc)) 75 bb.fatal('%s: %s' % (d.getVar('P'), exc))
76 else: 76 else:
77 pkg_dic[pkg]["LICENSES"] = re.sub('[|&()*]', ' ', pkg_dic[pkg]["LICENSE"]) 77 pkg_dic[pkg]["LICENSES"] = re.sub('[|&()*]', ' ', pkg_dic[pkg]["LICENSE"])
78 pkg_dic[pkg]["LICENSES"] = re.sub(' *', ' ', pkg_dic[pkg]["LICENSES"]) 78 pkg_dic[pkg]["LICENSES"] = re.sub(' *', ' ', pkg_dic[pkg]["LICENSES"])
@@ -98,7 +98,7 @@ def write_license_files(d, license_manifest, pkg_dic):
98 license_file.write("FILES: %s\n\n" % pkg_dic[pkg]["FILES"]) 98 license_file.write("FILES: %s\n\n" % pkg_dic[pkg]["FILES"])
99 99
100 for lic in pkg_dic[pkg]["LICENSES"]: 100 for lic in pkg_dic[pkg]["LICENSES"]:
101 lic_file = os.path.join(d.getVar('LICENSE_DIRECTORY', True), 101 lic_file = os.path.join(d.getVar('LICENSE_DIRECTORY'),
102 pkg_dic[pkg]["PN"], "generic_%s" % 102 pkg_dic[pkg]["PN"], "generic_%s" %
103 re.sub('\+', '', lic)) 103 re.sub('\+', '', lic))
104 # explicitly avoid the CLOSED license because it isn't generic 104 # explicitly avoid the CLOSED license because it isn't generic
@@ -114,8 +114,8 @@ def write_license_files(d, license_manifest, pkg_dic):
114 # - Just copy the manifest 114 # - Just copy the manifest
115 # - Copy the manifest and the license directories 115 # - Copy the manifest and the license directories
116 # With both options set we see a 0.5 MB increase in core-image-minimal 116 # With both options set we see a 0.5 MB increase in core-image-minimal
117 copy_lic_manifest = d.getVar('COPY_LIC_MANIFEST', True) 117 copy_lic_manifest = d.getVar('COPY_LIC_MANIFEST')
118 copy_lic_dirs = d.getVar('COPY_LIC_DIRS', True) 118 copy_lic_dirs = d.getVar('COPY_LIC_DIRS')
119 if copy_lic_manifest == "1": 119 if copy_lic_manifest == "1":
120 rootfs_license_dir = os.path.join(d.getVar('IMAGE_ROOTFS', 'True'), 120 rootfs_license_dir = os.path.join(d.getVar('IMAGE_ROOTFS'),
121 'usr', 'share', 'common-licenses') 121 'usr', 'share', 'common-licenses')
@@ -129,7 +129,7 @@ def write_license_files(d, license_manifest, pkg_dic):
129 for pkg in sorted(pkg_dic): 129 for pkg in sorted(pkg_dic):
130 pkg_rootfs_license_dir = os.path.join(rootfs_license_dir, pkg) 130 pkg_rootfs_license_dir = os.path.join(rootfs_license_dir, pkg)
131 bb.utils.mkdirhier(pkg_rootfs_license_dir) 131 bb.utils.mkdirhier(pkg_rootfs_license_dir)
132 pkg_license_dir = os.path.join(d.getVar('LICENSE_DIRECTORY', True), 132 pkg_license_dir = os.path.join(d.getVar('LICENSE_DIRECTORY'),
133 pkg_dic[pkg]["PN"]) 133 pkg_dic[pkg]["PN"])
134 licenses = os.listdir(pkg_license_dir) 134 licenses = os.listdir(pkg_license_dir)
135 for lic in licenses: 135 for lic in licenses:
@@ -166,7 +166,7 @@ def license_deployed_manifest(d):
166 166
167 dep_dic = {} 167 dep_dic = {}
168 man_dic = {} 168 man_dic = {}
169 lic_dir = d.getVar("LICENSE_DIRECTORY", True) 169 lic_dir = d.getVar("LICENSE_DIRECTORY")
170 170
171 dep_dic = get_deployed_dependencies(d) 171 dep_dic = get_deployed_dependencies(d)
172 for dep in dep_dic.keys(): 172 for dep in dep_dic.keys():
@@ -181,8 +181,8 @@ def license_deployed_manifest(d):
181 key,val = line.split(": ", 1) 181 key,val = line.split(": ", 1)
182 man_dic[dep][key] = val[:-1] 182 man_dic[dep][key] = val[:-1]
183 183
184 lic_manifest_dir = os.path.join(d.getVar('LICENSE_DIRECTORY', True), 184 lic_manifest_dir = os.path.join(d.getVar('LICENSE_DIRECTORY'),
185 d.getVar('IMAGE_NAME', True)) 185 d.getVar('IMAGE_NAME'))
186 bb.utils.mkdirhier(lic_manifest_dir) 186 bb.utils.mkdirhier(lic_manifest_dir)
187 image_license_manifest = os.path.join(lic_manifest_dir, 'image_license.manifest') 187 image_license_manifest = os.path.join(lic_manifest_dir, 'image_license.manifest')
188 write_license_files(d, image_license_manifest, man_dic) 188 write_license_files(d, image_license_manifest, man_dic)
@@ -202,7 +202,7 @@ def get_deployed_dependencies(d):
202 depends = list(set([dep[0] for dep 202 depends = list(set([dep[0] for dep
203 in list(taskdata.values()) 203 in list(taskdata.values())
204 if not dep[0].endswith("-native")])) 204 if not dep[0].endswith("-native")]))
205 extra_depends = d.getVar("EXTRA_IMAGEDEPENDS", True) 205 extra_depends = d.getVar("EXTRA_IMAGEDEPENDS")
206 boot_depends = get_boot_dependencies(d) 206 boot_depends = get_boot_dependencies(d)
207 depends.extend(extra_depends.split()) 207 depends.extend(extra_depends.split())
208 depends.extend(boot_depends) 208 depends.extend(boot_depends)
@@ -212,13 +212,13 @@ def get_deployed_dependencies(d):
212 # the SSTATE_MANIFESTS for "deploy" task. 212 # the SSTATE_MANIFESTS for "deploy" task.
213 # The manifest file name contains the arch. Because we are not running 213 # The manifest file name contains the arch. Because we are not running
214 # in the recipe context it is necessary to check every arch used. 214 # in the recipe context it is necessary to check every arch used.
215 sstate_manifest_dir = d.getVar("SSTATE_MANIFESTS", True) 215 sstate_manifest_dir = d.getVar("SSTATE_MANIFESTS")
216 sstate_archs = d.getVar("SSTATE_ARCHS", True) 216 sstate_archs = d.getVar("SSTATE_ARCHS")
217 extra_archs = d.getVar("PACKAGE_EXTRA_ARCHS", True) 217 extra_archs = d.getVar("PACKAGE_EXTRA_ARCHS")
218 archs = list(set(("%s %s" % (sstate_archs, extra_archs)).split())) 218 archs = list(set(("%s %s" % (sstate_archs, extra_archs)).split()))
219 for dep in depends: 219 for dep in depends:
220 # Some recipes have an arch of their own, so we try that first. 220 # Some recipes have an arch of their own, so we try that first.
221 special_arch = d.getVar("PACKAGE_ARCH_pn-%s" % dep, True) 221 special_arch = d.getVar("PACKAGE_ARCH_pn-%s" % dep)
222 if special_arch: 222 if special_arch:
223 sstate_manifest_file = os.path.join(sstate_manifest_dir, 223 sstate_manifest_file = os.path.join(sstate_manifest_dir,
224 "manifest-%s-%s.deploy" % (special_arch, dep)) 224 "manifest-%s-%s.deploy" % (special_arch, dep))
@@ -254,7 +254,7 @@ def get_boot_dependencies(d):
254 in boot_depends_string.split() 254 in boot_depends_string.split()
255 if not dep.split(":")[0].endswith("-native")] 255 if not dep.split(":")[0].endswith("-native")]
256 for dep in boot_depends: 256 for dep in boot_depends:
257 info_file = os.path.join(d.getVar("LICENSE_DIRECTORY", True), 257 info_file = os.path.join(d.getVar("LICENSE_DIRECTORY"),
258 dep, "recipeinfo") 258 dep, "recipeinfo")
259 # If the recipe and the dependency have the same name 259 # If the recipe and the dependency have the same name
260 if os.path.exists(info_file): 260 if os.path.exists(info_file):
@@ -265,7 +265,7 @@ def get_boot_dependencies(d):
265 # The fifth field contains what the task provides 265 # The fifth field contains what the task provides
266 if dep in taskdep[4]: 266 if dep in taskdep[4]:
267 info_file = os.path.join( 267 info_file = os.path.join(
268 d.getVar("LICENSE_DIRECTORY", True), 268 d.getVar("LICENSE_DIRECTORY"),
269 taskdep[0], "recipeinfo") 269 taskdep[0], "recipeinfo")
270 if os.path.exists(info_file): 270 if os.path.exists(info_file):
271 depends.append(taskdep[0]) 271 depends.append(taskdep[0])
@@ -295,7 +295,7 @@ python do_populate_lic() {
295 lic_files_paths = find_license_files(d) 295 lic_files_paths = find_license_files(d)
296 296
297 # The base directory we wrangle licenses to 297 # The base directory we wrangle licenses to
298 destdir = os.path.join(d.getVar('LICSSTATEDIR', True), d.getVar('PN', True)) 298 destdir = os.path.join(d.getVar('LICSSTATEDIR'), d.getVar('PN'))
299 copy_license_files(lic_files_paths, destdir) 299 copy_license_files(lic_files_paths, destdir)
300 info = get_recipe_info(d) 300 info = get_recipe_info(d)
301 with open(os.path.join(destdir, "recipeinfo"), "w") as f: 301 with open(os.path.join(destdir, "recipeinfo"), "w") as f:
@@ -306,11 +306,11 @@ python do_populate_lic() {
306# it would be better to copy them in do_install_append, but find_license_files is python 306# it would be better to copy them in do_install_append, but find_license_files is python
307python perform_packagecopy_prepend () { 307python perform_packagecopy_prepend () {
308 enabled = oe.data.typed_value('LICENSE_CREATE_PACKAGE', d) 308 enabled = oe.data.typed_value('LICENSE_CREATE_PACKAGE', d)
309 if d.getVar('CLASSOVERRIDE', True) == 'class-target' and enabled: 309 if d.getVar('CLASSOVERRIDE') == 'class-target' and enabled:
310 lic_files_paths = find_license_files(d) 310 lic_files_paths = find_license_files(d)
311 311
312 # LICENSE_FILES_DIRECTORY starts with '/' so os.path.join cannot be used to join D and LICENSE_FILES_DIRECTORY 312 # LICENSE_FILES_DIRECTORY starts with '/' so os.path.join cannot be used to join D and LICENSE_FILES_DIRECTORY
313 destdir = d.getVar('D', True) + os.path.join(d.getVar('LICENSE_FILES_DIRECTORY', True), d.getVar('PN', True)) 313 destdir = d.getVar('D') + os.path.join(d.getVar('LICENSE_FILES_DIRECTORY'), d.getVar('PN'))
314 copy_license_files(lic_files_paths, destdir) 314 copy_license_files(lic_files_paths, destdir)
315 add_package_and_files(d) 315 add_package_and_files(d)
316} 316}
@@ -318,15 +318,15 @@ perform_packagecopy[vardeps] += "LICENSE_CREATE_PACKAGE"
318 318
319def get_recipe_info(d): 319def get_recipe_info(d):
320 info = {} 320 info = {}
321 info["PV"] = d.getVar("PV", True) 321 info["PV"] = d.getVar("PV")
322 info["PR"] = d.getVar("PR", True) 322 info["PR"] = d.getVar("PR")
323 info["LICENSE"] = d.getVar("LICENSE", True) 323 info["LICENSE"] = d.getVar("LICENSE")
324 return info 324 return info
325 325
326def add_package_and_files(d): 326def add_package_and_files(d):
327 packages = d.getVar('PACKAGES', True) 327 packages = d.getVar('PACKAGES')
328 files = d.getVar('LICENSE_FILES_DIRECTORY', True) 328 files = d.getVar('LICENSE_FILES_DIRECTORY')
329 pn = d.getVar('PN', True) 329 pn = d.getVar('PN')
330 pn_lic = "%s%s" % (pn, d.getVar('LICENSE_PACKAGE_SUFFIX', False)) 330 pn_lic = "%s%s" % (pn, d.getVar('LICENSE_PACKAGE_SUFFIX', False))
331 if pn_lic in packages: 331 if pn_lic in packages:
332 bb.warn("%s package already existed in %s." % (pn_lic, pn)) 332 bb.warn("%s package already existed in %s." % (pn_lic, pn))
@@ -334,7 +334,7 @@ def add_package_and_files(d):
334 # first in PACKAGES to be sure that nothing else gets LICENSE_FILES_DIRECTORY 334 # first in PACKAGES to be sure that nothing else gets LICENSE_FILES_DIRECTORY
335 d.setVar('PACKAGES', "%s %s" % (pn_lic, packages)) 335 d.setVar('PACKAGES', "%s %s" % (pn_lic, packages))
336 d.setVar('FILES_' + pn_lic, files) 336 d.setVar('FILES_' + pn_lic, files)
337 rrecommends_pn = d.getVar('RRECOMMENDS_' + pn, True) 337 rrecommends_pn = d.getVar('RRECOMMENDS_' + pn)
338 if rrecommends_pn: 338 if rrecommends_pn:
339 d.setVar('RRECOMMENDS_' + pn, "%s %s" % (pn_lic, rrecommends_pn)) 339 d.setVar('RRECOMMENDS_' + pn, "%s %s" % (pn_lic, rrecommends_pn))
340 else: 340 else:
@@ -390,12 +390,12 @@ def find_license_files(d):
390 from collections import defaultdict, OrderedDict 390 from collections import defaultdict, OrderedDict
391 391
392 # All the license files for the package 392 # All the license files for the package
393 lic_files = d.getVar('LIC_FILES_CHKSUM', True) or "" 393 lic_files = d.getVar('LIC_FILES_CHKSUM') or ""
394 pn = d.getVar('PN', True) 394 pn = d.getVar('PN')
395 # The license files are located in S/LIC_FILES_CHKSUM. 395 # The license files are located in S/LIC_FILES_CHKSUM.
396 srcdir = d.getVar('S', True) 396 srcdir = d.getVar('S')
397 # Directory we store the generic licenses as set in the distro configuration 397 # Directory we store the generic licenses as set in the distro configuration
398 generic_directory = d.getVar('COMMON_LICENSE_DIR', True) 398 generic_directory = d.getVar('COMMON_LICENSE_DIR')
399 # List of basename, path tuples 399 # List of basename, path tuples
400 lic_files_paths = [] 400 lic_files_paths = []
401 # Entries from LIC_FILES_CHKSUM 401 # Entries from LIC_FILES_CHKSUM
@@ -403,7 +403,7 @@ def find_license_files(d):
403 license_source_dirs = [] 403 license_source_dirs = []
404 license_source_dirs.append(generic_directory) 404 license_source_dirs.append(generic_directory)
405 try: 405 try:
406 additional_lic_dirs = d.getVar('LICENSE_PATH', True).split() 406 additional_lic_dirs = d.getVar('LICENSE_PATH').split()
407 for lic_dir in additional_lic_dirs: 407 for lic_dir in additional_lic_dirs:
408 license_source_dirs.append(lic_dir) 408 license_source_dirs.append(lic_dir)
409 except: 409 except:
@@ -473,18 +473,18 @@ def find_license_files(d):
473 try: 473 try:
474 (type, host, path, user, pswd, parm) = bb.fetch.decodeurl(url) 474 (type, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
475 except bb.fetch.MalformedUrl: 475 except bb.fetch.MalformedUrl:
476 bb.fatal("%s: LIC_FILES_CHKSUM contains an invalid URL: %s" % (d.getVar('PF', True), url)) 476 bb.fatal("%s: LIC_FILES_CHKSUM contains an invalid URL: %s" % (d.getVar('PF'), url))
477 # We want the license filename and path 477 # We want the license filename and path
478 chksum = parm['md5'] if 'md5' in parm else parm['sha256'] 478 chksum = parm['md5'] if 'md5' in parm else parm['sha256']
479 lic_chksums[path] = chksum 479 lic_chksums[path] = chksum
480 480
481 v = FindVisitor() 481 v = FindVisitor()
482 try: 482 try:
483 v.visit_string(d.getVar('LICENSE', True)) 483 v.visit_string(d.getVar('LICENSE'))
484 except oe.license.InvalidLicense as exc: 484 except oe.license.InvalidLicense as exc:
485 bb.fatal('%s: %s' % (d.getVar('PF', True), exc)) 485 bb.fatal('%s: %s' % (d.getVar('PF'), exc))
486 except SyntaxError: 486 except SyntaxError:
487 bb.warn("%s: Failed to parse it's LICENSE field." % (d.getVar('PF', True))) 487 bb.warn("%s: Failed to parse it's LICENSE field." % (d.getVar('PF')))
488 488
489 # Add files from LIC_FILES_CHKSUM to list of license files 489 # Add files from LIC_FILES_CHKSUM to list of license files
490 lic_chksum_paths = defaultdict(OrderedDict) 490 lic_chksum_paths = defaultdict(OrderedDict)
@@ -542,7 +542,7 @@ def expand_wildcard_licenses(d, wildcard_licenses):
542 542
543def incompatible_license_contains(license, truevalue, falsevalue, d): 543def incompatible_license_contains(license, truevalue, falsevalue, d):
544 license = canonical_license(d, license) 544 license = canonical_license(d, license)
545 bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE', True) or "").split() 545 bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()
546 bad_licenses = expand_wildcard_licenses(d, bad_licenses) 546 bad_licenses = expand_wildcard_licenses(d, bad_licenses)
547 return truevalue if license in bad_licenses else falsevalue 547 return truevalue if license in bad_licenses else falsevalue
548 548
@@ -553,9 +553,9 @@ def incompatible_license(d, dont_want_licenses, package=None):
553 as canonical (SPDX) names. 553 as canonical (SPDX) names.
554 """ 554 """
555 import oe.license 555 import oe.license
556 license = d.getVar("LICENSE_%s" % package, True) if package else None 556 license = d.getVar("LICENSE_%s" % package) if package else None
557 if not license: 557 if not license:
558 license = d.getVar('LICENSE', True) 558 license = d.getVar('LICENSE')
559 559
560 # Handles an "or" or two license sets provided by 560 # Handles an "or" or two license sets provided by
561 # flattened_licenses(), pick one that works if possible. 561 # flattened_licenses(), pick one that works if possible.
@@ -566,7 +566,7 @@ def incompatible_license(d, dont_want_licenses, package=None):
566 try: 566 try:
567 licenses = oe.license.flattened_licenses(license, choose_lic_set) 567 licenses = oe.license.flattened_licenses(license, choose_lic_set)
568 except oe.license.LicenseError as exc: 568 except oe.license.LicenseError as exc:
569 bb.fatal('%s: %s' % (d.getVar('P', True), exc)) 569 bb.fatal('%s: %s' % (d.getVar('P'), exc))
570 return any(not oe.license.license_ok(canonical_license(d, l), \ 570 return any(not oe.license.license_ok(canonical_license(d, l), \
571 dont_want_licenses) for l in licenses) 571 dont_want_licenses) for l in licenses)
572 572
@@ -614,16 +614,16 @@ def check_license_flags(d):
614 614
615 def all_license_flags_match(license_flags, whitelist): 615 def all_license_flags_match(license_flags, whitelist):
616 """ Return first unmatched flag, None if all flags match """ 616 """ Return first unmatched flag, None if all flags match """
617 pn = d.getVar('PN', True) 617 pn = d.getVar('PN')
618 split_whitelist = whitelist.split() 618 split_whitelist = whitelist.split()
619 for flag in license_flags.split(): 619 for flag in license_flags.split():
620 if not license_flag_matches(flag, split_whitelist, pn): 620 if not license_flag_matches(flag, split_whitelist, pn):
621 return flag 621 return flag
622 return None 622 return None
623 623
624 license_flags = d.getVar('LICENSE_FLAGS', True) 624 license_flags = d.getVar('LICENSE_FLAGS')
625 if license_flags: 625 if license_flags:
626 whitelist = d.getVar('LICENSE_FLAGS_WHITELIST', True) 626 whitelist = d.getVar('LICENSE_FLAGS_WHITELIST')
627 if not whitelist: 627 if not whitelist:
628 return license_flags 628 return license_flags
629 unmatched_flag = all_license_flags_match(license_flags, whitelist) 629 unmatched_flag = all_license_flags_match(license_flags, whitelist)
@@ -637,8 +637,8 @@ def check_license_format(d):
637 Validate operators in LICENSE. 637 Validate operators in LICENSE.
638 No spaces are allowed between license names. 638 No spaces are allowed between license names.
639 """ 639 """
640 pn = d.getVar('PN', True) 640 pn = d.getVar('PN')
641 licenses = d.getVar('LICENSE', True) 641 licenses = d.getVar('LICENSE')
642 from oe.license import license_operator, license_operator_chars, license_pattern 642 from oe.license import license_operator, license_operator_chars, license_pattern
643 643
644 elements = list(filter(lambda x: x.strip(), license_operator.split(licenses))) 644 elements = list(filter(lambda x: x.strip(), license_operator.split(licenses)))
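
incompatible_license_contains() above reduces to a membership test once the license names are canonicalised. A minimal sketch with the SPDX canonicalisation stubbed out:

    # canonical_license() is stubbed as identity here; the real code maps
    # aliases to SPDX names before the membership test.
    def incompatible_license_contains(license, truevalue, falsevalue, bad_licenses):
        return truevalue if license in bad_licenses else falsevalue

    bad = ["GPL-3.0", "LGPL-3.0"]
    assert incompatible_license_contains("GPL-3.0", "x11", "libx11", bad) == "x11"
    assert incompatible_license_contains("MIT", "x11", "libx11", bad) == "libx11"
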
diff --git a/meta/classes/live-vm-common.bbclass b/meta/classes/live-vm-common.bbclass
index 734697f9e6..27b137dec6 100644
--- a/meta/classes/live-vm-common.bbclass
+++ b/meta/classes/live-vm-common.bbclass
@@ -4,11 +4,11 @@ def set_live_vm_vars(d, suffix):
4 vars = ['GRUB_CFG', 'SYSLINUX_CFG', 'ROOT', 'LABELS', 'INITRD'] 4 vars = ['GRUB_CFG', 'SYSLINUX_CFG', 'ROOT', 'LABELS', 'INITRD']
5 for var in vars: 5 for var in vars:
6 var_with_suffix = var + '_' + suffix 6 var_with_suffix = var + '_' + suffix
7 if d.getVar(var, True): 7 if d.getVar(var):
8 bb.warn('Found potentially conflicting var %s, please use %s rather than %s' % \ 8 bb.warn('Found potentially conflicting var %s, please use %s rather than %s' % \
9 (var, var_with_suffix, var)) 9 (var, var_with_suffix, var))
10 elif d.getVar(var_with_suffix, True): 10 elif d.getVar(var_with_suffix):
11 d.setVar(var, d.getVar(var_with_suffix, True)) 11 d.setVar(var, d.getVar(var_with_suffix))
12 12
13 13
14EFI = "${@bb.utils.contains("MACHINE_FEATURES", "efi", "1", "0", d)}" 14EFI = "${@bb.utils.contains("MACHINE_FEATURES", "efi", "1", "0", d)}"
@@ -25,7 +25,7 @@ def pcbios(d):
25 return pcbios 25 return pcbios
26 26
27PCBIOS = "${@pcbios(d)}" 27PCBIOS = "${@pcbios(d)}"
28PCBIOS_CLASS = "${@['','syslinux'][d.getVar('PCBIOS', True) == '1']}" 28PCBIOS_CLASS = "${@['','syslinux'][d.getVar('PCBIOS') == '1']}"
29 29
30inherit ${EFI_CLASS} 30inherit ${EFI_CLASS}
31inherit ${PCBIOS_CLASS} 31inherit ${PCBIOS_CLASS}
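
The PCBIOS_CLASS assignment above uses list indexing as a conditional: a boolean indexes a two-element list, with False selecting element 0 and True element 1. The idiom in isolation:

    pcbios = "1"
    pcbios_class = ['', 'syslinux'][pcbios == '1']   # True indexes element 1
    assert pcbios_class == 'syslinux'
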
diff --git a/meta/classes/metadata_scm.bbclass b/meta/classes/metadata_scm.bbclass
index 2e6fac209a..fa791f04c4 100644
--- a/meta/classes/metadata_scm.bbclass
+++ b/meta/classes/metadata_scm.bbclass
@@ -26,7 +26,7 @@ def base_detect_branch(d):
26 return "<unknown>" 26 return "<unknown>"
27 27
28def base_get_scmbasepath(d): 28def base_get_scmbasepath(d):
29 return os.path.join(d.getVar('COREBASE', True), 'meta') 29 return os.path.join(d.getVar('COREBASE'), 'meta')
30 30
31def base_get_metadata_monotone_branch(path, d): 31def base_get_metadata_monotone_branch(path, d):
32 monotone_branch = "<unknown>" 32 monotone_branch = "<unknown>"
diff --git a/meta/classes/migrate_localcount.bbclass b/meta/classes/migrate_localcount.bbclass
index aa0df8bb76..810a541316 100644
--- a/meta/classes/migrate_localcount.bbclass
+++ b/meta/classes/migrate_localcount.bbclass
@@ -6,12 +6,12 @@ python migrate_localcount_handler () {
6 if not e.data: 6 if not e.data:
7 return 7 return
8 8
9 pv = e.data.getVar('PV', True) 9 pv = e.data.getVar('PV')
10 if not 'AUTOINC' in pv: 10 if not 'AUTOINC' in pv:
11 return 11 return
12 12
13 localcounts = bb.persist_data.persist('BB_URI_LOCALCOUNT', e.data) 13 localcounts = bb.persist_data.persist('BB_URI_LOCALCOUNT', e.data)
14 pn = e.data.getVar('PN', True) 14 pn = e.data.getVar('PN')
15 revs = localcounts.get_by_pattern('%%-%s_rev' % pn) 15 revs = localcounts.get_by_pattern('%%-%s_rev' % pn)
16 counts = localcounts.get_by_pattern('%%-%s_count' % pn) 16 counts = localcounts.get_by_pattern('%%-%s_count' % pn)
17 if not revs or not counts: 17 if not revs or not counts:
@@ -21,10 +21,10 @@ python migrate_localcount_handler () {
21 bb.warn("The number of revs and localcounts don't match in %s" % pn) 21 bb.warn("The number of revs and localcounts don't match in %s" % pn)
22 return 22 return
23 23
24 version = e.data.getVar('PRAUTOINX', True) 24 version = e.data.getVar('PRAUTOINX')
25 srcrev = bb.fetch2.get_srcrev(e.data) 25 srcrev = bb.fetch2.get_srcrev(e.data)
26 base_ver = 'AUTOINC-%s' % version[:version.find(srcrev)] 26 base_ver = 'AUTOINC-%s' % version[:version.find(srcrev)]
27 pkgarch = e.data.getVar('PACKAGE_ARCH', True) 27 pkgarch = e.data.getVar('PACKAGE_ARCH')
28 value = max(int(count) for count in counts) 28 value = max(int(count) for count in counts)
29 29
30 if len(revs) == 1: 30 if len(revs) == 1:
@@ -33,8 +33,8 @@ python migrate_localcount_handler () {
33 else: 33 else:
34 value += 1 34 value += 1
35 35
36 bb.utils.mkdirhier(e.data.getVar('PRSERV_DUMPDIR', True)) 36 bb.utils.mkdirhier(e.data.getVar('PRSERV_DUMPDIR'))
37 df = e.data.getVar('LOCALCOUNT_DUMPFILE', True) 37 df = e.data.getVar('LOCALCOUNT_DUMPFILE')
38 flock = bb.utils.lockfile("%s.lock" % df) 38 flock = bb.utils.lockfile("%s.lock" % df)
39 with open(df, 'a') as fd: 39 with open(df, 'a') as fd:
40 fd.write('PRAUTO$%s$%s$%s = "%s"\n' % 40 fd.write('PRAUTO$%s$%s$%s = "%s"\n' %
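
The dump above records the migrated PR value as the highest persisted localcount, bumped by one when more than one revision contributed. The single-revision branch is elided in the hunk shown, so treat this as an approximation with hypothetical counts:

    revs, counts = ["abc123"], ["3", "7", "5"]
    value = max(int(count) for count in counts)
    if len(revs) != 1:
        # multiple revisions: leave headroom above every recorded count
        value += 1
    assert value == 7
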
diff --git a/meta/classes/mime.bbclass b/meta/classes/mime.bbclass
index 721c73fcff..6cd59af391 100644
--- a/meta/classes/mime.bbclass
+++ b/meta/classes/mime.bbclass
@@ -28,8 +28,8 @@ fi
28 28
29python populate_packages_append () { 29python populate_packages_append () {
30 import re 30 import re
31 packages = d.getVar('PACKAGES', True).split() 31 packages = d.getVar('PACKAGES').split()
32 pkgdest = d.getVar('PKGDEST', True) 32 pkgdest = d.getVar('PKGDEST')
33 33
34 for pkg in packages: 34 for pkg in packages:
35 mime_dir = '%s/%s/usr/share/mime/packages' % (pkgdest, pkg) 35 mime_dir = '%s/%s/usr/share/mime/packages' % (pkgdest, pkg)
@@ -41,15 +41,15 @@ python populate_packages_append () {
41 mimes.append(f) 41 mimes.append(f)
42 if mimes: 42 if mimes:
43 bb.note("adding mime postinst and postrm scripts to %s" % pkg) 43 bb.note("adding mime postinst and postrm scripts to %s" % pkg)
44 postinst = d.getVar('pkg_postinst_%s' % pkg, True) 44 postinst = d.getVar('pkg_postinst_%s' % pkg)
45 if not postinst: 45 if not postinst:
46 postinst = '#!/bin/sh\n' 46 postinst = '#!/bin/sh\n'
47 postinst += d.getVar('mime_postinst', True) 47 postinst += d.getVar('mime_postinst')
48 d.setVar('pkg_postinst_%s' % pkg, postinst) 48 d.setVar('pkg_postinst_%s' % pkg, postinst)
49 postrm = d.getVar('pkg_postrm_%s' % pkg, True) 49 postrm = d.getVar('pkg_postrm_%s' % pkg)
50 if not postrm: 50 if not postrm:
51 postrm = '#!/bin/sh\n' 51 postrm = '#!/bin/sh\n'
52 postrm += d.getVar('mime_postrm', True) 52 postrm += d.getVar('mime_postrm')
53 d.setVar('pkg_postrm_%s' % pkg, postrm) 53 d.setVar('pkg_postrm_%s' % pkg, postrm)
54 bb.note("adding shared-mime-info-data dependency to %s" % pkg) 54 bb.note("adding shared-mime-info-data dependency to %s" % pkg)
55 d.appendVar('RDEPENDS_' + pkg, " shared-mime-info-data") 55 d.appendVar('RDEPENDS_' + pkg, " shared-mime-info-data")
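
The postinst handling above follows a common bbclass pattern: reuse an existing per-package script if one is set, otherwise start a fresh shebang, then append the class's snippet. In isolation (the snippet body is hypothetical):

    scripts = {}                       # stands in for pkg_postinst_<pkg> vars
    mime_postinst = 'update-mime-database $D/usr/share/mime\n'  # hypothetical body

    postinst = scripts.get('pkg_postinst_foo')
    if not postinst:
        postinst = '#!/bin/sh\n'
    postinst += mime_postinst
    scripts['pkg_postinst_foo'] = postinst

    assert scripts['pkg_postinst_foo'].startswith('#!/bin/sh')
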
diff --git a/meta/classes/module.bbclass b/meta/classes/module.bbclass
index 68e3d341a3..95d42da9e7 100644
--- a/meta/classes/module.bbclass
+++ b/meta/classes/module.bbclass
@@ -9,7 +9,7 @@ EXTRA_OEMAKE += "KERNEL_SRC=${STAGING_KERNEL_DIR}"
9MODULES_INSTALL_TARGET ?= "modules_install" 9MODULES_INSTALL_TARGET ?= "modules_install"
10 10
11python __anonymous () { 11python __anonymous () {
12 depends = d.getVar('DEPENDS', True) 12 depends = d.getVar('DEPENDS')
13 extra_symbols = [] 13 extra_symbols = []
14 for dep in depends.split(): 14 for dep in depends.split():
15 if dep.startswith("kernel-module-"): 15 if dep.startswith("kernel-module-"):
diff --git a/meta/classes/multilib.bbclass b/meta/classes/multilib.bbclass
index d5a31287a8..498f7f5735 100644
--- a/meta/classes/multilib.bbclass
+++ b/meta/classes/multilib.bbclass
@@ -1,20 +1,20 @@
1python multilib_virtclass_handler () { 1python multilib_virtclass_handler () {
2 cls = e.data.getVar("BBEXTENDCURR", True) 2 cls = e.data.getVar("BBEXTENDCURR")
3 variant = e.data.getVar("BBEXTENDVARIANT", True) 3 variant = e.data.getVar("BBEXTENDVARIANT")
4 if cls != "multilib" or not variant: 4 if cls != "multilib" or not variant:
5 return 5 return
6 6
7 e.data.setVar('STAGING_KERNEL_DIR', e.data.getVar('STAGING_KERNEL_DIR', True)) 7 e.data.setVar('STAGING_KERNEL_DIR', e.data.getVar('STAGING_KERNEL_DIR'))
8 8
9 # There should only be one kernel in multilib configs 9 # There should only be one kernel in multilib configs
10 # We also skip multilib setup for module packages. 10 # We also skip multilib setup for module packages.
11 provides = (e.data.getVar("PROVIDES", True) or "").split() 11 provides = (e.data.getVar("PROVIDES") or "").split()
12 if "virtual/kernel" in provides or bb.data.inherits_class('module-base', e.data): 12 if "virtual/kernel" in provides or bb.data.inherits_class('module-base', e.data):
13 raise bb.parse.SkipPackage("We shouldn't have multilib variants for the kernel") 13 raise bb.parse.SkipPackage("We shouldn't have multilib variants for the kernel")
14 14
15 save_var_name=e.data.getVar("MULTILIB_SAVE_VARNAME", True) or "" 15 save_var_name=e.data.getVar("MULTILIB_SAVE_VARNAME") or ""
16 for name in save_var_name.split(): 16 for name in save_var_name.split():
17 val=e.data.getVar(name, True) 17 val=e.data.getVar(name)
18 if val: 18 if val:
19 e.data.setVar(name + "_MULTILIB_ORIGINAL", val) 19 e.data.setVar(name + "_MULTILIB_ORIGINAL", val)
20 20
@@ -26,7 +26,7 @@ python multilib_virtclass_handler () {
26 if bb.data.inherits_class('image', e.data): 26 if bb.data.inherits_class('image', e.data):
27 e.data.setVar("MLPREFIX", variant + "-") 27 e.data.setVar("MLPREFIX", variant + "-")
28 e.data.setVar("PN", variant + "-" + e.data.getVar("PN", False)) 28 e.data.setVar("PN", variant + "-" + e.data.getVar("PN", False))
29 e.data.setVar('SDKTARGETSYSROOT', e.data.getVar('SDKTARGETSYSROOT', True)) 29 e.data.setVar('SDKTARGETSYSROOT', e.data.getVar('SDKTARGETSYSROOT'))
30 target_vendor = e.data.getVar("TARGET_VENDOR_" + "virtclass-multilib-" + variant, False) 30 target_vendor = e.data.getVar("TARGET_VENDOR_" + "virtclass-multilib-" + variant, False)
31 if target_vendor: 31 if target_vendor:
     e.data.setVar("TARGET_VENDOR", target_vendor)
@@ -50,7 +50,7 @@ python multilib_virtclass_handler () {
 
 
     # Expand this since this won't work correctly once we set a multilib into place
-    e.data.setVar("ALL_MULTILIB_PACKAGE_ARCHS", e.data.getVar("ALL_MULTILIB_PACKAGE_ARCHS", True))
+    e.data.setVar("ALL_MULTILIB_PACKAGE_ARCHS", e.data.getVar("ALL_MULTILIB_PACKAGE_ARCHS"))
 
     override = ":virtclass-multilib-" + variant
 
@@ -60,7 +60,7 @@ python multilib_virtclass_handler () {
 
     # Expand the WHITELISTs with multilib prefix
     for whitelist in ["WHITELIST_GPL-3.0", "LGPLv2_WHITELIST_GPL-3.0"]:
-        pkgs = e.data.getVar(whitelist, True)
+        pkgs = e.data.getVar(whitelist)
         for pkg in pkgs.split():
             pkgs += " " + variant + "-" + pkg
         e.data.setVar(whitelist, pkgs)
@@ -78,7 +78,7 @@ multilib_virtclass_handler[eventmask] = "bb.event.RecipePreFinalise"
 STAGINGCC_prepend = "${BBEXTENDVARIANT}-"
 
 python __anonymous () {
-    variant = d.getVar("BBEXTENDVARIANT", True)
+    variant = d.getVar("BBEXTENDVARIANT")
 
     import oe.classextend
 
@@ -88,7 +88,7 @@ python __anonymous () {
     clsextend.map_depends_variable("PACKAGE_INSTALL")
     clsextend.map_depends_variable("LINGUAS_INSTALL")
     clsextend.map_depends_variable("RDEPENDS")
-    pinstall = d.getVar("LINGUAS_INSTALL", True) + " " + d.getVar("PACKAGE_INSTALL", True)
+    pinstall = d.getVar("LINGUAS_INSTALL") + " " + d.getVar("PACKAGE_INSTALL")
     d.setVar("PACKAGE_INSTALL", pinstall)
     d.setVar("LINGUAS_INSTALL", "")
     # FIXME, we need to map this to something, not delete it!
@@ -104,7 +104,7 @@ python __anonymous () {
         return
 
     clsextend.rename_packages()
-    clsextend.rename_package_variables((d.getVar("PACKAGEVARS", True) or "").split())
+    clsextend.rename_package_variables((d.getVar("PACKAGEVARS") or "").split())
 
     clsextend.map_packagevars()
     clsextend.map_regexp_variable("PACKAGES_DYNAMIC")
@@ -119,7 +119,7 @@ PACKAGEFUNCS_append = " do_package_qa_multilib"
 python do_package_qa_multilib() {
 
     def check_mlprefix(pkg, var, mlprefix):
-        values = bb.utils.explode_deps(d.getVar('%s_%s' % (var, pkg), True) or d.getVar(var, True) or "")
+        values = bb.utils.explode_deps(d.getVar('%s_%s' % (var, pkg), True) or d.getVar(var) or "")
         candidates = []
         for i in values:
             if i.startswith('virtual/'):
@@ -130,14 +130,14 @@ python do_package_qa_multilib() {
                 candidates.append(i)
         if len(candidates) > 0:
             msg = "%s package %s - suspicious values '%s' in %s" \
-                  % (d.getVar('PN', True), pkg, ' '.join(candidates), var)
+                  % (d.getVar('PN'), pkg, ' '.join(candidates), var)
             package_qa_handle_error("multilib", msg, d)
 
-    ml = d.getVar('MLPREFIX', True)
+    ml = d.getVar('MLPREFIX')
     if not ml:
         return
 
-    packages = d.getVar('PACKAGES', True)
+    packages = d.getVar('PACKAGES')
     for pkg in packages.split():
         check_mlprefix(pkg, 'RDEPENDS', ml)
         check_mlprefix(pkg, 'RPROVIDES', ml)
diff --git a/meta/classes/multilib_global.bbclass b/meta/classes/multilib_global.bbclass
index 11ae2681f0..aba0371648 100644
--- a/meta/classes/multilib_global.bbclass
+++ b/meta/classes/multilib_global.bbclass
@@ -1,7 +1,7 @@
 def preferred_ml_updates(d):
     # If any PREFERRED_PROVIDER or PREFERRED_VERSION are set,
     # we need to mirror these variables in the multilib case;
-    multilibs = d.getVar('MULTILIBS', True) or ""
+    multilibs = d.getVar('MULTILIBS') or ""
     if not multilibs:
         return
 
@@ -102,7 +102,7 @@ def preferred_ml_updates(d):
         prov = prov.replace("virtual/", "")
         return "virtual/" + prefix + "-" + prov
 
-    mp = (d.getVar("MULTI_PROVIDER_WHITELIST", True) or "").split()
+    mp = (d.getVar("MULTI_PROVIDER_WHITELIST") or "").split()
     extramp = []
     for p in mp:
         if p.endswith("-native") or "-crosssdk-" in p or p.startswith(("nativesdk-", "virtual/nativesdk-")) or 'cross-canadian' in p:
@@ -111,14 +111,14 @@ def preferred_ml_updates(d):
             extramp.append(translate_provide(pref, p))
     d.setVar("MULTI_PROVIDER_WHITELIST", " ".join(mp + extramp))
 
-    abisafe = (d.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE", True) or "").split()
+    abisafe = (d.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
     extras = []
     for p in prefixes:
         for a in abisafe:
             extras.append(p + "-" + a)
     d.appendVar("SIGGEN_EXCLUDERECIPES_ABISAFE", " " + " ".join(extras))
 
-    siggen_exclude = (d.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", True) or "").split()
+    siggen_exclude = (d.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
     extras = []
     for p in prefixes:
         for a in siggen_exclude:
@@ -128,7 +128,7 @@ def preferred_ml_updates(d):
 
 python multilib_virtclass_handler_vendor () {
     if isinstance(e, bb.event.ConfigParsed):
-        for v in e.data.getVar("MULTILIB_VARIANTS", True).split():
+        for v in e.data.getVar("MULTILIB_VARIANTS").split():
             if e.data.getVar("TARGET_VENDOR_virtclass-multilib-" + v, False) is None:
                 e.data.setVar("TARGET_VENDOR_virtclass-multilib-" + v, e.data.getVar("TARGET_VENDOR", False) + "ml" + v)
         preferred_ml_updates(e.data)
@@ -140,14 +140,14 @@ python multilib_virtclass_handler_global () {
     if not e.data:
         return
 
-    variant = e.data.getVar("BBEXTENDVARIANT", True)
+    variant = e.data.getVar("BBEXTENDVARIANT")
 
     if isinstance(e, bb.event.RecipeParsed) and not variant:
         if bb.data.inherits_class('kernel', e.data) or \
                 bb.data.inherits_class('module-base', e.data) or \
                 (bb.data.inherits_class('allarch', e.data) and\
                 not bb.data.inherits_class('packagegroup', e.data)):
-            variants = (e.data.getVar("MULTILIB_VARIANTS", True) or "").split()
+            variants = (e.data.getVar("MULTILIB_VARIANTS") or "").split()
 
             import oe.classextend
             clsextends = []
@@ -155,21 +155,21 @@ python multilib_virtclass_handler_global () {
                 clsextends.append(oe.classextend.ClassExtender(variant, e.data))
 
             # Process PROVIDES
-            origprovs = provs = e.data.getVar("PROVIDES", True) or ""
+            origprovs = provs = e.data.getVar("PROVIDES") or ""
             for clsextend in clsextends:
                 provs = provs + " " + clsextend.map_variable("PROVIDES", setvar=False)
             e.data.setVar("PROVIDES", provs)
 
             # Process RPROVIDES
-            origrprovs = rprovs = e.data.getVar("RPROVIDES", True) or ""
+            origrprovs = rprovs = e.data.getVar("RPROVIDES") or ""
             for clsextend in clsextends:
                 rprovs = rprovs + " " + clsextend.map_variable("RPROVIDES", setvar=False)
             if rprovs.strip():
                 e.data.setVar("RPROVIDES", rprovs)
 
             # Process RPROVIDES_${PN}...
-            for pkg in (e.data.getVar("PACKAGES", True) or "").split():
-                origrprovs = rprovs = e.data.getVar("RPROVIDES_%s" % pkg, True) or ""
+            for pkg in (e.data.getVar("PACKAGES") or "").split():
+                origrprovs = rprovs = e.data.getVar("RPROVIDES_%s" % pkg) or ""
                 for clsextend in clsextends:
                     rprovs = rprovs + " " + clsextend.map_variable("RPROVIDES_%s" % pkg, setvar=False)
                     rprovs = rprovs + " " + clsextend.extname + "-" + pkg
diff --git a/meta/classes/native.bbclass b/meta/classes/native.bbclass
index 143f8a9147..5592e1d1c1 100644
--- a/meta/classes/native.bbclass
+++ b/meta/classes/native.bbclass
@@ -116,18 +116,18 @@ MACHINEOVERRIDES = ""
 PATH_prepend = "${COREBASE}/scripts/native-intercept:"
 
 python native_virtclass_handler () {
-    classextend = e.data.getVar('BBCLASSEXTEND', True) or ""
+    classextend = e.data.getVar('BBCLASSEXTEND') or ""
     if "native" not in classextend:
         return
 
-    pn = e.data.getVar("PN", True)
+    pn = e.data.getVar("PN")
     if not pn.endswith("-native"):
         return
 
     def map_dependencies(varname, d, suffix = ""):
         if suffix:
             varname = varname + "_" + suffix
-        deps = d.getVar(varname, True)
+        deps = d.getVar(varname)
         if not deps:
             return
         deps = bb.utils.explode_deps(deps)
@@ -146,14 +146,14 @@ python native_virtclass_handler () {
     e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-native")
 
     map_dependencies("DEPENDS", e.data)
-    for pkg in [e.data.getVar("PN", True), "", "${PN}"]:
+    for pkg in [e.data.getVar("PN"), "", "${PN}"]:
         map_dependencies("RDEPENDS", e.data, pkg)
         map_dependencies("RRECOMMENDS", e.data, pkg)
         map_dependencies("RSUGGESTS", e.data, pkg)
         map_dependencies("RPROVIDES", e.data, pkg)
         map_dependencies("RREPLACES", e.data, pkg)
 
-    provides = e.data.getVar("PROVIDES", True)
+    provides = e.data.getVar("PROVIDES")
     nprovides = []
     for prov in provides.split():
         if prov.find(pn) != -1:
diff --git a/meta/classes/nativesdk.bbclass b/meta/classes/nativesdk.bbclass
index 31dde4a90f..2ac8fd82ed 100644
--- a/meta/classes/nativesdk.bbclass
+++ b/meta/classes/nativesdk.bbclass
@@ -64,17 +64,17 @@ export PKG_CONFIG_DIR = "${STAGING_DIR_HOST}${libdir}/pkgconfig"
 export PKG_CONFIG_SYSROOT_DIR = "${STAGING_DIR_HOST}"
 
 python nativesdk_virtclass_handler () {
-    pn = e.data.getVar("PN", True)
+    pn = e.data.getVar("PN")
     if not (pn.endswith("-nativesdk") or pn.startswith("nativesdk-")):
         return
 
     e.data.setVar("MLPREFIX", "nativesdk-")
-    e.data.setVar("PN", "nativesdk-" + e.data.getVar("PN", True).replace("-nativesdk", "").replace("nativesdk-", ""))
+    e.data.setVar("PN", "nativesdk-" + e.data.getVar("PN").replace("-nativesdk", "").replace("nativesdk-", ""))
     e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-nativesdk")
 }
 
 python () {
-    pn = d.getVar("PN", True)
+    pn = d.getVar("PN")
     if not pn.startswith("nativesdk-"):
         return
 
@@ -82,7 +82,7 @@ python () {
 
     clsextend = oe.classextend.NativesdkClassExtender("nativesdk", d)
     clsextend.rename_packages()
-    clsextend.rename_package_variables((d.getVar("PACKAGEVARS", True) or "").split())
+    clsextend.rename_package_variables((d.getVar("PACKAGEVARS") or "").split())
 
     clsextend.map_depends_variable("DEPENDS")
     clsextend.map_packagevars()
diff --git a/meta/classes/npm.bbclass b/meta/classes/npm.bbclass
index 43228c57d3..1c778a7637 100644
--- a/meta/classes/npm.bbclass
+++ b/meta/classes/npm.bbclass
@@ -13,7 +13,7 @@ def npm_oe_arch_map(target_arch, d):
     elif re.match('arm64$', target_arch): return 'arm'
     return target_arch
 
-NPM_ARCH ?= "${@npm_oe_arch_map(d.getVar('TARGET_ARCH', True), d)}"
+NPM_ARCH ?= "${@npm_oe_arch_map(d.getVar('TARGET_ARCH'), d)}"
 
 npm_do_compile() {
     # Copy in any additionally fetched modules
@@ -59,7 +59,7 @@ python populate_packages_prepend () {
         description = pdata.get('description', None)
         if description:
             d.setVar('SUMMARY_%s' % expanded_pkgname, description.replace(u"\u2018", "'").replace(u"\u2019", "'"))
-    d.appendVar('RDEPENDS_%s' % d.getVar('PN', True), ' %s' % ' '.join(pkgnames).replace('_', '-'))
+    d.appendVar('RDEPENDS_%s' % d.getVar('PN'), ' %s' % ' '.join(pkgnames).replace('_', '-'))
 }
 
 FILES_${PN} += " \
diff --git a/meta/classes/oelint.bbclass b/meta/classes/oelint.bbclass
index c4febc2cfa..2589d34059 100644
--- a/meta/classes/oelint.bbclass
+++ b/meta/classes/oelint.bbclass
@@ -1,7 +1,7 @@
 addtask lint before do_build
 do_lint[nostamp] = "1"
 python do_lint() {
-    pkgname = d.getVar("PN", True)
+    pkgname = d.getVar("PN")
 
     ##############################
     # Test that DESCRIPTION exists
@@ -35,7 +35,7 @@ python do_lint() {
     # Check that all patches have Signed-off-by and Upstream-Status
     #
     srcuri = d.getVar("SRC_URI", False).split()
-    fpaths = (d.getVar('FILESPATH', True) or '').split(':')
+    fpaths = (d.getVar('FILESPATH') or '').split(':')
 
     def findPatch(patchname):
         for dir in fpaths:
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass
index a6f0a7a63d..27034653b8 100644
--- a/meta/classes/package.bbclass
+++ b/meta/classes/package.bbclass
@@ -120,7 +120,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
 
     """
 
-    dvar = d.getVar('PKGD', True)
+    dvar = d.getVar('PKGD')
     root = d.expand(root)
     output_pattern = d.expand(output_pattern)
     extra_depends = d.expand(extra_depends)
@@ -130,7 +130,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
     if not os.path.exists(dvar + root):
         return []
 
-    ml = d.getVar("MLPREFIX", True)
+    ml = d.getVar("MLPREFIX")
     if ml:
         if not output_pattern.startswith(ml):
             output_pattern = ml + output_pattern
@@ -145,7 +145,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
         extra_depends = " ".join(newdeps)
 
 
-    packages = d.getVar('PACKAGES', True).split()
+    packages = d.getVar('PACKAGES').split()
     split_packages = set()
 
     if postinst:
@@ -163,7 +163,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
             objs.append(relpath)
 
     if extra_depends == None:
-        extra_depends = d.getVar("PN", True)
+        extra_depends = d.getVar("PN")
 
     if not summary:
         summary = description
@@ -189,7 +189,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
                 packages = [pkg] + packages
             else:
                 packages.append(pkg)
-        oldfiles = d.getVar('FILES_' + pkg, True)
+        oldfiles = d.getVar('FILES_' + pkg)
         newfile = os.path.join(root, o)
         # These names will be passed through glob() so if the filename actually
         # contains * or ? (rare, but possible) we need to handle that specially
@@ -214,9 +214,9 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
             d.setVar('FILES_' + pkg, oldfiles + " " + newfile)
         if extra_depends != '':
             d.appendVar('RDEPENDS_' + pkg, ' ' + extra_depends)
-        if not d.getVar('DESCRIPTION_' + pkg, True):
+        if not d.getVar('DESCRIPTION_' + pkg):
             d.setVar('DESCRIPTION_' + pkg, description % on)
-        if not d.getVar('SUMMARY_' + pkg, True):
+        if not d.getVar('SUMMARY_' + pkg):
             d.setVar('SUMMARY_' + pkg, summary % on)
         if postinst:
             d.setVar('pkg_postinst_' + pkg, postinst)
@@ -231,9 +231,9 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
 PACKAGE_DEPENDS += "file-native"
 
 python () {
-    if d.getVar('PACKAGES', True) != '':
+    if d.getVar('PACKAGES') != '':
         deps = ""
-        for dep in (d.getVar('PACKAGE_DEPENDS', True) or "").split():
+        for dep in (d.getVar('PACKAGE_DEPENDS') or "").split():
             deps += " %s:do_populate_sysroot" % dep
         d.appendVarFlag('do_package', 'depends', deps)
 
@@ -286,14 +286,14 @@ def files_from_filevars(filevars):
 
 # Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files
 def get_conffiles(pkg, d):
-    pkgdest = d.getVar('PKGDEST', True)
+    pkgdest = d.getVar('PKGDEST')
     root = os.path.join(pkgdest, pkg)
     cwd = os.getcwd()
     os.chdir(root)
 
-    conffiles = d.getVar('CONFFILES_%s' % pkg, True);
+    conffiles = d.getVar('CONFFILES_%s' % pkg);
     if conffiles == None:
-        conffiles = d.getVar('CONFFILES', True)
+        conffiles = d.getVar('CONFFILES')
     if conffiles == None:
         conffiles = ""
     conffiles = conffiles.split()
@@ -318,7 +318,7 @@ def get_conffiles(pkg, d):
     return conf_list
 
 def checkbuildpath(file, d):
-    tmpdir = d.getVar('TMPDIR', True)
+    tmpdir = d.getVar('TMPDIR')
     with open(file) as f:
         file_content = f.read()
         if tmpdir in file_content:
@@ -335,8 +335,8 @@ def splitdebuginfo(file, debugfile, debugsrcdir, sourcefile, d):
 
     import stat
 
-    dvar = d.getVar('PKGD', True)
-    objcopy = d.getVar("OBJCOPY", True)
+    dvar = d.getVar('PKGD')
+    objcopy = d.getVar("OBJCOPY")
     debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit")
 
     # We ignore kernel modules, we don't generate debug info files.
@@ -382,11 +382,11 @@ def copydebugsources(debugsrcdir, d):
 
     sourcefile = d.expand("${WORKDIR}/debugsources.list")
     if debugsrcdir and os.path.isfile(sourcefile):
-        dvar = d.getVar('PKGD', True)
-        strip = d.getVar("STRIP", True)
-        objcopy = d.getVar("OBJCOPY", True)
+        dvar = d.getVar('PKGD')
+        strip = d.getVar("STRIP")
+        objcopy = d.getVar("OBJCOPY")
         debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit")
-        workdir = d.getVar("WORKDIR", True)
+        workdir = d.getVar("WORKDIR")
         workparentdir = os.path.dirname(os.path.dirname(workdir))
         workbasedir = os.path.basename(os.path.dirname(workdir)) + "/" + os.path.basename(workdir)
 
@@ -468,20 +468,20 @@ def get_package_additional_metadata (pkg_type, d):
     return "\n".join(metadata_fields).strip()
 
 def runtime_mapping_rename (varname, pkg, d):
-    #bb.note("%s before: %s" % (varname, d.getVar(varname, True)))
+    #bb.note("%s before: %s" % (varname, d.getVar(varname)))
 
     if bb.data.inherits_class('packagegroup', d):
         return
 
     new_depends = {}
-    deps = bb.utils.explode_dep_versions2(d.getVar(varname, True) or "")
+    deps = bb.utils.explode_dep_versions2(d.getVar(varname) or "")
     for depend in deps:
         new_depend = get_package_mapping(depend, pkg, d)
         new_depends[new_depend] = deps[depend]
 
     d.setVar(varname, bb.utils.join_deps(new_depends, commasep=False))
 
-    #bb.note("%s after: %s" % (varname, d.getVar(varname, True)))
+    #bb.note("%s after: %s" % (varname, d.getVar(varname)))
 
 #
 # Package functions suitable for inclusion in PACKAGEFUNCS
@@ -492,34 +492,34 @@ python package_get_auto_pr() {
     import re
 
     # Support per recipe PRSERV_HOST
-    pn = d.getVar('PN', True)
-    host = d.getVar("PRSERV_HOST_" + pn, True)
+    pn = d.getVar('PN')
+    host = d.getVar("PRSERV_HOST_" + pn)
     if not (host is None):
         d.setVar("PRSERV_HOST", host)
 
-    pkgv = d.getVar("PKGV", True)
+    pkgv = d.getVar("PKGV")
 
     # PR Server not active, handle AUTOINC
-    if not d.getVar('PRSERV_HOST', True):
+    if not d.getVar('PRSERV_HOST'):
         if 'AUTOINC' in pkgv:
             d.setVar("PKGV", pkgv.replace("AUTOINC", "0"))
         return
 
     auto_pr = None
-    pv = d.getVar("PV", True)
-    version = d.getVar("PRAUTOINX", True)
-    pkgarch = d.getVar("PACKAGE_ARCH", True)
-    checksum = d.getVar("BB_TASKHASH", True)
+    pv = d.getVar("PV")
+    version = d.getVar("PRAUTOINX")
+    pkgarch = d.getVar("PACKAGE_ARCH")
+    checksum = d.getVar("BB_TASKHASH")
 
-    if d.getVar('PRSERV_LOCKDOWN', True):
-        auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch, True) or d.getVar('PRAUTO_' + version, True) or None
+    if d.getVar('PRSERV_LOCKDOWN'):
+        auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch) or d.getVar('PRAUTO_' + version) or None
         if auto_pr is None:
             bb.fatal("Can NOT get PRAUTO from lockdown exported file")
         d.setVar('PRAUTO',str(auto_pr))
         return
 
     try:
-        conn = d.getVar("__PRSERV_CONN", True)
+        conn = d.getVar("__PRSERV_CONN")
         if conn is None:
             conn = oe.prservice.prserv_make_conn(d)
             if conn is not None:
@@ -540,19 +540,19 @@ python package_get_auto_pr() {
 LOCALEBASEPN ??= "${PN}"
 
 python package_do_split_locales() {
-    if (d.getVar('PACKAGE_NO_LOCALE', True) == '1'):
+    if (d.getVar('PACKAGE_NO_LOCALE') == '1'):
         bb.debug(1, "package requested not splitting locales")
         return
 
-    packages = (d.getVar('PACKAGES', True) or "").split()
+    packages = (d.getVar('PACKAGES') or "").split()
 
-    datadir = d.getVar('datadir', True)
+    datadir = d.getVar('datadir')
     if not datadir:
         bb.note("datadir not defined")
         return
 
-    dvar = d.getVar('PKGD', True)
-    pn = d.getVar('LOCALEBASEPN', True)
+    dvar = d.getVar('PKGD')
+    pn = d.getVar('LOCALEBASEPN')
 
     if pn + '-locale' in packages:
         packages.remove(pn + '-locale')
@@ -565,10 +565,10 @@ python package_do_split_locales() {
 
     locales = os.listdir(localedir)
 
-    summary = d.getVar('SUMMARY', True) or pn
-    description = d.getVar('DESCRIPTION', True) or ""
-    locale_section = d.getVar('LOCALE_SECTION', True)
-    mlprefix = d.getVar('MLPREFIX', True) or ""
+    summary = d.getVar('SUMMARY') or pn
+    description = d.getVar('DESCRIPTION') or ""
+    locale_section = d.getVar('LOCALE_SECTION')
+    mlprefix = d.getVar('MLPREFIX') or ""
     for l in sorted(locales):
         ln = legitimize_package_name(l)
         pkg = pn + '-locale-' + ln
@@ -589,14 +589,14 @@ python package_do_split_locales() {
     # glibc-localedata-translit* won't install as a dependency
    # for some other package which breaks meta-toolchain
     # Probably breaks since virtual-locale- isn't provided anywhere
-    #rdep = (d.getVar('RDEPENDS_%s' % pn, True) or "").split()
+    #rdep = (d.getVar('RDEPENDS_%s' % pn) or "").split()
     #rdep.append('%s-locale*' % pn)
     #d.setVar('RDEPENDS_%s' % pn, ' '.join(rdep))
 }
 
 python perform_packagecopy () {
-    dest = d.getVar('D', True)
-    dvar = d.getVar('PKGD', True)
+    dest = d.getVar('D')
+    dvar = d.getVar('PKGD')
 
     # Start by package population by taking a copy of the installed
     # files to operate on
@@ -730,8 +730,8 @@ python fixup_perms () {
     # paths are resolved via BBPATH
     def get_fs_perms_list(d):
         str = ""
-        bbpath = d.getVar('BBPATH', True)
-        fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES', True)
+        bbpath = d.getVar('BBPATH')
+        fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES')
         if not fs_perms_tables:
             fs_perms_tables = 'files/fs-perms.txt'
         for conf_file in fs_perms_tables.split():
@@ -740,7 +740,7 @@ python fixup_perms () {
 
 
 
-    dvar = d.getVar('PKGD', True)
+    dvar = d.getVar('PKGD')
 
     fs_perms_table = {}
     fs_link_table = {}
@@ -769,7 +769,7 @@ python fixup_perms () {
                 'oldincludedir' ]
 
     for path in target_path_vars:
-        dir = d.getVar(path, True) or ""
+        dir = d.getVar(path) or ""
        if dir == "":
             continue
         fs_perms_table[dir] = fs_perms_entry(bb.data.expand("%s 0755 root root false - - -" % (dir), d))
@@ -854,20 +854,20 @@ python fixup_perms () {
 python split_and_strip_files () {
     import stat, errno
 
-    dvar = d.getVar('PKGD', True)
-    pn = d.getVar('PN', True)
+    dvar = d.getVar('PKGD')
+    pn = d.getVar('PN')
 
     oldcwd = os.getcwd()
     os.chdir(dvar)
 
     # We default to '.debug' style
-    if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-file-directory':
+    if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory':
         # Single debug-file-directory style debug info
         debugappend = ".debug"
         debugdir = ""
         debuglibdir = "/usr/lib/debug"
         debugsrcdir = "/usr/src/debug"
-    elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-without-src':
+    elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-without-src':
         # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
         debugappend = ""
         debugdir = "/.debug"
@@ -918,10 +918,10 @@ python split_and_strip_files () {
     symlinks = {}
     kernmods = []
     inodes = {}
-    libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir", True))
-    baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir", True))
-    if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1' or \
-            d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1'):
+    libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
+    baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir"))
+    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \
+            d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
         for root, dirs, files in cpath.walk(dvar):
             for f in files:
                 file = os.path.join(root, f)
@@ -962,7 +962,7 @@ python split_and_strip_files () {
                 elf_file = isELF(file)
                 if elf_file & 1:
                     if elf_file & 2:
-                        if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn, True) or "").split():
+                        if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn) or "").split():
                             bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
                         else:
                             msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
@@ -991,7 +991,7 @@ python split_and_strip_files () {
     #
     # First lets process debug splitting
     #
-    if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1'):
+    if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
         for file in elffiles:
             src = file[len(dvar):]
             dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
@@ -1054,8 +1054,8 @@ python split_and_strip_files () {
     #
     # Now lets go back over things and strip them
     #
-    if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
-        strip = d.getVar("STRIP", True)
+    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'):
+        strip = d.getVar("STRIP")
         sfiles = []
         for file in elffiles:
             elf_file = int(elffiles[file])
@@ -1075,16 +1075,16 @@ python split_and_strip_files () {
 python populate_packages () {
     import glob, re
 
-    workdir = d.getVar('WORKDIR', True)
-    outdir = d.getVar('DEPLOY_DIR', True)
-    dvar = d.getVar('PKGD', True)
-    packages = d.getVar('PACKAGES', True)
-    pn = d.getVar('PN', True)
+    workdir = d.getVar('WORKDIR')
+    outdir = d.getVar('DEPLOY_DIR')
+    dvar = d.getVar('PKGD')
+    packages = d.getVar('PACKAGES')
+    pn = d.getVar('PN')
 
     bb.utils.mkdirhier(outdir)
     os.chdir(dvar)
 
-    autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG", True) or False)
+    autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False)
 
     # Sanity check PACKAGES for duplicates
     # Sanity should be moved to sanity.bbclass once we have the infrastucture
@@ -1099,7 +1099,7 @@ python populate_packages () {
         else:
             package_list.append(pkg)
     d.setVar('PACKAGES', ' '.join(package_list))
-    pkgdest = d.getVar('PKGDEST', True)
+    pkgdest = d.getVar('PKGDEST')
 
     seen = []
 
@@ -1120,7 +1120,7 @@ python populate_packages () {
         root = os.path.join(pkgdest, pkg)
         bb.utils.mkdirhier(root)
 
-        filesvar = d.getVar('FILES_%s' % pkg, True) or ""
+        filesvar = d.getVar('FILES_%s' % pkg) or ""
         if "//" in filesvar:
             msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
             package_qa_handle_error("files-invalid", msg, d)
@@ -1188,7 +1188,7 @@ python populate_packages () {
     # Handle LICENSE_EXCLUSION
     package_list = []
     for pkg in packages.split():
-        if d.getVar('LICENSE_EXCLUSION-' + pkg, True):
+        if d.getVar('LICENSE_EXCLUSION-' + pkg):
             msg = "%s has an incompatible license. Excluding from packaging." % pkg
             package_qa_handle_error("incompatible-license", msg, d)
         else:
@@ -1207,7 +1207,7 @@ python populate_packages () {
 
     if unshipped != []:
         msg = pn + ": Files/directories were installed but not shipped in any package:"
-        if "installed-vs-shipped" in (d.getVar('INSANE_SKIP_' + pn, True) or "").split():
+        if "installed-vs-shipped" in (d.getVar('INSANE_SKIP_' + pn) or "").split():
             bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
         else:
             for f in unshipped:
@@ -1220,7 +1220,7 @@ populate_packages[dirs] = "${D}"
 
 python package_fixsymlinks () {
     import errno
-    pkgdest = d.getVar('PKGDEST', True)
+    pkgdest = d.getVar('PKGDEST')
     packages = d.getVar("PACKAGES", False).split()
 
     dangling_links = {}
@@ -1255,7 +1255,7 @@ python package_fixsymlinks () {
             bb.note("%s contains dangling symlink to %s" % (pkg, l))
 
     for pkg in newrdepends:
-        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "")
+        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg) or "")
         for p in newrdepends[pkg]:
             if p not in rdepends:
                 rdepends[p] = []
@@ -1309,9 +1309,9 @@ python emit_pkgdata() {
         with open(subdata_file, 'w') as fd:
             fd.write("PKG_%s: %s" % (ml_pkg, pkg))
 
-    packages = d.getVar('PACKAGES', True)
-    pkgdest = d.getVar('PKGDEST', True)
-    pkgdatadir = d.getVar('PKGDESTWORK', True)
+    packages = d.getVar('PACKAGES')
+    pkgdest = d.getVar('PKGDEST')
+    pkgdatadir = d.getVar('PKGDESTWORK')
 
     # Take shared lock since we're only reading, not writing
     lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"), True)
@@ -1321,9 +1321,9 @@ python emit_pkgdata() {
     f.write("PACKAGES: %s\n" % packages)
     f.close()
 
-    pn = d.getVar('PN', True)
-    global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS', True) or "").split()
-    variants = (d.getVar('MULTILIB_VARIANTS', True) or "").split()
+    pn = d.getVar('PN')
+    global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS') or "").split()
+    variants = (d.getVar('MULTILIB_VARIANTS') or "").split()
 
     if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
         write_extra_pkgs(variants, pn, packages, pkgdatadir)
@@ -1331,10 +1331,10 @@ python emit_pkgdata() {
     if (bb.data.inherits_class('allarch', d) and not bb.data.inherits_class('packagegroup', d)):
         write_extra_pkgs(global_variants, pn, packages, pkgdatadir)
 
-    workdir = d.getVar('WORKDIR', True)
+    workdir = d.getVar('WORKDIR')
 
     for pkg in packages.split():
-        pkgval = d.getVar('PKG_%s' % pkg, True)
+        pkgval = d.getVar('PKG_%s' % pkg)
         if pkgval is None:
             pkgval = pkg
             d.setVar('PKG_%s' % pkg, pkg)
@@ -1377,11 +1377,11 @@ python emit_pkgdata() {
         write_if_exists(sf, pkg, 'pkg_prerm')
         write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
         write_if_exists(sf, pkg, 'FILES_INFO')
-        for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg, True) or "").split():
+        for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg) or "").split():
             write_if_exists(sf, pkg, 'FILERPROVIDES_' + dfile)
 
         write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
-        for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg, True) or "").split():
+        for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg) or "").split():
             write_if_exists(sf, pkg, 'FILERDEPENDS_' + dfile)
 
         sf.write('%s_%s: %d\n' % ('PKGSIZE', pkg, total_size))
@@ -1394,9 +1394,9 @@ python emit_pkgdata() {
         bb.utils.mkdirhier(os.path.dirname(subdata_sym))
         oe.path.symlink("../../runtime/%s" % pkg, subdata_sym, True)
 
-        allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg, True)
+        allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg)
         if not allow_empty:
-            allow_empty = d.getVar('ALLOW_EMPTY', True)
+            allow_empty = d.getVar('ALLOW_EMPTY')
         root = "%s/%s" % (pkgdest, pkg)
         os.chdir(root)
         g = glob('*')
@@ -1435,19 +1435,19 @@ RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/bin/rpmdeps-oecore --macros ${STAGING_LI
 # FILERDEPENDS_filepath_pkg - per file dep
 
 python package_do_filedeps() {
-    if d.getVar('SKIP_FILEDEPS', True) == '1':
+    if d.getVar('SKIP_FILEDEPS') == '1':
         return
 
-    pkgdest = d.getVar('PKGDEST', True)
-    packages = d.getVar('PACKAGES', True)
-    rpmdeps = d.getVar('RPMDEPS', True)
+    pkgdest = d.getVar('PKGDEST')
+    packages = d.getVar('PACKAGES')
+    rpmdeps = d.getVar('RPMDEPS')
 
     def chunks(files, n):
         return [files[i:i+n] for i in range(0, len(files), n)]
 
     pkglist = []
     for pkg in packages.split():
-        if d.getVar('SKIP_FILEDEPS_' + pkg, True) == '1':
+        if d.getVar('SKIP_FILEDEPS_' + pkg) == '1':
            continue
         if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-'):
             continue
@@ -1496,22 +1496,22 @@ python package_do_shlibs() {
         return
 
     lib_re = re.compile("^.*\.so")
-    libdir_re = re.compile(".*/%s$" % d.getVar('baselib', True))
+    libdir_re = re.compile(".*/%s$" % d.getVar('baselib'))
 
-    packages = d.getVar('PACKAGES', True)
-    targetos = d.getVar('TARGET_OS', True)
+    packages = d.getVar('PACKAGES')
+    targetos = d.getVar('TARGET_OS')
 
-    workdir = d.getVar('WORKDIR', True)
+    workdir = d.getVar('WORKDIR')
 
-    ver = d.getVar('PKGV', True)
+    ver = d.getVar('PKGV')
     if not ver:
         msg = "PKGV not defined"
         package_qa_handle_error("pkgv-undefined", msg, d)
         return
 
-    pkgdest = d.getVar('PKGDEST', True)
+    pkgdest = d.getVar('PKGDEST')
 
-    shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)
+    shlibswork_dir = d.getVar('SHLIBSWORKDIR')
 
     # Take shared lock since we're only reading, not writing
     lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))
@@ -1519,7 +1519,7 @@ python package_do_shlibs() {
     def linux_so(file, needed, sonames, renames, pkgver):
         needs_ldconfig = False
         ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
-        cmd = d.getVar('OBJDUMP', True) + " -p " + pipes.quote(file) + " 2>/dev/null"
+        cmd = d.getVar('OBJDUMP') + " -p " + pipes.quote(file) + " 2>/dev/null"
         fd = os.popen(cmd)
         lines = fd.readlines()
         fd.close()
@@ -1601,12 +1601,12 @@ python package_do_shlibs() {
             if name and name not in needed[pkg]:
                 needed[pkg].append((name, file, []))
 
-    if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS', True) == "1":
+    if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS') == "1":
         snap_symlinks = True
     else:
         snap_symlinks = False
 
-    if (d.getVar('USE_LDCONFIG', True) or "1") == "1":
+    if (d.getVar('USE_LDCONFIG') or "1") == "1":
         use_ldconfig = True
     else:
         use_ldconfig = False
@@ -1615,14 +1615,14 @@ python package_do_shlibs() {
     shlib_provider = oe.package.read_shlib_providers(d)
 
     for pkg in packages.split():
-        private_libs = d.getVar('PRIVATE_LIBS_' + pkg, True) or d.getVar('PRIVATE_LIBS', True) or ""
+        private_libs = d.getVar('PRIVATE_LIBS_' + pkg) or d.getVar('PRIVATE_LIBS') or ""
         private_libs = private_libs.split()
         needs_ldconfig = False
         bb.debug(2, "calculating shlib provides for %s" % pkg)
 
-        pkgver = d.getVar('PKGV_' + pkg, True)
+        pkgver = d.getVar('PKGV_' + pkg)
         if not pkgver:
-            pkgver = d.getVar('PV_' + pkg, True)
+            pkgver = d.getVar('PV_' + pkg)
         if not pkgver:
             pkgver = ver
 
@@ -1659,18 +1659,18 @@ python package_do_shlibs() {
             fd.close()
         if needs_ldconfig and use_ldconfig:
             bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
-            postinst = d.getVar('pkg_postinst_%s' % pkg, True)
+            postinst = d.getVar('pkg_postinst_%s' % pkg)
             if not postinst:
                 postinst = '#!/bin/sh\n'
-            postinst += d.getVar('ldconfig_postinst_fragment', True)
+            postinst += d.getVar('ldconfig_postinst_fragment')
             d.setVar('pkg_postinst_%s' % pkg, postinst)
         bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))
 
     bb.utils.unlockfile(lf)
 
-    assumed_libs = d.getVar('ASSUME_SHLIBS', True)
+    assumed_libs = d.getVar('ASSUME_SHLIBS')
     if assumed_libs:
-        libdir = d.getVar("libdir", True)
+        libdir = d.getVar("libdir")
         for e in assumed_libs.split():
             l, dep_pkg = e.split(":")
             lib_ver = None
@@ -1682,7 +1682,7 @@ python package_do_shlibs() {
                 shlib_provider[l] = {}
             shlib_provider[l][libdir] = (dep_pkg, lib_ver)
 
-    libsearchpath = [d.getVar('libdir', True), d.getVar('base_libdir', True)]
+    libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')]
 
     for pkg in packages.split():
         bb.debug(2, "calculating shlib requirements for %s" % pkg)
@@ -1736,12 +1736,12 @@ python package_do_shlibs() {
 python package_do_pkgconfig () {
     import re
 
-    packages = d.getVar('PACKAGES', True)
-    workdir = d.getVar('WORKDIR', True)
-    pkgdest = d.getVar('PKGDEST', True)
+    packages = d.getVar('PACKAGES')
+    workdir = d.getVar('WORKDIR')
+    pkgdest = d.getVar('PKGDEST')
 
-    shlibs_dirs = d.getVar('SHLIBSDIRS', True).split()
-    shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)
+    shlibs_dirs = d.getVar('SHLIBSDIRS').split()
+    shlibswork_dir = d.getVar('SHLIBSWORKDIR')
 
     pc_re = re.compile('(.*)\.pc$')
     var_re = re.compile('(.*)=(.*)')
@@ -1826,7 +1826,7 @@ python package_do_pkgconfig () {
 
 def read_libdep_files(d):
     pkglibdeps = {}
-    packages = d.getVar('PACKAGES', True).split()
+    packages = d.getVar('PACKAGES').split()
     for pkg in packages:
         pkglibdeps[pkg] = {}
         for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
@@ -1846,9 +1846,9 @@ def read_libdep_files(d):
 python read_shlibdeps () {
     pkglibdeps = read_libdep_files(d)
 
-    packages = d.getVar('PACKAGES', True).split()
+    packages = d.getVar('PACKAGES').split()
     for pkg in packages:
-        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "")
+        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg) or "")
         for dep in pkglibdeps[pkg]:
             # Add the dep if it's not already there, or if no comparison is set
             if dep not in rdepends:
@@ -1873,14 +1873,14 @@ python package_depchains() {
     package.
     """
 
-    packages = d.getVar('PACKAGES', True)
-    postfixes = (d.getVar('DEPCHAIN_POST', True) or '').split()
-    prefixes = (d.getVar('DEPCHAIN_PRE', True) or '').split()
+    packages = d.getVar('PACKAGES')
+    postfixes = (d.getVar('DEPCHAIN_POST') or '').split()
+    prefixes = (d.getVar('DEPCHAIN_PRE') or '').split()
 
     def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
 
         #bb.note('depends for %s is %s' % (base, depends))
-        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg, True) or "")
+        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg) or "")
 
         for depend in depends:
             if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
@@ -1901,7 +1901,7 @@ python package_depchains() {
     def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
 
         #bb.note('rdepends for %s is %s' % (base, rdepends))
-        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg, True) or "")
+        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg) or "")
 
         for depend in rdepends:
             if depend.find('virtual-locale-') != -1:
@@ -1924,12 +1924,12 @@ python package_depchains() {
             list.append(dep)
 
     depends = []
-    for dep in bb.utils.explode_deps(d.getVar('DEPENDS', True) or ""):
+    for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""):
         add_dep(depends, dep)
 
     rdepends = []
     for pkg in packages.split():
-        for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg, True) or ""):
+        for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg) or ""):
             add_dep(rdepends, dep)
 
     #bb.note('rdepends is %s' % rdepends)
@@ -1959,7 +1959,7 @@ python package_depchains() {
     for pkg in pkglibdeps:
         for k in pkglibdeps[pkg]:
             add_dep(pkglibdeplist, k)
-    dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS', True) == '1') or (bb.data.inherits_class('packagegroup', d)))
+    dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d)))
 
     for suffix in pkgs:
         for pkg in pkgs[suffix]:
@@ -1976,7 +1976,7 @@ python package_depchains() {
                 pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
             else:
                 rdeps = []
-                for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base, True) or ""):
+                for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base) or ""):
                     add_dep(rdeps, dep)
                 pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
 }
@@ -1987,8 +1987,8 @@ PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDE
 
 def gen_packagevar(d):
     ret = []
-    pkgs = (d.getVar("PACKAGES", True) or "").split()
-    vars = (d.getVar("PACKAGEVARS", True) or "").split()
+    pkgs = (d.getVar("PACKAGES") or "").split()
+    vars = (d.getVar("PACKAGEVARS") or "").split()
     for p in pkgs:
         for v in vars:
             ret.append(v + "_" + p)
@@ -2036,16 +2036,16 @@ python do_package () {
2036 # Sanity test the setup 2036 # Sanity test the setup
2037 ########################################################################### 2037 ###########################################################################
2038 2038
2039 packages = (d.getVar('PACKAGES', True) or "").split() 2039 packages = (d.getVar('PACKAGES') or "").split()
2040 if len(packages) < 1: 2040 if len(packages) < 1:
2041 bb.debug(1, "No packages to build, skipping do_package") 2041 bb.debug(1, "No packages to build, skipping do_package")
2042 return 2042 return
2043 2043
2044 workdir = d.getVar('WORKDIR', True) 2044 workdir = d.getVar('WORKDIR')
2045 outdir = d.getVar('DEPLOY_DIR', True) 2045 outdir = d.getVar('DEPLOY_DIR')
2046 dest = d.getVar('D', True) 2046 dest = d.getVar('D')
2047 dvar = d.getVar('PKGD', True) 2047 dvar = d.getVar('PKGD')
2048 pn = d.getVar('PN', True) 2048 pn = d.getVar('PN')
2049 2049
2050 if not workdir or not outdir or not dest or not dvar or not pn: 2050 if not workdir or not outdir or not dest or not dvar or not pn:
2051 msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package" 2051 msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package"
@@ -2063,7 +2063,7 @@ python do_package () {
2063 # code pre-expands some frequently used variables 2063 # code pre-expands some frequently used variables
2064 2064
2065 def expandVar(x, d): 2065 def expandVar(x, d):
2066 d.setVar(x, d.getVar(x, True)) 2066 d.setVar(x, d.getVar(x))
2067 2067
2068 for x in 'PN', 'PV', 'BPN', 'TARGET_SYS', 'EXTENDPRAUTO': 2068 for x in 'PN', 'PV', 'BPN', 'TARGET_SYS', 'EXTENDPRAUTO':
2069 expandVar(x, d) 2069 expandVar(x, d)
@@ -2072,7 +2072,7 @@ python do_package () {
2072 # Setup PKGD (from D) 2072 # Setup PKGD (from D)
2073 ########################################################################### 2073 ###########################################################################
2074 2074
2075 for f in (d.getVar('PACKAGEBUILDPKGD', True) or '').split(): 2075 for f in (d.getVar('PACKAGEBUILDPKGD') or '').split():
2076 bb.build.exec_func(f, d) 2076 bb.build.exec_func(f, d)
2077 2077
2078 ########################################################################### 2078 ###########################################################################
@@ -2081,7 +2081,7 @@ python do_package () {
2081 2081
2082 cpath = oe.cachedpath.CachedPath() 2082 cpath = oe.cachedpath.CachedPath()
2083 2083
2084 for f in (d.getVar('PACKAGESPLITFUNCS', True) or '').split(): 2084 for f in (d.getVar('PACKAGESPLITFUNCS') or '').split():
2085 bb.build.exec_func(f, d) 2085 bb.build.exec_func(f, d)
2086 2086
2087 ########################################################################### 2087 ###########################################################################
@@ -2091,18 +2091,18 @@ python do_package () {
2091 # Build global list of files in each split package 2091 # Build global list of files in each split package
2092 global pkgfiles 2092 global pkgfiles
2093 pkgfiles = {} 2093 pkgfiles = {}
2094 packages = d.getVar('PACKAGES', True).split() 2094 packages = d.getVar('PACKAGES').split()
2095 pkgdest = d.getVar('PKGDEST', True) 2095 pkgdest = d.getVar('PKGDEST')
2096 for pkg in packages: 2096 for pkg in packages:
2097 pkgfiles[pkg] = [] 2097 pkgfiles[pkg] = []
2098 for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg): 2098 for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg):
2099 for file in files: 2099 for file in files:
2100 pkgfiles[pkg].append(walkroot + os.sep + file) 2100 pkgfiles[pkg].append(walkroot + os.sep + file)
2101 2101
2102 for f in (d.getVar('PACKAGEFUNCS', True) or '').split(): 2102 for f in (d.getVar('PACKAGEFUNCS') or '').split():
2103 bb.build.exec_func(f, d) 2103 bb.build.exec_func(f, d)
2104 2104
2105 qa_sane = d.getVar("QA_SANE", True) 2105 qa_sane = d.getVar("QA_SANE")
2106 if not qa_sane: 2106 if not qa_sane:
2107 bb.fatal("Fatal QA errors found, failing task.") 2107 bb.fatal("Fatal QA errors found, failing task.")
2108} 2108}
@@ -2149,7 +2149,7 @@ def mapping_rename_hook(d):
2149 Rewrite variables to account for package renaming in things 2149 Rewrite variables to account for package renaming in things
2150 like debian.bbclass or manual PKG variable name changes 2150 like debian.bbclass or manual PKG variable name changes
2151 """ 2151 """
2152 pkg = d.getVar("PKG", True) 2152 pkg = d.getVar("PKG")
2153 runtime_mapping_rename("RDEPENDS", pkg, d) 2153 runtime_mapping_rename("RDEPENDS", pkg, d)
2154 runtime_mapping_rename("RRECOMMENDS", pkg, d) 2154 runtime_mapping_rename("RRECOMMENDS", pkg, d)
2155 runtime_mapping_rename("RSUGGESTS", pkg, d) 2155 runtime_mapping_rename("RSUGGESTS", pkg, d)
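
The change above is mechanical and repeats through every file in this commit: d.getVar(x, True) becomes d.getVar(x), because the expand parameter now defaults to True. A minimal sketch of the semantics (a toy datastore for illustration only, not BitBake's actual bb.data_smart implementation):

    # Toy datastore showing why the explicit True became redundant:
    # expansion is simply the default now.
    class MockDataStore:
        def __init__(self):
            self._vals = {}

        def setVar(self, name, value):
            self._vals[name] = value

        def getVar(self, name, expand=True):
            value = self._vals.get(name)
            if expand and isinstance(value, str):
                # Crude one-pass ${VAR} interpolation, for illustration only.
                for key, val in self._vals.items():
                    value = value.replace("${%s}" % key, str(val))
            return value

    d = MockDataStore()
    d.setVar("PN", "busybox")
    d.setVar("WORKDIR", "/build/${PN}")
    # Old and new spellings return the same expanded value.
    assert d.getVar("WORKDIR") == d.getVar("WORKDIR", True) == "/build/busybox"

Callers that genuinely want the unexpanded value keep passing False, which is why calls such as getVar('ALLOW_EMPTY', False) are left untouched throughout the diff.
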
diff --git a/meta/classes/package_deb.bbclass b/meta/classes/package_deb.bbclass
index fb6034cab1..68eca61a26 100644
--- a/meta/classes/package_deb.bbclass
+++ b/meta/classes/package_deb.bbclass
@@ -6,14 +6,14 @@ inherit package
 
 IMAGE_PKGTYPE ?= "deb"
 
-DPKG_ARCH ?= "${@debian_arch_map(d.getVar('TARGET_ARCH', True), d.getVar('TUNE_FEATURES', True))}"
+DPKG_ARCH ?= "${@debian_arch_map(d.getVar('TARGET_ARCH'), d.getVar('TUNE_FEATURES'))}"
 DPKG_ARCH[vardepvalue] = "${DPKG_ARCH}"
 
 PKGWRITEDIRDEB = "${WORKDIR}/deploy-debs"
 
 APTCONF_TARGET = "${WORKDIR}"
 
-APT_ARGS = "${@['', '--no-install-recommends'][d.getVar("NO_RECOMMENDATIONS", True) == "1"]}"
+APT_ARGS = "${@['', '--no-install-recommends'][d.getVar("NO_RECOMMENDATIONS") == "1"]}"
 
 def debian_arch_map(arch, tune):
     tune_features = tune.split()
@@ -56,22 +56,22 @@ python do_package_deb () {
 
     oldcwd = os.getcwd()
 
-    workdir = d.getVar('WORKDIR', True)
+    workdir = d.getVar('WORKDIR')
     if not workdir:
         bb.error("WORKDIR not defined, unable to package")
         return
 
-    outdir = d.getVar('PKGWRITEDIRDEB', True)
+    outdir = d.getVar('PKGWRITEDIRDEB')
     if not outdir:
         bb.error("PKGWRITEDIRDEB not defined, unable to package")
         return
 
-    packages = d.getVar('PACKAGES', True)
+    packages = d.getVar('PACKAGES')
     if not packages:
         bb.debug(1, "PACKAGES not defined, nothing to package")
         return
 
-    tmpdir = d.getVar('TMPDIR', True)
+    tmpdir = d.getVar('TMPDIR')
 
     if os.access(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"),os.R_OK):
         os.unlink(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"))
@@ -80,7 +80,7 @@ python do_package_deb () {
         bb.debug(1, "No packages; nothing to do")
         return
 
-    pkgdest = d.getVar('PKGDEST', True)
+    pkgdest = d.getVar('PKGDEST')
 
     def cleanupcontrol(root):
         for p in ['CONTROL', 'DEBIAN']:
@@ -96,7 +96,7 @@ python do_package_deb () {
 
         localdata.setVar('ROOT', '')
         localdata.setVar('ROOT_%s' % pkg, root)
-        pkgname = localdata.getVar('PKG_%s' % pkg, True)
+        pkgname = localdata.getVar('PKG_%s' % pkg)
         if not pkgname:
             pkgname = pkg
         localdata.setVar('PKG', pkgname)
@@ -106,7 +106,7 @@ python do_package_deb () {
         bb.data.update_data(localdata)
         basedir = os.path.join(os.path.dirname(root))
 
-        pkgoutdir = os.path.join(outdir, localdata.getVar('PACKAGE_ARCH', True))
+        pkgoutdir = os.path.join(outdir, localdata.getVar('PACKAGE_ARCH'))
         bb.utils.mkdirhier(pkgoutdir)
 
         os.chdir(root)
@@ -114,7 +114,7 @@ python do_package_deb () {
         from glob import glob
         g = glob('*')
         if not g and localdata.getVar('ALLOW_EMPTY', False) != "1":
-            bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', True), localdata.getVar('PKGR', True)))
+            bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV'), localdata.getVar('PKGR')))
             bb.utils.unlockfile(lf)
             continue
 
@@ -129,7 +129,7 @@ python do_package_deb () {
             bb.fatal("unable to open control file for writing")
 
         fields = []
-        pe = d.getVar('PKGE', True)
+        pe = d.getVar('PKGE')
         if pe and int(pe) > 0:
             fields.append(["Version: %s:%s-%s\n", ['PKGE', 'PKGV', 'PKGR']])
         else:
@@ -141,7 +141,7 @@ python do_package_deb () {
         fields.append(["Architecture: %s\n", ['DPKG_ARCH']])
         fields.append(["OE: %s\n", ['PN']])
         fields.append(["PackageArch: %s\n", ['PACKAGE_ARCH']])
-        if d.getVar('HOMEPAGE', True):
+        if d.getVar('HOMEPAGE'):
             fields.append(["Homepage: %s\n", ['HOMEPAGE']])
 
         # Package, Version, Maintainer, Description - mandatory
@@ -151,10 +151,10 @@ python do_package_deb () {
         def pullData(l, d):
             l2 = []
             for i in l:
-                data = d.getVar(i, True)
+                data = d.getVar(i)
                 if data is None:
                     raise KeyError(f)
-                if i == 'DPKG_ARCH' and d.getVar('PACKAGE_ARCH', True) == 'all':
+                if i == 'DPKG_ARCH' and d.getVar('PACKAGE_ARCH') == 'all':
                     data = 'all'
                 elif i == 'PACKAGE_ARCH' or i == 'DPKG_ARCH':
                     # The params in deb package control don't allow character
@@ -165,7 +165,7 @@ python do_package_deb () {
             return l2
 
         ctrlfile.write("Package: %s\n" % pkgname)
-        if d.getVar('PACKAGE_ARCH', True) == "all":
+        if d.getVar('PACKAGE_ARCH') == "all":
             ctrlfile.write("Multi-Arch: foreign\n")
         # check for required fields
         try:
@@ -175,9 +175,9 @@ python do_package_deb () {
                     raise KeyError(f)
                 # Special behavior for description...
                 if 'DESCRIPTION' in fs:
-                    summary = localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or "."
+                    summary = localdata.getVar('SUMMARY') or localdata.getVar('DESCRIPTION') or "."
                     ctrlfile.write('Description: %s\n' % summary)
-                    description = localdata.getVar('DESCRIPTION', True) or "."
+                    description = localdata.getVar('DESCRIPTION') or "."
                     description = textwrap.dedent(description).strip()
                     if '\\n' in description:
                         # Manually indent
@@ -231,7 +231,7 @@ python do_package_deb () {
                 elif (v or "").startswith("> "):
                     var[dep][i] = var[dep][i].replace("> ", ">> ")
 
-        rdepends = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS", True) or "")
+        rdepends = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS") or "")
         debian_cmp_remap(rdepends)
         for dep in list(rdepends.keys()):
             if dep == pkg:
@@ -239,20 +239,20 @@ python do_package_deb () {
                 continue
             if '*' in dep:
                 del rdepends[dep]
-        rrecommends = bb.utils.explode_dep_versions2(localdata.getVar("RRECOMMENDS", True) or "")
+        rrecommends = bb.utils.explode_dep_versions2(localdata.getVar("RRECOMMENDS") or "")
         debian_cmp_remap(rrecommends)
         for dep in list(rrecommends.keys()):
             if '*' in dep:
                 del rrecommends[dep]
-        rsuggests = bb.utils.explode_dep_versions2(localdata.getVar("RSUGGESTS", True) or "")
+        rsuggests = bb.utils.explode_dep_versions2(localdata.getVar("RSUGGESTS") or "")
         debian_cmp_remap(rsuggests)
         # Deliberately drop version information here, not wanted/supported by deb
-        rprovides = dict.fromkeys(bb.utils.explode_dep_versions2(localdata.getVar("RPROVIDES", True) or ""), [])
+        rprovides = dict.fromkeys(bb.utils.explode_dep_versions2(localdata.getVar("RPROVIDES") or ""), [])
         rprovides = collections.OrderedDict(sorted(rprovides.items(), key=lambda x: x[0]))
         debian_cmp_remap(rprovides)
-        rreplaces = bb.utils.explode_dep_versions2(localdata.getVar("RREPLACES", True) or "")
+        rreplaces = bb.utils.explode_dep_versions2(localdata.getVar("RREPLACES") or "")
         debian_cmp_remap(rreplaces)
-        rconflicts = bb.utils.explode_dep_versions2(localdata.getVar("RCONFLICTS", True) or "")
+        rconflicts = bb.utils.explode_dep_versions2(localdata.getVar("RCONFLICTS") or "")
         debian_cmp_remap(rconflicts)
         if rdepends:
             ctrlfile.write("Depends: %s\n" % bb.utils.join_deps(rdepends))
@@ -269,7 +269,7 @@ python do_package_deb () {
         ctrlfile.close()
 
         for script in ["preinst", "postinst", "prerm", "postrm"]:
-            scriptvar = localdata.getVar('pkg_%s' % script, True)
+            scriptvar = localdata.getVar('pkg_%s' % script)
             if not scriptvar:
                 continue
             scriptvar = scriptvar.strip()
@@ -308,7 +308,7 @@ python do_package_deb () {
             conffiles.close()
 
         os.chdir(basedir)
-        ret = subprocess.call("PATH=\"%s\" dpkg-deb -b %s %s" % (localdata.getVar("PATH", True), root, pkgoutdir), shell=True)
+        ret = subprocess.call("PATH=\"%s\" dpkg-deb -b %s %s" % (localdata.getVar("PATH"), root, pkgoutdir), shell=True)
         if ret != 0:
             bb.utils.unlockfile(lf)
             bb.fatal("dpkg-deb execution failed")
@@ -328,7 +328,7 @@ do_package_write_deb[sstate-inputdirs] = "${PKGWRITEDIRDEB}"
 do_package_write_deb[sstate-outputdirs] = "${DEPLOY_DIR_DEB}"
 
 python do_package_write_deb_setscene () {
-    tmpdir = d.getVar('TMPDIR', True)
+    tmpdir = d.getVar('TMPDIR')
 
     if os.access(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"),os.R_OK):
         os.unlink(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"))
@@ -338,7 +338,7 @@ python do_package_write_deb_setscene () {
 addtask do_package_write_deb_setscene
 
 python () {
-    if d.getVar('PACKAGES', True) != '':
+    if d.getVar('PACKAGES') != '':
         deps = ' dpkg-native:do_populate_sysroot virtual/fakeroot-native:do_populate_sysroot'
         d.appendVarFlag('do_package_write_deb', 'depends', deps)
         d.setVarFlag('do_package_write_deb', 'fakeroot', "1")
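
The debian_cmp_remap calls above exist because dpkg spells strict version comparisons "<<" and ">>", while OE metadata uses plain "<" and ">". A standalone approximation of that remap (hypothetical helper name; the real logic lives inline inside do_package_deb and handles more operators):

    def remap_deb_version_ops(constraints):
        # constraints: dict mapping dep name -> list of version strings,
        # the shape returned by bb.utils.explode_dep_versions2().
        for dep, versions in constraints.items():
            for i, v in enumerate(versions):
                if (v or "").startswith("< "):
                    versions[i] = v.replace("< ", "<< ")
                elif (v or "").startswith("> "):
                    versions[i] = v.replace("> ", ">> ")
        return constraints

    print(remap_deb_version_ops({"libfoo": ["> 1.0"], "libbar": ["< 2.0"]}))
    # {'libfoo': ['>> 1.0'], 'libbar': ['<< 2.0']}
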
diff --git a/meta/classes/package_ipk.bbclass b/meta/classes/package_ipk.bbclass
index eb00932336..7018a600a9 100644
--- a/meta/classes/package_ipk.bbclass
+++ b/meta/classes/package_ipk.bbclass
@@ -11,8 +11,8 @@ PKGWRITEDIRIPK = "${WORKDIR}/deploy-ipks"
 OPKGBUILDCMD ??= "opkg-build"
 
 OPKG_ARGS += "--force_postinstall --prefer-arch-to-version"
-OPKG_ARGS += "${@['', '--no-install-recommends'][d.getVar("NO_RECOMMENDATIONS", True) == "1"]}"
-OPKG_ARGS += "${@['', '--add-exclude ' + ' --add-exclude '.join((d.getVar('PACKAGE_EXCLUDE', True) or "").split())][(d.getVar("PACKAGE_EXCLUDE", True) or "") != ""]}"
+OPKG_ARGS += "${@['', '--no-install-recommends'][d.getVar("NO_RECOMMENDATIONS") == "1"]}"
+OPKG_ARGS += "${@['', '--add-exclude ' + ' --add-exclude '.join((d.getVar('PACKAGE_EXCLUDE') or "").split())][(d.getVar("PACKAGE_EXCLUDE") or "") != ""]}"
 
 OPKGLIBDIR = "${localstatedir}/lib"
 
@@ -24,15 +24,15 @@ python do_package_ipk () {
 
     oldcwd = os.getcwd()
 
-    workdir = d.getVar('WORKDIR', True)
-    outdir = d.getVar('PKGWRITEDIRIPK', True)
-    tmpdir = d.getVar('TMPDIR', True)
-    pkgdest = d.getVar('PKGDEST', True)
+    workdir = d.getVar('WORKDIR')
+    outdir = d.getVar('PKGWRITEDIRIPK')
+    tmpdir = d.getVar('TMPDIR')
+    pkgdest = d.getVar('PKGDEST')
     if not workdir or not outdir or not tmpdir:
         bb.error("Variables incorrectly set, unable to package")
         return
 
-    packages = d.getVar('PACKAGES', True)
+    packages = d.getVar('PACKAGES')
     if not packages or packages == '':
         bb.debug(1, "No packages; nothing to do")
         return
@@ -56,7 +56,7 @@ python do_package_ipk () {
 
         localdata.setVar('ROOT', '')
         localdata.setVar('ROOT_%s' % pkg, root)
-        pkgname = localdata.getVar('PKG_%s' % pkg, True)
+        pkgname = localdata.getVar('PKG_%s' % pkg)
         if not pkgname:
             pkgname = pkg
         localdata.setVar('PKG', pkgname)
@@ -65,7 +65,7 @@ python do_package_ipk () {
 
         bb.data.update_data(localdata)
         basedir = os.path.join(os.path.dirname(root))
-        arch = localdata.getVar('PACKAGE_ARCH', True)
+        arch = localdata.getVar('PACKAGE_ARCH')
 
         if localdata.getVar('IPK_HIERARCHICAL_FEED', False) == "1":
             # Spread packages across subdirectories so each isn't too crowded
@@ -98,7 +98,7 @@ python do_package_ipk () {
         from glob import glob
         g = glob('*')
         if not g and localdata.getVar('ALLOW_EMPTY', False) != "1":
-            bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', True), localdata.getVar('PKGR', True)))
+            bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV'), localdata.getVar('PKGR')))
             bb.utils.unlockfile(lf)
             continue
 
@@ -111,7 +111,7 @@ python do_package_ipk () {
             bb.fatal("unable to open control file for writing")
 
         fields = []
-        pe = d.getVar('PKGE', True)
+        pe = d.getVar('PKGE')
         if pe and int(pe) > 0:
             fields.append(["Version: %s:%s-%s\n", ['PKGE', 'PKGV', 'PKGR']])
         else:
@@ -123,13 +123,13 @@ python do_package_ipk () {
         fields.append(["License: %s\n", ['LICENSE']])
         fields.append(["Architecture: %s\n", ['PACKAGE_ARCH']])
         fields.append(["OE: %s\n", ['PN']])
-        if d.getVar('HOMEPAGE', True):
+        if d.getVar('HOMEPAGE'):
             fields.append(["Homepage: %s\n", ['HOMEPAGE']])
 
         def pullData(l, d):
             l2 = []
             for i in l:
-                l2.append(d.getVar(i, True))
+                l2.append(d.getVar(i))
             return l2
 
         ctrlfile.write("Package: %s\n" % pkgname)
@@ -141,9 +141,9 @@ python do_package_ipk () {
                     raise KeyError(f)
                 # Special behavior for description...
                 if 'DESCRIPTION' in fs:
-                    summary = localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or "."
+                    summary = localdata.getVar('SUMMARY') or localdata.getVar('DESCRIPTION') or "."
                     ctrlfile.write('Description: %s\n' % summary)
-                    description = localdata.getVar('DESCRIPTION', True) or "."
+                    description = localdata.getVar('DESCRIPTION') or "."
                     description = textwrap.dedent(description).strip()
                     if '\\n' in description:
                         # Manually indent
@@ -185,19 +185,19 @@ python do_package_ipk () {
                 elif (v or "").startswith("> "):
                     var[dep][i] = var[dep][i].replace("> ", ">> ")
 
-        rdepends = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS", True) or "")
+        rdepends = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS") or "")
         debian_cmp_remap(rdepends)
-        rrecommends = bb.utils.explode_dep_versions2(localdata.getVar("RRECOMMENDS", True) or "")
+        rrecommends = bb.utils.explode_dep_versions2(localdata.getVar("RRECOMMENDS") or "")
         debian_cmp_remap(rrecommends)
-        rsuggests = bb.utils.explode_dep_versions2(localdata.getVar("RSUGGESTS", True) or "")
+        rsuggests = bb.utils.explode_dep_versions2(localdata.getVar("RSUGGESTS") or "")
         debian_cmp_remap(rsuggests)
         # Deliberately drop version information here, not wanted/supported by ipk
-        rprovides = dict.fromkeys(bb.utils.explode_dep_versions2(localdata.getVar("RPROVIDES", True) or ""), [])
+        rprovides = dict.fromkeys(bb.utils.explode_dep_versions2(localdata.getVar("RPROVIDES") or ""), [])
         rprovides = collections.OrderedDict(sorted(rprovides.items(), key=lambda x: x[0]))
         debian_cmp_remap(rprovides)
-        rreplaces = bb.utils.explode_dep_versions2(localdata.getVar("RREPLACES", True) or "")
+        rreplaces = bb.utils.explode_dep_versions2(localdata.getVar("RREPLACES") or "")
         debian_cmp_remap(rreplaces)
-        rconflicts = bb.utils.explode_dep_versions2(localdata.getVar("RCONFLICTS", True) or "")
+        rconflicts = bb.utils.explode_dep_versions2(localdata.getVar("RCONFLICTS") or "")
         debian_cmp_remap(rconflicts)
 
         if rdepends:
@@ -212,14 +212,14 @@ python do_package_ipk () {
             ctrlfile.write("Replaces: %s\n" % bb.utils.join_deps(rreplaces))
         if rconflicts:
             ctrlfile.write("Conflicts: %s\n" % bb.utils.join_deps(rconflicts))
-        src_uri = localdata.getVar("SRC_URI", True).strip() or "None"
+        src_uri = localdata.getVar("SRC_URI").strip() or "None"
         if src_uri:
             src_uri = re.sub("\s+", " ", src_uri)
             ctrlfile.write("Source: %s\n" % " ".join(src_uri.split()))
         ctrlfile.close()
 
         for script in ["preinst", "postinst", "prerm", "postrm"]:
-            scriptvar = localdata.getVar('pkg_%s' % script, True)
+            scriptvar = localdata.getVar('pkg_%s' % script)
             if not scriptvar:
                 continue
             try:
@@ -244,15 +244,15 @@ python do_package_ipk () {
             conffiles.close()
 
         os.chdir(basedir)
-        ret = subprocess.call("PATH=\"%s\" %s %s %s" % (localdata.getVar("PATH", True),
-                                                        d.getVar("OPKGBUILDCMD", True), pkg, pkgoutdir), shell=True)
+        ret = subprocess.call("PATH=\"%s\" %s %s %s" % (localdata.getVar("PATH"),
+                                                        d.getVar("OPKGBUILDCMD"), pkg, pkgoutdir), shell=True)
         if ret != 0:
             bb.utils.unlockfile(lf)
             bb.fatal("opkg-build execution failed")
 
-        if d.getVar('IPK_SIGN_PACKAGES', True) == '1':
-            ipkver = "%s-%s" % (d.getVar('PKGV', True), d.getVar('PKGR', True))
-            ipk_to_sign = "%s/%s_%s_%s.ipk" % (pkgoutdir, pkgname, ipkver, d.getVar('PACKAGE_ARCH', True))
+        if d.getVar('IPK_SIGN_PACKAGES') == '1':
+            ipkver = "%s-%s" % (d.getVar('PKGV'), d.getVar('PKGR'))
+            ipk_to_sign = "%s/%s_%s_%s.ipk" % (pkgoutdir, pkgname, ipkver, d.getVar('PACKAGE_ARCH'))
             sign_ipk(d, ipk_to_sign)
 
         cleanupcontrol(root)
@@ -268,7 +268,7 @@ do_package_write_ipk[sstate-inputdirs] = "${PKGWRITEDIRIPK}"
 do_package_write_ipk[sstate-outputdirs] = "${DEPLOY_DIR_IPK}"
 
 python do_package_write_ipk_setscene () {
-    tmpdir = d.getVar('TMPDIR', True)
+    tmpdir = d.getVar('TMPDIR')
 
     if os.access(os.path.join(tmpdir, "stamps", "IPK_PACKAGE_INDEX_CLEAN"), os.R_OK):
         os.unlink(os.path.join(tmpdir, "stamps", "IPK_PACKAGE_INDEX_CLEAN"))
@@ -278,7 +278,7 @@ python do_package_write_ipk_setscene () {
 addtask do_package_write_ipk_setscene
 
 python () {
-    if d.getVar('PACKAGES', True) != '':
+    if d.getVar('PACKAGES') != '':
         deps = ' opkg-utils-native:do_populate_sysroot virtual/fakeroot-native:do_populate_sysroot'
         d.appendVarFlag('do_package_write_ipk', 'depends', deps)
         d.setVarFlag('do_package_write_ipk', 'fakeroot', "1")
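
The OPKG_ARGS and APT_ARGS assignments above lean on an inline-Python idiom that is easy to misread: indexing a two-element list with a boolean. Outside BitBake's ${@...} expansion it reduces to this:

    # False indexes element 0 (the empty string), True indexes element 1,
    # because bool is a subclass of int in Python.
    no_recommendations = "1"  # stand-in for d.getVar("NO_RECOMMENDATIONS")
    arg = ['', '--no-install-recommends'][no_recommendations == "1"]
    assert arg == '--no-install-recommends'

A conditional expression ('--no-install-recommends' if cond else '') would read more clearly, but the list-indexing form is long-standing in this metadata.
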
diff --git a/meta/classes/package_rpm.bbclass b/meta/classes/package_rpm.bbclass
index c431545f7c..722c62877a 100644
--- a/meta/classes/package_rpm.bbclass
+++ b/meta/classes/package_rpm.bbclass
@@ -13,9 +13,9 @@ MERGEPERFILEDEPS = "1"
 
 # Construct per file dependencies file
 def write_rpm_perfiledata(srcname, d):
-    workdir = d.getVar('WORKDIR', True)
-    packages = d.getVar('PACKAGES', True)
-    pkgd = d.getVar('PKGD', True)
+    workdir = d.getVar('WORKDIR')
+    packages = d.getVar('PACKAGES')
+    pkgd = d.getVar('PKGD')
 
     def dump_filerdeps(varname, outfile, d):
         outfile.write("#!/usr/bin/env python\n\n")
@@ -23,10 +23,10 @@ def write_rpm_perfiledata(srcname, d):
         outfile.write('deps = {\n')
         for pkg in packages.split():
             dependsflist_key = 'FILE' + varname + 'FLIST' + "_" + pkg
-            dependsflist = (d.getVar(dependsflist_key, True) or "")
+            dependsflist = (d.getVar(dependsflist_key) or "")
             for dfile in dependsflist.split():
                 key = "FILE" + varname + "_" + dfile + "_" + pkg
-                depends_dict = bb.utils.explode_dep_versions(d.getVar(key, True) or "")
+                depends_dict = bb.utils.explode_dep_versions(d.getVar(key) or "")
                 file = dfile.replace("@underscore@", "_")
                 file = file.replace("@closebrace@", "]")
                 file = file.replace("@openbrace@", "[")
@@ -87,14 +87,14 @@ python write_specfile () {
     # append information for logs and patches to %prep
     def add_prep(d,spec_files_bottom):
         if d.getVarFlag('ARCHIVER_MODE', 'srpm', True) == '1' and bb.data.inherits_class('archiver', d):
-            spec_files_bottom.append('%%prep -n %s' % d.getVar('PN', True) )
+            spec_files_bottom.append('%%prep -n %s' % d.getVar('PN') )
             spec_files_bottom.append('%s' % "echo \"include logs and patches, Please check them in SOURCES\"")
             spec_files_bottom.append('')
 
     # append the name of tarball to key word 'SOURCE' in xxx.spec.
     def tail_source(d):
         if d.getVarFlag('ARCHIVER_MODE', 'srpm', True) == '1' and bb.data.inherits_class('archiver', d):
-            ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
+            ar_outdir = d.getVar('ARCHIVER_OUTDIR')
             if not os.path.exists(ar_outdir):
                 return
             source_list = os.listdir(ar_outdir)
@@ -110,7 +110,7 @@ python write_specfile () {
     # We need a simple way to remove the MLPREFIX from the package name,
    # and dependency information...
     def strip_multilib(name, d):
-        multilibs = d.getVar('MULTILIBS', True) or ""
+        multilibs = d.getVar('MULTILIBS') or ""
         for ext in multilibs.split():
             eext = ext.split(':')
             if len(eext) > 1 and eext[0] == 'multilib' and name and name.find(eext[1] + '-') >= 0:
@@ -124,7 +124,7 @@ python write_specfile () {
             newdeps[strip_multilib(dep, d)] = depends[dep]
         return bb.utils.join_deps(newdeps)
 
-#        ml = d.getVar("MLPREFIX", True)
+#        ml = d.getVar("MLPREFIX")
 #        if ml and name and len(ml) != 0 and name.find(ml) == 0:
 #            return ml.join(name.split(ml, 1)[1:])
 #        return name
@@ -144,7 +144,7 @@ python write_specfile () {
     # after renaming we cannot look up the dependencies in the packagedata
     # store.
     def translate_vers(varname, d):
-        depends = d.getVar(varname, True)
+        depends = d.getVar(varname)
         if depends:
             depends_dict = bb.utils.explode_dep_versions2(depends)
             newdeps_dict = {}
@@ -248,10 +248,10 @@ python write_specfile () {
     def get_perfile(varname, pkg, d):
         deps = []
         dependsflist_key = 'FILE' + varname + 'FLIST' + "_" + pkg
-        dependsflist = (d.getVar(dependsflist_key, True) or "")
+        dependsflist = (d.getVar(dependsflist_key) or "")
         for dfile in dependsflist.split():
             key = "FILE" + varname + "_" + dfile + "_" + pkg
-            depends = d.getVar(key, True)
+            depends = d.getVar(key)
             if depends:
                 deps.append(depends)
         return " ".join(deps)
@@ -269,33 +269,33 @@ python write_specfile () {
         else:
             spec_preamble.append('%s' % textwrap.fill(dedent_text, width=75))
 
-    packages = d.getVar('PACKAGES', True)
+    packages = d.getVar('PACKAGES')
     if not packages or packages == '':
         bb.debug(1, "No packages; nothing to do")
         return
 
-    pkgdest = d.getVar('PKGDEST', True)
+    pkgdest = d.getVar('PKGDEST')
     if not pkgdest:
         bb.fatal("No PKGDEST")
 
-    outspecfile = d.getVar('OUTSPECFILE', True)
+    outspecfile = d.getVar('OUTSPECFILE')
     if not outspecfile:
         bb.fatal("No OUTSPECFILE")
 
     # Construct the SPEC file...
-    srcname    = strip_multilib(d.getVar('PN', True), d)
-    srcsummary = (d.getVar('SUMMARY', True) or d.getVar('DESCRIPTION', True) or ".")
-    srcversion = d.getVar('PKGV', True).replace('-', '+')
-    srcrelease = d.getVar('PKGR', True)
-    srcepoch   = (d.getVar('PKGE', True) or "")
-    srclicense = d.getVar('LICENSE', True)
-    srcsection = d.getVar('SECTION', True)
-    srcmaintainer  = d.getVar('MAINTAINER', True)
-    srchomepage    = d.getVar('HOMEPAGE', True)
-    srcdescription = d.getVar('DESCRIPTION', True) or "."
+    srcname    = strip_multilib(d.getVar('PN'), d)
+    srcsummary = (d.getVar('SUMMARY') or d.getVar('DESCRIPTION') or ".")
+    srcversion = d.getVar('PKGV').replace('-', '+')
+    srcrelease = d.getVar('PKGR')
+    srcepoch   = (d.getVar('PKGE') or "")
+    srclicense = d.getVar('LICENSE')
+    srcsection = d.getVar('SECTION')
+    srcmaintainer  = d.getVar('MAINTAINER')
+    srchomepage    = d.getVar('HOMEPAGE')
+    srcdescription = d.getVar('DESCRIPTION') or "."
     srccustomtagschunk = get_package_additional_metadata("rpm", d)
 
-    srcdepends     = strip_multilib_deps(d.getVar('DEPENDS', True), d)
+    srcdepends     = strip_multilib_deps(d.getVar('DEPENDS'), d)
     srcrdepends    = []
     srcrrecommends = []
     srcrsuggests   = []
@@ -318,8 +318,8 @@ python write_specfile () {
     spec_files_top = []
     spec_files_bottom = []
 
-    perfiledeps = (d.getVar("MERGEPERFILEDEPS", True) or "0") == "0"
-    extra_pkgdata = (d.getVar("RPM_EXTRA_PKGDATA", True) or "0") == "1"
+    perfiledeps = (d.getVar("MERGEPERFILEDEPS") or "0") == "0"
+    extra_pkgdata = (d.getVar("RPM_EXTRA_PKGDATA") or "0") == "1"
 
     for pkg in packages.split():
         localdata = bb.data.createCopy(d)
@@ -328,7 +328,7 @@ python write_specfile () {
 
         localdata.setVar('ROOT', '')
         localdata.setVar('ROOT_%s' % pkg, root)
-        pkgname = localdata.getVar('PKG_%s' % pkg, True)
+        pkgname = localdata.getVar('PKG_%s' % pkg)
         if not pkgname:
             pkgname = pkg
         localdata.setVar('PKG', pkgname)
@@ -338,19 +338,19 @@ python write_specfile () {
         bb.data.update_data(localdata)
 
         conffiles = get_conffiles(pkg, d)
-        dirfiles = localdata.getVar('DIRFILES', True)
+        dirfiles = localdata.getVar('DIRFILES')
         if dirfiles is not None:
             dirfiles = dirfiles.split()
 
         splitname    = strip_multilib(pkgname, d)
 
-        splitsummary = (localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or ".")
-        splitversion = (localdata.getVar('PKGV', True) or "").replace('-', '+')
-        splitrelease = (localdata.getVar('PKGR', True) or "")
-        splitepoch   = (localdata.getVar('PKGE', True) or "")
-        splitlicense = (localdata.getVar('LICENSE', True) or "")
-        splitsection = (localdata.getVar('SECTION', True) or "")
-        splitdescription = (localdata.getVar('DESCRIPTION', True) or ".")
+        splitsummary = (localdata.getVar('SUMMARY') or localdata.getVar('DESCRIPTION') or ".")
+        splitversion = (localdata.getVar('PKGV') or "").replace('-', '+')
+        splitrelease = (localdata.getVar('PKGR') or "")
+        splitepoch   = (localdata.getVar('PKGE') or "")
+        splitlicense = (localdata.getVar('LICENSE') or "")
+        splitsection = (localdata.getVar('SECTION') or "")
+        splitdescription = (localdata.getVar('DESCRIPTION') or ".")
         splitcustomtagschunk = get_package_additional_metadata("rpm", localdata)
 
         translate_vers('RDEPENDS', localdata)
@@ -363,18 +363,18 @@ python write_specfile () {
         # Map the dependencies into their final form
         mapping_rename_hook(localdata)
 
-        splitrdepends    = strip_multilib_deps(localdata.getVar('RDEPENDS', True), d)
-        splitrrecommends = strip_multilib_deps(localdata.getVar('RRECOMMENDS', True), d)
-        splitrsuggests   = strip_multilib_deps(localdata.getVar('RSUGGESTS', True), d)
-        splitrprovides   = strip_multilib_deps(localdata.getVar('RPROVIDES', True), d)
-        splitrreplaces   = strip_multilib_deps(localdata.getVar('RREPLACES', True), d)
-        splitrconflicts  = strip_multilib_deps(localdata.getVar('RCONFLICTS', True), d)
+        splitrdepends    = strip_multilib_deps(localdata.getVar('RDEPENDS'), d)
+        splitrrecommends = strip_multilib_deps(localdata.getVar('RRECOMMENDS'), d)
+        splitrsuggests   = strip_multilib_deps(localdata.getVar('RSUGGESTS'), d)
+        splitrprovides   = strip_multilib_deps(localdata.getVar('RPROVIDES'), d)
+        splitrreplaces   = strip_multilib_deps(localdata.getVar('RREPLACES'), d)
+        splitrconflicts  = strip_multilib_deps(localdata.getVar('RCONFLICTS'), d)
         splitrobsoletes  = []
 
-        splitrpreinst  = localdata.getVar('pkg_preinst', True)
-        splitrpostinst = localdata.getVar('pkg_postinst', True)
-        splitrprerm    = localdata.getVar('pkg_prerm', True)
-        splitrpostrm   = localdata.getVar('pkg_postrm', True)
+        splitrpreinst  = localdata.getVar('pkg_preinst')
+        splitrpostinst = localdata.getVar('pkg_postinst')
+        splitrprerm    = localdata.getVar('pkg_prerm')
+        splitrpostrm   = localdata.getVar('pkg_postrm')
 
 
         if not perfiledeps:
@@ -621,7 +621,7 @@ python write_specfile () {
 
     # RPMSPEC_PREAMBLE is a way to add arbitrary text to the top
     # of the generated spec file
-    external_preamble = d.getVar("RPMSPEC_PREAMBLE", True)
+    external_preamble = d.getVar("RPMSPEC_PREAMBLE")
     if external_preamble:
         specfile.write(external_preamble + "\n")
 
@@ -652,20 +652,20 @@ python do_package_rpm () {
     # We need a simple way to remove the MLPREFIX from the package name,
     # and dependency information...
     def strip_multilib(name, d):
-        ml = d.getVar("MLPREFIX", True)
+        ml = d.getVar("MLPREFIX")
         if ml and name and len(ml) != 0 and name.find(ml) >= 0:
             return "".join(name.split(ml))
         return name
 
-    workdir = d.getVar('WORKDIR', True)
-    tmpdir = d.getVar('TMPDIR', True)
-    pkgd = d.getVar('PKGD', True)
-    pkgdest = d.getVar('PKGDEST', True)
+    workdir = d.getVar('WORKDIR')
+    tmpdir = d.getVar('TMPDIR')
+    pkgd = d.getVar('PKGD')
+    pkgdest = d.getVar('PKGDEST')
     if not workdir or not pkgd or not tmpdir:
         bb.error("Variables incorrectly set, unable to package")
         return
 
-    packages = d.getVar('PACKAGES', True)
+    packages = d.getVar('PACKAGES')
     if not packages or packages == '':
         bb.debug(1, "No packages; nothing to do")
         return
@@ -674,31 +674,31 @@ python do_package_rpm () {
     # If the spec file already exist, and has not been stored into
    # pseudo's files.db, it maybe cause rpmbuild src.rpm fail,
     # so remove it before doing rpmbuild src.rpm.
-    srcname    = strip_multilib(d.getVar('PN', True), d)
+    srcname    = strip_multilib(d.getVar('PN'), d)
     outspecfile = workdir + "/" + srcname + ".spec"
     if os.path.isfile(outspecfile):
         os.remove(outspecfile)
     d.setVar('OUTSPECFILE', outspecfile)
     bb.build.exec_func('write_specfile', d)
 
-    perfiledeps = (d.getVar("MERGEPERFILEDEPS", True) or "0") == "0"
+    perfiledeps = (d.getVar("MERGEPERFILEDEPS") or "0") == "0"
     if perfiledeps:
         outdepends, outprovides = write_rpm_perfiledata(srcname, d)
 
     # Setup the rpmbuild arguments...
-    rpmbuild = d.getVar('RPMBUILD', True)
-    targetsys = d.getVar('TARGET_SYS', True)
-    targetvendor = d.getVar('HOST_VENDOR', True)
-    package_arch = (d.getVar('PACKAGE_ARCH', True) or "").replace("-", "_")
-    sdkpkgsuffix = (d.getVar('SDKPKGSUFFIX', True) or "nativesdk").replace("-", "_")
+    rpmbuild = d.getVar('RPMBUILD')
+    targetsys = d.getVar('TARGET_SYS')
+    targetvendor = d.getVar('HOST_VENDOR')
+    package_arch = (d.getVar('PACKAGE_ARCH') or "").replace("-", "_")
+    sdkpkgsuffix = (d.getVar('SDKPKGSUFFIX') or "nativesdk").replace("-", "_")
     if package_arch not in "all any noarch".split() and not package_arch.endswith(sdkpkgsuffix):
-        ml_prefix = (d.getVar('MLPREFIX', True) or "").replace("-", "_")
+        ml_prefix = (d.getVar('MLPREFIX') or "").replace("-", "_")
         d.setVar('PACKAGE_ARCH_EXTEND', ml_prefix + package_arch)
     else:
         d.setVar('PACKAGE_ARCH_EXTEND', package_arch)
     pkgwritedir = d.expand('${PKGWRITEDIRRPM}/${PACKAGE_ARCH_EXTEND}')
     d.setVar('RPM_PKGWRITEDIR', pkgwritedir)
-    bb.debug(1, 'PKGWRITEDIR: %s' % d.getVar('RPM_PKGWRITEDIR', True))
+    bb.debug(1, 'PKGWRITEDIR: %s' % d.getVar('RPM_PKGWRITEDIR'))
     pkgarch = d.expand('${PACKAGE_ARCH_EXTEND}${HOST_VENDOR}-${HOST_OS}')
     magicfile = d.expand('${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc')
     bb.utils.mkdirhier(pkgwritedir)
@@ -707,7 +707,7 @@ python do_package_rpm () {
     cmd = rpmbuild
     cmd = cmd + " --nodeps --short-circuit --target " + pkgarch + " --buildroot " + pkgd
     cmd = cmd + " --define '_topdir " + workdir + "' --define '_rpmdir " + pkgwritedir + "'"
-    cmd = cmd + " --define '_builddir " + d.getVar('S', True) + "'"
+    cmd = cmd + " --define '_builddir " + d.getVar('S') + "'"
     cmd = cmd + " --define '_build_name_fmt %%{NAME}-%%{VERSION}-%%{RELEASE}.%%{ARCH}.rpm'"
     cmd = cmd + " --define '_use_internal_dependency_generator 0'"
     if perfiledeps:
@@ -721,8 +721,8 @@ python do_package_rpm () {
     cmd = cmd + " --define '_rpmfc_magic_path " + magicfile + "'"
     cmd = cmd + " --define '_tmppath " + workdir + "'"
     if d.getVarFlag('ARCHIVER_MODE', 'srpm', True) == '1' and bb.data.inherits_class('archiver', d):
-        cmd = cmd + " --define '_sourcedir " + d.getVar('ARCHIVER_OUTDIR', True) + "'"
-        cmdsrpm = cmd + " --define '_srcrpmdir " + d.getVar('ARCHIVER_OUTDIR', True) + "'"
+        cmd = cmd + " --define '_sourcedir " + d.getVar('ARCHIVER_OUTDIR') + "'"
+        cmdsrpm = cmd + " --define '_srcrpmdir " + d.getVar('ARCHIVER_OUTDIR') + "'"
         cmdsrpm = cmdsrpm + " -bs " + outspecfile
         # Build the .src.rpm
         d.setVar('SBUILDSPEC', cmdsrpm + "\n")
@@ -735,12 +735,12 @@ python do_package_rpm () {
     d.setVarFlag('BUILDSPEC', 'func', '1')
     bb.build.exec_func('BUILDSPEC', d)
 
-    if d.getVar('RPM_SIGN_PACKAGES', True) == '1':
+    if d.getVar('RPM_SIGN_PACKAGES') == '1':
         bb.build.exec_func("sign_rpm", d)
 }
 
 python () {
-    if d.getVar('PACKAGES', True) != '':
+    if d.getVar('PACKAGES') != '':
         deps = ' rpm-native:do_populate_sysroot virtual/fakeroot-native:do_populate_sysroot'
         d.appendVarFlag('do_package_write_rpm', 'depends', deps)
         d.setVarFlag('do_package_write_rpm', 'fakeroot', '1')
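
do_package_rpm carries its own small strip_multilib above: multilib package names are prefixed with MLPREFIX, and the RPM backend removes that prefix when naming the spec file. Lifted out as a standalone sketch (the "lib32-" prefix is illustrative, not a value taken from this diff):

    def strip_multilib(name, ml_prefix):
        # Remove every occurrence of the multilib prefix from the name,
        # mirroring the "".join(name.split(ml)) trick used above.
        if ml_prefix and name and ml_prefix in name:
            return "".join(name.split(ml_prefix))
        return name

    assert strip_multilib("lib32-glibc", "lib32-") == "glibc"
    assert strip_multilib("glibc", "lib32-") == "glibc"
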
diff --git a/meta/classes/package_tar.bbclass b/meta/classes/package_tar.bbclass
index e217814af4..3ff8b8f560 100644
--- a/meta/classes/package_tar.bbclass
+++ b/meta/classes/package_tar.bbclass
@@ -7,27 +7,27 @@ python do_package_tar () {
 
     oldcwd = os.getcwd()
 
-    workdir = d.getVar('WORKDIR', True)
+    workdir = d.getVar('WORKDIR')
     if not workdir:
         bb.error("WORKDIR not defined, unable to package")
         return
 
-    outdir = d.getVar('DEPLOY_DIR_TAR', True)
+    outdir = d.getVar('DEPLOY_DIR_TAR')
     if not outdir:
         bb.error("DEPLOY_DIR_TAR not defined, unable to package")
         return
 
-    dvar = d.getVar('D', True)
+    dvar = d.getVar('D')
     if not dvar:
         bb.error("D not defined, unable to package")
         return
 
-    packages = d.getVar('PACKAGES', True)
+    packages = d.getVar('PACKAGES')
     if not packages:
         bb.debug(1, "PACKAGES not defined, nothing to package")
         return
 
-    pkgdest = d.getVar('PKGDEST', True)
+    pkgdest = d.getVar('PKGDEST')
 
     bb.utils.mkdirhier(outdir)
     bb.utils.mkdirhier(dvar)
@@ -46,7 +46,7 @@ python do_package_tar () {
         os.chdir(root)
         dlist = os.listdir(root)
         if not dlist:
-            bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', True), localdata.getVar('PKGR', True)))
+            bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV'), localdata.getVar('PKGR')))
             continue
         args = "tar -cz --exclude=CONTROL --exclude=DEBIAN -f".split()
         ret = subprocess.call(args + [tarfn] + dlist)
@@ -57,7 +57,7 @@ python do_package_tar () {
 }
 
 python () {
-    if d.getVar('PACKAGES', True) != '':
+    if d.getVar('PACKAGES') != '':
         deps = (d.getVarFlag('do_package_write_tar', 'depends', True) or "").split()
         deps.append('tar-native:do_populate_sysroot')
         deps.append('virtual/fakeroot-native:do_populate_sysroot')
diff --git a/meta/classes/packagedata.bbclass b/meta/classes/packagedata.bbclass
index 3397f1e36b..a903e5cfd2 100644
--- a/meta/classes/packagedata.bbclass
+++ b/meta/classes/packagedata.bbclass
@@ -2,10 +2,10 @@ python read_subpackage_metadata () {
     import oe.packagedata
 
     vars = {
-        "PN" : d.getVar('PN', True),
-        "PE" : d.getVar('PE', True),
-        "PV" : d.getVar('PV', True),
-        "PR" : d.getVar('PR', True),
+        "PN" : d.getVar('PN'),
+        "PE" : d.getVar('PE'),
+        "PV" : d.getVar('PV'),
+        "PR" : d.getVar('PR'),
     }
 
     data = oe.packagedata.read_pkgdata(vars["PN"], d)
@@ -13,7 +13,7 @@ python read_subpackage_metadata () {
     for key in data.keys():
         d.setVar(key, data[key])
 
-    for pkg in d.getVar('PACKAGES', True).split():
+    for pkg in d.getVar('PACKAGES').split():
         sdata = oe.packagedata.read_subpkgdata(pkg, d)
         for key in sdata.keys():
             if key in vars:
diff --git a/meta/classes/packagefeed-stability.bbclass b/meta/classes/packagefeed-stability.bbclass
index aa01def74d..3a128073d0 100644
--- a/meta/classes/packagefeed-stability.bbclass
+++ b/meta/classes/packagefeed-stability.bbclass
@@ -31,7 +31,7 @@ python() {
     # This assumes that the package_write task is called package_write_<pkgtype>
     # and that the directory in which packages should be written is
     # pointed to by the variable DEPLOY_DIR_<PKGTYPE>
-    for pkgclass in (d.getVar('PACKAGE_CLASSES', True) or '').split():
+    for pkgclass in (d.getVar('PACKAGE_CLASSES') or '').split():
         if pkgclass.startswith('package_'):
             pkgtype = pkgclass.split('_', 1)[1]
             pkgwritefunc = 'do_package_write_%s' % pkgtype
@@ -71,7 +71,7 @@ python() {
 # This isn't the real task function - it's a template that we use in the
 # anonymous python code above
 fakeroot python do_package_compare () {
-    currenttask = d.getVar('BB_CURRENTTASK', True)
+    currenttask = d.getVar('BB_CURRENTTASK')
     pkgtype = currenttask.rsplit('_', 1)[1]
     package_compare_impl(pkgtype, d)
 }
@@ -83,12 +83,12 @@ def package_compare_impl(pkgtype, d):
     import subprocess
     import oe.sstatesig
 
-    pn = d.getVar('PN', True)
+    pn = d.getVar('PN')
     deploydir = d.getVar('DEPLOY_DIR_%s' % pkgtype.upper(), True)
     prepath = deploydir + '-prediff/'
 
     # Find out PKGR values are
-    pkgdatadir = d.getVar('PKGDATA_DIR', True)
+    pkgdatadir = d.getVar('PKGDATA_DIR')
     packages = []
     try:
         with open(os.path.join(pkgdatadir, pn), 'r') as f:
@@ -138,7 +138,7 @@ def package_compare_impl(pkgtype, d):
     files = []
     docopy = False
     manifest, _ = oe.sstatesig.sstate_get_manifest_filename(pkgwritetask, d)
-    mlprefix = d.getVar('MLPREFIX', True)
+    mlprefix = d.getVar('MLPREFIX')
     # Copy recipe's all packages if one of the packages are different to make
     # they have the same PR.
     with open(manifest, 'r') as f:
@@ -215,7 +215,7 @@ def package_compare_impl(pkgtype, d):
             # multilib), they're identical in theory, but sstate.bbclass
             # copies it again, so keep align with that.
             if os.path.exists(destpath) and pkgtype == 'rpm' \
-                    and d.getVar('PACKAGE_ARCH', True) == 'all':
+                    and d.getVar('PACKAGE_ARCH') == 'all':
                 os.unlink(destpath)
             if (os.stat(srcpath).st_dev == os.stat(destdir).st_dev):
                 # Use a hard link to save space
@@ -229,7 +229,7 @@ def package_compare_impl(pkgtype, d):
 do_cleansstate[postfuncs] += "pfs_cleanpkgs"
 python pfs_cleanpkgs () {
     import errno
-    for pkgclass in (d.getVar('PACKAGE_CLASSES', True) or '').split():
+    for pkgclass in (d.getVar('PACKAGE_CLASSES') or '').split():
         if pkgclass.startswith('package_'):
             pkgtype = pkgclass.split('_', 1)[1]
             deploydir = d.getVar('DEPLOY_DIR_%s' % pkgtype.upper(), True)
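
The copy step in package_compare_impl chooses between a hard link and a real copy with an st_dev comparison: hard links only work within a single filesystem, so the device IDs of source and destination must match. A self-contained sketch of that decision (hypothetical function name):

    import os
    import shutil

    def link_or_copy(srcpath, destdir, destpath):
        # Same device ID means same filesystem, so a hard link is safe
        # and costs no extra space; otherwise fall back to copying.
        if os.stat(srcpath).st_dev == os.stat(destdir).st_dev:
            os.link(srcpath, destpath)
        else:
            shutil.copyfile(srcpath, destpath)
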
diff --git a/meta/classes/packagegroup.bbclass b/meta/classes/packagegroup.bbclass
index 3928c8a4ac..a60a52b6d4 100644
--- a/meta/classes/packagegroup.bbclass
+++ b/meta/classes/packagegroup.bbclass
@@ -16,15 +16,15 @@ PACKAGE_ARCH_EXPANDED := "${PACKAGE_ARCH}"
16 16
17LICENSE ?= "MIT" 17LICENSE ?= "MIT"
18 18
19inherit ${@oe.utils.ifelse(d.getVar('PACKAGE_ARCH_EXPANDED', True) == 'all', 'allarch', '')} 19inherit ${@oe.utils.ifelse(d.getVar('PACKAGE_ARCH_EXPANDED') == 'all', 'allarch', '')}
20 20
21# This automatically adds -dbg and -dev flavours of all PACKAGES 21# This automatically adds -dbg and -dev flavours of all PACKAGES
22# to the list. Their dependencies (RRECOMMENDS) are handled as usual 22# to the list. Their dependencies (RRECOMMENDS) are handled as usual
23# by package_depchains in a following step. 23# by package_depchains in a following step.
24# Also mark all packages as ALLOW_EMPTY 24# Also mark all packages as ALLOW_EMPTY
25python () { 25python () {
26 packages = d.getVar('PACKAGES', True).split() 26 packages = d.getVar('PACKAGES').split()
27 if d.getVar('PACKAGEGROUP_DISABLE_COMPLEMENTARY', True) != '1': 27 if d.getVar('PACKAGEGROUP_DISABLE_COMPLEMENTARY') != '1':
28 types = ['', '-dbg', '-dev'] 28 types = ['', '-dbg', '-dev']
29 if bb.utils.contains('DISTRO_FEATURES', 'ptest', True, False, d): 29 if bb.utils.contains('DISTRO_FEATURES', 'ptest', True, False, d):
30 types.append('-ptest') 30 types.append('-ptest')
@@ -49,7 +49,7 @@ do_install[noexec] = "1"
 do_populate_sysroot[noexec] = "1"
 
 python () {
-    initman = d.getVar("VIRTUAL-RUNTIME_init_manager", True)
+    initman = d.getVar("VIRTUAL-RUNTIME_init_manager")
     if initman and initman in ['sysvinit', 'systemd'] and not bb.utils.contains('DISTRO_FEATURES', initman, True, False, d):
         bb.fatal("Please ensure that your setting of VIRTUAL-RUNTIME_init_manager (%s) matches the entries enabled in DISTRO_FEATURES" % initman)
 }
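
The flavour list above is gated on DISTRO_FEATURES through bb.utils.contains(); a small sketch of that helper's semantics, with illustrative feature values:

    import bb.data
    import bb.utils

    d = bb.data.init()
    d.setVar('DISTRO_FEATURES', 'systemd ptest x11')

    # contains(var, checkvalues, truevalue, falsevalue, d) returns truevalue
    # when every word of checkvalues appears in var, falsevalue otherwise
    types = ['', '-dbg', '-dev']
    if bb.utils.contains('DISTRO_FEATURES', 'ptest', True, False, d):
        types.append('-ptest')
    print(types)  # ['', '-dbg', '-dev', '-ptest']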
diff --git a/meta/classes/patch.bbclass b/meta/classes/patch.bbclass
index 0e5b602462..23ba5df48b 100644
--- a/meta/classes/patch.bbclass
+++ b/meta/classes/patch.bbclass
@@ -11,7 +11,7 @@ PATCH_GIT_USER_EMAIL ?= "oe.patch@oe"
 inherit terminal
 
 python () {
-    if d.getVar('PATCHTOOL', True) == 'git' and d.getVar('PATCH_COMMIT_FUNCTIONS', True) == '1':
+    if d.getVar('PATCHTOOL') == 'git' and d.getVar('PATCH_COMMIT_FUNCTIONS') == '1':
         tasks = list(filter(lambda k: d.getVarFlag(k, "task", True), d.keys()))
         extratasks = []
         def follow_chain(task, endtask, chain=None):
@@ -44,8 +44,8 @@ python () {
 
 python patch_task_patch_prefunc() {
     # Prefunc for do_patch
-    func = d.getVar('BB_RUNTASK', True)
-    srcsubdir = d.getVar('S', True)
+    func = d.getVar('BB_RUNTASK')
+    srcsubdir = d.getVar('S')
 
     patchdir = os.path.join(srcsubdir, 'patches')
     if os.path.exists(patchdir):
@@ -59,12 +59,12 @@ python patch_task_postfunc() {
     # Prefunc for task functions between do_unpack and do_patch
     import oe.patch
     import shutil
-    func = d.getVar('BB_RUNTASK', True)
-    srcsubdir = d.getVar('S', True)
+    func = d.getVar('BB_RUNTASK')
+    srcsubdir = d.getVar('S')
 
     if os.path.exists(srcsubdir):
         if func == 'do_patch':
-            haspatches = (d.getVar('PATCH_HAS_PATCHES_DIR', True) == '1')
+            haspatches = (d.getVar('PATCH_HAS_PATCHES_DIR') == '1')
             patchdir = os.path.join(srcsubdir, 'patches')
             if os.path.exists(patchdir):
                 shutil.rmtree(patchdir)
@@ -99,20 +99,20 @@ python patch_do_patch() {
99 "git": oe.patch.GitApplyTree, 99 "git": oe.patch.GitApplyTree,
100 } 100 }
101 101
102 cls = patchsetmap[d.getVar('PATCHTOOL', True) or 'quilt'] 102 cls = patchsetmap[d.getVar('PATCHTOOL') or 'quilt']
103 103
104 resolvermap = { 104 resolvermap = {
105 "noop": oe.patch.NOOPResolver, 105 "noop": oe.patch.NOOPResolver,
106 "user": oe.patch.UserResolver, 106 "user": oe.patch.UserResolver,
107 } 107 }
108 108
109 rcls = resolvermap[d.getVar('PATCHRESOLVE', True) or 'user'] 109 rcls = resolvermap[d.getVar('PATCHRESOLVE') or 'user']
110 110
111 classes = {} 111 classes = {}
112 112
113 s = d.getVar('S', True) 113 s = d.getVar('S')
114 114
115 os.putenv('PATH', d.getVar('PATH', True)) 115 os.putenv('PATH', d.getVar('PATH'))
116 116
117 # We must use one TMPDIR per process so that the "patch" processes 117 # We must use one TMPDIR per process so that the "patch" processes
118 # don't generate the same temp file name. 118 # don't generate the same temp file name.
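
patch_do_patch() selects its patchset and resolver classes by dictionary lookup, with `or` supplying the default key when the variable is unset. A stand-alone sketch of that pattern (class names here are placeholders, not the real oe.patch types):

    patchsetmap = {
        "patch": "PatchTree",
        "quilt": "QuiltTree",
        "git": "GitApplyTree",
    }

    def pick(patchtool):
        # an unset variable comes back as None, so 'or' picks the default
        return patchsetmap[patchtool or 'quilt']

    print(pick('git'))  # GitApplyTree
    print(pick(None))   # QuiltTree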
diff --git a/meta/classes/pixbufcache.bbclass b/meta/classes/pixbufcache.bbclass
index 3f48a0f344..63bf079cf1 100644
--- a/meta/classes/pixbufcache.bbclass
+++ b/meta/classes/pixbufcache.bbclass
@@ -28,20 +28,20 @@ fi
 }
 
 python populate_packages_append() {
-    pixbuf_pkgs = d.getVar('PIXBUF_PACKAGES', True).split()
+    pixbuf_pkgs = d.getVar('PIXBUF_PACKAGES').split()
 
     for pkg in pixbuf_pkgs:
         bb.note("adding pixbuf postinst and postrm scripts to %s" % pkg)
-        postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
+        postinst = d.getVar('pkg_postinst_%s' % pkg) or d.getVar('pkg_postinst')
         if not postinst:
             postinst = '#!/bin/sh\n'
-        postinst += d.getVar('pixbufcache_common', True)
+        postinst += d.getVar('pixbufcache_common')
         d.setVar('pkg_postinst_%s' % pkg, postinst)
 
-        postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True)
+        postrm = d.getVar('pkg_postrm_%s' % pkg) or d.getVar('pkg_postrm')
         if not postrm:
             postrm = '#!/bin/sh\n'
-        postrm += d.getVar('pixbufcache_common', True)
+        postrm += d.getVar('pixbufcache_common')
         d.setVar('pkg_postrm_%s' % pkg, postrm)
 }
 
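
The pixbufcache hunk shows a common OE idiom: a shared shell fragment is appended to each listed package's postinst/postrm, creating the script when the package had none. A condensed sketch against an in-memory datastore (package name and fragment are illustrative):

    import bb.data

    d = bb.data.init()
    d.setVar('pixbufcache_common', 'gdk-pixbuf-query-loaders --update-cache\n')

    pkg = 'libfoo'  # hypothetical package
    postinst = d.getVar('pkg_postinst_%s' % pkg) or d.getVar('pkg_postinst')
    if not postinst:
        postinst = '#!/bin/sh\n'
    postinst += d.getVar('pixbufcache_common')
    d.setVar('pkg_postinst_%s' % pkg, postinst)
    print(d.getVar('pkg_postinst_%s' % pkg))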
diff --git a/meta/classes/populate_sdk_base.bbclass b/meta/classes/populate_sdk_base.bbclass
index 220cde6925..48bc3959f5 100644
--- a/meta/classes/populate_sdk_base.bbclass
+++ b/meta/classes/populate_sdk_base.bbclass
@@ -11,7 +11,7 @@ COMPLEMENTARY_GLOB[ptest-pkgs] = '*-ptest'
 def complementary_globs(featurevar, d):
     all_globs = d.getVarFlags('COMPLEMENTARY_GLOB')
     globs = []
-    features = set((d.getVar(featurevar, True) or '').split())
+    features = set((d.getVar(featurevar) or '').split())
     for name, glob in all_globs.items():
         if name in features:
             globs.append(glob)
@@ -57,30 +57,30 @@ SDK_PRE_INSTALL_COMMAND ?= ""
 SDK_POST_INSTALL_COMMAND ?= ""
 SDK_RELOCATE_AFTER_INSTALL ?= "1"
 
-SDKEXTPATH ?= "~/${@d.getVar('DISTRO', True)}_sdk"
-SDK_TITLE ?= "${@d.getVar('DISTRO_NAME', True) or d.getVar('DISTRO', True)} SDK"
+SDKEXTPATH ?= "~/${@d.getVar('DISTRO')}_sdk"
+SDK_TITLE ?= "${@d.getVar('DISTRO_NAME') or d.getVar('DISTRO')} SDK"
 
 SDK_TARGET_MANIFEST = "${SDKDEPLOYDIR}/${TOOLCHAIN_OUTPUTNAME}.target.manifest"
 SDK_HOST_MANIFEST = "${SDKDEPLOYDIR}/${TOOLCHAIN_OUTPUTNAME}.host.manifest"
 python write_target_sdk_manifest () {
     from oe.sdk import sdk_list_installed_packages
     from oe.utils import format_pkg_list
-    sdkmanifestdir = os.path.dirname(d.getVar("SDK_TARGET_MANIFEST", True))
+    sdkmanifestdir = os.path.dirname(d.getVar("SDK_TARGET_MANIFEST"))
     pkgs = sdk_list_installed_packages(d, True)
     if not os.path.exists(sdkmanifestdir):
         bb.utils.mkdirhier(sdkmanifestdir)
-    with open(d.getVar('SDK_TARGET_MANIFEST', True), 'w') as output:
+    with open(d.getVar('SDK_TARGET_MANIFEST'), 'w') as output:
         output.write(format_pkg_list(pkgs, 'ver'))
 }
 
 python write_host_sdk_manifest () {
     from oe.sdk import sdk_list_installed_packages
     from oe.utils import format_pkg_list
-    sdkmanifestdir = os.path.dirname(d.getVar("SDK_HOST_MANIFEST", True))
+    sdkmanifestdir = os.path.dirname(d.getVar("SDK_HOST_MANIFEST"))
     pkgs = sdk_list_installed_packages(d, False)
     if not os.path.exists(sdkmanifestdir):
         bb.utils.mkdirhier(sdkmanifestdir)
-    with open(d.getVar('SDK_HOST_MANIFEST', True), 'w') as output:
+    with open(d.getVar('SDK_HOST_MANIFEST'), 'w') as output:
         output.write(format_pkg_list(pkgs, 'ver'))
 }
 
@@ -93,7 +93,7 @@ def populate_sdk_common(d):
     from oe.sdk import populate_sdk
     from oe.manifest import create_manifest, Manifest
 
-    pn = d.getVar('PN', True)
+    pn = d.getVar('PN')
     runtime_mapping_rename("TOOLCHAIN_TARGET_TASK", pn, d)
     runtime_mapping_rename("TOOLCHAIN_TARGET_TASK_ATTEMPTONLY", pn, d)
 
@@ -101,13 +101,13 @@ def populate_sdk_common(d):
     ld.setVar("PKGDATA_DIR", "${STAGING_DIR}/${SDK_ARCH}-${SDKPKGSUFFIX}${SDK_VENDOR}-${SDK_OS}/pkgdata")
     runtime_mapping_rename("TOOLCHAIN_HOST_TASK", pn, ld)
     runtime_mapping_rename("TOOLCHAIN_HOST_TASK_ATTEMPTONLY", pn, ld)
-    d.setVar("TOOLCHAIN_HOST_TASK", ld.getVar("TOOLCHAIN_HOST_TASK", True))
-    d.setVar("TOOLCHAIN_HOST_TASK_ATTEMPTONLY", ld.getVar("TOOLCHAIN_HOST_TASK_ATTEMPTONLY", True))
+    d.setVar("TOOLCHAIN_HOST_TASK", ld.getVar("TOOLCHAIN_HOST_TASK"))
+    d.setVar("TOOLCHAIN_HOST_TASK_ATTEMPTONLY", ld.getVar("TOOLCHAIN_HOST_TASK_ATTEMPTONLY"))
 
     # create target/host SDK manifests
-    create_manifest(d, manifest_dir=d.getVar('SDK_DIR', True),
+    create_manifest(d, manifest_dir=d.getVar('SDK_DIR'),
                     manifest_type=Manifest.MANIFEST_TYPE_SDK_HOST)
-    create_manifest(d, manifest_dir=d.getVar('SDK_DIR', True),
+    create_manifest(d, manifest_dir=d.getVar('SDK_DIR'),
                     manifest_type=Manifest.MANIFEST_TYPE_SDK_TARGET)
 
     populate_sdk(d)
@@ -134,7 +134,7 @@ fakeroot create_sdk_files() {
 python check_sdk_sysroots() {
     # Fails build if there are broken or dangling symlinks in SDK sysroots
 
-    if d.getVar('CHECK_SDK_SYSROOTS', True) != '1':
+    if d.getVar('CHECK_SDK_SYSROOTS') != '1':
         # disabled, bail out
         return
 
@@ -142,8 +142,8 @@ python check_sdk_sysroots() {
         return os.path.abspath(path)
 
     # Get scan root
-    SCAN_ROOT = norm_path("%s/%s/sysroots/" % (d.getVar('SDK_OUTPUT', True),
-                                               d.getVar('SDKPATH', True)))
+    SCAN_ROOT = norm_path("%s/%s/sysroots/" % (d.getVar('SDK_OUTPUT'),
+                                               d.getVar('SDKPATH')))
 
     bb.note('Checking SDK sysroots at ' + SCAN_ROOT)
 
@@ -218,7 +218,7 @@ EOF
         -e 's#@SDKEXTPATH@#${SDKEXTPATH}#g' \
         -e 's#@OLDEST_KERNEL@#${SDK_OLDEST_KERNEL}#g' \
         -e 's#@REAL_MULTIMACH_TARGET_SYS@#${REAL_MULTIMACH_TARGET_SYS}#g' \
-        -e 's#@SDK_TITLE@#${@d.getVar("SDK_TITLE", True).replace('&', '\&')}#g' \
+        -e 's#@SDK_TITLE@#${@d.getVar("SDK_TITLE").replace('&', '\&')}#g' \
         -e 's#@SDK_VERSION@#${SDK_VERSION}#g' \
         -e '/@SDK_PRE_INSTALL_COMMAND@/d' \
         -e '/@SDK_POST_INSTALL_COMMAND@/d' \
@@ -268,7 +268,7 @@ do_populate_sdk[file-checksums] += "${COREBASE}/meta/files/toolchain-shar-reloca
                     ${COREBASE}/meta/files/toolchain-shar-extract.sh:True"
 
 do_populate_sdk[dirs] = "${PKGDATA_DIR} ${TOPDIR}"
-do_populate_sdk[depends] += "${@' '.join([x + ':do_populate_sysroot' for x in d.getVar('SDK_DEPENDS', True).split()])} ${@d.getVarFlag('do_rootfs', 'depends', False)}"
-do_populate_sdk[rdepends] = "${@' '.join([x + ':do_populate_sysroot' for x in d.getVar('SDK_RDEPENDS', True).split()])}"
+do_populate_sdk[depends] += "${@' '.join([x + ':do_populate_sysroot' for x in d.getVar('SDK_DEPENDS').split()])} ${@d.getVarFlag('do_rootfs', 'depends', False)}"
+do_populate_sdk[rdepends] = "${@' '.join([x + ':do_populate_sysroot' for x in d.getVar('SDK_RDEPENDS').split()])}"
 do_populate_sdk[recrdeptask] += "do_packagedata do_package_write_rpm do_package_write_ipk do_package_write_deb"
 addtask populate_sdk
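
complementary_globs() above maps feature names to globs via varflags on COMPLEMENTARY_GLOB. A self-contained sketch of the same lookup (feature names and globs are illustrative):

    import bb.data

    d = bb.data.init()
    d.setVarFlag('COMPLEMENTARY_GLOB', 'dev-pkgs', '*-dev')
    d.setVarFlag('COMPLEMENTARY_GLOB', 'dbg-pkgs', '*-dbg')
    d.setVar('SDKIMAGE_FEATURES', 'dev-pkgs')

    def complementary_globs(featurevar, d):
        all_globs = d.getVarFlags('COMPLEMENTARY_GLOB')
        features = set((d.getVar(featurevar) or '').split())
        return ' '.join(glob for name, glob in all_globs.items() if name in features)

    print(complementary_globs('SDKIMAGE_FEATURES', d))  # *-dev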
diff --git a/meta/classes/populate_sdk_ext.bbclass b/meta/classes/populate_sdk_ext.bbclass
index 1affa9dfaa..be8b6a1f6f 100644
--- a/meta/classes/populate_sdk_ext.bbclass
+++ b/meta/classes/populate_sdk_ext.bbclass
@@ -21,7 +21,7 @@ SDK_EXT_task-populate-sdk-ext = "-ext"
 # Options are full or minimal
 SDK_EXT_TYPE ?= "full"
 SDK_INCLUDE_PKGDATA ?= "0"
-SDK_INCLUDE_TOOLCHAIN ?= "${@'1' if d.getVar('SDK_EXT_TYPE', True) == 'full' else '0'}"
+SDK_INCLUDE_TOOLCHAIN ?= "${@'1' if d.getVar('SDK_EXT_TYPE') == 'full' else '0'}"
 
 SDK_RECRDEP_TASKS ?= ""
 
@@ -43,8 +43,8 @@ SDK_TARGETS ?= "${PN}"
 
 def get_sdk_install_targets(d, images_only=False):
     sdk_install_targets = ''
-    if images_only or d.getVar('SDK_EXT_TYPE', True) != 'minimal':
-        sdk_install_targets = d.getVar('SDK_TARGETS', True)
+    if images_only or d.getVar('SDK_EXT_TYPE') != 'minimal':
+        sdk_install_targets = d.getVar('SDK_TARGETS')
 
     depd = d.getVar('BB_TASKDEPDATA', False)
     for v in depd.values():
@@ -53,9 +53,9 @@ def get_sdk_install_targets(d, images_only=False):
                 sdk_install_targets += ' {}'.format(v[0])
 
     if not images_only:
-        if d.getVar('SDK_INCLUDE_PKGDATA', True) == '1':
+        if d.getVar('SDK_INCLUDE_PKGDATA') == '1':
             sdk_install_targets += ' meta-world-pkgdata:do_allpackagedata'
-        if d.getVar('SDK_INCLUDE_TOOLCHAIN', True) == '1':
+        if d.getVar('SDK_INCLUDE_TOOLCHAIN') == '1':
             sdk_install_targets += ' meta-extsdk-toolchain:do_populate_sysroot'
 
     return sdk_install_targets
@@ -83,7 +83,7 @@ TOOLCHAIN_OUTPUTNAME_task-populate-sdk-ext = "${TOOLCHAINEXT_OUTPUTNAME}"
 SDK_EXT_TARGET_MANIFEST = "${SDK_DEPLOY}/${TOOLCHAINEXT_OUTPUTNAME}.target.manifest"
 SDK_EXT_HOST_MANIFEST = "${SDK_DEPLOY}/${TOOLCHAINEXT_OUTPUTNAME}.host.manifest"
 
-SDK_TITLE_task-populate-sdk-ext = "${@d.getVar('DISTRO_NAME', True) or d.getVar('DISTRO', True)} Extensible SDK"
+SDK_TITLE_task-populate-sdk-ext = "${@d.getVar('DISTRO_NAME') or d.getVar('DISTRO')} Extensible SDK"
 
 def clean_esdk_builddir(d, sdkbasepath):
     """Clean up traces of the fake build for create_filtered_tasklist()"""
@@ -110,7 +110,7 @@ def create_filtered_tasklist(d, sdkbasepath, tasklistfile, conf_initpath):
     try:
         with open(sdkbasepath + '/conf/local.conf', 'a') as f:
             # Force the use of sstate from the build system
-            f.write('\nSSTATE_DIR_forcevariable = "%s"\n' % d.getVar('SSTATE_DIR', True))
+            f.write('\nSSTATE_DIR_forcevariable = "%s"\n' % d.getVar('SSTATE_DIR'))
             f.write('SSTATE_MIRRORS_forcevariable = ""\n')
             # Ensure TMPDIR is the default so that clean_esdk_builddir() can delete it
             f.write('TMPDIR_forcevariable = "${TOPDIR}/tmp"\n')
@@ -121,7 +121,7 @@ def create_filtered_tasklist(d, sdkbasepath, tasklistfile, conf_initpath):
 
     # Unfortunately the default SDKPATH (or even a custom value) may contain characters that bitbake
     # will not allow in its COREBASE path, so we need to rename the directory temporarily
-    temp_sdkbasepath = d.getVar('SDK_OUTPUT', True) + '/tmp-renamed-sdk'
+    temp_sdkbasepath = d.getVar('SDK_OUTPUT') + '/tmp-renamed-sdk'
     # Delete any existing temp dir
     try:
         shutil.rmtree(temp_sdkbasepath)
@@ -130,7 +130,7 @@ def create_filtered_tasklist(d, sdkbasepath, tasklistfile, conf_initpath):
     os.rename(sdkbasepath, temp_sdkbasepath)
     try:
         cmdprefix = '. %s .; ' % conf_initpath
-        logfile = d.getVar('WORKDIR', True) + '/tasklist_bb_log.txt'
+        logfile = d.getVar('WORKDIR') + '/tasklist_bb_log.txt'
         try:
             oe.copy_buildsystem.check_sstate_task_list(d, get_sdk_install_targets(d), tasklistfile, cmdprefix=cmdprefix, cwd=temp_sdkbasepath, logfile=logfile)
         except bb.process.ExecutionError as e:
@@ -152,7 +152,7 @@ python copy_buildsystem () {
     import glob
     import oe.copy_buildsystem
 
-    oe_init_env_script = d.getVar('OE_INIT_ENV_SCRIPT', True)
+    oe_init_env_script = d.getVar('OE_INIT_ENV_SCRIPT')
 
     conf_bbpath = ''
     conf_initpath = ''
@@ -160,10 +160,10 @@ python copy_buildsystem () {
 
     # Copy in all metadata layers + bitbake (as repositories)
     buildsystem = oe.copy_buildsystem.BuildSystem('extensible SDK', d)
-    baseoutpath = d.getVar('SDK_OUTPUT', True) + '/' + d.getVar('SDKPATH', True)
+    baseoutpath = d.getVar('SDK_OUTPUT') + '/' + d.getVar('SDKPATH')
 
     # Determine if we're building a derivative extensible SDK (from devtool build-sdk)
-    derivative = (d.getVar('SDK_DERIVATIVE', True) or '') == '1'
+    derivative = (d.getVar('SDK_DERIVATIVE') or '') == '1'
     if derivative:
         workspace_name = 'orig-workspace'
     else:
@@ -171,7 +171,7 @@ python copy_buildsystem () {
     layers_copied = buildsystem.copy_bitbake_and_layers(baseoutpath + '/layers', workspace_name)
 
     sdkbblayers = []
-    corebase = os.path.basename(d.getVar('COREBASE', True))
+    corebase = os.path.basename(d.getVar('COREBASE'))
     for layer in layers_copied:
         if corebase == os.path.basename(layer):
             conf_bbpath = os.path.join('layers', layer, 'bitbake')
@@ -202,8 +202,8 @@ python copy_buildsystem () {
     config.set('General', 'init_path', conf_initpath)
     config.set('General', 'core_meta_subdir', core_meta_subdir)
     config.add_section('SDK')
-    config.set('SDK', 'sdk_targets', d.getVar('SDK_TARGETS', True))
-    updateurl = d.getVar('SDK_UPDATE_URL', True)
+    config.set('SDK', 'sdk_targets', d.getVar('SDK_TARGETS'))
+    updateurl = d.getVar('SDK_UPDATE_URL')
     if updateurl:
         config.set('SDK', 'updateserver', updateurl)
     bb.utils.mkdirhier(os.path.join(baseoutpath, 'conf'))
@@ -215,7 +215,7 @@ python copy_buildsystem () {
         pass
 
     # Create a layer for new recipes / appends
-    bbpath = d.getVar('BBPATH', True)
+    bbpath = d.getVar('BBPATH')
     bb.process.run(['devtool', '--bbpath', bbpath, '--basepath', baseoutpath, 'create-workspace', '--create-only', os.path.join(baseoutpath, 'workspace')])
 
     # Create bblayers.conf
@@ -248,16 +248,16 @@ python copy_buildsystem () {
         bb.utils.mkdirhier(uninative_outdir)
         shutil.copy(uninative_file, uninative_outdir)
 
-    env_whitelist = (d.getVar('BB_ENV_EXTRAWHITE', True) or '').split()
+    env_whitelist = (d.getVar('BB_ENV_EXTRAWHITE') or '').split()
     env_whitelist_values = {}
 
     # Create local.conf
-    builddir = d.getVar('TOPDIR', True)
+    builddir = d.getVar('TOPDIR')
     if derivative:
         shutil.copyfile(builddir + '/conf/local.conf', baseoutpath + '/conf/local.conf')
     else:
-        local_conf_whitelist = (d.getVar('SDK_LOCAL_CONF_WHITELIST', True) or '').split()
-        local_conf_blacklist = (d.getVar('SDK_LOCAL_CONF_BLACKLIST', True) or '').split()
+        local_conf_whitelist = (d.getVar('SDK_LOCAL_CONF_WHITELIST') or '').split()
+        local_conf_blacklist = (d.getVar('SDK_LOCAL_CONF_BLACKLIST') or '').split()
         def handle_var(varname, origvalue, op, newlines):
             if varname in local_conf_blacklist or (origvalue.strip().startswith('/') and not varname in local_conf_whitelist):
                 newlines.append('# Removed original setting of %s\n' % varname)
@@ -285,7 +285,7 @@ python copy_buildsystem () {
             f.write('DL_DIR = "${TOPDIR}/downloads"\n')
 
             f.write('INHERIT += "%s"\n' % 'uninative')
-            f.write('UNINATIVE_CHECKSUM[%s] = "%s"\n\n' % (d.getVar('BUILD_ARCH', True), uninative_checksum))
+            f.write('UNINATIVE_CHECKSUM[%s] = "%s"\n\n' % (d.getVar('BUILD_ARCH'), uninative_checksum))
             f.write('CONF_VERSION = "%s"\n\n' % d.getVar('CONF_VERSION', False))
 
     # Some classes are not suitable for SDK, remove them from INHERIT
@@ -319,7 +319,7 @@ python copy_buildsystem () {
 
     # If you define a sdk_extraconf() function then it can contain additional config
     # (Though this is awkward; sdk-extra.conf should probably be used instead)
-    extraconf = (d.getVar('sdk_extraconf', True) or '').strip()
+    extraconf = (d.getVar('sdk_extraconf') or '').strip()
     if extraconf:
         # Strip off any leading / trailing spaces
         for line in extraconf.splitlines():
@@ -352,7 +352,7 @@ python copy_buildsystem () {
     # BB_ENV_EXTRAWHITE) are set in the SDK's configuration
     extralines = []
     for name, value in env_whitelist_values.items():
-        actualvalue = d.getVar(name, True) or ''
+        actualvalue = d.getVar(name) or ''
         if value != actualvalue:
             extralines.append('%s = "%s"\n' % (name, actualvalue))
     if extralines:
@@ -365,7 +365,7 @@ python copy_buildsystem () {
 
     # Filter the locked signatures file to just the sstate tasks we are interested in
     excluded_targets = get_sdk_install_targets(d, images_only=True)
-    sigfile = d.getVar('WORKDIR', True) + '/locked-sigs.inc'
+    sigfile = d.getVar('WORKDIR') + '/locked-sigs.inc'
     lockedsigs_pruned = baseoutpath + '/conf/locked-sigs.inc'
     oe.copy_buildsystem.prune_lockedsigs([],
                                          excluded_targets.split(),
@@ -378,36 +378,36 @@ python copy_buildsystem () {
     # uninative.bbclass sets NATIVELSBSTRING to 'universal%s' % oe.utils.host_gcc_version(d)
     fixedlsbstring = "universal%s" % oe.utils.host_gcc_version(d)
 
-    sdk_include_toolchain = (d.getVar('SDK_INCLUDE_TOOLCHAIN', True) == '1')
-    sdk_ext_type = d.getVar('SDK_EXT_TYPE', True)
+    sdk_include_toolchain = (d.getVar('SDK_INCLUDE_TOOLCHAIN') == '1')
+    sdk_ext_type = d.getVar('SDK_EXT_TYPE')
     if sdk_ext_type != 'minimal' or sdk_include_toolchain or derivative:
         # Create the filtered task list used to generate the sstate cache shipped with the SDK
-        tasklistfn = d.getVar('WORKDIR', True) + '/tasklist.txt'
+        tasklistfn = d.getVar('WORKDIR') + '/tasklist.txt'
         create_filtered_tasklist(d, baseoutpath, tasklistfn, conf_initpath)
     else:
         tasklistfn = None
 
     # Add packagedata if enabled
-    if d.getVar('SDK_INCLUDE_PKGDATA', True) == '1':
-        lockedsigs_base = d.getVar('WORKDIR', True) + '/locked-sigs-base.inc'
-        lockedsigs_copy = d.getVar('WORKDIR', True) + '/locked-sigs-copy.inc'
+    if d.getVar('SDK_INCLUDE_PKGDATA') == '1':
+        lockedsigs_base = d.getVar('WORKDIR') + '/locked-sigs-base.inc'
+        lockedsigs_copy = d.getVar('WORKDIR') + '/locked-sigs-copy.inc'
         shutil.move(lockedsigs_pruned, lockedsigs_base)
         oe.copy_buildsystem.merge_lockedsigs(['do_packagedata'],
                                              lockedsigs_base,
-                                             d.getVar('STAGING_DIR_HOST', True) + '/world-pkgdata/locked-sigs-pkgdata.inc',
+                                             d.getVar('STAGING_DIR_HOST') + '/world-pkgdata/locked-sigs-pkgdata.inc',
                                              lockedsigs_pruned,
                                              lockedsigs_copy)
 
     if sdk_include_toolchain:
-        lockedsigs_base = d.getVar('WORKDIR', True) + '/locked-sigs-base2.inc'
-        lockedsigs_toolchain = d.getVar('STAGING_DIR_HOST', True) + '/locked-sigs/locked-sigs-extsdk-toolchain.inc'
+        lockedsigs_base = d.getVar('WORKDIR') + '/locked-sigs-base2.inc'
+        lockedsigs_toolchain = d.getVar('STAGING_DIR_HOST') + '/locked-sigs/locked-sigs-extsdk-toolchain.inc'
         shutil.move(lockedsigs_pruned, lockedsigs_base)
         oe.copy_buildsystem.merge_lockedsigs([],
                                              lockedsigs_base,
                                              lockedsigs_toolchain,
                                              lockedsigs_pruned)
         oe.copy_buildsystem.create_locked_sstate_cache(lockedsigs_toolchain,
-                                                       d.getVar('SSTATE_DIR', True),
+                                                       d.getVar('SSTATE_DIR'),
                                                        sstate_out, d,
                                                        fixedlsbstring,
                                                        filterfile=tasklistfn)
@@ -417,22 +417,22 @@ python copy_buildsystem () {
         # Assume the user is not going to set up an additional sstate
         # mirror, thus we need to copy the additional artifacts (from
         # workspace recipes) into the derivative SDK
-        lockedsigs_orig = d.getVar('TOPDIR', True) + '/conf/locked-sigs.inc'
+        lockedsigs_orig = d.getVar('TOPDIR') + '/conf/locked-sigs.inc'
         if os.path.exists(lockedsigs_orig):
-            lockedsigs_extra = d.getVar('WORKDIR', True) + '/locked-sigs-extra.inc'
+            lockedsigs_extra = d.getVar('WORKDIR') + '/locked-sigs-extra.inc'
             oe.copy_buildsystem.merge_lockedsigs(None,
                                                  lockedsigs_orig,
                                                  lockedsigs_pruned,
                                                  None,
                                                  lockedsigs_extra)
             oe.copy_buildsystem.create_locked_sstate_cache(lockedsigs_extra,
-                                                           d.getVar('SSTATE_DIR', True),
+                                                           d.getVar('SSTATE_DIR'),
                                                            sstate_out, d,
                                                            fixedlsbstring,
                                                            filterfile=tasklistfn)
     else:
         oe.copy_buildsystem.create_locked_sstate_cache(lockedsigs_pruned,
-                                                       d.getVar('SSTATE_DIR', True),
+                                                       d.getVar('SSTATE_DIR'),
                                                        sstate_out, d,
                                                        fixedlsbstring,
                                                        filterfile=tasklistfn)
@@ -463,24 +463,24 @@ python copy_buildsystem () {
 def get_current_buildtools(d):
     """Get the file name of the current buildtools installer"""
     import glob
-    btfiles = glob.glob(os.path.join(d.getVar('SDK_DEPLOY', True), '*-buildtools-nativesdk-standalone-*.sh'))
+    btfiles = glob.glob(os.path.join(d.getVar('SDK_DEPLOY'), '*-buildtools-nativesdk-standalone-*.sh'))
     btfiles.sort(key=os.path.getctime)
     return os.path.basename(btfiles[-1])
 
 def get_sdk_required_utilities(buildtools_fn, d):
     """Find required utilities that aren't provided by the buildtools"""
-    sanity_required_utilities = (d.getVar('SANITY_REQUIRED_UTILITIES', True) or '').split()
+    sanity_required_utilities = (d.getVar('SANITY_REQUIRED_UTILITIES') or '').split()
     sanity_required_utilities.append(d.expand('${BUILD_PREFIX}gcc'))
     sanity_required_utilities.append(d.expand('${BUILD_PREFIX}g++'))
-    buildtools_installer = os.path.join(d.getVar('SDK_DEPLOY', True), buildtools_fn)
+    buildtools_installer = os.path.join(d.getVar('SDK_DEPLOY'), buildtools_fn)
     filelist, _ = bb.process.run('%s -l' % buildtools_installer)
     localdata = bb.data.createCopy(d)
     localdata.setVar('SDKPATH', '.')
-    sdkpathnative = localdata.getVar('SDKPATHNATIVE', True)
-    sdkbindirs = [localdata.getVar('bindir_nativesdk', True),
-                  localdata.getVar('sbindir_nativesdk', True),
-                  localdata.getVar('base_bindir_nativesdk', True),
-                  localdata.getVar('base_sbindir_nativesdk', True)]
+    sdkpathnative = localdata.getVar('SDKPATHNATIVE')
+    sdkbindirs = [localdata.getVar('bindir_nativesdk'),
+                  localdata.getVar('sbindir_nativesdk'),
+                  localdata.getVar('base_bindir_nativesdk'),
+                  localdata.getVar('base_sbindir_nativesdk')]
     for line in filelist.splitlines():
         splitline = line.split()
         if len(splitline) > 5:
@@ -509,7 +509,7 @@ install_tools() {
     # (they get populated from sstate on installation)
     unfsd_path="${SDK_OUTPUT}/${SDKPATHNATIVE}${bindir_nativesdk}/unfsd"
     if [ "${SDK_INCLUDE_TOOLCHAIN}" == "1" -a ! -e $unfsd_path ] ; then
-        binrelpath=${@os.path.relpath(d.getVar('STAGING_BINDIR_NATIVE',True), d.getVar('TOPDIR', True))}
+        binrelpath=${@os.path.relpath(d.getVar('STAGING_BINDIR_NATIVE',True), d.getVar('TOPDIR'))}
         lnr ${SDK_OUTPUT}/${SDKPATH}/$binrelpath/unfsd $unfsd_path
     fi
     touch ${SDK_OUTPUT}/${SDKPATH}/.devtoolbase
@@ -611,8 +611,8 @@ SDK_INSTALL_TARGETS = ""
 fakeroot python do_populate_sdk_ext() {
     # FIXME hopefully we can remove this restriction at some point, but uninative
     # currently forces this upon us
-    if d.getVar('SDK_ARCH', True) != d.getVar('BUILD_ARCH', True):
-        bb.fatal('The extensible SDK can currently only be built for the same architecture as the machine being built on - SDK_ARCH is set to %s (likely via setting SDKMACHINE) which is different from the architecture of the build machine (%s). Unable to continue.' % (d.getVar('SDK_ARCH', True), d.getVar('BUILD_ARCH', True)))
+    if d.getVar('SDK_ARCH') != d.getVar('BUILD_ARCH'):
+        bb.fatal('The extensible SDK can currently only be built for the same architecture as the machine being built on - SDK_ARCH is set to %s (likely via setting SDKMACHINE) which is different from the architecture of the build machine (%s). Unable to continue.' % (d.getVar('SDK_ARCH'), d.getVar('BUILD_ARCH')))
 
     d.setVar('SDK_INSTALL_TARGETS', get_sdk_install_targets(d))
     buildtools_fn = get_current_buildtools(d)
@@ -626,7 +626,7 @@ fakeroot python do_populate_sdk_ext() {
 def get_ext_sdk_depends(d):
     # Note: the deps varflag is a list not a string, so we need to specify expand=False
     deps = d.getVarFlag('do_image_complete', 'deps', False)
-    pn = d.getVar('PN', True)
+    pn = d.getVar('PN')
     deplist = ['%s:%s' % (pn, dep) for dep in deps]
     for task in ['do_image_complete', 'do_rootfs', 'do_build']:
         deplist.extend((d.getVarFlag(task, 'depends', True) or '').split())
@@ -637,7 +637,7 @@ python do_sdk_depends() {
     # dependencies we don't need to (e.g. buildtools-tarball) and bringing those
     # into the SDK's sstate-cache
     import oe.copy_buildsystem
-    sigfile = d.getVar('WORKDIR', True) + '/locked-sigs.inc'
+    sigfile = d.getVar('WORKDIR') + '/locked-sigs.inc'
     oe.copy_buildsystem.generate_locked_sigs(sigfile, d)
 }
 addtask sdk_depends
@@ -658,10 +658,10 @@ do_populate_sdk_ext[dirs] = "${@d.getVarFlag('do_populate_sdk', 'dirs', False)}"
 
 do_populate_sdk_ext[depends] = "${@d.getVarFlag('do_populate_sdk', 'depends', False)} \
                                 buildtools-tarball:do_populate_sdk uninative-tarball:do_populate_sdk \
-                                ${@'meta-world-pkgdata:do_collect_packagedata' if d.getVar('SDK_INCLUDE_PKGDATA', True) == '1' else ''} \
-                                ${@'meta-extsdk-toolchain:do_locked_sigs' if d.getVar('SDK_INCLUDE_TOOLCHAIN', True) == '1' else ''}"
+                                ${@'meta-world-pkgdata:do_collect_packagedata' if d.getVar('SDK_INCLUDE_PKGDATA') == '1' else ''} \
+                                ${@'meta-extsdk-toolchain:do_locked_sigs' if d.getVar('SDK_INCLUDE_TOOLCHAIN') == '1' else ''}"
 
-do_populate_sdk_ext[rdepends] += "${@' '.join([x + ':do_build' for x in d.getVar('SDK_TARGETS', True).split()])}"
+do_populate_sdk_ext[rdepends] += "${@' '.join([x + ':do_build' for x in d.getVar('SDK_TARGETS').split()])}"
 
 # Make sure code changes can result in rebuild
 do_populate_sdk_ext[vardeps] += "copy_buildsystem \
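
SDK_INCLUDE_TOOLCHAIN above defaults via an inline-Python expression keyed on SDK_EXT_TYPE, evaluated when the variable is expanded. A sketch of that mechanism (assuming bitbake's `${@...}` inline expansion; values illustrative):

    import bb.data

    d = bb.data.init()
    d.setVar('SDK_EXT_TYPE', 'full')
    d.setVar('SDK_INCLUDE_TOOLCHAIN',
             "${@'1' if d.getVar('SDK_EXT_TYPE') == 'full' else '0'}")
    print(d.getVar('SDK_INCLUDE_TOOLCHAIN'))  # '1'; '0' for a minimal SDK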
diff --git a/meta/classes/prexport.bbclass b/meta/classes/prexport.bbclass
index 809ec1034e..6dcf99e29f 100644
--- a/meta/classes/prexport.bbclass
+++ b/meta/classes/prexport.bbclass
@@ -15,7 +15,7 @@ python prexport_handler () {
     if isinstance(e, bb.event.RecipeParsed):
         import oe.prservice
         #get all PR values for the current PRAUTOINX
-        ver = e.data.getVar('PRSERV_DUMPOPT_VERSION', True)
+        ver = e.data.getVar('PRSERV_DUMPOPT_VERSION')
         ver = ver.replace('%','-')
         retval = oe.prservice.prserv_dump_db(e.data)
         if not retval:
@@ -40,7 +40,7 @@ python prexport_handler () {
         import oe.prservice
         oe.prservice.prserv_check_avail(e.data)
         #remove dumpfile
-        bb.utils.remove(e.data.getVar('PRSERV_DUMPFILE', True))
+        bb.utils.remove(e.data.getVar('PRSERV_DUMPFILE'))
     elif isinstance(e, bb.event.ParseCompleted):
         import oe.prservice
         #dump meta info of tables
diff --git a/meta/classes/ptest.bbclass b/meta/classes/ptest.bbclass
index fa3561e621..798d802974 100644
--- a/meta/classes/ptest.bbclass
+++ b/meta/classes/ptest.bbclass
@@ -61,7 +61,7 @@ python () {
     d.setVarFlag('do_install_ptest_base', 'fakeroot', '1')
 
     # Remove all '*ptest_base' tasks when ptest is not enabled
-    if not(d.getVar('PTEST_ENABLED', True) == "1"):
+    if not(d.getVar('PTEST_ENABLED') == "1"):
         for i in ['do_configure_ptest_base', 'do_compile_ptest_base', 'do_install_ptest_base']:
             bb.build.deltask(i, d)
 }
diff --git a/meta/classes/qemu.bbclass b/meta/classes/qemu.bbclass
index f2d4d1c9e1..616758260c 100644
--- a/meta/classes/qemu.bbclass
+++ b/meta/classes/qemu.bbclass
@@ -4,12 +4,12 @@
 #
 
 def qemu_target_binary(data):
-    package_arch = data.getVar("PACKAGE_ARCH", True)
-    qemu_target_binary = (data.getVar("QEMU_TARGET_BINARY_%s" % package_arch, True) or "")
+    package_arch = data.getVar("PACKAGE_ARCH")
+    qemu_target_binary = (data.getVar("QEMU_TARGET_BINARY_%s" % package_arch) or "")
     if qemu_target_binary:
         return qemu_target_binary
 
-    target_arch = data.getVar("TARGET_ARCH", True)
+    target_arch = data.getVar("TARGET_ARCH")
     if target_arch in ("i486", "i586", "i686"):
         target_arch = "i386"
     elif target_arch == "powerpc":
@@ -26,7 +26,7 @@ def qemu_wrapper_cmdline(data, rootfs_path, library_paths):
     if qemu_binary == "qemu-allarch":
         qemu_binary = "qemuwrapper"
 
-    qemu_options = data.getVar("QEMU_OPTIONS", True)
+    qemu_options = data.getVar("QEMU_OPTIONS")
 
     return "PSEUDO_UNLOAD=1 " + qemu_binary + " " + qemu_options + " -L " + rootfs_path\
         + " -E LD_LIBRARY_PATH=" + ":".join(library_paths) + " "
@@ -52,7 +52,7 @@ def qemu_run_binary(data, rootfs_path, binary):
 # this dance). For others (e.g. arm) a -cpu option is not necessary, since the
 # qemu-arm default CPU supports all required architecture levels.
 
-QEMU_OPTIONS = "-r ${OLDEST_KERNEL} ${@d.getVar("QEMU_EXTRAOPTIONS_%s" % d.getVar('PACKAGE_ARCH', True), True) or ""}"
+QEMU_OPTIONS = "-r ${OLDEST_KERNEL} ${@d.getVar("QEMU_EXTRAOPTIONS_%s" % d.getVar('PACKAGE_ARCH'), True) or ""}"
 QEMU_OPTIONS[vardeps] += "QEMU_EXTRAOPTIONS_${PACKAGE_ARCH}"
 
 QEMU_EXTRAOPTIONS_ppce500v2 = " -cpu e500v2"
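
qemu_target_binary() above resolves the user-mode qemu binary with a per-arch override first, then by normalizing TARGET_ARCH. A condensed stand-alone sketch of that fallback chain (the final name format and the powerpc mapping are assumed from context):

    def qemu_target_binary(package_arch, target_arch, overrides=None):
        overrides = overrides or {}  # stands in for the datastore lookup
        override = overrides.get("QEMU_TARGET_BINARY_%s" % package_arch, "")
        if override:
            return override
        # normalize architecture names to qemu's binary naming
        if target_arch in ("i486", "i586", "i686"):
            target_arch = "i386"
        elif target_arch == "powerpc":
            target_arch = "ppc"
        return "qemu-" + target_arch

    print(qemu_target_binary('core2-32', 'i586'))  # qemu-i386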
diff --git a/meta/classes/qemuboot.bbclass b/meta/classes/qemuboot.bbclass
index 8b1d4d087d..28e8a737f8 100644
--- a/meta/classes/qemuboot.bbclass
+++ b/meta/classes/qemuboot.bbclass
@@ -55,17 +55,17 @@ do_write_qemuboot_conf[vardeps] += "${@' '.join(qemuboot_vars(d))}"
 python do_write_qemuboot_conf() {
     import configparser
 
-    qemuboot = "%s/%s.qemuboot.conf" % (d.getVar('DEPLOY_DIR_IMAGE', True), d.getVar('IMAGE_NAME', True))
-    qemuboot_link = "%s/%s.qemuboot.conf" % (d.getVar('DEPLOY_DIR_IMAGE', True), d.getVar('IMAGE_LINK_NAME', True))
+    qemuboot = "%s/%s.qemuboot.conf" % (d.getVar('DEPLOY_DIR_IMAGE'), d.getVar('IMAGE_NAME'))
+    qemuboot_link = "%s/%s.qemuboot.conf" % (d.getVar('DEPLOY_DIR_IMAGE'), d.getVar('IMAGE_LINK_NAME'))
     cf = configparser.ConfigParser()
     cf.add_section('config_bsp')
     for k in qemuboot_vars(d):
-        cf.set('config_bsp', k, '%s' % d.getVar(k, True))
+        cf.set('config_bsp', k, '%s' % d.getVar(k))
 
     # QB_DEFAULT_KERNEL's value of KERNEL_IMAGETYPE is the name of a symlink
     # to the kernel file, which hinders relocatability of the qb conf.
     # Read the link and replace it with the full filename of the target.
-    kernel_link = os.path.join(d.getVar('DEPLOY_DIR_IMAGE', True), d.getVar('QB_DEFAULT_KERNEL', True))
+    kernel_link = os.path.join(d.getVar('DEPLOY_DIR_IMAGE'), d.getVar('QB_DEFAULT_KERNEL'))
     kernel = os.path.realpath(kernel_link)
     cf.set('config_bsp', 'QB_DEFAULT_KERNEL', kernel)
 
diff --git a/meta/classes/recipe_sanity.bbclass b/meta/classes/recipe_sanity.bbclass
index add34df9d6..3b00b0e521 100644
--- a/meta/classes/recipe_sanity.bbclass
+++ b/meta/classes/recipe_sanity.bbclass
@@ -1,5 +1,5 @@
 def __note(msg, d):
-    bb.note("%s: recipe_sanity: %s" % (d.getVar("P", True), msg))
+    bb.note("%s: recipe_sanity: %s" % (d.getVar("P"), msg))
 
 __recipe_sanity_badruntimevars = "RDEPENDS RPROVIDES RRECOMMENDS RCONFLICTS"
 def bad_runtime_vars(cfgdata, d):
@@ -7,7 +7,7 @@ def bad_runtime_vars(cfgdata, d):
        bb.data.inherits_class("cross", d):
         return
 
-    for var in d.getVar("__recipe_sanity_badruntimevars", True).split():
+    for var in d.getVar("__recipe_sanity_badruntimevars").split():
         val = d.getVar(var, False)
         if val and val != cfgdata.get(var):
             __note("%s should be %s_${PN}" % (var, var), d)
@@ -15,11 +15,11 @@ def bad_runtime_vars(cfgdata, d):
 __recipe_sanity_reqvars = "DESCRIPTION"
 __recipe_sanity_reqdiffvars = ""
 def req_vars(cfgdata, d):
-    for var in d.getVar("__recipe_sanity_reqvars", True).split():
+    for var in d.getVar("__recipe_sanity_reqvars").split():
         if not d.getVar(var, False):
             __note("%s should be set" % var, d)
 
-    for var in d.getVar("__recipe_sanity_reqdiffvars", True).split():
+    for var in d.getVar("__recipe_sanity_reqdiffvars").split():
         val = d.getVar(var, False)
         cfgval = cfgdata.get(var)
 
@@ -38,11 +38,11 @@ def var_renames_overwrite(cfgdata, d):
 def incorrect_nonempty_PACKAGES(cfgdata, d):
     if bb.data.inherits_class("native", d) or \
        bb.data.inherits_class("cross", d):
-        if d.getVar("PACKAGES", True):
+        if d.getVar("PACKAGES"):
             return True
 
 def can_use_autotools_base(cfgdata, d):
-    cfg = d.getVar("do_configure", True)
+    cfg = d.getVar("do_configure")
     if not bb.data.inherits_class("autotools", d):
         return False
 
@@ -61,7 +61,7 @@ def can_delete_FILESPATH(cfgdata, d):
     expected = cfgdata.get("FILESPATH")
     expectedpaths = d.expand(expected)
     unexpanded = d.getVar("FILESPATH", False)
-    filespath = d.getVar("FILESPATH", True).split(":")
+    filespath = d.getVar("FILESPATH").split(":")
     filespath = [os.path.normpath(f) for f in filespath if os.path.exists(f)]
     for fp in filespath:
         if not fp in expectedpaths:
@@ -72,13 +72,13 @@ def can_delete_FILESPATH(cfgdata, d):
 
 def can_delete_FILESDIR(cfgdata, d):
     expected = cfgdata.get("FILESDIR")
-    #expected = "${@bb.utils.which(d.getVar('FILESPATH', True), '.')}"
+    #expected = "${@bb.utils.which(d.getVar('FILESPATH'), '.')}"
     unexpanded = d.getVar("FILESDIR", False)
     if unexpanded is None:
         return False
 
-    expanded = os.path.normpath(d.getVar("FILESDIR", True))
-    filespath = d.getVar("FILESPATH", True).split(":")
+    expanded = os.path.normpath(d.getVar("FILESDIR"))
+    filespath = d.getVar("FILESPATH").split(":")
     filespath = [os.path.normpath(f) for f in filespath if os.path.exists(f)]
 
     return unexpanded != expected and \
@@ -96,7 +96,7 @@ def can_delete_others(p, cfgdata, d):
             continue
 
         try:
-            expanded = d.getVar(k, True)
+            expanded = d.getVar(k)
             cfgexpanded = d.expand(cfgunexpanded)
         except bb.fetch.ParameterError:
             continue
@@ -108,8 +108,8 @@ def can_delete_others(p, cfgdata, d):
                (p, cfgunexpanded, unexpanded, expanded))
 
 python do_recipe_sanity () {
-    p = d.getVar("P", True)
-    p = "%s %s %s" % (d.getVar("PN", True), d.getVar("PV", True), d.getVar("PR", True))
+    p = d.getVar("P")
+    p = "%s %s %s" % (d.getVar("PN"), d.getVar("PV"), d.getVar("PR"))
 
     sanitychecks = [
         (can_delete_FILESDIR, "candidate for removal of FILESDIR"),
diff --git a/meta/classes/report-error.bbclass b/meta/classes/report-error.bbclass
index 5bb231efc1..d6fdd364ad 100644
--- a/meta/classes/report-error.bbclass
+++ b/meta/classes/report-error.bbclass
@@ -10,7 +10,7 @@ ERR_REPORT_DIR ?= "${LOG_DIR}/error-report"
 
 def errorreport_getdata(e):
     import codecs
-    logpath = e.data.getVar('ERR_REPORT_DIR', True)
+    logpath = e.data.getVar('ERR_REPORT_DIR')
     datafile = os.path.join(logpath, "error-report.txt")
     with codecs.open(datafile, 'r', 'utf-8') as f:
         data = f.read()
@@ -19,7 +19,7 @@ def errorreport_getdata(e):
 def errorreport_savedata(e, newdata, file):
     import json
     import codecs
-    logpath = e.data.getVar('ERR_REPORT_DIR', True)
+    logpath = e.data.getVar('ERR_REPORT_DIR')
     datafile = os.path.join(logpath, file)
     with codecs.open(datafile, 'w', 'utf-8') as f:
         json.dump(newdata, f, indent=4, sort_keys=True)
@@ -29,18 +29,18 @@ python errorreport_handler () {
     import json
     import codecs
 
-    logpath = e.data.getVar('ERR_REPORT_DIR', True)
+    logpath = e.data.getVar('ERR_REPORT_DIR')
     datafile = os.path.join(logpath, "error-report.txt")
 
     if isinstance(e, bb.event.BuildStarted):
         bb.utils.mkdirhier(logpath)
         data = {}
-        machine = e.data.getVar("MACHINE", True)
+        machine = e.data.getVar("MACHINE")
         data['machine'] = machine
-        data['build_sys'] = e.data.getVar("BUILD_SYS", True)
-        data['nativelsb'] = e.data.getVar("NATIVELSBSTRING", True)
-        data['distro'] = e.data.getVar("DISTRO", True)
-        data['target_sys'] = e.data.getVar("TARGET_SYS", True)
+        data['build_sys'] = e.data.getVar("BUILD_SYS")
+        data['nativelsb'] = e.data.getVar("NATIVELSBSTRING")
+        data['distro'] = e.data.getVar("DISTRO")
+        data['target_sys'] = e.data.getVar("TARGET_SYS")
         data['failures'] = []
         data['component'] = " ".join(e.getPkgs())
         data['branch_commit'] = str(base_detect_branch(e.data)) + ": " + str(base_detect_revision(e.data))
@@ -51,7 +51,7 @@ python errorreport_handler () {
     elif isinstance(e, bb.build.TaskFailed):
         task = e.task
         taskdata={}
-        log = e.data.getVar('BB_LOGFILE', True)
+        log = e.data.getVar('BB_LOGFILE')
         taskdata['package'] = e.data.expand("${PF}")
         taskdata['task'] = task
         if log:
@@ -61,7 +61,7 @@ python errorreport_handler () {
 
             # Replace host-specific paths so the logs are cleaner
             for d in ("TOPDIR", "TMPDIR"):
-                s = e.data.getVar(d, True)
+                s = e.data.getVar(d)
                 if s:
                     logdata = logdata.replace(s, d)
 
@@ -92,7 +92,7 @@ python errorreport_handler () {
         bb.utils.unlockfile(lock)
         failures = jsondata['failures']
         if(len(failures) > 0):
-            filename = "error_report_" + e.data.getVar("BUILDNAME", True)+".txt"
+            filename = "error_report_" + e.data.getVar("BUILDNAME")+".txt"
             datafile = errorreport_savedata(e, jsondata, filename)
             bb.note("The errors for this build are stored in %s\nYou can send the errors to a reports server by running:\n  send-error-report %s [-s server]" % (datafile, datafile))
             bb.note("The contents of these logs will be posted in public if you use the above command with the default server. Please ensure you remove any identifying or proprietary information when prompted before sending.")
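
The handler above also scrubs host paths out of failure logs before they are stored; note it reuses the name d for the variable name inside the loop, shadowing the datastore. A stand-alone sketch of the scrubbing itself, with illustrative paths:

    logdata = "ERROR: oe_runmake failed in /mnt/fast/tmp/work/foo"
    for name, value in {"TOPDIR": "/home/builder/build",
                        "TMPDIR": "/mnt/fast/tmp"}.items():
        if value:
            # replace the concrete host path with the variable name
            logdata = logdata.replace(value, name)
    print(logdata)  # ERROR: oe_runmake failed in TMPDIR/work/foo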
diff --git a/meta/classes/rm_work.bbclass b/meta/classes/rm_work.bbclass
index 263ad8be9f..3516c7edf8 100644
--- a/meta/classes/rm_work.bbclass
+++ b/meta/classes/rm_work.bbclass
@@ -119,10 +119,10 @@ rm_work_rootfs[cleandirs] = "${WORKDIR}/rootfs"
 
 python () {
     if bb.data.inherits_class('kernel', d):
-        d.appendVar("RM_WORK_EXCLUDE", ' ' + d.getVar("PN", True))
+        d.appendVar("RM_WORK_EXCLUDE", ' ' + d.getVar("PN"))
     # If the recipe name is in the RM_WORK_EXCLUDE, skip the recipe.
-    excludes = (d.getVar("RM_WORK_EXCLUDE", True) or "").split()
-    pn = d.getVar("PN", True)
+    excludes = (d.getVar("RM_WORK_EXCLUDE") or "").split()
+    pn = d.getVar("PN")
     if pn in excludes:
         d.delVarFlag('rm_work_rootfs', 'cleandirs')
         d.delVarFlag('rm_work_populatesdk', 'cleandirs')
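
The rm_work hunk keys everything off RM_WORK_EXCLUDE: kernel recipes append themselves so their work directories survive cleanup. A sketch with illustrative values:

    import bb.data

    d = bb.data.init()
    d.setVar('RM_WORK_EXCLUDE', 'icecc-create-env-native')  # hypothetical seed
    d.setVar('PN', 'linux-yocto')
    d.appendVar('RM_WORK_EXCLUDE', ' ' + d.getVar('PN'))

    excludes = (d.getVar('RM_WORK_EXCLUDE') or '').split()
    print(d.getVar('PN') in excludes)  # True -> this recipe keeps its workdir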
diff --git a/meta/classes/rootfs-postcommands.bbclass b/meta/classes/rootfs-postcommands.bbclass
index 0c7ceea542..8d48a2d1d9 100644
--- a/meta/classes/rootfs-postcommands.bbclass
+++ b/meta/classes/rootfs-postcommands.bbclass
@@ -217,9 +217,9 @@ python write_image_manifest () {
     from oe.rootfs import image_list_installed_packages
     from oe.utils import format_pkg_list
 
-    deploy_dir = d.getVar('IMGDEPLOYDIR', True)
-    link_name = d.getVar('IMAGE_LINK_NAME', True)
-    manifest_name = d.getVar('IMAGE_MANIFEST', True)
+    deploy_dir = d.getVar('IMGDEPLOYDIR')
+    link_name = d.getVar('IMAGE_LINK_NAME')
+    manifest_name = d.getVar('IMAGE_MANIFEST')
 
     if not manifest_name:
         return
diff --git a/meta/classes/rootfs_deb.bbclass b/meta/classes/rootfs_deb.bbclass
index f79fca608e..e8facd4368 100644
--- a/meta/classes/rootfs_deb.bbclass
+++ b/meta/classes/rootfs_deb.bbclass
@@ -14,7 +14,7 @@ do_rootfs[lockfiles] += "${DEPLOY_DIR_DEB}/deb.lock"
 do_populate_sdk[lockfiles] += "${DEPLOY_DIR_DEB}/deb.lock"
 
 python rootfs_deb_bad_recommendations() {
-    if d.getVar("BAD_RECOMMENDATIONS", True):
+    if d.getVar("BAD_RECOMMENDATIONS"):
         bb.warn("Debian package install does not support BAD_RECOMMENDATIONS")
 }
 do_rootfs[prefuncs] += "rootfs_deb_bad_recommendations"
@@ -25,7 +25,7 @@ opkglibdir = "${localstatedir}/lib/opkg"
 
 python () {
     # Map TARGET_ARCH to Debian's ideas about architectures
-    darch = d.getVar('SDK_ARCH', True)
+    darch = d.getVar('SDK_ARCH')
     if darch in ["x86", "i486", "i586", "i686", "pentium"]:
         d.setVar('DEB_SDK_ARCH', 'i386')
     elif darch == "x86_64":
diff --git a/meta/classes/rootfs_ipk.bbclass b/meta/classes/rootfs_ipk.bbclass
index d5c38fef74..8c60398085 100644
--- a/meta/classes/rootfs_ipk.bbclass
+++ b/meta/classes/rootfs_ipk.bbclass
@@ -27,7 +27,7 @@ MULTILIBRE_ALLOW_REP = "${OPKGLIBDIR}/opkg|/usr/lib/opkg"
 
 python () {
 
-    if d.getVar('BUILD_IMAGES_FROM_FEEDS', True):
+    if d.getVar('BUILD_IMAGES_FROM_FEEDS'):
         flags = d.getVarFlag('do_rootfs', 'recrdeptask', True)
         flags = flags.replace("do_package_write_ipk", "")
         flags = flags.replace("do_deploy", "")
diff --git a/meta/classes/rootfs_rpm.bbclass b/meta/classes/rootfs_rpm.bbclass
index 37730a7104..20beb7c713 100644
--- a/meta/classes/rootfs_rpm.bbclass
+++ b/meta/classes/rootfs_rpm.bbclass
@@ -25,7 +25,7 @@ do_rootfs[recrdeptask] += "do_package_write_rpm"
 do_rootfs[vardeps] += "PACKAGE_FEED_URIS"
 
 python () {
-    if d.getVar('BUILD_IMAGES_FROM_FEEDS', True):
+    if d.getVar('BUILD_IMAGES_FROM_FEEDS'):
         flags = d.getVarFlag('do_rootfs', 'recrdeptask', True)
         flags = flags.replace("do_package_write_rpm", "")
         flags = flags.replace("do_deploy", "")
diff --git a/meta/classes/sanity.bbclass b/meta/classes/sanity.bbclass
index 921b248b78..9674ae9580 100644
--- a/meta/classes/sanity.bbclass
+++ b/meta/classes/sanity.bbclass
@@ -6,7 +6,7 @@ SANITY_REQUIRED_UTILITIES ?= "patch diffstat makeinfo git bzip2 tar \
     gzip gawk chrpath wget cpio perl file"
 
 def bblayers_conf_file(d):
-    return os.path.join(d.getVar('TOPDIR', True), 'conf/bblayers.conf')
+    return os.path.join(d.getVar('TOPDIR'), 'conf/bblayers.conf')
 
 def sanity_conf_read(fn):
     with open(fn, 'r') as f:
@@ -39,8 +39,8 @@ SANITY_DIFF_TOOL ?= "meld"
 SANITY_LOCALCONF_SAMPLE ?= "${COREBASE}/meta*/conf/local.conf.sample"
 python oecore_update_localconf() {
     # Check we are using a valid local.conf
-    current_conf = d.getVar('CONF_VERSION', True)
-    conf_version = d.getVar('LOCALCONF_VERSION', True)
+    current_conf = d.getVar('CONF_VERSION')
+    conf_version = d.getVar('LOCALCONF_VERSION')
 
     failmsg = """Your version of local.conf was generated from an older/newer version of
 local.conf.sample and there have been updates made to this file. Please compare the two
@@ -59,8 +59,8 @@ is a good way to visualise the changes."""
 SANITY_SITECONF_SAMPLE ?= "${COREBASE}/meta*/conf/site.conf.sample"
 python oecore_update_siteconf() {
     # If we have a site.conf, check it's valid
-    current_sconf = d.getVar('SCONF_VERSION', True)
-    sconf_version = d.getVar('SITE_CONF_VERSION', True)
+    current_sconf = d.getVar('SCONF_VERSION')
+    sconf_version = d.getVar('SITE_CONF_VERSION')
 
     failmsg = """Your version of site.conf was generated from an older version of
 site.conf.sample and there have been updates made to this file. Please compare the two
@@ -80,8 +80,8 @@ SANITY_BBLAYERCONF_SAMPLE ?= "${COREBASE}/meta*/conf/bblayers.conf.sample"
 python oecore_update_bblayers() {
     # bblayers.conf is out of date, so see if we can resolve that
 
-    current_lconf = int(d.getVar('LCONF_VERSION', True))
-    lconf_version = int(d.getVar('LAYER_CONF_VERSION', True))
+    current_lconf = int(d.getVar('LCONF_VERSION'))
+    lconf_version = int(d.getVar('LAYER_CONF_VERSION'))
 
     failmsg = """Your version of bblayers.conf has the wrong LCONF_VERSION (has ${LCONF_VERSION}, expecting ${LAYER_CONF_VERSION}).
 Please compare your file against bblayers.conf.sample and merge any changes before continuing.
@@ -141,7 +141,7 @@ is a good way to visualise the changes."""
     # Handle rename of meta-yocto -> meta-poky
     # This marks the start of separate version numbers but code is needed in OE-Core
    # for the migration, one last time.
-    layers = d.getVar('BBLAYERS', True).split()
+    layers = d.getVar('BBLAYERS').split()
     layers = [ os.path.basename(path) for path in layers ]
     if 'meta-yocto' in layers:
         found = False
@@ -172,7 +172,7 @@ is a good way to visualise the changes."""
 }
 
 def raise_sanity_error(msg, d, network_error=False):
-    if d.getVar("SANITY_USE_EVENTS", True) == "1":
+    if d.getVar("SANITY_USE_EVENTS") == "1":
         try:
             bb.event.fire(bb.event.SanityCheckFailed(msg, network_error), d)
         except TypeError:
@@ -198,7 +198,7 @@ def check_toolchain_tune_args(data, tune, multilib, errs):
     return found_errors
 
 def check_toolchain_args_present(data, tune, multilib, tune_errors, which):
-    args_set = (data.getVar("TUNE_%s" % which, True) or "").split()
+    args_set = (data.getVar("TUNE_%s" % which) or "").split()
     args_wanted = (data.getVar("TUNEABI_REQUIRED_%s_tune-%s" % (which, tune), True) or "").split()
     args_missing = []
 
@@ -228,7 +228,7 @@ def check_toolchain_tune(data, tune, multilib):
     localdata.setVar("OVERRIDES", overrides)
     bb.data.update_data(localdata)
     bb.debug(2, "Sanity-checking tuning '%s' (%s) features:" % (tune, multilib))
-    features = (localdata.getVar("TUNE_FEATURES_tune-%s" % tune, True) or "").split()
+    features = (localdata.getVar("TUNE_FEATURES_tune-%s" % tune) or "").split()
     if not features:
         return "Tuning '%s' has no defined features, and cannot be used." % tune
     valid_tunes = localdata.getVarFlags('TUNEVALID') or {}
@@ -248,9 +248,9 @@ def check_toolchain_tune(data, tune, multilib):
             bb.debug(2, " %s: %s" % (feature, valid_tunes[feature]))
         else:
             tune_errors.append("Feature '%s' is not defined." % feature)
-    whitelist = localdata.getVar("TUNEABI_WHITELIST", True)
+    whitelist = localdata.getVar("TUNEABI_WHITELIST")
     if whitelist:
-        tuneabi = localdata.getVar("TUNEABI_tune-%s" % tune, True)
+        tuneabi = localdata.getVar("TUNEABI_tune-%s" % tune)
         if not tuneabi:
             tuneabi = tune
         if True not in [x in whitelist.split() for x in tuneabi.split()]:
@@ -264,13 +264,13 @@ def check_toolchain_tune(data, tune, multilib):
 
 def check_toolchain(data):
     tune_error_set = []
-    deftune = data.getVar("DEFAULTTUNE", True)
+    deftune = data.getVar("DEFAULTTUNE")
     tune_errors = check_toolchain_tune(data, deftune, 'default')
     if tune_errors:
         tune_error_set.append(tune_errors)
 
-    multilibs = (data.getVar("MULTILIB_VARIANTS", True) or "").split()
-    global_multilibs = (data.getVar("MULTILIB_GLOBAL_VARIANTS", True) or "").split()
+    multilibs = (data.getVar("MULTILIB_VARIANTS") or "").split()
+    global_multilibs = (data.getVar("MULTILIB_GLOBAL_VARIANTS") or "").split()
 
     if multilibs:
         seen_libs = []
@@ -282,7 +282,7 @@ def check_toolchain(data):
                 seen_libs.append(lib)
             if not lib in global_multilibs:
                 tune_error_set.append("Multilib %s is not present in MULTILIB_GLOBAL_VARIANTS" % lib)
-            tune = data.getVar("DEFAULTTUNE_virtclass-multilib-%s" % lib, True)
+            tune = data.getVar("DEFAULTTUNE_virtclass-multilib-%s" % lib)
             if tune in seen_tunes:
                 tune_error_set.append("The tuning '%s' appears in more than one multilib." % tune)
             else:
@@ -360,10 +360,10 @@ def check_connectivity(d):
     # URI's to check can be set in the CONNECTIVITY_CHECK_URIS variable
     # using the same syntax as for SRC_URI. If the variable is not set
     # the check is skipped
-    test_uris = (d.getVar('CONNECTIVITY_CHECK_URIS', True) or "").split()
+    test_uris = (d.getVar('CONNECTIVITY_CHECK_URIS') or "").split()
     retval = ""
 
-    bbn = d.getVar('BB_NO_NETWORK', True)
+    bbn = d.getVar('BB_NO_NETWORK')
     if bbn not in (None, '0', '1'):
         return 'BB_NO_NETWORK should be "0" or "1", but it is "%s"' % bbn
 
@@ -382,7 +382,7 @@ def check_connectivity(d):
         except Exception as err:
             # Allow the message to be configured so that users can be
             # pointed to a support mechanism.
-            msg = data.getVar('CONNECTIVITY_CHECK_MSG', True) or ""
+            msg = data.getVar('CONNECTIVITY_CHECK_MSG') or ""
             if len(msg) == 0:
                 msg = "%s.\n" % err
                 msg += " Please ensure your host's network is configured correctly,\n"
@@ -395,7 +395,7 @@ def check_connectivity(d):
 def check_supported_distro(sanity_data):
     from fnmatch import fnmatch
 
-    tested_distros = sanity_data.getVar('SANITY_TESTED_DISTROS', True)
+    tested_distros = sanity_data.getVar('SANITY_TESTED_DISTROS')
     if not tested_distros:
         return
 
@@ -418,17 +418,17 @@ def check_sanity_validmachine(sanity_data):
     messages = ""
 
     # Check TUNE_ARCH is set
-    if sanity_data.getVar('TUNE_ARCH', True) == 'INVALID':
+    if sanity_data.getVar('TUNE_ARCH') == 'INVALID':
         messages = messages + 'TUNE_ARCH is unset. Please ensure your MACHINE configuration includes a valid tune configuration file which will set this correctly.\n'
 
     # Check TARGET_OS is set
-    if sanity_data.getVar('TARGET_OS', True) == 'INVALID':
+    if sanity_data.getVar('TARGET_OS') == 'INVALID':
         messages = messages + 'Please set TARGET_OS directly, or choose a MACHINE or DISTRO that does so.\n'
 
     # Check that we don't have duplicate entries in PACKAGE_ARCHS & that TUNE_PKGARCH is in PACKAGE_ARCHS
-    pkgarchs = sanity_data.getVar('PACKAGE_ARCHS', True)
-    tunepkg = sanity_data.getVar('TUNE_PKGARCH', True)
-    defaulttune = sanity_data.getVar('DEFAULTTUNE', True)
+    pkgarchs = sanity_data.getVar('PACKAGE_ARCHS')
+    tunepkg = sanity_data.getVar('TUNE_PKGARCH')
+    defaulttune = sanity_data.getVar('DEFAULTTUNE')
     tunefound = False
     seen = {}
     dups = []
@@ -476,7 +476,7 @@ def check_gcc_march(sanity_data):
             result = True;
 
     if not result:
-        build_arch = sanity_data.getVar('BUILD_ARCH', True)
+        build_arch = sanity_data.getVar('BUILD_ARCH')
         status,res = oe.utils.getstatusoutput(sanity_data.expand("${BUILD_CC} -march=%s gcc_test.c -o gcc_test" % build_arch))
         if status == 0:
             message = "BUILD_CFLAGS_append = \" -march=%s\"" % build_arch
@@ -564,11 +564,11 @@ def check_perl_modules(sanity_data):
     return None
 
 def sanity_check_conffiles(d):
-    funcs = d.getVar('BBLAYERS_CONF_UPDATE_FUNCS', True).split()
+    funcs = d.getVar('BBLAYERS_CONF_UPDATE_FUNCS').split()
     for func in funcs:
         conffile, current_version, required_version, func = func.split(":")
-        if check_conf_exists(conffile, d) and d.getVar(current_version, True) is not None and \
-                d.getVar(current_version, True) != d.getVar(required_version, True):
+        if check_conf_exists(conffile, d) and d.getVar(current_version) is not None and \
+                d.getVar(current_version) != d.getVar(required_version):
             try:
                 bb.build.exec_func(func, d, pythonexception=True)
             except NotImplementedError as e:
@@ -581,8 +581,8 @@ def sanity_handle_abichanges(status, d):
     #
     import subprocess
 
-    current_abi = d.getVar('OELAYOUT_ABI', True)
-    abifile = d.getVar('SANITY_ABIFILE', True)
+    current_abi = d.getVar('OELAYOUT_ABI')
+    abifile = d.getVar('SANITY_ABIFILE')
     if os.path.exists(abifile):
         with open(abifile, "r") as f:
             abi = f.read().strip()
@@ -677,12 +677,12 @@ def check_sanity_version_change(status, d):
         missing = missing + "GNU make,"
 
     if not check_app_exists('${BUILD_CC}', d):
-        missing = missing + "C Compiler (%s)," % d.getVar("BUILD_CC", True)
+        missing = missing + "C Compiler (%s)," % d.getVar("BUILD_CC")
 
     if not check_app_exists('${BUILD_CXX}', d):
-        missing = missing + "C++ Compiler (%s)," % d.getVar("BUILD_CXX", True)
+        missing = missing + "C++ Compiler (%s)," % d.getVar("BUILD_CXX")
 
-    required_utilities = d.getVar('SANITY_REQUIRED_UTILITIES', True)
+    required_utilities = d.getVar('SANITY_REQUIRED_UTILITIES')
 
     for util in required_utilities.split():
         if not check_app_exists(util, d):
@@ -692,7 +692,7 @@ def check_sanity_version_change(status, d):
         missing = missing.rstrip(',')
         status.addresult("Please install the following missing utilities: %s\n" % missing)
 
-    assume_provided = d.getVar('ASSUME_PROVIDED', True).split()
+    assume_provided = d.getVar('ASSUME_PROVIDED').split()
     # Check user doesn't have ASSUME_PROVIDED = instead of += in local.conf
     if "diffstat-native" not in assume_provided:
         status.addresult('Please use ASSUME_PROVIDED +=, not ASSUME_PROVIDED = in your local.conf\n')
@@ -715,7 +715,7 @@ def check_sanity_version_change(status, d):
         status.addresult(" __sync_bool_compare_and_swap (&atomic, 2, 3);\n")
 
     # Check that TMPDIR isn't on a filesystem with limited filename length (eg. eCryptFS)
-    tmpdir = d.getVar('TMPDIR', True)
+    tmpdir = d.getVar('TMPDIR')
     status.addresult(check_create_long_filename(tmpdir, "TMPDIR"))
     tmpdirmode = os.stat(tmpdir).st_mode
     if (tmpdirmode & stat.S_ISGID):
@@ -739,7 +739,7 @@ def check_sanity_version_change(status, d):
     if netcheck:
         status.network_error = True
 
-    nolibs = d.getVar('NO32LIBS', True)
+    nolibs = d.getVar('NO32LIBS')
     if not nolibs:
         lib32path = '/lib'
         if os.path.exists('/lib64') and ( os.path.islink('/lib64') or os.path.islink('/lib') ):
@@ -748,7 +748,7 @@ def check_sanity_version_change(status, d):
         if os.path.exists('%s/libc.so.6' % lib32path) and not os.path.exists('/usr/include/gnu/stubs-32.h'):
             status.addresult("You have a 32-bit libc, but no 32-bit headers. You must install the 32-bit libc headers.\n")
 
-    bbpaths = d.getVar('BBPATH', True).split(":")
+    bbpaths = d.getVar('BBPATH').split(":")
     if ("." in bbpaths or "./" in bbpaths or "" in bbpaths):
         status.addresult("BBPATH references the current directory, either through " \
                 "an empty entry, a './' or a '.'.\n\t This is unsafe and means your "\
@@ -758,7 +758,7 @@ def check_sanity_version_change(status, d):
                 "references.\n" \
                 "Parsed BBPATH is" + str(bbpaths));
 
-    oes_bb_conf = d.getVar( 'OES_BITBAKE_CONF', True)
+    oes_bb_conf = d.getVar( 'OES_BITBAKE_CONF')
     if not oes_bb_conf:
         status.addresult('You are not using the OpenEmbedded version of conf/bitbake.conf. This means your environment is misconfigured, in particular check BBPATH.\n')
 
@@ -793,26 +793,26 @@ def check_sanity_everybuild(status, d):
 
     # Check the bitbake version meets minimum requirements
     from distutils.version import LooseVersion
-    minversion = d.getVar('BB_MIN_VERSION', True)
+    minversion = d.getVar('BB_MIN_VERSION')
     if (LooseVersion(bb.__version__) < LooseVersion(minversion)):
         status.addresult('Bitbake version %s is required and version %s was found\n' % (minversion, bb.__version__))
 
     sanity_check_locale(d)
 
-    paths = d.getVar('PATH', True).split(":")
+    paths = d.getVar('PATH').split(":")
     if "." in paths or "./" in paths or "" in paths:
         status.addresult("PATH contains '.', './' or '' (empty element), which will break the build, please remove this.\nParsed PATH is " + str(paths) + "\n")
 
     # Check that the DISTRO is valid, if set
     # need to take into account DISTRO renaming DISTRO
-    distro = d.getVar('DISTRO', True)
+    distro = d.getVar('DISTRO')
     if distro and distro != "nodistro":
         if not ( check_conf_exists("conf/distro/${DISTRO}.conf", d) or check_conf_exists("conf/distro/include/${DISTRO}.inc", d) ):
-            status.addresult("DISTRO '%s' not found. Please set a valid DISTRO in your local.conf\n" % d.getVar("DISTRO", True))
+            status.addresult("DISTRO '%s' not found. Please set a valid DISTRO in your local.conf\n" % d.getVar("DISTRO"))
 
     # Check that DL_DIR is set, exists and is writable. In theory, we should never even hit the check if DL_DIR isn't
     # set, since so much relies on it being set.
-    dldir = d.getVar('DL_DIR', True)
+    dldir = d.getVar('DL_DIR')
     if not dldir:
         status.addresult("DL_DIR is not set. Your environment is misconfigured, check that DL_DIR is set, and if the directory exists, that it is writable. \n")
     if os.path.exists(dldir) and not os.access(dldir, os.W_OK):
@@ -821,7 +821,7 @@ def check_sanity_everybuild(status, d):
 
     # Check that the MACHINE is valid, if it is set
     machinevalid = True
-    if d.getVar('MACHINE', True):
+    if d.getVar('MACHINE'):
         if not check_conf_exists("conf/machine/${MACHINE}.conf", d):
             status.addresult('Please set a valid MACHINE in your local.conf or environment\n')
             machinevalid = False
@@ -834,7 +834,7 @@ def check_sanity_everybuild(status, d):
     status.addresult(check_toolchain(d))
 
     # Check that the SDKMACHINE is valid, if it is set
-    if d.getVar('SDKMACHINE', True):
+    if d.getVar('SDKMACHINE'):
         if not check_conf_exists("conf/machine-sdk/${SDKMACHINE}.conf", d):
             status.addresult('Specified SDKMACHINE value is not valid\n')
         elif d.getVar('SDK_ARCH', False) == "${BUILD_ARCH}":
@@ -847,7 +847,7 @@ def check_sanity_everybuild(status, d):
         status.addresult("Please use a umask which allows a+rx and u+rwx\n")
     os.umask(omask)
 
-    if d.getVar('TARGET_ARCH', True) == "arm":
+    if d.getVar('TARGET_ARCH') == "arm":
         # This path is no longer user-readable in modern (very recent) Linux
         try:
             if os.path.exists("/proc/sys/vm/mmap_min_addr"):
@@ -860,7 +860,7 @@ def check_sanity_everybuild(status, d):
         except:
             pass
 
-    oeroot = d.getVar('COREBASE', True)
+    oeroot = d.getVar('COREBASE')
     if oeroot.find('+') != -1:
         status.addresult("Error, you have an invalid character (+) in your COREBASE directory path. Please move the installation to a directory which doesn't include any + characters.")
     if oeroot.find('@') != -1:
@@ -875,7 +875,7 @@ def check_sanity_everybuild(status, d):
                  'git', 'gitsm', 'hg', 'osc', 'p4', 'svn', \
                  'bzr', 'cvs', 'npm', 'sftp', 'ssh']
     for mirror_var in mirror_vars:
-        mirrors = (d.getVar(mirror_var, True) or '').replace('\\n', '\n').split('\n')
+        mirrors = (d.getVar(mirror_var) or '').replace('\\n', '\n').split('\n')
         for mirror_entry in mirrors:
             mirror_entry = mirror_entry.strip()
             if not mirror_entry:
@@ -914,7 +914,7 @@ def check_sanity_everybuild(status, d):
             check_symlink(mirror_base, d)
 
     # Check that TMPDIR hasn't changed location since the last time we were run
-    tmpdir = d.getVar('TMPDIR', True)
+    tmpdir = d.getVar('TMPDIR')
     checkfile = os.path.join(tmpdir, "saved_tmpdir")
     if os.path.exists(checkfile):
         with open(checkfile, "r") as f:
@@ -951,8 +951,8 @@ def check_sanity(sanity_data):
 
     status = SanityStatus()
 
-    tmpdir = sanity_data.getVar('TMPDIR', True)
-    sstate_dir = sanity_data.getVar('SSTATE_DIR', True)
+    tmpdir = sanity_data.getVar('TMPDIR')
+    sstate_dir = sanity_data.getVar('SSTATE_DIR')
 
     check_symlink(sstate_dir, sanity_data)
 
@@ -976,7 +976,7 @@ def check_sanity(sanity_data):
 
     check_sanity_everybuild(status, sanity_data)
 
-    sanity_version = int(sanity_data.getVar('SANITY_VERSION', True) or 1)
+    sanity_version = int(sanity_data.getVar('SANITY_VERSION') or 1)
     network_error = False
     # NATIVELSBSTRING var may have been overridden with "universal", so
     # get actual host distribution id and version
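sanity_check_conffiles() above splits each BBLAYERS_CONF_UPDATE_FUNCS entry on ':' into a conf file, a current-version variable, a required-version variable and an update function. An illustrative entry in that four-field format, wired to the oecore_update_bblayers handler patched above:

    BBLAYERS_CONF_UPDATE_FUNCS += " \
        conf/bblayers.conf:LCONF_VERSION:LAYER_CONF_VERSION:oecore_update_bblayers \
    "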
diff --git a/meta/classes/sign_ipk.bbclass b/meta/classes/sign_ipk.bbclass
index a481f6d9a8..e5057b7799 100644
--- a/meta/classes/sign_ipk.bbclass
+++ b/meta/classes/sign_ipk.bbclass
@@ -29,10 +29,10 @@ IPK_GPG_SIGNATURE_TYPE ?= 'ASC'
 python () {
     # Check configuration
     for var in ('IPK_GPG_NAME', 'IPK_GPG_PASSPHRASE_FILE'):
-        if not d.getVar(var, True):
+        if not d.getVar(var):
             raise_sanity_error("You need to define %s in the config" % var, d)
 
-    sigtype = d.getVar("IPK_GPG_SIGNATURE_TYPE", True)
+    sigtype = d.getVar("IPK_GPG_SIGNATURE_TYPE")
     if sigtype.upper() != "ASC" and sigtype.upper() != "BIN":
         raise_sanity_error("Bad value for IPK_GPG_SIGNATURE_TYPE (%s), use either ASC or BIN" % sigtype)
 }
@@ -42,11 +42,11 @@ def sign_ipk(d, ipk_to_sign):
 
     bb.debug(1, 'Signing ipk: %s' % ipk_to_sign)
 
-    signer = get_signer(d, d.getVar('IPK_GPG_BACKEND', True))
-    sig_type = d.getVar('IPK_GPG_SIGNATURE_TYPE', True)
+    signer = get_signer(d, d.getVar('IPK_GPG_BACKEND'))
+    sig_type = d.getVar('IPK_GPG_SIGNATURE_TYPE')
     is_ascii_sig = (sig_type.upper() != "BIN")
 
     signer.detach_sign(ipk_to_sign,
-                       d.getVar('IPK_GPG_NAME', True),
-                       d.getVar('IPK_GPG_PASSPHRASE_FILE', True),
+                       d.getVar('IPK_GPG_NAME'),
+                       d.getVar('IPK_GPG_PASSPHRASE_FILE'),
                        armor=is_ascii_sig)
diff --git a/meta/classes/sign_package_feed.bbclass b/meta/classes/sign_package_feed.bbclass
index 31a6e9b042..71df03bab3 100644
--- a/meta/classes/sign_package_feed.bbclass
+++ b/meta/classes/sign_package_feed.bbclass
@@ -31,10 +31,10 @@ PACKAGE_FEED_GPG_SIGNATURE_TYPE ?= 'ASC'
 python () {
     # Check sanity of configuration
     for var in ('PACKAGE_FEED_GPG_NAME', 'PACKAGE_FEED_GPG_PASSPHRASE_FILE'):
-        if not d.getVar(var, True):
+        if not d.getVar(var):
             raise_sanity_error("You need to define %s in the config" % var, d)
 
-    sigtype = d.getVar("PACKAGE_FEED_GPG_SIGNATURE_TYPE", True)
+    sigtype = d.getVar("PACKAGE_FEED_GPG_SIGNATURE_TYPE")
     if sigtype.upper() != "ASC" and sigtype.upper() != "BIN":
         raise_sanity_error("Bad value for PACKAGE_FEED_GPG_SIGNATURE_TYPE (%s), use either ASC or BIN" % sigtype)
 }
diff --git a/meta/classes/sign_rpm.bbclass b/meta/classes/sign_rpm.bbclass
index a8ea75faaa..2a08020819 100644
--- a/meta/classes/sign_rpm.bbclass
+++ b/meta/classes/sign_rpm.bbclass
@@ -22,11 +22,11 @@ RPM_GPG_BACKEND ?= 'local'
 
 
 python () {
-    if d.getVar('RPM_GPG_PASSPHRASE_FILE', True):
+    if d.getVar('RPM_GPG_PASSPHRASE_FILE'):
         raise_sanity_error('RPM_GPG_PASSPHRASE_FILE is replaced by RPM_GPG_PASSPHRASE', d)
     # Check configuration
     for var in ('RPM_GPG_NAME', 'RPM_GPG_PASSPHRASE'):
-        if not d.getVar(var, True):
+        if not d.getVar(var):
             raise_sanity_error("You need to define %s in the config" % var, d)
 
     # Set the expected location of the public key
@@ -41,12 +41,12 @@ python sign_rpm () {
     import glob
     from oe.gpg_sign import get_signer
 
-    signer = get_signer(d, d.getVar('RPM_GPG_BACKEND', True))
-    rpms = glob.glob(d.getVar('RPM_PKGWRITEDIR', True) + '/*')
+    signer = get_signer(d, d.getVar('RPM_GPG_BACKEND'))
+    rpms = glob.glob(d.getVar('RPM_PKGWRITEDIR') + '/*')
 
     signer.sign_rpms(rpms,
-                     d.getVar('RPM_GPG_NAME', True),
-                     d.getVar('RPM_GPG_PASSPHRASE', True))
+                     d.getVar('RPM_GPG_NAME'),
+                     d.getVar('RPM_GPG_PASSPHRASE'))
 }
 
 do_package_index[depends] += "signing-keys:do_deploy"
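The variables checked by the anonymous python above are all that is needed to turn on rpm signing; a sketch of a local.conf fragment (key name and passphrase are placeholders):

    INHERIT += "sign_rpm"
    RPM_GPG_NAME = "4A1B2C3D"
    RPM_GPG_PASSPHRASE = "secret"
    RPM_GPG_BACKEND = "local"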
diff --git a/meta/classes/siteconfig.bbclass b/meta/classes/siteconfig.bbclass
index 45dce489de..9d51a0266a 100644
--- a/meta/classes/siteconfig.bbclass
+++ b/meta/classes/siteconfig.bbclass
@@ -2,7 +2,7 @@ python siteconfig_do_siteconfig () {
     shared_state = sstate_state_fromvars(d)
     if shared_state['task'] != 'populate_sysroot':
         return
-    if not os.path.isdir(os.path.join(d.getVar('FILE_DIRNAME', True), 'site_config')):
+    if not os.path.isdir(os.path.join(d.getVar('FILE_DIRNAME'), 'site_config')):
         bb.debug(1, "No site_config directory, skipping do_siteconfig")
         return
     bb.build.exec_func('do_siteconfig_gencache', d)
diff --git a/meta/classes/siteinfo.bbclass b/meta/classes/siteinfo.bbclass
index 6eca004c5e..abb194f0d6 100644
--- a/meta/classes/siteinfo.bbclass
+++ b/meta/classes/siteinfo.bbclass
@@ -113,14 +113,14 @@ def siteinfo_data(d):
 
     # Add in any extra user supplied data which may come from a BSP layer, removing the
     # need to always change this class directly
-    extra_siteinfo = (d.getVar("SITEINFO_EXTRA_DATAFUNCS", True) or "").split()
+    extra_siteinfo = (d.getVar("SITEINFO_EXTRA_DATAFUNCS") or "").split()
     for m in extra_siteinfo:
         call = m + "(archinfo, osinfo, targetinfo, d)"
         locs = { "archinfo" : archinfo, "osinfo" : osinfo, "targetinfo" : targetinfo, "d" : d}
         archinfo, osinfo, targetinfo = bb.utils.better_eval(call, locs)
 
-    hostarch = d.getVar("HOST_ARCH", True)
-    hostos = d.getVar("HOST_OS", True)
+    hostarch = d.getVar("HOST_ARCH")
+    hostos = d.getVar("HOST_OS")
     target = "%s-%s" % (hostarch, hostos)
 
     sitedata = []
@@ -144,7 +144,7 @@ python () {
         d.setVar("SITEINFO_ENDIANNESS", "be")
     else:
         bb.error("Unable to determine endianness for architecture '%s'" %
-                 d.getVar("HOST_ARCH", True))
+                 d.getVar("HOST_ARCH"))
         bb.fatal("Please add your architecture to siteinfo.bbclass")
 
     if "bit-32" in sitedata:
@@ -153,14 +153,14 @@ python () {
         d.setVar("SITEINFO_BITS", "64")
     else:
         bb.error("Unable to determine bit size for architecture '%s'" %
-                 d.getVar("HOST_ARCH", True))
+                 d.getVar("HOST_ARCH"))
         bb.fatal("Please add your architecture to siteinfo.bbclass")
 }
 
 def siteinfo_get_files(d, aclocalcache = False):
     sitedata = siteinfo_data(d)
     sitefiles = ""
-    for path in d.getVar("BBPATH", True).split(":"):
+    for path in d.getVar("BBPATH").split(":"):
         for element in sitedata:
             filename = os.path.join(path, "site", element)
             if os.path.exists(filename):
@@ -177,7 +177,7 @@ def siteinfo_get_files(d, aclocalcache = False):
     # issues and the directory being created/removed whilst this code executes. This can happen
     # when a multilib recipe is parsed along with its base variant which may be running at the time
     # causing rare but nasty failures
-    path_siteconfig = d.getVar('ACLOCALDIR', True)
+    path_siteconfig = d.getVar('ACLOCALDIR')
     if path_siteconfig and os.path.isdir(path_siteconfig):
         for i in os.listdir(path_siteconfig):
             if not i.endswith("_config"):
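siteinfo_data() above evaluates each SITEINFO_EXTRA_DATAFUNCS entry with the fixed argument list (archinfo, osinfo, targetinfo, d), so a BSP layer can extend the tables without touching this class. A sketch of such a hook in a layer's bbclass (function and architecture names are hypothetical; the value string follows the existing endian-*/bit-* sitedata convention):

    SITEINFO_EXTRA_DATAFUNCS += "my_bsp_archinfo"

    def my_bsp_archinfo(archinfo, osinfo, targetinfo, d):
        # Register a custom architecture's site characteristics
        archinfo['myarch'] = "endian-little bit-32"
        return archinfo, osinfo, targetinfo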
diff --git a/meta/classes/spdx.bbclass b/meta/classes/spdx.bbclass
index 89394d3a96..c5f544d2a4 100644
--- a/meta/classes/spdx.bbclass
+++ b/meta/classes/spdx.bbclass
@@ -26,20 +26,20 @@ python do_spdx () {
     import json, shutil
 
     info = {}
-    info['workdir'] = d.getVar('WORKDIR', True)
-    info['sourcedir'] = d.getVar('SPDX_S', True)
-    info['pn'] = d.getVar('PN', True)
-    info['pv'] = d.getVar('PV', True)
-    info['spdx_version'] = d.getVar('SPDX_VERSION', True)
-    info['data_license'] = d.getVar('DATA_LICENSE', True)
+    info['workdir'] = d.getVar('WORKDIR')
+    info['sourcedir'] = d.getVar('SPDX_S')
+    info['pn'] = d.getVar('PN')
+    info['pv'] = d.getVar('PV')
+    info['spdx_version'] = d.getVar('SPDX_VERSION')
+    info['data_license'] = d.getVar('DATA_LICENSE')
 
-    sstatedir = d.getVar('SPDXSSTATEDIR', True)
+    sstatedir = d.getVar('SPDXSSTATEDIR')
     sstatefile = os.path.join(sstatedir, info['pn'] + info['pv'] + ".spdx")
 
-    manifest_dir = d.getVar('SPDX_MANIFEST_DIR', True)
+    manifest_dir = d.getVar('SPDX_MANIFEST_DIR')
     info['outfile'] = os.path.join(manifest_dir, info['pn'] + ".spdx" )
 
-    info['spdx_temp_dir'] = d.getVar('SPDX_TEMP_DIR', True)
+    info['spdx_temp_dir'] = d.getVar('SPDX_TEMP_DIR')
     info['tar_file'] = os.path.join(info['workdir'], info['pn'] + ".tar.gz" )
 
     # Make sure important dirs exist
@@ -74,9 +74,9 @@ python do_spdx () {
         foss_license_info = cached_spdx['Licenses']
     else:
         ## setup fossology command
-        foss_server = d.getVar('FOSS_SERVER', True)
-        foss_flags = d.getVar('FOSS_WGET_FLAGS', True)
-        foss_full_spdx = d.getVar('FOSS_FULL_SPDX', True) == "true" or False
+        foss_server = d.getVar('FOSS_SERVER')
+        foss_flags = d.getVar('FOSS_WGET_FLAGS')
+        foss_full_spdx = d.getVar('FOSS_FULL_SPDX') == "true" or False
         foss_command = "wget %s --post-file=%s %s"\
             % (foss_flags, info['tar_file'], foss_server)
 
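do_spdx above drives an external FOSSology server via wget; the FOSS_* variables touched here are the connection knobs. A hedged configuration sketch (the server URL is a placeholder, not the class default):

    INHERIT += "spdx"
    FOSS_SERVER = "http://localhost/?mod=spdx_license_once"
    FOSS_FULL_SPDX = "true"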
diff --git a/meta/classes/sstate.bbclass b/meta/classes/sstate.bbclass
index 8643f3d247..546e276d9f 100644
--- a/meta/classes/sstate.bbclass
+++ b/meta/classes/sstate.bbclass
@@ -11,7 +11,7 @@ def generate_sstatefn(spec, hash, d):
 SSTATE_PKGARCH = "${PACKAGE_ARCH}"
 SSTATE_PKGSPEC = "sstate:${PN}:${PACKAGE_ARCH}${TARGET_VENDOR}-${TARGET_OS}:${PV}:${PR}:${SSTATE_PKGARCH}:${SSTATE_VERSION}:"
 SSTATE_SWSPEC = "sstate:${PN}::${PV}:${PR}::${SSTATE_VERSION}:"
-SSTATE_PKGNAME = "${SSTATE_EXTRAPATH}${@generate_sstatefn(d.getVar('SSTATE_PKGSPEC', True), d.getVar('BB_TASKHASH', True), d)}"
+SSTATE_PKGNAME = "${SSTATE_EXTRAPATH}${@generate_sstatefn(d.getVar('SSTATE_PKGSPEC'), d.getVar('BB_TASKHASH'), d)}"
 SSTATE_PKG = "${SSTATE_DIR}/${SSTATE_PKGNAME}"
 SSTATE_EXTRAPATH = ""
 SSTATE_EXTRAPATHWILDCARD = ""
@@ -34,7 +34,7 @@ SSTATE_DUPWHITELIST += "${DEPLOY_DIR_SRC}"
 SSTATE_DUPWHITELIST += "${DEPLOY_DIR}/sdk/README_-_DO_NOT_DELETE_FILES_IN_THIS_DIRECTORY.txt"
 
 SSTATE_SCAN_FILES ?= "*.la *-config *_config"
-SSTATE_SCAN_CMD ?= 'find ${SSTATE_BUILDDIR} \( -name "${@"\" -o -name \"".join(d.getVar("SSTATE_SCAN_FILES", True).split())}" \) -type f'
+SSTATE_SCAN_CMD ?= 'find ${SSTATE_BUILDDIR} \( -name "${@"\" -o -name \"".join(d.getVar("SSTATE_SCAN_FILES").split())}" \) -type f'
 
 BB_HASHFILENAME = "False ${SSTATE_PKGSPEC} ${SSTATE_SWSPEC}"
 
@@ -84,7 +84,7 @@ python () {
         d.setVar('SSTATE_PKGARCH', d.expand("${SDK_ARCH}_${SDK_OS}"))
     elif bb.data.inherits_class('cross-canadian', d):
         d.setVar('SSTATE_PKGARCH', d.expand("${SDK_ARCH}_${PACKAGE_ARCH}"))
-    elif bb.data.inherits_class('allarch', d) and d.getVar("PACKAGE_ARCH", True) == "all":
+    elif bb.data.inherits_class('allarch', d) and d.getVar("PACKAGE_ARCH") == "all":
         d.setVar('SSTATE_PKGARCH', "allarch")
     else:
         d.setVar('SSTATE_MANMACH', d.expand("${PACKAGE_ARCH}"))
@@ -100,7 +100,7 @@ python () {
         scan_cmd = "grep -Irl ${STAGING_DIR} ${SSTATE_BUILDDIR}"
         d.setVar('SSTATE_SCAN_CMD', scan_cmd)
 
-    unique_tasks = sorted(set((d.getVar('SSTATETASKS', True) or "").split()))
+    unique_tasks = sorted(set((d.getVar('SSTATETASKS') or "").split()))
     d.setVar('SSTATETASKS', " ".join(unique_tasks))
     for task in unique_tasks:
         d.prependVarFlag(task, 'prefuncs', "sstate_task_prefunc ")
@@ -118,7 +118,7 @@ def sstate_init(task, d):
 
 def sstate_state_fromvars(d, task = None):
     if task is None:
-        task = d.getVar('BB_CURRENTTASK', True)
+        task = d.getVar('BB_CURRENTTASK')
         if not task:
             bb.fatal("sstate code running without task context?!")
         task = task.replace("_setscene", "")
@@ -200,7 +200,7 @@ def sstate_install(ss, d):
             shareddirs.append(dstdir)
 
     # Check the file list for conflicts against files which already exist
-    whitelist = (d.getVar("SSTATE_DUPWHITELIST", True) or "").split()
+    whitelist = (d.getVar("SSTATE_DUPWHITELIST") or "").split()
     match = []
     for f in sharedfiles:
         if os.path.exists(f):
@@ -239,7 +239,7 @@ def sstate_install(ss, d):
               "things (e.g. bluez 4 and bluez 5 and the correct solution for that would " \
               "be to resolve the conflict. If in doubt, please ask on the mailing list, " \
               "sharing the error and filelist above." % \
-              (d.getVar('PN', True), "\n ".join(match)))
+              (d.getVar('PN'), "\n ".join(match)))
         bb.fatal("If the above message is too much, the simpler version is you're advised to wipe out tmp and rebuild (reusing sstate is fine). That will likely fix things in most (but not all) cases.")
 
     # Write out the manifest
@@ -260,7 +260,7 @@ def sstate_install(ss, d):
 
     i = d2.expand("${SSTATE_MANIFESTS}/index-${SSTATE_MANMACH}")
     l = bb.utils.lockfile(i + ".lock")
-    filedata = d.getVar("STAMP", True) + " " + d2.getVar("SSTATE_MANFILEPREFIX", True) + " " + d.getVar("WORKDIR", True) + "\n"
+    filedata = d.getVar("STAMP") + " " + d2.getVar("SSTATE_MANFILEPREFIX") + " " + d.getVar("WORKDIR") + "\n"
     manifests = []
     if os.path.exists(i):
         with open(i, "r") as f:
@@ -275,7 +275,7 @@ def sstate_install(ss, d):
         if os.path.exists(state[1]):
             oe.path.copyhardlinktree(state[1], state[2])
 
-    for postinst in (d.getVar('SSTATEPOSTINSTFUNCS', True) or '').split():
+    for postinst in (d.getVar('SSTATEPOSTINSTFUNCS') or '').split():
         # All hooks should run in the SSTATE_INSTDIR
         bb.build.exec_func(postinst, d, (sstateinst,))
 
@@ -298,8 +298,8 @@ def sstate_installpkg(ss, d):
             oe.path.remove(dir)
 
     sstateinst = d.expand("${WORKDIR}/sstate-install-%s/" % ss['task'])
-    sstatefetch = d.getVar('SSTATE_PKGNAME', True) + '_' + ss['task'] + ".tgz"
-    sstatepkg = d.getVar('SSTATE_PKG', True) + '_' + ss['task'] + ".tgz"
+    sstatefetch = d.getVar('SSTATE_PKGNAME') + '_' + ss['task'] + ".tgz"
+    sstatepkg = d.getVar('SSTATE_PKG') + '_' + ss['task'] + ".tgz"
 
     if not os.path.exists(sstatepkg):
         pstaging_fetch(sstatefetch, sstatepkg, d)
@@ -313,12 +313,12 @@ def sstate_installpkg(ss, d):
     d.setVar('SSTATE_INSTDIR', sstateinst)
     d.setVar('SSTATE_PKG', sstatepkg)
 
-    if bb.utils.to_boolean(d.getVar("SSTATE_VERIFY_SIG", True), False):
+    if bb.utils.to_boolean(d.getVar("SSTATE_VERIFY_SIG"), False):
         signer = get_signer(d, 'local')
         if not signer.verify(sstatepkg + '.sig'):
             bb.warn("Cannot verify signature on sstate package %s" % sstatepkg)
 
-    for f in (d.getVar('SSTATEPREINSTFUNCS', True) or '').split() + ['sstate_unpack_package'] + (d.getVar('SSTATEPOSTUNPACKFUNCS', True) or '').split():
+    for f in (d.getVar('SSTATEPREINSTFUNCS') or '').split() + ['sstate_unpack_package'] + (d.getVar('SSTATEPOSTUNPACKFUNCS') or '').split():
         # All hooks should run in the SSTATE_INSTDIR
         bb.build.exec_func(f, d, (sstateinst,))
 
@@ -328,7 +328,7 @@ def sstate_installpkg(ss, d):
     sstate_install(ss, d)
 
     for plain in ss['plaindirs']:
-        workdir = d.getVar('WORKDIR', True)
+        workdir = d.getVar('WORKDIR')
         src = sstateinst + "/" + plain.replace(workdir, '')
         dest = plain
         bb.utils.mkdirhier(src)
@@ -344,12 +344,12 @@ python sstate_hardcode_path_unpack () {
     # sstate_hardcode_path(d)
     import subprocess
 
-    sstateinst = d.getVar('SSTATE_INSTDIR', True)
+    sstateinst = d.getVar('SSTATE_INSTDIR')
     fixmefn = sstateinst + "fixmepath"
     if os.path.isfile(fixmefn):
-        staging = d.getVar('STAGING_DIR', True)
-        staging_target = d.getVar('STAGING_DIR_TARGET', True)
-        staging_host = d.getVar('STAGING_DIR_HOST', True)
+        staging = d.getVar('STAGING_DIR')
+        staging_target = d.getVar('STAGING_DIR_TARGET')
+        staging_host = d.getVar('STAGING_DIR_HOST')
 
         if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d):
             sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIR:%s:g'" % (staging)
@@ -358,9 +358,9 @@ python sstate_hardcode_path_unpack () {
         else:
             sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRHOST:%s:g'" % (staging_host)
 
-        extra_staging_fixmes = d.getVar('EXTRA_STAGING_FIXMES', True) or ''
+        extra_staging_fixmes = d.getVar('EXTRA_STAGING_FIXMES') or ''
         for fixmevar in extra_staging_fixmes.split():
-            fixme_path = d.getVar(fixmevar, True)
+            fixme_path = d.getVar(fixmevar)
             sstate_sed_cmd += " -e 's:FIXME_%s:%s:g'" % (fixmevar, fixme_path)
 
         # Add sstateinst to each filename in fixmepath, use xargs to efficiently call sed
@@ -377,12 +377,12 @@ python sstate_hardcode_path_unpack () {
 def sstate_clean_cachefile(ss, d):
     import oe.path
 
-    sstatepkgfile = d.getVar('SSTATE_PATHSPEC', True) + "*_" + ss['task'] + ".tgz*"
+    sstatepkgfile = d.getVar('SSTATE_PATHSPEC') + "*_" + ss['task'] + ".tgz*"
     bb.note("Removing %s" % sstatepkgfile)
     oe.path.remove(sstatepkgfile)
 
 def sstate_clean_cachefiles(d):
-    for task in (d.getVar('SSTATETASKS', True) or "").split():
+    for task in (d.getVar('SSTATETASKS') or "").split():
         ld = d.createCopy()
         ss = sstate_state_fromvars(ld, task)
         sstate_clean_cachefile(ss, ld)
@@ -424,7 +424,7 @@ def sstate_clean(ss, d):
     import glob
 
     d2 = d.createCopy()
-    stamp_clean = d.getVar("STAMPCLEAN", True)
+    stamp_clean = d.getVar("STAMPCLEAN")
     extrainf = d.getVarFlag("do_" + ss['task'], 'stamp-extra-info', True)
     if extrainf:
         d2.setVar("SSTATE_MANMACH", extrainf)
@@ -467,7 +467,7 @@ def sstate_clean(ss, d):
             oe.path.remove(stfile)
 
     # Removes the users/groups created by the package
-    for cleanfunc in (d.getVar('SSTATECLEANFUNCS', True) or '').split():
+    for cleanfunc in (d.getVar('SSTATECLEANFUNCS') or '').split():
         bb.build.exec_func(cleanfunc, d)
 
 sstate_clean[vardepsexclude] = "SSTATE_MANFILEPREFIX"
@@ -475,13 +475,13 @@ sstate_clean[vardepsexclude] = "SSTATE_MANFILEPREFIX"
 CLEANFUNCS += "sstate_cleanall"
 
 python sstate_cleanall() {
-    bb.note("Removing shared state for package %s" % d.getVar('PN', True))
+    bb.note("Removing shared state for package %s" % d.getVar('PN'))
 
-    manifest_dir = d.getVar('SSTATE_MANIFESTS', True)
+    manifest_dir = d.getVar('SSTATE_MANIFESTS')
     if not os.path.exists(manifest_dir):
         return
 
-    tasks = d.getVar('SSTATETASKS', True).split()
+    tasks = d.getVar('SSTATETASKS').split()
     for name in tasks:
         ld = d.createCopy()
         shared_state = sstate_state_fromvars(ld, name)
@@ -497,10 +497,10 @@ python sstate_hardcode_path () {
     # Note: the logic in this function needs to match the reverse logic
     # in sstate_installpkg(ss, d)
 
-    staging = d.getVar('STAGING_DIR', True)
-    staging_target = d.getVar('STAGING_DIR_TARGET', True)
-    staging_host = d.getVar('STAGING_DIR_HOST', True)
-    sstate_builddir = d.getVar('SSTATE_BUILDDIR', True)
+    staging = d.getVar('STAGING_DIR')
+    staging_target = d.getVar('STAGING_DIR_TARGET')
+    staging_host = d.getVar('STAGING_DIR_HOST')
+    sstate_builddir = d.getVar('SSTATE_BUILDDIR')
 
     if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d):
         sstate_grep_cmd = "grep -l -e '%s'" % (staging)
@@ -512,14 +512,14 @@ python sstate_hardcode_path () {
         sstate_grep_cmd = "grep -l -e '%s'" % (staging_host)
         sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRHOST:g'" % (staging_host)
 
-    extra_staging_fixmes = d.getVar('EXTRA_STAGING_FIXMES', True) or ''
+    extra_staging_fixmes = d.getVar('EXTRA_STAGING_FIXMES') or ''
     for fixmevar in extra_staging_fixmes.split():
-        fixme_path = d.getVar(fixmevar, True)
+        fixme_path = d.getVar(fixmevar)
         sstate_sed_cmd += " -e 's:%s:FIXME_%s:g'" % (fixme_path, fixmevar)
 
     fixmefn = sstate_builddir + "fixmepath"
 
-    sstate_scan_cmd = d.getVar('SSTATE_SCAN_CMD', True)
+    sstate_scan_cmd = d.getVar('SSTATE_SCAN_CMD')
     sstate_filelist_cmd = "tee %s" % (fixmefn)
 
     # fixmepath file needs relative paths, drop sstate_builddir prefix
@@ -568,17 +568,17 @@ def sstate_package(ss, d):
568 os.remove(path) 568 os.remove(path)
569 os.symlink(base, path) 569 os.symlink(base, path)
570 570
571 tmpdir = d.getVar('TMPDIR', True) 571 tmpdir = d.getVar('TMPDIR')
572 572
573 sstatebuild = d.expand("${WORKDIR}/sstate-build-%s/" % ss['task']) 573 sstatebuild = d.expand("${WORKDIR}/sstate-build-%s/" % ss['task'])
574 sstatepkg = d.getVar('SSTATE_PKG', True) + '_'+ ss['task'] + ".tgz" 574 sstatepkg = d.getVar('SSTATE_PKG') + '_'+ ss['task'] + ".tgz"
575 bb.utils.remove(sstatebuild, recurse=True) 575 bb.utils.remove(sstatebuild, recurse=True)
576 bb.utils.mkdirhier(sstatebuild) 576 bb.utils.mkdirhier(sstatebuild)
577 bb.utils.mkdirhier(os.path.dirname(sstatepkg)) 577 bb.utils.mkdirhier(os.path.dirname(sstatepkg))
578 for state in ss['dirs']: 578 for state in ss['dirs']:
579 if not os.path.exists(state[1]): 579 if not os.path.exists(state[1]):
580 continue 580 continue
581 if d.getVar('SSTATE_SKIP_CREATION', True) == '1': 581 if d.getVar('SSTATE_SKIP_CREATION') == '1':
582 continue 582 continue
583 srcbase = state[0].rstrip("/").rsplit('/', 1)[0] 583 srcbase = state[0].rstrip("/").rsplit('/', 1)[0]
584 for walkroot, dirs, files in os.walk(state[1]): 584 for walkroot, dirs, files in os.walk(state[1]):
@@ -593,7 +593,7 @@ def sstate_package(ss, d):
593 bb.debug(2, "Preparing tree %s for packaging at %s" % (state[1], sstatebuild + state[0])) 593 bb.debug(2, "Preparing tree %s for packaging at %s" % (state[1], sstatebuild + state[0]))
594 oe.path.copyhardlinktree(state[1], sstatebuild + state[0]) 594 oe.path.copyhardlinktree(state[1], sstatebuild + state[0])
595 595
596 workdir = d.getVar('WORKDIR', True) 596 workdir = d.getVar('WORKDIR')
597 for plain in ss['plaindirs']: 597 for plain in ss['plaindirs']:
598 pdir = plain.replace(workdir, sstatebuild) 598 pdir = plain.replace(workdir, sstatebuild)
599 bb.utils.mkdirhier(plain) 599 bb.utils.mkdirhier(plain)
@@ -603,9 +603,9 @@ def sstate_package(ss, d):
603 d.setVar('SSTATE_BUILDDIR', sstatebuild) 603 d.setVar('SSTATE_BUILDDIR', sstatebuild)
604 d.setVar('SSTATE_PKG', sstatepkg) 604 d.setVar('SSTATE_PKG', sstatepkg)
605 605
606 for f in (d.getVar('SSTATECREATEFUNCS', True) or '').split() + \ 606 for f in (d.getVar('SSTATECREATEFUNCS') or '').split() + \
607 ['sstate_create_package', 'sstate_sign_package'] + \ 607 ['sstate_create_package', 'sstate_sign_package'] + \
608 (d.getVar('SSTATEPOSTCREATEFUNCS', True) or '').split(): 608 (d.getVar('SSTATEPOSTCREATEFUNCS') or '').split():
609 # All hooks should run in SSTATE_BUILDDIR. 609 # All hooks should run in SSTATE_BUILDDIR.
610 bb.build.exec_func(f, d, (sstatebuild,)) 610 bb.build.exec_func(f, d, (sstatebuild,))
611 611
@@ -617,7 +617,7 @@ def pstaging_fetch(sstatefetch, sstatepkg, d):
617 import bb.fetch2 617 import bb.fetch2
618 618
619 # Only try and fetch if the user has configured a mirror 619 # Only try and fetch if the user has configured a mirror
620 mirrors = d.getVar('SSTATE_MIRRORS', True) 620 mirrors = d.getVar('SSTATE_MIRRORS')
621 if not mirrors: 621 if not mirrors:
622 return 622 return
623 623
@@ -635,14 +635,14 @@ def pstaging_fetch(sstatefetch, sstatepkg, d):
635 635
636 # if BB_NO_NETWORK is set but we also have SSTATE_MIRROR_ALLOW_NETWORK, 636 # if BB_NO_NETWORK is set but we also have SSTATE_MIRROR_ALLOW_NETWORK,
637 # we'll want to allow network access for the current set of fetches. 637 # we'll want to allow network access for the current set of fetches.
638 if localdata.getVar('BB_NO_NETWORK', True) == "1" and localdata.getVar('SSTATE_MIRROR_ALLOW_NETWORK', True) == "1": 638 if localdata.getVar('BB_NO_NETWORK') == "1" and localdata.getVar('SSTATE_MIRROR_ALLOW_NETWORK') == "1":
639 localdata.delVar('BB_NO_NETWORK') 639 localdata.delVar('BB_NO_NETWORK')
640 640
641 # Try a fetch from the sstate mirror, if it fails just return and 641 # Try a fetch from the sstate mirror, if it fails just return and
642 # we will build the package 642 # we will build the package
643 uris = ['file://{0};downloadfilename={0}'.format(sstatefetch), 643 uris = ['file://{0};downloadfilename={0}'.format(sstatefetch),
644 'file://{0}.siginfo;downloadfilename={0}.siginfo'.format(sstatefetch)] 644 'file://{0}.siginfo;downloadfilename={0}.siginfo'.format(sstatefetch)]
645 if bb.utils.to_boolean(d.getVar("SSTATE_VERIFY_SIG", True), False): 645 if bb.utils.to_boolean(d.getVar("SSTATE_VERIFY_SIG"), False):
646 uris += ['file://{0}.sig;downloadfilename={0}.sig'.format(sstatefetch)] 646 uris += ['file://{0}.sig;downloadfilename={0}.sig'.format(sstatefetch)]
647 647
648 for srcuri in uris: 648 for srcuri in uris:
@@ -671,7 +671,7 @@ python sstate_task_postfunc () {
671 671
672 sstate_install(shared_state, d) 672 sstate_install(shared_state, d)
673 for intercept in shared_state['interceptfuncs']: 673 for intercept in shared_state['interceptfuncs']:
674 bb.build.exec_func(intercept, d, (d.getVar("WORKDIR", True),)) 674 bb.build.exec_func(intercept, d, (d.getVar("WORKDIR"),))
675 omask = os.umask(0o002) 675 omask = os.umask(0o002)
676 if omask != 0o002: 676 if omask != 0o002:
677 bb.note("Using umask 0o002 (not %0o) for sstate packaging" % omask) 677 bb.note("Using umask 0o002 (not %0o) for sstate packaging" % omask)
@@ -709,13 +709,13 @@ sstate_create_package () {
709python sstate_sign_package () { 709python sstate_sign_package () {
710 from oe.gpg_sign import get_signer 710 from oe.gpg_sign import get_signer
711 711
712 if d.getVar('SSTATE_SIG_KEY', True): 712 if d.getVar('SSTATE_SIG_KEY'):
713 signer = get_signer(d, 'local') 713 signer = get_signer(d, 'local')
714 sstate_pkg = d.getVar('SSTATE_PKG', True) 714 sstate_pkg = d.getVar('SSTATE_PKG')
715 if os.path.exists(sstate_pkg + '.sig'): 715 if os.path.exists(sstate_pkg + '.sig'):
716 os.unlink(sstate_pkg + '.sig') 716 os.unlink(sstate_pkg + '.sig')
717 signer.detach_sign(sstate_pkg, d.getVar('SSTATE_SIG_KEY', False), None, 717 signer.detach_sign(sstate_pkg, d.getVar('SSTATE_SIG_KEY', False), None,
718 d.getVar('SSTATE_SIG_PASSPHRASE', True), armor=False) 718 d.getVar('SSTATE_SIG_PASSPHRASE'), armor=False)
719} 719}
720 720
721# 721#
@@ -746,7 +746,7 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=False):
746 splithashfn = sq_hashfn[task].split(" ") 746 splithashfn = sq_hashfn[task].split(" ")
747 spec = splithashfn[1] 747 spec = splithashfn[1]
748 if splithashfn[0] == "True": 748 if splithashfn[0] == "True":
749 extrapath = d.getVar("NATIVELSBSTRING", True) + "/" 749 extrapath = d.getVar("NATIVELSBSTRING") + "/"
750 else: 750 else:
751 extrapath = "" 751 extrapath = ""
752 752
@@ -785,7 +785,7 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=False):
785 missed.append(task) 785 missed.append(task)
786 bb.debug(2, "SState: Looked for but didn't find file %s" % sstatefile) 786 bb.debug(2, "SState: Looked for but didn't find file %s" % sstatefile)
787 787
788 mirrors = d.getVar("SSTATE_MIRRORS", True) 788 mirrors = d.getVar("SSTATE_MIRRORS")
789 if mirrors: 789 if mirrors:
790 # Copy the data object and override DL_DIR and SRC_URI 790 # Copy the data object and override DL_DIR and SRC_URI
791 localdata = bb.data.createCopy(d) 791 localdata = bb.data.createCopy(d)
@@ -801,7 +801,7 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=False):
801 801
802 # if BB_NO_NETWORK is set but we also have SSTATE_MIRROR_ALLOW_NETWORK, 802 # if BB_NO_NETWORK is set but we also have SSTATE_MIRROR_ALLOW_NETWORK,
803 # we'll want to allow network access for the current set of fetches. 803 # we'll want to allow network access for the current set of fetches.
804 if localdata.getVar('BB_NO_NETWORK', True) == "1" and localdata.getVar('SSTATE_MIRROR_ALLOW_NETWORK', True) == "1": 804 if localdata.getVar('BB_NO_NETWORK') == "1" and localdata.getVar('SSTATE_MIRROR_ALLOW_NETWORK') == "1":
805 localdata.delVar('BB_NO_NETWORK') 805 localdata.delVar('BB_NO_NETWORK')
806 806
807 whitelist = bb.runqueue.get_setscene_enforce_whitelist(d) 807 whitelist = bb.runqueue.get_setscene_enforce_whitelist(d)
@@ -868,7 +868,7 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=False):
868 if whitelist and missing: 868 if whitelist and missing:
869 bb.fatal('Required artifacts were unavailable - exiting') 869 bb.fatal('Required artifacts were unavailable - exiting')
870 870
871 inheritlist = d.getVar("INHERIT", True) 871 inheritlist = d.getVar("INHERIT")
872 if "toaster" in inheritlist: 872 if "toaster" in inheritlist:
873 evdata = {'missed': [], 'found': []}; 873 evdata = {'missed': [], 'found': []};
874 for task in missed: 874 for task in missed:
@@ -977,15 +977,15 @@ sstate_eventhandler[eventmask] = "bb.build.TaskSucceeded"
977python sstate_eventhandler() { 977python sstate_eventhandler() {
978 d = e.data 978 d = e.data
979 # When we write an sstate package we rewrite the SSTATE_PKG 979 # When we write an sstate package we rewrite the SSTATE_PKG
980 spkg = d.getVar('SSTATE_PKG', True) 980 spkg = d.getVar('SSTATE_PKG')
981 if not spkg.endswith(".tgz"): 981 if not spkg.endswith(".tgz"):
982 taskname = d.getVar("BB_RUNTASK", True)[3:] 982 taskname = d.getVar("BB_RUNTASK")[3:]
983 spec = d.getVar('SSTATE_PKGSPEC', True) 983 spec = d.getVar('SSTATE_PKGSPEC')
984 swspec = d.getVar('SSTATE_SWSPEC', True) 984 swspec = d.getVar('SSTATE_SWSPEC')
985 if taskname in ["fetch", "unpack", "patch", "populate_lic", "preconfigure"] and swspec: 985 if taskname in ["fetch", "unpack", "patch", "populate_lic", "preconfigure"] and swspec:
986 d.setVar("SSTATE_PKGSPEC", "${SSTATE_SWSPEC}") 986 d.setVar("SSTATE_PKGSPEC", "${SSTATE_SWSPEC}")
987 d.setVar("SSTATE_EXTRAPATH", "") 987 d.setVar("SSTATE_EXTRAPATH", "")
988 sstatepkg = d.getVar('SSTATE_PKG', True) 988 sstatepkg = d.getVar('SSTATE_PKG')
989 bb.siggen.dump_this_task(sstatepkg + '_' + taskname + ".tgz" ".siginfo", d) 989 bb.siggen.dump_this_task(sstatepkg + '_' + taskname + ".tgz" ".siginfo", d)
990} 990}
991 991
@@ -1004,7 +1004,7 @@ python sstate_eventhandler2() {
1004 stamps = e.stamps.values() 1004 stamps = e.stamps.values()
1005 removeworkdir = (d.getVar("SSTATE_PRUNE_OBSOLETEWORKDIR", False) == "1") 1005 removeworkdir = (d.getVar("SSTATE_PRUNE_OBSOLETEWORKDIR", False) == "1")
1006 seen = [] 1006 seen = []
1007 for a in d.getVar("SSTATE_ARCHS", True).split(): 1007 for a in d.getVar("SSTATE_ARCHS").split():
1008 toremove = [] 1008 toremove = []
1009 i = d.expand("${SSTATE_MANIFESTS}/index-" + a) 1009 i = d.expand("${SSTATE_MANIFESTS}/index-" + a)
1010 if not os.path.exists(i): 1010 if not os.path.exists(i):
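The whole commit follows one mechanical rule, visible in every hunk above: BitBake's datastore changed the default of getVar()'s expand parameter to True, so d.getVar('X', True) can be shortened to d.getVar('X'), while deliberately unexpanded reads such as d.getVar('SSTATE_PRUNE_OBSOLETEWORKDIR', False) keep their explicit False. A minimal sketch of the two behaviours, using a toy stand-in class rather than BitBake's real DataSmart:

    import re

    class MiniData:
        """Toy stand-in for BitBake's datastore (illustration only)."""
        def __init__(self):
            self._vars = {}

        def setVar(self, name, value):
            self._vars[name] = value

        def getVar(self, name, expand=True):
            # Post-change signature: expand defaults to True, so
            # getVar('X') behaves like the old getVar('X', True).
            value = self._vars.get(name)   # None when unset, as in BitBake
            if value is None or not expand:
                return value
            # Recursively substitute ${NAME} references.
            return re.sub(r"\$\{(\w+)\}",
                          lambda m: self.getVar(m.group(1)) or "", value)

    d = MiniData()
    d.setVar("WORKDIR", "/build/tmp/work")
    d.setVar("T", "${WORKDIR}/temp")
    print(d.getVar("T"))         # /build/tmp/work/temp (expanded by default)
    print(d.getVar("T", False))  # ${WORKDIR}/temp (raw value kept)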
diff --git a/meta/classes/staging.bbclass b/meta/classes/staging.bbclass
index bfabd06f3f..72a7421ba6 100644
--- a/meta/classes/staging.bbclass
+++ b/meta/classes/staging.bbclass
@@ -69,8 +69,8 @@ sysroot_stage_all() {
 python sysroot_strip () {
     import stat, errno
 
-    dvar = d.getVar('SYSROOT_DESTDIR', True)
-    pn = d.getVar('PN', True)
+    dvar = d.getVar('SYSROOT_DESTDIR')
+    pn = d.getVar('PN')
 
     os.chdir(dvar)
 
@@ -103,9 +103,9 @@ python sysroot_strip () {
 
     elffiles = {}
     inodes = {}
-    libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir", True))
-    baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir", True))
-    if (d.getVar('INHIBIT_SYSROOT_STRIP', True) != '1'):
+    libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
+    baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir"))
+    if (d.getVar('INHIBIT_SYSROOT_STRIP') != '1'):
         #
         # First lets figure out all of the files we may have to process
         #
@@ -136,7 +136,7 @@ python sysroot_strip () {
                 elf_file = isELF(file)
                 if elf_file & 1:
                     if elf_file & 2:
-                        if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn, True) or "").split():
+                        if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn) or "").split():
                             bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
                         else:
                             bb.warn("File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn))
@@ -154,7 +154,7 @@ python sysroot_strip () {
         #
         # Now strip them (in parallel)
         #
-        strip = d.getVar("STRIP", True)
+        strip = d.getVar("STRIP")
         sfiles = []
         for file in elffiles:
             elf_file = int(elffiles[file])
@@ -211,13 +211,13 @@ def sysroot_checkhashes(covered, tasknames, fnids, fns, d, invalidtasks = None):
 python do_populate_sysroot () {
     bb.build.exec_func("sysroot_stage_all", d)
     bb.build.exec_func("sysroot_strip", d)
-    for f in (d.getVar('SYSROOT_PREPROCESS_FUNCS', True) or '').split():
+    for f in (d.getVar('SYSROOT_PREPROCESS_FUNCS') or '').split():
         bb.build.exec_func(f, d)
-    pn = d.getVar("PN", True)
-    multiprov = d.getVar("MULTI_PROVIDER_WHITELIST", True).split()
+    pn = d.getVar("PN")
+    multiprov = d.getVar("MULTI_PROVIDER_WHITELIST").split()
     provdir = d.expand("${SYSROOT_DESTDIR}${base_prefix}/sysroot-providers/")
     bb.utils.mkdirhier(provdir)
-    for p in d.getVar("PROVIDES", True).split():
+    for p in d.getVar("PROVIDES").split():
         if p in multiprov:
             continue
         p = p.replace("/", "_")
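One idiom repeated throughout these hunks deserves a note: list-valued variables are read as (d.getVar('X') or '').split(), because an unset variable yields None rather than an empty string, whether or not expansion is requested. A standalone illustration (the dict lookup is a stand-in, not BitBake's API):

    # Why the "or ''" guard matters: getVar() returns None for an unset
    # variable, and None.split() raises AttributeError.
    settings = {"PN": "example"}

    def getVar(name, expand=True):
        return settings.get(name)

    for f in (getVar("SYSROOT_PREPROCESS_FUNCS") or "").split():
        print("would exec_func", f)    # never reached when the var is unset
    print("done")                      # no crash, unlike getVar(...).split()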
diff --git a/meta/classes/syslinux.bbclass b/meta/classes/syslinux.bbclass
index 7778fd708f..d8859b35bd 100644
--- a/meta/classes/syslinux.bbclass
+++ b/meta/classes/syslinux.bbclass
@@ -84,12 +84,12 @@ python build_syslinux_cfg () {
     import copy
     import sys
 
-    workdir = d.getVar('WORKDIR', True)
+    workdir = d.getVar('WORKDIR')
     if not workdir:
         bb.error("WORKDIR not defined, unable to package")
         return
 
-    labels = d.getVar('LABELS', True)
+    labels = d.getVar('LABELS')
     if not labels:
         bb.debug(1, "LABELS not defined, nothing to do")
         return
@@ -98,7 +98,7 @@ python build_syslinux_cfg () {
         bb.debug(1, "No labels, nothing to do")
         return
 
-    cfile = d.getVar('SYSLINUX_CFG', True)
+    cfile = d.getVar('SYSLINUX_CFG')
     if not cfile:
         bb.fatal('Unable to read SYSLINUX_CFG')
 
@@ -109,39 +109,39 @@ python build_syslinux_cfg () {
 
     cfgfile.write('# Automatically created by OE\n')
 
-    opts = d.getVar('SYSLINUX_OPTS', True)
+    opts = d.getVar('SYSLINUX_OPTS')
 
     if opts:
         for opt in opts.split(';'):
             cfgfile.write('%s\n' % opt)
 
-    allowoptions = d.getVar('SYSLINUX_ALLOWOPTIONS', True)
+    allowoptions = d.getVar('SYSLINUX_ALLOWOPTIONS')
     if allowoptions:
         cfgfile.write('ALLOWOPTIONS %s\n' % allowoptions)
     else:
         cfgfile.write('ALLOWOPTIONS 1\n')
 
-    syslinux_default_console = d.getVar('SYSLINUX_DEFAULT_CONSOLE', True)
-    syslinux_serial_tty = d.getVar('SYSLINUX_SERIAL_TTY', True)
-    syslinux_serial = d.getVar('SYSLINUX_SERIAL', True)
+    syslinux_default_console = d.getVar('SYSLINUX_DEFAULT_CONSOLE')
+    syslinux_serial_tty = d.getVar('SYSLINUX_SERIAL_TTY')
+    syslinux_serial = d.getVar('SYSLINUX_SERIAL')
     if syslinux_serial:
         cfgfile.write('SERIAL %s\n' % syslinux_serial)
 
-    menu = (d.getVar('AUTO_SYSLINUXMENU', True) == "1")
+    menu = (d.getVar('AUTO_SYSLINUXMENU') == "1")
 
     if menu and syslinux_serial:
         cfgfile.write('DEFAULT Graphics console %s\n' % (labels.split()[0]))
     else:
         cfgfile.write('DEFAULT %s\n' % (labels.split()[0]))
 
-    timeout = d.getVar('SYSLINUX_TIMEOUT', True)
+    timeout = d.getVar('SYSLINUX_TIMEOUT')
 
     if timeout:
         cfgfile.write('TIMEOUT %s\n' % timeout)
     else:
         cfgfile.write('TIMEOUT 50\n')
 
-    prompt = d.getVar('SYSLINUX_PROMPT', True)
+    prompt = d.getVar('SYSLINUX_PROMPT')
     if prompt:
         cfgfile.write('PROMPT %s\n' % prompt)
     else:
@@ -151,14 +151,14 @@ python build_syslinux_cfg () {
         cfgfile.write('ui vesamenu.c32\n')
         cfgfile.write('menu title Select kernel options and boot kernel\n')
         cfgfile.write('menu tabmsg Press [Tab] to edit, [Return] to select\n')
-        splash = d.getVar('SYSLINUX_SPLASH', True)
+        splash = d.getVar('SYSLINUX_SPLASH')
         if splash:
             cfgfile.write('menu background splash.lss\n')
 
     for label in labels.split():
         localdata = bb.data.createCopy(d)
 
-        overrides = localdata.getVar('OVERRIDES', True)
+        overrides = localdata.getVar('OVERRIDES')
         if not overrides:
             bb.fatal('OVERRIDES not defined')
 
@@ -170,19 +170,19 @@ python build_syslinux_cfg () {
         btypes = [ [ "Graphics console ", syslinux_default_console ],
                    [ "Serial console ", syslinux_serial_tty ] ]
 
-        root= d.getVar('SYSLINUX_ROOT', True)
+        root= d.getVar('SYSLINUX_ROOT')
         if not root:
             bb.fatal('SYSLINUX_ROOT not defined')
 
         for btype in btypes:
             cfgfile.write('LABEL %s%s\nKERNEL /vmlinuz\n' % (btype[0], label))
 
-            exargs = d.getVar('SYSLINUX_KERNEL_ARGS', True)
+            exargs = d.getVar('SYSLINUX_KERNEL_ARGS')
             if exargs:
                 btype[1] += " " + exargs
 
-            append = localdata.getVar('APPEND', True)
-            initrd = localdata.getVar('INITRD', True)
+            append = localdata.getVar('APPEND')
+            initrd = localdata.getVar('INITRD')
 
             append = root + " " + append
             cfgfile.write('APPEND ')
diff --git a/meta/classes/systemd-boot.bbclass b/meta/classes/systemd-boot.bbclass
index 05244c7e50..60729a756c 100644
--- a/meta/classes/systemd-boot.bbclass
+++ b/meta/classes/systemd-boot.bbclass
@@ -63,8 +63,8 @@ efi_hddimg_populate() {
 }
 
 python build_efi_cfg() {
-    s = d.getVar("S", True)
-    labels = d.getVar('LABELS', True)
+    s = d.getVar("S")
+    labels = d.getVar('LABELS')
     if not labels:
         bb.debug(1, "LABELS not defined, nothing to do")
         return
@@ -73,7 +73,7 @@ python build_efi_cfg() {
         bb.debug(1, "No labels, nothing to do")
         return
 
-    cfile = d.getVar('SYSTEMD_BOOT_CFG', True)
+    cfile = d.getVar('SYSTEMD_BOOT_CFG')
     try:
         cfgfile = open(cfile, 'w')
     except OSError:
@@ -81,7 +81,7 @@ python build_efi_cfg() {
 
     cfgfile.write('# Automatically created by OE\n')
     cfgfile.write('default %s\n' % (labels.split()[0]))
-    timeout = d.getVar('SYSTEMD_BOOT_TIMEOUT', True)
+    timeout = d.getVar('SYSTEMD_BOOT_TIMEOUT')
     if timeout:
         cfgfile.write('timeout %s\n' % timeout)
     else:
@@ -91,7 +91,7 @@ python build_efi_cfg() {
     for label in labels.split():
         localdata = d.createCopy()
 
-        overrides = localdata.getVar('OVERRIDES', True)
+        overrides = localdata.getVar('OVERRIDES')
         if not overrides:
             bb.fatal('OVERRIDES not defined')
 
@@ -107,8 +107,8 @@ python build_efi_cfg() {
         entrycfg.write('title %s\n' % label)
         entrycfg.write('linux /vmlinuz\n')
 
-        append = localdata.getVar('APPEND', True)
-        initrd = localdata.getVar('INITRD', True)
+        append = localdata.getVar('APPEND')
+        initrd = localdata.getVar('INITRD')
 
         if initrd:
             entrycfg.write('initrd /initrd\n')
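build_syslinux_cfg and build_efi_cfg share the same per-label pattern: copy the datastore, prepend the label to OVERRIDES, and let override-qualified values such as APPEND_<label> shadow the plain ones. A simplified, self-contained model of that lookup (not BitBake's real override engine, which also handles priorities, appends, and recursion):

    class OverrideData:
        """Illustrative stand-in; BitBake's override handling is richer."""
        def __init__(self, values=None):
            self._vars = dict(values or {})

        def createCopy(self):
            return OverrideData(self._vars)

        def setVar(self, name, value):
            self._vars[name] = value

        def getVar(self, name):
            # Simplified resolution of the old "VAR_override" syntax:
            # the first override listed in OVERRIDES that has a value wins.
            for override in (self._vars.get("OVERRIDES") or "").split(":"):
                if override and name + "_" + override in self._vars:
                    return self._vars[name + "_" + override]
            return self._vars.get(name)

    d = OverrideData({
        "OVERRIDES": "",
        "APPEND": "console=tty0",
        "APPEND_install": "console=tty0 install",
    })

    for label in ["boot", "install"]:
        localdata = d.createCopy()            # as in build_efi_cfg above
        localdata.setVar("OVERRIDES", label + ":" + localdata.getVar("OVERRIDES"))
        print(label, "->", localdata.getVar("APPEND"))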
diff --git a/meta/classes/systemd.bbclass b/meta/classes/systemd.bbclass
index 99a08a09a4..34bfefad40 100644
--- a/meta/classes/systemd.bbclass
+++ b/meta/classes/systemd.bbclass
@@ -71,12 +71,12 @@ python systemd_populate_packages() {
     def get_package_var(d, var, pkg):
         val = (d.getVar('%s_%s' % (var, pkg), True) or "").strip()
         if val == "":
-            val = (d.getVar(var, True) or "").strip()
+            val = (d.getVar(var) or "").strip()
         return val
 
     # Check if systemd-packages already included in PACKAGES
     def systemd_check_package(pkg_systemd):
-        packages = d.getVar('PACKAGES', True)
+        packages = d.getVar('PACKAGES')
         if not pkg_systemd in packages.split():
             bb.error('%s does not appear in package list, please add it' % pkg_systemd)
 
@@ -90,23 +90,23 @@ python systemd_populate_packages() {
         localdata.prependVar("OVERRIDES", pkg + ":")
         bb.data.update_data(localdata)
 
-        postinst = d.getVar('pkg_postinst_%s' % pkg, True)
+        postinst = d.getVar('pkg_postinst_%s' % pkg)
         if not postinst:
             postinst = '#!/bin/sh\n'
-        postinst += localdata.getVar('systemd_postinst', True)
+        postinst += localdata.getVar('systemd_postinst')
         d.setVar('pkg_postinst_%s' % pkg, postinst)
 
-        prerm = d.getVar('pkg_prerm_%s' % pkg, True)
+        prerm = d.getVar('pkg_prerm_%s' % pkg)
         if not prerm:
             prerm = '#!/bin/sh\n'
-        prerm += localdata.getVar('systemd_prerm', True)
+        prerm += localdata.getVar('systemd_prerm')
         d.setVar('pkg_prerm_%s' % pkg, prerm)
 
 
     # Add files to FILES_*-systemd if existent and not already done
     def systemd_append_file(pkg_systemd, file_append):
         appended = False
-        if os.path.exists(oe.path.join(d.getVar("D", True), file_append)):
+        if os.path.exists(oe.path.join(d.getVar("D"), file_append)):
             var_name = "FILES_" + pkg_systemd
             files = d.getVar(var_name, False) or ""
             if file_append not in files.split():
@@ -118,7 +118,7 @@ python systemd_populate_packages() {
     def systemd_add_files_and_parse(pkg_systemd, path, service, keys):
         # avoid infinite recursion
         if systemd_append_file(pkg_systemd, oe.path.join(path, service)):
-            fullpath = oe.path.join(d.getVar("D", True), path, service)
+            fullpath = oe.path.join(d.getVar("D"), path, service)
             if service.find('.service') != -1:
                 # for *.service add *@.service
                 service_base = service.replace('.service', '')
@@ -141,9 +141,9 @@ python systemd_populate_packages() {
 
     # Check service-files and call systemd_add_files_and_parse for each entry
     def systemd_check_services():
-        searchpaths = [oe.path.join(d.getVar("sysconfdir", True), "systemd", "system"),]
-        searchpaths.append(d.getVar("systemd_system_unitdir", True))
-        systemd_packages = d.getVar('SYSTEMD_PACKAGES', True)
+        searchpaths = [oe.path.join(d.getVar("sysconfdir"), "systemd", "system"),]
+        searchpaths.append(d.getVar("systemd_system_unitdir"))
+        systemd_packages = d.getVar('SYSTEMD_PACKAGES')
 
         keys = 'Also'
         # scan for all in SYSTEMD_SERVICE[]
@@ -158,11 +158,11 @@ python systemd_populate_packages() {
                 base = re.sub('@[^.]+.', '@.', service)
 
             for path in searchpaths:
-                if os.path.exists(oe.path.join(d.getVar("D", True), path, service)):
+                if os.path.exists(oe.path.join(d.getVar("D"), path, service)):
                     path_found = path
                     break
                 elif base is not None:
-                    if os.path.exists(oe.path.join(d.getVar("D", True), path, base)):
+                    if os.path.exists(oe.path.join(d.getVar("D"), path, base)):
                         path_found = path
                         break
 
@@ -172,10 +172,10 @@ python systemd_populate_packages() {
                 bb.fatal("SYSTEMD_SERVICE_%s value %s does not exist" % (pkg_systemd, service))
 
     # Run all modifications once when creating package
-    if os.path.exists(d.getVar("D", True)):
-        for pkg in d.getVar('SYSTEMD_PACKAGES', True).split():
+    if os.path.exists(d.getVar("D")):
+        for pkg in d.getVar('SYSTEMD_PACKAGES').split():
             systemd_check_package(pkg)
-            if d.getVar('SYSTEMD_SERVICE_' + pkg, True):
+            if d.getVar('SYSTEMD_SERVICE_' + pkg):
                 systemd_generate_package_scripts(pkg)
         systemd_check_services()
 }
@@ -185,7 +185,7 @@ PACKAGESPLITFUNCS_prepend = "systemd_populate_packages "
 python rm_systemd_unitdir (){
     import shutil
     if not bb.utils.contains('DISTRO_FEATURES', 'systemd', True, False, d):
-        systemd_unitdir = oe.path.join(d.getVar("D", True), d.getVar('systemd_unitdir', True))
+        systemd_unitdir = oe.path.join(d.getVar("D"), d.getVar('systemd_unitdir'))
         if os.path.exists(systemd_unitdir):
             shutil.rmtree(systemd_unitdir)
         systemd_libdir = os.path.dirname(systemd_unitdir)
@@ -196,12 +196,12 @@ do_install[postfuncs] += "rm_systemd_unitdir "
 
 python rm_sysvinit_initddir (){
     import shutil
-    sysv_initddir = oe.path.join(d.getVar("D", True), (d.getVar('INIT_D_DIR', True) or "/etc/init.d"))
+    sysv_initddir = oe.path.join(d.getVar("D"), (d.getVar('INIT_D_DIR') or "/etc/init.d"))
 
     if bb.utils.contains('DISTRO_FEATURES', 'systemd', True, False, d) and \
         not bb.utils.contains('DISTRO_FEATURES', 'sysvinit', True, False, d) and \
         os.path.exists(sysv_initddir):
-        systemd_system_unitdir = oe.path.join(d.getVar("D", True), d.getVar('systemd_system_unitdir', True))
+        systemd_system_unitdir = oe.path.join(d.getVar("D"), d.getVar('systemd_system_unitdir'))
 
         # If systemd_system_unitdir contains anything, delete sysv_initddir
         if (os.path.exists(systemd_system_unitdir) and os.listdir(systemd_system_unitdir)):
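The sweep is not quite exhaustive: get_package_var above still passes an explicit True for its computed '%s_%s' % (var, pkg) name, and getVarFlag(..., True) calls are left alone throughout. A throwaway checker along these lines (a hypothetical helper, not part of the commit) lists what remains:

    import re
    import sys

    # Hypothetical helper: flag remaining explicit ", True)" expand
    # arguments in getVar/getVarFlag calls across the given files.
    trailing_true = re.compile(r",\s*True\s*\)")

    for path in sys.argv[1:]:
        with open(path) as f:
            for lineno, line in enumerate(f, start=1):
                if "getVar" in line and trailing_true.search(line):
                    print("%s:%d: %s" % (path, lineno, line.strip()))

Run as python3 find_true.py meta/classes/*.bbclass; note it also reports getVarFlag(..., True) calls, which this commit deliberately does not touch.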
diff --git a/meta/classes/terminal.bbclass b/meta/classes/terminal.bbclass
index cd8d124507..5db013f4dc 100644
--- a/meta/classes/terminal.bbclass
+++ b/meta/classes/terminal.bbclass
@@ -19,9 +19,9 @@ def emit_terminal_func(command, envdata, d):
     envdata.setVar(cmd_func, 'exec ' + command)
     envdata.setVarFlag(cmd_func, 'func', '1')
 
-    runfmt = d.getVar('BB_RUNFMT', True) or "run.{func}.{pid}"
+    runfmt = d.getVar('BB_RUNFMT') or "run.{func}.{pid}"
     runfile = runfmt.format(func=cmd_func, task=cmd_func, taskfunc=cmd_func, pid=os.getpid())
-    runfile = os.path.join(d.getVar('T', True), runfile)
+    runfile = os.path.join(d.getVar('T'), runfile)
     bb.utils.mkdirhier(os.path.dirname(runfile))
 
     with open(runfile, 'w') as script:
@@ -44,7 +44,7 @@ def oe_terminal(command, title, d):
         envdata.setVarFlag(v, 'export', '1')
 
     for export in oe.data.typed_value('OE_TERMINAL_EXPORTS', d):
-        value = d.getVar(export, True)
+        value = d.getVar(export)
         if value is not None:
             os.environ[export] = str(value)
             envdata.setVar(export, str(value))
@@ -60,7 +60,7 @@ def oe_terminal(command, title, d):
     for key in origbbenv:
         if key in envdata:
             continue
-        value = origbbenv.getVar(key, True)
+        value = origbbenv.getVar(key)
         if value is not None:
             os.environ[key] = str(value)
             envdata.setVar(key, str(value))
diff --git a/meta/classes/testexport.bbclass b/meta/classes/testexport.bbclass
index 5147020820..3f7b2de71d 100644
--- a/meta/classes/testexport.bbclass
+++ b/meta/classes/testexport.bbclass
@@ -49,19 +49,19 @@ def exportTests(d,tc):
     import re
     import oe.path
 
-    exportpath = d.getVar("TEST_EXPORT_DIR", True)
+    exportpath = d.getVar("TEST_EXPORT_DIR")
 
     savedata = {}
     savedata["d"] = {}
     savedata["target"] = {}
-    savedata["target"]["ip"] = tc.target.ip or d.getVar("TEST_TARGET_IP", True)
-    savedata["target"]["server_ip"] = tc.target.server_ip or d.getVar("TEST_SERVER_IP", True)
+    savedata["target"]["ip"] = tc.target.ip or d.getVar("TEST_TARGET_IP")
+    savedata["target"]["server_ip"] = tc.target.server_ip or d.getVar("TEST_SERVER_IP")
 
     keys = [ key for key in d.keys() if not key.startswith("_") and not key.startswith("BB") \
             and not key.startswith("B_pn") and not key.startswith("do_") and not d.getVarFlag(key, "func", True)]
     for key in keys:
         try:
-            savedata["d"][key] = d.getVar(key, True)
+            savedata["d"][key] = d.getVar(key)
         except bb.data_smart.ExpansionError:
             # we don't care about those anyway
             pass
@@ -71,7 +71,7 @@ def exportTests(d,tc):
         json.dump(savedata, f, skipkeys=True, indent=4, sort_keys=True)
 
     # Replace absolute path with relative in the file
-    exclude_path = os.path.join(d.getVar("COREBASE", True),'meta','lib','oeqa')
+    exclude_path = os.path.join(d.getVar("COREBASE"),'meta','lib','oeqa')
     f1 = open(json_file,'r').read()
     f2 = open(json_file,'w')
     m = f1.replace(exclude_path,'oeqa')
@@ -90,7 +90,7 @@ def exportTests(d,tc):
     bb.utils.mkdirhier(os.path.join(exportpath, "oeqa/runtime/files"))
     bb.utils.mkdirhier(os.path.join(exportpath, "oeqa/utils"))
     # copy test modules, this should cover tests in other layers too
-    bbpath = d.getVar("BBPATH", True).split(':')
+    bbpath = d.getVar("BBPATH").split(':')
     for t in tc.testslist:
         isfolder = False
         if re.search("\w+\.\w+\.test_\S+", t):
@@ -111,7 +111,7 @@ def exportTests(d,tc):
     if os.path.isfile(json_file):
         shutil.copy2(json_file, os.path.join(exportpath, "oeqa/runtime"))
     # Get meta layer
-    for layer in d.getVar("BBLAYERS", True).split():
+    for layer in d.getVar("BBLAYERS").split():
         if os.path.basename(layer) == "meta":
             meta_layer = layer
             break
@@ -130,28 +130,28 @@ def exportTests(d,tc):
                 shutil.copy2(os.path.join(root, f), os.path.join(exportpath, "oeqa/runtime/files"))
 
     # Create tar file for common parts of testexport
-    create_tarball(d, "testexport.tar.gz", d.getVar("TEST_EXPORT_DIR", True))
+    create_tarball(d, "testexport.tar.gz", d.getVar("TEST_EXPORT_DIR"))
 
     # Copy packages needed for runtime testing
-    test_pkg_dir = d.getVar("TEST_NEEDED_PACKAGES_DIR", True)
+    test_pkg_dir = d.getVar("TEST_NEEDED_PACKAGES_DIR")
     if os.listdir(test_pkg_dir):
-        export_pkg_dir = os.path.join(d.getVar("TEST_EXPORT_DIR", True), "packages")
+        export_pkg_dir = os.path.join(d.getVar("TEST_EXPORT_DIR"), "packages")
         oe.path.copytree(test_pkg_dir, export_pkg_dir)
         # Create tar file for packages needed by the DUT
-        create_tarball(d, "testexport_packages_%s.tar.gz" % d.getVar("MACHINE", True), export_pkg_dir)
+        create_tarball(d, "testexport_packages_%s.tar.gz" % d.getVar("MACHINE"), export_pkg_dir)
 
     # Copy SDK
-    if d.getVar("TEST_EXPORT_SDK_ENABLED", True) == "1":
-        sdk_deploy = d.getVar("SDK_DEPLOY", True)
-        tarball_name = "%s.sh" % d.getVar("TEST_EXPORT_SDK_NAME", True)
+    if d.getVar("TEST_EXPORT_SDK_ENABLED") == "1":
+        sdk_deploy = d.getVar("SDK_DEPLOY")
+        tarball_name = "%s.sh" % d.getVar("TEST_EXPORT_SDK_NAME")
         tarball_path = os.path.join(sdk_deploy, tarball_name)
-        export_sdk_dir = os.path.join(d.getVar("TEST_EXPORT_DIR", True),
-                                      d.getVar("TEST_EXPORT_SDK_DIR", True))
+        export_sdk_dir = os.path.join(d.getVar("TEST_EXPORT_DIR"),
+                                      d.getVar("TEST_EXPORT_SDK_DIR"))
         bb.utils.mkdirhier(export_sdk_dir)
         shutil.copy2(tarball_path, export_sdk_dir)
 
         # Create tar file for the sdk
-        create_tarball(d, "testexport_sdk_%s.tar.gz" % d.getVar("SDK_ARCH", True), export_sdk_dir)
+        create_tarball(d, "testexport_sdk_%s.tar.gz" % d.getVar("SDK_ARCH"), export_sdk_dir)
 
     bb.plain("Exported tests to: %s" % exportpath)
 
@@ -161,8 +161,8 @@ def testexport_main(d):
     from oeqa.utils.dump import get_host_dumper
 
     test_create_extract_dirs(d)
-    export_dir = d.getVar("TEST_EXPORT_DIR", True)
-    bb.utils.mkdirhier(d.getVar("TEST_LOG_DIR", True))
+    export_dir = d.getVar("TEST_EXPORT_DIR")
+    bb.utils.mkdirhier(d.getVar("TEST_LOG_DIR"))
     bb.utils.remove(export_dir, recurse=True)
     bb.utils.mkdirhier(export_dir)
 
@@ -188,7 +188,7 @@ def create_tarball(d, tar_name, src_dir):
 
     import tarfile
 
-    tar_path = os.path.join(d.getVar("TEST_EXPORT_DIR", True), tar_name)
+    tar_path = os.path.join(d.getVar("TEST_EXPORT_DIR"), tar_name)
     current_dir = os.getcwd()
     src_dir = src_dir.rstrip('/')
     dir_name = os.path.dirname(src_dir)
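create_tarball above is a thin wrapper over Python's tarfile module; the essential behaviour is archiving a directory with paths stored relative to its parent so the tarball unpacks cleanly. A minimal sketch (the paths are illustrative):

    import os
    import tarfile

    def create_tarball_sketch(tar_name, src_dir):
        # Archive src_dir into a gzipped tar, storing entries under the
        # directory's own basename rather than its absolute path.
        src_dir = src_dir.rstrip('/')
        with tarfile.open(tar_name, "w:gz") as tar:
            tar.add(src_dir, arcname=os.path.basename(src_dir))

    # Illustrative invocation, not the real TEST_EXPORT_DIR layout:
    # create_tarball_sketch("testexport.tar.gz", "/tmp/testexport")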
diff --git a/meta/classes/testimage.bbclass b/meta/classes/testimage.bbclass
index 6b6781d860..770ec801c2 100644
--- a/meta/classes/testimage.bbclass
+++ b/meta/classes/testimage.bbclass
@@ -130,8 +130,8 @@ def testimage_main(d):
     from oeqa.targetcontrol import get_target_controller
     from oeqa.utils.dump import get_host_dumper
 
-    pn = d.getVar("PN", True)
-    bb.utils.mkdirhier(d.getVar("TEST_LOG_DIR", True))
+    pn = d.getVar("PN")
+    bb.utils.mkdirhier(d.getVar("TEST_LOG_DIR"))
     test_create_extract_dirs(d)
 
     # we need the host dumper in test context
@@ -176,10 +176,10 @@ def testimage_main(d):
         target.stop()
 
 def test_create_extract_dirs(d):
-    install_path = d.getVar("TEST_INSTALL_TMP_DIR", True)
-    package_path = d.getVar("TEST_PACKAGED_DIR", True)
-    extracted_path = d.getVar("TEST_EXTRACTED_DIR", True)
-    bb.utils.mkdirhier(d.getVar("TEST_LOG_DIR", True))
+    install_path = d.getVar("TEST_INSTALL_TMP_DIR")
+    package_path = d.getVar("TEST_PACKAGED_DIR")
+    extracted_path = d.getVar("TEST_EXTRACTED_DIR")
+    bb.utils.mkdirhier(d.getVar("TEST_LOG_DIR"))
     bb.utils.remove(package_path, recurse=True)
     bb.utils.mkdirhier(install_path)
     bb.utils.mkdirhier(package_path)
diff --git a/meta/classes/testsdk.bbclass b/meta/classes/testsdk.bbclass
index 06b4c5034f..063b9080a5 100644
--- a/meta/classes/testsdk.bbclass
+++ b/meta/classes/testsdk.bbclass
@@ -54,8 +54,8 @@ def testsdk_main(d):
     import subprocess
     from oeqa.oetest import SDKTestContext
 
-    pn = d.getVar("PN", True)
-    bb.utils.mkdirhier(d.getVar("TEST_LOG_DIR", True))
+    pn = d.getVar("PN")
+    bb.utils.mkdirhier(d.getVar("TEST_LOG_DIR"))
 
     tcname = d.expand("${SDK_DEPLOY}/${TOOLCHAIN_OUTPUTNAME}.sh")
     if not os.path.exists(tcname):
@@ -100,12 +100,12 @@ def testsdkext_main(d):
 
     # extensible sdk can be contaminated if native programs are
     # in PATH, i.e. use perl-native instead of eSDK one.
-    paths_to_avoid = [d.getVar('STAGING_DIR', True),
-                      d.getVar('BASE_WORKDIR', True)]
+    paths_to_avoid = [d.getVar('STAGING_DIR'),
+                      d.getVar('BASE_WORKDIR')]
     os.environ['PATH'] = avoid_paths_in_environ(paths_to_avoid)
 
-    pn = d.getVar("PN", True)
-    bb.utils.mkdirhier(d.getVar("TEST_LOG_SDKEXT_DIR", True))
+    pn = d.getVar("PN")
+    bb.utils.mkdirhier(d.getVar("TEST_LOG_SDKEXT_DIR"))
 
     tcname = d.expand("${SDK_DEPLOY}/${TOOLCHAINEXT_OUTPUTNAME}.sh")
     if not os.path.exists(tcname):
diff --git a/meta/classes/tinderclient.bbclass b/meta/classes/tinderclient.bbclass
index 917b74d887..706a392d7f 100644
--- a/meta/classes/tinderclient.bbclass
+++ b/meta/classes/tinderclient.bbclass
@@ -55,22 +55,22 @@ def tinder_format_http_post(d,status,log):
 
     # the variables we will need to send on this form post
     variables = {
-        "tree" : d.getVar('TINDER_TREE', True),
-        "machine_name" : d.getVar('TINDER_MACHINE', True),
+        "tree" : d.getVar('TINDER_TREE'),
+        "machine_name" : d.getVar('TINDER_MACHINE'),
         "os" : os.uname()[0],
         "os_version" : os.uname()[2],
         "compiler" : "gcc",
-        "clobber" : d.getVar('TINDER_CLOBBER', True) or "0",
-        "srcdate" : d.getVar('SRCDATE', True),
-        "PN" : d.getVar('PN', True),
-        "PV" : d.getVar('PV', True),
-        "PR" : d.getVar('PR', True),
-        "FILE" : d.getVar('FILE', True) or "N/A",
-        "TARGETARCH" : d.getVar('TARGET_ARCH', True),
-        "TARGETFPU" : d.getVar('TARGET_FPU', True) or "Unknown",
-        "TARGETOS" : d.getVar('TARGET_OS', True) or "Unknown",
-        "MACHINE" : d.getVar('MACHINE', True) or "Unknown",
-        "DISTRO" : d.getVar('DISTRO', True) or "Unknown",
+        "clobber" : d.getVar('TINDER_CLOBBER') or "0",
+        "srcdate" : d.getVar('SRCDATE'),
+        "PN" : d.getVar('PN'),
+        "PV" : d.getVar('PV'),
+        "PR" : d.getVar('PR'),
+        "FILE" : d.getVar('FILE') or "N/A",
+        "TARGETARCH" : d.getVar('TARGET_ARCH'),
+        "TARGETFPU" : d.getVar('TARGET_FPU') or "Unknown",
+        "TARGETOS" : d.getVar('TARGET_OS') or "Unknown",
+        "MACHINE" : d.getVar('MACHINE') or "Unknown",
+        "DISTRO" : d.getVar('DISTRO') or "Unknown",
         "zecke-rocks" : "sure",
     }
 
@@ -127,7 +127,7 @@ def tinder_build_start(d):
 
     # now we will need to save the machine number
     # we will override any previous numbers
-    f = open(d.getVar('TMPDIR', True)+"/tinder-machine.id", 'w')
+    f = open(d.getVar('TMPDIR')+"/tinder-machine.id", 'w')
     f.write(report)
 
 
@@ -137,8 +137,8 @@ def tinder_send_http(d, status, _log):
     """
 
     # get the body and type
-    server = d.getVar('TINDER_HOST', True)
-    url = d.getVar('TINDER_URL', True)
+    server = d.getVar('TINDER_HOST')
+    url = d.getVar('TINDER_URL')
 
     selector = url + "/xml/build_status.pl"
 
@@ -278,7 +278,7 @@ def tinder_do_tinder_report(event):
 
     try:
         # truncate the tinder log file
-        f = open(event.data.getVar('TINDER_LOG', True), 'w')
+        f = open(event.data.getVar('TINDER_LOG'), 'w')
         f.write("")
         f.close()
     except:
@@ -287,7 +287,7 @@ def tinder_do_tinder_report(event):
     try:
         # write a status to the file. This is needed for the -k option
         # of BitBake
-        g = open(event.data.getVar('TMPDIR', True)+"/tinder-status", 'w')
+        g = open(event.data.getVar('TMPDIR')+"/tinder-status", 'w')
         g.write("")
         g.close()
     except IOError:
@@ -296,10 +296,10 @@ def tinder_do_tinder_report(event):
     # Append the Task-Log (compile,configure...) to the log file
     # we will send to the server
     if name == "TaskSucceeded" or name == "TaskFailed":
-        log_file = glob.glob("%s/log.%s.*" % (event.data.getVar('T', True), event.task))
+        log_file = glob.glob("%s/log.%s.*" % (event.data.getVar('T'), event.task))
 
         if len(log_file) != 0:
-            to_file = event.data.getVar('TINDER_LOG', True)
+            to_file = event.data.getVar('TINDER_LOG')
             log += "".join(open(log_file[0], 'r').readlines())
 
     # set the right 'HEADER'/Summary for the TinderBox
@@ -310,16 +310,16 @@ def tinder_do_tinder_report(event):
     elif name == "TaskFailed":
         log += "<--- TINDERBOX Task %s failed (FAILURE)\n" % event.task
     elif name == "PkgStarted":
-        log += "---> TINDERBOX Package %s started\n" % event.data.getVar('PF', True)
+        log += "---> TINDERBOX Package %s started\n" % event.data.getVar('PF')
     elif name == "PkgSucceeded":
-        log += "<--- TINDERBOX Package %s done (SUCCESS)\n" % event.data.getVar('PF', True)
+        log += "<--- TINDERBOX Package %s done (SUCCESS)\n" % event.data.getVar('PF')
     elif name == "PkgFailed":
-        if not event.data.getVar('TINDER_AUTOBUILD', True) == "0":
+        if not event.data.getVar('TINDER_AUTOBUILD') == "0":
             build.exec_task('do_clean', event.data)
-        log += "<--- TINDERBOX Package %s failed (FAILURE)\n" % event.data.getVar('PF', True)
+        log += "<--- TINDERBOX Package %s failed (FAILURE)\n" % event.data.getVar('PF')
         status = 200
         # remember the failure for the -k case
-        h = open(event.data.getVar('TMPDIR', True)+"/tinder-status", 'w')
+        h = open(event.data.getVar('TMPDIR')+"/tinder-status", 'w')
         h.write("200")
     elif name == "BuildCompleted":
         log += "Build Completed\n"
@@ -342,7 +342,7 @@ def tinder_do_tinder_report(event):
         log += "Error:Was Runtime: %d\n" % event.isRuntime()
         status = 200
         # remember the failure for the -k case
-        h = open(event.data.getVar('TMPDIR', True)+"/tinder-status", 'w')
+        h = open(event.data.getVar('TMPDIR')+"/tinder-status", 'w')
         h.write("200")
 
     # now post the log
@@ -360,7 +360,7 @@ python tinderclient_eventhandler() {
     if e.data is None or bb.event.getName(e) == "MsgNote":
         return
 
-    do_tinder_report = e.data.getVar('TINDER_REPORT', True)
+    do_tinder_report = e.data.getVar('TINDER_REPORT')
     if do_tinder_report and do_tinder_report == "1":
         tinder_do_tinder_report(e)
 
diff --git a/meta/classes/toaster.bbclass b/meta/classes/toaster.bbclass
index 4bddf34e9c..4ea20567a3 100644
--- a/meta/classes/toaster.bbclass
+++ b/meta/classes/toaster.bbclass
@@ -80,7 +80,7 @@ python toaster_layerinfo_dumpdata() {
         return layer_info
 
 
-    bblayers = e.data.getVar("BBLAYERS", True)
+    bblayers = e.data.getVar("BBLAYERS")
 
     llayerinfo = {}
 
@@ -119,10 +119,10 @@ python toaster_package_dumpdata() {
     """
 
     # No need to try and dumpdata if the recipe isn't generating packages
-    if not d.getVar('PACKAGES', True):
+    if not d.getVar('PACKAGES'):
         return
 
-    pkgdatadir = d.getVar('PKGDESTWORK', True)
+    pkgdatadir = d.getVar('PKGDESTWORK')
     lpkgdata = {}
     datadir = os.path.join(pkgdatadir, 'runtime')
 
@@ -142,7 +142,7 @@ python toaster_artifact_dumpdata() {
     """
 
     event_data = {
-        "TOOLCHAIN_OUTPUTNAME": d.getVar("TOOLCHAIN_OUTPUTNAME", True)
+        "TOOLCHAIN_OUTPUTNAME": d.getVar("TOOLCHAIN_OUTPUTNAME")
     }
 
     bb.event.fire(bb.event.MetadataEvent("SDKArtifactInfo", event_data), d)
@@ -157,9 +157,9 @@ python toaster_collect_task_stats() {
     import bb.utils
     import os
 
-    toaster_statlist_file = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), "toasterstatlist")
+    toaster_statlist_file = os.path.join(e.data.getVar('BUILDSTATS_BASE'), "toasterstatlist")
 
-    if not e.data.getVar('BUILDSTATS_BASE', True):
+    if not e.data.getVar('BUILDSTATS_BASE'):
         return # if we don't have buildstats, we cannot collect stats
 
     def stat_to_float(value):
@@ -246,7 +246,7 @@ python toaster_buildhistory_dump() {
     import re
     BUILDHISTORY_DIR = e.data.expand("${TOPDIR}/buildhistory")
     BUILDHISTORY_DIR_IMAGE_BASE = e.data.expand("%s/images/${MACHINE_ARCH}/${TCLIBC}/"% BUILDHISTORY_DIR)
-    pkgdata_dir = e.data.getVar("PKGDATA_DIR", True)
+    pkgdata_dir = e.data.getVar("PKGDATA_DIR")
 
 
     # scan the build targets for this build
diff --git a/meta/classes/toolchain-scripts.bbclass b/meta/classes/toolchain-scripts.bbclass
index 0e11f2d7a0..44b4e24255 100644
--- a/meta/classes/toolchain-scripts.bbclass
+++ b/meta/classes/toolchain-scripts.bbclass
@@ -139,9 +139,9 @@ toolchain_create_sdk_siteconfig[vardepsexclude] = "TOOLCHAIN_CONFIGSITE_SYSROOTC
 python __anonymous () {
     import oe.classextend
     deps = ""
-    for dep in (d.getVar('TOOLCHAIN_NEED_CONFIGSITE_CACHE', True) or "").split():
+    for dep in (d.getVar('TOOLCHAIN_NEED_CONFIGSITE_CACHE') or "").split():
         deps += " %s:do_populate_sysroot" % dep
-        for variant in (d.getVar('MULTILIB_VARIANTS', True) or "").split():
+        for variant in (d.getVar('MULTILIB_VARIANTS') or "").split():
             clsextend = oe.classextend.ClassExtender(variant, d)
             newdep = clsextend.extend_name(dep)
             deps += " %s:do_populate_sysroot" % newdep
diff --git a/meta/classes/uboot-config.bbclass b/meta/classes/uboot-config.bbclass
index 3f760f2fbe..10013b7d49 100644
--- a/meta/classes/uboot-config.bbclass
+++ b/meta/classes/uboot-config.bbclass
@@ -14,19 +14,19 @@
 UBOOT_BINARY ?= "u-boot.${UBOOT_SUFFIX}"
 
 python () {
-    ubootmachine = d.getVar("UBOOT_MACHINE", True)
+    ubootmachine = d.getVar("UBOOT_MACHINE")
     ubootconfigflags = d.getVarFlags('UBOOT_CONFIG')
-    ubootbinary = d.getVar('UBOOT_BINARY', True)
-    ubootbinaries = d.getVar('UBOOT_BINARIES', True)
+    ubootbinary = d.getVar('UBOOT_BINARY')
+    ubootbinaries = d.getVar('UBOOT_BINARIES')
     # The "doc" varflag is special, we don't want to see it here
     ubootconfigflags.pop('doc', None)
 
     if not ubootmachine and not ubootconfigflags:
-        PN = d.getVar("PN", True)
-        FILE = os.path.basename(d.getVar("FILE", True))
+        PN = d.getVar("PN")
+        FILE = os.path.basename(d.getVar("FILE"))
         bb.debug(1, "To build %s, see %s for instructions on \
             setting up your machine config" % (PN, FILE))
-        raise bb.parse.SkipPackage("Either UBOOT_MACHINE or UBOOT_CONFIG must be set in the %s machine configuration." % d.getVar("MACHINE", True))
+        raise bb.parse.SkipPackage("Either UBOOT_MACHINE or UBOOT_CONFIG must be set in the %s machine configuration." % d.getVar("MACHINE"))
 
     if ubootmachine and ubootconfigflags:
         raise bb.parse.SkipPackage("You cannot use UBOOT_MACHINE and UBOOT_CONFIG at the same time.")
@@ -37,7 +37,7 @@ python () {
     if not ubootconfigflags:
         return
 
-    ubootconfig = (d.getVar('UBOOT_CONFIG', True) or "").split()
+    ubootconfig = (d.getVar('UBOOT_CONFIG') or "").split()
     if len(ubootconfig) > 0:
         for config in ubootconfig:
             for f, v in ubootconfigflags.items():
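The UBOOT_CONFIG mechanism above is driven by varflags: each flag of UBOOT_CONFIG names a supported configuration, its value is a comma-separated list, and the anonymous function pops the special "doc" flag before matching the entries selected in UBOOT_CONFIG. A simplified model of that selection loop (data shapes are illustrative; in the real class the first item is appended to UBOOT_MACHINE and the second, when present, to IMAGE_FSTYPES):

    # Illustrative stand-ins for machine .conf settings such as
    # UBOOT_CONFIG[sd] = "mx6qsabreauto_config,sdcard".
    ubootconfigflags = {
        "sd":  "mx6qsabreauto_config,sdcard",
        "spi": "mx6qsabreauto_spinor_config",
        "doc": "Configurations supported by this recipe",
    }
    ubootconfigflags.pop("doc", None)   # drop the documentation varflag

    ubootconfig = "sd".split()          # what d.getVar('UBOOT_CONFIG') yields
    for config in ubootconfig:
        for f, v in ubootconfigflags.items():
            if config == f:
                items = v.split(",")
                print("append to UBOOT_MACHINE:", items[0])
                if len(items) > 1:
                    print("append to IMAGE_FSTYPES:", items[1])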
diff --git a/meta/classes/uboot-extlinux-config.bbclass b/meta/classes/uboot-extlinux-config.bbclass
index df91386c00..ec5fffb7bb 100644
--- a/meta/classes/uboot-extlinux-config.bbclass
+++ b/meta/classes/uboot-extlinux-config.bbclass
@@ -58,20 +58,20 @@ UBOOT_EXTLINUX_MENU_DESCRIPTION_linux ??= "${DISTRO_NAME}"
 UBOOT_EXTLINUX_CONFIG = "${B}/extlinux.conf"
 
 python create_extlinux_config() {
-    if d.getVar("UBOOT_EXTLINUX", True) != "1":
+    if d.getVar("UBOOT_EXTLINUX") != "1":
         return
 
-    if not d.getVar('WORKDIR', True):
+    if not d.getVar('WORKDIR'):
         bb.error("WORKDIR not defined, unable to package")
 
-    labels = d.getVar('UBOOT_EXTLINUX_LABELS', True)
+    labels = d.getVar('UBOOT_EXTLINUX_LABELS')
     if not labels:
         bb.fatal("UBOOT_EXTLINUX_LABELS not defined, nothing to do")
 
     if not labels.strip():
         bb.fatal("No labels, nothing to do")
 
-    cfile = d.getVar('UBOOT_EXTLINUX_CONFIG', True)
+    cfile = d.getVar('UBOOT_EXTLINUX_CONFIG')
     if not cfile:
         bb.fatal('Unable to read UBOOT_EXTLINUX_CONFIG')
 
77 77
@@ -85,34 +85,34 @@ python create_extlinux_config() {
             for label in labels.split():
                 localdata = bb.data.createCopy(d)
 
-                overrides = localdata.getVar('OVERRIDES', True)
+                overrides = localdata.getVar('OVERRIDES')
                 if not overrides:
                     bb.fatal('OVERRIDES not defined')
 
                 localdata.setVar('OVERRIDES', label + ':' + overrides)
                 bb.data.update_data(localdata)
 
-                extlinux_console = localdata.getVar('UBOOT_EXTLINUX_CONSOLE', True)
+                extlinux_console = localdata.getVar('UBOOT_EXTLINUX_CONSOLE')
 
-                menu_description = localdata.getVar('UBOOT_EXTLINUX_MENU_DESCRIPTION', True)
+                menu_description = localdata.getVar('UBOOT_EXTLINUX_MENU_DESCRIPTION')
                 if not menu_description:
                     menu_description = label
 
-                root = localdata.getVar('UBOOT_EXTLINUX_ROOT', True)
+                root = localdata.getVar('UBOOT_EXTLINUX_ROOT')
                 if not root:
                     bb.fatal('UBOOT_EXTLINUX_ROOT not defined')
 
-                kernel_image = localdata.getVar('UBOOT_EXTLINUX_KERNEL_IMAGE', True)
-                fdtdir = localdata.getVar('UBOOT_EXTLINUX_FDTDIR', True)
+                kernel_image = localdata.getVar('UBOOT_EXTLINUX_KERNEL_IMAGE')
+                fdtdir = localdata.getVar('UBOOT_EXTLINUX_FDTDIR')
                 if fdtdir:
                     cfgfile.write('LABEL %s\n\tKERNEL %s\n\tFDTDIR %s\n' %
                                   (menu_description, kernel_image, fdtdir))
                 else:
                     cfgfile.write('LABEL %s\n\tKERNEL %s\n' % (menu_description, kernel_image))
 
-                kernel_args = localdata.getVar('UBOOT_EXTLINUX_KERNEL_ARGS', True)
+                kernel_args = localdata.getVar('UBOOT_EXTLINUX_KERNEL_ARGS')
 
-                initrd = localdata.getVar('UBOOT_EXTLINUX_INITRD', True)
+                initrd = localdata.getVar('UBOOT_EXTLINUX_INITRD')
                 if initrd:
                     cfgfile.write('\tINITRD %s\n'% initrd)
 
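
create_extlinux_config() resolves each label's settings by copying the datastore and prepending the label to OVERRIDES, so a conditional assignment such as UBOOT_EXTLINUX_KERNEL_IMAGE_rescue wins for the rescue label. A simplified model of that lookup (the lookup() helper and the flat dict are illustrative; BitBake's real override resolution is more involved):

    def lookup(store, name, overrides):
        # Return VAR_<override> for the first matching override,
        # falling back to the plain VAR value.
        for o in overrides.split(':'):
            key = '%s_%s' % (name, o)
            if key in store:
                return store[key]
        return store.get(name)

    store = {
        'UBOOT_EXTLINUX_KERNEL_IMAGE': '../zImage',
        'UBOOT_EXTLINUX_KERNEL_IMAGE_rescue': '../zImage-rescue',
    }

    base_overrides = 'arm:armv7a'   # illustrative OVERRIDES value
    for label in ('default', 'rescue'):
        # The class prepends the label, as in label + ':' + overrides.
        overrides = label + ':' + base_overrides
        print(label, '->',
              lookup(store, 'UBOOT_EXTLINUX_KERNEL_IMAGE', overrides))
    # default -> ../zImage
    # rescue -> ../zImage-rescue
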
diff --git a/meta/classes/uboot-sign.bbclass b/meta/classes/uboot-sign.bbclass
index cef26b19be..65a8c49935 100644
--- a/meta/classes/uboot-sign.bbclass
+++ b/meta/classes/uboot-sign.bbclass
@@ -80,9 +80,9 @@ do_concat_dtb () {
 }
 
 python () {
-    uboot_pn = d.getVar('PREFERRED_PROVIDER_u-boot', True) or 'u-boot'
-    if d.getVar('UBOOT_SIGN_ENABLE', True) == '1' and d.getVar('PN', True) == uboot_pn:
-        kernel_pn = d.getVar('PREFERRED_PROVIDER_virtual/kernel', True)
+    uboot_pn = d.getVar('PREFERRED_PROVIDER_u-boot') or 'u-boot'
+    if d.getVar('UBOOT_SIGN_ENABLE') == '1' and d.getVar('PN') == uboot_pn:
+        kernel_pn = d.getVar('PREFERRED_PROVIDER_virtual/kernel')
 
         # u-boot.dtb and u-boot-nodtb.bin are deployed _before_ do_deploy
         # Thus, do_deploy_setscene will also populate them in DEPLOY_IMAGE_DIR
diff --git a/meta/classes/uninative.bbclass b/meta/classes/uninative.bbclass
index 11cbf9be80..177af73247 100644
--- a/meta/classes/uninative.bbclass
+++ b/meta/classes/uninative.bbclass
@@ -19,11 +19,11 @@ python uninative_event_fetchloader() {
     loader isn't already present.
     """
 
-    chksum = d.getVarFlag("UNINATIVE_CHECKSUM", d.getVar("BUILD_ARCH", True), True)
+    chksum = d.getVarFlag("UNINATIVE_CHECKSUM", d.getVar("BUILD_ARCH"), True)
     if not chksum:
-        bb.fatal("Uninative selected but not configured correctly, please set UNINATIVE_CHECKSUM[%s]" % d.getVar("BUILD_ARCH", True))
+        bb.fatal("Uninative selected but not configured correctly, please set UNINATIVE_CHECKSUM[%s]" % d.getVar("BUILD_ARCH"))
 
-    loader = d.getVar("UNINATIVE_LOADER", True)
+    loader = d.getVar("UNINATIVE_LOADER")
     loaderchksum = loader + ".chksum"
     if os.path.exists(loader) and os.path.exists(loaderchksum):
         with open(loaderchksum, "r") as f:
@@ -36,13 +36,13 @@ python uninative_event_fetchloader() {
         # Save and restore cwd as Fetch.download() does a chdir()
         olddir = os.getcwd()
 
-        tarball = d.getVar("UNINATIVE_TARBALL", True)
-        tarballdir = os.path.join(d.getVar("UNINATIVE_DLDIR", True), chksum)
+        tarball = d.getVar("UNINATIVE_TARBALL")
+        tarballdir = os.path.join(d.getVar("UNINATIVE_DLDIR"), chksum)
         tarballpath = os.path.join(tarballdir, tarball)
 
         if not os.path.exists(tarballpath):
             bb.utils.mkdirhier(tarballdir)
-            if d.getVar("UNINATIVE_URL", True) == "unset":
+            if d.getVar("UNINATIVE_URL") == "unset":
                 bb.fatal("Uninative selected but not configured, please set UNINATIVE_URL")
 
             localdata = bb.data.createCopy(d)
@@ -85,7 +85,7 @@ python uninative_event_enable() {
 }
 
 def enable_uninative(d):
-    loader = d.getVar("UNINATIVE_LOADER", True)
+    loader = d.getVar("UNINATIVE_LOADER")
     if os.path.exists(loader):
         bb.debug(2, "Enabling uninative")
         d.setVar("NATIVELSBSTRING", "universal%s" % oe.utils.host_gcc_version(d))
@@ -100,7 +100,7 @@ python uninative_changeinterp () {
     if not (bb.data.inherits_class('native', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross', d)):
         return
 
-    sstateinst = d.getVar('SSTATE_INSTDIR', True)
+    sstateinst = d.getVar('SSTATE_INSTDIR')
     for walkroot, dirs, files in os.walk(sstateinst):
         for file in files:
             if file.endswith(".so") or ".so." in file:
@@ -121,7 +121,7 @@ python uninative_changeinterp () {
 
             try:
                 subprocess.check_output(("patchelf-uninative", "--set-interpreter",
-                                         d.getVar("UNINATIVE_LOADER", True), f),
+                                         d.getVar("UNINATIVE_LOADER"), f),
                                         stderr=subprocess.STDOUT)
             except subprocess.CalledProcessError as e:
                 bb.fatal("'%s' failed with exit code %d and the following output:\n%s" %
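
Note that only the getVar() calls lose their argument in this file; the getVarFlag() call for UNINATIVE_CHECKSUM keeps its explicit third (expand) argument, since the series only drops the parameter whose default changed. The checksum itself is one varflag per build architecture, e.g. UNINATIVE_CHECKSUM[x86_64]; a rough model of the selection logic (the dict and digests below are placeholders, not real values):

    # One flag per host architecture, as configured for uninative;
    # the digests here are placeholders, not real checksums.
    checksum_flags = {
        'i686': '0000000000000000',
        'x86_64': '1111111111111111',
    }

    def get_uninative_checksum(build_arch):
        chksum = checksum_flags.get(build_arch)
        if not chksum:
            raise RuntimeError("Uninative selected but not configured correctly, "
                               "please set UNINATIVE_CHECKSUM[%s]" % build_arch)
        return chksum

    print(get_uninative_checksum('x86_64'))
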
diff --git a/meta/classes/update-alternatives.bbclass b/meta/classes/update-alternatives.bbclass
index 1fdd681315..65da9dadbe 100644
--- a/meta/classes/update-alternatives.bbclass
+++ b/meta/classes/update-alternatives.bbclass
@@ -66,8 +66,8 @@ ALTERNATIVE_PRIORITY = "10"
 UPDALTVARS = "ALTERNATIVE ALTERNATIVE_LINK_NAME ALTERNATIVE_TARGET ALTERNATIVE_PRIORITY"
 
 def gen_updatealternativesvardeps(d):
-    pkgs = (d.getVar("PACKAGES", True) or "").split()
-    vars = (d.getVar("UPDALTVARS", True) or "").split()
+    pkgs = (d.getVar("PACKAGES") or "").split()
+    vars = (d.getVar("UPDALTVARS") or "").split()
 
     # First compute them for non_pkg versions
     for v in vars:
@@ -84,7 +84,7 @@ def gen_updatealternativesvardeps(d):
                 d.appendVar('%s_VARDEPS_%s' % (v,p), ' %s:%s' % (flag, d.getVarFlag('%s_%s' % (v,p), flag, False)))
 
 def ua_extend_depends(d):
-    if not 'virtual/update-alternatives' in d.getVar('PROVIDES', True):
+    if not 'virtual/update-alternatives' in d.getVar('PROVIDES'):
         d.appendVar('DEPENDS', ' virtual/${MLPREFIX}update-alternatives')
 
 python __anonymous() {
@@ -103,8 +103,8 @@ python __anonymous() {
 
 def gen_updatealternativesvars(d):
     ret = []
-    pkgs = (d.getVar("PACKAGES", True) or "").split()
-    vars = (d.getVar("UPDALTVARS", True) or "").split()
+    pkgs = (d.getVar("PACKAGES") or "").split()
+    vars = (d.getVar("UPDALTVARS") or "").split()
 
     for v in vars:
         ret.append(v + "_VARDEPS")
@@ -123,23 +123,23 @@ populate_packages[vardeps] += "${UPDALTVARS} ${@gen_updatealternativesvars(d)}"
 # place.
 python perform_packagecopy_append () {
     # Check for deprecated usage...
-    pn = d.getVar('BPN', True)
-    if d.getVar('ALTERNATIVE_LINKS', True) != None:
+    pn = d.getVar('BPN')
+    if d.getVar('ALTERNATIVE_LINKS') != None:
         bb.fatal('%s: Use of ALTERNATIVE_LINKS/ALTERNATIVE_PATH/ALTERNATIVE_NAME is no longer supported, please convert to the updated syntax, see update-alternatives.bbclass for more info.' % pn)
 
     # Do actual update alternatives processing
-    pkgdest = d.getVar('PKGD', True)
-    for pkg in (d.getVar('PACKAGES', True) or "").split():
+    pkgdest = d.getVar('PKGD')
+    for pkg in (d.getVar('PACKAGES') or "").split():
         # If the src == dest, we know we need to rename the dest by appending ${BPN}
         link_rename = {}
-        for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split():
+        for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg) or "").split():
             alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True)
             if not alt_link:
-                alt_link = "%s/%s" % (d.getVar('bindir', True), alt_name)
+                alt_link = "%s/%s" % (d.getVar('bindir'), alt_name)
                 d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link)
 
             alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True)
-            alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or d.getVar('ALTERNATIVE_TARGET', True) or alt_link
+            alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg) or d.getVar('ALTERNATIVE_TARGET') or alt_link
             # Sometimes alt_target is specified as relative to the link name.
             alt_target = os.path.join(os.path.dirname(alt_link), alt_target)
 
@@ -189,23 +189,23 @@ python perform_packagecopy_append () {
 PACKAGESPLITFUNCS_prepend = "populate_packages_updatealternatives "
 
 python populate_packages_updatealternatives () {
-    pn = d.getVar('BPN', True)
+    pn = d.getVar('BPN')
 
     # Do actual update alternatives processing
-    pkgdest = d.getVar('PKGD', True)
-    for pkg in (d.getVar('PACKAGES', True) or "").split():
+    pkgdest = d.getVar('PKGD')
+    for pkg in (d.getVar('PACKAGES') or "").split():
         # Create post install/removal scripts
         alt_setup_links = ""
         alt_remove_links = ""
-        for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split():
+        for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg) or "").split():
             alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True)
             alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True)
-            alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or d.getVar('ALTERNATIVE_TARGET', True) or alt_link
+            alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg) or d.getVar('ALTERNATIVE_TARGET') or alt_link
             # Sometimes alt_target is specified as relative to the link name.
             alt_target = os.path.join(os.path.dirname(alt_link), alt_target)
 
             alt_priority = d.getVarFlag('ALTERNATIVE_PRIORITY_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_PRIORITY', alt_name, True)
-            alt_priority = alt_priority or d.getVar('ALTERNATIVE_PRIORITY_%s' % pkg, True) or d.getVar('ALTERNATIVE_PRIORITY', True)
+            alt_priority = alt_priority or d.getVar('ALTERNATIVE_PRIORITY_%s' % pkg) or d.getVar('ALTERNATIVE_PRIORITY')
 
             # This shouldn't trigger, as it should have been resolved earlier!
             if alt_link == alt_target:
@@ -224,32 +224,32 @@ python populate_packages_updatealternatives () {
 
         if alt_setup_links:
             # RDEPENDS setup
-            provider = d.getVar('VIRTUAL-RUNTIME_update-alternatives', True)
+            provider = d.getVar('VIRTUAL-RUNTIME_update-alternatives')
             if provider:
                 #bb.note('adding runtime requirement for update-alternatives for %s' % pkg)
                 d.appendVar('RDEPENDS_%s' % pkg, ' ' + d.getVar('MLPREFIX', False) + provider)
 
             bb.note('adding update-alternatives calls to postinst/prerm for %s' % pkg)
             bb.note('%s' % alt_setup_links)
-            postinst = d.getVar('pkg_postinst_%s' % pkg, True) or '#!/bin/sh\n'
+            postinst = d.getVar('pkg_postinst_%s' % pkg) or '#!/bin/sh\n'
             postinst += alt_setup_links
             d.setVar('pkg_postinst_%s' % pkg, postinst)
 
             bb.note('%s' % alt_remove_links)
-            prerm = d.getVar('pkg_prerm_%s' % pkg, True) or '#!/bin/sh\n'
+            prerm = d.getVar('pkg_prerm_%s' % pkg) or '#!/bin/sh\n'
             prerm += alt_remove_links
             d.setVar('pkg_prerm_%s' % pkg, prerm)
 }
 
 python package_do_filedeps_append () {
-    pn = d.getVar('BPN', True)
-    pkgdest = d.getVar('PKGDEST', True)
+    pn = d.getVar('BPN')
+    pkgdest = d.getVar('PKGDEST')
 
     for pkg in packages.split():
-        for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split():
+        for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg) or "").split():
             alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True)
             alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True)
-            alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or d.getVar('ALTERNATIVE_TARGET', True) or alt_link
+            alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg) or d.getVar('ALTERNATIVE_TARGET') or alt_link
 
             if alt_link == alt_target:
                 bb.warn('%s: alt_link == alt_target: %s == %s' % (pn, alt_link, alt_target))
@@ -261,7 +261,7 @@ python package_do_filedeps_append () {
             # Add file provide
             trans_target = oe.package.file_translate(alt_target)
             d.appendVar('FILERPROVIDES_%s_%s' % (trans_target, pkg), " " + alt_link)
-            if not trans_target in (d.getVar('FILERPROVIDESFLIST_%s' % pkg, True) or ""):
+            if not trans_target in (d.getVar('FILERPROVIDESFLIST_%s' % pkg) or ""):
                 d.appendVar('FILERPROVIDESFLIST_%s' % pkg, " " + trans_target)
 }
 
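
The repeated two-line alt_target lookup in this class encodes a fixed precedence: per-package varflag, then global varflag, then per-package variable, then global variable, and finally the link name itself. The same chain as a standalone function over simple accessors (the in-memory dicts stand in for the datastore):

    # Precedence chain for resolving an alternative's target, mirroring
    # update-alternatives.bbclass: flags beat variables, per-package
    # settings beat global ones, and the link name is the last resort.
    def resolve_alt_target(pkg, alt_name, alt_link, get_var, get_var_flag):
        return (get_var_flag('ALTERNATIVE_TARGET_%s' % pkg, alt_name)
                or get_var_flag('ALTERNATIVE_TARGET', alt_name)
                or get_var('ALTERNATIVE_TARGET_%s' % pkg)
                or get_var('ALTERNATIVE_TARGET')
                or alt_link)

    # Tiny in-memory stand-ins for the datastore accessors:
    variables = {'ALTERNATIVE_TARGET': '/usr/bin/busybox'}
    flags = {('ALTERNATIVE_TARGET', 'sh'): '/bin/busybox.sh'}

    get_var = variables.get
    get_var_flag = lambda var, flag: flags.get((var, flag))

    print(resolve_alt_target('busybox', 'sh', '/bin/sh', get_var, get_var_flag))
    # -> /bin/busybox.sh (the flag wins over the plain variable)
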
diff --git a/meta/classes/update-rc.d.bbclass b/meta/classes/update-rc.d.bbclass
index 2c3ef9edd1..2746c360fe 100644
--- a/meta/classes/update-rc.d.bbclass
+++ b/meta/classes/update-rc.d.bbclass
@@ -89,52 +89,52 @@ python populate_packages_updatercd () {
             return
         statement = "grep -q -w '/etc/init.d/functions' %s" % path
         if subprocess.call(statement, shell=True) == 0:
-            mlprefix = d.getVar('MLPREFIX', True) or ""
+            mlprefix = d.getVar('MLPREFIX') or ""
             d.appendVar('RDEPENDS_' + pkg, ' %sinitscripts-functions' % (mlprefix))
 
     def update_rcd_package(pkg):
         bb.debug(1, 'adding update-rc.d calls to preinst/postinst/prerm/postrm for %s' % pkg)
 
         localdata = bb.data.createCopy(d)
-        overrides = localdata.getVar("OVERRIDES", True)
+        overrides = localdata.getVar("OVERRIDES")
         localdata.setVar("OVERRIDES", "%s:%s" % (pkg, overrides))
         bb.data.update_data(localdata)
 
         update_rcd_auto_depend(pkg)
 
-        preinst = d.getVar('pkg_preinst_%s' % pkg, True)
+        preinst = d.getVar('pkg_preinst_%s' % pkg)
         if not preinst:
             preinst = '#!/bin/sh\n'
-        preinst += localdata.getVar('updatercd_preinst', True)
+        preinst += localdata.getVar('updatercd_preinst')
         d.setVar('pkg_preinst_%s' % pkg, preinst)
 
-        postinst = d.getVar('pkg_postinst_%s' % pkg, True)
+        postinst = d.getVar('pkg_postinst_%s' % pkg)
         if not postinst:
             postinst = '#!/bin/sh\n'
-        postinst += localdata.getVar('updatercd_postinst', True)
+        postinst += localdata.getVar('updatercd_postinst')
         d.setVar('pkg_postinst_%s' % pkg, postinst)
 
-        prerm = d.getVar('pkg_prerm_%s' % pkg, True)
+        prerm = d.getVar('pkg_prerm_%s' % pkg)
         if not prerm:
             prerm = '#!/bin/sh\n'
-        prerm += localdata.getVar('updatercd_prerm', True)
+        prerm += localdata.getVar('updatercd_prerm')
         d.setVar('pkg_prerm_%s' % pkg, prerm)
 
-        postrm = d.getVar('pkg_postrm_%s' % pkg, True)
+        postrm = d.getVar('pkg_postrm_%s' % pkg)
         if not postrm:
             postrm = '#!/bin/sh\n'
-        postrm += localdata.getVar('updatercd_postrm', True)
+        postrm += localdata.getVar('updatercd_postrm')
         d.setVar('pkg_postrm_%s' % pkg, postrm)
 
         d.appendVar('RRECOMMENDS_' + pkg, " ${MLPREFIX}${UPDATERCD}")
 
     # Check that this class isn't being inhibited (generally, by
     # systemd.bbclass) before doing any work.
-    if not d.getVar("INHIBIT_UPDATERCD_BBCLASS", True):
-        pkgs = d.getVar('INITSCRIPT_PACKAGES', True)
+    if not d.getVar("INHIBIT_UPDATERCD_BBCLASS"):
+        pkgs = d.getVar('INITSCRIPT_PACKAGES')
         if pkgs == None:
-            pkgs = d.getVar('UPDATERCPN', True)
-            packages = (d.getVar('PACKAGES', True) or "").split()
+            pkgs = d.getVar('UPDATERCPN')
+            packages = (d.getVar('PACKAGES') or "").split()
             if not pkgs in packages and packages != []:
                 pkgs = packages[0]
         for pkg in pkgs.split():
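
update_rcd_package() splices the class's shell fragments into whatever preinst/postinst/prerm/postrm scriptlets the recipe already defines, starting from a bare shebang when none exists. The accumulation pattern, reduced to the postinst case in plain Python (the fragment text is a placeholder, not the class's real updatercd_postinst body):

    # Scriptlet accumulation as in update_rcd_package(): start from the
    # recipe's existing pkg_postinst if there is one, otherwise from a
    # bare shebang, then append the class-provided fragment.
    scripts = {}   # stands in for the datastore's pkg_postinst_<pkg> vars

    def append_scriptlet(pkg, fragment):
        body = scripts.get('pkg_postinst_%s' % pkg)
        if not body:
            body = '#!/bin/sh\n'
        body += fragment
        scripts['pkg_postinst_%s' % pkg] = body

    append_scriptlet('myinit', 'update-rc.d $D myinit defaults\n')  # placeholder
    print(scripts['pkg_postinst_myinit'])
    # #!/bin/sh
    # update-rc.d $D myinit defaults
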
diff --git a/meta/classes/useradd-staticids.bbclass b/meta/classes/useradd-staticids.bbclass
index 4162774e9c..94bcbaa3d7 100644
--- a/meta/classes/useradd-staticids.bbclass
+++ b/meta/classes/useradd-staticids.bbclass
@@ -8,11 +8,11 @@ def update_useradd_static_config(d):
 
     class myArgumentParser( argparse.ArgumentParser ):
         def _print_message(self, message, file=None):
-            bb.warn("%s - %s: %s" % (d.getVar('PN', True), pkg, message))
+            bb.warn("%s - %s: %s" % (d.getVar('PN'), pkg, message))
 
         # This should never be called...
         def exit(self, status=0, message=None):
-            message = message or ("%s - %s: useradd.bbclass: Argument parsing exited" % (d.getVar('PN', True), pkg))
+            message = message or ("%s - %s: useradd.bbclass: Argument parsing exited" % (d.getVar('PN'), pkg))
             error(message)
 
         def error(self, message):
@@ -52,10 +52,10 @@ def update_useradd_static_config(d):
 
     def handle_missing_id(id, type, pkg):
         # For backwards compatibility we accept "1" in addition to "error"
-        if d.getVar('USERADD_ERROR_DYNAMIC', True) == 'error' or d.getVar('USERADD_ERROR_DYNAMIC', True) == '1':
-            raise NotImplementedError("%s - %s: %sname %s does not have a static ID defined. Skipping it." % (d.getVar('PN', True), pkg, type, id))
-        elif d.getVar('USERADD_ERROR_DYNAMIC', True) == 'warn':
-            bb.warn("%s - %s: %sname %s does not have a static ID defined." % (d.getVar('PN', True), pkg, type, id))
+        if d.getVar('USERADD_ERROR_DYNAMIC') == 'error' or d.getVar('USERADD_ERROR_DYNAMIC') == '1':
+            raise NotImplementedError("%s - %s: %sname %s does not have a static ID defined. Skipping it." % (d.getVar('PN'), pkg, type, id))
+        elif d.getVar('USERADD_ERROR_DYNAMIC') == 'warn':
+            bb.warn("%s - %s: %sname %s does not have a static ID defined." % (d.getVar('PN'), pkg, type, id))
 
     # We parse and rewrite the useradd components
     def rewrite_useradd(params):
@@ -89,8 +89,8 @@ def update_useradd_static_config(d):
     # paths are resolved via BBPATH
     def get_passwd_list(d):
         str = ""
-        bbpath = d.getVar('BBPATH', True)
-        passwd_tables = d.getVar('USERADD_UID_TABLES', True)
+        bbpath = d.getVar('BBPATH')
+        passwd_tables = d.getVar('USERADD_UID_TABLES')
         if not passwd_tables:
             passwd_tables = 'files/passwd'
         for conf_file in passwd_tables.split():
@@ -106,7 +106,7 @@ def update_useradd_static_config(d):
             try:
                 uaargs = parser.parse_args(re.split('''[ \t]+(?=(?:[^'"]|'[^']*'|"[^"]*")*$)''', param))
             except:
-                bb.fatal("%s: Unable to parse arguments for USERADD_PARAM_%s: '%s'" % (d.getVar('PN', True), pkg, param))
+                bb.fatal("%s: Unable to parse arguments for USERADD_PARAM_%s: '%s'" % (d.getVar('PN'), pkg, param))
 
             # Read all passwd files specified in USERADD_UID_TABLES or files/passwd
             # Use the standard passwd layout:
@@ -130,7 +130,7 @@ def update_useradd_static_config(d):
             field = users[uaargs.LOGIN]
 
             if uaargs.uid and field[2] and (uaargs.uid != field[2]):
-                bb.warn("%s: Changing username %s's uid from (%s) to (%s), verify configuration files!" % (d.getVar('PN', True), uaargs.LOGIN, uaargs.uid, field[2]))
+                bb.warn("%s: Changing username %s's uid from (%s) to (%s), verify configuration files!" % (d.getVar('PN'), uaargs.LOGIN, uaargs.uid, field[2]))
             uaargs.uid = field[2] or uaargs.uid
 
             # Determine the possible groupname
@@ -158,12 +158,12 @@ def update_useradd_static_config(d):
                 # We want to add a group, but we don't know it's name... so we can't add the group...
                 # We have to assume the group has previously been added or we'll fail on the adduser...
                 # Note: specifying the actual gid is very rare in OE, usually the group name is specified.
-                bb.warn("%s: Changing gid for login %s to %s, verify configuration files!" % (d.getVar('PN', True), uaargs.LOGIN, uaargs.groupid))
+                bb.warn("%s: Changing gid for login %s to %s, verify configuration files!" % (d.getVar('PN'), uaargs.LOGIN, uaargs.groupid))
 
             uaargs.gid = uaargs.groupid
             uaargs.user_group = None
             if newgroup:
-                groupadd = d.getVar("GROUPADD_PARAM_%s" % pkg, True)
+                groupadd = d.getVar("GROUPADD_PARAM_%s" % pkg)
                 if groupadd:
                     d.setVar("GROUPADD_PARAM_%s" % pkg, "%s; %s" % (groupadd, newgroup))
                 else:
@@ -223,8 +223,8 @@ def update_useradd_static_config(d):
     # paths are resolved via BBPATH
     def get_group_list(d):
         str = ""
-        bbpath = d.getVar('BBPATH', True)
-        group_tables = d.getVar('USERADD_GID_TABLES', True)
+        bbpath = d.getVar('BBPATH')
+        group_tables = d.getVar('USERADD_GID_TABLES')
         if not group_tables:
             group_tables = 'files/group'
         for conf_file in group_tables.split():
@@ -241,7 +241,7 @@ def update_useradd_static_config(d):
                 # If we're processing multiple lines, we could have left over values here...
                 gaargs = parser.parse_args(re.split('''[ \t]+(?=(?:[^'"]|'[^']*'|"[^"]*")*$)''', param))
             except:
-                bb.fatal("%s: Unable to parse arguments for GROUPADD_PARAM_%s: '%s'" % (d.getVar('PN', True), pkg, param))
+                bb.fatal("%s: Unable to parse arguments for GROUPADD_PARAM_%s: '%s'" % (d.getVar('PN'), pkg, param))
 
             # Read all group files specified in USERADD_GID_TABLES or files/group
             # Use the standard group layout:
@@ -264,7 +264,7 @@ def update_useradd_static_config(d):
 
             if field[2]:
                 if gaargs.gid and (gaargs.gid != field[2]):
-                    bb.warn("%s: Changing groupname %s's gid from (%s) to (%s), verify configuration files!" % (d.getVar('PN', True), gaargs.GROUP, gaargs.gid, field[2]))
+                    bb.warn("%s: Changing groupname %s's gid from (%s) to (%s), verify configuration files!" % (d.getVar('PN'), gaargs.GROUP, gaargs.gid, field[2]))
                 gaargs.gid = field[2]
 
             if not gaargs.gid or not gaargs.gid.isdigit():
@@ -288,32 +288,32 @@ def update_useradd_static_config(d):
     # the files listed in USERADD_UID/GID_TABLES. We need to tell bitbake
     # about that explicitly to trigger re-parsing and thus re-execution of
     # this code when the files change.
-    bbpath = d.getVar('BBPATH', True)
+    bbpath = d.getVar('BBPATH')
     for varname, default in (('USERADD_UID_TABLES', 'files/passwd'),
                              ('USERADD_GID_TABLES', 'files/group')):
-        tables = d.getVar(varname, True)
+        tables = d.getVar(varname)
         if not tables:
             tables = default
         for conf_file in tables.split():
             bb.parse.mark_dependency(d, bb.utils.which(bbpath, conf_file))
 
     # Load and process the users and groups, rewriting the adduser/addgroup params
-    useradd_packages = d.getVar('USERADD_PACKAGES', True)
+    useradd_packages = d.getVar('USERADD_PACKAGES')
 
     for pkg in useradd_packages.split():
         # Groupmems doesn't have anything we might want to change, so simply validating
         # is a bit of a waste -- only process useradd/groupadd
-        useradd_param = d.getVar('USERADD_PARAM_%s' % pkg, True)
+        useradd_param = d.getVar('USERADD_PARAM_%s' % pkg)
         if useradd_param:
             #bb.warn("Before: 'USERADD_PARAM_%s' - '%s'" % (pkg, useradd_param))
             d.setVar('USERADD_PARAM_%s' % pkg, rewrite_useradd(useradd_param))
-            #bb.warn("After: 'USERADD_PARAM_%s' - '%s'" % (pkg, d.getVar('USERADD_PARAM_%s' % pkg, True)))
+            #bb.warn("After: 'USERADD_PARAM_%s' - '%s'" % (pkg, d.getVar('USERADD_PARAM_%s' % pkg)))
 
-        groupadd_param = d.getVar('GROUPADD_PARAM_%s' % pkg, True)
+        groupadd_param = d.getVar('GROUPADD_PARAM_%s' % pkg)
         if groupadd_param:
             #bb.warn("Before: 'GROUPADD_PARAM_%s' - '%s'" % (pkg, groupadd_param))
             d.setVar('GROUPADD_PARAM_%s' % pkg, rewrite_groupadd(groupadd_param))
-            #bb.warn("After: 'GROUPADD_PARAM_%s' - '%s'" % (pkg, d.getVar('GROUPADD_PARAM_%s' % pkg, True)))
+            #bb.warn("After: 'GROUPADD_PARAM_%s' - '%s'" % (pkg, d.getVar('GROUPADD_PARAM_%s' % pkg)))
 
 
 
@@ -323,6 +323,6 @@ python __anonymous() {
     try:
         update_useradd_static_config(d)
     except NotImplementedError as f:
-        bb.debug(1, "Skipping recipe %s: %s" % (d.getVar('PN', True), f))
+        bb.debug(1, "Skipping recipe %s: %s" % (d.getVar('PN'), f))
         raise bb.parse.SkipPackage(f)
 }
diff --git a/meta/classes/useradd.bbclass b/meta/classes/useradd.bbclass
index 3cff08e00d..fd59969986 100644
--- a/meta/classes/useradd.bbclass
+++ b/meta/classes/useradd.bbclass
@@ -168,13 +168,13 @@ USERADDSETSCENEDEPS = ""
 
 # Recipe parse-time sanity checks
 def update_useradd_after_parse(d):
-    useradd_packages = d.getVar('USERADD_PACKAGES', True)
+    useradd_packages = d.getVar('USERADD_PACKAGES')
 
     if not useradd_packages:
         bb.fatal("%s inherits useradd but doesn't set USERADD_PACKAGES" % d.getVar('FILE', False))
 
     for pkg in useradd_packages.split():
-        if not d.getVar('USERADD_PARAM_%s' % pkg, True) and not d.getVar('GROUPADD_PARAM_%s' % pkg, True) and not d.getVar('GROUPMEMS_PARAM_%s' % pkg, True):
+        if not d.getVar('USERADD_PARAM_%s' % pkg) and not d.getVar('GROUPADD_PARAM_%s' % pkg) and not d.getVar('GROUPMEMS_PARAM_%s' % pkg):
             bb.fatal("%s inherits useradd but doesn't set USERADD_PARAM, GROUPADD_PARAM or GROUPMEMS_PARAM for package %s" % (d.getVar('FILE', False), pkg))
 
 python __anonymous() {
@@ -191,9 +191,9 @@ def get_all_cmd_params(d, cmd_type):
     param_type = cmd_type.upper() + "_PARAM_%s"
     params = []
 
-    useradd_packages = d.getVar('USERADD_PACKAGES', True) or ""
+    useradd_packages = d.getVar('USERADD_PACKAGES') or ""
     for pkg in useradd_packages.split():
-        param = d.getVar(param_type % pkg, True)
+        param = d.getVar(param_type % pkg)
         if param:
             params.append(param.rstrip(" ;"))
 
@@ -209,20 +209,20 @@ fakeroot python populate_packages_prepend () {
         required to execute on the target. Not doing so may cause
         useradd preinst to be invoked twice, causing unwanted warnings.
         """
-        preinst = d.getVar('pkg_preinst_%s' % pkg, True) or d.getVar('pkg_preinst', True)
+        preinst = d.getVar('pkg_preinst_%s' % pkg) or d.getVar('pkg_preinst')
         if not preinst:
             preinst = '#!/bin/sh\n'
         preinst += 'bbnote () {\n\techo "NOTE: $*"\n}\n'
         preinst += 'bbwarn () {\n\techo "WARNING: $*"\n}\n'
         preinst += 'bbfatal () {\n\techo "ERROR: $*"\n\texit 1\n}\n'
-        preinst += 'perform_groupadd () {\n%s}\n' % d.getVar('perform_groupadd', True)
-        preinst += 'perform_useradd () {\n%s}\n' % d.getVar('perform_useradd', True)
-        preinst += 'perform_groupmems () {\n%s}\n' % d.getVar('perform_groupmems', True)
-        preinst += d.getVar('useradd_preinst', True)
+        preinst += 'perform_groupadd () {\n%s}\n' % d.getVar('perform_groupadd')
+        preinst += 'perform_useradd () {\n%s}\n' % d.getVar('perform_useradd')
+        preinst += 'perform_groupmems () {\n%s}\n' % d.getVar('perform_groupmems')
+        preinst += d.getVar('useradd_preinst')
         d.setVar('pkg_preinst_%s' % pkg, preinst)
 
         # RDEPENDS setup
-        rdepends = d.getVar("RDEPENDS_%s" % pkg, True) or ""
+        rdepends = d.getVar("RDEPENDS_%s" % pkg) or ""
         rdepends += ' ' + d.getVar('MLPREFIX', False) + 'base-passwd'
         rdepends += ' ' + d.getVar('MLPREFIX', False) + 'shadow'
         # base-files is where the default /etc/skel is packaged
@@ -233,7 +233,7 @@ fakeroot python populate_packages_prepend () {
     # to packages specified by USERADD_PACKAGES
     if not bb.data.inherits_class('nativesdk', d) \
         and not bb.data.inherits_class('native', d):
-        useradd_packages = d.getVar('USERADD_PACKAGES', True) or ""
+        useradd_packages = d.getVar('USERADD_PACKAGES') or ""
         for pkg in useradd_packages.split():
             update_useradd_package(pkg)
 }
diff --git a/meta/classes/utility-tasks.bbclass b/meta/classes/utility-tasks.bbclass
index 7ba56e28ae..68e8217135 100644
--- a/meta/classes/utility-tasks.bbclass
+++ b/meta/classes/utility-tasks.bbclass
@@ -32,14 +32,14 @@ python do_clean() {
         bb.note("Removing " + dir)
         oe.path.remove(dir)
 
-    for f in (d.getVar('CLEANFUNCS', True) or '').split():
+    for f in (d.getVar('CLEANFUNCS') or '').split():
         bb.build.exec_func(f, d)
 }
 
 addtask checkuri
 do_checkuri[nostamp] = "1"
 python do_checkuri() {
-    src_uri = (d.getVar('SRC_URI', True) or "").split()
+    src_uri = (d.getVar('SRC_URI') or "").split()
     if len(src_uri) == 0:
         return
 
diff --git a/meta/classes/utils.bbclass b/meta/classes/utils.bbclass
index dbb5e4cbbc..640daed4a8 100644
--- a/meta/classes/utils.bbclass
+++ b/meta/classes/utils.bbclass
@@ -41,9 +41,9 @@ def oe_filter_out(f, str, d):
 
 def machine_paths(d):
     """List any existing machine specific filespath directories"""
-    machine = d.getVar("MACHINE", True)
-    filespathpkg = d.getVar("FILESPATHPKG", True).split(":")
-    for basepath in d.getVar("FILESPATHBASE", True).split(":"):
+    machine = d.getVar("MACHINE")
+    filespathpkg = d.getVar("FILESPATHPKG").split(":")
+    for basepath in d.getVar("FILESPATHBASE").split(":"):
         for pkgpath in filespathpkg:
             machinepath = os.path.join(basepath, pkgpath, machine)
             if os.path.isdir(machinepath):
@@ -52,7 +52,7 @@ def machine_paths(d):
 def is_machine_specific(d):
     """Determine whether the current recipe is machine specific"""
     machinepaths = set(machine_paths(d))
-    srcuri = d.getVar("SRC_URI", True).split()
+    srcuri = d.getVar("SRC_URI").split()
     for url in srcuri:
         fetcher = bb.fetch2.Fetch([srcuri], d)
         if url.startswith("file://"):
@@ -315,14 +315,14 @@ def explode_deps(s):
 
 def base_set_filespath(path, d):
     filespath = []
-    extrapaths = (d.getVar("FILESEXTRAPATHS", True) or "")
+    extrapaths = (d.getVar("FILESEXTRAPATHS") or "")
     # Remove default flag which was used for checking
     extrapaths = extrapaths.replace("__default:", "")
     # Don't prepend empty strings to the path list
     if extrapaths != "":
         path = extrapaths.split(":") + path
     # The ":" ensures we have an 'empty' override
-    overrides = (":" + (d.getVar("FILESOVERRIDES", True) or "")).split(":")
+    overrides = (":" + (d.getVar("FILESOVERRIDES") or "")).split(":")
     overrides.reverse()
     for o in overrides:
         for p in path:
@@ -333,7 +333,7 @@ def base_set_filespath(path, d):
 def extend_variants(d, var, extend, delim=':'):
     """Return a string of all bb class extend variants for the given extend"""
     variants = []
-    whole = d.getVar(var, True) or ""
+    whole = d.getVar(var) or ""
     for ext in whole.split():
         eext = ext.split(delim)
         if len(eext) > 1 and eext[0] == extend:
@@ -341,7 +341,7 @@ def extend_variants(d, var, extend, delim=':'):
     return " ".join(variants)
 
 def multilib_pkg_extend(d, pkg):
-    variants = (d.getVar("MULTILIB_VARIANTS", True) or "").split()
+    variants = (d.getVar("MULTILIB_VARIANTS") or "").split()
     if not variants:
         return pkg
     pkgs = pkg
@@ -352,21 +352,21 @@ def multilib_pkg_extend(d, pkg):
 def all_multilib_tune_values(d, var, unique = True, need_split = True, delim = ' '):
     """Return a string of all ${var} in all multilib tune configuration"""
     values = []
-    value = d.getVar(var, True) or ""
+    value = d.getVar(var) or ""
     if value != "":
         if need_split:
             for item in value.split(delim):
                 values.append(item)
         else:
             values.append(value)
-    variants = d.getVar("MULTILIB_VARIANTS", True) or ""
+    variants = d.getVar("MULTILIB_VARIANTS") or ""
     for item in variants.split():
         localdata = bb.data.createCopy(d)
         overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
         localdata.setVar("OVERRIDES", overrides)
         localdata.setVar("MLPREFIX", item + "-")
         bb.data.update_data(localdata)
-        value = localdata.getVar(var, True) or ""
+        value = localdata.getVar(var) or ""
         if value != "":
             if need_split:
                 for item in value.split(delim):
@@ -402,21 +402,21 @@ def all_multilib_tune_list(vars, d):
             newoverrides.append(o)
     localdata.setVar("OVERRIDES", ":".join(newoverrides))
     localdata.setVar("MLPREFIX", "")
-    origdefault = localdata.getVar("DEFAULTTUNE_MULTILIB_ORIGINAL", True)
+    origdefault = localdata.getVar("DEFAULTTUNE_MULTILIB_ORIGINAL")
     if origdefault:
         localdata.setVar("DEFAULTTUNE", origdefault)
     bb.data.update_data(localdata)
     values['ml'] = ['']
     for v in vars:
-        values[v].append(localdata.getVar(v, True))
-    variants = d.getVar("MULTILIB_VARIANTS", True) or ""
+        values[v].append(localdata.getVar(v))
+    variants = d.getVar("MULTILIB_VARIANTS") or ""
     for item in variants.split():
         localdata = bb.data.createCopy(d)
         overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
         localdata.setVar("OVERRIDES", overrides)
         localdata.setVar("MLPREFIX", item + "-")
         bb.data.update_data(localdata)
-        values[v].append(localdata.getVar(v, True))
+        values[v].append(localdata.getVar(v))
         values['ml'].append(item)
     return values
 
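
base_set_filespath() above builds the search path by crossing every FILESOVERRIDES entry with every base directory; the prepended ":" yields an empty override so the bare directories are searched last, and the reverse() puts the most specific overrides first. A condensed sketch of the cross-product (it omits the FILESEXTRAPATHS handling shown above, and the example override string is illustrative):

    import os

    def set_filespath(paths, filesoverrides):
        filespath = []
        # The ":" ensures we have an 'empty' override entry at the end.
        overrides = (":" + filesoverrides).split(":")
        overrides.reverse()   # most specific override first
        for o in overrides:
            for p in paths:
                filespath.append(os.path.join(p, o))
        return ":".join(filespath)

    print(set_filespath(['/meta/recipes-bsp/u-boot/files'],
                        'arm:qemuall:qemuarm'))
    # One colon-joined string, wrapped here for readability:
    #   /meta/recipes-bsp/u-boot/files/qemuarm
    #   /meta/recipes-bsp/u-boot/files/qemuall
    #   /meta/recipes-bsp/u-boot/files/arm
    #   /meta/recipes-bsp/u-boot/files/   (empty override, searched last)
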
diff --git a/meta/classes/waf.bbclass b/meta/classes/waf.bbclass
index 5e55833ca4..95f524012a 100644
--- a/meta/classes/waf.bbclass
+++ b/meta/classes/waf.bbclass
@@ -2,7 +2,7 @@
 DISABLE_STATIC = ""
 
 def get_waf_parallel_make(d):
-    pm = d.getVar('PARALLEL_MAKE', True)
+    pm = d.getVar('PARALLEL_MAKE')
     if pm:
         # look for '-j' and throw other options (e.g. '-l') away
         # because they might have different meaning in bjam