path: root/meta/classes/base.bbclass
author     Joshua Lock <joshua.g.lock@intel.com>                2016-12-14 21:13:04 +0000
committer  Richard Purdie <richard.purdie@linuxfoundation.org>  2016-12-16 10:23:23 +0000
commit     c4e2c59088765d1f1de7ec57cde91980f887c2ff (patch)
tree       a2fda8ac5916fb59a711e9220c2177008cca9347 /meta/classes/base.bbclass
parent     d5e67725ac11e3296cad104470931ffa16824b90 (diff)
download   poky-c4e2c59088765d1f1de7ec57cde91980f887c2ff.tar.gz
meta: remove True option to getVar calls
getVar() now expands by default, so remove the True option from
getVar() calls with a regex search and replace.

Search made with the following regex: getVar ?\(( ?[^,()]*), True\)

(From OE-Core rev: 7c552996597faaee2fbee185b250c0ee30ea3b5f)

Signed-off-by: Joshua Lock <joshua.g.lock@intel.com>
Signed-off-by: Ross Burton <ross.burton@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
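The substitution is mechanical, so it is easy to audit or reproduce. The
sketch below applies the regex quoted above with Python's re module; the
helper name and sample line are illustrative, not part of the original
conversion. Note the pattern only matches an explicit ", True" second
argument, so non-expanding calls such as
d.getVar('INHIBIT_DEFAULT_DEPS', False) pass through unchanged, as the
diff below confirms.

    import re

    # Regex from the commit message; the wrapper is an illustrative
    # sketch, not the actual command used for the conversion.
    GETVAR_TRUE = re.compile(r'getVar ?\(( ?[^,()]*), True\)')

    def strip_expand_arg(source):
        # Rewrite getVar(<arg>, True) -> getVar(<arg>); group 1 captures
        # the variable-name argument. Calls passing False never match.
        return GETVAR_TRUE.sub(r'getVar(\1)', source)

    print(strip_expand_arg('bbpath = d.getVar("BBPATH", True).split(":")'))
    # -> bbpath = d.getVar("BBPATH").split(":")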
Diffstat (limited to 'meta/classes/base.bbclass')
-rw-r--r--  meta/classes/base.bbclass | 126
1 file changed, 63 insertions(+), 63 deletions(-)
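For reference, the API change this commit tracks: BitBake's datastore
getVar() now takes expand=True by default, so a bare d.getVar(var)
behaves like the old d.getVar(var, True), while raw reads still pass
False explicitly. A minimal standalone sketch (assuming BitBake's lib/
directory is on sys.path):

    from bb.data_smart import DataSmart

    d = DataSmart()
    d.setVar("TOPIC", "world")
    d.setVar("GREETING", "hello ${TOPIC}")

    # expand now defaults to True, so these are equivalent:
    assert d.getVar("GREETING") == "hello world"
    assert d.getVar("GREETING") == d.getVar("GREETING", True)
    # Passing False still returns the raw, unexpanded value:
    assert d.getVar("GREETING", False) == "hello ${TOPIC}"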
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass
index 19673e6913..2765ebf61b 100644
--- a/meta/classes/base.bbclass
+++ b/meta/classes/base.bbclass
@@ -16,7 +16,7 @@ OE_IMPORTS[type] = "list"
 def oe_import(d):
     import sys
 
-    bbpath = d.getVar("BBPATH", True).split(":")
+    bbpath = d.getVar("BBPATH").split(":")
     sys.path[0:0] = [os.path.join(dir, "lib") for dir in bbpath]
 
     def inject(name, value):
@@ -37,7 +37,7 @@ def oe_import(d):
 OE_IMPORTED := "${@oe_import(d)}"
 
 def lsb_distro_identifier(d):
-    adjust = d.getVar('LSB_DISTRO_ADJUST', True)
+    adjust = d.getVar('LSB_DISTRO_ADJUST')
     adjust_func = None
     if adjust:
         try:
@@ -72,7 +72,7 @@ def base_dep_prepend(d):
     # we need that built is the responsibility of the patch function / class, not
     # the application.
     if not d.getVar('INHIBIT_DEFAULT_DEPS', False):
-        if (d.getVar('HOST_SYS', True) != d.getVar('BUILD_SYS', True)):
+        if (d.getVar('HOST_SYS') != d.getVar('BUILD_SYS')):
             deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc "
     return deps
 
@@ -83,11 +83,11 @@ DEPENDS_prepend="${BASEDEPENDS} "
 FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
 # THISDIR only works properly with imediate expansion as it has to run
 # in the context of the location its used (:=)
-THISDIR = "${@os.path.dirname(d.getVar('FILE', True))}"
+THISDIR = "${@os.path.dirname(d.getVar('FILE'))}"
 
 def extra_path_elements(d):
     path = ""
-    elements = (d.getVar('EXTRANATIVEPATH', True) or "").split()
+    elements = (d.getVar('EXTRANATIVEPATH') or "").split()
     for e in elements:
         path = path + "${STAGING_BINDIR_NATIVE}/" + e + ":"
     return path
@@ -96,11 +96,11 @@ PATH_prepend = "${@extra_path_elements(d)}"
 
 def get_lic_checksum_file_list(d):
     filelist = []
-    lic_files = d.getVar("LIC_FILES_CHKSUM", True) or ''
-    tmpdir = d.getVar("TMPDIR", True)
-    s = d.getVar("S", True)
-    b = d.getVar("B", True)
-    workdir = d.getVar("WORKDIR", True)
+    lic_files = d.getVar("LIC_FILES_CHKSUM") or ''
+    tmpdir = d.getVar("TMPDIR")
+    s = d.getVar("S")
+    b = d.getVar("B")
+    workdir = d.getVar("WORKDIR")
 
     urls = lic_files.split()
     for url in urls:
@@ -116,7 +116,7 @@ def get_lic_checksum_file_list(d):
                     continue
                 filelist.append(path + ":" + str(os.path.exists(path)))
         except bb.fetch.MalformedUrl:
-            bb.fatal(d.getVar('PN', True) + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
+            bb.fatal(d.getVar('PN') + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
     return " ".join(filelist)
 
 addtask fetch
@@ -126,7 +126,7 @@ do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
 do_fetch[vardeps] += "SRCREV"
 python base_do_fetch() {
 
-    src_uri = (d.getVar('SRC_URI', True) or "").split()
+    src_uri = (d.getVar('SRC_URI') or "").split()
     if len(src_uri) == 0:
         return
 
@@ -141,31 +141,31 @@ addtask unpack after do_fetch
 do_unpack[dirs] = "${WORKDIR}"
 
 python () {
-    if d.getVar('S', True) != d.getVar('WORKDIR', True):
+    if d.getVar('S') != d.getVar('WORKDIR'):
         d.setVarFlag('do_unpack', 'cleandirs', '${S}')
     else:
         d.setVarFlag('do_unpack', 'cleandirs', os.path.join('${S}', 'patches'))
 }
 python base_do_unpack() {
-    src_uri = (d.getVar('SRC_URI', True) or "").split()
+    src_uri = (d.getVar('SRC_URI') or "").split()
     if len(src_uri) == 0:
         return
 
     try:
         fetcher = bb.fetch2.Fetch(src_uri, d)
-        fetcher.unpack(d.getVar('WORKDIR', True))
+        fetcher.unpack(d.getVar('WORKDIR'))
     except bb.fetch2.BBFetchException as e:
         bb.fatal(str(e))
 }
 
 def pkgarch_mapping(d):
     # Compatibility mappings of TUNE_PKGARCH (opt in)
-    if d.getVar("PKGARCHCOMPAT_ARMV7A", True):
-        if d.getVar("TUNE_PKGARCH", True) == "armv7a-vfp-neon":
+    if d.getVar("PKGARCHCOMPAT_ARMV7A"):
+        if d.getVar("TUNE_PKGARCH") == "armv7a-vfp-neon":
             d.setVar("TUNE_PKGARCH", "armv7a")
 
 def get_layers_branch_rev(d):
-    layers = (d.getVar("BBLAYERS", True) or "").split()
+    layers = (d.getVar("BBLAYERS") or "").split()
     layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \
         base_get_metadata_git_branch(i, None).strip(), \
         base_get_metadata_git_revision(i, None)) \
@@ -192,7 +192,7 @@ BUILDCFG_FUNCS[type] = "list"
 def buildcfg_vars(d):
     statusvars = oe.data.typed_value('BUILDCFG_VARS', d)
     for var in statusvars:
-        value = d.getVar(var, True)
+        value = d.getVar(var)
         if value is not None:
             yield '%-17s = "%s"' % (var, value)
 
@@ -200,7 +200,7 @@ def buildcfg_neededvars(d):
     needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
     pesteruser = []
     for v in needed_vars:
-        val = d.getVar(v, True)
+        val = d.getVar(v)
         if not val or val == 'INVALID':
             pesteruser.append(v)
 
@@ -233,7 +233,7 @@ python base_eventhandler() {
             if flines:
                 statuslines.extend(flines)
 
-        statusheader = e.data.getVar('BUILDCFG_HEADER', True)
+        statusheader = e.data.getVar('BUILDCFG_HEADER')
         if statusheader:
             bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))
 
@@ -241,7 +241,7 @@ python base_eventhandler() {
     # target ones and we'd see dulpicate key names overwriting each other
     # for various PREFERRED_PROVIDERS
     if isinstance(e, bb.event.RecipePreFinalise):
-        if e.data.getVar("TARGET_PREFIX", True) == e.data.getVar("SDK_PREFIX", True):
+        if e.data.getVar("TARGET_PREFIX") == e.data.getVar("SDK_PREFIX"):
             e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils")
             e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc-initial")
             e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc")
@@ -267,14 +267,14 @@ python base_eventhandler() {
     # sysroot since they're now "unreachable". This makes switching virtual/kernel work in
     # particular.
     #
-    pn = d.getVar('PN', True)
+    pn = d.getVar('PN')
     source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
     if not source_mirror_fetch:
-        provs = (d.getVar("PROVIDES", True) or "").split()
-        multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST", True) or "").split()
+        provs = (d.getVar("PROVIDES") or "").split()
+        multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST") or "").split()
         for p in provs:
             if p.startswith("virtual/") and p not in multiwhitelist:
-                profprov = d.getVar("PREFERRED_PROVIDER_" + p, True)
+                profprov = d.getVar("PREFERRED_PROVIDER_" + p)
                 if profprov and pn != profprov:
                     raise bb.parse.SkipPackage("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn))
 }
@@ -336,9 +336,9 @@ def set_packagetriplet(d):
     tos = []
     tvs = []
 
-    archs.append(d.getVar("PACKAGE_ARCHS", True).split())
-    tos.append(d.getVar("TARGET_OS", True))
-    tvs.append(d.getVar("TARGET_VENDOR", True))
+    archs.append(d.getVar("PACKAGE_ARCHS").split())
+    tos.append(d.getVar("TARGET_OS"))
+    tvs.append(d.getVar("TARGET_VENDOR"))
 
     def settriplet(d, varname, archs, tos, tvs):
         triplets = []
@@ -350,16 +350,16 @@ def set_packagetriplet(d):
 
     settriplet(d, "PKGTRIPLETS", archs, tos, tvs)
 
-    variants = d.getVar("MULTILIB_VARIANTS", True) or ""
+    variants = d.getVar("MULTILIB_VARIANTS") or ""
     for item in variants.split():
         localdata = bb.data.createCopy(d)
         overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
         localdata.setVar("OVERRIDES", overrides)
         bb.data.update_data(localdata)
 
-        archs.append(localdata.getVar("PACKAGE_ARCHS", True).split())
-        tos.append(localdata.getVar("TARGET_OS", True))
-        tvs.append(localdata.getVar("TARGET_VENDOR", True))
+        archs.append(localdata.getVar("PACKAGE_ARCHS").split())
+        tos.append(localdata.getVar("TARGET_OS"))
+        tvs.append(localdata.getVar("TARGET_VENDOR"))
 
     settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs)
 
@@ -374,10 +374,10 @@ python () {
     # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends"
     pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
     if pkgconfigflags:
-        pkgconfig = (d.getVar('PACKAGECONFIG', True) or "").split()
-        pn = d.getVar("PN", True)
+        pkgconfig = (d.getVar('PACKAGECONFIG') or "").split()
+        pn = d.getVar("PN")
 
-        mlprefix = d.getVar("MLPREFIX", True)
+        mlprefix = d.getVar("MLPREFIX")
 
         def expandFilter(appends, extension, prefix):
             appends = bb.utils.explode_deps(d.expand(" ".join(appends)))
@@ -419,7 +419,7 @@ python () {
             num = len(items)
             if num > 4:
                 bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend can be specified!"
-                    % (d.getVar('PN', True), flag))
+                    % (d.getVar('PN'), flag))
 
             if flag in pkgconfig:
                 if num >= 3 and items[2]:
@@ -434,8 +434,8 @@ python () {
         appendVar('RDEPENDS_${PN}', extrardeps)
         appendVar('PACKAGECONFIG_CONFARGS', extraconf)
 
-    pn = d.getVar('PN', True)
-    license = d.getVar('LICENSE', True)
+    pn = d.getVar('PN')
+    license = d.getVar('LICENSE')
     if license == "INVALID":
         bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)
 
@@ -465,26 +465,26 @@ python () {
         d.setVarFlag('do_devshell', 'fakeroot', '1')
         d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
 
-    need_machine = d.getVar('COMPATIBLE_MACHINE', True)
+    need_machine = d.getVar('COMPATIBLE_MACHINE')
     if need_machine:
         import re
-        compat_machines = (d.getVar('MACHINEOVERRIDES', True) or "").split(":")
+        compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":")
         for m in compat_machines:
             if re.match(need_machine, m):
                 break
         else:
-            raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE', True))
+            raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE'))
 
     source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
     if not source_mirror_fetch:
-        need_host = d.getVar('COMPATIBLE_HOST', True)
+        need_host = d.getVar('COMPATIBLE_HOST')
         if need_host:
             import re
-            this_host = d.getVar('HOST_SYS', True)
+            this_host = d.getVar('HOST_SYS')
             if not re.match(need_host, this_host):
                 raise bb.parse.SkipPackage("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)
 
-    bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE', True) or "").split()
+    bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()
 
     check_license = False if pn.startswith("nativesdk-") else True
     for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}",
@@ -503,21 +503,21 @@ python () {
         for lic in bad_licenses:
             spdx_license = return_spdx(d, lic)
             for w in ["LGPLv2_WHITELIST_", "WHITELIST_"]:
-                whitelist.extend((d.getVar(w + lic, True) or "").split())
+                whitelist.extend((d.getVar(w + lic) or "").split())
                 if spdx_license:
-                    whitelist.extend((d.getVar(w + spdx_license, True) or "").split())
+                    whitelist.extend((d.getVar(w + spdx_license) or "").split())
                 '''
                 We need to track what we are whitelisting and why. If pn is
                 incompatible we need to be able to note that the image that
                 is created may infact contain incompatible licenses despite
                 INCOMPATIBLE_LICENSE being set.
                 '''
-                incompatwl.extend((d.getVar(w + lic, True) or "").split())
+                incompatwl.extend((d.getVar(w + lic) or "").split())
                 if spdx_license:
-                    incompatwl.extend((d.getVar(w + spdx_license, True) or "").split())
+                    incompatwl.extend((d.getVar(w + spdx_license) or "").split())
 
         if not pn in whitelist:
-            pkgs = d.getVar('PACKAGES', True).split()
+            pkgs = d.getVar('PACKAGES').split()
             skipped_pkgs = []
             unskipped_pkgs = []
             for pkg in pkgs:
@@ -529,7 +529,7 @@ python () {
             if unskipped_pkgs:
                 for pkg in skipped_pkgs:
                     bb.debug(1, "SKIPPING the package " + pkg + " at do_rootfs because it's " + license)
-                    mlprefix = d.getVar('MLPREFIX', True)
+                    mlprefix = d.getVar('MLPREFIX')
                     d.setVar('LICENSE_EXCLUSION-' + mlprefix + pkg, 1)
                 for pkg in unskipped_pkgs:
                     bb.debug(1, "INCLUDING the package " + pkg)
@@ -545,8 +545,8 @@ python () {
         # matching of license expressions - just check that all license strings
         # in LICENSE_<pkg> are found in LICENSE.
         license_set = oe.license.list_licenses(license)
-        for pkg in d.getVar('PACKAGES', True).split():
-            pkg_license = d.getVar('LICENSE_' + pkg, True)
+        for pkg in d.getVar('PACKAGES').split():
+            pkg_license = d.getVar('LICENSE_' + pkg)
             if pkg_license:
                 unlisted = oe.license.list_licenses(pkg_license) - license_set
                 if unlisted:
@@ -554,7 +554,7 @@ python () {
554 "listed in LICENSE" % (pkg, ' '.join(unlisted))) 554 "listed in LICENSE" % (pkg, ' '.join(unlisted)))
555 555
556 needsrcrev = False 556 needsrcrev = False
557 srcuri = d.getVar('SRC_URI', True) 557 srcuri = d.getVar('SRC_URI')
558 for uri in srcuri.split(): 558 for uri in srcuri.split():
559 (scheme, _ , path) = bb.fetch.decodeurl(uri)[:3] 559 (scheme, _ , path) = bb.fetch.decodeurl(uri)[:3]
560 560
@@ -614,8 +614,8 @@ python () {
     set_packagetriplet(d)
 
     # 'multimachine' handling
-    mach_arch = d.getVar('MACHINE_ARCH', True)
-    pkg_arch = d.getVar('PACKAGE_ARCH', True)
+    mach_arch = d.getVar('MACHINE_ARCH')
+    pkg_arch = d.getVar('PACKAGE_ARCH')
 
     if (pkg_arch == mach_arch):
         # Already machine specific - nothing further to do
@@ -625,11 +625,11 @@ python () {
     # We always try to scan SRC_URI for urls with machine overrides
     # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
     #
-    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', True)
+    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH')
     if override != '0':
         paths = []
-        fpaths = (d.getVar('FILESPATH', True) or '').split(':')
-        machine = d.getVar('MACHINE', True)
+        fpaths = (d.getVar('FILESPATH') or '').split(':')
+        machine = d.getVar('MACHINE')
         for p in fpaths:
             if os.path.basename(p) == machine and os.path.isdir(p):
                 paths.append(p)
@@ -646,16 +646,16 @@ python () {
                         d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
                         return
 
-    packages = d.getVar('PACKAGES', True).split()
+    packages = d.getVar('PACKAGES').split()
     for pkg in packages:
-        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg, True)
+        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg)
 
         # We could look for != PACKAGE_ARCH here but how to choose
         # if multiple differences are present?
         # Look through PACKAGE_ARCHS for the priority order?
         if pkgarch and pkgarch == mach_arch:
             d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
-            bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN", True))
+            bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN"))
 }
 
 addtask cleansstate after do_clean
@@ -666,7 +666,7 @@ addtask cleanall after do_cleansstate
 do_cleansstate[nostamp] = "1"
 
 python do_cleanall() {
-    src_uri = (d.getVar('SRC_URI', True) or "").split()
+    src_uri = (d.getVar('SRC_URI') or "").split()
     if len(src_uri) == 0:
         return
 