path: root/meta/classes-global/base.bbclass
Diffstat (limited to 'meta/classes-global/base.bbclass')
-rw-r--r--  meta/classes-global/base.bbclass | 132
1 file changed, 61 insertions(+), 71 deletions(-)
diff --git a/meta/classes-global/base.bbclass b/meta/classes-global/base.bbclass
index 0999b42daa..6be1f5c2df 100644
--- a/meta/classes-global/base.bbclass
+++ b/meta/classes-global/base.bbclass
@@ -19,6 +19,22 @@ PACKAGECONFIG_CONFARGS ??= ""
 
 inherit metadata_scm
 
+PREFERRED_TOOLCHAIN_TARGET ??= "gcc"
+PREFERRED_TOOLCHAIN_NATIVE ??= "gcc"
+PREFERRED_TOOLCHAIN_SDK ??= "gcc"
+
+PREFERRED_TOOLCHAIN = "${PREFERRED_TOOLCHAIN_TARGET}"
+PREFERRED_TOOLCHAIN:class-native = "${PREFERRED_TOOLCHAIN_NATIVE}"
+PREFERRED_TOOLCHAIN:class-cross = "${PREFERRED_TOOLCHAIN_NATIVE}"
+PREFERRED_TOOLCHAIN:class-crosssdk = "${PREFERRED_TOOLCHAIN_SDK}"
+PREFERRED_TOOLCHAIN:class-nativesdk = "${PREFERRED_TOOLCHAIN_SDK}"
+
+TOOLCHAIN ??= "${PREFERRED_TOOLCHAIN}"
+TOOLCHAIN_NATIVE ??= "${PREFERRED_TOOLCHAIN_NATIVE}"
+
+inherit_defer toolchain/${TOOLCHAIN_NATIVE}-native
+inherit_defer toolchain/${TOOLCHAIN}
+
 def lsb_distro_identifier(d):
     adjust = d.getVar('LSB_DISTRO_ADJUST')
     adjust_func = None
@@ -48,13 +64,13 @@ def get_base_dep(d):
         return ""
     return "${BASE_DEFAULT_DEPS}"
 
-BASE_DEFAULT_DEPS = "virtual/${HOST_PREFIX}gcc virtual/${HOST_PREFIX}compilerlibs virtual/libc"
+BASE_DEFAULT_DEPS = "virtual/cross-cc virtual/compilerlibs virtual/libc"
 
 BASEDEPENDS = ""
 BASEDEPENDS:class-target = "${@get_base_dep(d)}"
 BASEDEPENDS:class-nativesdk = "${@get_base_dep(d)}"
 
-DEPENDS:prepend="${BASEDEPENDS} "
+DEPENDS:prepend = "${BASEDEPENDS} "
 
 FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
 # THISDIR only works properly with imediate expansion as it has to run
@@ -139,6 +155,7 @@ do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
 do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
 do_fetch[prefuncs] += "fetcher_hashes_dummyfunc"
 do_fetch[network] = "1"
+do_fetch[umask] = "${OE_SHARED_UMASK}"
 python base_do_fetch() {
 
     src_uri = (d.getVar('SRC_URI') or "").split()
@@ -153,18 +170,29 @@ python base_do_fetch() {
 }
 
 addtask unpack after do_fetch
-do_unpack[dirs] = "${WORKDIR}"
-
-do_unpack[cleandirs] = "${@d.getVar('S') if os.path.normpath(d.getVar('S')) != os.path.normpath(d.getVar('WORKDIR')) else os.path.join('${S}', 'patches')}"
+do_unpack[cleandirs] = "${UNPACKDIR}"
 
 python base_do_unpack() {
+    import shutil
+
+    sourcedir = d.getVar('S')
+    # Intentionally keep SOURCE_BASEDIR internal to the task just for SDE
+    d.setVar("SOURCE_BASEDIR", sourcedir)
+
     src_uri = (d.getVar('SRC_URI') or "").split()
     if not src_uri:
         return
 
+    basedir = None
+    unpackdir = d.getVar('UNPACKDIR')
+    if sourcedir.startswith(unpackdir):
+        basedir = sourcedir.replace(unpackdir, '').strip("/").split('/')[0]
+    if basedir:
+        d.setVar("SOURCE_BASEDIR", unpackdir + '/' + basedir)
+
     try:
         fetcher = bb.fetch2.Fetch(src_uri, d)
-        fetcher.unpack(d.getVar('WORKDIR'))
+        fetcher.unpack(d.getVar('UNPACKDIR'))
     except bb.fetch2.BBFetchException as e:
         bb.fatal("Bitbake Fetcher Error: " + repr(e))
 }
@@ -199,8 +227,8 @@ addtask do_deploy_source_date_epoch_setscene
 addtask do_deploy_source_date_epoch before do_configure after do_patch
 
 python create_source_date_epoch_stamp() {
-    # Version: 1
-    source_date_epoch = oe.reproducible.get_source_date_epoch(d, d.getVar('S'))
+    # Version: 2
+    source_date_epoch = oe.reproducible.get_source_date_epoch(d, d.getVar('SOURCE_BASEDIR') or d.getVar('S'))
     oe.reproducible.epochfile_write(source_date_epoch, d.getVar('SDE_FILE'), d)
 }
 do_unpack[postfuncs] += "create_source_date_epoch_stamp"
@@ -249,10 +277,19 @@ def buildcfg_neededvars(d):
         bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))
 
 addhandler base_eventhandler
-base_eventhandler[eventmask] = "bb.event.ConfigParsed bb.event.MultiConfigParsed bb.event.BuildStarted bb.event.RecipePreFinalise bb.event.RecipeParsed"
+base_eventhandler[eventmask] = "bb.event.ConfigParsed bb.event.MultiConfigParsed bb.event.BuildStarted bb.event.RecipePreFinalise bb.event.RecipeParsed bb.event.RecipePreDeferredInherits"
 python base_eventhandler() {
     import bb.runqueue
 
+    if isinstance(e, bb.event.RecipePreDeferredInherits):
+        # Use this to snoop on class extensions and set these up before the deferred inherits
+        # are processed which allows overrides on conditional variables.
+        for c in ['native', 'nativesdk', 'crosssdk', 'cross']:
+            if c in e.inherits:
+                d.setVar('CLASSOVERRIDE', 'class-' + c)
+                break
+        return
+
     if isinstance(e, bb.event.ConfigParsed):
         if not d.getVar("NATIVELSBSTRING", False):
             d.setVar("NATIVELSBSTRING", lsb_distro_identifier(d))
@@ -294,16 +331,6 @@ python base_eventhandler() {
         if statusheader:
             bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))
 
-    # This code is to silence warnings where the SDK variables overwrite the
-    # target ones and we'd see duplicate key names overwriting each other
-    # for various PREFERRED_PROVIDERS
-    if isinstance(e, bb.event.RecipePreFinalise):
-        if d.getVar("TARGET_PREFIX") == d.getVar("SDK_PREFIX"):
-            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils")
-            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc")
-            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}g++")
-            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}compilerlibs")
-
     if isinstance(e, bb.event.RecipeParsed):
         #
         # If we have multiple providers of virtual/X and a PREFERRED_PROVIDER_virtual/X is set
@@ -312,7 +339,7 @@ python base_eventhandler() {
         # particular.
         #
         pn = d.getVar('PN')
-        source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
+        source_mirror_fetch = bb.utils.to_boolean(d.getVar('SOURCE_MIRROR_FETCH', False))
         if not source_mirror_fetch:
             provs = (d.getVar("PROVIDES") or "").split()
             multiprovidersallowed = (d.getVar("BB_MULTI_PROVIDER_ALLOWED") or "").split()
@@ -410,16 +437,6 @@ python () {
     oe.utils.features_backfill("DISTRO_FEATURES", d)
     oe.utils.features_backfill("MACHINE_FEATURES", d)
 
-    if d.getVar("S")[-1] == '/':
-        bb.warn("Recipe %s sets S variable with trailing slash '%s', remove it" % (d.getVar("PN"), d.getVar("S")))
-    if d.getVar("B")[-1] == '/':
-        bb.warn("Recipe %s sets B variable with trailing slash '%s', remove it" % (d.getVar("PN"), d.getVar("B")))
-
-    if os.path.normpath(d.getVar("WORKDIR")) != os.path.normpath(d.getVar("S")):
-        d.appendVar("PSEUDO_IGNORE_PATHS", ",${S}")
-    if os.path.normpath(d.getVar("WORKDIR")) != os.path.normpath(d.getVar("B")):
-        d.appendVar("PSEUDO_IGNORE_PATHS", ",${B}")
-
     # To add a recipe to the skip list , set:
     #   SKIP_RECIPE[pn] = "message"
     pn = d.getVar('PN')
@@ -463,10 +480,10 @@ python () {
     def appendVar(varname, appends):
         if not appends:
             return
-        if varname.find("DEPENDS") != -1:
+        if "DEPENDS" in varname or varname.startswith("RRECOMMENDS"):
             if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d) :
                 appends = expandFilter(appends, "", "nativesdk-")
-            elif bb.data.inherits_class('native', d):
+            elif bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d):
                 appends = expandFilter(appends, "-native", "")
             elif mlprefix:
                 appends = expandFilter(appends, "", mlprefix)
@@ -520,8 +537,8 @@ python () {
         bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)
 
     if bb.data.inherits_class('license', d):
-        check_license_format(d)
-        unmatched_license_flags = check_license_flags(d)
+        oe.license.check_license_format(d)
+        unmatched_license_flags = oe.license.check_license_flags(d)
         if unmatched_license_flags:
             for unmatched in unmatched_license_flags:
                 message = "Has a restricted license '%s' which is not listed in your LICENSE_FLAGS_ACCEPTED." % unmatched
@@ -545,7 +562,7 @@ python () {
         d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
 
     need_machine = d.getVar('COMPATIBLE_MACHINE')
-    if need_machine and not d.getVar('PARSE_ALL_RECIPES', False):
+    if need_machine and not bb.utils.to_boolean(d.getVar('PARSE_ALL_RECIPES', False)):
         import re
         compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":")
         for m in compat_machines:
@@ -554,7 +571,8 @@ python () {
         else:
             raise bb.parse.SkipRecipe("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE'))
 
-    source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False) or d.getVar('PARSE_ALL_RECIPES', False)
+    source_mirror_fetch = bb.utils.to_boolean(d.getVar('SOURCE_MIRROR_FETCH', False)) or \
+                          bb.utils.to_boolean(d.getVar('PARSE_ALL_RECIPES', False))
     if not source_mirror_fetch:
         need_host = d.getVar('COMPATIBLE_HOST')
         if need_host:
@@ -565,46 +583,18 @@ python () {
 
     bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()
 
-    check_license = False if pn.startswith("nativesdk-") else True
-    for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}",
-              "-crosssdk-${SDK_SYS}", "-crosssdk-initial-${SDK_SYS}",
-              "-cross-canadian-${TRANSLATED_TARGET_ARCH}"]:
-        if pn.endswith(d.expand(t)):
-            check_license = False
-    if pn.startswith("gcc-source-"):
-        check_license = False
-
-    if check_license and bad_licenses:
-        bad_licenses = expand_wildcard_licenses(d, bad_licenses)
-
-        exceptions = (d.getVar("INCOMPATIBLE_LICENSE_EXCEPTIONS") or "").split()
-
-        for lic_exception in exceptions:
-            if ":" in lic_exception:
-                lic_exception = lic_exception.split(":")[1]
-            if lic_exception in oe.license.obsolete_license_list():
-                bb.fatal("Obsolete license %s used in INCOMPATIBLE_LICENSE_EXCEPTIONS" % lic_exception)
-
-        pkgs = d.getVar('PACKAGES').split()
-        skipped_pkgs = {}
-        unskipped_pkgs = []
-        for pkg in pkgs:
-            remaining_bad_licenses = oe.license.apply_pkg_license_exception(pkg, bad_licenses, exceptions)
-
-            incompatible_lic = incompatible_license(d, remaining_bad_licenses, pkg)
-            if incompatible_lic:
-                skipped_pkgs[pkg] = incompatible_lic
-            else:
-                unskipped_pkgs.append(pkg)
+    pkgs = d.getVar('PACKAGES').split()
+    if pkgs:
+        skipped_pkgs = oe.license.skip_incompatible_package_licenses(d, pkgs)
+        unskipped_pkgs = [p for p in pkgs if p not in skipped_pkgs]
 
         if unskipped_pkgs:
             for pkg in skipped_pkgs:
                 bb.debug(1, "Skipping the package %s at do_rootfs because of incompatible license(s): %s" % (pkg, ' '.join(skipped_pkgs[pkg])))
-                d.setVar('_exclude_incompatible-' + pkg, ' '.join(skipped_pkgs[pkg]))
             for pkg in unskipped_pkgs:
                 bb.debug(1, "Including the package %s" % pkg)
         else:
-            incompatible_lic = incompatible_license(d, bad_licenses)
+            incompatible_lic = oe.license.incompatible_license(d, bad_licenses)
             for pkg in skipped_pkgs:
                 incompatible_lic += skipped_pkgs[pkg]
             incompatible_lic = sorted(list(set(incompatible_lic)))
@@ -674,9 +664,9 @@ python () {
         elif path.endswith('.deb'):
             d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')
 
-        # *.7z should DEPEND on p7zip-native for unpacking
+        # *.7z should DEPEND on 7zip-native for unpacking
         elif path.endswith('.7z'):
-            d.appendVarFlag('do_unpack', 'depends', ' p7zip-native:do_populate_sysroot')
+            d.appendVarFlag('do_unpack', 'depends', ' 7zip-native:do_populate_sysroot')
 
     set_packagetriplet(d)
 