Diffstat (limited to 'meta/classes-global')
 meta/classes-global/base.bbclass              | 132
 meta/classes-global/buildstats.bbclass        |  19
 meta/classes-global/insane.bbclass            | 592
 meta/classes-global/license.bbclass           | 184
 meta/classes-global/logging.bbclass           |  10
 meta/classes-global/mirrors.bbclass           |  20
 meta/classes-global/package.bbclass           |   6
 meta/classes-global/package_ipk.bbclass       |   2
 meta/classes-global/package_rpm.bbclass       |  62
 meta/classes-global/retain.bbclass            | 182
 meta/classes-global/sanity.bbclass            |  99
 meta/classes-global/sstate.bbclass            | 176
 meta/classes-global/staging.bbclass           |  13
 meta/classes-global/uninative.bbclass         |   8
 meta/classes-global/utility-tasks.bbclass     |  23
 meta/classes-global/utils.bbclass             |  14
 meta/classes-global/yocto-check-layer.bbclass |  62
 17 files changed, 849 insertions(+), 755 deletions(-)
diff --git a/meta/classes-global/base.bbclass b/meta/classes-global/base.bbclass
index 0999b42daa..6be1f5c2df 100644
--- a/meta/classes-global/base.bbclass
+++ b/meta/classes-global/base.bbclass
@@ -19,6 +19,22 @@ PACKAGECONFIG_CONFARGS ??= ""
 
 inherit metadata_scm
 
+PREFERRED_TOOLCHAIN_TARGET ??= "gcc"
+PREFERRED_TOOLCHAIN_NATIVE ??= "gcc"
+PREFERRED_TOOLCHAIN_SDK ??= "gcc"
+
+PREFERRED_TOOLCHAIN = "${PREFERRED_TOOLCHAIN_TARGET}"
+PREFERRED_TOOLCHAIN:class-native = "${PREFERRED_TOOLCHAIN_NATIVE}"
+PREFERRED_TOOLCHAIN:class-cross = "${PREFERRED_TOOLCHAIN_NATIVE}"
+PREFERRED_TOOLCHAIN:class-crosssdk = "${PREFERRED_TOOLCHAIN_SDK}"
+PREFERRED_TOOLCHAIN:class-nativesdk = "${PREFERRED_TOOLCHAIN_SDK}"
+
+TOOLCHAIN ??= "${PREFERRED_TOOLCHAIN}"
+TOOLCHAIN_NATIVE ??= "${PREFERRED_TOOLCHAIN_NATIVE}"
+
+inherit_defer toolchain/${TOOLCHAIN_NATIVE}-native
+inherit_defer toolchain/${TOOLCHAIN}
+
 def lsb_distro_identifier(d):
     adjust = d.getVar('LSB_DISTRO_ADJUST')
     adjust_func = None
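
Note: with this hunk the compiler for each class context is selected by a deferred inherit of a toolchain/*.bbclass file instead of being hard-wired to gcc. Distros and recipes can override the selection; a minimal conf sketch, assuming a layer such as meta-clang provides toolchain/clang.bbclass:

    # distro.conf or local.conf: clang for target code, gcc for -native tools
    PREFERRED_TOOLCHAIN_TARGET = "clang"
    PREFERRED_TOOLCHAIN_NATIVE = "gcc"

    # per-recipe override of the computed default (hypothetical recipe name)
    TOOLCHAIN:pn-example-recipe = "gcc"

Because inherit_defer is evaluated at the end of parsing, overrides such as PREFERRED_TOOLCHAIN:class-native are applied before the toolchain class is actually inherited.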
@@ -48,13 +64,13 @@ def get_base_dep(d):
         return ""
     return "${BASE_DEFAULT_DEPS}"
 
-BASE_DEFAULT_DEPS = "virtual/${HOST_PREFIX}gcc virtual/${HOST_PREFIX}compilerlibs virtual/libc"
+BASE_DEFAULT_DEPS = "virtual/cross-cc virtual/compilerlibs virtual/libc"
 
 BASEDEPENDS = ""
 BASEDEPENDS:class-target = "${@get_base_dep(d)}"
 BASEDEPENDS:class-nativesdk = "${@get_base_dep(d)}"
 
-DEPENDS:prepend="${BASEDEPENDS} "
+DEPENDS:prepend = "${BASEDEPENDS} "
 
 FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
 # THISDIR only works properly with imediate expansion as it has to run
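
Note: BASE_DEFAULT_DEPS now names toolchain-agnostic providers (virtual/cross-cc, virtual/compilerlibs) instead of the gcc-specific virtual/${HOST_PREFIX}gcc form, so the base dependency no longer presumes which compiler the selected toolchain class provides. A hedged sketch of how a gcc toolchain class might satisfy it; the exact upstream assignment may differ:

    PREFERRED_PROVIDER_virtual/cross-cc ?= "${MLPREFIX}gcc-cross-${TARGET_ARCH}"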
@@ -139,6 +155,7 @@ do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
 do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
 do_fetch[prefuncs] += "fetcher_hashes_dummyfunc"
 do_fetch[network] = "1"
+do_fetch[umask] = "${OE_SHARED_UMASK}"
 python base_do_fetch() {
 
     src_uri = (d.getVar('SRC_URI') or "").split()
@@ -153,18 +170,29 @@ python base_do_fetch() {
 }
 
 addtask unpack after do_fetch
-do_unpack[dirs] = "${WORKDIR}"
-
-do_unpack[cleandirs] = "${@d.getVar('S') if os.path.normpath(d.getVar('S')) != os.path.normpath(d.getVar('WORKDIR')) else os.path.join('${S}', 'patches')}"
+do_unpack[cleandirs] = "${UNPACKDIR}"
 
 python base_do_unpack() {
+    import shutil
+
+    sourcedir = d.getVar('S')
+    # Intentionally keep SOURCE_BASEDIR internal to the task just for SDE
+    d.setVar("SOURCE_BASEDIR", sourcedir)
+
     src_uri = (d.getVar('SRC_URI') or "").split()
     if not src_uri:
         return
 
+    basedir = None
+    unpackdir = d.getVar('UNPACKDIR')
+    if sourcedir.startswith(unpackdir):
+        basedir = sourcedir.replace(unpackdir, '').strip("/").split('/')[0]
+    if basedir:
+        d.setVar("SOURCE_BASEDIR", unpackdir + '/' + basedir)
+
     try:
         fetcher = bb.fetch2.Fetch(src_uri, d)
-        fetcher.unpack(d.getVar('WORKDIR'))
+        fetcher.unpack(d.getVar('UNPACKDIR'))
     except bb.fetch2.BBFetchException as e:
         bb.fatal("Bitbake Fetcher Error: " + repr(e))
 }
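
Note: do_unpack now extracts into ${UNPACKDIR} rather than ${WORKDIR}, and the whole directory is listed in [cleandirs], so stale files from a previous unpack no longer survive between builds. Recipes that referenced unpacked files relative to WORKDIR need updating; a hedged before/after sketch for a recipe shipping a local file (file://defaults.conf is a made-up example):

    SRC_URI += "file://defaults.conf"

    do_install() {
        # old layout, pre-UNPACKDIR:
        #install -m 0644 ${WORKDIR}/defaults.conf ${D}${sysconfdir}/defaults.conf
        # new layout:
        install -m 0644 ${UNPACKDIR}/defaults.conf ${D}${sysconfdir}/defaults.conf
    }

SOURCE_BASEDIR is deliberately kept task-internal, as the comment says: the next hunk uses it so that create_source_date_epoch_stamp() can compute SOURCE_DATE_EPOCH from the top of the unpacked tree even when S points deeper inside it.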
@@ -199,8 +227,8 @@ addtask do_deploy_source_date_epoch_setscene
 addtask do_deploy_source_date_epoch before do_configure after do_patch
 
 python create_source_date_epoch_stamp() {
-    # Version: 1
-    source_date_epoch = oe.reproducible.get_source_date_epoch(d, d.getVar('S'))
+    # Version: 2
+    source_date_epoch = oe.reproducible.get_source_date_epoch(d, d.getVar('SOURCE_BASEDIR') or d.getVar('S'))
     oe.reproducible.epochfile_write(source_date_epoch, d.getVar('SDE_FILE'), d)
 }
 do_unpack[postfuncs] += "create_source_date_epoch_stamp"
@@ -249,10 +277,19 @@ def buildcfg_neededvars(d):
         bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))
 
 addhandler base_eventhandler
-base_eventhandler[eventmask] = "bb.event.ConfigParsed bb.event.MultiConfigParsed bb.event.BuildStarted bb.event.RecipePreFinalise bb.event.RecipeParsed"
+base_eventhandler[eventmask] = "bb.event.ConfigParsed bb.event.MultiConfigParsed bb.event.BuildStarted bb.event.RecipePreFinalise bb.event.RecipeParsed bb.event.RecipePreDeferredInherits"
 python base_eventhandler() {
     import bb.runqueue
 
+    if isinstance(e, bb.event.RecipePreDeferredInherits):
+        # Use this to snoop on class extensions and set these up before the deferred inherits
+        # are processed which allows overrides on conditional variables.
+        for c in ['native', 'nativesdk', 'crosssdk', 'cross']:
+            if c in e.inherits:
+                d.setVar('CLASSOVERRIDE', 'class-' + c)
+                break
+        return
+
     if isinstance(e, bb.event.ConfigParsed):
         if not d.getVar("NATIVELSBSTRING", False):
             d.setVar("NATIVELSBSTRING", lsb_distro_identifier(d))
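
Note: RecipePreDeferredInherits fires after the recipe body is parsed but before deferred inherits (such as the toolchain/* ones added above) are processed. Setting CLASSOVERRIDE this early means conditional assignments keyed on the class context are already in effect inside the deferred classes. A hedged illustration with a hypothetical variable:

    # The :class-native value is visible to a deferred toolchain class even
    # though native.bbclass itself has not been inherited yet.
    EXAMPLE_FLAGS = "--target-flag"
    EXAMPLE_FLAGS:class-native = "--native-flag"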
@@ -294,16 +331,6 @@ python base_eventhandler() {
         if statusheader:
             bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))
 
-    # This code is to silence warnings where the SDK variables overwrite the
-    # target ones and we'd see duplicate key names overwriting each other
-    # for various PREFERRED_PROVIDERS
-    if isinstance(e, bb.event.RecipePreFinalise):
-        if d.getVar("TARGET_PREFIX") == d.getVar("SDK_PREFIX"):
-            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils")
-            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc")
-            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}g++")
-            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}compilerlibs")
-
     if isinstance(e, bb.event.RecipeParsed):
         #
         # If we have multiple providers of virtual/X and a PREFERRED_PROVIDER_virtual/X is set
@@ -312,7 +339,7 @@ python base_eventhandler() {
         # particular.
         #
         pn = d.getVar('PN')
-        source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
+        source_mirror_fetch = bb.utils.to_boolean(d.getVar('SOURCE_MIRROR_FETCH', False))
         if not source_mirror_fetch:
             provs = (d.getVar("PROVIDES") or "").split()
             multiprovidersallowed = (d.getVar("BB_MULTI_PROVIDER_ALLOWED") or "").split()
@@ -410,16 +437,6 @@ python () {
     oe.utils.features_backfill("DISTRO_FEATURES", d)
     oe.utils.features_backfill("MACHINE_FEATURES", d)
 
-    if d.getVar("S")[-1] == '/':
-        bb.warn("Recipe %s sets S variable with trailing slash '%s', remove it" % (d.getVar("PN"), d.getVar("S")))
-    if d.getVar("B")[-1] == '/':
-        bb.warn("Recipe %s sets B variable with trailing slash '%s', remove it" % (d.getVar("PN"), d.getVar("B")))
-
-    if os.path.normpath(d.getVar("WORKDIR")) != os.path.normpath(d.getVar("S")):
-        d.appendVar("PSEUDO_IGNORE_PATHS", ",${S}")
-    if os.path.normpath(d.getVar("WORKDIR")) != os.path.normpath(d.getVar("B")):
-        d.appendVar("PSEUDO_IGNORE_PATHS", ",${B}")
-
     # To add a recipe to the skip list , set:
     #   SKIP_RECIPE[pn] = "message"
     pn = d.getVar('PN')
@@ -463,10 +480,10 @@ python () {
     def appendVar(varname, appends):
         if not appends:
             return
-        if varname.find("DEPENDS") != -1:
+        if "DEPENDS" in varname or varname.startswith("RRECOMMENDS"):
             if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d) :
                 appends = expandFilter(appends, "", "nativesdk-")
-            elif bb.data.inherits_class('native', d):
+            elif bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d):
                 appends = expandFilter(appends, "-native", "")
             elif mlprefix:
                 appends = expandFilter(appends, "", mlprefix)
@@ -520,8 +537,8 @@ python () {
         bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)
 
     if bb.data.inherits_class('license', d):
-        check_license_format(d)
-        unmatched_license_flags = check_license_flags(d)
+        oe.license.check_license_format(d)
+        unmatched_license_flags = oe.license.check_license_flags(d)
         if unmatched_license_flags:
             for unmatched in unmatched_license_flags:
                 message = "Has a restricted license '%s' which is not listed in your LICENSE_FLAGS_ACCEPTED." % unmatched
@@ -545,7 +562,7 @@ python () {
         d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
 
     need_machine = d.getVar('COMPATIBLE_MACHINE')
-    if need_machine and not d.getVar('PARSE_ALL_RECIPES', False):
+    if need_machine and not bb.utils.to_boolean(d.getVar('PARSE_ALL_RECIPES', False)):
         import re
         compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":")
         for m in compat_machines:
@@ -554,7 +571,8 @@ python () {
         else:
             raise bb.parse.SkipRecipe("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE'))
 
-    source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False) or d.getVar('PARSE_ALL_RECIPES', False)
+    source_mirror_fetch = bb.utils.to_boolean(d.getVar('SOURCE_MIRROR_FETCH', False)) or \
+                          bb.utils.to_boolean(d.getVar('PARSE_ALL_RECIPES', False))
     if not source_mirror_fetch:
         need_host = d.getVar('COMPATIBLE_HOST')
         if need_host:
@@ -565,46 +583,18 @@ python () {
 
     bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()
 
-    check_license = False if pn.startswith("nativesdk-") else True
-    for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}",
-              "-crosssdk-${SDK_SYS}", "-crosssdk-initial-${SDK_SYS}",
-              "-cross-canadian-${TRANSLATED_TARGET_ARCH}"]:
-        if pn.endswith(d.expand(t)):
-            check_license = False
-    if pn.startswith("gcc-source-"):
-        check_license = False
-
-    if check_license and bad_licenses:
-        bad_licenses = expand_wildcard_licenses(d, bad_licenses)
-
-        exceptions = (d.getVar("INCOMPATIBLE_LICENSE_EXCEPTIONS") or "").split()
-
-        for lic_exception in exceptions:
-            if ":" in lic_exception:
-                lic_exception = lic_exception.split(":")[1]
-            if lic_exception in oe.license.obsolete_license_list():
-                bb.fatal("Obsolete license %s used in INCOMPATIBLE_LICENSE_EXCEPTIONS" % lic_exception)
-
-        pkgs = d.getVar('PACKAGES').split()
-        skipped_pkgs = {}
-        unskipped_pkgs = []
-        for pkg in pkgs:
-            remaining_bad_licenses = oe.license.apply_pkg_license_exception(pkg, bad_licenses, exceptions)
-
-            incompatible_lic = incompatible_license(d, remaining_bad_licenses, pkg)
-            if incompatible_lic:
-                skipped_pkgs[pkg] = incompatible_lic
-            else:
-                unskipped_pkgs.append(pkg)
+    pkgs = d.getVar('PACKAGES').split()
+    if pkgs:
+        skipped_pkgs = oe.license.skip_incompatible_package_licenses(d, pkgs)
+        unskipped_pkgs = [p for p in pkgs if p not in skipped_pkgs]
 
         if unskipped_pkgs:
             for pkg in skipped_pkgs:
                 bb.debug(1, "Skipping the package %s at do_rootfs because of incompatible license(s): %s" % (pkg, ' '.join(skipped_pkgs[pkg])))
-                d.setVar('_exclude_incompatible-' + pkg, ' '.join(skipped_pkgs[pkg]))
             for pkg in unskipped_pkgs:
                 bb.debug(1, "Including the package %s" % pkg)
         else:
-            incompatible_lic = incompatible_license(d, bad_licenses)
+            incompatible_lic = oe.license.incompatible_license(d, bad_licenses)
             for pkg in skipped_pkgs:
                 incompatible_lic += skipped_pkgs[pkg]
             incompatible_lic = sorted(list(set(incompatible_lic)))
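
Note: the open-coded wildcard expansion, INCOMPATIBLE_LICENSE_EXCEPTIONS validation and per-package license test all moved into the oe.license library. As the surviving code shows, skip_incompatible_package_licenses() returns a dict mapping each excluded package to the offending licenses (empty when nothing is excluded). A rough sketch of the caller-side contract, not the library implementation:

    pkgs = d.getVar('PACKAGES').split()
    skipped_pkgs = oe.license.skip_incompatible_package_licenses(d, pkgs)
    # e.g. {'foo-doc': ['GPL-3.0-only']} when INCOMPATIBLE_LICENSE = "GPL-3.0*"
    for pkg, lics in skipped_pkgs.items():
        bb.debug(1, "%s excluded for: %s" % (pkg, ' '.join(lics)))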
@@ -674,9 +664,9 @@ python () {
         elif path.endswith('.deb'):
             d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')
 
-        # *.7z should DEPEND on p7zip-native for unpacking
+        # *.7z should DEPEND on 7zip-native for unpacking
         elif path.endswith('.7z'):
-            d.appendVarFlag('do_unpack', 'depends', ' p7zip-native:do_populate_sysroot')
+            d.appendVarFlag('do_unpack', 'depends', ' 7zip-native:do_populate_sysroot')
 
     set_packagetriplet(d)
 
diff --git a/meta/classes-global/buildstats.bbclass b/meta/classes-global/buildstats.bbclass
index f49a67aa4f..fe64789e10 100644
--- a/meta/classes-global/buildstats.bbclass
+++ b/meta/classes-global/buildstats.bbclass
@@ -188,14 +188,17 @@ python run_buildstats () {
     # bitbake fires HeartbeatEvent even before a build has been
     # triggered, causing BUILDNAME to be None
     ########################################################################
-    if bn is not None:
-        bsdir = os.path.join(d.getVar('BUILDSTATS_BASE'), bn)
-        taskdir = os.path.join(bsdir, d.getVar('PF'))
-        if isinstance(e, bb.event.HeartbeatEvent) and bb.utils.to_boolean(d.getVar("BB_LOG_HOST_STAT_ON_INTERVAL")):
+    if bn is None:
+        return
+
+    bsdir = os.path.join(d.getVar('BUILDSTATS_BASE'), bn)
+    taskdir = os.path.join(bsdir, d.getVar('PF'))
+    if isinstance(e, bb.event.HeartbeatEvent):
+        if bb.utils.to_boolean(d.getVar("BB_LOG_HOST_STAT_ON_INTERVAL")):
             bb.utils.mkdirhier(bsdir)
             write_host_data(os.path.join(bsdir, "host_stats_interval"), e, d, "interval")
 
-    if isinstance(e, bb.event.BuildStarted):
+    elif isinstance(e, bb.event.BuildStarted):
         ########################################################################
         # If the kernel was not configured to provide I/O statistics, issue
         # a one time warning.
@@ -234,7 +237,7 @@ python run_buildstats () {
             if cpu:
                 f.write("CPU usage: %0.1f%% \n" % cpu)
 
-    if isinstance(e, bb.build.TaskStarted):
+    elif isinstance(e, bb.build.TaskStarted):
         set_timedata("__timedata_task", d, e.time)
         bb.utils.mkdirhier(taskdir)
         # write into the task event file the name and start time
@@ -276,7 +279,7 @@ addhandler run_buildstats
 run_buildstats[eventmask] = "bb.event.BuildStarted bb.event.BuildCompleted bb.event.HeartbeatEvent bb.build.TaskStarted bb.build.TaskSucceeded bb.build.TaskFailed"
 
 python runqueue_stats () {
-    import buildstats
+    import oe.buildstats
     from bb import event, runqueue
     # We should not record any samples before the first task has started,
     # because that's the first activity shown in the process chart.
@@ -286,7 +289,7 @@ python runqueue_stats () {
     # closed when the build is done.
     system_stats = d.getVar('_buildstats_system_stats', False)
     if not system_stats and isinstance(e, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted)):
-        system_stats = buildstats.SystemStats(d)
+        system_stats = oe.buildstats.SystemStats(d)
         d.setVar('_buildstats_system_stats', system_stats)
     if system_stats:
         # Ensure that we sample at important events.
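
Note: the import changes because the buildstats helper library now lives under the oe namespace (meta/lib/oe/buildstats.py), matching how the rest of the metadata imports OE Python code. The SystemStats API itself is unchanged; any out-of-tree code doing the old import needs the same one-line update:

    import oe.buildstats
    system_stats = oe.buildstats.SystemStats(d)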
diff --git a/meta/classes-global/insane.bbclass b/meta/classes-global/insane.bbclass
index e963001d09..4ef664b3ce 100644
--- a/meta/classes-global/insane.bbclass
+++ b/meta/classes-global/insane.bbclass
@@ -24,41 +24,36 @@
 #  files under exec_prefix
 # -Check if the package name is upper case
 
+# These tests are required to be enabled and pass for Yocto Project Compatible Status
+# for a layer. To change this list, please contact the Yocto Project TSC.
+CHECKLAYER_REQUIRED_TESTS = "\
+    configure-gettext configure-unsafe debug-files dep-cmp expanded-d files-invalid \
+    host-user-contaminated incompatible-license infodir installed-vs-shipped invalid-chars \
+    invalid-packageconfig la \
+    license-checksum license-exception license-exists license-file-missing license-format license-no-generic license-syntax \
+    mime mime-xdg missing-update-alternatives multilib obsolete-license \
+    packages-list patch-fuzz patch-status perllocalpod perm-config perm-line perm-link recipe-naming \
+    pkgconfig pkgvarcheck pkgv-undefined pn-overrides shebang-size src-uri-bad symlink-to-sysroot \
+    unhandled-features-check unknown-configure-option unlisted-pkg-lics uppercase-pn useless-rpaths \
+    var-undefined virtual-slash xorg-driver-abi"
+
 # Elect whether a given type of error is a warning or error, they may
 # have been set by other files.
-WARN_QA ?= " libdir xorg-driver-abi buildpaths \
-            textrel incompatible-license files-invalid \
-            infodir build-deps src-uri-bad symlink-to-sysroot multilib \
-            invalid-packageconfig host-user-contaminated uppercase-pn \
-            mime mime-xdg unlisted-pkg-lics unhandled-features-check \
-            missing-update-alternatives native-last missing-ptest \
-            license-exists license-no-generic license-syntax license-format \
-            license-incompatible license-file-missing obsolete-license \
-            32bit-time virtual-slash \
-            "
-ERROR_QA ?= "dev-so debug-deps dev-deps debug-files arch pkgconfig la \
-            perms dep-cmp pkgvarcheck perm-config perm-line perm-link \
-            split-strip packages-list pkgv-undefined var-undefined \
-            version-going-backwards expanded-d invalid-chars \
-            license-checksum dev-elf file-rdeps configure-unsafe \
-            configure-gettext perllocalpod shebang-size \
-            already-stripped installed-vs-shipped ldflags compile-host-path \
-            install-host-path pn-overrides unknown-configure-option \
-            useless-rpaths rpaths staticdev empty-dirs \
-            patch-fuzz \
-            "
+WARN_QA ?= "32bit-time native-last pep517-backend"
+ERROR_QA ?= "\
+    already-stripped arch buildpaths build-deps debug-deps dev-deps dev-elf dev-so empty-dirs file-rdeps \
+    ldflags libdir missing-ptest rpaths staticdev textrel version-going-backwards \
+    ${CHECKLAYER_REQUIRED_TESTS}"
+
 # Add usrmerge QA check based on distro feature
 ERROR_QA:append = "${@bb.utils.contains('DISTRO_FEATURES', 'usrmerge', ' usrmerge', '', d)}"
-ERROR_QA:append:layer-core = " patch-status"
 WARN_QA:append:layer-core = " missing-metadata missing-maintainer"
 
 FAKEROOT_QA = "host-user-contaminated"
 FAKEROOT_QA[doc] = "QA tests which need to run under fakeroot. If any \
 enabled tests are listed here, the do_package_qa task will run under fakeroot."
 
-ALL_QA = "${WARN_QA} ${ERROR_QA}"
-
-UNKNOWN_CONFIGURE_OPT_IGNORE ?= "--enable-nls --disable-nls --disable-silent-rules --disable-dependency-tracking --with-libtool-sysroot --disable-static"
+UNKNOWN_CONFIGURE_OPT_IGNORE ?= "--enable-nls --disable-nls --disable-silent-rules --disable-dependency-tracking --disable-static"
 
 # This is a list of directories that are expected to be empty.
 QA_EMPTY_DIRS ?= " \
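
Note: the warning/error split is now policy-driven. Everything in CHECKLAYER_REQUIRED_TESTS is pulled into ERROR_QA, and yocto-check-layer.bbclass (also updated in this commit) can insist on that list for Yocto Project Compatible status, which is why changing it goes through the Yocto Project TSC. Local relaxation still works; a hedged configuration sketch:

    # demote a required test to a warning globally (not compatible-layer safe):
    ERROR_QA:remove = "patch-fuzz"
    WARN_QA:append = " patch-fuzz"

    # or silence one test for one package, inside a recipe:
    INSANE_SKIP:${PN} += "ldflags"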
@@ -85,9 +80,10 @@ def package_qa_clean_path(path, d, pkg=None):
     return path.replace(d.getVar("TMPDIR"), "/").replace("//", "/")
 
 QAPATHTEST[shebang-size] = "package_qa_check_shebang_size"
-def package_qa_check_shebang_size(path, name, d, elf, messages):
-    import stat
-    if os.path.islink(path) or stat.S_ISFIFO(os.stat(path).st_mode) or elf:
+def package_qa_check_shebang_size(path, name, d, elf):
+    global cpath
+
+    if elf or cpath.islink(path) or not cpath.isfile(path):
         return
 
     try:
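
Note: two mechanical API changes run through all of the path tests below. First, each QAPATHTEST function loses its trailing "messages" dict and reports directly through oe.qa.handle_error(), which already honours the WARN_QA/ERROR_QA split on its own. Second, os.path calls are replaced by a shared oe.cachedpath.CachedPath instance published through the module-global cpath, so repeated islink()/isfile() probes over the same packaged tree hit a stat cache instead of the filesystem. A sketch of the new test shape, with a hypothetical check name:

    QAPATHTEST[sometest] = "package_qa_check_sometest"
    def package_qa_check_sometest(path, name, d, elf):
        global cpath
        if cpath.islink(path):
            return
        if looks_wrong(path):  # hypothetical predicate
            oe.qa.handle_error("sometest", "%s: problem in %s" % (name, path), d)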
@@ -104,50 +100,44 @@ def package_qa_check_shebang_size(path, name, d, elf, messages):
         return
 
     if len(stanza) > 129:
-        oe.qa.add_message(messages, "shebang-size", "%s: %s maximum shebang size exceeded, the maximum size is 128." % (name, package_qa_clean_path(path, d, name)))
+        oe.qa.handle_error("shebang-size", "%s: %s maximum shebang size exceeded, the maximum size is 128." % (name, package_qa_clean_path(path, d, name)), d)
         return
 
 QAPATHTEST[libexec] = "package_qa_check_libexec"
-def package_qa_check_libexec(path,name, d, elf, messages):
+def package_qa_check_libexec(path,name, d, elf):
 
     # Skip the case where the default is explicitly /usr/libexec
     libexec = d.getVar('libexecdir')
     if libexec == "/usr/libexec":
-        return True
+        return
 
     if 'libexec' in path.split(os.path.sep):
-        oe.qa.add_message(messages, "libexec", "%s: %s is using libexec please relocate to %s" % (name, package_qa_clean_path(path, d, name), libexec))
-        return False
-
-    return True
+        oe.qa.handle_error("libexec", "%s: %s is using libexec please relocate to %s" % (name, package_qa_clean_path(path, d, name), libexec), d)
 
 QAPATHTEST[rpaths] = "package_qa_check_rpath"
-def package_qa_check_rpath(file,name, d, elf, messages):
+def package_qa_check_rpath(file, name, d, elf):
     """
     Check for dangerous RPATHs
     """
     if not elf:
         return
 
-    if os.path.islink(file):
-        return
-
     bad_dirs = [d.getVar('BASE_WORKDIR'), d.getVar('STAGING_DIR_TARGET')]
 
     phdrs = elf.run_objdump("-p", d)
 
     import re
-    rpath_re = re.compile(r"\s+RPATH\s+(.*)")
+    rpath_re = re.compile(r"\s+(?:RPATH|RUNPATH)\s+(.*)")
     for line in phdrs.split("\n"):
         m = rpath_re.match(line)
         if m:
             rpath = m.group(1)
             for dir in bad_dirs:
                 if dir in rpath:
-                    oe.qa.add_message(messages, "rpaths", "package %s contains bad RPATH %s in file %s" % (name, rpath, file))
+                    oe.qa.handle_error("rpaths", "%s: %s contains bad RPATH %s" % (name, package_qa_clean_path(file, d, name), rpath), d)
 
 QAPATHTEST[useless-rpaths] = "package_qa_check_useless_rpaths"
-def package_qa_check_useless_rpaths(file, name, d, elf, messages):
+def package_qa_check_useless_rpaths(file, name, d, elf):
     """
     Check for RPATHs that are useless but not dangerous
     """
@@ -157,16 +147,13 @@ def package_qa_check_useless_rpaths(file, name, d, elf, messages):
     if not elf:
         return
 
-    if os.path.islink(file):
-        return
-
     libdir = d.getVar("libdir")
     base_libdir = d.getVar("base_libdir")
 
     phdrs = elf.run_objdump("-p", d)
 
     import re
-    rpath_re = re.compile(r"\s+RPATH\s+(.*)")
+    rpath_re = re.compile(r"\s+(?:RPATH|RUNPATH)\s+(.*)")
     for line in phdrs.split("\n"):
         m = rpath_re.match(line)
         if m:
@@ -174,31 +161,32 @@ def package_qa_check_useless_rpaths(file, name, d, elf, messages):
             if rpath_eq(rpath, libdir) or rpath_eq(rpath, base_libdir):
                 # The dynamic linker searches both these places anyway. There is no point in
                 # looking there again.
-                oe.qa.add_message(messages, "useless-rpaths", "%s: %s contains probably-redundant RPATH %s" % (name, package_qa_clean_path(file, d, name), rpath))
+                oe.qa.handle_error("useless-rpaths", "%s: %s contains probably-redundant RPATH %s" % (name, package_qa_clean_path(file, d, name), rpath), d)
 
 QAPATHTEST[dev-so] = "package_qa_check_dev"
-def package_qa_check_dev(path, name, d, elf, messages):
+def package_qa_check_dev(path, name, d, elf):
     """
     Check for ".so" library symlinks in non-dev packages
     """
-
-    if not name.endswith("-dev") and not name.endswith("-dbg") and not name.endswith("-ptest") and not name.startswith("nativesdk-") and path.endswith(".so") and os.path.islink(path):
-        oe.qa.add_message(messages, "dev-so", "non -dev/-dbg/nativesdk- package %s contains symlink .so '%s'" % \
-                 (name, package_qa_clean_path(path, d, name)))
+    global cpath
+    if not name.endswith("-dev") and not name.endswith("-dbg") and not name.endswith("-ptest") and not name.startswith("nativesdk-") and path.endswith(".so") and cpath.islink(path):
+        oe.qa.handle_error("dev-so", "non -dev/-dbg/nativesdk- package %s contains symlink .so '%s'" % \
+                 (name, package_qa_clean_path(path, d, name)), d)
 
 QAPATHTEST[dev-elf] = "package_qa_check_dev_elf"
-def package_qa_check_dev_elf(path, name, d, elf, messages):
+def package_qa_check_dev_elf(path, name, d, elf):
     """
     Check that -dev doesn't contain real shared libraries. The test has to
     check that the file is not a link and is an ELF object as some recipes
     install link-time .so files that are linker scripts.
     """
-    if name.endswith("-dev") and path.endswith(".so") and not os.path.islink(path) and elf:
-        oe.qa.add_message(messages, "dev-elf", "-dev package %s contains non-symlink .so '%s'" % \
-                 (name, package_qa_clean_path(path, d, name)))
+    global cpath
+    if name.endswith("-dev") and path.endswith(".so") and not cpath.islink(path) and elf:
+        oe.qa.handle_error("dev-elf", "-dev package %s contains non-symlink .so '%s'" % \
+                 (name, package_qa_clean_path(path, d, name)), d)
 
 QAPATHTEST[staticdev] = "package_qa_check_staticdev"
-def package_qa_check_staticdev(path, name, d, elf, messages):
+def package_qa_check_staticdev(path, name, d, elf):
     """
     Check for ".a" library in non-staticdev packages
     There are a number of exceptions to this rule, -pic packages can contain
@@ -207,22 +195,22 @@ def package_qa_check_staticdev(path, name, d, elf, messages):
     """
 
     if not name.endswith("-pic") and not name.endswith("-staticdev") and not name.endswith("-ptest") and path.endswith(".a") and not path.endswith("_nonshared.a") and not '/usr/lib/debug-static/' in path and not '/.debug-static/' in path:
-        oe.qa.add_message(messages, "staticdev", "non -staticdev package contains static .a library: %s path '%s'" % \
-                 (name, package_qa_clean_path(path, d, name)))
+        oe.qa.handle_error("staticdev", "non -staticdev package contains static .a library: %s path '%s'" % \
+                 (name, package_qa_clean_path(path, d, name)), d)
 
 QAPATHTEST[mime] = "package_qa_check_mime"
-def package_qa_check_mime(path, name, d, elf, messages):
+def package_qa_check_mime(path, name, d, elf):
     """
     Check if package installs mime types to /usr/share/mime/packages
     while no inheriting mime.bbclass
     """
 
     if d.getVar("datadir") + "/mime/packages" in path and path.endswith('.xml') and not bb.data.inherits_class("mime", d):
-        oe.qa.add_message(messages, "mime", "package contains mime types but does not inherit mime: %s path '%s'" % \
-                 (name, package_qa_clean_path(path, d, name)))
+        oe.qa.handle_error("mime", "package contains mime types but does not inherit mime: %s path '%s'" % \
+                 (name, package_qa_clean_path(path, d, name)), d)
 
 QAPATHTEST[mime-xdg] = "package_qa_check_mime_xdg"
-def package_qa_check_mime_xdg(path, name, d, elf, messages):
+def package_qa_check_mime_xdg(path, name, d, elf):
     """
     Check if package installs desktop file containing MimeType and requires
     mime-types.bbclass to create /usr/share/applications/mimeinfo.cache
@@ -245,10 +233,10 @@ def package_qa_check_mime_xdg(path, name, d, elf, messages):
         if name == d.getVar('PN'):
             pkgname = '${PN}'
         wstr += "If yes: add \'inhert mime-xdg\' and \'MIME_XDG_PACKAGES += \"%s\"\' / if no add \'INSANE_SKIP:%s += \"mime-xdg\"\' to recipe." % (pkgname, pkgname)
-        oe.qa.add_message(messages, "mime-xdg", wstr)
+        oe.qa.handle_error("mime-xdg", wstr, d)
     if mime_type_found:
-        oe.qa.add_message(messages, "mime-xdg", "%s: contains desktop file with key 'MimeType' but does not inhert mime-xdg: %s" % \
-                 (name, package_qa_clean_path(path, d, name)))
+        oe.qa.handle_error("mime-xdg", "%s: contains desktop file with key 'MimeType' but does not inhert mime-xdg: %s" % \
+                 (name, package_qa_clean_path(path, d, name)), d)
 
 def package_qa_check_libdir(d):
     """
@@ -298,7 +286,7 @@ def package_qa_check_libdir(d):
                     try:
                         elf.open()
                         messages.append("%s: found library in wrong location: %s" % (package, rel_path))
-                    except (oe.qa.NotELFFileError):
+                    except (oe.qa.NotELFFileError, FileNotFoundError):
                         pass
             if exec_re.match(rel_path):
                 if libdir not in rel_path and libexecdir not in rel_path:
@@ -307,25 +295,25 @@ def package_qa_check_libdir(d):
                     try:
                         elf.open()
                         messages.append("%s: found library in wrong location: %s" % (package, rel_path))
-                    except (oe.qa.NotELFFileError):
+                    except (oe.qa.NotELFFileError, FileNotFoundError):
                         pass
 
     if messages:
        oe.qa.handle_error("libdir", "\n".join(messages), d)
 
 QAPATHTEST[debug-files] = "package_qa_check_dbg"
-def package_qa_check_dbg(path, name, d, elf, messages):
+def package_qa_check_dbg(path, name, d, elf):
     """
     Check for ".debug" files or directories outside of the dbg package
     """
 
     if not "-dbg" in name and not "-ptest" in name:
         if '.debug' in path.split(os.path.sep):
-            oe.qa.add_message(messages, "debug-files", "%s: non debug package contains .debug directory %s" % \
-                     (name, package_qa_clean_path(path, d, name)))
+            oe.qa.handle_error("debug-files", "%s: non debug package contains .debug directory %s" % \
+                     (name, package_qa_clean_path(path, d, name)), d)
 
 QAPATHTEST[arch] = "package_qa_check_arch"
-def package_qa_check_arch(path,name,d, elf, messages):
+def package_qa_check_arch(path,name,d, elf):
     """
     Check if archs are compatible
     """
@@ -334,47 +322,47 @@ def package_qa_check_arch(path,name,d, elf, messages):
     if not elf:
         return
 
-    target_os = d.getVar('HOST_OS')
-    target_arch = d.getVar('HOST_ARCH')
+    host_os = d.getVar('HOST_OS')
+    host_arch = d.getVar('HOST_ARCH')
     provides = d.getVar('PROVIDES')
-    bpn = d.getVar('BPN')
 
-    if target_arch == "allarch":
-        pn = d.getVar('PN')
-        oe.qa.add_message(messages, "arch", pn + ": Recipe inherits the allarch class, but has packaged architecture-specific binaries")
+    if host_arch == "allarch":
+        oe.qa.handle_error("arch", "%s: inherits the allarch class, but has architecture-specific binaries %s" % \
+            (name, package_qa_clean_path(path, d, name)), d)
         return
 
-    # FIXME: Cross package confuse this check, so just skip them
-    for s in ['cross', 'nativesdk', 'cross-canadian']:
-        if bb.data.inherits_class(s, d):
-            return
+    # If this throws an exception, the machine_dict needs expanding
+    (expected_machine, expected_osabi, expected_abiversion, expected_littleendian, expected_bits) \
+        = oe.elf.machine_dict(d)[host_os][host_arch]
+
+    actual_machine = elf.machine()
+    actual_bits = elf.abiSize()
+    actual_littleendian = elf.isLittleEndian()
 
-    # avoid following links to /usr/bin (e.g. on udev builds)
-    # we will check the files pointed to anyway...
-    if os.path.islink(path):
+    # BPF don't match the target
+    if oe.qa.elf_machine_to_string(actual_machine) == "BPF":
         return
 
-    #if this will throw an exception, then fix the dict above
-    (machine, osabi, abiversion, littleendian, bits) \
-        = oe.elf.machine_dict(d)[target_os][target_arch]
+    # These targets have 32-bit userspace but 64-bit kernel, so fudge the expected values
+    if (("virtual/kernel" in provides) or bb.data.inherits_class("module", d)) and (host_os in ("linux-gnux32", "linux-muslx32", "linux-gnu_ilp32") or re.match(r'mips64.*32', d.getVar('DEFAULTTUNE'))):
+        expected_bits = 64
 
     # Check the architecture and endiannes of the binary
-    is_32 = (("virtual/kernel" in provides) or bb.data.inherits_class("module", d)) and \
-        (target_os == "linux-gnux32" or target_os == "linux-muslx32" or \
-        target_os == "linux-gnu_ilp32" or re.match(r'mips64.*32', d.getVar('DEFAULTTUNE')))
-    is_bpf = (oe.qa.elf_machine_to_string(elf.machine()) == "BPF")
-    if not ((machine == elf.machine()) or is_32 or is_bpf):
-        oe.qa.add_message(messages, "arch", "Architecture did not match (%s, expected %s) in %s" % \
-                (oe.qa.elf_machine_to_string(elf.machine()), oe.qa.elf_machine_to_string(machine), package_qa_clean_path(path, d, name)))
-    elif not ((bits == elf.abiSize()) or is_32 or is_bpf):
-        oe.qa.add_message(messages, "arch", "Bit size did not match (%d, expected %d) in %s" % \
-                (elf.abiSize(), bits, package_qa_clean_path(path, d, name)))
-    elif not ((littleendian == elf.isLittleEndian()) or is_bpf):
-        oe.qa.add_message(messages, "arch", "Endiannes did not match (%d, expected %d) in %s" % \
-                (elf.isLittleEndian(), littleendian, package_qa_clean_path(path, d, name)))
+    if expected_machine != actual_machine:
+        oe.qa.handle_error("arch", "Architecture did not match (%s, expected %s) in %s" % \
+            (oe.qa.elf_machine_to_string(actual_machine), oe.qa.elf_machine_to_string(expected_machine), package_qa_clean_path(path, d, name)), d)
+
+    if expected_bits != actual_bits:
+        oe.qa.handle_error("arch", "Bit size did not match (%d, expected %d) in %s" % \
+            (actual_bits, expected_bits, package_qa_clean_path(path, d, name)), d)
+
+    if expected_littleendian != actual_littleendian:
+        oe.qa.handle_error("arch", "Endiannes did not match (%d, expected %d) in %s" % \
+            (actual_littleendian, expected_littleendian, package_qa_clean_path(path, d, name)), d)
+package_qa_check_arch[vardepsexclude] = "DEFAULTTUNE"
 
 QAPATHTEST[desktop] = "package_qa_check_desktop"
-def package_qa_check_desktop(path, name, d, elf, messages):
+def package_qa_check_desktop(path, name, d, elf):
     """
     Run all desktop files through desktop-file-validate.
     """
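
Note: the arch test is restructured from one if/elif chain, which stopped at the first mismatch, into three independent comparisons, so a binary that is wrong in machine type, word size and endianness now reports all three problems at once. The islink() and cross/nativesdk early exits could be dropped because the walk no longer hands symlinks to the ELF tests and HOST_* is already correct for class extensions. The new vardepsexclude flag uses standard BitBake syntax to keep DEFAULTTUNE, read only for the mips64 32-bit-userspace fudge, out of the task signature:

    # flag syntax reminder; excluding a variable stops its value changes from
    # invalidating the signature of tasks using this function
    package_qa_check_arch[vardepsexclude] = "DEFAULTTUNE"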
@@ -383,10 +371,10 @@ def package_qa_check_desktop(path, name, d, elf, messages):
         output = os.popen("%s %s" % (desktop_file_validate, path))
         # This only produces output on errors
         for l in output:
-            oe.qa.add_message(messages, "desktop", "Desktop file issue: " + l.strip())
+            oe.qa.handle_error("desktop", "Desktop file issue: " + l.strip(), d)
 
 QAPATHTEST[textrel] = "package_qa_textrel"
-def package_qa_textrel(path, name, d, elf, messages):
+def package_qa_textrel(path, name, d, elf):
     """
     Check if the binary contains relocations in .text
     """
@@ -394,25 +382,18 @@ def package_qa_textrel(path, name, d, elf, messages):
     if not elf:
         return
 
-    if os.path.islink(path):
-        return
-
     phdrs = elf.run_objdump("-p", d)
-    sane = True
 
     import re
     textrel_re = re.compile(r"\s+TEXTREL\s+")
     for line in phdrs.split("\n"):
         if textrel_re.match(line):
-            sane = False
-            break
-
-    if not sane:
-        path = package_qa_clean_path(path, d, name)
-        oe.qa.add_message(messages, "textrel", "%s: ELF binary %s has relocations in .text" % (name, path))
+            path = package_qa_clean_path(path, d, name)
+            oe.qa.handle_error("textrel", "%s: ELF binary %s has relocations in .text" % (name, path), d)
+            return
 
 QAPATHTEST[ldflags] = "package_qa_hash_style"
-def package_qa_hash_style(path, name, d, elf, messages):
+def package_qa_hash_style(path, name, d, elf):
     """
     Check if the binary has the right hash style...
     """
@@ -420,9 +401,6 @@ def package_qa_hash_style(path, name, d, elf, messages):
     if not elf:
         return
 
-    if os.path.islink(path):
-        return
-
     gnu_hash = "--hash-style=gnu" in d.getVar('LDFLAGS')
     if not gnu_hash:
         gnu_hash = "--hash-style=both" in d.getVar('LDFLAGS')
@@ -444,17 +422,17 @@
             sane = True
     if has_syms and not sane:
         path = package_qa_clean_path(path, d, name)
-        oe.qa.add_message(messages, "ldflags", "File %s in package %s doesn't have GNU_HASH (didn't pass LDFLAGS?)" % (path, name))
+        oe.qa.handle_error("ldflags", "File %s in package %s doesn't have GNU_HASH (didn't pass LDFLAGS?)" % (path, name), d)
+package_qa_hash_style[vardepsexclude] = "TCLIBC"
 
 
 QAPATHTEST[buildpaths] = "package_qa_check_buildpaths"
-def package_qa_check_buildpaths(path, name, d, elf, messages):
+def package_qa_check_buildpaths(path, name, d, elf):
     """
     Check for build paths inside target files and error if paths are not
     explicitly ignored.
     """
     import stat
-
     # Ignore symlinks/devs/fifos
     mode = os.lstat(path).st_mode
     if stat.S_ISLNK(mode) or stat.S_ISBLK(mode) or stat.S_ISFIFO(mode) or stat.S_ISCHR(mode) or stat.S_ISSOCK(mode):
@@ -464,12 +442,12 @@ def package_qa_check_buildpaths(path, name, d, elf, messages):
     with open(path, 'rb') as f:
         file_content = f.read()
         if tmpdir in file_content:
-            trimmed = path.replace(os.path.join (d.getVar("PKGDEST"), name), "")
-            oe.qa.add_message(messages, "buildpaths", "File %s in package %s contains reference to TMPDIR" % (trimmed, name))
+            path = package_qa_clean_path(path, d, name)
+            oe.qa.handle_error("buildpaths", "File %s in package %s contains reference to TMPDIR" % (path, name), d)
 
 
 QAPATHTEST[xorg-driver-abi] = "package_qa_check_xorg_driver_abi"
-def package_qa_check_xorg_driver_abi(path, name, d, elf, messages):
+def package_qa_check_xorg_driver_abi(path, name, d, elf):
     """
     Check that all packages containing Xorg drivers have ABI dependencies
     """
@@ -484,33 +462,34 @@ def package_qa_check_xorg_driver_abi(path, name, d, elf, messages):
         for rdep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + name) or ""):
             if rdep.startswith("%sxorg-abi-" % mlprefix):
                 return
-        oe.qa.add_message(messages, "xorg-driver-abi", "Package %s contains Xorg driver (%s) but no xorg-abi- dependencies" % (name, os.path.basename(path)))
+        oe.qa.handle_error("xorg-driver-abi", "Package %s contains Xorg driver (%s) but no xorg-abi- dependencies" % (name, os.path.basename(path)), d)
 
 QAPATHTEST[infodir] = "package_qa_check_infodir"
-def package_qa_check_infodir(path, name, d, elf, messages):
+def package_qa_check_infodir(path, name, d, elf):
     """
     Check that /usr/share/info/dir isn't shipped in a particular package
     """
     infodir = d.expand("${infodir}/dir")
 
     if infodir in path:
-        oe.qa.add_message(messages, "infodir", "The /usr/share/info/dir file is not meant to be shipped in a particular package.")
+        oe.qa.handle_error("infodir", "The %s file is not meant to be shipped in a particular package." % infodir, d)
 
 QAPATHTEST[symlink-to-sysroot] = "package_qa_check_symlink_to_sysroot"
-def package_qa_check_symlink_to_sysroot(path, name, d, elf, messages):
+def package_qa_check_symlink_to_sysroot(path, name, d, elf):
     """
     Check that the package doesn't contain any absolute symlinks to the sysroot.
     """
-    if os.path.islink(path):
+    global cpath
+    if cpath.islink(path):
         target = os.readlink(path)
         if os.path.isabs(target):
             tmpdir = d.getVar('TMPDIR')
             if target.startswith(tmpdir):
-                trimmed = path.replace(os.path.join (d.getVar("PKGDEST"), name), "")
-                oe.qa.add_message(messages, "symlink-to-sysroot", "Symlink %s in %s points to TMPDIR" % (trimmed, name))
+                path = package_qa_clean_path(path, d, name)
+                oe.qa.handle_error("symlink-to-sysroot", "Symlink %s in %s points to TMPDIR" % (path, name), d)
 
 QAPATHTEST[32bit-time] = "check_32bit_symbols"
-def check_32bit_symbols(path, packagename, d, elf, messages):
+def check_32bit_symbols(path, packagename, d, elf):
     """
     Check that ELF files do not use any 32 bit time APIs from glibc.
     """
@@ -615,7 +594,7 @@ def check_32bit_symbols(path, packagename, d, elf, messages): | |||
615 | ) | 594 | ) |
616 | 595 | ||
617 | # elf is a oe.qa.ELFFile object | 596 | # elf is a oe.qa.ELFFile object |
618 | if elf is not None: | 597 | if elf: |
619 | phdrs = elf.run_objdump("-tw", d) | 598 | phdrs = elf.run_objdump("-tw", d) |
620 | syms = re.finditer(ptrn, phdrs) | 599 | syms = re.finditer(ptrn, phdrs) |
621 | usedapis = {sym.group('notag') for sym in syms} | 600 | usedapis = {sym.group('notag') for sym in syms} |
@@ -629,11 +608,9 @@ def check_32bit_symbols(path, packagename, d, elf, messages): | |||
629 | if not allowed: | 608 | if not allowed: |
630 | msgformat = elfpath + " uses 32-bit api '%s'" | 609 | msgformat = elfpath + " uses 32-bit api '%s'" |
631 | for sym in usedapis: | 610 | for sym in usedapis: |
632 | oe.qa.add_message(messages, '32bit-time', msgformat % sym) | 611 | oe.qa.handle_error('32bit-time', msgformat % sym, d) |
633 | oe.qa.add_message( | 612 | oe.qa.handle_error('32bit-time', 'Suppress with INSANE_SKIP = "32bit-time"', d) |
634 | messages, '32bit-time', | 613 | check_32bit_symbols[vardepsexclude] = "OVERRIDES" |
635 | 'Suppress with INSANE_SKIP = "32bit-time"' | ||
636 | ) | ||
637 | 614 | ||
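The symbol scan itself is a regex pass over objdump -tw output; a rough sketch with a deliberately shortened pattern (the real ptrn in the class covers the full set of time32 entry points):

import re

# Strip any "@GLIBC_..." version tag into the 'notag' group, as the class does.
ptrn = re.compile(r'\b(?P<notag>gmtime|localtime|mktime)(@@?GLIBC_\S+)?\b')

phdrs = "0000 g F .text 0010 mktime@@GLIBC_2.17\n0000 g F .text 0010 clock_gettime"
usedapis = {m.group('notag') for m in ptrn.finditer(phdrs)}
print(usedapis)  # {'mktime'}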
638 | # Check license variables | 615 | # Check license variables |
639 | do_populate_lic[postfuncs] += "populate_lic_qa_checksum" | 616 | do_populate_lic[postfuncs] += "populate_lic_qa_checksum" |
@@ -794,62 +771,19 @@ def qa_check_staged(path,d): | |||
794 | oe.qa.handle_error("pkgconfig", error_msg, d) | 771 | oe.qa.handle_error("pkgconfig", error_msg, d) |
795 | 772 | ||
796 | if not skip_shebang_size: | 773 | if not skip_shebang_size: |
797 | errors = {} | 774 | global cpath |
798 | package_qa_check_shebang_size(path, "", d, None, errors) | 775 | cpath = oe.cachedpath.CachedPath() |
799 | for e in errors: | 776 | package_qa_check_shebang_size(path, "", d, None) |
800 | oe.qa.handle_error(e, errors[e], d) | 777 | cpath = None |
801 | |||
802 | |||
803 | # Run all package-wide warnfuncs and errorfuncs | ||
804 | def package_qa_package(warnfuncs, errorfuncs, package, d): | ||
805 | warnings = {} | ||
806 | errors = {} | ||
807 | |||
808 | for func in warnfuncs: | ||
809 | func(package, d, warnings) | ||
810 | for func in errorfuncs: | ||
811 | func(package, d, errors) | ||
812 | |||
813 | for w in warnings: | ||
814 | oe.qa.handle_error(w, warnings[w], d) | ||
815 | for e in errors: | ||
816 | oe.qa.handle_error(e, errors[e], d) | ||
817 | |||
818 | return len(errors) == 0 | ||
819 | |||
820 | # Run all recipe-wide warnfuncs and errorfuncs | ||
821 | def package_qa_recipe(warnfuncs, errorfuncs, pn, d): | ||
822 | warnings = {} | ||
823 | errors = {} | ||
824 | |||
825 | for func in warnfuncs: | ||
826 | func(pn, d, warnings) | ||
827 | for func in errorfuncs: | ||
828 | func(pn, d, errors) | ||
829 | |||
830 | for w in warnings: | ||
831 | oe.qa.handle_error(w, warnings[w], d) | ||
832 | for e in errors: | ||
833 | oe.qa.handle_error(e, errors[e], d) | ||
834 | |||
835 | return len(errors) == 0 | ||
836 | |||
837 | def prepopulate_objdump_p(elf, d): | ||
838 | output = elf.run_objdump("-p", d) | ||
839 | return (elf.name, output) | ||
840 | 778 | ||
841 | # Walk over all files in a directory and call func | 779 | # Walk over all files in a directory and call func |
842 | def package_qa_walk(warnfuncs, errorfuncs, package, d): | 780 | def package_qa_walk(checkfuncs, package, d): |
843 | #if this will throw an exception, then fix the dict above | 781 | global cpath |
844 | target_os = d.getVar('HOST_OS') | ||
845 | target_arch = d.getVar('HOST_ARCH') | ||
846 | 782 | ||
847 | warnings = {} | ||
848 | errors = {} | ||
849 | elves = {} | 783 | elves = {} |
850 | for path in pkgfiles[package]: | 784 | for path in pkgfiles[package]: |
851 | elf = None | 785 | elf = None |
852 | if os.path.isfile(path): | 786 | if cpath.isfile(path) and not cpath.islink(path): |
853 | elf = oe.qa.ELFFile(path) | 787 | elf = oe.qa.ELFFile(path) |
854 | try: | 788 | try: |
855 | elf.open() | 789 | elf.open() |
@@ -859,24 +793,22 @@ def package_qa_walk(warnfuncs, errorfuncs, package, d): | |||
859 | if elf: | 793 | if elf: |
860 | elves[path] = elf | 794 | elves[path] = elf |
861 | 795 | ||
796 | def prepopulate_objdump_p(elf, d): | ||
797 | output = elf.run_objdump("-p", d) | ||
798 | return (elf.name, output) | ||
799 | |||
862 | results = oe.utils.multiprocess_launch(prepopulate_objdump_p, elves.values(), d, extraargs=(d,)) | 800 | results = oe.utils.multiprocess_launch(prepopulate_objdump_p, elves.values(), d, extraargs=(d,)) |
863 | for item in results: | 801 | for item in results: |
864 | elves[item[0]].set_objdump("-p", item[1]) | 802 | elves[item[0]].set_objdump("-p", item[1]) |
865 | 803 | ||
866 | for path in pkgfiles[package]: | 804 | for path in pkgfiles[package]: |
867 | if path in elves: | 805 | elf = elves.get(path) |
868 | elves[path].open() | 806 | if elf: |
869 | for func in warnfuncs: | 807 | elf.open() |
870 | func(path, package, d, elves.get(path), warnings) | 808 | for func in checkfuncs: |
871 | for func in errorfuncs: | 809 | func(path, package, d, elf) |
872 | func(path, package, d, elves.get(path), errors) | 810 | if elf: |
873 | if path in elves: | 811 | elf.close() |
874 | elves[path].close() | ||
875 | |||
876 | for w in warnings: | ||
877 | oe.qa.handle_error(w, warnings[w], d) | ||
878 | for e in errors: | ||
879 | oe.qa.handle_error(e, errors[e], d) | ||
880 | 812 | ||
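The walk keeps the prepopulation trick: run every objdump call once in parallel, then hand each check a cached result. Generically (multiprocessing standing in for oe.utils.multiprocess_launch):

from multiprocessing import Pool

def expensive(item):
    # Stand-in for one objdump invocation on one ELF file.
    return (item, item * 2)

if __name__ == "__main__":
    with Pool() as pool:
        cache = dict(pool.map(expensive, [1, 2, 3]))
    # Checks then read from the cache instead of re-running the tool each time.
    print(cache[2])  # 4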
881 | def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d): | 813 | def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d): |
882 | # Don't do this check for kernel/module recipes, there aren't too many debug/development | 814 | # Don't do this check for kernel/module recipes, there aren't too many debug/development |
@@ -893,25 +825,30 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d): | |||
893 | 825 | ||
894 | # Now do the sanity check!!! | 826 | # Now do the sanity check!!! |
895 | if "build-deps" not in skip: | 827 | if "build-deps" not in skip: |
828 | def check_rdep(rdep_data, possible_pn): | ||
829 | if rdep_data and "PN" in rdep_data: | ||
830 | possible_pn.add(rdep_data["PN"]) | ||
831 | return rdep_data["PN"] in taskdeps | ||
832 | return False | ||
833 | |||
896 | for rdepend in rdepends: | 834 | for rdepend in rdepends: |
897 | if "-dbg" in rdepend and "debug-deps" not in skip: | 835 | if rdepend.endswith("-dbg") and "debug-deps" not in skip: |
898 | error_msg = "%s rdepends on %s" % (pkg,rdepend) | 836 | error_msg = "%s rdepends on %s" % (pkg,rdepend) |
899 | oe.qa.handle_error("debug-deps", error_msg, d) | 837 | oe.qa.handle_error("debug-deps", error_msg, d) |
900 | if (not "-dev" in pkg and not "-staticdev" in pkg) and rdepend.endswith("-dev") and "dev-deps" not in skip: | 838 | if (not "-dev" in pkg and not "-staticdev" in pkg) and rdepend.endswith("-dev") and "dev-deps" not in skip: |
901 | error_msg = "%s rdepends on %s" % (pkg, rdepend) | 839 | error_msg = "%s rdepends on %s" % (pkg, rdepend) |
902 | oe.qa.handle_error("dev-deps", error_msg, d) | 840 | oe.qa.handle_error("dev-deps", error_msg, d) |
903 | if rdepend not in packages: | 841 | if rdepend not in packages: |
842 | possible_pn = set() | ||
904 | rdep_data = oe.packagedata.read_subpkgdata(rdepend, d) | 843 | rdep_data = oe.packagedata.read_subpkgdata(rdepend, d) |
905 | if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps: | 844 | if check_rdep(rdep_data, possible_pn): |
906 | continue | 845 | continue |
907 | if not rdep_data or not 'PN' in rdep_data: | 846 | |
908 | for _, rdep_data in oe.packagedata.foreach_runtime_provider_pkgdata(d, rdepend): | 847 | if any(check_rdep(rdep_data, possible_pn) for _, rdep_data in oe.packagedata.foreach_runtime_provider_pkgdata(d, rdepend)): |
909 | if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps: | ||
910 | break | ||
911 | if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps: | ||
912 | continue | 848 | continue |
913 | if rdep_data and 'PN' in rdep_data: | 849 | |
914 | error_msg = "%s rdepends on %s, but it isn't a build dependency, missing %s in DEPENDS or PACKAGECONFIG?" % (pkg, rdepend, rdep_data['PN']) | 850 | if possible_pn: |
851 | error_msg = "%s rdepends on %s, but it isn't a build dependency, missing one of %s in DEPENDS or PACKAGECONFIG?" % (pkg, rdepend, ", ".join(possible_pn)) | ||
915 | else: | 852 | else: |
916 | error_msg = "%s rdepends on %s, but it isn't a build dependency?" % (pkg, rdepend) | 853 | error_msg = "%s rdepends on %s, but it isn't a build dependency?" % (pkg, rdepend) |
917 | oe.qa.handle_error("build-deps", error_msg, d) | 854 | oe.qa.handle_error("build-deps", error_msg, d) |
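The refactor threads a possible_pn set through check_rdep so the final message can name every candidate provider at once. Stripped down, with invented pkgdata values:

taskdeps = {"zlib", "openssl"}  # recipes that really are build dependencies

def check_rdep(rdep_data, possible_pn):
    if rdep_data and "PN" in rdep_data:
        possible_pn.add(rdep_data["PN"])
        return rdep_data["PN"] in taskdeps
    return False

possible_pn = set()
providers = [{"PN": "curl"}, {"PN": "wolfssl"}]  # invented runtime providers
if not any(check_rdep(p, possible_pn) for p in providers):
    print("missing one of %s in DEPENDS or PACKAGECONFIG?" % ", ".join(sorted(possible_pn)))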
@@ -998,20 +935,19 @@ def package_qa_check_deps(pkg, pkgdest, d): | |||
998 | check_valid_deps('RCONFLICTS') | 935 | check_valid_deps('RCONFLICTS') |
999 | 936 | ||
1000 | QAPKGTEST[usrmerge] = "package_qa_check_usrmerge" | 937 | QAPKGTEST[usrmerge] = "package_qa_check_usrmerge" |
1001 | def package_qa_check_usrmerge(pkg, d, messages): | 938 | def package_qa_check_usrmerge(pkg, d): |
1002 | 939 | global cpath | |
1003 | pkgdest = d.getVar('PKGDEST') | 940 | pkgdest = d.getVar('PKGDEST') |
1004 | pkg_dir = pkgdest + os.sep + pkg + os.sep | 941 | pkg_dir = pkgdest + os.sep + pkg + os.sep |
1005 | merged_dirs = ['bin', 'sbin', 'lib'] + d.getVar('MULTILIB_VARIANTS').split() | 942 | merged_dirs = ['bin', 'sbin', 'lib'] + d.getVar('MULTILIB_VARIANTS').split() |
1006 | for f in merged_dirs: | 943 | for f in merged_dirs: |
1007 | if os.path.exists(pkg_dir + f) and not os.path.islink(pkg_dir + f): | 944 | if cpath.exists(pkg_dir + f) and not cpath.islink(pkg_dir + f): |
1008 | msg = "%s package is not obeying usrmerge distro feature. /%s should be relocated to /usr." % (pkg, f) | 945 | msg = "%s package is not obeying usrmerge distro feature. /%s should be relocated to /usr." % (pkg, f) |
1009 | oe.qa.add_message(messages, "usrmerge", msg) | 946 | oe.qa.handle_error("usrmerge", msg, d) |
1010 | return False | 947 | return |
1011 | return True | ||
1012 | 948 | ||
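In essence the usrmerge test only rejects real (non-symlink) top-level bin/sbin/lib directories in a package; standalone, with an invented variants list:

import os

def usrmerge_violation(pkg_dir, multilib_variants=("lib32",)):
    # Under usrmerge these must be symlinks into /usr, never shipped directories.
    for f in ["bin", "sbin", "lib"] + list(multilib_variants):
        p = os.path.join(pkg_dir, f)
        if os.path.exists(p) and not os.path.islink(p):
            return f
    return None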
1013 | QAPKGTEST[perllocalpod] = "package_qa_check_perllocalpod" | 949 | QAPKGTEST[perllocalpod] = "package_qa_check_perllocalpod" |
1014 | def package_qa_check_perllocalpod(pkg, d, messages): | 950 | def package_qa_check_perllocalpod(pkg, d): |
1015 | """ | 951 | """ |
1016 | Check that the recipe didn't ship a perlocal.pod file, which shouldn't be | 952 | Check that the recipe didn't ship a perlocal.pod file, which shouldn't be |
1017 | installed in a distribution package. cpan.bbclass sets NO_PERLLOCAL=1 to | 953 | installed in a distribution package. cpan.bbclass sets NO_PERLLOCAL=1 to |
@@ -1025,67 +961,61 @@ def package_qa_check_perllocalpod(pkg, d, messages): | |||
1025 | if matches: | 961 | if matches: |
1026 | matches = [package_qa_clean_path(path, d, pkg) for path in matches] | 962 | matches = [package_qa_clean_path(path, d, pkg) for path in matches] |
1027 | msg = "%s contains perllocal.pod (%s), should not be installed" % (pkg, " ".join(matches)) | 963 | msg = "%s contains perllocal.pod (%s), should not be installed" % (pkg, " ".join(matches)) |
1028 | oe.qa.add_message(messages, "perllocalpod", msg) | 964 | oe.qa.handle_error("perllocalpod", msg, d) |
1029 | 965 | ||
1030 | QAPKGTEST[expanded-d] = "package_qa_check_expanded_d" | 966 | QAPKGTEST[expanded-d] = "package_qa_check_expanded_d" |
1031 | def package_qa_check_expanded_d(package, d, messages): | 967 | def package_qa_check_expanded_d(package, d): |
1032 | """ | 968 | """ |
1033 | Check for the expanded D (${D}) value in pkg_* and FILES | 969 | Check for the expanded D (${D}) value in pkg_* and FILES |
1034 | variables, warn the user to use it correctly. | 970 | variables, warn the user to use it correctly. |
1035 | """ | 971 | """ |
1036 | sane = True | ||
1037 | expanded_d = d.getVar('D') | 972 | expanded_d = d.getVar('D') |
1038 | 973 | ||
1039 | for var in 'FILES','pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm': | 974 | for var in 'FILES','pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm': |
1040 | bbvar = d.getVar(var + ":" + package) or "" | 975 | bbvar = d.getVar(var + ":" + package) or "" |
1041 | if expanded_d in bbvar: | 976 | if expanded_d in bbvar: |
1042 | if var == 'FILES': | 977 | if var == 'FILES': |
1043 | oe.qa.add_message(messages, "expanded-d", "FILES in %s recipe should not contain the ${D} variable as it references the local build directory not the target filesystem, best solution is to remove the ${D} reference" % package) | 978 | oe.qa.handle_error("expanded-d", "FILES in %s recipe should not contain the ${D} variable as it references the local build directory not the target filesystem, best solution is to remove the ${D} reference" % package, d) |
1044 | sane = False | ||
1045 | else: | 979 | else: |
1046 | oe.qa.add_message(messages, "expanded-d", "%s in %s recipe contains ${D}, it should be replaced by $D instead" % (var, package)) | 980 | oe.qa.handle_error("expanded-d", "%s in %s recipe contains ${D}, it should be replaced by $D instead" % (var, package), d) |
1047 | sane = False | ||
1048 | return sane | ||
1049 | 981 | ||
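The distinction being enforced: ${D} expands at build time to the staging directory, while $D is left for the scriptlet to evaluate at install time. A toy version of the scan, with invented values:

expanded_d = "/build/tmp/work/foo/image"  # invented ${D} value
recipe_vars = {
    "pkg_postinst:foo": "ldconfig /build/tmp/work/foo/image/usr/lib",
    "FILES:foo": "/usr/bin/foo",
}
for var, value in recipe_vars.items():
    if expanded_d in value:
        print("%s contains ${D}, it should be replaced by $D" % var)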
1050 | QAPKGTEST[unlisted-pkg-lics] = "package_qa_check_unlisted_pkg_lics" | 982 | QAPKGTEST[unlisted-pkg-lics] = "package_qa_check_unlisted_pkg_lics" |
1051 | def package_qa_check_unlisted_pkg_lics(package, d, messages): | 983 | def package_qa_check_unlisted_pkg_lics(package, d): |
1052 | """ | 984 | """ |
1053 | Check that all licenses for a package are among the licenses for the recipe. | 985 | Check that all licenses for a package are among the licenses for the recipe. |
1054 | """ | 986 | """ |
1055 | pkg_lics = d.getVar('LICENSE:' + package) | 987 | pkg_lics = d.getVar('LICENSE:' + package) |
1056 | if not pkg_lics: | 988 | if not pkg_lics: |
1057 | return True | 989 | return |
1058 | 990 | ||
1059 | recipe_lics_set = oe.license.list_licenses(d.getVar('LICENSE')) | 991 | recipe_lics_set = oe.license.list_licenses(d.getVar('LICENSE')) |
1060 | package_lics = oe.license.list_licenses(pkg_lics) | 992 | package_lics = oe.license.list_licenses(pkg_lics) |
1061 | unlisted = package_lics - recipe_lics_set | 993 | unlisted = package_lics - recipe_lics_set |
1062 | if unlisted: | 994 | if unlisted: |
1063 | oe.qa.add_message(messages, "unlisted-pkg-lics", | 995 | oe.qa.handle_error("unlisted-pkg-lics", |
1064 | "LICENSE:%s includes licenses (%s) that are not " | 996 | "LICENSE:%s includes licenses (%s) that are not " |
1065 | "listed in LICENSE" % (package, ' '.join(unlisted))) | 997 | "listed in LICENSE" % (package, ' '.join(unlisted)), d) |
1066 | return False | ||
1067 | obsolete = set(oe.license.obsolete_license_list()) & package_lics - recipe_lics_set | 998 | obsolete = set(oe.license.obsolete_license_list()) & package_lics - recipe_lics_set |
1068 | if obsolete: | 999 | if obsolete: |
1069 | oe.qa.add_message(messages, "obsolete-license", | 1000 | oe.qa.handle_error("obsolete-license", |
1070 | "LICENSE:%s includes obsolete licenses %s" % (package, ' '.join(obsolete))) | 1001 | "LICENSE:%s includes obsolete licenses %s" % (package, ' '.join(obsolete)), d) |
1071 | return False | ||
1072 | return True | ||
1073 | 1002 | ||
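Both messages come down to set arithmetic on the parsed license expressions, roughly (license lists invented):

recipe_lics = {"MIT", "GPL-2.0-only"}
package_lics = {"MIT", "BSD-3-Clause"}
obsolete_names = {"GPLv2"}  # invented obsolete-license list

unlisted = package_lics - recipe_lics
obsolete = obsolete_names & package_lics - recipe_lics  # & binds tighter than -
print(unlisted)  # {'BSD-3-Clause'}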
1074 | QAPKGTEST[empty-dirs] = "package_qa_check_empty_dirs" | 1003 | QAPKGTEST[empty-dirs] = "package_qa_check_empty_dirs" |
1075 | def package_qa_check_empty_dirs(pkg, d, messages): | 1004 | def package_qa_check_empty_dirs(pkg, d): |
1076 | """ | 1005 | """ |
1077 | Check for the existence of files in directories that are expected to be | 1006 | Check for the existence of files in directories that are expected to be |
1078 | empty. | 1007 | empty. |
1079 | """ | 1008 | """ |
1080 | 1009 | ||
1010 | global cpath | ||
1081 | pkgd = oe.path.join(d.getVar('PKGDEST'), pkg) | 1011 | pkgd = oe.path.join(d.getVar('PKGDEST'), pkg) |
1082 | for dir in (d.getVar('QA_EMPTY_DIRS') or "").split(): | 1012 | for dir in (d.getVar('QA_EMPTY_DIRS') or "").split(): |
1083 | empty_dir = oe.path.join(pkgd, dir) | 1013 | empty_dir = oe.path.join(pkgd, dir) |
1084 | if os.path.exists(empty_dir) and os.listdir(empty_dir): | 1014 | if cpath.exists(empty_dir) and os.listdir(empty_dir): |
1085 | recommendation = (d.getVar('QA_EMPTY_DIRS_RECOMMENDATION:' + dir) or | 1015 | recommendation = (d.getVar('QA_EMPTY_DIRS_RECOMMENDATION:' + dir) or |
1086 | "but it is expected to be empty") | 1016 | "but it is expected to be empty") |
1087 | msg = "%s installs files in %s, %s" % (pkg, dir, recommendation) | 1017 | msg = "%s installs files in %s, %s" % (pkg, dir, recommendation) |
1088 | oe.qa.add_message(messages, "empty-dirs", msg) | 1018 | oe.qa.handle_error("empty-dirs", msg, d) |
1089 | 1019 | ||
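A self-contained approximation, assuming the usual QA_EMPTY_DIRS default of /dev:

import os

def empty_dir_offenders(pkgd, qa_empty_dirs="/dev"):
    for dir in qa_empty_dirs.split():
        # oe.path.join tolerates the leading slash; os.path.join needs it stripped
        p = os.path.join(pkgd, dir.lstrip("/"))
        if os.path.exists(p) and os.listdir(p):
            yield dir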
1090 | def package_qa_check_encoding(keys, encode, d): | 1020 | def package_qa_check_encoding(keys, encode, d): |
1091 | def check_encoding(key, enc): | 1021 | def check_encoding(key, enc): |
@@ -1109,10 +1039,11 @@ HOST_USER_UID := "${@os.getuid()}" | |||
1109 | HOST_USER_GID := "${@os.getgid()}" | 1039 | HOST_USER_GID := "${@os.getgid()}" |
1110 | 1040 | ||
1111 | QAPATHTEST[host-user-contaminated] = "package_qa_check_host_user" | 1041 | QAPATHTEST[host-user-contaminated] = "package_qa_check_host_user" |
1112 | def package_qa_check_host_user(path, name, d, elf, messages): | 1042 | def package_qa_check_host_user(path, name, d, elf): |
1113 | """Check for paths outside of /home which are owned by the user running bitbake.""" | 1043 | """Check for paths outside of /home which are owned by the user running bitbake.""" |
1044 | global cpath | ||
1114 | 1045 | ||
1115 | if not os.path.lexists(path): | 1046 | if not cpath.lexists(path): |
1116 | return | 1047 | return |
1117 | 1048 | ||
1118 | dest = d.getVar('PKGDEST') | 1049 | dest = d.getVar('PKGDEST') |
@@ -1130,17 +1061,15 @@ def package_qa_check_host_user(path, name, d, elf, messages): | |||
1130 | else: | 1061 | else: |
1131 | check_uid = int(d.getVar('HOST_USER_UID')) | 1062 | check_uid = int(d.getVar('HOST_USER_UID')) |
1132 | if stat.st_uid == check_uid: | 1063 | if stat.st_uid == check_uid: |
1133 | oe.qa.add_message(messages, "host-user-contaminated", "%s: %s is owned by uid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_uid)) | 1064 | oe.qa.handle_error("host-user-contaminated", "%s: %s is owned by uid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_uid), d) |
1134 | return False | ||
1135 | 1065 | ||
1136 | check_gid = int(d.getVar('HOST_USER_GID')) | 1066 | check_gid = int(d.getVar('HOST_USER_GID')) |
1137 | if stat.st_gid == check_gid: | 1067 | if stat.st_gid == check_gid: |
1138 | oe.qa.add_message(messages, "host-user-contaminated", "%s: %s is owned by gid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_gid)) | 1068 | oe.qa.handle_error("host-user-contaminated", "%s: %s is owned by gid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_gid), d) |
1139 | return False | 1069 | package_qa_check_host_user[vardepsexclude] = "HOST_USER_UID HOST_USER_GID" |
1140 | return True | ||
1141 | 1070 | ||
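The underlying test simply compares file ownership against bitbake's own uid/gid captured at parse time; standalone:

import os

def host_contaminated(path, check_uid=os.getuid(), check_gid=os.getgid()):
    st = os.lstat(path)
    # Ownership by the build user usually means the files were installed
    # outside the (pseudo) fakeroot environment.
    return st.st_uid == check_uid or st.st_gid == check_gid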
1142 | QARECIPETEST[unhandled-features-check] = "package_qa_check_unhandled_features_check" | 1071 | QARECIPETEST[unhandled-features-check] = "package_qa_check_unhandled_features_check" |
1143 | def package_qa_check_unhandled_features_check(pn, d, messages): | 1072 | def package_qa_check_unhandled_features_check(pn, d): |
1144 | if not bb.data.inherits_class('features_check', d): | 1073 | if not bb.data.inherits_class('features_check', d): |
1145 | var_set = False | 1074 | var_set = False |
1146 | for kind in ['DISTRO', 'MACHINE', 'COMBINED']: | 1075 | for kind in ['DISTRO', 'MACHINE', 'COMBINED']: |
@@ -1151,22 +1080,36 @@ def package_qa_check_unhandled_features_check(pn, d, messages): | |||
1151 | oe.qa.handle_error("unhandled-features-check", "%s: recipe doesn't inherit features_check" % pn, d) | 1080 | oe.qa.handle_error("unhandled-features-check", "%s: recipe doesn't inherit features_check" % pn, d) |
1152 | 1081 | ||
1153 | QARECIPETEST[missing-update-alternatives] = "package_qa_check_missing_update_alternatives" | 1082 | QARECIPETEST[missing-update-alternatives] = "package_qa_check_missing_update_alternatives" |
1154 | def package_qa_check_missing_update_alternatives(pn, d, messages): | 1083 | def package_qa_check_missing_update_alternatives(pn, d): |
1155 | # Look at all packages and find out if any of those sets ALTERNATIVE variable | 1084 | # Look at all packages and find out if any of those sets ALTERNATIVE variable |
1156 | # without inheriting update-alternatives class | 1085 | # without inheriting update-alternatives class |
1157 | for pkg in (d.getVar('PACKAGES') or '').split(): | 1086 | for pkg in (d.getVar('PACKAGES') or '').split(): |
1158 | if d.getVar('ALTERNATIVE:%s' % pkg) and not bb.data.inherits_class('update-alternatives', d): | 1087 | if d.getVar('ALTERNATIVE:%s' % pkg) and not bb.data.inherits_class('update-alternatives', d): |
1159 | oe.qa.handle_error("missing-update-alternatives", "%s: recipe defines ALTERNATIVE:%s but doesn't inherit update-alternatives. This might fail during do_rootfs later!" % (pn, pkg), d) | 1088 | oe.qa.handle_error("missing-update-alternatives", "%s: recipe defines ALTERNATIVE:%s but doesn't inherit update-alternatives. This might fail during do_rootfs later!" % (pn, pkg), d) |
1160 | 1089 | ||
1090 | def parse_test_matrix(matrix_name, skip, d): | ||
1091 | testmatrix = d.getVarFlags(matrix_name) or {} | ||
1092 | g = globals() | ||
1093 | checks = [] | ||
1094 | for w in (d.getVar("WARN_QA") or "").split(): | ||
1095 | if w in skip: | ||
1096 | continue | ||
1097 | if w in testmatrix and testmatrix[w] in g: | ||
1098 | checks.append(g[testmatrix[w]]) | ||
1099 | |||
1100 | for e in (d.getVar("ERROR_QA") or "").split(): | ||
1101 | if e in skip: | ||
1102 | continue | ||
1103 | if e in testmatrix and testmatrix[e] in g: | ||
1104 | checks.append(g[testmatrix[e]]) | ||
1105 | return checks | ||
1106 | parse_test_matrix[vardepsexclude] = "ERROR_QA WARN_QA" | ||
1107 | |||
1108 | |||
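parse_test_matrix resolves the QAPATHTEST/QAPKGTEST/QARECIPETEST varflag tables into callables; a minimal reproduction of the lookup, with invented flags and a stub check:

def check_foo(*args):
    pass

testmatrix = {"foo": "check_foo", "bar": "check_bar"}  # varflag name -> function name
warn_qa, error_qa, skip = ["foo", "bar"], [], {"bar"}

g = globals()
checks = [g[testmatrix[t]] for t in warn_qa + error_qa
          if t not in skip and t in testmatrix and testmatrix[t] in g]
print(checks)  # [<function check_foo ...>]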
1161 | # The PACKAGE FUNC to scan each package | 1109 | # The PACKAGE FUNC to scan each package |
1162 | python do_package_qa () { | 1110 | python do_package_qa () { |
1163 | import subprocess | ||
1164 | import oe.packagedata | 1111 | import oe.packagedata |
1165 | 1112 | ||
1166 | bb.note("DO PACKAGE QA") | ||
1167 | |||
1168 | main_lic = d.getVar('LICENSE') | ||
1169 | |||
1170 | # Check for obsolete license references in main LICENSE (packages are checked below for any changes) | 1113 | # Check for obsolete license references in main LICENSE (packages are checked below for any changes) |
1171 | main_licenses = oe.license.list_licenses(d.getVar('LICENSE')) | 1114 | main_licenses = oe.license.list_licenses(d.getVar('LICENSE')) |
1172 | obsolete = set(oe.license.obsolete_license_list()) & main_licenses | 1115 | obsolete = set(oe.license.obsolete_license_list()) & main_licenses |
@@ -1182,27 +1125,28 @@ python do_package_qa () { | |||
1182 | pn = d.getVar('PN') | 1125 | pn = d.getVar('PN') |
1183 | 1126 | ||
1184 | # Scan the packages... | 1127 | # Scan the packages... |
1185 | pkgdest = d.getVar('PKGDEST') | ||
1186 | packages = set((d.getVar('PACKAGES') or '').split()) | 1128 | packages = set((d.getVar('PACKAGES') or '').split()) |
1129 | # no packages should be scanned | ||
1130 | if not packages: | ||
1131 | return | ||
1187 | 1132 | ||
1188 | global pkgfiles | 1133 | global pkgfiles, cpath |
1189 | pkgfiles = {} | 1134 | pkgfiles = {} |
1135 | cpath = oe.cachedpath.CachedPath() | ||
1136 | pkgdest = d.getVar('PKGDEST') | ||
1190 | for pkg in packages: | 1137 | for pkg in packages: |
1191 | pkgfiles[pkg] = [] | ||
1192 | pkgdir = os.path.join(pkgdest, pkg) | 1138 | pkgdir = os.path.join(pkgdest, pkg) |
1139 | pkgfiles[pkg] = [] | ||
1193 | for walkroot, dirs, files in os.walk(pkgdir): | 1140 | for walkroot, dirs, files in os.walk(pkgdir): |
1194 | # Don't walk into top-level CONTROL or DEBIAN directories as these | 1141 | # Don't walk into top-level CONTROL or DEBIAN directories as these |
1195 | # are temporary directories created by do_package. | 1142 | # are temporary directories created by do_package. |
1196 | if walkroot == pkgdir: | 1143 | if walkroot == pkgdir: |
1197 | for control in ("CONTROL", "DEBIAN"): | 1144 | for removedir in ("CONTROL", "DEBIAN"): |
1198 | if control in dirs: | 1145 | try: |
1199 | dirs.remove(control) | 1146 | dirs.remove(removedir) |
1200 | for file in files: | 1147 | except ValueError: |
1201 | pkgfiles[pkg].append(os.path.join(walkroot, file)) | 1148 | pass |
1202 | 1149 | pkgfiles[pkg].extend((os.path.join(walkroot, f) for f in files)) | |
1203 | # no packages should be scanned | ||
1204 | if not packages: | ||
1205 | return | ||
1206 | 1150 | ||
1207 | import re | 1151 | import re |
1208 | # The package name matches the [a-z0-9.+-]+ regular expression | 1152 | # The package name matches the [a-z0-9.+-]+ regular expression |
@@ -1213,24 +1157,6 @@ python do_package_qa () { | |||
1213 | for dep in taskdepdata: | 1157 | for dep in taskdepdata: |
1214 | taskdeps.add(taskdepdata[dep][0]) | 1158 | taskdeps.add(taskdepdata[dep][0]) |
1215 | 1159 | ||
1216 | def parse_test_matrix(matrix_name): | ||
1217 | testmatrix = d.getVarFlags(matrix_name) or {} | ||
1218 | g = globals() | ||
1219 | warnchecks = [] | ||
1220 | for w in (d.getVar("WARN_QA") or "").split(): | ||
1221 | if w in skip: | ||
1222 | continue | ||
1223 | if w in testmatrix and testmatrix[w] in g: | ||
1224 | warnchecks.append(g[testmatrix[w]]) | ||
1225 | |||
1226 | errorchecks = [] | ||
1227 | for e in (d.getVar("ERROR_QA") or "").split(): | ||
1228 | if e in skip: | ||
1229 | continue | ||
1230 | if e in testmatrix and testmatrix[e] in g: | ||
1231 | errorchecks.append(g[testmatrix[e]]) | ||
1232 | return warnchecks, errorchecks | ||
1233 | |||
1234 | for package in packages: | 1160 | for package in packages: |
1235 | skip = set((d.getVar('INSANE_SKIP') or "").split() + | 1161 | skip = set((d.getVar('INSANE_SKIP') or "").split() + |
1236 | (d.getVar('INSANE_SKIP:' + package) or "").split()) | 1162 | (d.getVar('INSANE_SKIP:' + package) or "").split()) |
@@ -1243,21 +1169,23 @@ python do_package_qa () { | |||
1243 | oe.qa.handle_error("pkgname", | 1169 | oe.qa.handle_error("pkgname", |
1244 | "%s doesn't match the [a-z0-9.+-]+ regex" % package, d) | 1170 | "%s doesn't match the [a-z0-9.+-]+ regex" % package, d) |
1245 | 1171 | ||
1246 | warn_checks, error_checks = parse_test_matrix("QAPATHTEST") | 1172 | checks = parse_test_matrix("QAPATHTEST", skip, d) |
1247 | package_qa_walk(warn_checks, error_checks, package, d) | 1173 | package_qa_walk(checks, package, d) |
1248 | 1174 | ||
1249 | warn_checks, error_checks = parse_test_matrix("QAPKGTEST") | 1175 | checks = parse_test_matrix("QAPKGTEST", skip, d) |
1250 | package_qa_package(warn_checks, error_checks, package, d) | 1176 | for func in checks: |
1177 | func(package, d) | ||
1251 | 1178 | ||
1252 | package_qa_check_rdepends(package, pkgdest, skip, taskdeps, packages, d) | 1179 | package_qa_check_rdepends(package, pkgdest, skip, taskdeps, packages, d) |
1253 | package_qa_check_deps(package, pkgdest, d) | 1180 | package_qa_check_deps(package, pkgdest, d) |
1254 | 1181 | ||
1255 | warn_checks, error_checks = parse_test_matrix("QARECIPETEST") | 1182 | checks = parse_test_matrix("QARECIPETEST", skip, d) |
1256 | package_qa_recipe(warn_checks, error_checks, pn, d) | 1183 | for func in checks: |
1184 | func(pn, d) | ||
1257 | 1185 | ||
1258 | if 'libdir' in d.getVar("ALL_QA").split(): | 1186 | package_qa_check_libdir(d) |
1259 | package_qa_check_libdir(d) | ||
1260 | 1187 | ||
1188 | cpath = None | ||
1261 | oe.qa.exit_if_errors(d) | 1189 | oe.qa.exit_if_errors(d) |
1262 | } | 1190 | } |
1263 | 1191 | ||
@@ -1269,11 +1197,17 @@ do_package_qa[vardepsexclude] = "BB_TASKDEPDATA" | |||
1269 | do_package_qa[rdeptask] = "do_packagedata" | 1197 | do_package_qa[rdeptask] = "do_packagedata" |
1270 | addtask do_package_qa after do_packagedata do_package before do_build | 1198 | addtask do_package_qa after do_packagedata do_package before do_build |
1271 | 1199 | ||
1200 | do_build[rdeptask] += "do_package_qa" | ||
1201 | |||
1272 | # Add the package specific INSANE_SKIPs to the sstate dependencies | 1202 | # Add the package specific INSANE_SKIPs to the sstate dependencies |
1273 | python() { | 1203 | python() { |
1274 | pkgs = (d.getVar('PACKAGES') or '').split() | 1204 | pkgs = (d.getVar('PACKAGES') or '').split() |
1275 | for pkg in pkgs: | 1205 | for pkg in pkgs: |
1276 | d.appendVarFlag("do_package_qa", "vardeps", " INSANE_SKIP:{}".format(pkg)) | 1206 | d.appendVarFlag("do_package_qa", "vardeps", " INSANE_SKIP:{}".format(pkg)) |
1207 | funcs = d.getVarFlags("QAPATHTEST") | ||
1208 | funcs.update(d.getVarFlags("QAPKGTEST")) | ||
1209 | funcs.update(d.getVarFlags("QARECIPETEST")) | ||
1210 | d.appendVarFlag("do_package_qa", "vardeps", " ".join(funcs.values())) | ||
1277 | } | 1211 | } |
1278 | 1212 | ||
1279 | SSTATETASKS += "do_package_qa" | 1213 | SSTATETASKS += "do_package_qa" |
@@ -1373,10 +1307,10 @@ python do_qa_patch() { | |||
1373 | srcdir = d.getVar('S') | 1307 | srcdir = d.getVar('S') |
1374 | if not bb.utils.contains('DISTRO_FEATURES', 'ptest', True, False, d): | 1308 | if not bb.utils.contains('DISTRO_FEATURES', 'ptest', True, False, d): |
1375 | pass | 1309 | pass |
1310 | elif not (bb.utils.contains('ERROR_QA', 'unimplemented-ptest', True, False, d) or bb.utils.contains('WARN_QA', 'unimplemented-ptest', True, False, d)): | ||
1311 | pass | ||
1376 | elif bb.data.inherits_class('ptest', d): | 1312 | elif bb.data.inherits_class('ptest', d): |
1377 | bb.note("Package %s QA: skipping unimplemented-ptest: ptest implementation detected" % d.getVar('PN')) | 1313 | bb.note("Package %s QA: skipping unimplemented-ptest: ptest implementation detected" % d.getVar('PN')) |
1378 | elif srcdir == d.getVar('WORKDIR'): | ||
1379 | bb.note("Package %s QA: skipping unimplemented-ptest: This check is not supported for recipe with \"S = \"${WORKDIR}\"" % d.getVar('PN')) | ||
1380 | 1314 | ||
1381 | # Detect perl Test:: based tests | 1315 | # Detect perl Test:: based tests |
1382 | elif os.path.exists(os.path.join(srcdir, "t")) and any(filename.endswith('.t') for filename in os.listdir(os.path.join(srcdir, 't'))): | 1316 | elif os.path.exists(os.path.join(srcdir, "t")) and any(filename.endswith('.t') for filename in os.listdir(os.path.join(srcdir, 't'))): |
@@ -1398,8 +1332,15 @@ python do_qa_patch() { | |||
1398 | elif os.path.exists(os.path.join(srcdir, "Makefile.in")) and (match_line_in_files(srcdir, "**/Makefile.in", r'\s*TESTS\s*\+?=') or match_line_in_files(srcdir,"**/*.at",r'.*AT_INIT')): | 1332 | elif os.path.exists(os.path.join(srcdir, "Makefile.in")) and (match_line_in_files(srcdir, "**/Makefile.in", r'\s*TESTS\s*\+?=') or match_line_in_files(srcdir,"**/*.at",r'.*AT_INIT')): |
1399 | oe.qa.handle_error("unimplemented-ptest", "%s: autotools-based tests detected" % d.getVar('PN'), d) | 1333 | oe.qa.handle_error("unimplemented-ptest", "%s: autotools-based tests detected" % d.getVar('PN'), d) |
1400 | 1334 | ||
1335 | # Detect cargo-based tests | ||
1336 | elif os.path.exists(os.path.join(srcdir, "Cargo.toml")) and ( | ||
1337 | match_line_in_files(srcdir, "**/*.rs", r'\s*#\s*\[\s*test\s*\]') or | ||
1338 | match_line_in_files(srcdir, "**/*.rs", r'\s*#\s*\[\s*cfg\s*\(\s*test\s*\)\s*\]') | ||
1339 | ): | ||
1340 | oe.qa.handle_error("unimplemented-ptest", "%s: cargo-based tests detected" % d.getVar('PN'), d) | ||
1341 | |||
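The two Rust patterns catch both the test attribute and the cfg(test) module marker; for example (sample source invented):

import re

sample = """
#[cfg(test)]
mod tests {
    #[test]
    fn it_works() { assert_eq!(2 + 2, 4); }
}
"""
found = (re.search(r'\s*#\s*\[\s*test\s*\]', sample) or
         re.search(r'\s*#\s*\[\s*cfg\s*\(\s*test\s*\)\s*\]', sample))
print(bool(found))  # True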
1401 | # Last resort, detect a test directory in sources | 1342 | # Last resort, detect a test directory in sources |
1402 | elif any(filename.lower() in ["test", "tests"] for filename in os.listdir(srcdir)): | 1343 | elif os.path.exists(srcdir) and any(filename.lower() in ["test", "tests"] for filename in os.listdir(srcdir)): |
1403 | oe.qa.handle_error("unimplemented-ptest", "%s: test subdirectory detected" % d.getVar('PN'), d) | 1344 | oe.qa.handle_error("unimplemented-ptest", "%s: test subdirectory detected" % d.getVar('PN'), d) |
1404 | 1345 | ||
1405 | oe.qa.exit_if_errors(d) | 1346 | oe.qa.exit_if_errors(d) |
@@ -1484,22 +1425,20 @@ Rerun configure task after fixing this.""" | |||
1484 | except subprocess.CalledProcessError: | 1425 | except subprocess.CalledProcessError: |
1485 | pass | 1426 | pass |
1486 | 1427 | ||
1487 | # Check invalid PACKAGECONFIG | ||
1488 | pkgconfig = (d.getVar("PACKAGECONFIG") or "").split() | ||
1489 | if pkgconfig: | ||
1490 | pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {} | ||
1491 | for pconfig in pkgconfig: | ||
1492 | if pconfig not in pkgconfigflags: | ||
1493 | pn = d.getVar('PN') | ||
1494 | error_msg = "%s: invalid PACKAGECONFIG: %s" % (pn, pconfig) | ||
1495 | oe.qa.handle_error("invalid-packageconfig", error_msg, d) | ||
1496 | |||
1497 | oe.qa.exit_if_errors(d) | 1428 | oe.qa.exit_if_errors(d) |
1498 | } | 1429 | } |
1499 | 1430 | ||
1500 | python do_qa_unpack() { | 1431 | python do_qa_unpack() { |
1501 | src_uri = d.getVar('SRC_URI') | 1432 | src_uri = d.getVar('SRC_URI') |
1502 | s_dir = d.getVar('S') | 1433 | s_dir = d.getVar('S') |
1434 | s_dir_orig = d.getVar('S', False) | ||
1435 | |||
1436 | if s_dir_orig == '${WORKDIR}/git' or s_dir_orig == '${UNPACKDIR}/git': | ||
1437 | bb.fatal('Recipes that set S = "${WORKDIR}/git" or S = "${UNPACKDIR}/git" should remove that assignment, as S set by bitbake.conf in oe-core now works.') | ||
1438 | |||
1439 | if '${WORKDIR}' in s_dir_orig: | ||
1440 | bb.fatal('S should be set relative to UNPACKDIR, e.g. replace WORKDIR with UNPACKDIR in "S = {}"'.format(s_dir_orig)) | ||
1441 | |||
1503 | if src_uri and not os.path.exists(s_dir): | 1442 | if src_uri and not os.path.exists(s_dir): |
1504 | bb.warn('%s: the directory %s (%s) pointed to by the S variable doesn\'t exist - please set S within the recipe to point to where the source has been unpacked to' % (d.getVar('PN'), d.getVar('S', False), s_dir)) | 1443 | bb.warn('%s: the directory %s (%s) pointed to by the S variable doesn\'t exist - please set S within the recipe to point to where the source has been unpacked to' % (d.getVar('PN'), d.getVar('S', False), s_dir)) |
1505 | } | 1444 | } |
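Because these checks must see the literal assignment, S is read with expand=False; a rough equivalent with an invented unexpanded value:

s_dir_orig = "${WORKDIR}/git"  # invented unexpanded S value

if s_dir_orig in ("${WORKDIR}/git", "${UNPACKDIR}/git"):
    raise SystemExit("remove the assignment, S set by bitbake.conf now works")
if "${WORKDIR}" in s_dir_orig:
    raise SystemExit("set S relative to UNPACKDIR instead of WORKDIR")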
@@ -1507,6 +1446,12 @@ python do_qa_unpack() { | |||
1507 | python do_recipe_qa() { | 1446 | python do_recipe_qa() { |
1508 | import re | 1447 | import re |
1509 | 1448 | ||
1449 | def test_naming(pn, d): | ||
1450 | if pn.endswith("-native") and not bb.data.inherits_class("native", d): | ||
1451 | oe.qa.handle_error("recipe-naming", "Recipe %s appears native but is not, should inherit native" % pn, d) | ||
1452 | if pn.startswith("nativesdk-") and not bb.data.inherits_class("nativesdk", d): | ||
1453 | oe.qa.handle_error("recipe-naming", "Recipe %s appears nativesdk but is not, should inherit nativesdk" % pn, d) | ||
1454 | |||
1510 | def test_missing_metadata(pn, d): | 1455 | def test_missing_metadata(pn, d): |
1511 | fn = d.getVar("FILE") | 1456 | fn = d.getVar("FILE") |
1512 | srcfile = d.getVar('SRC_URI').split() | 1457 | srcfile = d.getVar('SRC_URI').split() |
@@ -1541,10 +1486,21 @@ python do_recipe_qa() { | |||
1541 | if re.search(r"git(hu|la)b\.com/.+/.+/archive/.+", url) or "//codeload.github.com/" in url: | 1486 | if re.search(r"git(hu|la)b\.com/.+/.+/archive/.+", url) or "//codeload.github.com/" in url: |
1542 | oe.qa.handle_error("src-uri-bad", "%s: SRC_URI uses unstable GitHub/GitLab archives, convert recipe to use git protocol" % pn, d) | 1487 | oe.qa.handle_error("src-uri-bad", "%s: SRC_URI uses unstable GitHub/GitLab archives, convert recipe to use git protocol" % pn, d) |
1543 | 1488 | ||
1489 | def test_packageconfig(pn, d): | ||
1490 | pkgconfigs = (d.getVar("PACKAGECONFIG") or "").split() | ||
1491 | if pkgconfigs: | ||
1492 | pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {} | ||
1493 | invalid_pkgconfigs = set(pkgconfigs) - set(pkgconfigflags) | ||
1494 | if invalid_pkgconfigs: | ||
1495 | error_msg = "%s: invalid PACKAGECONFIG(s): %s" % (pn, " ".join(sorted(invalid_pkgconfigs))) | ||
1496 | oe.qa.handle_error("invalid-packageconfig", error_msg, d) | ||
1497 | |||
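The relocated PACKAGECONFIG validation is now a single set difference rather than a per-item loop; schematically, with invented values:

pkgconfigs = "ssl ipv6 experimental".split()      # enabled PACKAGECONFIGs
pkgconfigflags = {"ssl": "...", "ipv6": "..."}    # declared flag table

invalid = set(pkgconfigs) - set(pkgconfigflags)
if invalid:
    print("invalid PACKAGECONFIG(s): %s" % " ".join(sorted(invalid)))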
1544 | pn = d.getVar('PN') | 1498 | pn = d.getVar('PN') |
1499 | test_naming(pn, d) | ||
1545 | test_missing_metadata(pn, d) | 1500 | test_missing_metadata(pn, d) |
1546 | test_missing_maintainer(pn, d) | 1501 | test_missing_maintainer(pn, d) |
1547 | test_srcuri(pn, d) | 1502 | test_srcuri(pn, d) |
1503 | test_packageconfig(pn, d) | ||
1548 | oe.qa.exit_if_errors(d) | 1504 | oe.qa.exit_if_errors(d) |
1549 | } | 1505 | } |
1550 | 1506 | ||
@@ -1572,8 +1528,7 @@ do_unpack[postfuncs] += "do_qa_unpack" | |||
1572 | python () { | 1528 | python () { |
1573 | import re | 1529 | import re |
1574 | 1530 | ||
1575 | tests = d.getVar('ALL_QA').split() | 1531 | if bb.utils.contains('ERROR_QA', 'desktop', True, False, d) or bb.utils.contains('WARN_QA', 'desktop', True, False, d): |
1576 | if "desktop" in tests: | ||
1577 | d.appendVar("PACKAGE_DEPENDS", " desktop-file-utils-native") | 1532 | d.appendVar("PACKAGE_DEPENDS", " desktop-file-utils-native") |
1578 | 1533 | ||
1579 | ########################################################################### | 1534 | ########################################################################### |
@@ -1602,17 +1557,31 @@ python () { | |||
1602 | if prog.search(pn): | 1557 | if prog.search(pn): |
1603 | oe.qa.handle_error("uppercase-pn", 'PN: %s is upper case, this can result in unexpected behavior.' % pn, d) | 1558 | oe.qa.handle_error("uppercase-pn", 'PN: %s is upper case, this can result in unexpected behavior.' % pn, d) |
1604 | 1559 | ||
1560 | sourcedir = d.getVar("S") | ||
1561 | builddir = d.getVar("B") | ||
1562 | workdir = d.getVar("WORKDIR") | ||
1563 | unpackdir = d.getVar("UNPACKDIR") | ||
1564 | if sourcedir == workdir: | ||
1565 | bb.fatal("Using S = ${WORKDIR} is no longer supported") | ||
1566 | if builddir == workdir: | ||
1567 | bb.fatal("Using B = ${WORKDIR} is no longer supported") | ||
1568 | if unpackdir == workdir: | ||
1569 | bb.fatal("Using UNPACKDIR = ${WORKDIR} is not supported") | ||
1570 | if sourcedir[-1] == '/': | ||
1571 | bb.warn("Recipe %s sets S variable with trailing slash '%s', remove it" % (d.getVar("PN"), d.getVar("S"))) | ||
1572 | if builddir[-1] == '/': | ||
1573 | bb.warn("Recipe %s sets B variable with trailing slash '%s', remove it" % (d.getVar("PN"), d.getVar("B"))) | ||
1574 | |||
1605 | # Some people mistakenly use DEPENDS:${PN} instead of DEPENDS and wonder | 1575 | # Some people mistakenly use DEPENDS:${PN} instead of DEPENDS and wonder |
1606 | # why it doesn't work. | 1576 | # why it doesn't work. |
1607 | if (d.getVar(d.expand('DEPENDS:${PN}'))): | 1577 | if (d.getVar(d.expand('DEPENDS:${PN}'))): |
1608 | oe.qa.handle_error("pkgvarcheck", "recipe uses DEPENDS:${PN}, should use DEPENDS", d) | 1578 | oe.qa.handle_error("pkgvarcheck", "recipe uses DEPENDS:${PN}, should use DEPENDS", d) |
1609 | 1579 | ||
1610 | # virtual/ is meaningless for these variables | 1580 | # virtual/ is meaningless for these variables |
1611 | if "virtual-slash" in (d.getVar("ALL_QA") or "").split(): | 1581 | for k in ['RDEPENDS', 'RPROVIDES']: |
1612 | for k in ['RDEPENDS', 'RPROVIDES']: | 1582 | for var in bb.utils.explode_deps(d.getVar(k + ':' + pn) or ""): |
1613 | for var in bb.utils.explode_deps(d.getVar(k + ':' + pn) or ""): | 1583 | if var.startswith("virtual/"): |
1614 | if var.startswith("virtual/"): | 1584 | oe.qa.handle_error("virtual-slash", "%s is set to %s but the substring 'virtual/' holds no meaning in this context. It only works for build time dependencies, not runtime ones. It is suggested to use 'VIRTUAL-RUNTIME_' variables instead." % (k, var), d) |
1615 | oe.qa.handle_error("virtual-slash", "%s is set to %s but the substring 'virtual/' holds no meaning in this context. It only works for build time dependencies, not runtime ones. It is suggested to use 'VIRTUAL-RUNTIME_' variables instead." % (k, var), d) | ||
1616 | 1585 | ||
1617 | issues = [] | 1586 | issues = [] |
1618 | if (d.getVar('PACKAGES') or "").split(): | 1587 | if (d.getVar('PACKAGES') or "").split(): |
@@ -1622,8 +1591,7 @@ python () { | |||
1622 | if d.getVar(var, False): | 1591 | if d.getVar(var, False): |
1623 | issues.append(var) | 1592 | issues.append(var) |
1624 | 1593 | ||
1625 | fakeroot_tests = d.getVar('FAKEROOT_QA').split() | 1594 | if bb.utils.contains('ERROR_QA', 'host-user-contaminated', True, False, d) or bb.utils.contains('WARN_QA', 'host-user-contaminated', True, False, d): |
1626 | if set(tests) & set(fakeroot_tests): | ||
1627 | d.setVarFlag('do_package_qa', 'fakeroot', '1') | 1595 | d.setVarFlag('do_package_qa', 'fakeroot', '1') |
1628 | d.appendVarFlag('do_package_qa', 'depends', ' virtual/fakeroot-native:do_populate_sysroot') | 1596 | d.appendVarFlag('do_package_qa', 'depends', ' virtual/fakeroot-native:do_populate_sysroot') |
1629 | else: | 1597 | else: |
diff --git a/meta/classes-global/license.bbclass b/meta/classes-global/license.bbclass index b2e0d3faba..af5f1ed41d 100644 --- a/meta/classes-global/license.bbclass +++ b/meta/classes-global/license.bbclass | |||
@@ -18,8 +18,14 @@ LICENSE_CREATE_PACKAGE ??= "0" | |||
18 | LICENSE_PACKAGE_SUFFIX ??= "-lic" | 18 | LICENSE_PACKAGE_SUFFIX ??= "-lic" |
19 | LICENSE_FILES_DIRECTORY ??= "${datadir}/licenses/" | 19 | LICENSE_FILES_DIRECTORY ??= "${datadir}/licenses/" |
20 | 20 | ||
21 | LICENSE_DEPLOY_PATHCOMPONENT = "${SSTATE_PKGARCH}" | ||
22 | LICENSE_DEPLOY_PATHCOMPONENT:class-cross = "native" | ||
23 | LICENSE_DEPLOY_PATHCOMPONENT:class-native = "native" | ||
24 | # Ensure the *value* of SSTATE_PKGARCH is captured as it is used in the output paths | ||
25 | LICENSE_DEPLOY_PATHCOMPONENT[vardepvalue] += "${LICENSE_DEPLOY_PATHCOMPONENT}" | ||
26 | |||
21 | addtask populate_lic after do_patch before do_build | 27 | addtask populate_lic after do_patch before do_build |
22 | do_populate_lic[dirs] = "${LICSSTATEDIR}/${PN}" | 28 | do_populate_lic[dirs] = "${LICSSTATEDIR}/${LICENSE_DEPLOY_PATHCOMPONENT}/${PN}" |
23 | do_populate_lic[cleandirs] = "${LICSSTATEDIR}" | 29 | do_populate_lic[cleandirs] = "${LICSSTATEDIR}" |
24 | 30 | ||
25 | python do_populate_lic() { | 31 | python do_populate_lic() { |
@@ -29,7 +35,7 @@ python do_populate_lic() { | |||
29 | lic_files_paths = find_license_files(d) | 35 | lic_files_paths = find_license_files(d) |
30 | 36 | ||
31 | # The base directory we wrangle licenses to | 37 | # The base directory we wrangle licenses to |
32 | destdir = os.path.join(d.getVar('LICSSTATEDIR'), d.getVar('SSTATE_PKGARCH'), d.getVar('PN')) | 38 | destdir = os.path.join(d.getVar('LICSSTATEDIR'), d.getVar('LICENSE_DEPLOY_PATHCOMPONENT'), d.getVar('PN')) |
33 | copy_license_files(lic_files_paths, destdir) | 39 | copy_license_files(lic_files_paths, destdir) |
34 | info = get_recipe_info(d) | 40 | info = get_recipe_info(d) |
35 | with open(os.path.join(destdir, "recipeinfo"), "w") as f: | 41 | with open(os.path.join(destdir, "recipeinfo"), "w") as f: |
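The vardepvalue flag above matters because the expanded value becomes a real output path; with invented values the destination works out as:

import os

licsstatedir = "/build/tmp/work/foo/license-destdir"  # invented LICSSTATEDIR
pathcomponent = "core2-64"  # SSTATE_PKGARCH, or "native" for cross/native recipes
print(os.path.join(licsstatedir, pathcomponent, "foo"))
# /build/tmp/work/foo/license-destdir/core2-64/foo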
@@ -38,8 +44,7 @@ python do_populate_lic() { | |||
38 | oe.qa.exit_if_errors(d) | 44 | oe.qa.exit_if_errors(d) |
39 | } | 45 | } |
40 | 46 | ||
41 | PSEUDO_IGNORE_PATHS .= ",${@','.join(((d.getVar('COMMON_LICENSE_DIR') or '') + ' ' + (d.getVar('LICENSE_PATH') or '') + ' ' + d.getVar('COREBASE') + '/meta/COPYING').split())}" | 47 | # it would be better to copy them in do_install:append, but find_license_files is python |
42 | # it would be better to copy them in do_install:append, but find_license_filesa is python | ||
43 | python perform_packagecopy:prepend () { | 48 | python perform_packagecopy:prepend () { |
44 | enabled = oe.data.typed_value('LICENSE_CREATE_PACKAGE', d) | 49 | enabled = oe.data.typed_value('LICENSE_CREATE_PACKAGE', d) |
45 | if d.getVar('CLASSOVERRIDE') == 'class-target' and enabled: | 50 | if d.getVar('CLASSOVERRIDE') == 'class-target' and enabled: |
@@ -149,14 +154,14 @@ def find_license_files(d): | |||
149 | # and "with exceptions" being * | 154 | # and "with exceptions" being * |
150 | # we'll just strip out the modifier and put | 155 | # we'll just strip out the modifier and put |
151 | # the base license. | 156 | # the base license. |
152 | find_license(node.s.replace("+", "").replace("*", "")) | 157 | find_licenses(node.s.replace("+", "").replace("*", "")) |
153 | self.generic_visit(node) | 158 | self.generic_visit(node) |
154 | 159 | ||
155 | def visit_Constant(self, node): | 160 | def visit_Constant(self, node): |
156 | find_license(node.value.replace("+", "").replace("*", "")) | 161 | find_licenses(node.value.replace("+", "").replace("*", "")) |
157 | self.generic_visit(node) | 162 | self.generic_visit(node) |
158 | 163 | ||
159 | def find_license(license_type): | 164 | def find_licenses(license_type): |
160 | try: | 165 | try: |
161 | bb.utils.mkdirhier(gen_lic_dest) | 166 | bb.utils.mkdirhier(gen_lic_dest) |
162 | except: | 167 | except: |
@@ -249,171 +254,6 @@ def find_license_files(d): | |||
249 | 254 | ||
250 | return lic_files_paths | 255 | return lic_files_paths |
251 | 256 | ||
252 | def return_spdx(d, license): | ||
253 | """ | ||
254 | This function returns the spdx mapping of a license if it exists. | ||
255 | """ | ||
256 | return d.getVarFlag('SPDXLICENSEMAP', license) | ||
257 | |||
258 | def canonical_license(d, license): | ||
259 | """ | ||
260 | Return the canonical (SPDX) form of the license if available (so GPLv3 | ||
261 | becomes GPL-3.0-only) or the passed license if there is no canonical form. | ||
262 | """ | ||
263 | return d.getVarFlag('SPDXLICENSEMAP', license) or license | ||
264 | |||
265 | def expand_wildcard_licenses(d, wildcard_licenses): | ||
266 | """ | ||
267 | There are some common wildcard values users may want to use. Support them | ||
268 | here. | ||
269 | """ | ||
270 | licenses = set(wildcard_licenses) | ||
271 | mapping = { | ||
272 | "AGPL-3.0*" : ["AGPL-3.0-only", "AGPL-3.0-or-later"], | ||
273 | "GPL-3.0*" : ["GPL-3.0-only", "GPL-3.0-or-later"], | ||
274 | "LGPL-3.0*" : ["LGPL-3.0-only", "LGPL-3.0-or-later"], | ||
275 | } | ||
276 | for k in mapping: | ||
277 | if k in wildcard_licenses: | ||
278 | licenses.remove(k) | ||
279 | for item in mapping[k]: | ||
280 | licenses.add(item) | ||
281 | |||
282 | for l in licenses: | ||
283 | if l in oe.license.obsolete_license_list(): | ||
284 | bb.fatal("Error, %s is an obsolete license, please use an SPDX reference in INCOMPATIBLE_LICENSE" % l) | ||
285 | if "*" in l: | ||
286 | bb.fatal("Error, %s is an invalid license wildcard entry" % l) | ||
287 | |||
288 | return list(licenses) | ||
289 | |||
290 | def incompatible_license_contains(license, truevalue, falsevalue, d): | ||
291 | license = canonical_license(d, license) | ||
292 | bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split() | ||
293 | bad_licenses = expand_wildcard_licenses(d, bad_licenses) | ||
294 | return truevalue if license in bad_licenses else falsevalue | ||
295 | |||
296 | def incompatible_pkg_license(d, dont_want_licenses, license): | ||
297 | # Handles an "or" or two license sets provided by | ||
298 | # flattened_licenses(), pick one that works if possible. | ||
299 | def choose_lic_set(a, b): | ||
300 | return a if all(oe.license.license_ok(canonical_license(d, lic), | ||
301 | dont_want_licenses) for lic in a) else b | ||
302 | |||
303 | try: | ||
304 | licenses = oe.license.flattened_licenses(license, choose_lic_set) | ||
305 | except oe.license.LicenseError as exc: | ||
306 | bb.fatal('%s: %s' % (d.getVar('P'), exc)) | ||
307 | |||
308 | incompatible_lic = [] | ||
309 | for l in licenses: | ||
310 | license = canonical_license(d, l) | ||
311 | if not oe.license.license_ok(license, dont_want_licenses): | ||
312 | incompatible_lic.append(license) | ||
313 | |||
314 | return sorted(incompatible_lic) | ||
315 | |||
316 | def incompatible_license(d, dont_want_licenses, package=None): | ||
317 | """ | ||
318 | This function checks if a recipe has only incompatible licenses. It also | ||
319 | take into consideration 'or' operand. dont_want_licenses should be passed | ||
320 | as canonical (SPDX) names. | ||
321 | """ | ||
322 | import oe.license | ||
323 | license = d.getVar("LICENSE:%s" % package) if package else None | ||
324 | if not license: | ||
325 | license = d.getVar('LICENSE') | ||
326 | |||
327 | return incompatible_pkg_license(d, dont_want_licenses, license) | ||
328 | |||
329 | def check_license_flags(d): | ||
330 | """ | ||
331 | This function checks if a recipe has any LICENSE_FLAGS that | ||
332 | aren't acceptable. | ||
333 | |||
334 | If it does, it returns the all LICENSE_FLAGS missing from the list | ||
335 | of acceptable license flags, or all of the LICENSE_FLAGS if there | ||
336 | is no list of acceptable flags. | ||
337 | |||
338 | If everything is is acceptable, it returns None. | ||
339 | """ | ||
340 | |||
341 | def license_flag_matches(flag, acceptlist, pn): | ||
342 | """ | ||
343 | Return True if flag matches something in acceptlist, None if not. | ||
344 | |||
345 | Before we test a flag against the acceptlist, we append _${PN} | ||
346 | to it. We then try to match that string against the | ||
347 | acceptlist. This covers the normal case, where we expect | ||
348 | LICENSE_FLAGS to be a simple string like 'commercial', which | ||
349 | the user typically matches exactly in the acceptlist by | ||
350 | explicitly appending the package name e.g 'commercial_foo'. | ||
351 | If we fail the match however, we then split the flag across | ||
352 | '_' and append each fragment and test until we either match or | ||
353 | run out of fragments. | ||
354 | """ | ||
355 | flag_pn = ("%s_%s" % (flag, pn)) | ||
356 | for candidate in acceptlist: | ||
357 | if flag_pn == candidate: | ||
358 | return True | ||
359 | |||
360 | flag_cur = "" | ||
361 | flagments = flag_pn.split("_") | ||
362 | flagments.pop() # we've already tested the full string | ||
363 | for flagment in flagments: | ||
364 | if flag_cur: | ||
365 | flag_cur += "_" | ||
366 | flag_cur += flagment | ||
367 | for candidate in acceptlist: | ||
368 | if flag_cur == candidate: | ||
369 | return True | ||
370 | return False | ||
371 | |||
372 | def all_license_flags_match(license_flags, acceptlist): | ||
373 | """ Return all unmatched flags, None if all flags match """ | ||
374 | pn = d.getVar('PN') | ||
375 | split_acceptlist = acceptlist.split() | ||
376 | flags = [] | ||
377 | for flag in license_flags.split(): | ||
378 | if not license_flag_matches(flag, split_acceptlist, pn): | ||
379 | flags.append(flag) | ||
380 | return flags if flags else None | ||
381 | |||
382 | license_flags = d.getVar('LICENSE_FLAGS') | ||
383 | if license_flags: | ||
384 | acceptlist = d.getVar('LICENSE_FLAGS_ACCEPTED') | ||
385 | if not acceptlist: | ||
386 | return license_flags.split() | ||
387 | unmatched_flags = all_license_flags_match(license_flags, acceptlist) | ||
388 | if unmatched_flags: | ||
389 | return unmatched_flags | ||
390 | return None | ||
391 | |||
392 | def check_license_format(d): | ||
393 | """ | ||
394 | This function checks if LICENSE is well defined, | ||
395 | Validate operators in LICENSES. | ||
396 | No spaces are allowed between LICENSES. | ||
397 | """ | ||
398 | pn = d.getVar('PN') | ||
399 | licenses = d.getVar('LICENSE') | ||
400 | from oe.license import license_operator, license_operator_chars, license_pattern | ||
401 | |||
402 | elements = list(filter(lambda x: x.strip(), license_operator.split(licenses))) | ||
403 | for pos, element in enumerate(elements): | ||
404 | if license_pattern.match(element): | ||
405 | if pos > 0 and license_pattern.match(elements[pos - 1]): | ||
406 | oe.qa.handle_error('license-format', | ||
407 | '%s: LICENSE value "%s" has an invalid format - license names ' \ | ||
408 | 'must be separated by the following characters to indicate ' \ | ||
409 | 'the license selection: %s' % | ||
410 | (pn, licenses, license_operator_chars), d) | ||
411 | elif not license_operator.match(element): | ||
412 | oe.qa.handle_error('license-format', | ||
413 | '%s: LICENSE value "%s" has an invalid separator "%s" that is not ' \ | ||
414 | 'in the valid list of separators (%s)' % | ||
415 | (pn, licenses, element, license_operator_chars), d) | ||
416 | |||
417 | SSTATETASKS += "do_populate_lic" | 257 | SSTATETASKS += "do_populate_lic" |
418 | do_populate_lic[sstate-inputdirs] = "${LICSSTATEDIR}" | 258 | do_populate_lic[sstate-inputdirs] = "${LICSSTATEDIR}" |
419 | do_populate_lic[sstate-outputdirs] = "${LICENSE_DIRECTORY}/" | 259 | do_populate_lic[sstate-outputdirs] = "${LICENSE_DIRECTORY}/" |
diff --git a/meta/classes-global/logging.bbclass b/meta/classes-global/logging.bbclass index ce03abfe42..136f1e1733 100644 --- a/meta/classes-global/logging.bbclass +++ b/meta/classes-global/logging.bbclass | |||
@@ -32,6 +32,16 @@ bbnote() { | |||
32 | fi | 32 | fi |
33 | } | 33 | } |
34 | 34 | ||
35 | # Notify the user of a noteworthy condition. | ||
36 | # Output: logs console | ||
37 | bbverbnote() { | ||
38 | if [ -p ${LOGFIFO} ]; then | ||
39 | printf "%b\0" "bbverbnote $*" > ${LOGFIFO} | ||
40 | else | ||
41 | echo "NOTE: $*" | ||
42 | fi | ||
43 | } | ||
44 | |||
35 | # Print a warning to the log. Warnings are non-fatal, and do not | 45 | # Print a warning to the log. Warnings are non-fatal, and do not |
36 | # indicate a build failure. | 46 | # indicate a build failure. |
37 | # Output: logs console | 47 | # Output: logs console |
diff --git a/meta/classes-global/mirrors.bbclass b/meta/classes-global/mirrors.bbclass index 862648eec5..e31af488b9 100644 --- a/meta/classes-global/mirrors.bbclass +++ b/meta/classes-global/mirrors.bbclass | |||
@@ -6,26 +6,10 @@ | |||
6 | 6 | ||
7 | MIRRORS += "\ | 7 | MIRRORS += "\ |
8 | ${DEBIAN_MIRROR} http://snapshot.debian.org/archive/debian/20180310T215105Z/pool \ | 8 | ${DEBIAN_MIRROR} http://snapshot.debian.org/archive/debian/20180310T215105Z/pool \ |
9 | ${DEBIAN_MIRROR} http://snapshot.debian.org/archive/debian/20250101T023759Z/pool \ | ||
9 | ${DEBIAN_MIRROR} http://snapshot.debian.org/archive/debian-archive/20120328T092752Z/debian/pool \ | 10 | ${DEBIAN_MIRROR} http://snapshot.debian.org/archive/debian-archive/20120328T092752Z/debian/pool \ |
10 | ${DEBIAN_MIRROR} http://snapshot.debian.org/archive/debian-archive/20110127T084257Z/debian/pool \ | 11 | ${DEBIAN_MIRROR} http://snapshot.debian.org/archive/debian-archive/20110127T084257Z/debian/pool \ |
11 | ${DEBIAN_MIRROR} http://snapshot.debian.org/archive/debian-archive/20090802T004153Z/debian/pool \ | 12 | ${DEBIAN_MIRROR} http://snapshot.debian.org/archive/debian-archive/20090802T004153Z/debian/pool \ |
12 | ${DEBIAN_MIRROR} http://ftp.de.debian.org/debian/pool \ | ||
13 | ${DEBIAN_MIRROR} http://ftp.au.debian.org/debian/pool \ | ||
14 | ${DEBIAN_MIRROR} http://ftp.cl.debian.org/debian/pool \ | ||
15 | ${DEBIAN_MIRROR} http://ftp.hr.debian.org/debian/pool \ | ||
16 | ${DEBIAN_MIRROR} http://ftp.fi.debian.org/debian/pool \ | ||
17 | ${DEBIAN_MIRROR} http://ftp.hk.debian.org/debian/pool \ | ||
18 | ${DEBIAN_MIRROR} http://ftp.hu.debian.org/debian/pool \ | ||
19 | ${DEBIAN_MIRROR} http://ftp.ie.debian.org/debian/pool \ | ||
20 | ${DEBIAN_MIRROR} http://ftp.it.debian.org/debian/pool \ | ||
21 | ${DEBIAN_MIRROR} http://ftp.jp.debian.org/debian/pool \ | ||
22 | ${DEBIAN_MIRROR} http://ftp.no.debian.org/debian/pool \ | ||
23 | ${DEBIAN_MIRROR} http://ftp.pl.debian.org/debian/pool \ | ||
24 | ${DEBIAN_MIRROR} http://ftp.ro.debian.org/debian/pool \ | ||
25 | ${DEBIAN_MIRROR} http://ftp.si.debian.org/debian/pool \ | ||
26 | ${DEBIAN_MIRROR} http://ftp.es.debian.org/debian/pool \ | ||
27 | ${DEBIAN_MIRROR} http://ftp.se.debian.org/debian/pool \ | ||
28 | ${DEBIAN_MIRROR} http://ftp.tr.debian.org/debian/pool \ | ||
29 | ${GNU_MIRROR} https://mirrors.kernel.org/gnu \ | 13 | ${GNU_MIRROR} https://mirrors.kernel.org/gnu \ |
30 | ${KERNELORG_MIRROR} http://www.kernel.org/pub \ | 14 | ${KERNELORG_MIRROR} http://www.kernel.org/pub \ |
31 | ${GNUPG_MIRROR} ftp://ftp.gnupg.org/gcrypt \ | 15 | ${GNUPG_MIRROR} ftp://ftp.gnupg.org/gcrypt \ |
@@ -88,7 +72,7 @@ git://.*/.* git://HOST/git/PATH;protocol=https \ | |||
88 | BB_GIT_SHALLOW:pn-binutils = "1" | 72 | BB_GIT_SHALLOW:pn-binutils = "1" |
89 | BB_GIT_SHALLOW:pn-binutils-cross-${TARGET_ARCH} = "1" | 73 | BB_GIT_SHALLOW:pn-binutils-cross-${TARGET_ARCH} = "1" |
90 | BB_GIT_SHALLOW:pn-binutils-cross-canadian-${TRANSLATED_TARGET_ARCH} = "1" | 74 | BB_GIT_SHALLOW:pn-binutils-cross-canadian-${TRANSLATED_TARGET_ARCH} = "1" |
91 | BB_GIT_SHALLOW:pn-binutils-cross-testsuite = "1" | 75 | BB_GIT_SHALLOW:pn-binutils-testsuite = "1" |
92 | BB_GIT_SHALLOW:pn-binutils-crosssdk-${SDK_SYS} = "1" | 76 | BB_GIT_SHALLOW:pn-binutils-crosssdk-${SDK_SYS} = "1" |
93 | BB_GIT_SHALLOW:pn-binutils-native = "1" | 77 | BB_GIT_SHALLOW:pn-binutils-native = "1" |
94 | BB_GIT_SHALLOW:pn-nativesdk-binutils = "1" | 78 | BB_GIT_SHALLOW:pn-nativesdk-binutils = "1" |
diff --git a/meta/classes-global/package.bbclass b/meta/classes-global/package.bbclass index aa1eb5e901..9be1d6a5b1 100644 --- a/meta/classes-global/package.bbclass +++ b/meta/classes-global/package.bbclass | |||
@@ -447,10 +447,6 @@ def gen_packagevar(d, pkgvars="PACKAGEVARS"): | |||
447 | for p in pkgs: | 447 | for p in pkgs: |
448 | for v in vars: | 448 | for v in vars: |
449 | ret.append(v + ":" + p) | 449 | ret.append(v + ":" + p) |
450 | |||
451 | # Ensure that changes to INCOMPATIBLE_LICENSE re-run do_package for | ||
452 | # affected recipes. | ||
453 | ret.append('_exclude_incompatible-%s' % p) | ||
454 | return " ".join(ret) | 450 | return " ".join(ret) |
455 | 451 | ||
456 | 452 | ||
@@ -476,7 +472,7 @@ python do_package () { | |||
476 | # cache. This is useful if an item this class depends on changes in a | 472 | # cache. This is useful if an item this class depends on changes in a |
477 | # way that the output of this class changes. rpmdeps is a good example | 473 | # way that the output of this class changes. rpmdeps is a good example |
478 | # as any change to rpmdeps requires this to be rerun. | 474 | # as any change to rpmdeps requires this to be rerun. |
479 | # PACKAGE_BBCLASS_VERSION = "5" | 475 | # PACKAGE_BBCLASS_VERSION = "6" |
480 | 476 | ||
481 | # Init cachedpath | 477 | # Init cachedpath |
482 | global cpath | 478 | global cpath |
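With the '_exclude_incompatible-<pkg>' entries removed, gen_packagevar() returns only the plain per-package variable names, and the PACKAGE_BBCLASS_VERSION bump to 6 forces do_package output to be regenerated through the sstate cache. A standalone Python sketch of the simplified result, with hypothetical inputs:

    pkgs = ["foo", "foo-dev"]        # stand-in for PACKAGES
    pkgvars = ["FILES", "RDEPENDS"]  # stand-in for PACKAGEVARS entries
    ret = [v + ":" + p for p in pkgs for v in pkgvars]
    print(" ".join(ret))
    # -> FILES:foo RDEPENDS:foo FILES:foo-dev RDEPENDS:foo-dev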
diff --git a/meta/classes-global/package_ipk.bbclass b/meta/classes-global/package_ipk.bbclass index 209568ae5f..3e72c4c494 100644 --- a/meta/classes-global/package_ipk.bbclass +++ b/meta/classes-global/package_ipk.bbclass | |||
@@ -17,7 +17,7 @@ PKGWRITEDIRIPK = "${WORKDIR}/deploy-ipks" | |||
17 | # Program to be used to build opkg packages | 17 | # Program to be used to build opkg packages |
18 | OPKGBUILDCMD ??= 'opkg-build -Z zstd -a "${ZSTD_DEFAULTS}"' | 18 | OPKGBUILDCMD ??= 'opkg-build -Z zstd -a "${ZSTD_DEFAULTS}"' |
19 | 19 | ||
20 | OPKG_ARGS += "--force_postinstall --prefer-arch-to-version" | 20 | OPKG_ARGS += "--force-postinstall --prefer-arch-to-version" |
21 | OPKG_ARGS += "${@['', '--no-install-recommends'][d.getVar("NO_RECOMMENDATIONS") == "1"]}" | 21 | OPKG_ARGS += "${@['', '--no-install-recommends'][d.getVar("NO_RECOMMENDATIONS") == "1"]}" |
22 | OPKG_ARGS += "${@['', '--add-exclude ' + ' --add-exclude '.join((d.getVar('PACKAGE_EXCLUDE') or "").split())][(d.getVar("PACKAGE_EXCLUDE") or "").strip() != ""]}" | 22 | OPKG_ARGS += "${@['', '--add-exclude ' + ' --add-exclude '.join((d.getVar('PACKAGE_EXCLUDE') or "").split())][(d.getVar("PACKAGE_EXCLUDE") or "").strip() != ""]}" |
23 | 23 | ||
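The OPKG_ARGS lines rely on BitBake's inline-Python expansion ${@...}, where indexing a two-element list with a boolean picks either the empty string or the flag. A standalone sketch of the idiom:

    # bool subclasses int, so ['', flag][condition] selects index 0 or 1
    no_recommendations = "1"   # stand-in for d.getVar("NO_RECOMMENDATIONS")
    print(['', '--no-install-recommends'][no_recommendations == "1"])
    # -> --no-install-recommends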
diff --git a/meta/classes-global/package_rpm.bbclass b/meta/classes-global/package_rpm.bbclass index 2e3e4e8c79..f383ed140e 100644 --- a/meta/classes-global/package_rpm.bbclass +++ b/meta/classes-global/package_rpm.bbclass | |||
@@ -10,7 +10,7 @@ IMAGE_PKGTYPE ?= "rpm" | |||
10 | 10 | ||
11 | RPM = "rpm" | 11 | RPM = "rpm" |
12 | RPMBUILD = "rpmbuild" | 12 | RPMBUILD = "rpmbuild" |
13 | RPMBUILD_COMPMODE ?= "${@'w19T%d.zstdio' % int(d.getVar('ZSTD_THREADS'))}" | 13 | RPMBUILD_COMPMODE ?= "${@'w%dT%d.zstdio' % (int(d.getVar('ZSTD_COMPRESSION_LEVEL')), int(d.getVar('ZSTD_THREADS')))}" |
14 | 14 | ||
15 | PKGWRITEDIRRPM = "${WORKDIR}/deploy-rpms" | 15 | PKGWRITEDIRRPM = "${WORKDIR}/deploy-rpms" |
16 | 16 | ||
@@ -38,6 +38,7 @@ def filter_nativesdk_deps(srcname, var): | |||
38 | 38 | ||
39 | # Construct per file dependencies file | 39 | # Construct per file dependencies file |
40 | def write_rpm_perfiledata(srcname, d): | 40 | def write_rpm_perfiledata(srcname, d): |
41 | import oe.package | ||
41 | workdir = d.getVar('WORKDIR') | 42 | workdir = d.getVar('WORKDIR') |
42 | packages = d.getVar('PACKAGES') | 43 | packages = d.getVar('PACKAGES') |
43 | pkgd = d.getVar('PKGD') | 44 | pkgd = d.getVar('PKGD') |
@@ -53,12 +54,7 @@ def write_rpm_perfiledata(srcname, d): | |||
53 | key = "FILE" + varname + ":" + dfile + ":" + pkg | 54 | key = "FILE" + varname + ":" + dfile + ":" + pkg |
54 | deps = filter_nativesdk_deps(srcname, d.getVar(key) or "") | 55 | deps = filter_nativesdk_deps(srcname, d.getVar(key) or "") |
55 | depends_dict = bb.utils.explode_dep_versions(deps) | 56 | depends_dict = bb.utils.explode_dep_versions(deps) |
56 | file = dfile.replace("@underscore@", "_") | 57 | file = oe.package.file_reverse_translate(dfile) |
57 | file = file.replace("@closebrace@", "]") | ||
58 | file = file.replace("@openbrace@", "[") | ||
59 | file = file.replace("@tab@", "\t") | ||
60 | file = file.replace("@space@", " ") | ||
61 | file = file.replace("@at@", "@") | ||
62 | outfile.write('"' + pkgd + file + '" : "') | 58 | outfile.write('"' + pkgd + file + '" : "') |
63 | for dep in depends_dict: | 59 | for dep in depends_dict: |
64 | ver = depends_dict[dep] | 60 | ver = depends_dict[dep] |
@@ -108,7 +104,7 @@ python write_specfile () { | |||
108 | # append information for logs and patches to %prep | 104 | # append information for logs and patches to %prep |
109 | def add_prep(d, spec_files_bottom): | 105 | def add_prep(d, spec_files_bottom): |
110 | if d.getVarFlag('ARCHIVER_MODE', 'srpm') == '1' and bb.data.inherits_class('archiver', d): | 106 | if d.getVarFlag('ARCHIVER_MODE', 'srpm') == '1' and bb.data.inherits_class('archiver', d): |
111 | spec_files_bottom.append('%%prep -n %s' % d.getVar('PN')) | 107 | spec_files_bottom.append('%%prep') |
112 | spec_files_bottom.append('%s' % "echo \"include logs and patches, Please check them in SOURCES\"") | 108 | spec_files_bottom.append('%s' % "echo \"include logs and patches, Please check them in SOURCES\"") |
113 | spec_files_bottom.append('') | 109 | spec_files_bottom.append('') |
114 | 110 | ||
@@ -191,7 +187,7 @@ python write_specfile () { | |||
191 | if not len(depends_dict[dep]): | 187 | if not len(depends_dict[dep]): |
192 | array.append("%s: %s" % (tag, dep)) | 188 | array.append("%s: %s" % (tag, dep)) |
193 | 189 | ||
194 | def walk_files(walkpath, target, conffiles, dirfiles): | 190 | def walk_files(walkpath, target, conffiles): |
195 | # We can race against the ipk/deb backends which create CONTROL or DEBIAN directories | 191 | # We can race against the ipk/deb backends which create CONTROL or DEBIAN directories |
196 | # when packaging. We just ignore these files which are created in | 192 | # when packaging. We just ignore these files which are created in |
197 | # packages-split/ and not package/ | 193 | # packages-split/ and not package/ |
@@ -205,14 +201,22 @@ python write_specfile () { | |||
205 | try: | 201 | try: |
206 | owner = pwd.getpwuid(stat_f.st_uid).pw_name | 202 | owner = pwd.getpwuid(stat_f.st_uid).pw_name |
207 | except Exception as e: | 203 | except Exception as e: |
208 | bb.error("Content of /etc/passwd in sysroot:\n{}".format( | 204 | filename = d.getVar('RECIPE_SYSROOT') + '/etc/passwd' |
209 | open(d.getVar("RECIPE_SYSROOT") +"/etc/passwd").read())) | 205 | if os.path.exists(filename): |
206 | bb.error("Content of /etc/passwd in sysroot:\n{}".format( | ||
207 | open(filename).read())) | ||
208 | else: | ||
209 | bb.error("File {} doesn't exist in sysroot!".format(filename)) | ||
210 | raise e | 210 | raise e |
211 | try: | 211 | try: |
212 | group = grp.getgrgid(stat_f.st_gid).gr_name | 212 | group = grp.getgrgid(stat_f.st_gid).gr_name |
213 | except Exception as e: | 213 | except Exception as e: |
214 | bb.error("Content of /etc/group in sysroot:\n{}".format( | 214 | filename = d.getVar("RECIPE_SYSROOT") + "/etc/group"
215 | open(d.getVar("RECIPE_SYSROOT") +"/etc/group").read())) | 215 | if os.path.exists(filename): |
216 | bb.error("Content of /etc/group in sysroot:\n{}".format( | ||
217 | open(filename).read())) | ||
218 | else: | ||
219 | bb.error("File {} doesn't exist in sysroot!".format(filename)) | ||
216 | raise e | 220 | raise e |
217 | return "%attr({:o},{},{}) ".format(mode, owner, group) | 221 | return "%attr({:o},{},{}) ".format(mode, owner, group) |
218 | 222 | ||
@@ -233,22 +237,12 @@ python write_specfile () { | |||
233 | return False | 237 | return False |
234 | dirs[:] = [dir for dir in dirs if not move_to_files(dir)] | 238 | dirs[:] = [dir for dir in dirs if not move_to_files(dir)] |
235 | 239 | ||
236 | # Directory handling can happen in two ways, either DIRFILES is not set at all | 240 | for dir in dirs: |
237 | # in which case we fall back to the older behaviour of packages owning all their | 241 | if dir == "CONTROL" or dir == "DEBIAN": |
238 | # directories | 242 | continue |
239 | if dirfiles is None: | 243 | p = path + '/' + dir |
240 | for dir in dirs: | 244 | # All packages own the directories their files are in... |
241 | if dir == "CONTROL" or dir == "DEBIAN": | 245 | target.append(get_attr(dir) + '%dir "' + escape_chars(p) + '"') |
242 | continue | ||
243 | p = path + '/' + dir | ||
244 | # All packages own the directories their files are in... | ||
245 | target.append(get_attr(dir) + '%dir "' + escape_chars(p) + '"') | ||
246 | elif path: | ||
247 | # packages own only empty directories or explict directory. | ||
248 | # This will prevent the overlapping of security permission. | ||
249 | attr = get_attr(path) | ||
250 | if (not files and not dirs) or path in dirfiles: | ||
251 | target.append(attr + '%dir "' + escape_chars(path) + '"') | ||
252 | 246 | ||
253 | for file in files: | 247 | for file in files: |
254 | if file == "CONTROL" or file == "DEBIAN": | 248 | if file == "CONTROL" or file == "DEBIAN": |
@@ -363,9 +357,6 @@ python write_specfile () { | |||
363 | localdata.setVar('OVERRIDES', d.getVar("OVERRIDES", False) + ":" + pkg) | 357 | localdata.setVar('OVERRIDES', d.getVar("OVERRIDES", False) + ":" + pkg) |
364 | 358 | ||
365 | conffiles = oe.package.get_conffiles(pkg, d) | 359 | conffiles = oe.package.get_conffiles(pkg, d) |
366 | dirfiles = localdata.getVar('DIRFILES') | ||
367 | if dirfiles is not None: | ||
368 | dirfiles = dirfiles.split() | ||
369 | 360 | ||
370 | splitname = pkgname | 361 | splitname = pkgname |
371 | 362 | ||
@@ -430,7 +421,7 @@ python write_specfile () { | |||
430 | srcrpostrm = splitrpostrm | 421 | srcrpostrm = splitrpostrm |
431 | 422 | ||
432 | file_list = [] | 423 | file_list = [] |
433 | walk_files(root, file_list, conffiles, dirfiles) | 424 | walk_files(root, file_list, conffiles) |
434 | if not file_list and localdata.getVar('ALLOW_EMPTY', False) != "1": | 425 | if not file_list and localdata.getVar('ALLOW_EMPTY', False) != "1": |
435 | bb.note("Not creating empty RPM package for %s" % splitname) | 426 | bb.note("Not creating empty RPM package for %s" % splitname) |
436 | else: | 427 | else: |
@@ -522,7 +513,7 @@ python write_specfile () { | |||
522 | 513 | ||
523 | # Now process files | 514 | # Now process files |
524 | file_list = [] | 515 | file_list = [] |
525 | walk_files(root, file_list, conffiles, dirfiles) | 516 | walk_files(root, file_list, conffiles) |
526 | if not file_list and localdata.getVar('ALLOW_EMPTY', False) != "1": | 517 | if not file_list and localdata.getVar('ALLOW_EMPTY', False) != "1": |
527 | bb.note("Not creating empty RPM package for %s" % splitname) | 518 | bb.note("Not creating empty RPM package for %s" % splitname) |
528 | else: | 519 | else: |
@@ -705,9 +696,10 @@ python do_package_rpm () { | |||
705 | cmd = cmd + " --define '_use_internal_dependency_generator 0'" | 696 | cmd = cmd + " --define '_use_internal_dependency_generator 0'" |
706 | cmd = cmd + " --define '_binaries_in_noarch_packages_terminate_build 0'" | 697 | cmd = cmd + " --define '_binaries_in_noarch_packages_terminate_build 0'" |
707 | cmd = cmd + " --define '_build_id_links none'" | 698 | cmd = cmd + " --define '_build_id_links none'" |
699 | cmd = cmd + " --define '_smp_ncpus_max 4'" | ||
708 | cmd = cmd + " --define '_source_payload %s'" % rpmbuild_compmode | 700 | cmd = cmd + " --define '_source_payload %s'" % rpmbuild_compmode |
709 | cmd = cmd + " --define '_binary_payload %s'" % rpmbuild_compmode | 701 | cmd = cmd + " --define '_binary_payload %s'" % rpmbuild_compmode |
710 | cmd = cmd + " --define 'clamp_mtime_to_source_date_epoch 1'" | 702 | cmd = cmd + " --define 'build_mtime_policy clamp_to_source_date_epoch'" |
711 | cmd = cmd + " --define 'use_source_date_epoch_as_buildtime 1'" | 703 | cmd = cmd + " --define 'use_source_date_epoch_as_buildtime 1'" |
712 | cmd = cmd + " --define '_buildhost reproducible'" | 704 | cmd = cmd + " --define '_buildhost reproducible'" |
713 | cmd = cmd + " --define '__font_provides %{nil}'" | 705 | cmd = cmd + " --define '__font_provides %{nil}'" |
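Two notes on the hunks above. RPMBUILD_COMPMODE now takes the zstd level from ZSTD_COMPRESSION_LEVEL rather than hard-coding 19, so with a configured level of 19 and 8 threads (example values) the expansion is:

    print('w%dT%d.zstdio' % (19, 8))   # -> w19T8.zstdio

The per-file dependency code meanwhile delegates to oe.package.file_reverse_translate(), which presumably undoes the @underscore@/@openbrace@/@closebrace@/@tab@/@space@/@at@ escaping that the removed replace() chain handled inline.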
diff --git a/meta/classes-global/retain.bbclass b/meta/classes-global/retain.bbclass new file mode 100644 index 0000000000..46e8c256cf --- /dev/null +++ b/meta/classes-global/retain.bbclass | |||
@@ -0,0 +1,182 @@ | |||
1 | # Creates a tarball of the work directory for a recipe when one of its | ||
2 | # tasks fails, as well as tarballs of any other nominated directories. | ||
3 | # Useful in cases where the environment in which builds are run is | ||
4 | # ephemeral or otherwise inaccessible for examination during | ||
5 | # debugging. | ||
6 | # | ||
7 | # To enable, simply add the following to your configuration: | ||
8 | # | ||
9 | # INHERIT += "retain" | ||
10 | # | ||
11 | # You can specify the recipe-specific directories to save upon failure | ||
12 | # or always (space-separated), e.g.: | ||
13 | # | ||
14 | # RETAIN_DIRS_FAILURE = "${WORKDIR};prefix=workdir" # default | ||
15 | # RETAIN_DIRS_ALWAYS = "${T}" | ||
16 | # | ||
17 | # Naturally you can use overrides to limit it to a specific recipe: | ||
18 | # RETAIN_DIRS_ALWAYS:pn-somerecipe = "${T}" | ||
19 | # | ||
20 | # You can also specify global (non-recipe-specific) directories to save: | ||
21 | # | ||
22 | # RETAIN_DIRS_GLOBAL_FAILURE = "${LOG_DIR}" | ||
23 | # RETAIN_DIRS_GLOBAL_ALWAYS = "${BUILDSTATS_BASE}" | ||
24 | # | ||
25 | # If you wish to use a different tarball name prefix than the default of | ||
26 | # the directory name, you can do so by specifying a ;prefix= followed by | ||
27 | # the desired prefix (no spaces) in any of the RETAIN_DIRS_* variables. | ||
28 | # e.g. to always save the log files with "recipelogs" as the prefix for | ||
29 | # the tarball of ${T} you would do this: | ||
30 | # | ||
31 | # RETAIN_DIRS_ALWAYS = "${T};prefix=recipelogs" | ||
32 | # | ||
33 | # Notes: | ||
34 | # * For this to be useful you also need corresponding logic in your build | ||
35 | # orchestration tool to pick up any files written out to RETAIN_OUTDIR | ||
36 | # (with the other assumption being that no files are present there at | ||
37 | # the start of the build, since there is no logic to purge old files). | ||
38 | # * Work directories can be quite large, so saving them can take some time | ||
39 | # and of course space. | ||
40 | # * Tarball creation is deferred to the end of the build, thus you will | ||
41 | # get the state at the end, not immediately upon failure. | ||
42 | # * Extra directories must naturally be populated at the time the retain | ||
43 | # class goes to save them (build completion); to try to ensure this for | ||
44 | # things that are also saved on build completion (e.g. buildstats), put | ||
45 | # the INHERIT += "retain" after the INHERIT += lines for the class that | ||
46 | # is writing out the data that you wish to save. | ||
47 | # * The tarballs have the tarball name as a top-level directory so that | ||
48 | # multiple tarballs can be extracted side-by-side easily. | ||
49 | # | ||
50 | # Copyright (c) 2020, 2024 Microsoft Corporation | ||
51 | # | ||
52 | # SPDX-License-Identifier: GPL-2.0-only | ||
53 | # | ||
54 | |||
55 | RETAIN_OUTDIR ?= "${TMPDIR}/retained" | ||
56 | RETAIN_DIRS_FAILURE ?= "${WORKDIR};prefix=workdir" | ||
57 | RETAIN_DIRS_ALWAYS ?= "" | ||
58 | RETAIN_DIRS_GLOBAL_FAILURE ?= "" | ||
59 | RETAIN_DIRS_GLOBAL_ALWAYS ?= "" | ||
60 | RETAIN_TARBALL_SUFFIX ?= "${DATETIME}.tar.gz" | ||
61 | RETAIN_ENABLED ?= "1" | ||
62 | |||
63 | |||
64 | def retain_retain_dir(desc, tarprefix, path, tarbasepath, d): | ||
65 | import datetime | ||
66 | |||
67 | outdir = d.getVar('RETAIN_OUTDIR') | ||
68 | bb.utils.mkdirhier(outdir) | ||
69 | suffix = d.getVar('RETAIN_TARBALL_SUFFIX') | ||
70 | tarname = '%s_%s' % (tarprefix, suffix) | ||
71 | tarfp = os.path.join(outdir, '%s' % tarname) | ||
72 | tardir = os.path.relpath(path, tarbasepath) | ||
73 | cmdargs = ['tar', 'cfa', tarfp] | ||
74 | # Prefix paths within the tarball with the tarball name so that | ||
75 | # multiple tarballs can be extracted side-by-side | ||
76 | tarname_noext = os.path.splitext(tarname)[0] | ||
77 | if tarname_noext.endswith('.tar'): | ||
78 | tarname_noext = tarname_noext[:-4] | ||
79 | cmdargs += ['--transform', 's:^:%s/:' % tarname_noext] | ||
80 | cmdargs += [tardir] | ||
81 | try: | ||
82 | bb.process.run(cmdargs, cwd=tarbasepath) | ||
83 | except bb.process.ExecutionError as e: | ||
84 | # It is possible for other tasks to be writing to the workdir | ||
85 | # while we are tarring it up, in which case tar will return 1, | ||
86 | # but we don't care in this situation (tar returns 2 for other | ||
87 | # errors so we will see those) | ||
88 | if e.exitcode != 1: | ||
89 | bb.warn('retain: error saving %s: %s' % (desc, str(e))) | ||
90 | |||
91 | |||
92 | addhandler retain_task_handler | ||
93 | retain_task_handler[eventmask] = "bb.build.TaskFailed bb.build.TaskSucceeded" | ||
94 | |||
95 | addhandler retain_build_handler | ||
96 | retain_build_handler[eventmask] = "bb.event.BuildStarted bb.event.BuildCompleted" | ||
97 | |||
98 | python retain_task_handler() { | ||
99 | if d.getVar('RETAIN_ENABLED') != '1': | ||
100 | return | ||
101 | |||
102 | dirs = d.getVar('RETAIN_DIRS_ALWAYS') | ||
103 | if isinstance(e, bb.build.TaskFailed): | ||
104 | dirs += ' ' + d.getVar('RETAIN_DIRS_FAILURE') | ||
105 | |||
106 | dirs = dirs.strip().split() | ||
107 | if dirs: | ||
108 | outdir = d.getVar('RETAIN_OUTDIR') | ||
109 | bb.utils.mkdirhier(outdir) | ||
110 | dirlist_file = os.path.join(outdir, 'retain_dirs.list') | ||
111 | pn = d.getVar('PN') | ||
112 | taskname = d.getVar('BB_CURRENTTASK') | ||
113 | with open(dirlist_file, 'a') as f: | ||
114 | for entry in dirs: | ||
115 | f.write('%s %s %s\n' % (pn, taskname, entry)) | ||
116 | } | ||
117 | |||
118 | python retain_build_handler() { | ||
119 | outdir = d.getVar('RETAIN_OUTDIR') | ||
120 | dirlist_file = os.path.join(outdir, 'retain_dirs.list') | ||
121 | |||
122 | if isinstance(e, bb.event.BuildStarted): | ||
123 | if os.path.exists(dirlist_file): | ||
124 | os.remove(dirlist_file) | ||
125 | return | ||
126 | |||
127 | if d.getVar('RETAIN_ENABLED') != '1': | ||
128 | return | ||
129 | |||
130 | savedirs = {} | ||
131 | try: | ||
132 | with open(dirlist_file, 'r') as f: | ||
133 | for line in f: | ||
134 | pn, _, path = line.rstrip().split() | ||
135 | if path not in savedirs: | ||
136 | savedirs[path] = pn | ||
137 | os.remove(dirlist_file) | ||
138 | except FileNotFoundError: | ||
139 | pass | ||
140 | |||
141 | if e.getFailures(): | ||
142 | for path in (d.getVar('RETAIN_DIRS_GLOBAL_FAILURE') or '').strip().split(): | ||
143 | savedirs[path] = '' | ||
144 | |||
145 | for path in (d.getVar('RETAIN_DIRS_GLOBAL_ALWAYS') or '').strip().split(): | ||
146 | savedirs[path] = '' | ||
147 | |||
148 | if savedirs: | ||
149 | bb.plain('NOTE: retain: retaining build output...') | ||
150 | count = 0 | ||
151 | for path, pn in savedirs.items(): | ||
152 | prefix = None | ||
153 | if ';' in path: | ||
154 | pathsplit = path.split(';') | ||
155 | path = pathsplit[0] | ||
156 | for param in pathsplit[1:]: | ||
157 | if '=' in param: | ||
158 | name, value = param.split('=', 1) | ||
159 | if name == 'prefix': | ||
160 | prefix = value | ||
161 | else: | ||
162 | bb.error('retain: invalid parameter "%s" in RETAIN_* variable value' % param) | ||
163 | return | ||
164 | else: | ||
165 | bb.error('retain: parameter "%s" missing value in RETAIN_* variable value' % param) | ||
166 | return | ||
167 | if prefix: | ||
168 | itemname = prefix | ||
169 | else: | ||
170 | itemname = os.path.basename(path) | ||
171 | if pn: | ||
172 | # Always add the recipe name in front | ||
173 | itemname = pn + '_' + itemname | ||
174 | if os.path.exists(path): | ||
175 | retain_retain_dir(itemname, itemname, path, os.path.dirname(path), d) | ||
176 | count += 1 | ||
177 | else: | ||
178 | bb.warn('retain: path %s does not currently exist' % path) | ||
179 | if count: | ||
180 | item = 'archive' if count == 1 else 'archives' | ||
181 | bb.plain('NOTE: retain: saved %d %s to %s' % (count, item, outdir)) | ||
182 | } | ||
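Putting the knobs above together: each retained directory becomes an archive named <pn>_<prefix-or-basename>_${DATETIME}.tar.gz under RETAIN_OUTDIR, with the archive name repeated as the top-level directory inside. A configuration sketch with a hypothetical recipe and timestamp:

    INHERIT += "retain"
    RETAIN_DIRS_ALWAYS = "${T};prefix=recipelogs"

    # A do_compile failure in recipe "busybox" would then leave e.g.:
    #   tmp/retained/busybox_workdir_20250101120000.tar.gz     (from RETAIN_DIRS_FAILURE)
    #   tmp/retained/busybox_recipelogs_20250101120000.tar.gz  (from RETAIN_DIRS_ALWAYS)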
diff --git a/meta/classes-global/sanity.bbclass b/meta/classes-global/sanity.bbclass index 180c6b77d8..1044ed9cc6 100644 --- a/meta/classes-global/sanity.bbclass +++ b/meta/classes-global/sanity.bbclass | |||
@@ -299,6 +299,11 @@ def check_path_length(filepath, pathname, limit): | |||
299 | return "The length of %s is longer than %s, this would cause unexpected errors, please use a shorter path.\n" % (pathname, limit) | 299 | return "The length of %s is longer than %s, this would cause unexpected errors, please use a shorter path.\n" % (pathname, limit) |
300 | return "" | 300 | return "" |
301 | 301 | ||
302 | def check_non_ascii(filepath, pathname): | ||
303 | if not filepath.isascii(): | ||
304 | return "Non-ASCII character(s) in %s path (\"%s\") detected. This would cause build failures as we build software that doesn't support this.\n" % (pathname, filepath) | ||
305 | return "" | ||
306 | |||
302 | def get_filesystem_id(path): | 307 | def get_filesystem_id(path): |
303 | import subprocess | 308 | import subprocess |
304 | try: | 309 | try: |
@@ -475,6 +480,31 @@ def check_wsl(d): | |||
475 | bb.warn("You are running bitbake under WSLv2, this works properly but you should optimize your VHDX file eventually to avoid running out of storage space") | 480 | bb.warn("You are running bitbake under WSLv2, this works properly but you should optimize your VHDX file eventually to avoid running out of storage space") |
476 | return None | 481 | return None |
477 | 482 | ||
483 | def check_userns(): | ||
484 | """ | ||
485 | Check that user namespaces are functional, as they're used for network isolation. | ||
486 | """ | ||
487 | |||
488 | # There is a known failure case with AppArmor where the unshare() call | ||
489 | # succeeds (at which point the uid is nobody) but writing to the uid_map | ||
490 | # fails (so the uid isn't reset back to the user's uid). We can detect this. | ||
491 | parentuid = os.getuid() | ||
492 | if not bb.utils.is_local_uid(parentuid): | ||
493 | return None | ||
494 | pid = os.fork() | ||
495 | if not pid: | ||
496 | try: | ||
497 | bb.utils.disable_network() | ||
498 | except: | ||
499 | pass | ||
500 | os._exit(parentuid != os.getuid()) | ||
501 | |||
502 | ret = os.waitpid(pid, 0)[1] | ||
503 | if ret: | ||
504 | bb.fatal("User namespaces are not usable by BitBake, possibly due to AppArmor.\n" | ||
505 | "See https://discourse.ubuntu.com/t/ubuntu-24-04-lts-noble-numbat-release-notes/39890#unprivileged-user-namespace-restrictions for more information.") | ||
506 | |||
507 | |||
478 | # Require at least gcc version 8.0 | 508 | # Require at least gcc version 8.0 |
479 | # | 509 | # |
480 | # This can be fixed on CentOS-7 with devtoolset-6+ | 510 | # This can be fixed on CentOS-7 with devtoolset-6+ |
@@ -484,23 +514,23 @@ def check_wsl(d): | |||
484 | # built buildtools-extended-tarball) | 514 | # built buildtools-extended-tarball) |
485 | # | 515 | # |
486 | def check_gcc_version(sanity_data): | 516 | def check_gcc_version(sanity_data): |
487 | import subprocess | 517 | version = oe.utils.get_host_gcc_version(sanity_data) |
488 | 518 | if bb.utils.vercmp_string_op(version, "8.0", "<"): | |
489 | build_cc, version = oe.utils.get_host_compiler_version(sanity_data) | 519 | return "Your version of gcc is older than 8.0 and will break builds. Please install a newer version of gcc (you could use the project's buildtools-extended-tarball or use scripts/install-buildtools).\n" |
490 | if build_cc.strip() == "gcc": | ||
491 | if bb.utils.vercmp_string_op(version, "8.0", "<"): | ||
492 | return "Your version of gcc is older than 8.0 and will break builds. Please install a newer version of gcc (you could use the project's buildtools-extended-tarball or use scripts/install-buildtools).\n" | ||
493 | return None | 520 | return None |
494 | 521 | ||
495 | # Tar version 1.24 and onwards handle overwriting symlinks correctly | 522 | # Tar version 1.24 and onwards handle overwriting symlinks correctly |
496 | # but earlier versions do not; this needs to work properly for sstate | 523 | # but earlier versions do not; this needs to work properly for sstate |
497 | # Version 1.28 is needed so opkg-build works correctly when reproducible builds are enabled | 524 | # Version 1.28 is needed so opkg-build works correctly when reproducible builds are enabled |
525 | # GNU tar is assumed to be used as tar in poky | ||
498 | def check_tar_version(sanity_data): | 526 | def check_tar_version(sanity_data): |
499 | import subprocess | 527 | import subprocess |
500 | try: | 528 | try: |
501 | result = subprocess.check_output(["tar", "--version"], stderr=subprocess.STDOUT).decode('utf-8') | 529 | result = subprocess.check_output(["tar", "--version"], stderr=subprocess.STDOUT).decode('utf-8') |
502 | except subprocess.CalledProcessError as e: | 530 | except subprocess.CalledProcessError as e: |
503 | return "Unable to execute tar --version, exit code %d\n%s\n" % (e.returncode, e.output) | 531 | return "Unable to execute tar --version, exit code %d\n%s\n" % (e.returncode, e.output) |
532 | if "GNU" not in result: | ||
533 | return "Your version of tar is not gtar. Please install gtar (you could use the project's buildtools-tarball from our last release or use scripts/install-buildtools).\n" | ||
504 | version = result.split()[3] | 534 | version = result.split()[3] |
505 | if bb.utils.vercmp_string_op(version, "1.28", "<"): | 535 | if bb.utils.vercmp_string_op(version, "1.28", "<"): |
506 | return "Your version of tar is older than 1.28 and does not have the support needed to enable reproducible builds. Please install a newer version of tar (you could use the project's buildtools-tarball from our last release or use scripts/install-buildtools).\n" | 536 | return "Your version of tar is older than 1.28 and does not have the support needed to enable reproducible builds. Please install a newer version of tar (you could use the project's buildtools-tarball from our last release or use scripts/install-buildtools).\n" |
@@ -574,6 +604,28 @@ def drop_v14_cross_builds(d): | |||
574 | bb.utils.remove(stamp + "*") | 604 | bb.utils.remove(stamp + "*") |
575 | bb.utils.remove(workdir, recurse = True) | 605 | bb.utils.remove(workdir, recurse = True) |
576 | 606 | ||
607 | def check_cpp_toolchain_flag(d, flag, error_message=None): | ||
608 | """ | ||
609 | Checks if the g++ compiler supports the given flag | ||
610 | """ | ||
611 | import shlex | ||
612 | import subprocess | ||
613 | |||
614 | cpp_code = """ | ||
615 | #include <iostream> | ||
616 | int main() { | ||
617 | std::cout << "Hello, World!" << std::endl; | ||
618 | return 0; | ||
619 | } | ||
620 | """ | ||
621 | |||
622 | cmd = ["g++", "-x", "c++", "-", "-o", "/dev/null", flag] | ||
623 | try: | ||
624 | subprocess.run(cmd, input=cpp_code, capture_output=True, text=True, check=True) | ||
625 | return None | ||
626 | except subprocess.CalledProcessError as e: | ||
627 | return error_message or f"An unexpected issue occurred during the C++ toolchain check: {str(e)}" | ||
628 | |||
577 | def sanity_handle_abichanges(status, d): | 629 | def sanity_handle_abichanges(status, d): |
578 | # | 630 | # |
579 | # Check the 'ABI' of TMPDIR | 631 | # Check the 'ABI' of TMPDIR |
@@ -638,17 +690,18 @@ def check_sanity_version_change(status, d): | |||
638 | status.addresult(check_git_version(d)) | 690 | status.addresult(check_git_version(d)) |
639 | status.addresult(check_perl_modules(d)) | 691 | status.addresult(check_perl_modules(d)) |
640 | status.addresult(check_wsl(d)) | 692 | status.addresult(check_wsl(d)) |
693 | status.addresult(check_userns()) | ||
641 | 694 | ||
642 | missing = "" | 695 | missing = "" |
643 | 696 | ||
644 | if not check_app_exists("${MAKE}", d): | 697 | if not check_app_exists("${MAKE}", d): |
645 | missing = missing + "GNU make," | 698 | missing = missing + "GNU make," |
646 | 699 | ||
647 | if not check_app_exists('${BUILD_CC}', d): | 700 | if not check_app_exists('gcc', d): |
648 | missing = missing + "C Compiler (%s)," % d.getVar("BUILD_CC") | 701 | missing = missing + "C Compiler (gcc)," |
649 | 702 | ||
650 | if not check_app_exists('${BUILD_CXX}', d): | 703 | if not check_app_exists('g++', d): |
651 | missing = missing + "C++ Compiler (%s)," % d.getVar("BUILD_CXX") | 704 | missing = missing + "C++ Compiler (g++)," |
652 | 705 | ||
653 | required_utilities = d.getVar('SANITY_REQUIRED_UTILITIES') | 706 | required_utilities = d.getVar('SANITY_REQUIRED_UTILITIES') |
654 | 707 | ||
@@ -668,6 +721,7 @@ def check_sanity_version_change(status, d): | |||
668 | # Check that TMPDIR isn't on a filesystem with limited filename length (eg. eCryptFS) | 721 | # Check that TMPDIR isn't on a filesystem with limited filename length (eg. eCryptFS) |
669 | import stat | 722 | import stat |
670 | tmpdir = d.getVar('TMPDIR') | 723 | tmpdir = d.getVar('TMPDIR') |
724 | topdir = d.getVar('TOPDIR') | ||
671 | status.addresult(check_create_long_filename(tmpdir, "TMPDIR")) | 725 | status.addresult(check_create_long_filename(tmpdir, "TMPDIR")) |
672 | tmpdirmode = os.stat(tmpdir).st_mode | 726 | tmpdirmode = os.stat(tmpdir).st_mode |
673 | if (tmpdirmode & stat.S_ISGID): | 727 | if (tmpdirmode & stat.S_ISGID): |
@@ -676,14 +730,14 @@ def check_sanity_version_change(status, d): | |||
676 | status.addresult("TMPDIR is setuid, please don't build in a setuid directory") | 730 | status.addresult("TMPDIR is setuid, please don't build in a setuid directory") |
677 | 731 | ||
678 | # Check that a user isn't building in a path in PSEUDO_IGNORE_PATHS | 732 | # Check that a user isn't building in a path in PSEUDO_IGNORE_PATHS |
679 | pseudoignorepaths = d.getVar('PSEUDO_IGNORE_PATHS', expand=True).split(",") | 733 | pseudoignorepaths = (d.getVar('PSEUDO_IGNORE_PATHS', expand=True) or "").split(",") |
680 | workdir = d.getVar('WORKDIR', expand=True) | 734 | workdir = d.getVar('WORKDIR', expand=True) |
681 | for i in pseudoignorepaths: | 735 | for i in pseudoignorepaths: |
682 | if i and workdir.startswith(i): | 736 | if i and workdir.startswith(i): |
683 | status.addresult("You are building in a path included in PSEUDO_IGNORE_PATHS " + str(i) + " please locate the build outside this path.\n") | 737 | status.addresult("You are building in a path included in PSEUDO_IGNORE_PATHS " + str(i) + " please locate the build outside this path.\n") |
684 | 738 | ||
685 | # Check if PSEUDO_IGNORE_PATHS and paths under pseudo control overlap | 739 | # Check if PSEUDO_IGNORE_PATHS and paths under pseudo control overlap |
686 | pseudoignorepaths = d.getVar('PSEUDO_IGNORE_PATHS', expand=True).split(",") | 740 | pseudoignorepaths = (d.getVar('PSEUDO_IGNORE_PATHS', expand=True) or "").split(",") |
687 | pseudo_control_dir = "${D},${PKGD},${PKGDEST},${IMAGEROOTFS},${SDK_OUTPUT}" | 741 | pseudo_control_dir = "${D},${PKGD},${PKGDEST},${IMAGEROOTFS},${SDK_OUTPUT}" |
688 | pseudocontroldir = d.expand(pseudo_control_dir).split(",") | 742 | pseudocontroldir = d.expand(pseudo_control_dir).split(",") |
689 | for i in pseudoignorepaths: | 743 | for i in pseudoignorepaths: |
@@ -731,8 +785,11 @@ def check_sanity_version_change(status, d): | |||
731 | if not oes_bb_conf: | 785 | if not oes_bb_conf: |
732 | status.addresult('You are not using the OpenEmbedded version of conf/bitbake.conf. This means your environment is misconfigured, in particular check BBPATH.\n') | 786 | status.addresult('You are not using the OpenEmbedded version of conf/bitbake.conf. This means your environment is misconfigured, in particular check BBPATH.\n') |
733 | 787 | ||
734 | # The length of TMPDIR can't be longer than 410 | 788 | # The length of TMPDIR can't be longer than 400 |
735 | status.addresult(check_path_length(tmpdir, "TMPDIR", 410)) | 789 | status.addresult(check_path_length(tmpdir, "TMPDIR", 400)) |
790 | |||
791 | # Check that TOPDIR does not contain non ascii chars (perl_5.40.0, Perl-native and shadow-native build failures) | ||
792 | status.addresult(check_non_ascii(topdir, "TOPDIR")) | ||
736 | 793 | ||
737 | # Check that TMPDIR isn't located on nfs | 794 | # Check that TMPDIR isn't located on nfs |
738 | status.addresult(check_not_nfs(tmpdir, "TMPDIR")) | 795 | status.addresult(check_not_nfs(tmpdir, "TMPDIR")) |
@@ -741,6 +798,14 @@ def check_sanity_version_change(status, d): | |||
741 | # macOS with default HFS+ file system) | 798 | # macOS with default HFS+ file system) |
742 | status.addresult(check_case_sensitive(tmpdir, "TMPDIR")) | 799 | status.addresult(check_case_sensitive(tmpdir, "TMPDIR")) |
743 | 800 | ||
801 | # Check if linking with -lstdc++ is failing | ||
802 | status.addresult(check_cpp_toolchain_flag(d, "-lstdc++")) | ||
803 | |||
804 | # Check if the C++ toolchain supports the "--std=gnu++20" flag | ||
805 | status.addresult(check_cpp_toolchain_flag(d, "--std=gnu++20", | ||
806 | "An error occurred during checking the C++ toolchain for '--std=gnu++20' support. " | ||
807 | "Please use a g++ compiler that supports C++20 (e.g. g++ version 10 onwards).")) | ||
808 | |||
744 | def sanity_check_locale(d): | 809 | def sanity_check_locale(d): |
745 | """ | 810 | """ |
746 | Currently bitbake switches locale to en_US.UTF-8 so check that this locale actually exists. | 811 | Currently bitbake switches locale to en_US.UTF-8 so check that this locale actually exists. |
@@ -759,10 +824,10 @@ def check_sanity_everybuild(status, d): | |||
759 | if 0 == os.getuid(): | 824 | if 0 == os.getuid(): |
760 | raise_sanity_error("Do not use Bitbake as root.", d) | 825 | raise_sanity_error("Do not use Bitbake as root.", d) |
761 | 826 | ||
762 | # Check the Python version, we now have a minimum of Python 3.8 | 827 | # Check the Python version, we now have a minimum of Python 3.9 |
763 | import sys | 828 | import sys |
764 | if sys.hexversion < 0x030800F0: | 829 | if sys.hexversion < 0x030900F0: |
765 | status.addresult('The system requires at least Python 3.8 to run. Please update your Python interpreter.\n') | 830 | status.addresult('The system requires at least Python 3.9 to run. Please update your Python interpreter.\n') |
766 | 831 | ||
767 | # Check the bitbake version meets minimum requirements | 832 | # Check the bitbake version meets minimum requirements |
768 | minversion = d.getVar('BB_MIN_VERSION') | 833 | minversion = d.getVar('BB_MIN_VERSION') |
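The strengthened tar check keys off GNU tar's --version banner, whose first line carries the version as the fourth whitespace-separated token. A sketch of the parse, assuming a typical GNU tar banner:

    result = "tar (GNU tar) 1.34\n"   # first line of tar --version
    print("GNU" in result)            # -> True: passes the gtar test
    print(result.split()[3])          # -> 1.34: compared against 1.28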
diff --git a/meta/classes-global/sstate.bbclass b/meta/classes-global/sstate.bbclass index 04539bbb99..53bc2e3940 100644 --- a/meta/classes-global/sstate.bbclass +++ b/meta/classes-global/sstate.bbclass | |||
@@ -4,7 +4,7 @@ | |||
4 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
5 | # | 5 | # |
6 | 6 | ||
7 | SSTATE_VERSION = "12" | 7 | SSTATE_VERSION = "14" |
8 | 8 | ||
9 | SSTATE_ZSTD_CLEVEL ??= "8" | 9 | SSTATE_ZSTD_CLEVEL ??= "8" |
10 | 10 | ||
@@ -76,7 +76,7 @@ SSTATE_SCAN_CMD_NATIVE ??= 'grep -Irl -e ${RECIPE_SYSROOT} -e ${RECIPE_SYSROOT_N | |||
76 | SSTATE_HASHEQUIV_FILEMAP ?= " \ | 76 | SSTATE_HASHEQUIV_FILEMAP ?= " \ |
77 | populate_sysroot:*/postinst-useradd-*:${TMPDIR} \ | 77 | populate_sysroot:*/postinst-useradd-*:${TMPDIR} \ |
78 | populate_sysroot:*/postinst-useradd-*:${COREBASE} \ | 78 | populate_sysroot:*/postinst-useradd-*:${COREBASE} \ |
79 | populate_sysroot:*/postinst-useradd-*:regex-\s(PATH|PSEUDO_IGNORE_PATHS|HOME|LOGNAME|OMP_NUM_THREADS|USER)=.*\s \ | 79 | populate_sysroot:*/postinst-useradd-*:regex-\s(PATH|PSEUDO_INCLUDE_PATHS|HOME|LOGNAME|OMP_NUM_THREADS|USER)=.*\s \ |
80 | populate_sysroot:*/crossscripts/*:${TMPDIR} \ | 80 | populate_sysroot:*/crossscripts/*:${TMPDIR} \ |
81 | populate_sysroot:*/crossscripts/*:${COREBASE} \ | 81 | populate_sysroot:*/crossscripts/*:${COREBASE} \ |
82 | " | 82 | " |
@@ -103,7 +103,6 @@ SSTATECREATEFUNCS[vardeps] = "SSTATE_SCAN_FILES" | |||
103 | SSTATEPOSTCREATEFUNCS = "" | 103 | SSTATEPOSTCREATEFUNCS = "" |
104 | SSTATEPREINSTFUNCS = "" | 104 | SSTATEPREINSTFUNCS = "" |
105 | SSTATEPOSTUNPACKFUNCS = "sstate_hardcode_path_unpack" | 105 | SSTATEPOSTUNPACKFUNCS = "sstate_hardcode_path_unpack" |
106 | SSTATEPOSTINSTFUNCS = "" | ||
107 | EXTRA_STAGING_FIXMES ?= "HOSTTOOLS_DIR" | 106 | EXTRA_STAGING_FIXMES ?= "HOSTTOOLS_DIR" |
108 | 107 | ||
109 | # Check whether sstate exists for tasks that support sstate and are in the | 108 | # Check whether sstate exists for tasks that support sstate and are in the |
@@ -161,7 +160,10 @@ python () { | |||
161 | d.setVar('SSTATETASKS', " ".join(unique_tasks)) | 160 | d.setVar('SSTATETASKS', " ".join(unique_tasks)) |
162 | for task in unique_tasks: | 161 | for task in unique_tasks: |
163 | d.prependVarFlag(task, 'prefuncs', "sstate_task_prefunc ") | 162 | d.prependVarFlag(task, 'prefuncs', "sstate_task_prefunc ") |
164 | d.appendVarFlag(task, 'postfuncs', " sstate_task_postfunc") | 163 | # Generally sstate should be last, except for buildhistory functions
164 | postfuncs = (d.getVarFlag(task, 'postfuncs') or "").split() | ||
165 | newpostfuncs = [p for p in postfuncs if "buildhistory" not in p] + ["sstate_task_postfunc"] + [p for p in postfuncs if "buildhistory" in p] | ||
166 | d.setVarFlag(task, 'postfuncs', " ".join(newpostfuncs)) | ||
165 | d.setVarFlag(task, 'network', '1') | 167 | d.setVarFlag(task, 'network', '1') |
166 | d.setVarFlag(task + "_setscene", 'network', '1') | 168 | d.setVarFlag(task + "_setscene", 'network', '1') |
167 | } | 169 | } |
@@ -189,7 +191,6 @@ def sstate_state_fromvars(d, task = None): | |||
189 | plaindirs = (d.getVarFlag("do_" + task, 'sstate-plaindirs') or "").split() | 191 | plaindirs = (d.getVarFlag("do_" + task, 'sstate-plaindirs') or "").split() |
190 | lockfiles = (d.getVarFlag("do_" + task, 'sstate-lockfile') or "").split() | 192 | lockfiles = (d.getVarFlag("do_" + task, 'sstate-lockfile') or "").split() |
191 | lockfilesshared = (d.getVarFlag("do_" + task, 'sstate-lockfile-shared') or "").split() | 193 | lockfilesshared = (d.getVarFlag("do_" + task, 'sstate-lockfile-shared') or "").split() |
192 | interceptfuncs = (d.getVarFlag("do_" + task, 'sstate-interceptfuncs') or "").split() | ||
193 | fixmedir = d.getVarFlag("do_" + task, 'sstate-fixmedir') or "" | 194 | fixmedir = d.getVarFlag("do_" + task, 'sstate-fixmedir') or "" |
194 | if not task or len(inputs) != len(outputs): | 195 | if not task or len(inputs) != len(outputs): |
195 | bb.fatal("sstate variables not setup correctly?!") | 196 | bb.fatal("sstate variables not setup correctly?!") |
@@ -205,7 +206,6 @@ def sstate_state_fromvars(d, task = None): | |||
205 | ss['lockfiles'] = lockfiles | 206 | ss['lockfiles'] = lockfiles |
206 | ss['lockfiles-shared'] = lockfilesshared | 207 | ss['lockfiles-shared'] = lockfilesshared |
207 | ss['plaindirs'] = plaindirs | 208 | ss['plaindirs'] = plaindirs |
208 | ss['interceptfuncs'] = interceptfuncs | ||
209 | ss['fixmedir'] = fixmedir | 209 | ss['fixmedir'] = fixmedir |
210 | return ss | 210 | return ss |
211 | 211 | ||
@@ -225,12 +225,23 @@ def sstate_install(ss, d): | |||
225 | import oe.sstatesig | 225 | import oe.sstatesig |
226 | import subprocess | 226 | import subprocess |
227 | 227 | ||
228 | def prepdir(dir): | ||
229 | # remove dir if it exists, ensure any parent directories do exist | ||
230 | if os.path.exists(dir): | ||
231 | oe.path.remove(dir) | ||
232 | bb.utils.mkdirhier(dir) | ||
233 | oe.path.remove(dir) | ||
234 | |||
235 | sstateinst = d.getVar("SSTATE_INSTDIR") | ||
236 | |||
237 | for state in ss['dirs']: | ||
238 | prepdir(state[1]) | ||
239 | bb.utils.rename(sstateinst + state[0], state[1]) | ||
240 | |||
228 | sharedfiles = [] | 241 | sharedfiles = [] |
229 | shareddirs = [] | 242 | shareddirs = [] |
230 | bb.utils.mkdirhier(d.expand("${SSTATE_MANIFESTS}")) | 243 | bb.utils.mkdirhier(d.expand("${SSTATE_MANIFESTS}")) |
231 | 244 | ||
232 | sstateinst = d.expand("${WORKDIR}/sstate-install-%s/" % ss['task']) | ||
233 | |||
234 | manifest, d2 = oe.sstatesig.sstate_get_manifest_filename(ss['task'], d) | 245 | manifest, d2 = oe.sstatesig.sstate_get_manifest_filename(ss['task'], d) |
235 | 246 | ||
236 | if os.access(manifest, os.R_OK): | 247 | if os.access(manifest, os.R_OK): |
@@ -329,15 +340,21 @@ def sstate_install(ss, d): | |||
329 | if os.path.exists(state[1]): | 340 | if os.path.exists(state[1]): |
330 | oe.path.copyhardlinktree(state[1], state[2]) | 341 | oe.path.copyhardlinktree(state[1], state[2]) |
331 | 342 | ||
332 | for postinst in (d.getVar('SSTATEPOSTINSTFUNCS') or '').split(): | 343 | for plain in ss['plaindirs']: |
333 | # All hooks should run in the SSTATE_INSTDIR | 344 | workdir = d.getVar('WORKDIR') |
334 | bb.build.exec_func(postinst, d, (sstateinst,)) | 345 | sharedworkdir = os.path.join(d.getVar('TMPDIR'), "work-shared") |
346 | src = sstateinst + "/" + plain.replace(workdir, '') | ||
347 | if sharedworkdir in plain: | ||
348 | src = sstateinst + "/" + plain.replace(sharedworkdir, '') | ||
349 | dest = plain | ||
350 | bb.utils.mkdirhier(src) | ||
351 | prepdir(dest) | ||
352 | bb.utils.rename(src, dest) | ||
335 | 353 | ||
336 | for lock in locks: | 354 | for lock in locks: |
337 | bb.utils.unlockfile(lock) | 355 | bb.utils.unlockfile(lock) |
338 | 356 | ||
339 | sstate_install[vardepsexclude] += "SSTATE_ALLOW_OVERLAP_FILES SSTATE_MANMACH SSTATE_MANFILEPREFIX" | 357 | sstate_install[vardepsexclude] += "SSTATE_ALLOW_OVERLAP_FILES SSTATE_MANMACH SSTATE_MANFILEPREFIX STAMP" |
340 | sstate_install[vardeps] += "${SSTATEPOSTINSTFUNCS}" | ||
341 | 358 | ||
342 | def sstate_installpkg(ss, d): | 359 | def sstate_installpkg(ss, d): |
343 | from oe.gpg_sign import get_signer | 360 | from oe.gpg_sign import get_signer |
@@ -393,29 +410,8 @@ def sstate_installpkgdir(ss, d): | |||
393 | # All hooks should run in the SSTATE_INSTDIR | 410 | # All hooks should run in the SSTATE_INSTDIR |
394 | bb.build.exec_func(f, d, (sstateinst,)) | 411 | bb.build.exec_func(f, d, (sstateinst,)) |
395 | 412 | ||
396 | def prepdir(dir): | ||
397 | # remove dir if it exists, ensure any parent directories do exist | ||
398 | if os.path.exists(dir): | ||
399 | oe.path.remove(dir) | ||
400 | bb.utils.mkdirhier(dir) | ||
401 | oe.path.remove(dir) | ||
402 | |||
403 | for state in ss['dirs']: | ||
404 | prepdir(state[1]) | ||
405 | bb.utils.rename(sstateinst + state[0], state[1]) | ||
406 | sstate_install(ss, d) | 413 | sstate_install(ss, d) |
407 | 414 | ||
408 | for plain in ss['plaindirs']: | ||
409 | workdir = d.getVar('WORKDIR') | ||
410 | sharedworkdir = os.path.join(d.getVar('TMPDIR'), "work-shared") | ||
411 | src = sstateinst + "/" + plain.replace(workdir, '') | ||
412 | if sharedworkdir in plain: | ||
413 | src = sstateinst + "/" + plain.replace(sharedworkdir, '') | ||
414 | dest = plain | ||
415 | bb.utils.mkdirhier(src) | ||
416 | prepdir(dest) | ||
417 | bb.utils.rename(src, dest) | ||
418 | |||
419 | return True | 415 | return True |
420 | 416 | ||
421 | python sstate_hardcode_path_unpack () { | 417 | python sstate_hardcode_path_unpack () { |
@@ -645,20 +641,12 @@ def sstate_package(ss, d): | |||
645 | 641 | ||
646 | tmpdir = d.getVar('TMPDIR') | 642 | tmpdir = d.getVar('TMPDIR') |
647 | 643 | ||
648 | fixtime = False | ||
649 | if ss['task'] == "package": | ||
650 | fixtime = True | ||
651 | |||
652 | def fixtimestamp(root, path): | ||
653 | f = os.path.join(root, path) | ||
654 | if os.lstat(f).st_mtime > sde: | ||
655 | os.utime(f, (sde, sde), follow_symlinks=False) | ||
656 | |||
657 | sstatebuild = d.expand("${WORKDIR}/sstate-build-%s/" % ss['task']) | 644 | sstatebuild = d.expand("${WORKDIR}/sstate-build-%s/" % ss['task']) |
658 | sde = int(d.getVar("SOURCE_DATE_EPOCH") or time.time()) | 645 | sde = int(d.getVar("SOURCE_DATE_EPOCH") or time.time()) |
659 | d.setVar("SSTATE_CURRTASK", ss['task']) | 646 | d.setVar("SSTATE_CURRTASK", ss['task']) |
660 | bb.utils.remove(sstatebuild, recurse=True) | 647 | bb.utils.remove(sstatebuild, recurse=True) |
661 | bb.utils.mkdirhier(sstatebuild) | 648 | bb.utils.mkdirhier(sstatebuild) |
649 | exit = False | ||
662 | for state in ss['dirs']: | 650 | for state in ss['dirs']: |
663 | if not os.path.exists(state[1]): | 651 | if not os.path.exists(state[1]): |
664 | continue | 652 | continue |
@@ -668,8 +656,6 @@ def sstate_package(ss, d): | |||
668 | # to sstate tasks but there aren't many of these so better just avoid them entirely. | 656 | # to sstate tasks but there aren't many of these so better just avoid them entirely. |
669 | for walkroot, dirs, files in os.walk(state[1]): | 657 | for walkroot, dirs, files in os.walk(state[1]): |
670 | for file in files + dirs: | 658 | for file in files + dirs: |
671 | if fixtime: | ||
672 | fixtimestamp(walkroot, file) | ||
673 | srcpath = os.path.join(walkroot, file) | 659 | srcpath = os.path.join(walkroot, file) |
674 | if not os.path.islink(srcpath): | 660 | if not os.path.islink(srcpath): |
675 | continue | 661 | continue |
@@ -679,8 +665,11 @@ def sstate_package(ss, d): | |||
679 | if not link.startswith(tmpdir): | 665 | if not link.startswith(tmpdir): |
680 | continue | 666 | continue |
681 | bb.error("sstate found an absolute path symlink %s pointing at %s. Please replace this with a relative link." % (srcpath, link)) | 667 | bb.error("sstate found an absolute path symlink %s pointing at %s. Please replace this with a relative link." % (srcpath, link)) |
668 | exit = True | ||
682 | bb.debug(2, "Preparing tree %s for packaging at %s" % (state[1], sstatebuild + state[0])) | 669 | bb.debug(2, "Preparing tree %s for packaging at %s" % (state[1], sstatebuild + state[0])) |
683 | bb.utils.rename(state[1], sstatebuild + state[0]) | 670 | bb.utils.rename(state[1], sstatebuild + state[0]) |
671 | if exit: | ||
672 | bb.fatal("Failing task due to absolute path symlinks") | ||
684 | 673 | ||
685 | workdir = d.getVar('WORKDIR') | 674 | workdir = d.getVar('WORKDIR') |
686 | sharedworkdir = os.path.join(d.getVar('TMPDIR'), "work-shared") | 675 | sharedworkdir = os.path.join(d.getVar('TMPDIR'), "work-shared") |
@@ -691,11 +680,6 @@ def sstate_package(ss, d): | |||
691 | bb.utils.mkdirhier(plain) | 680 | bb.utils.mkdirhier(plain) |
692 | bb.utils.mkdirhier(pdir) | 681 | bb.utils.mkdirhier(pdir) |
693 | bb.utils.rename(plain, pdir) | 682 | bb.utils.rename(plain, pdir) |
694 | if fixtime: | ||
695 | fixtimestamp(pdir, "") | ||
696 | for walkroot, dirs, files in os.walk(pdir): | ||
697 | for file in files + dirs: | ||
698 | fixtimestamp(walkroot, file) | ||
699 | 683 | ||
700 | d.setVar('SSTATE_BUILDDIR', sstatebuild) | 684 | d.setVar('SSTATE_BUILDDIR', sstatebuild) |
701 | d.setVar('SSTATE_INSTDIR', sstatebuild) | 685 | d.setVar('SSTATE_INSTDIR', sstatebuild) |
@@ -728,7 +712,7 @@ def sstate_package(ss, d): | |||
728 | 712 | ||
729 | return | 713 | return |
730 | 714 | ||
731 | sstate_package[vardepsexclude] += "SSTATE_SIG_KEY" | 715 | sstate_package[vardepsexclude] += "SSTATE_SIG_KEY SSTATE_PKG" |
732 | 716 | ||
733 | def pstaging_fetch(sstatefetch, d): | 717 | def pstaging_fetch(sstatefetch, d): |
734 | import bb.fetch2 | 718 | import bb.fetch2 |
@@ -742,7 +726,6 @@ def pstaging_fetch(sstatefetch, d): | |||
742 | localdata = bb.data.createCopy(d) | 726 | localdata = bb.data.createCopy(d) |
743 | 727 | ||
744 | dldir = localdata.expand("${SSTATE_DIR}") | 728 | dldir = localdata.expand("${SSTATE_DIR}") |
745 | bb.utils.mkdirhier(dldir) | ||
746 | 729 | ||
747 | localdata.delVar('MIRRORS') | 730 | localdata.delVar('MIRRORS') |
748 | localdata.setVar('FILESPATH', dldir) | 731 | localdata.setVar('FILESPATH', dldir) |
@@ -762,16 +745,19 @@ def pstaging_fetch(sstatefetch, d): | |||
762 | if bb.utils.to_boolean(d.getVar("SSTATE_VERIFY_SIG"), False): | 745 | if bb.utils.to_boolean(d.getVar("SSTATE_VERIFY_SIG"), False): |
763 | uris += ['file://{0}.sig;downloadfilename={0}.sig'.format(sstatefetch)] | 746 | uris += ['file://{0}.sig;downloadfilename={0}.sig'.format(sstatefetch)] |
764 | 747 | ||
765 | for srcuri in uris: | 748 | with bb.utils.umask(bb.utils.to_filemode(d.getVar("OE_SHARED_UMASK"))): |
766 | localdata.delVar('SRC_URI') | 749 | bb.utils.mkdirhier(dldir) |
767 | localdata.setVar('SRC_URI', srcuri) | ||
768 | try: | ||
769 | fetcher = bb.fetch2.Fetch([srcuri], localdata, cache=False) | ||
770 | fetcher.checkstatus() | ||
771 | fetcher.download() | ||
772 | 750 | ||
773 | except bb.fetch2.BBFetchException: | 751 | for srcuri in uris: |
774 | pass | 752 | localdata.delVar('SRC_URI') |
753 | localdata.setVar('SRC_URI', srcuri) | ||
754 | try: | ||
755 | fetcher = bb.fetch2.Fetch([srcuri], localdata, cache=False) | ||
756 | fetcher.checkstatus() | ||
757 | fetcher.download() | ||
758 | |||
759 | except bb.fetch2.BBFetchException: | ||
760 | pass | ||
775 | 761 | ||
776 | def sstate_setscene(d): | 762 | def sstate_setscene(d): |
777 | shared_state = sstate_state_fromvars(d) | 763 | shared_state = sstate_state_fromvars(d) |
@@ -790,12 +776,10 @@ sstate_task_prefunc[dirs] = "${WORKDIR}" | |||
790 | python sstate_task_postfunc () { | 776 | python sstate_task_postfunc () { |
791 | shared_state = sstate_state_fromvars(d) | 777 | shared_state = sstate_state_fromvars(d) |
792 | 778 | ||
793 | for intercept in shared_state['interceptfuncs']: | 779 | shared_umask = bb.utils.to_filemode(d.getVar("OE_SHARED_UMASK")) |
794 | bb.build.exec_func(intercept, d, (d.getVar("WORKDIR"),)) | 780 | omask = os.umask(shared_umask) |
795 | 781 | if omask != shared_umask: | |
796 | omask = os.umask(0o002) | 782 | bb.note("Using umask %0o (not %0o) for sstate packaging" % (shared_umask, omask)) |
797 | if omask != 0o002: | ||
798 | bb.note("Using umask 0o002 (not %0o) for sstate packaging" % omask) | ||
799 | sstate_package(shared_state, d) | 783 | sstate_package(shared_state, d) |
800 | os.umask(omask) | 784 | os.umask(omask) |
801 | 785 | ||
@@ -860,15 +844,15 @@ python sstate_create_and_sign_package () { | |||
860 | 844 | ||
861 | # Create the required sstate directory if it is not present. | 845 | # Create the required sstate directory if it is not present. |
862 | if not sstate_pkg.parent.is_dir(): | 846 | if not sstate_pkg.parent.is_dir(): |
863 | with bb.utils.umask(0o002): | 847 | shared_umask = bb.utils.to_filemode(d.getVar("OE_SHARED_UMASK")) |
848 | with bb.utils.umask(shared_umask): | ||
864 | bb.utils.mkdirhier(str(sstate_pkg.parent)) | 849 | bb.utils.mkdirhier(str(sstate_pkg.parent)) |
865 | 850 | ||
866 | if sign_pkg: | 851 | if sign_pkg: |
867 | from tempfile import TemporaryDirectory | 852 | from tempfile import TemporaryDirectory |
868 | with TemporaryDirectory(dir=sstate_pkg.parent) as tmp_dir: | 853 | with TemporaryDirectory(dir=sstate_pkg.parent) as tmp_dir: |
869 | tmp_pkg = Path(tmp_dir) / sstate_pkg.name | 854 | tmp_pkg = Path(tmp_dir) / sstate_pkg.name |
870 | d.setVar("TMP_SSTATE_PKG", str(tmp_pkg)) | 855 | sstate_archive_package(tmp_pkg, d) |
871 | bb.build.exec_func('sstate_archive_package', d) | ||
872 | 856 | ||
873 | from oe.gpg_sign import get_signer | 857 | from oe.gpg_sign import get_signer |
874 | signer = get_signer(d, 'local') | 858 | signer = get_signer(d, 'local') |
@@ -888,8 +872,7 @@ python sstate_create_and_sign_package () { | |||
888 | from tempfile import NamedTemporaryFile | 872 | from tempfile import NamedTemporaryFile |
889 | with NamedTemporaryFile(prefix=sstate_pkg.name, dir=sstate_pkg.parent) as tmp_pkg_fd: | 873 | with NamedTemporaryFile(prefix=sstate_pkg.name, dir=sstate_pkg.parent) as tmp_pkg_fd: |
890 | tmp_pkg = tmp_pkg_fd.name | 874 | tmp_pkg = tmp_pkg_fd.name |
891 | d.setVar("TMP_SSTATE_PKG", str(tmp_pkg)) | 875 | sstate_archive_package(tmp_pkg, d) |
892 | bb.build.exec_func('sstate_archive_package',d) | ||
893 | update_file(tmp_pkg, sstate_pkg) | 876 | update_file(tmp_pkg, sstate_pkg) |
894 | # update_file() may have renamed tmp_pkg, which must exist when the | 877 | # update_file() may have renamed tmp_pkg, which must exist when the |
895 | # NamedTemporaryFile() context handler ends. | 878 | # NamedTemporaryFile() context handler ends. |
@@ -897,32 +880,33 @@ python sstate_create_and_sign_package () { | |||
897 | 880 | ||
898 | } | 881 | } |
899 | 882 | ||
900 | # Shell function to generate a sstate package from a directory | 883 | # Function to generate a sstate package from the current directory. |
901 | # set as SSTATE_BUILDDIR. Will be run from within SSTATE_BUILDDIR. | ||
902 | # The calling function handles moving the sstate package into the final | 884 | # The calling function handles moving the sstate package into the final |
903 | # destination. | 885 | # destination. |
904 | sstate_archive_package () { | 886 | def sstate_archive_package(sstate_pkg, d): |
905 | OPT="-cS" | 887 | import subprocess |
906 | ZSTD="zstd -${SSTATE_ZSTD_CLEVEL} -T${ZSTD_THREADS}" | ||
907 | # Use pzstd if available | ||
908 | if [ -x "$(command -v pzstd)" ]; then | ||
909 | ZSTD="pzstd -${SSTATE_ZSTD_CLEVEL} -p ${ZSTD_THREADS}" | ||
910 | fi | ||
911 | 888 | ||
912 | # Need to handle empty directories | 889 | cmd = [ |
913 | if [ "$(ls -A)" ]; then | 890 | "tar", |
914 | set +e | 891 | "-I", d.expand("pzstd -${SSTATE_ZSTD_CLEVEL} -p${ZSTD_THREADS}"), |
915 | tar -I "$ZSTD" $OPT -f ${TMP_SSTATE_PKG} * | 892 | "-cS", |
916 | ret=$? | 893 | "-f", sstate_pkg, |
917 | if [ $ret -ne 0 ] && [ $ret -ne 1 ]; then | 894 | ] |
918 | exit 1 | 895 | |
919 | fi | 896 | # tar refuses to create an empty archive unless told explicitly |
920 | set -e | 897 | files = sorted(os.listdir(".")) |
921 | else | 898 | if not files: |
922 | tar -I "$ZSTD" $OPT --file=${TMP_SSTATE_PKG} --files-from=/dev/null | 899 | files = ["--files-from=/dev/null"] |
923 | fi | 900 | |
924 | chmod 0664 ${TMP_SSTATE_PKG} | 901 | try: |
925 | } | 902 | subprocess.run(cmd + files, check=True) |
903 | except subprocess.CalledProcessError as e: | ||
904 | # Ignore error 1 as this is caused by files changing | ||
905 | # (link count increasing from hardlinks being created). | ||
906 | if e.returncode != 1: | ||
907 | raise | ||
908 | |||
909 | os.chmod(sstate_pkg, 0o664) | ||
926 | 910 | ||
927 | 911 | ||
928 | python sstate_report_unihash() { | 912 | python sstate_report_unihash() { |
@@ -1115,7 +1099,7 @@ def sstate_checkhashes(sq_data, d, siginfo=False, currentcount=0, summary=True, | |||
1115 | bb.parse.siggen.checkhashes(sq_data, missed, found, d) | 1099 | bb.parse.siggen.checkhashes(sq_data, missed, found, d) |
1116 | 1100 | ||
1117 | return found | 1101 | return found |
1118 | setscene_depvalid[vardepsexclude] = "SSTATE_EXCLUDEDEPS_SYSROOT" | 1102 | setscene_depvalid[vardepsexclude] = "SSTATE_EXCLUDEDEPS_SYSROOT _SSTATE_EXCLUDEDEPS_SYSROOT" |
1119 | 1103 | ||
1120 | BB_SETSCENE_DEPVALID = "setscene_depvalid" | 1104 | BB_SETSCENE_DEPVALID = "setscene_depvalid" |
1121 | 1105 | ||
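For reference, the Python rewrite of sstate_archive_package() is equivalent to the old shell function's pzstd branch (the plain-zstd fallback for hosts without pzstd is dropped). With the SSTATE_ZSTD_CLEVEL default of 8 and, say, four threads it runs roughly:

    tar -I "pzstd -8 -p4" -cS -f <SSTATE_PKG> <sorted top-level entries>

An empty build directory is archived via --files-from=/dev/null, and tar exit code 1 (a file changed while being read, e.g. a hardlink count increasing) is tolerated just as the shell version's return-code check did.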
diff --git a/meta/classes-global/staging.bbclass b/meta/classes-global/staging.bbclass index 3678a1b441..1008867a6c 100644 --- a/meta/classes-global/staging.bbclass +++ b/meta/classes-global/staging.bbclass | |||
@@ -126,8 +126,8 @@ do_populate_sysroot[vardeps] += "${SYSROOT_PREPROCESS_FUNCS}" | |||
126 | do_populate_sysroot[vardepsexclude] += "BB_MULTI_PROVIDER_ALLOWED" | 126 | do_populate_sysroot[vardepsexclude] += "BB_MULTI_PROVIDER_ALLOWED" |
127 | 127 | ||
128 | POPULATESYSROOTDEPS = "" | 128 | POPULATESYSROOTDEPS = "" |
129 | POPULATESYSROOTDEPS:class-target = "virtual/${HOST_PREFIX}binutils:do_populate_sysroot" | 129 | POPULATESYSROOTDEPS:class-target = "virtual/cross-binutils:do_populate_sysroot" |
130 | POPULATESYSROOTDEPS:class-nativesdk = "virtual/${HOST_PREFIX}binutils:do_populate_sysroot" | 130 | POPULATESYSROOTDEPS:class-nativesdk = "virtual/nativesdk-cross-binutils:do_populate_sysroot" |
131 | do_populate_sysroot[depends] += "${POPULATESYSROOTDEPS}" | 131 | do_populate_sysroot[depends] += "${POPULATESYSROOTDEPS}" |
132 | 132 | ||
133 | SSTATETASKS += "do_populate_sysroot" | 133 | SSTATETASKS += "do_populate_sysroot" |
@@ -652,10 +652,17 @@ python do_prepare_recipe_sysroot () { | |||
652 | addtask do_prepare_recipe_sysroot before do_configure after do_fetch | 652 | addtask do_prepare_recipe_sysroot before do_configure after do_fetch |
653 | 653 | ||
654 | python staging_taskhandler() { | 654 | python staging_taskhandler() { |
655 | EXCLUDED_TASKS = ( | ||
656 | "do_prepare_recipe_sysroot", | ||
657 | "do_create_spdx", | ||
658 | ) | ||
655 | bbtasks = e.tasklist | 659 | bbtasks = e.tasklist |
656 | for task in bbtasks: | 660 | for task in bbtasks: |
661 | if task in EXCLUDED_TASKS: | ||
662 | continue | ||
663 | |||
657 | deps = d.getVarFlag(task, "depends") | 664 | deps = d.getVarFlag(task, "depends") |
658 | if task != 'do_prepare_recipe_sysroot' and (task == "do_configure" or (deps and "populate_sysroot" in deps)): | 665 | if task == "do_configure" or (deps and "populate_sysroot" in deps): |
659 | d.prependVarFlag(task, "prefuncs", "extend_recipe_sysroot ") | 666 | d.prependVarFlag(task, "prefuncs", "extend_recipe_sysroot ") |
660 | } | 667 | } |
661 | staging_taskhandler[eventmask] = "bb.event.RecipeTaskPreProcess" | 668 | staging_taskhandler[eventmask] = "bb.event.RecipeTaskPreProcess" |
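The refactor hoists the skipped task names into a tuple and tests membership up front, which keeps the remaining condition readable. A reduced sketch of the same predicate, with an invented dependency string:

    EXCLUDED_TASKS = ("do_prepare_recipe_sysroot", "do_create_spdx")

    def needs_sysroot_extension(task, deps):
        # deps is the task's [depends] flag value, or None
        if task in EXCLUDED_TASKS:
            return False
        return task == "do_configure" or bool(deps and "populate_sysroot" in deps)

    print(needs_sysroot_extension("do_configure", None))                      # True
    print(needs_sysroot_extension("do_create_spdx", "x:do_populate_sysroot")) # False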
diff --git a/meta/classes-global/uninative.bbclass b/meta/classes-global/uninative.bbclass index d2297b53f5..c246a1ecd6 100644 --- a/meta/classes-global/uninative.bbclass +++ b/meta/classes-global/uninative.bbclass | |||
@@ -109,7 +109,7 @@ ${UNINATIVE_STAGING_DIR}-uninative/relocate_sdk.py \ | |||
109 | ${UNINATIVE_LOADER} \ | 109 | ${UNINATIVE_LOADER} \ |
110 | ${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux/${bindir_native}/patchelf-uninative \ | 110 | ${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux/${bindir_native}/patchelf-uninative \ |
111 | ${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux${base_libdir_native}/libc*.so*" % chksum) | 111 | ${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux${base_libdir_native}/libc*.so*" % chksum) |
112 | subprocess.check_output(cmd, shell=True) | 112 | subprocess.check_output(cmd, shell=True, text=True, stderr=subprocess.STDOUT) |
113 | 113 | ||
114 | with open(loaderchksum, "w") as f: | 114 | with open(loaderchksum, "w") as f: |
115 | f.write(chksum) | 115 | f.write(chksum) |
@@ -122,7 +122,9 @@ ${UNINATIVE_STAGING_DIR}-uninative/relocate_sdk.py \ | |||
122 | bb.warn("Disabling uninative as unable to fetch uninative tarball: %s" % str(exc)) | 122 | bb.warn("Disabling uninative as unable to fetch uninative tarball: %s" % str(exc)) |
123 | bb.warn("To build your own uninative loader, please bitbake uninative-tarball and set UNINATIVE_TARBALL appropriately.") | 123 | bb.warn("To build your own uninative loader, please bitbake uninative-tarball and set UNINATIVE_TARBALL appropriately.") |
124 | except subprocess.CalledProcessError as exc: | 124 | except subprocess.CalledProcessError as exc: |
125 | bb.warn("Disabling uninative as unable to install uninative tarball: %s" % str(exc)) | 125 | bb.warn("Disabling uninative as unable to install uninative tarball:") |
126 | bb.warn(str(exc)) | ||
127 | bb.warn(exc.stdout) | ||
126 | bb.warn("To build your own uninative loader, please bitbake uninative-tarball and set UNINATIVE_TARBALL appropriately.") | 128 | bb.warn("To build your own uninative loader, please bitbake uninative-tarball and set UNINATIVE_TARBALL appropriately.") |
127 | finally: | 129 | finally: |
128 | os.chdir(olddir) | 130 | os.chdir(olddir) |
@@ -140,7 +142,7 @@ def enable_uninative(d): | |||
140 | loader = d.getVar("UNINATIVE_LOADER") | 142 | loader = d.getVar("UNINATIVE_LOADER") |
141 | if os.path.exists(loader): | 143 | if os.path.exists(loader): |
142 | bb.debug(2, "Enabling uninative") | 144 | bb.debug(2, "Enabling uninative") |
143 | d.setVar("NATIVELSBSTRING", "universal%s" % oe.utils.host_gcc_version(d)) | 145 | d.setVar("NATIVELSBSTRING", "universal") |
144 | d.appendVar("SSTATEPOSTUNPACKFUNCS", " uninative_changeinterp") | 146 | d.appendVar("SSTATEPOSTUNPACKFUNCS", " uninative_changeinterp") |
145 | d.appendVarFlag("SSTATEPOSTUNPACKFUNCS", "vardepvalueexclude", "| uninative_changeinterp") | 147 | d.appendVarFlag("SSTATEPOSTUNPACKFUNCS", "vardepvalueexclude", "| uninative_changeinterp") |
146 | d.appendVar("BUILD_LDFLAGS", " -Wl,--allow-shlib-undefined -Wl,--dynamic-linker=${UNINATIVE_LOADER} -pthread") | 148 | d.appendVar("BUILD_LDFLAGS", " -Wl,--allow-shlib-undefined -Wl,--dynamic-linker=${UNINATIVE_LOADER} -pthread") |
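With text=True and stderr=subprocess.STDOUT, the captured output lands on CalledProcessError.stdout as an ordinary string, which is what lets the except block above pass exc.stdout straight to bb.warn(). A self-contained demonstration using a throwaway shell command:

    import subprocess

    try:
        subprocess.check_output("echo diagnostics; exit 3", shell=True,
                                text=True, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as exc:
        print(str(exc))    # command and non-zero return code
        print(exc.stdout)  # combined stdout/stderr: "diagnostics"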
diff --git a/meta/classes-global/utility-tasks.bbclass b/meta/classes-global/utility-tasks.bbclass index ae2da330b8..394cc3158d 100644 --- a/meta/classes-global/utility-tasks.bbclass +++ b/meta/classes-global/utility-tasks.bbclass | |||
@@ -9,18 +9,17 @@ do_listtasks[nostamp] = "1" | |||
9 | python do_listtasks() { | 9 | python do_listtasks() { |
10 | taskdescs = {} | 10 | taskdescs = {} |
11 | maxlen = 0 | 11 | maxlen = 0 |
12 | for e in d.keys(): | 12 | for t in bb.build.listtasks(d): |
13 | if d.getVarFlag(e, 'task'): | 13 | maxlen = max(maxlen, len(t)) |
14 | maxlen = max(maxlen, len(e)) | 14 | |
15 | if e.endswith('_setscene'): | 15 | if t.endswith('_setscene'): |
16 | desc = "%s (setscene version)" % (d.getVarFlag(e[:-9], 'doc') or '') | 16 | desc = "%s (setscene version)" % (d.getVarFlag(t[:-9], 'doc') or '') |
17 | else: | 17 | else: |
18 | desc = d.getVarFlag(e, 'doc') or '' | 18 | desc = d.getVarFlag(t, 'doc') or '' |
19 | taskdescs[e] = desc | 19 | taskdescs[t] = desc |
20 | 20 | ||
21 | tasks = sorted(taskdescs.keys()) | 21 | for task, doc in sorted(taskdescs.items()): |
22 | for taskname in tasks: | 22 | bb.plain("%s %s" % (task.ljust(maxlen), doc)) |
23 | bb.plain("%s %s" % (taskname.ljust(maxlen), taskdescs[taskname])) | ||
24 | } | 23 | } |
25 | 24 | ||
26 | CLEANFUNCS ?= "" | 25 | CLEANFUNCS ?= "" |
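bb.build.listtasks(d) returns the task names directly, so the rewrite drops the scan over every datastore key. The column alignment is plain str.ljust(); a toy version with made-up task descriptions:

    taskdescs = {"do_fetch": "Fetch the source", "do_unpack_setscene": "Unpack (setscene version)"}
    maxlen = max(len(t) for t in taskdescs)
    for task, doc in sorted(taskdescs.items()):
        print("%s  %s" % (task.ljust(maxlen), doc))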
diff --git a/meta/classes-global/utils.bbclass b/meta/classes-global/utils.bbclass index 957389928f..530a490ea8 100644 --- a/meta/classes-global/utils.bbclass +++ b/meta/classes-global/utils.bbclass | |||
@@ -15,7 +15,7 @@ oe_soinstall() { | |||
15 | ;; | 15 | ;; |
16 | esac | 16 | esac |
17 | install -m 755 $1 $2/$libname | 17 | install -m 755 $1 $2/$libname |
18 | sonamelink=`${READELF} -d $1 |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'` | 18 | sonamelink=`${OBJDUMP} -p $1 | grep SONAME | awk '{print $2}'` |
19 | if [ -z $sonamelink ]; then | 19 | if [ -z $sonamelink ]; then |
20 | bbfatal "oe_soinstall: $libname is missing ELF tag 'SONAME'." | 20 | bbfatal "oe_soinstall: $libname is missing ELF tag 'SONAME'." |
21 | fi | 21 | fi |
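Both SONAME lookups switch from readelf to objdump. For checking the new pipeline outside a build, a rough Python equivalent (the library path is an example assumption):

    import subprocess

    out = subprocess.check_output(
        ["objdump", "-p", "/usr/lib/x86_64-linux-gnu/libz.so.1"], text=True)
    soname = next((line.split()[1] for line in out.splitlines()
                   if "SONAME" in line), None)
    print(soname)  # expected: libz.so.1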
@@ -147,7 +147,7 @@ oe_libinstall() { | |||
147 | # special case hack for non-libtool .so.#.#.# links | 147 | # special case hack for non-libtool .so.#.#.# links |
148 | baselibfile=`basename "$libfile"` | 148 | baselibfile=`basename "$libfile"` |
149 | if (echo $baselibfile | grep -qE '^lib.*\.so\.[0-9.]*$'); then | 149 | if (echo $baselibfile | grep -qE '^lib.*\.so\.[0-9.]*$'); then |
150 | sonamelink=`${READELF} -d $libfile |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'` | 150 | sonamelink=`${OBJDUMP} -p $libfile | grep SONAME | awk '{print $2}'` |
151 | solink=`echo $baselibfile | sed -e 's/\.so\..*/.so/'` | 151 | solink=`echo $baselibfile | sed -e 's/\.so\..*/.so/'` |
152 | if [ -n "$sonamelink" -a x"$baselibfile" != x"$sonamelink" ]; then | 152 | if [ -n "$sonamelink" -a x"$baselibfile" != x"$sonamelink" ]; then |
153 | __runcmd ln -sf $baselibfile $destpath/$sonamelink | 153 | __runcmd ln -sf $baselibfile $destpath/$sonamelink |
@@ -367,3 +367,13 @@ check_git_config() { | |||
367 | git config --local user.name "${PATCH_GIT_USER_NAME}" | 367 | git config --local user.name "${PATCH_GIT_USER_NAME}" |
368 | fi | 368 | fi |
369 | } | 369 | } |
370 | |||
371 | # Sets fixed git committer and author for reproducible commits | ||
372 | reproducible_git_committer_author() { | ||
373 | export GIT_COMMITTER_NAME="${PATCH_GIT_USER_NAME}" | ||
374 | export GIT_COMMITTER_EMAIL="${PATCH_GIT_USER_EMAIL}" | ||
375 | export GIT_COMMITTER_DATE="$(date -d @${SOURCE_DATE_EPOCH})" | ||
376 | export GIT_AUTHOR_NAME="${PATCH_GIT_USER_NAME}" | ||
377 | export GIT_AUTHOR_EMAIL="${PATCH_GIT_USER_EMAIL}" | ||
378 | export GIT_AUTHOR_DATE="$(date -d @${SOURCE_DATE_EPOCH})" | ||
379 | } | ||
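The new helper pins all six git identity and date variables so commits created during patching hash identically across builds. The `date -d @N` stamp it exports can be reproduced for inspection in Python (the epoch value is an example, not a real SOURCE_DATE_EPOCH):

    import subprocess

    epoch = 1700000000  # example value
    stamp = subprocess.check_output(["date", "-d", "@%d" % epoch], text=True).strip()
    print(stamp)  # e.g. "Tue Nov 14 22:13:20 UTC 2023"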
diff --git a/meta/classes-global/yocto-check-layer.bbclass b/meta/classes-global/yocto-check-layer.bbclass new file mode 100644 index 0000000000..ba93085325 --- /dev/null +++ b/meta/classes-global/yocto-check-layer.bbclass | |||
@@ -0,0 +1,62 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
6 | |||
7 | # This class is used by the yocto-check-layer script for additional | ||
8 | # per-recipe tests. | ||
9 | # | ||
10 | # It adds an anonymous python function with extra processing to all recipes; | ||
11 | # globally inheriting this class isn't advisable. The yocto-check-layer script | ||
12 | # handles that during its signature dump. | ||
13 | |||
14 | |||
15 | # Ensure that recipes don't skip required QA checks as listed | ||
16 | # in CHECKLAYER_REQUIRED_TESTS, defined by insane.bbclass | ||
17 | def check_insane_skip(d): | ||
18 | required_tests = set((d.getVar('CHECKLAYER_REQUIRED_TESTS') or '').split()) | ||
19 | packages = set((d.getVar('PACKAGES') or '').split()) | ||
20 | for package in packages: | ||
21 | skip = set((d.getVar('INSANE_SKIP') or "").split() + | ||
22 | (d.getVar('INSANE_SKIP:' + package) or "").split()) | ||
23 | skip_required = skip & required_tests | ||
24 | if skip_required: | ||
25 | oe.qa.write_error(" ".join(skip_required), 'Package %s is skipping required QA tests.' % package, d) | ||
26 | bb.error("QA Issue: %s [%s]" % ('Package %s is skipping required QA tests.' % package, " ".join(skip_required))) | ||
27 | d.setVar("QA_ERRORS_FOUND", "True") | ||
28 | |||
29 | |||
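check_insane_skip() is a straightforward set intersection between the recipe's INSANE_SKIP values and the required-test list. In isolation, with invented values:

    required_tests = {"license-checksum", "patch-status"}
    skip = set("ldflags patch-status".split())
    skip_required = skip & required_tests
    if skip_required:
        print("skipping required QA tests:", " ".join(sorted(skip_required)))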
30 | # Check that no tasks (with rare exceptions) between do_fetch and do_build | ||
31 | # use the network. | ||
32 | def check_network_flag(d): | ||
33 | # BPN:task names that are allowed to reach the network, matched using fnmatch. | ||
34 | allowed = [] | ||
35 | # build-appliance-image uses pip at image time | ||
36 | allowed += ["build-appliance-image:do_image"] | ||
37 | |||
38 | def is_allowed(bpn, task): | ||
39 | from fnmatch import fnmatch | ||
40 | name = f"{bpn}:{task}" | ||
41 | return any(fnmatch(name, pattern) for pattern in allowed) | ||
42 | |||
43 | bpn = d.getVar("BPN") | ||
44 | seen = set() | ||
45 | stack = {"do_build"} | ||
46 | while stack: | ||
47 | task = stack.pop() | ||
48 | if task == "do_fetch": | ||
49 | continue | ||
50 | |||
51 | seen.add(task) | ||
52 | deps = d.getVarFlag(task, "deps") or [] | ||
53 | stack |= {d for d in deps if d not in seen} | ||
54 | |||
55 | network = bb.utils.to_boolean(d.getVarFlag(task, "network")) | ||
56 | if network and not is_allowed(bpn, task): | ||
57 | bb.error(f"QA Issue: task {task} has network enabled") | ||
58 | |||
59 | python () { | ||
60 | check_insane_skip(d) | ||
61 | check_network_flag(d) | ||
62 | } | ||
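The allowed-list in check_network_flag() matches "BPN:task" strings with fnmatch, so glob patterns would work as well as literal names. A standalone sketch with example recipe and task names:

    from fnmatch import fnmatch

    allowed = ["build-appliance-image:do_image"]

    def is_allowed(bpn, task):
        return any(fnmatch(f"{bpn}:{task}", pattern) for pattern in allowed)

    print(is_allowed("build-appliance-image", "do_image"))  # True
    print(is_allowed("curl", "do_compile"))                 # False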