Diffstat (limited to 'meta/classes')
-rw-r--r--  meta/classes/archiver.bbclass                  6
-rw-r--r--  meta/classes/buildhistory.bbclass            135
-rw-r--r--  meta/classes/create-spdx-2.2.bbclass         482
-rw-r--r--  meta/classes/create-spdx-3.0.bbclass         205
-rw-r--r--  meta/classes/create-spdx.bbclass               2
-rw-r--r--  meta/classes/cve-check.bbclass               345
-rw-r--r--  meta/classes/devtool-source.bbclass            8
-rw-r--r--  meta/classes/go-vendor.bbclass                12
-rw-r--r--  meta/classes/icecc.bbclass                   461
-rw-r--r--  meta/classes/migrate_localcount.bbclass       52
-rw-r--r--  meta/classes/multilib.bbclass                 88
-rw-r--r--  meta/classes/multilib_global.bbclass          34
-rw-r--r--  meta/classes/report-error.bbclass              1
-rw-r--r--  meta/classes/sign_rpm.bbclass                  2
-rw-r--r--  meta/classes/siteconfig.bbclass               39
-rw-r--r--  meta/classes/spdx-common.bbclass             107
-rw-r--r--  meta/classes/toaster.bbclass                   2
-rw-r--r--  meta/classes/toolchain/clang.bbclass          37
-rw-r--r--  meta/classes/toolchain/gcc-native.bbclass     15
-rw-r--r--  meta/classes/toolchain/gcc.bbclass            33
-rw-r--r--  meta/classes/vex.bbclass                     303
-rw-r--r--  meta/classes/yocto-check-layer.bbclass        22
22 files changed, 1102 insertions, 1289 deletions
diff --git a/meta/classes/archiver.bbclass b/meta/classes/archiver.bbclass
index 2d0bbfbd42..a95c899a0f 100644
--- a/meta/classes/archiver.bbclass
+++ b/meta/classes/archiver.bbclass
@@ -166,6 +166,7 @@ python () {
     d.appendVarFlag('do_package_write_rpm', 'dirs', ' ${ARCHIVER_RPMTOPDIR}')
     d.appendVarFlag('do_package_write_rpm', 'sstate-inputdirs', ' ${ARCHIVER_RPMTOPDIR}')
     d.appendVarFlag('do_package_write_rpm', 'sstate-outputdirs', ' ${DEPLOY_DIR_SRC}')
+    d.appendVar('PSEUDO_INCLUDE_PATHS', ',${ARCHIVER_TOPDIR}')
     if ar_dumpdata == "1":
         d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn)
     if ar_recipe == "1":
@@ -339,7 +340,7 @@ python do_ar_mirror() {
     dl_dir = d.getVar('DL_DIR')
     mirror_exclusions = (d.getVar('ARCHIVER_MIRROR_EXCLUDE') or '').split()
     mirror_mode = d.getVarFlag('ARCHIVER_MODE', 'mirror')
-    have_mirror_tarballs = d.getVar('BB_GENERATE_MIRROR_TARBALLS')
+    have_mirror_tarballs = oe.types.boolean(d.getVar('BB_GENERATE_MIRROR_TARBALLS'))
 
     if mirror_mode == 'combined':
         destdir = d.getVar('ARCHIVER_COMBINED_MIRRORDIR')
@@ -473,7 +474,8 @@ def create_diff_gz(d, src_orig, src, ar_outdir):
 
 def is_work_shared(d):
     sharedworkdir = os.path.join(d.getVar('TMPDIR'), 'work-shared')
-    return d.getVar('S').startswith(sharedworkdir)
+    sourcedir = os.path.realpath(d.getVar('S'))
+    return sourcedir.startswith(sharedworkdir)
 
 # Run do_unpack and do_patch
 python do_unpack_and_patch() {
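
The is_work_shared() change above resolves ${S} through os.path.realpath() before the prefix check, so a source tree reached via a symlink or a '..' component is still recognised as living under work-shared. A standalone sketch of that behaviour, using hypothetical paths and not part of the patch itself:

import os

def is_work_shared(tmpdir, s):
    # Mirror of the patched helper: compare the resolved source path
    # against TMPDIR/work-shared rather than the raw ${S} value.
    sharedworkdir = os.path.join(tmpdir, 'work-shared')
    sourcedir = os.path.realpath(s)
    return sourcedir.startswith(sharedworkdir)

print(is_work_shared('/build/tmp', '/build/tmp/work/foo/../../work-shared/gcc-13'))  # True
print(is_work_shared('/build/tmp', '/build/tmp/work/foo/foo-1.0'))                   # False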
diff --git a/meta/classes/buildhistory.bbclass b/meta/classes/buildhistory.bbclass
index fd53e92402..4a380c10c6 100644
--- a/meta/classes/buildhistory.bbclass
+++ b/meta/classes/buildhistory.bbclass
@@ -16,28 +16,6 @@ BUILDHISTORY_DIR ?= "${TOPDIR}/buildhistory"
 BUILDHISTORY_DIR_IMAGE = "${BUILDHISTORY_DIR}/images/${MACHINE_ARCH}/${TCLIBC}/${IMAGE_BASENAME}"
 BUILDHISTORY_DIR_PACKAGE = "${BUILDHISTORY_DIR}/packages/${MULTIMACH_TARGET_SYS}/${PN}"
 
-# Setting this to non-empty will remove the old content of the buildhistory as part of
-# the current bitbake invocation and replace it with information about what was built
-# during the build.
-#
-# This is meant to be used in continuous integration (CI) systems when invoking bitbake
-# for full world builds. The effect in that case is that information about packages
-# that no longer get build also gets removed from the buildhistory, which is not
-# the case otherwise.
-#
-# The advantage over manually cleaning the buildhistory outside of bitbake is that
-# the "version-going-backwards" check still works. When relying on that, be careful
-# about failed world builds: they will lead to incomplete information in the
-# buildhistory because information about packages that could not be built will
-# also get removed. A CI system should handle that by discarding the buildhistory
-# of failed builds.
-#
-# The expected usage is via auto.conf, but passing via the command line also works
-# with: BB_ENV_PASSTHROUGH_ADDITIONS=BUILDHISTORY_RESET BUILDHISTORY_RESET=1
-BUILDHISTORY_RESET ?= ""
-
-BUILDHISTORY_OLD_DIR = "${BUILDHISTORY_DIR}/${@ "old" if "${BUILDHISTORY_RESET}" else ""}"
-BUILDHISTORY_OLD_DIR_PACKAGE = "${BUILDHISTORY_OLD_DIR}/packages/${MULTIMACH_TARGET_SYS}/${PN}"
 BUILDHISTORY_DIR_SDK = "${BUILDHISTORY_DIR}/sdk/${SDK_NAME}${SDK_EXT}/${IMAGE_BASENAME}"
 BUILDHISTORY_IMAGE_FILES ?= "/etc/passwd /etc/group"
 BUILDHISTORY_SDK_FILES ?= "conf/local.conf conf/bblayers.conf conf/auto.conf conf/locked-sigs.inc conf/devtool.conf"
@@ -47,25 +25,33 @@ BUILDHISTORY_PUSH_REPO ?= ""
 BUILDHISTORY_TAG ?= "build"
 BUILDHISTORY_PATH_PREFIX_STRIP ?= ""
 
-SSTATEPOSTINSTFUNCS:append = " buildhistory_emit_pkghistory"
-# We want to avoid influencing the signatures of sstate tasks - first the function itself:
-sstate_install[vardepsexclude] += "buildhistory_emit_pkghistory"
-# then the value added to SSTATEPOSTINSTFUNCS:
-SSTATEPOSTINSTFUNCS[vardepvalueexclude] .= "| buildhistory_emit_pkghistory"
+# We want to avoid influencing the signatures of the task so use vardepsexclude
+do_populate_sysroot[postfuncs] += "buildhistory_emit_sysroot"
+do_populate_sysroot_setscene[postfuncs] += "buildhistory_emit_sysroot"
+do_populate_sysroot[vardepsexclude] += "buildhistory_emit_sysroot"
+
+do_package[postfuncs] += "buildhistory_list_pkg_files"
+do_package_setscene[postfuncs] += "buildhistory_list_pkg_files"
+do_package[vardepsexclude] += "buildhistory_list_pkg_files"
+
+do_packagedata[postfuncs] += "buildhistory_emit_pkghistory"
+do_packagedata_setscene[postfuncs] += "buildhistory_emit_pkghistory"
+do_packagedata[vardepsexclude] += "buildhistory_emit_pkghistory"
 
 # Similarly for our function that gets the output signatures
 SSTATEPOSTUNPACKFUNCS:append = " buildhistory_emit_outputsigs"
 sstate_installpkgdir[vardepsexclude] += "buildhistory_emit_outputsigs"
 SSTATEPOSTUNPACKFUNCS[vardepvalueexclude] .= "| buildhistory_emit_outputsigs"
 
-# All items excepts those listed here will be removed from a recipe's
+# All items except those listed here will be removed from a recipe's
 # build history directory by buildhistory_emit_pkghistory(). This is
 # necessary because some of these items (package directories, files that
 # we no longer emit) might be obsolete.
 #
-# When extending build history, derive your class from buildhistory.bbclass
-# and extend this list here with the additional files created by the derived
-# class.
+# The files listed here are either written by tasks that aren't do_package (e.g.
+# latest_srcrev from do_fetch) so do_package must not remove them, or, they're
+# used to read values in do_package before always being overwritten, e.g. latest,
+# for version backwards checks.
 BUILDHISTORY_PRESERVE = "latest latest_srcrev sysroot"
 
 PATCH_GIT_USER_EMAIL ?= "buildhistory@oe"
@@ -91,28 +77,16 @@ buildhistory_emit_sysroot() {
 # Write out metadata about this package for comparison when writing future packages
 #
 python buildhistory_emit_pkghistory() {
-    if d.getVar('BB_CURRENTTASK') in ['populate_sysroot', 'populate_sysroot_setscene']:
-        bb.build.exec_func("buildhistory_emit_sysroot", d)
-        return 0
-
-    if not "package" in (d.getVar('BUILDHISTORY_FEATURES') or "").split():
-        return 0
-
-    if d.getVar('BB_CURRENTTASK') in ['package', 'package_setscene']:
-        # Create files-in-<package-name>.txt files containing a list of files of each recipe's package
-        bb.build.exec_func("buildhistory_list_pkg_files", d)
-        return 0
-
-    if not d.getVar('BB_CURRENTTASK') in ['packagedata', 'packagedata_setscene']:
-        return 0
-
     import re
     import json
     import shlex
     import errno
+    import shutil
+
+    if not "package" in (d.getVar('BUILDHISTORY_FEATURES') or "").split():
+        return 0
 
     pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE')
-    oldpkghistdir = d.getVar('BUILDHISTORY_OLD_DIR_PACKAGE')
 
     class RecipeInfo:
         def __init__(self, name):
@@ -153,7 +127,7 @@ python buildhistory_emit_pkghistory() {
             # Variables that need to be written to their own separate file
            self.filevars = dict.fromkeys(['pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm'])
 
-    # Should check PACKAGES here to see if anything removed
+    # Should check PACKAGES here to see if anything was removed
 
     def readPackageInfo(pkg, histfile):
         pkginfo = PackageInfo(pkg)
@@ -207,7 +181,7 @@
 
     def getlastpkgversion(pkg):
         try:
-            histfile = os.path.join(oldpkghistdir, pkg, "latest")
+            histfile = os.path.join(pkghistdir, pkg, "latest")
             return readPackageInfo(pkg, histfile)
         except EnvironmentError:
             return None
@@ -535,7 +509,7 @@ buildhistory_get_installed() {
         grep -v kernel-module $1/depends-nokernel-nolibc-noupdate.dot > $1/depends-nokernel-nolibc-noupdate-nomodules.dot
     fi
 
-    # add complementary package information
+    # Add complementary package information
     if [ -e ${WORKDIR}/complementary_pkgs.txt ]; then
         cp ${WORKDIR}/complementary_pkgs.txt $1
     fi
@@ -573,7 +547,7 @@ buildhistory_get_sdk_installed_target() {
 
 buildhistory_list_files() {
     # List the files in the specified directory, but exclude date/time etc.
-    # This is somewhat messy, but handles where the size is not printed for device files under pseudo
+    # This is somewhat messy, but handles cases where the size is not printed for device files under pseudo
     ( cd $1
     find_cmd='find . ! -path . -printf "%M %-10u %-10g %10s %p -> %l\n"'
     if [ "$3" = "fakeroot" ] ; then
@@ -587,7 +561,7 @@ buildhistory_list_files_no_owners() {
     # List the files in the specified directory, but exclude date/time etc.
     # Also don't output the ownership data, but instead output just - - so
     # that the same parsing code as for _list_files works.
-    # This is somewhat messy, but handles where the size is not printed for device files under pseudo
+    # This is somewhat messy, but handles cases where the size is not printed for device files under pseudo
     ( cd $1
     find_cmd='find . ! -path . -printf "%M - - %10s %p -> %l\n"'
     if [ "$3" = "fakeroot" ] ; then
@@ -598,16 +572,17 @@ buildhistory_list_files_no_owners() {
 }
 
 buildhistory_list_pkg_files() {
+    if [ "${@bb.utils.contains('BUILDHISTORY_FEATURES', 'package', '1', '0', d)}" = "0" ] ; then
+        return
+    fi
+
     # Create individual files-in-package for each recipe's package
-    for pkgdir in $(find ${PKGDEST}/* -maxdepth 0 -type d); do
+    pkgdirlist=$(find ${PKGDEST}/* -maxdepth 0 -type d)
+    for pkgdir in $pkgdirlist; do
         pkgname=$(basename $pkgdir)
         outfolder="${BUILDHISTORY_DIR_PACKAGE}/$pkgname"
         outfile="$outfolder/files-in-package.txt"
-        # Make sure the output folder exists so we can create the file
-        if [ ! -d $outfolder ] ; then
-            bbdebug 2 "Folder $outfolder does not exist, file $outfile not created"
-            continue
-        fi
+        mkdir -p $outfolder
         buildhistory_list_files $pkgdir $outfile fakeroot
     done
 }
@@ -842,9 +817,9 @@ END
     if [ ! -e .git ] ; then
         git init -q
     else
-        git tag -f ${BUILDHISTORY_TAG}-minus-3 ${BUILDHISTORY_TAG}-minus-2 > /dev/null 2>&1 || true
-        git tag -f ${BUILDHISTORY_TAG}-minus-2 ${BUILDHISTORY_TAG}-minus-1 > /dev/null 2>&1 || true
-        git tag -f ${BUILDHISTORY_TAG}-minus-1 > /dev/null 2>&1 || true
+        git tag -f --no-sign ${BUILDHISTORY_TAG}-minus-3 ${BUILDHISTORY_TAG}-minus-2 > /dev/null 2>&1 || true
+        git tag -f --no-sign ${BUILDHISTORY_TAG}-minus-2 ${BUILDHISTORY_TAG}-minus-1 > /dev/null 2>&1 || true
+        git tag -f --no-sign ${BUILDHISTORY_TAG}-minus-1 > /dev/null 2>&1 || true
     fi
 
     check_git_config
@@ -855,10 +830,9 @@ END
     CMDLINE="${@buildhistory_get_cmdline(d)}"
     if [ "$repostatus" != "" ] ; then
         git add -A .
-        # porcelain output looks like "?? packages/foo/bar"
+        # Porcelain output looks like "?? packages/foo/bar"
         # Ensure we commit metadata-revs with the first commit
         buildhistory_single_commit "$CMDLINE" "$HOSTNAME" dummy
-        git gc --auto --quiet
     else
         buildhistory_single_commit "$CMDLINE" "$HOSTNAME"
     fi
@@ -869,25 +843,7 @@ END
 
 python buildhistory_eventhandler() {
     if (e.data.getVar('BUILDHISTORY_FEATURES') or "").strip():
-        reset = e.data.getVar("BUILDHISTORY_RESET")
-        olddir = e.data.getVar("BUILDHISTORY_OLD_DIR")
-        if isinstance(e, bb.event.BuildStarted):
-            if reset:
-                import shutil
-                # Clean up after potentially interrupted build.
-                if os.path.isdir(olddir):
-                    shutil.rmtree(olddir)
-                rootdir = e.data.getVar("BUILDHISTORY_DIR")
-                bb.utils.mkdirhier(rootdir)
-                entries = [ x for x in os.listdir(rootdir) if not x.startswith('.') ]
-                bb.utils.mkdirhier(olddir)
-                for entry in entries:
-                    bb.utils.rename(os.path.join(rootdir, entry),
-                                    os.path.join(olddir, entry))
-        elif isinstance(e, bb.event.BuildCompleted):
-            if reset:
-                import shutil
-                shutil.rmtree(olddir)
+        if isinstance(e, bb.event.BuildCompleted):
             if e.data.getVar("BUILDHISTORY_COMMIT") == "1":
                 bb.note("Writing buildhistory")
                 bb.build.exec_func("buildhistory_write_sigs", d)
@@ -925,13 +881,12 @@ def _get_srcrev_values(d):
     dict_tag_srcrevs = {}
     for scm in scms:
         ud = urldata[scm]
-        for name in ud.names:
-            autoinc, rev = ud.method.sortable_revision(ud, d, name)
-            dict_srcrevs[name] = rev
-            if 'tag' in ud.parm:
-                tag = ud.parm['tag'];
-                key = name+'_'+tag
-                dict_tag_srcrevs[key] = rev
+        autoinc, rev = ud.method.sortable_revision(ud, d, ud.name)
+        dict_srcrevs[ud.name] = rev
+        if 'tag' in ud.parm:
+            tag = ud.parm['tag'];
+            key = ud.name+'_'+tag
+            dict_tag_srcrevs[key] = rev
     return (dict_srcrevs, dict_tag_srcrevs)
 
 do_fetch[postfuncs] += "write_srcrev"
@@ -990,7 +945,7 @@ def write_latest_ptest_result(d, histdir):
     output_ptest = os.path.join(histdir, 'ptest')
     if os.path.exists(input_ptest):
         try:
-            # Lock it avoid race issue
+            # Lock it to avoid race issue
             lock = bb.utils.lockfile(output_ptest + "/ptest.lock")
             bb.utils.mkdirhier(output_ptest)
             oe.path.copytree(input_ptest, output_ptest)
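
The buildhistory changes above replace the SSTATEPOSTINSTFUNCS/BB_CURRENTTASK dispatch with per-task postfuncs that are excluded from the task signatures. A minimal sketch of how a layer could hook its own emit function using the same pattern; the class fragment and the function name below are hypothetical, not part of the patch:

inherit buildhistory

# Hypothetical extension: run an extra emit function after do_packagedata
# without influencing its signature, mirroring the pattern introduced above.
do_packagedata[postfuncs] += "buildhistory_emit_extrainfo"
do_packagedata_setscene[postfuncs] += "buildhistory_emit_extrainfo"
do_packagedata[vardepsexclude] += "buildhistory_emit_extrainfo"

# Keep the extra file across the cleanup done by buildhistory_emit_pkghistory()
BUILDHISTORY_PRESERVE += "extrainfo"

python buildhistory_emit_extrainfo() {
    import os
    # Write one extra file into the recipe's buildhistory directory
    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE')
    bb.utils.mkdirhier(pkghistdir)
    with open(os.path.join(pkghistdir, 'extrainfo'), 'w') as f:
        f.write('SUMMARY = %s\n' % (d.getVar('SUMMARY') or ''))
}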
diff --git a/meta/classes/create-spdx-2.2.bbclass b/meta/classes/create-spdx-2.2.bbclass
index 7c8a0b8b0f..94e0108815 100644
--- a/meta/classes/create-spdx-2.2.bbclass
+++ b/meta/classes/create-spdx-2.2.bbclass
@@ -4,36 +4,9 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
-DEPLOY_DIR_SPDX ??= "${DEPLOY_DIR}/spdx"
-
-# The product name that the CVE database uses. Defaults to BPN, but may need to
-# be overriden per recipe (for example tiff.bb sets CVE_PRODUCT=libtiff).
-CVE_PRODUCT ??= "${BPN}"
-CVE_VERSION ??= "${PV}"
-
-SPDXDIR ??= "${WORKDIR}/spdx"
-SPDXDEPLOY = "${SPDXDIR}/deploy"
-SPDXWORK = "${SPDXDIR}/work"
-SPDXIMAGEWORK = "${SPDXDIR}/image-work"
-SPDXSDKWORK = "${SPDXDIR}/sdk-work"
-SPDXDEPS = "${SPDXDIR}/deps.json"
-
-SPDX_TOOL_NAME ??= "oe-spdx-creator"
-SPDX_TOOL_VERSION ??= "1.0"
-
-SPDXRUNTIMEDEPLOY = "${SPDXDIR}/runtime-deploy"
-
-SPDX_INCLUDE_SOURCES ??= "0"
-SPDX_ARCHIVE_SOURCES ??= "0"
-SPDX_ARCHIVE_PACKAGED ??= "0"
-
-SPDX_UUID_NAMESPACE ??= "sbom.openembedded.org"
-SPDX_NAMESPACE_PREFIX ??= "http://spdx.org/spdxdocs"
-SPDX_PRETTY ??= "0"
-
-SPDX_LICENSES ??= "${COREBASE}/meta/files/spdx-licenses.json"
-
-SPDX_CUSTOM_ANNOTATION_VARS ??= ""
-
+inherit spdx-common
+
+SPDX_VERSION = "2.2"
+
 SPDX_ORG ??= "OpenEmbedded ()"
 SPDX_SUPPLIER ??= "Organization: ${SPDX_ORG}"
@@ -42,27 +15,16 @@ SPDX_SUPPLIER[doc] = "The SPDX PackageSupplier field for SPDX packages created f
     is the contact information for the person or organization who is doing the \
     build."
 
-def extract_licenses(filename):
-    import re
-
-    lic_regex = re.compile(rb'^\W*SPDX-License-Identifier:\s*([ \w\d.()+-]+?)(?:\s+\W*)?$', re.MULTILINE)
-
-    try:
-        with open(filename, 'rb') as f:
-            size = min(15000, os.stat(filename).st_size)
-            txt = f.read(size)
-            licenses = re.findall(lic_regex, txt)
-            if licenses:
-                ascii_licenses = [lic.decode('ascii') for lic in licenses]
-                return ascii_licenses
-    except Exception as e:
-        bb.warn(f"Exception reading {filename}: {e}")
-    return None
-
-def get_doc_namespace(d, doc):
+SPDX_ARCHIVE_SOURCES ??= "0"
+SPDX_ARCHIVE_PACKAGED ??= "0"
+
+def get_namespace(d, name):
     import uuid
     namespace_uuid = uuid.uuid5(uuid.NAMESPACE_DNS, d.getVar("SPDX_UUID_NAMESPACE"))
-    return "%s/%s-%s" % (d.getVar("SPDX_NAMESPACE_PREFIX"), doc.name, str(uuid.uuid5(namespace_uuid, doc.name)))
+    return "%s/%s-%s" % (d.getVar("SPDX_NAMESPACE_PREFIX"), name, str(uuid.uuid5(namespace_uuid, name)))
+
+SPDX_PACKAGE_VERSION ??= "${PV}"
+SPDX_PACKAGE_VERSION[doc] = "The version of a package, versionInfo in recipe, package and image"
 
 def create_annotation(d, comment):
     from datetime import datetime, timezone
@@ -80,31 +42,16 @@ def recipe_spdx_is_native(d, recipe):
         a.annotator == "Tool: %s - %s" % (d.getVar("SPDX_TOOL_NAME"), d.getVar("SPDX_TOOL_VERSION")) and
         a.comment == "isNative" for a in recipe.annotations)
 
-def is_work_shared_spdx(d):
-    return bb.data.inherits_class('kernel', d) or ('work-shared' in d.getVar('WORKDIR'))
-
 def get_json_indent(d):
     if d.getVar("SPDX_PRETTY") == "1":
         return 2
     return None
 
-python() {
-    import json
-    if d.getVar("SPDX_LICENSE_DATA"):
-        return
-
-    with open(d.getVar("SPDX_LICENSES"), "r") as f:
-        data = json.load(f)
-        # Transform the license array to a dictionary
-        data["licenses"] = {l["licenseId"]: l for l in data["licenses"]}
-    d.setVar("SPDX_LICENSE_DATA", data)
-}
 
-def convert_license_to_spdx(lic, document, d, existing={}):
+def convert_license_to_spdx(lic, license_data, document, d, existing={}):
     from pathlib import Path
     import oe.spdx
 
-    license_data = d.getVar("SPDX_LICENSE_DATA")
     extracted = {}
 
     def add_extracted_license(ident, name):
@@ -132,11 +79,17 @@ def convert_license_to_spdx(lic, document, d, existing={}):
            pass
        if extracted_info.extractedText is None:
            # If it's not SPDX or PD, then NO_GENERIC_LICENSE must be set
-            filename = d.getVarFlag('NO_GENERIC_LICENSE', name)
+            entry = d.getVarFlag('NO_GENERIC_LICENSE', name).split(';')
+            filename = entry[0]
+            params = {i.split('=')[0]: i.split('=')[1] for i in entry[1:] if '=' in i}
+            beginline = int(params.get('beginline', 1))
+            endline = params.get('endline', None)
+            if endline:
+                endline = int(endline)
            if filename:
                filename = d.expand("${S}/" + filename)
                with open(filename, errors="replace") as f:
-                    extracted_info.extractedText = f.read()
+                    extracted_info.extractedText = "".join(line for idx, line in enumerate(f, 1) if beginline <= idx and idx <= (endline or idx))
            else:
                bb.fatal("Cannot find any text for license %s" % name)
 
@@ -172,37 +125,10 @@ def convert_license_to_spdx(lic, document, d, existing={}):
 
     return ' '.join(convert(l) for l in lic_split)
 
-def process_sources(d):
-    pn = d.getVar('PN')
-    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
-    if pn in assume_provided:
-        for p in d.getVar("PROVIDES").split():
-            if p != pn:
-                pn = p
-                break
-
-    # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
-    # so avoid archiving source here.
-    if pn.startswith('glibc-locale'):
-        return False
-    if d.getVar('PN') == "libtool-cross":
-        return False
-    if d.getVar('PN') == "libgcc-initial":
-        return False
-    if d.getVar('PN') == "shadow-sysroot":
-        return False
-
-    # We just archive gcc-source for all the gcc related recipes
-    if d.getVar('BPN') in ['gcc', 'libgcc']:
-        bb.debug(1, 'spdx: There is bug in scan of %s is, do nothing' % pn)
-        return False
-
-    return True
-
-
 def add_package_files(d, doc, spdx_pkg, topdir, get_spdxid, get_types, *, archive=None, ignore_dirs=[], ignore_top_level_dirs=[]):
     from pathlib import Path
     import oe.spdx
+    import oe.spdx_common
     import hashlib
 
     source_date_epoch = d.getVar("SOURCE_DATE_EPOCH")
@@ -213,6 +139,11 @@ def add_package_files(d, doc, spdx_pkg, topdir, get_spdxid, get_types, *, archiv
     spdx_files = []
 
     file_counter = 1
+
+    check_compiled_sources = d.getVar("SPDX_INCLUDE_COMPILED_SOURCES") == "1"
+    if check_compiled_sources:
+        compiled_sources, types = oe.spdx_common.get_compiled_sources(d)
+        bb.debug(1, f"Total compiled files: {len(compiled_sources)}")
     for subdir, dirs, files in os.walk(topdir):
         dirs[:] = [d for d in dirs if d not in ignore_dirs]
         if subdir == str(topdir):
@@ -223,6 +154,10 @@ def add_package_files(d, doc, spdx_pkg, topdir, get_spdxid, get_types, *, archiv
             filename = str(filepath.relative_to(topdir))
 
             if not filepath.is_symlink() and filepath.is_file():
+                # Check if file is compiled
+                if check_compiled_sources:
+                    if not oe.spdx_common.is_compiled_source(filename, compiled_sources, types):
+                        continue
                 spdx_file = oe.spdx.SPDXFile()
                 spdx_file.SPDXID = get_spdxid(file_counter)
                 for t in get_types(filepath):
@@ -255,7 +190,7 @@ def add_package_files(d, doc, spdx_pkg, topdir, get_spdxid, get_types, *, archiv
                 ))
 
                 if "SOURCE" in spdx_file.fileTypes:
-                    extracted_lics = extract_licenses(filepath)
+                    extracted_lics = oe.spdx_common.extract_licenses(filepath)
                     if extracted_lics:
                         spdx_file.licenseInfoInFiles = extracted_lics
 
@@ -313,7 +248,8 @@ def add_package_sources_from_debug(d, package_doc, spdx_package, package, packag
                     debugsrc_path = search / debugsrc.replace('/usr/src/kernel/', '')
                 else:
                     debugsrc_path = search / debugsrc.lstrip("/")
-                if not debugsrc_path.exists():
+                # We can only hash files below, skip directories, links, etc.
+                if not os.path.isfile(debugsrc_path):
                     continue
 
                 file_sha256 = bb.utils.sha256_file(debugsrc_path)
@@ -346,32 +282,31 @@ def collect_dep_recipes(d, doc, spdx_recipe):
     from pathlib import Path
     import oe.sbom
     import oe.spdx
+    import oe.spdx_common
 
     deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
-    spdx_deps_file = Path(d.getVar("SPDXDEPS"))
-    package_archs = d.getVar("SSTATE_ARCHS").split()
+    package_archs = d.getVar("SPDX_MULTILIB_SSTATE_ARCHS").split()
     package_archs.reverse()
 
     dep_recipes = []
 
-    with spdx_deps_file.open("r") as f:
-        deps = json.load(f)
+    deps = oe.spdx_common.get_spdx_deps(d)
 
-    for dep_pn, dep_hashfn, in_taskhash in deps:
+    for dep in deps:
         # If this dependency is not calculated in the taskhash skip it.
         # Otherwise, it can result in broken links since this task won't
         # rebuild and see the new SPDX ID if the dependency changes
-        if not in_taskhash:
+        if not dep.in_taskhash:
             continue
 
-        dep_recipe_path = oe.sbom.doc_find_by_hashfn(deploy_dir_spdx, package_archs, "recipe-" + dep_pn, dep_hashfn)
+        dep_recipe_path = oe.sbom.doc_find_by_hashfn(deploy_dir_spdx, package_archs, "recipe-" + dep.pn, dep.hashfn)
         if not dep_recipe_path:
-            bb.fatal("Cannot find any SPDX file for recipe %s, %s" % (dep_pn, dep_hashfn))
+            bb.fatal("Cannot find any SPDX file for recipe %s, %s" % (dep.pn, dep.hashfn))
 
         spdx_dep_doc, spdx_dep_sha1 = oe.sbom.read_doc(dep_recipe_path)
 
         for pkg in spdx_dep_doc.packages:
-            if pkg.name == dep_pn:
+            if pkg.name == dep.pn:
                 spdx_dep_recipe = pkg
                 break
         else:
@@ -395,7 +330,7 @@ def collect_dep_recipes(d, doc, spdx_recipe):
 
     return dep_recipes
 
-collect_dep_recipes[vardepsexclude] = "SSTATE_ARCHS"
+collect_dep_recipes[vardepsexclude] = "SPDX_MULTILIB_SSTATE_ARCHS"
 
 def collect_dep_sources(d, dep_recipes):
     import oe.sbom
@@ -430,99 +365,52 @@ def add_download_packages(d, doc, recipe):
     for download_idx, src_uri in enumerate(d.getVar('SRC_URI').split()):
         f = bb.fetch2.FetchData(src_uri, d)
 
-        for name in f.names:
-            package = oe.spdx.SPDXPackage()
-            package.name = "%s-source-%d" % (d.getVar("PN"), download_idx + 1)
-            package.SPDXID = oe.sbom.get_download_spdxid(d, download_idx + 1)
-
-            if f.type == "file":
-                continue
-
-            uri = f.type
-            proto = getattr(f, "proto", None)
-            if proto is not None:
-                uri = uri + "+" + proto
-            uri = uri + "://" + f.host + f.path
-
-            if f.method.supports_srcrev():
-                uri = uri + "@" + f.revisions[name]
-
-            if f.method.supports_checksum(f):
-                for checksum_id in CHECKSUM_LIST:
-                    if checksum_id.upper() not in oe.spdx.SPDXPackage.ALLOWED_CHECKSUMS:
-                        continue
-
-                    expected_checksum = getattr(f, "%s_expected" % checksum_id)
-                    if expected_checksum is None:
-                        continue
-
-                    c = oe.spdx.SPDXChecksum()
-                    c.algorithm = checksum_id.upper()
-                    c.checksumValue = expected_checksum
-                    package.checksums.append(c)
-
-            package.downloadLocation = uri
-            doc.packages.append(package)
-            doc.add_relationship(doc, "DESCRIBES", package)
-            # In the future, we might be able to do more fancy dependencies,
-            # but this should be sufficient for now
-            doc.add_relationship(package, "BUILD_DEPENDENCY_OF", recipe)
-
-def collect_direct_deps(d, dep_task):
-    current_task = "do_" + d.getVar("BB_CURRENTTASK")
-    pn = d.getVar("PN")
-
-    taskdepdata = d.getVar("BB_TASKDEPDATA", False)
-
-    for this_dep in taskdepdata.values():
-        if this_dep[0] == pn and this_dep[1] == current_task:
-            break
-    else:
-        bb.fatal(f"Unable to find this {pn}:{current_task} in taskdepdata")
-
-    deps = set()
-    for dep_name in this_dep[3]:
-        dep_data = taskdepdata[dep_name]
-        if dep_data[1] == dep_task and dep_data[0] != pn:
-            deps.add((dep_data[0], dep_data[7], dep_name in this_dep[8]))
-
-    return sorted(deps)
-
-collect_direct_deps[vardepsexclude] += "BB_TASKDEPDATA"
-collect_direct_deps[vardeps] += "DEPENDS"
-
-python do_collect_spdx_deps() {
-    # This task calculates the build time dependencies of the recipe, and is
-    # required because while a task can deptask on itself, those dependencies
-    # do not show up in BB_TASKDEPDATA. To work around that, this task does the
-    # deptask on do_create_spdx and writes out the dependencies it finds, then
-    # do_create_spdx reads in the found dependencies when writing the actual
-    # SPDX document
-    import json
-    from pathlib import Path
-
-    spdx_deps_file = Path(d.getVar("SPDXDEPS"))
-
-    deps = collect_direct_deps(d, "do_create_spdx")
-
-    with spdx_deps_file.open("w") as f:
-        json.dump(deps, f)
-}
-# NOTE: depending on do_unpack is a hack that is necessary to get it's dependencies for archive the source
-addtask do_collect_spdx_deps after do_unpack
-do_collect_spdx_deps[depends] += "${PATCHDEPENDENCY}"
-do_collect_spdx_deps[deptask] = "do_create_spdx"
-do_collect_spdx_deps[dirs] = "${SPDXDIR}"
-
+        package = oe.spdx.SPDXPackage()
+        package.name = "%s-source-%d" % (d.getVar("PN"), download_idx + 1)
+        package.SPDXID = oe.sbom.get_download_spdxid(d, download_idx + 1)
+
+        if f.type == "file":
+            continue
+
+        if f.method.supports_checksum(f):
+            for checksum_id in CHECKSUM_LIST:
+                if checksum_id.upper() not in oe.spdx.SPDXPackage.ALLOWED_CHECKSUMS:
+                    continue
+
+                expected_checksum = getattr(f, "%s_expected" % checksum_id)
+                if expected_checksum is None:
+                    continue
+
+                c = oe.spdx.SPDXChecksum()
+                c.algorithm = checksum_id.upper()
+                c.checksumValue = expected_checksum
+                package.checksums.append(c)
+
+        package.downloadLocation = oe.spdx_common.fetch_data_to_uri(f, f.name)
+        doc.packages.append(package)
+        doc.add_relationship(doc, "DESCRIBES", package)
+        # In the future, we might be able to do more fancy dependencies,
+        # but this should be sufficient for now
+        doc.add_relationship(package, "BUILD_DEPENDENCY_OF", recipe)
+
+def get_license_list_version(license_data, d):
+    # Newer versions of the SPDX license list are SemVer ("MAJOR.MINOR.MICRO"),
+    # but SPDX 2 only uses "MAJOR.MINOR".
+    return ".".join(license_data["licenseListVersion"].split(".")[:2])
+
 
 python do_create_spdx() {
     from datetime import datetime, timezone
     import oe.sbom
     import oe.spdx
+    import oe.spdx_common
     import uuid
     from pathlib import Path
     from contextlib import contextmanager
     import oe.cve_check
 
+    license_data = oe.spdx_common.load_spdx_license_data(d)
+
     @contextmanager
     def optional_tarfile(name, guard, mode="w"):
         import tarfile
@@ -551,17 +439,17 @@ python do_create_spdx() {
     doc = oe.spdx.SPDXDocument()
 
     doc.name = "recipe-" + d.getVar("PN")
-    doc.documentNamespace = get_doc_namespace(d, doc)
+    doc.documentNamespace = get_namespace(d, doc.name)
     doc.creationInfo.created = creation_time
     doc.creationInfo.comment = "This document was created by analyzing recipe files during the build."
-    doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
+    doc.creationInfo.licenseListVersion = get_license_list_version(license_data, d)
     doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
     doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
     doc.creationInfo.creators.append("Person: N/A ()")
 
     recipe = oe.spdx.SPDXPackage()
     recipe.name = d.getVar("PN")
-    recipe.versionInfo = d.getVar("PV")
+    recipe.versionInfo = d.getVar("SPDX_PACKAGE_VERSION")
     recipe.SPDXID = oe.sbom.get_recipe_spdxid(d)
     recipe.supplier = d.getVar("SPDX_SUPPLIER")
     if bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d):
@@ -573,7 +461,7 @@ python do_create_spdx() {
 
     license = d.getVar("LICENSE")
     if license:
-        recipe.licenseDeclared = convert_license_to_spdx(license, doc, d)
+        recipe.licenseDeclared = convert_license_to_spdx(license, license_data, doc, d)
 
     summary = d.getVar("SUMMARY")
     if summary:
@@ -610,10 +498,10 @@ python do_create_spdx() {
 
     add_download_packages(d, doc, recipe)
 
-    if process_sources(d) and include_sources:
+    if oe.spdx_common.process_sources(d) and include_sources:
         recipe_archive = deploy_dir_spdx / "recipes" / (doc.name + ".tar.zst")
         with optional_tarfile(recipe_archive, archive_sources) as archive:
-            spdx_get_src(d)
+            oe.spdx_common.get_patched_src(d)
 
             add_package_files(
                 d,
@@ -655,10 +543,10 @@ python do_create_spdx() {
             package_doc = oe.spdx.SPDXDocument()
             pkg_name = d.getVar("PKG:%s" % package) or package
             package_doc.name = pkg_name
-            package_doc.documentNamespace = get_doc_namespace(d, package_doc)
+            package_doc.documentNamespace = get_namespace(d, package_doc.name)
             package_doc.creationInfo.created = creation_time
             package_doc.creationInfo.comment = "This document was created by analyzing packages created during the build."
-            package_doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
+            package_doc.creationInfo.licenseListVersion = get_license_list_version(license_data, d)
             package_doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
             package_doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
             package_doc.creationInfo.creators.append("Person: N/A ()")
@@ -670,8 +558,8 @@ python do_create_spdx() {
 
             spdx_package.SPDXID = oe.sbom.get_package_spdxid(pkg_name)
             spdx_package.name = pkg_name
-            spdx_package.versionInfo = d.getVar("PV")
-            spdx_package.licenseDeclared = convert_license_to_spdx(package_license, package_doc, d, found_licenses)
+            spdx_package.versionInfo = d.getVar("SPDX_PACKAGE_VERSION")
+            spdx_package.licenseDeclared = convert_license_to_spdx(package_license, license_data, package_doc, d, found_licenses)
             spdx_package.supplier = d.getVar("SPDX_SUPPLIER")
 
             package_doc.packages.append(spdx_package)
@@ -714,50 +602,16 @@ addtask do_create_spdx_setscene
 
 do_create_spdx[dirs] = "${SPDXWORK}"
 do_create_spdx[cleandirs] = "${SPDXDEPLOY} ${SPDXWORK}"
-do_create_spdx[depends] += "${PATCHDEPENDENCY}"
-
-def collect_package_providers(d):
-    from pathlib import Path
-    import oe.sbom
-    import oe.spdx
-    import json
-
-    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
-
-    providers = {}
-
-    deps = collect_direct_deps(d, "do_create_spdx")
-    deps.append((d.getVar("PN"), d.getVar("BB_HASHFILENAME"), True))
-
-    for dep_pn, dep_hashfn, _ in deps:
-        localdata = d
-        recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
-        if not recipe_data:
-            localdata = bb.data.createCopy(d)
-            localdata.setVar("PKGDATA_DIR", "${PKGDATA_DIR_SDK}")
-            recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
-
-        for pkg in recipe_data.get("PACKAGES", "").split():
-
-            pkg_data = oe.packagedata.read_subpkgdata_dict(pkg, localdata)
-            rprovides = set(n for n, _ in bb.utils.explode_dep_versions2(pkg_data.get("RPROVIDES", "")).items())
-            rprovides.add(pkg)
-
-            if "PKG" in pkg_data:
-                pkg = pkg_data["PKG"]
-                rprovides.add(pkg)
-
-            for r in rprovides:
-                providers[r] = (pkg, dep_hashfn)
-
-    return providers
-
-collect_package_providers[vardepsexclude] += "BB_TASKDEPDATA"
+do_create_spdx[depends] += " \
+    ${PATCHDEPENDENCY} \
+    ${@create_spdx_source_deps(d)} \
+"
 
 python do_create_runtime_spdx() {
     from datetime import datetime, timezone
     import oe.sbom
     import oe.spdx
+    import oe.spdx_common
     import oe.packagedata
     from pathlib import Path
 
@@ -767,9 +621,11 @@ python do_create_runtime_spdx() {
 
     creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
 
-    providers = collect_package_providers(d)
+    license_data = oe.spdx_common.load_spdx_license_data(d)
+
+    providers = oe.spdx_common.collect_package_providers(d)
     pkg_arch = d.getVar("SSTATE_PKGARCH")
-    package_archs = d.getVar("SSTATE_ARCHS").split()
+    package_archs = d.getVar("SPDX_MULTILIB_SSTATE_ARCHS").split()
     package_archs.reverse()
 
     if not is_native:
@@ -800,10 +656,10 @@ python do_create_runtime_spdx() {
 
             runtime_doc = oe.spdx.SPDXDocument()
             runtime_doc.name = "runtime-" + pkg_name
-            runtime_doc.documentNamespace = get_doc_namespace(localdata, runtime_doc)
+            runtime_doc.documentNamespace = get_namespace(localdata, runtime_doc.name)
             runtime_doc.creationInfo.created = creation_time
             runtime_doc.creationInfo.comment = "This document was created by analyzing package runtime dependencies."
-            runtime_doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
+            runtime_doc.creationInfo.licenseListVersion = get_license_list_version(license_data, d)
             runtime_doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
             runtime_doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
             runtime_doc.creationInfo.creators.append("Person: N/A ()")
@@ -875,7 +731,7 @@ python do_create_runtime_spdx() {
         oe.sbom.write_doc(d, runtime_doc, pkg_arch, "runtime", spdx_deploy, indent=get_json_indent(d))
 }
 
-do_create_runtime_spdx[vardepsexclude] += "OVERRIDES SSTATE_ARCHS"
+do_create_runtime_spdx[vardepsexclude] += "OVERRIDES SPDX_MULTILIB_SSTATE_ARCHS"
 
 addtask do_create_runtime_spdx after do_create_spdx before do_build do_rm_work
 SSTATETASKS += "do_create_runtime_spdx"
@@ -891,60 +747,6 @@ do_create_runtime_spdx[dirs] = "${SPDXRUNTIMEDEPLOY}"
 do_create_runtime_spdx[cleandirs] = "${SPDXRUNTIMEDEPLOY}"
 do_create_runtime_spdx[rdeptask] = "do_create_spdx"
 
-def spdx_get_src(d):
-    """
-    save patched source of the recipe in SPDX_WORKDIR.
-    """
-    import shutil
-    spdx_workdir = d.getVar('SPDXWORK')
-    spdx_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
-    pn = d.getVar('PN')
-
-    workdir = d.getVar("WORKDIR")
-
-    try:
-        # The kernel class functions require it to be on work-shared, so we dont change WORKDIR
-        if not is_work_shared_spdx(d):
-            # Change the WORKDIR to make do_unpack do_patch run in another dir.
-            d.setVar('WORKDIR', spdx_workdir)
-            # Restore the original path to recipe's native sysroot (it's relative to WORKDIR).
-            d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
-
-            # The changed 'WORKDIR' also caused 'B' changed, create dir 'B' for the
-            # possibly requiring of the following tasks (such as some recipes's
-            # do_patch required 'B' existed).
-            bb.utils.mkdirhier(d.getVar('B'))
-
-        bb.build.exec_func('do_unpack', d)
-        # Copy source of kernel to spdx_workdir
-        if is_work_shared_spdx(d):
-            share_src = d.getVar('WORKDIR')
-            d.setVar('WORKDIR', spdx_workdir)
-            d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
-            src_dir = spdx_workdir + "/" + d.getVar('PN')+ "-" + d.getVar('PV') + "-" + d.getVar('PR')
-            bb.utils.mkdirhier(src_dir)
-            if bb.data.inherits_class('kernel',d):
-                share_src = d.getVar('STAGING_KERNEL_DIR')
-            cmd_copy_share = "cp -rf " + share_src + "/* " + src_dir + "/"
-            cmd_copy_shared_res = os.popen(cmd_copy_share).read()
-            bb.note("cmd_copy_shared_result = " + cmd_copy_shared_res)
-
-            git_path = src_dir + "/.git"
-            if os.path.exists(git_path):
-                shutils.rmtree(git_path)
-
-        # Make sure gcc and kernel sources are patched only once
-        if not (d.getVar('SRC_URI') == "" or is_work_shared_spdx(d)):
-            bb.build.exec_func('do_patch', d)
-
-        # Some userland has no source.
-        if not os.path.exists( spdx_workdir ):
-            bb.utils.mkdirhier(spdx_workdir)
-    finally:
-        d.setVar("WORKDIR", workdir)
-
-spdx_get_src[vardepsexclude] += "STAGING_KERNEL_DIR"
-
 do_rootfs[recrdeptask] += "do_create_spdx do_create_runtime_spdx"
 do_rootfs[cleandirs] += "${SPDXIMAGEWORK}"
 
@@ -1002,6 +804,7 @@ def combine_spdx(d, rootfs_name, rootfs_deploydir, rootfs_spdxid, packages, spdx
     import os
     import oe.spdx
     import oe.sbom
+    import oe.spdx_common
     import io
     import json
     from datetime import timezone, datetime
@@ -1009,8 +812,10 @@ def combine_spdx(d, rootfs_name, rootfs_deploydir, rootfs_spdxid, packages, spdx
     import tarfile
     import bb.compress.zstd
 
-    providers = collect_package_providers(d)
-    package_archs = d.getVar("SSTATE_ARCHS").split()
+    license_data = oe.spdx_common.load_spdx_license_data(d)
+
+    providers = oe.spdx_common.collect_package_providers(d)
+    package_archs = d.getVar("SPDX_MULTILIB_SSTATE_ARCHS").split()
     package_archs.reverse()
 
     creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
@@ -1019,68 +824,69 @@ def combine_spdx(d, rootfs_name, rootfs_deploydir, rootfs_spdxid, packages, spdx
 
     doc = oe.spdx.SPDXDocument()
     doc.name = rootfs_name
-    doc.documentNamespace = get_doc_namespace(d, doc)
+    doc.documentNamespace = get_namespace(d, doc.name)
     doc.creationInfo.created = creation_time
     doc.creationInfo.comment = "This document was created by analyzing the source of the Yocto recipe during the build."
-    doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
+    doc.creationInfo.licenseListVersion = get_license_list_version(license_data, d)
     doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
     doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
     doc.creationInfo.creators.append("Person: N/A ()")
 
     image = oe.spdx.SPDXPackage()
     image.name = d.getVar("PN")
-    image.versionInfo = d.getVar("PV")
+    image.versionInfo = d.getVar("SPDX_PACKAGE_VERSION")
     image.SPDXID = rootfs_spdxid
     image.supplier = d.getVar("SPDX_SUPPLIER")
 
     doc.packages.append(image)
 
-    for name in sorted(packages.keys()):
-        if name not in providers:
-            bb.fatal("Unable to find SPDX provider for '%s'" % name)
-
-        pkg_name, pkg_hashfn = providers[name]
-
-        pkg_spdx_path = oe.sbom.doc_find_by_hashfn(deploy_dir_spdx, package_archs, pkg_name, pkg_hashfn)
-        if not pkg_spdx_path:
-            bb.fatal("No SPDX file found for package %s, %s" % (pkg_name, pkg_hashfn))
-
-        pkg_doc, pkg_doc_sha1 = oe.sbom.read_doc(pkg_spdx_path)
-
-        for p in pkg_doc.packages:
-            if p.name == name:
-                pkg_ref = oe.spdx.SPDXExternalDocumentRef()
-                pkg_ref.externalDocumentId = "DocumentRef-%s" % pkg_doc.name
-                pkg_ref.spdxDocument = pkg_doc.documentNamespace
-                pkg_ref.checksum.algorithm = "SHA1"
-                pkg_ref.checksum.checksumValue = pkg_doc_sha1
-
-                doc.externalDocumentRefs.append(pkg_ref)
-                doc.add_relationship(image, "CONTAINS", "%s:%s" % (pkg_ref.externalDocumentId, p.SPDXID))
-                break
-        else:
-            bb.fatal("Unable to find package with name '%s' in SPDX file %s" % (name, pkg_spdx_path))
-
-        runtime_spdx_path = oe.sbom.doc_find_by_hashfn(deploy_dir_spdx, package_archs, "runtime-" + name, pkg_hashfn)
-        if not runtime_spdx_path:
-            bb.fatal("No runtime SPDX document found for %s, %s" % (name, pkg_hashfn))
-
-        runtime_doc, runtime_doc_sha1 = oe.sbom.read_doc(runtime_spdx_path)
-
-        runtime_ref = oe.spdx.SPDXExternalDocumentRef()
-        runtime_ref.externalDocumentId = "DocumentRef-%s" % runtime_doc.name
-        runtime_ref.spdxDocument = runtime_doc.documentNamespace
-        runtime_ref.checksum.algorithm = "SHA1"
-        runtime_ref.checksum.checksumValue = runtime_doc_sha1
-
-        # "OTHER" isn't ideal here, but I can't find a relationship that makes sense
-        doc.externalDocumentRefs.append(runtime_ref)
-        doc.add_relationship(
-            image,
-            "OTHER",
-            "%s:%s" % (runtime_ref.externalDocumentId, runtime_doc.SPDXID),
-            comment="Runtime dependencies for %s" % name
-        )
+    if packages:
+        for name in sorted(packages.keys()):
+            if name not in providers:
+                bb.fatal("Unable to find SPDX provider for '%s'" % name)
+
+            pkg_name, pkg_hashfn = providers[name]
+
+            pkg_spdx_path = oe.sbom.doc_find_by_hashfn(deploy_dir_spdx, package_archs, pkg_name, pkg_hashfn)
+            if not pkg_spdx_path:
+                bb.fatal("No SPDX file found for package %s, %s" % (pkg_name, pkg_hashfn))
+
+            pkg_doc, pkg_doc_sha1 = oe.sbom.read_doc(pkg_spdx_path)
+
+            for p in pkg_doc.packages:
+                if p.name == name:
+                    pkg_ref = oe.spdx.SPDXExternalDocumentRef()
+                    pkg_ref.externalDocumentId = "DocumentRef-%s" % pkg_doc.name
+                    pkg_ref.spdxDocument = pkg_doc.documentNamespace
+                    pkg_ref.checksum.algorithm = "SHA1"
+                    pkg_ref.checksum.checksumValue = pkg_doc_sha1
+
+                    doc.externalDocumentRefs.append(pkg_ref)
+                    doc.add_relationship(image, "CONTAINS", "%s:%s" % (pkg_ref.externalDocumentId, p.SPDXID))
+                    break
+            else:
+                bb.fatal("Unable to find package with name '%s' in SPDX file %s" % (name, pkg_spdx_path))
+
+            runtime_spdx_path = oe.sbom.doc_find_by_hashfn(deploy_dir_spdx, package_archs, "runtime-" + name, pkg_hashfn)
+            if not runtime_spdx_path:
+                bb.fatal("No runtime SPDX document found for %s, %s" % (name, pkg_hashfn))
+
+            runtime_doc, runtime_doc_sha1 = oe.sbom.read_doc(runtime_spdx_path)
+
+            runtime_ref = oe.spdx.SPDXExternalDocumentRef()
+            runtime_ref.externalDocumentId = "DocumentRef-%s" % runtime_doc.name
+            runtime_ref.spdxDocument = runtime_doc.documentNamespace
+            runtime_ref.checksum.algorithm = "SHA1"
+            runtime_ref.checksum.checksumValue = runtime_doc_sha1
+
+            # "OTHER" isn't ideal here, but I can't find a relationship that makes sense
+            doc.externalDocumentRefs.append(runtime_ref)
+            doc.add_relationship(
+                image,
+                "OTHER",
+                "%s:%s" % (runtime_ref.externalDocumentId, runtime_doc.SPDXID),
+                comment="Runtime dependencies for %s" % name
+            )
     bb.utils.mkdirhier(spdx_workdir)
     image_spdx_path = spdx_workdir / (rootfs_name + ".spdx.json")
 
@@ -1161,4 +967,4 @@ def combine_spdx(d, rootfs_name, rootfs_deploydir, rootfs_spdxid, packages, spdx
 
     tar.addfile(info, fileobj=index_str)
 
-combine_spdx[vardepsexclude] += "BB_NUMBER_THREADS SSTATE_ARCHS"
+combine_spdx[vardepsexclude] += "BB_NUMBER_THREADS SPDX_MULTILIB_SSTATE_ARCHS"
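
The refactor above replaces get_doc_namespace() with get_namespace(d, name), shared by the recipe, package, runtime and image documents. A standalone sketch of the uuid5 scheme it relies on; the default values shown were previously set in this class and are now expected from spdx-common, and the recipe name is a made-up example:

import uuid

SPDX_UUID_NAMESPACE = "sbom.openembedded.org"       # default, see spdx-common.bbclass
SPDX_NAMESPACE_PREFIX = "http://spdx.org/spdxdocs"  # default, see spdx-common.bbclass

def get_namespace(name):
    # uuid5 is deterministic, so the same document name always maps to the
    # same namespace URI across rebuilds.
    namespace_uuid = uuid.uuid5(uuid.NAMESPACE_DNS, SPDX_UUID_NAMESPACE)
    return "%s/%s-%s" % (SPDX_NAMESPACE_PREFIX, name, uuid.uuid5(namespace_uuid, name))

print(get_namespace("recipe-busybox"))
print(get_namespace("recipe-busybox") == get_namespace("recipe-busybox"))  # True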
diff --git a/meta/classes/create-spdx-3.0.bbclass b/meta/classes/create-spdx-3.0.bbclass
new file mode 100644
index 0000000000..c0a5436ad6
--- /dev/null
+++ b/meta/classes/create-spdx-3.0.bbclass
@@ -0,0 +1,205 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7inherit spdx-common
8
9SPDX_VERSION = "3.0.1"
10
11# The list of SPDX profiles generated documents will conform to
12SPDX_PROFILES ?= "core build software simpleLicensing security"
13
14SPDX_INCLUDE_BUILD_VARIABLES ??= "0"
15SPDX_INCLUDE_BUILD_VARIABLES[doc] = "If set to '1', the bitbake variables for a \
16 recipe will be included in the Build object. This will most likely result \
17 in non-reproducible SPDX output"
18
19SPDX_INCLUDE_BITBAKE_PARENT_BUILD ??= "0"
20SPDX_INCLUDE_BITBAKE_PARENT_BUILD[doc] = "Report the parent invocation of bitbake \
21 for each Build object. This allows you to know who invoked bitbake to perform \
22 a build, but will result in non-reproducible SPDX output."
23
24SPDX_PACKAGE_ADDITIONAL_PURPOSE ?= ""
25SPDX_PACKAGE_ADDITIONAL_PURPOSE[doc] = "The list of additional purposes to assign to \
26 the generated packages for a recipe. The primary purpose is always `install`. \
 27     Package overrides are allowed to override the additional purposes for \
28 individual packages."
29
30SPDX_IMAGE_PURPOSE ?= "filesystemImage"
31SPDX_IMAGE_PURPOSE[doc] = "The list of purposes to assign to the generated images. \
32 The first listed item will be the Primary Purpose and all additional items will \
33 be added as additional purposes"
34
35SPDX_SDK_PURPOSE ?= "install"
 36SPDX_SDK_PURPOSE[doc] = "The list of purposes to assign to the generated SDK installer. \
37 The first listed item will be the Primary Purpose and all additional items will \
38 be added as additional purposes"
39
40SPDX_INCLUDE_VEX ??= "current"
41SPDX_INCLUDE_VEX[doc] = "Controls what VEX information is in the output. Set to \
42 'none' to disable all VEX data. Set to 'current' to only include VEX data \
43 for vulnerabilities not already fixed in the upstream source code \
44 (recommended). Set to 'all' to get all known historical vulnerabilities, \
45 including those already fixed upstream (warning: This can be large and \
46 slow)."
47
48SPDX_INCLUDE_TIMESTAMPS ?= "0"
49SPDX_INCLUDE_TIMESTAMPS[doc] = "Include time stamps in SPDX output. This is \
50 useful if you want to know when artifacts were produced and when builds \
51 occurred, but will result in non-reproducible SPDX output"
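Taken together, the switches above trade reproducibility of the SPDX output against extra detail. A minimal local.conf sketch that keeps reproducible output while still recording VEX data for unfixed issues (the values shown are simply the documented defaults):

    SPDX_INCLUDE_BUILD_VARIABLES = "0"
    SPDX_INCLUDE_BITBAKE_PARENT_BUILD = "0"
    SPDX_INCLUDE_TIMESTAMPS = "0"
    SPDX_INCLUDE_VEX = "current"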
52
53SPDX_IMPORTS ??= ""
54SPDX_IMPORTS[doc] = "SPDX_IMPORTS is the base variable that describes how to \
55 reference external SPDX ids. Each import is defined as a key in this \
 56     variable; further variables that include the key in their name give \
 57     more information about the import. Each key can have the following variables: \
 58     SPDX_IMPORTS_<key>_spdxid: The fully qualified SPDX ID of the object \
59 SPDX_IMPORTS_<key>_uri: The URI where the SPDX Document that contains \
60 the external object can be found. Optional but recommended \
61 SPDX_IMPORTS_<key>_hash_<hash>: The Checksum of the SPDX Document that \
 62     contains the External ID. <hash> must be one of the valid SPDX hashing \
 63     algorithms, as described by the HashAlgorithm vocabulary in the \
64 SPDX 3 spec. Optional but recommended"
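As a sketch of the key/suffix scheme described above (the key name, URI and checksum are placeholders rather than values shipped by the class, and the lowercase HashAlgorithm spelling, e.g. sha256, is assumed):

    SPDX_IMPORTS += "acmelib"
    SPDX_IMPORTS_acmelib_spdxid = "https://sbom.example.com/acmelib.spdx.json#SPDXRef-Package-acmelib"
    SPDX_IMPORTS_acmelib_uri = "https://sbom.example.com/acmelib.spdx.json"
    SPDX_IMPORTS_acmelib_hash_sha256 = "<sha256 checksum of the referenced document>"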
65
66# Agents
67# Bitbake variables can be used to describe an SPDX Agent that may be used
68# during the build. An Agent is specified using a set of variables which all
69# start with some common base name:
70#
71# <BASE>_name: The name of the Agent (required)
72# <BASE>_type: The type of Agent. Must be one of "person", "organization",
73# "software", or "agent" (the default if not specified)
74# <BASE>_comment: The comment for the Agent (optional)
 75# <BASE>_id_<ID>: An External Identifier for the Agent. <ID> must be a valid
76# ExternalIdentifierType from the SPDX 3 spec. Commonly, an E-mail address
77# can be specified with <BASE>_id_email
78#
79# Alternatively, an Agent can be an external reference by referencing a key
80# in SPDX_IMPORTS like so:
81#
82# <BASE>_import = "<key>"
83#
84# Finally, the same agent described by another set of agent variables can be
85# referenced by specifying the basename of the variable that should be
86# referenced:
87#
88# SPDX_PACKAGE_SUPPLIER_ref = "SPDX_AUTHORS_openembedded"
89
90SPDX_AUTHORS ??= "openembedded"
91SPDX_AUTHORS[doc] = "A space separated list of the document authors. Each item \
92 is used to name a base variable like SPDX_AUTHORS_<AUTHOR> that \
93 describes the author."
94
95SPDX_AUTHORS_openembedded_name = "OpenEmbedded"
96SPDX_AUTHORS_openembedded_type = "organization"
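A sketch of the agent pattern described in the comment block above, adding a second author and reusing it as the package supplier (the organization name and e-mail address are placeholders):

    SPDX_AUTHORS:append = " acme"
    SPDX_AUTHORS_acme_name = "Acme Corp."
    SPDX_AUTHORS_acme_type = "organization"
    SPDX_AUTHORS_acme_id_email = "sbom@acme.example.com"

    SPDX_PACKAGE_SUPPLIER_ref = "SPDX_AUTHORS_acme"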
97
98SPDX_BUILD_HOST[doc] = "The base variable name to describe the build host on \
99 which a build is running. Must be an SPDX_IMPORTS key. Requires \
100 SPDX_INCLUDE_BITBAKE_PARENT_BUILD. NOTE: Setting this will result in \
101 non-reproducible SPDX output"
102
103SPDX_INVOKED_BY[doc] = "The base variable name to describe the Agent that \
104 invoked the build, which builds will link to if specified. Requires \
105 SPDX_INCLUDE_BITBAKE_PARENT_BUILD. NOTE: Setting this will likely result in \
106 non-reproducible SPDX output"
107
108SPDX_ON_BEHALF_OF[doc] = "The base variable name to describe the Agent on whose \
109 behalf the invoking Agent (SPDX_INVOKED_BY) is running the build. Requires \
110 SPDX_INCLUDE_BITBAKE_PARENT_BUILD. NOTE: Setting this will likely result in \
111 non-reproducible SPDX output"
112
113SPDX_PACKAGE_SUPPLIER[doc] = "The base variable name to describe the Agent who \
114 is supplying artifacts produced by the build"
115
116SPDX_PACKAGE_VERSION ??= "${PV}"
117SPDX_PACKAGE_VERSION[doc] = "The version of a package, software_packageVersion \
118 in software_Package"
119
120SPDX_PACKAGE_URL ??= ""
121SPDX_PACKAGE_URL[doc] = "Provides a place for the SPDX data creator to record \
122the package URL string (in accordance with the Package URL specification) for \
123a software Package."
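For instance, a recipe could record a Package URL in purl syntax (the value below is illustrative only, not a default provided by the class):

    SPDX_PACKAGE_URL = "pkg:generic/${BPN}@${PV}"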
124
125IMAGE_CLASSES:append = " create-spdx-image-3.0"
126SDK_CLASSES += "create-spdx-sdk-3.0"
127
128oe.spdx30_tasks.set_timestamp_now[vardepsexclude] = "SPDX_INCLUDE_TIMESTAMPS"
129oe.spdx30_tasks.get_package_sources_from_debug[vardepsexclude] += "STAGING_KERNEL_DIR"
130oe.spdx30_tasks.collect_dep_objsets[vardepsexclude] = "SPDX_MULTILIB_SSTATE_ARCHS"
131
132
132# SPDX library code makes heavy use of classes, whose dependencies bitbake
133# cannot easily parse out. As such, the library code files that make use of
135# classes are explicitly added as file checksum dependencies.
136SPDX3_LIB_DEP_FILES = "\
137 ${COREBASE}/meta/lib/oe/sbom30.py:True \
138 ${COREBASE}/meta/lib/oe/spdx30.py:True \
139 "
140
141python do_create_spdx() {
142 import oe.spdx30_tasks
143 oe.spdx30_tasks.create_spdx(d)
144}
145do_create_spdx[vardeps] += "\
146 SPDX_INCLUDE_BITBAKE_PARENT_BUILD \
147 SPDX_PACKAGE_ADDITIONAL_PURPOSE \
148 SPDX_PROFILES \
149 SPDX_NAMESPACE_PREFIX \
150 SPDX_UUID_NAMESPACE \
151 "
152
153addtask do_create_spdx after \
154 do_collect_spdx_deps \
155 do_deploy_source_date_epoch \
156 do_populate_sysroot do_package do_packagedata \
157 before do_populate_sdk do_populate_sdk_ext do_build do_rm_work
158
159SSTATETASKS += "do_create_spdx"
160do_create_spdx[sstate-inputdirs] = "${SPDXDEPLOY}"
161do_create_spdx[sstate-outputdirs] = "${DEPLOY_DIR_SPDX}"
162do_create_spdx[file-checksums] += "${SPDX3_LIB_DEP_FILES}"
163
164python do_create_spdx_setscene () {
165 sstate_setscene(d)
166}
167addtask do_create_spdx_setscene
168
169do_create_spdx[dirs] = "${SPDXWORK}"
170do_create_spdx[cleandirs] = "${SPDXDEPLOY} ${SPDXWORK}"
171do_create_spdx[depends] += " \
172 ${PATCHDEPENDENCY} \
173 ${@create_spdx_source_deps(d)} \
174"
175
176python do_create_package_spdx() {
177 import oe.spdx30_tasks
178 oe.spdx30_tasks.create_package_spdx(d)
179}
180oe.spdx30_tasks.create_package_spdx[vardepsexclude] = "OVERRIDES"
181
182addtask do_create_package_spdx after do_create_spdx before do_build do_rm_work
183SSTATETASKS += "do_create_package_spdx"
184do_create_package_spdx[sstate-inputdirs] = "${SPDXRUNTIMEDEPLOY}"
185do_create_package_spdx[sstate-outputdirs] = "${DEPLOY_DIR_SPDX}"
186do_create_package_spdx[file-checksums] += "${SPDX3_LIB_DEP_FILES}"
187
188python do_create_package_spdx_setscene () {
189 sstate_setscene(d)
190}
191addtask do_create_package_spdx_setscene
192
193do_create_package_spdx[dirs] = "${SPDXRUNTIMEDEPLOY}"
194do_create_package_spdx[cleandirs] = "${SPDXRUNTIMEDEPLOY}"
195do_create_package_spdx[rdeptask] = "do_create_spdx"
196
197python spdx30_build_started_handler () {
198 import oe.spdx30_tasks
199 d = e.data.createCopy()
200 oe.spdx30_tasks.write_bitbake_spdx(d)
201}
202
203addhandler spdx30_build_started_handler
204spdx30_build_started_handler[eventmask] = "bb.event.BuildStarted"
205
diff --git a/meta/classes/create-spdx.bbclass b/meta/classes/create-spdx.bbclass
index 19c6c0ff0b..b604973ae0 100644
--- a/meta/classes/create-spdx.bbclass
+++ b/meta/classes/create-spdx.bbclass
@@ -5,4 +5,4 @@
5# 5#
6# Include this class when you don't care what version of SPDX you get; it will 6# Include this class when you don't care what version of SPDX you get; it will
7# be updated to the latest stable version that is supported 7# be updated to the latest stable version that is supported
8inherit create-spdx-2.2 8inherit create-spdx-3.0
diff --git a/meta/classes/cve-check.bbclass b/meta/classes/cve-check.bbclass
index 56ba8bceef..c63ebd56e1 100644
--- a/meta/classes/cve-check.bbclass
+++ b/meta/classes/cve-check.bbclass
@@ -31,25 +31,27 @@
31CVE_PRODUCT ??= "${BPN}" 31CVE_PRODUCT ??= "${BPN}"
32CVE_VERSION ??= "${PV}" 32CVE_VERSION ??= "${PV}"
33 33
34CVE_CHECK_DB_DIR ?= "${DL_DIR}/CVE_CHECK" 34# Possible database sources: NVD1, NVD2, FKIE
35CVE_CHECK_DB_FILE ?= "${CVE_CHECK_DB_DIR}/nvdcve_2-1.db" 35NVD_DB_VERSION ?= "FKIE"
36
37# Use different file names for each database source, as they synchronize at different times and so may differ slightly
38CVE_CHECK_DB_FILENAME ?= "${@'nvdcve_2-2.db' if d.getVar('NVD_DB_VERSION') == 'NVD2' else 'nvdcve_1-3.db' if d.getVar('NVD_DB_VERSION') == 'NVD1' else 'nvdfkie_1-1.db'}"
39CVE_CHECK_DB_FETCHER ?= "${@'cve-update-nvd2-native' if d.getVar('NVD_DB_VERSION') == 'NVD2' else 'cve-update-db-native'}"
40CVE_CHECK_DB_DIR ?= "${STAGING_DIR}/CVE_CHECK"
41CVE_CHECK_DB_FILE ?= "${CVE_CHECK_DB_DIR}/${CVE_CHECK_DB_FILENAME}"
36CVE_CHECK_DB_FILE_LOCK ?= "${CVE_CHECK_DB_FILE}.lock" 42CVE_CHECK_DB_FILE_LOCK ?= "${CVE_CHECK_DB_FILE}.lock"
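As a worked example of the expressions above, selecting the NVD 2.x API feed in local.conf switches both the fetcher recipe and the database file name:

    # Uses cve-update-nvd2-native:do_unpack and ${CVE_CHECK_DB_DIR}/nvdcve_2-2.db
    NVD_DB_VERSION = "NVD2"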
37 43
38CVE_CHECK_LOG ?= "${T}/cve.log"
39CVE_CHECK_TMP_FILE ?= "${TMPDIR}/cve_check"
40CVE_CHECK_SUMMARY_DIR ?= "${LOG_DIR}/cve" 44CVE_CHECK_SUMMARY_DIR ?= "${LOG_DIR}/cve"
41CVE_CHECK_SUMMARY_FILE_NAME ?= "cve-summary" 45CVE_CHECK_SUMMARY_FILE_NAME ?= "cve-summary"
42CVE_CHECK_SUMMARY_FILE ?= "${CVE_CHECK_SUMMARY_DIR}/${CVE_CHECK_SUMMARY_FILE_NAME}"
43CVE_CHECK_SUMMARY_FILE_NAME_JSON = "cve-summary.json" 46CVE_CHECK_SUMMARY_FILE_NAME_JSON = "cve-summary.json"
44CVE_CHECK_SUMMARY_INDEX_PATH = "${CVE_CHECK_SUMMARY_DIR}/cve-summary-index.txt" 47CVE_CHECK_SUMMARY_INDEX_PATH = "${CVE_CHECK_SUMMARY_DIR}/cve-summary-index.txt"
45 48
46CVE_CHECK_LOG_JSON ?= "${T}/cve.json" 49CVE_CHECK_LOG_JSON ?= "${T}/cve.json"
47 50
48CVE_CHECK_DIR ??= "${DEPLOY_DIR}/cve" 51CVE_CHECK_DIR ??= "${DEPLOY_DIR}/cve"
49CVE_CHECK_RECIPE_FILE ?= "${CVE_CHECK_DIR}/${PN}"
50CVE_CHECK_RECIPE_FILE_JSON ?= "${CVE_CHECK_DIR}/${PN}_cve.json" 52CVE_CHECK_RECIPE_FILE_JSON ?= "${CVE_CHECK_DIR}/${PN}_cve.json"
51CVE_CHECK_MANIFEST ?= "${IMGDEPLOYDIR}/${IMAGE_NAME}.cve" 53CVE_CHECK_MANIFEST_JSON_SUFFIX ?= "json"
52CVE_CHECK_MANIFEST_JSON ?= "${IMGDEPLOYDIR}/${IMAGE_NAME}.json" 54CVE_CHECK_MANIFEST_JSON ?= "${IMGDEPLOYDIR}/${IMAGE_NAME}.${CVE_CHECK_MANIFEST_JSON_SUFFIX}"
53CVE_CHECK_COPY_FILES ??= "1" 55CVE_CHECK_COPY_FILES ??= "1"
54CVE_CHECK_CREATE_MANIFEST ??= "1" 56CVE_CHECK_CREATE_MANIFEST ??= "1"
55 57
@@ -58,9 +60,6 @@ CVE_CHECK_REPORT_PATCHED ??= "1"
58 60
59CVE_CHECK_SHOW_WARNINGS ??= "1" 61CVE_CHECK_SHOW_WARNINGS ??= "1"
60 62
61# Provide text output
62CVE_CHECK_FORMAT_TEXT ??= "1"
63
64# Provide JSON output 63# Provide JSON output
65CVE_CHECK_FORMAT_JSON ??= "1" 64CVE_CHECK_FORMAT_JSON ??= "1"
66 65
@@ -105,21 +104,13 @@ CVE_CHECK_LAYER_INCLUDELIST ??= ""
105CVE_VERSION_SUFFIX ??= "" 104CVE_VERSION_SUFFIX ??= ""
106 105
107python () { 106python () {
108 # Fallback all CVEs from CVE_CHECK_IGNORE to CVE_STATUS 107 from oe.cve_check import extend_cve_status
109 cve_check_ignore = d.getVar("CVE_CHECK_IGNORE") 108 extend_cve_status(d)
110 if cve_check_ignore: 109
111 bb.warn("CVE_CHECK_IGNORE is deprecated in favor of CVE_STATUS") 110 nvd_database_type = d.getVar("NVD_DB_VERSION")
112 for cve in (d.getVar("CVE_CHECK_IGNORE") or "").split(): 111 if nvd_database_type not in ("NVD1", "NVD2", "FKIE"):
113 d.setVarFlag("CVE_STATUS", cve, "ignored") 112 bb.erroronce("Malformed NVD_DB_VERSION, must be one of: NVD1, NVD2, FKIE. Defaulting to NVD2")
114 113 d.setVar("NVD_DB_VERSION", "NVD2")
115 # Process CVE_STATUS_GROUPS to set multiple statuses and optional detail or description at once
116 for cve_status_group in (d.getVar("CVE_STATUS_GROUPS") or "").split():
117 cve_group = d.getVar(cve_status_group)
118 if cve_group is not None:
119 for cve in cve_group.split():
120 d.setVarFlag("CVE_STATUS", cve, d.getVarFlag(cve_status_group, "status"))
121 else:
122 bb.warn("CVE_STATUS_GROUPS contains undefined variable %s" % cve_status_group)
123} 114}
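The CVE_CHECK_IGNORE fallback and CVE_STATUS_GROUPS expansion previously done here now live in oe.cve_check.extend_cve_status(); recipes keep using the CVE_STATUS varflag form. A hedged recipe-level sketch (the CVE identifiers are placeholders and the status keywords must be ones accepted by CVE_CHECK_STATUSMAP):

    CVE_STATUS[CVE-2020-12345] = "not-applicable-platform: only affects Windows builds"

    CVE_STATUS_GROUPS += "CVE_STATUS_WIN_ONLY"
    CVE_STATUS_WIN_ONLY = "CVE-2020-12346 CVE-2020-12347"
    CVE_STATUS_WIN_ONLY[status] = "not-applicable-platform: only affects Windows builds"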
124 115
125def generate_json_report(d, out_path, link_path): 116def generate_json_report(d, out_path, link_path):
@@ -150,20 +141,11 @@ python cve_save_summary_handler () {
150 import datetime 141 import datetime
151 from oe.cve_check import update_symlinks 142 from oe.cve_check import update_symlinks
152 143
153 cve_tmp_file = d.getVar("CVE_CHECK_TMP_FILE")
154
155 cve_summary_name = d.getVar("CVE_CHECK_SUMMARY_FILE_NAME") 144 cve_summary_name = d.getVar("CVE_CHECK_SUMMARY_FILE_NAME")
156 cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR") 145 cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR")
157 bb.utils.mkdirhier(cvelogpath) 146 bb.utils.mkdirhier(cvelogpath)
158 147
159 timestamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S') 148 timestamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
160 cve_summary_file = os.path.join(cvelogpath, "%s-%s.txt" % (cve_summary_name, timestamp))
161
162 if os.path.exists(cve_tmp_file):
163 shutil.copyfile(cve_tmp_file, cve_summary_file)
164 cvefile_link = os.path.join(cvelogpath, cve_summary_name)
165 update_symlinks(cve_summary_file, cvefile_link)
166 bb.plain("Complete CVE report summary created at: %s" % cvefile_link)
167 149
168 if d.getVar("CVE_CHECK_FORMAT_JSON") == "1": 150 if d.getVar("CVE_CHECK_FORMAT_JSON") == "1":
169 json_summary_link_name = os.path.join(cvelogpath, d.getVar("CVE_CHECK_SUMMARY_FILE_NAME_JSON")) 151 json_summary_link_name = os.path.join(cvelogpath, d.getVar("CVE_CHECK_SUMMARY_FILE_NAME_JSON"))
@@ -187,24 +169,23 @@ python do_cve_check () {
187 patched_cves = get_patched_cves(d) 169 patched_cves = get_patched_cves(d)
188 except FileNotFoundError: 170 except FileNotFoundError:
189 bb.fatal("Failure in searching patches") 171 bb.fatal("Failure in searching patches")
190 ignored, patched, unpatched, status = check_cves(d, patched_cves) 172 cve_data, status = check_cves(d, patched_cves)
191 if patched or unpatched or (d.getVar("CVE_CHECK_COVERAGE") == "1" and status): 173 if len(cve_data) or (d.getVar("CVE_CHECK_COVERAGE") == "1" and status):
192 cve_data = get_cve_info(d, patched + unpatched + ignored) 174 get_cve_info(d, cve_data)
193 cve_write_data(d, patched, unpatched, ignored, cve_data, status) 175 cve_write_data(d, cve_data, status)
194 else: 176 else:
195 bb.note("No CVE database found, skipping CVE check") 177 bb.note("No CVE database found, skipping CVE check")
196 178
197} 179}
198 180
199addtask cve_check before do_build 181addtask cve_check before do_build
200do_cve_check[depends] = "cve-update-nvd2-native:do_fetch" 182do_cve_check[depends] = "${CVE_CHECK_DB_FETCHER}:do_unpack"
201do_cve_check[nostamp] = "1" 183do_cve_check[nostamp] = "1"
202 184
203python cve_check_cleanup () { 185python cve_check_cleanup () {
204 """ 186 """
205 Delete the file used to gather all the CVE information. 187 Delete the file used to gather all the CVE information.
206 """ 188 """
207 bb.utils.remove(e.data.getVar("CVE_CHECK_TMP_FILE"))
208 bb.utils.remove(e.data.getVar("CVE_CHECK_SUMMARY_INDEX_PATH")) 189 bb.utils.remove(e.data.getVar("CVE_CHECK_SUMMARY_INDEX_PATH"))
209} 190}
210 191
@@ -222,9 +203,6 @@ python cve_check_write_rootfs_manifest () {
222 from oe.cve_check import cve_check_merge_jsons, update_symlinks 203 from oe.cve_check import cve_check_merge_jsons, update_symlinks
223 204
224 if d.getVar("CVE_CHECK_COPY_FILES") == "1": 205 if d.getVar("CVE_CHECK_COPY_FILES") == "1":
225 deploy_file = d.getVar("CVE_CHECK_RECIPE_FILE")
226 if os.path.exists(deploy_file):
227 bb.utils.remove(deploy_file)
228 deploy_file_json = d.getVar("CVE_CHECK_RECIPE_FILE_JSON") 206 deploy_file_json = d.getVar("CVE_CHECK_RECIPE_FILE_JSON")
229 if os.path.exists(deploy_file_json): 207 if os.path.exists(deploy_file_json):
230 bb.utils.remove(deploy_file_json) 208 bb.utils.remove(deploy_file_json)
@@ -244,19 +222,13 @@ python cve_check_write_rootfs_manifest () {
244 json_data = {"version":"1", "package": []} 222 json_data = {"version":"1", "package": []}
245 text_data = "" 223 text_data = ""
246 enable_json = d.getVar("CVE_CHECK_FORMAT_JSON") == "1" 224 enable_json = d.getVar("CVE_CHECK_FORMAT_JSON") == "1"
247 enable_text = d.getVar("CVE_CHECK_FORMAT_TEXT") == "1"
248 225
249 save_pn = d.getVar("PN") 226 save_pn = d.getVar("PN")
250 227
251 for pkg in recipies: 228 for pkg in recipies:
252 # To be able to use the CVE_CHECK_RECIPE_FILE variable we have to evaluate 229 # To be able to use the CVE_CHECK_RECIPE_FILE_JSON variable we have to evaluate
253 # it with the different PN names set each time. 230 # it with the different PN names set each time.
254 d.setVar("PN", pkg) 231 d.setVar("PN", pkg)
255 if enable_text:
256 pkgfilepath = d.getVar("CVE_CHECK_RECIPE_FILE")
257 if os.path.exists(pkgfilepath):
258 with open(pkgfilepath) as pfile:
259 text_data += pfile.read()
260 232
261 if enable_json: 233 if enable_json:
262 pkgfilepath = d.getVar("CVE_CHECK_RECIPE_FILE_JSON") 234 pkgfilepath = d.getVar("CVE_CHECK_RECIPE_FILE_JSON")
@@ -267,24 +239,17 @@ python cve_check_write_rootfs_manifest () {
267 239
268 d.setVar("PN", save_pn) 240 d.setVar("PN", save_pn)
269 241
270 if enable_text:
271 link_path = os.path.join(deploy_dir, "%s.cve" % link_name)
272 manifest_name = d.getVar("CVE_CHECK_MANIFEST")
273
274 with open(manifest_name, "w") as f:
275 f.write(text_data)
276
277 update_symlinks(manifest_name, link_path)
278 bb.plain("Image CVE report stored in: %s" % manifest_name)
279
280 if enable_json: 242 if enable_json:
281 link_path = os.path.join(deploy_dir, "%s.json" % link_name) 243 manifest_name_suffix = d.getVar("CVE_CHECK_MANIFEST_JSON_SUFFIX")
282 manifest_name = d.getVar("CVE_CHECK_MANIFEST_JSON") 244 manifest_name = d.getVar("CVE_CHECK_MANIFEST_JSON")
283 245
284 with open(manifest_name, "w") as f: 246 with open(manifest_name, "w") as f:
285 json.dump(json_data, f, indent=2) 247 json.dump(json_data, f, indent=2)
286 248
287 update_symlinks(manifest_name, link_path) 249 if link_name:
250 link_path = os.path.join(deploy_dir, "%s.%s" % (link_name, manifest_name_suffix))
251 update_symlinks(manifest_name, link_path)
252
288 bb.plain("Image CVE JSON report stored in: %s" % manifest_name) 253 bb.plain("Image CVE JSON report stored in: %s" % manifest_name)
289} 254}
290 255
@@ -292,7 +257,51 @@ ROOTFS_POSTPROCESS_COMMAND:prepend = "${@'cve_check_write_rootfs_manifest ' if d
292do_rootfs[recrdeptask] += "${@'do_cve_check' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}" 257do_rootfs[recrdeptask] += "${@'do_cve_check' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"
293do_populate_sdk[recrdeptask] += "${@'do_cve_check' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}" 258do_populate_sdk[recrdeptask] += "${@'do_cve_check' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"
294 259
295def check_cves(d, patched_cves): 260def cve_is_ignored(d, cve_data, cve):
261 if cve not in cve_data:
262 return False
263 if cve_data[cve]['abbrev-status'] == "Ignored":
264 return True
265 return False
266
267def cve_is_patched(d, cve_data, cve):
268 if cve not in cve_data:
269 return False
270 if cve_data[cve]['abbrev-status'] == "Patched":
271 return True
272 return False
273
274def cve_update(d, cve_data, cve, entry):
275 # If no entry, just add it
276 if cve not in cve_data:
277 cve_data[cve] = entry
278 return
 279 # If we are updating, there might be a change in the status
280 bb.debug(1, "Trying CVE entry update for %s from %s to %s" % (cve, cve_data[cve]['abbrev-status'], entry['abbrev-status']))
281 if cve_data[cve]['abbrev-status'] == "Unknown":
282 cve_data[cve] = entry
283 return
284 if cve_data[cve]['abbrev-status'] == entry['abbrev-status']:
285 return
286 # Update like in {'abbrev-status': 'Patched', 'status': 'version-not-in-range'} to {'abbrev-status': 'Unpatched', 'status': 'version-in-range'}
287 if entry['abbrev-status'] == "Unpatched" and cve_data[cve]['abbrev-status'] == "Patched":
288 if entry['status'] == "version-in-range" and cve_data[cve]['status'] == "version-not-in-range":
289 # New result from the scan, vulnerable
290 cve_data[cve] = entry
291 bb.debug(1, "CVE entry %s update from Patched to Unpatched from the scan result" % cve)
292 return
293 if entry['abbrev-status'] == "Patched" and cve_data[cve]['abbrev-status'] == "Unpatched":
294 if entry['status'] == "version-not-in-range" and cve_data[cve]['status'] == "version-in-range":
295 # Range does not match the scan, but we already have a vulnerable match, ignore
296 bb.debug(1, "CVE entry %s update from Patched to Unpatched from the scan result - not applying" % cve)
297 return
298 # If we have an "Ignored", it has a priority
299 if cve_data[cve]['abbrev-status'] == "Ignored":
300 bb.debug(1, "CVE %s not updating because Ignored" % cve)
301 return
302 bb.warn("Unhandled CVE entry update for %s from %s to %s" % (cve, cve_data[cve], entry))
303
304def check_cves(d, cve_data):
296 """ 305 """
297 Connect to the NVD database and find unpatched cves. 306 Connect to the NVD database and find unpatched cves.
298 """ 307 """
@@ -302,28 +311,19 @@ def check_cves(d, patched_cves):
302 real_pv = d.getVar("PV") 311 real_pv = d.getVar("PV")
303 suffix = d.getVar("CVE_VERSION_SUFFIX") 312 suffix = d.getVar("CVE_VERSION_SUFFIX")
304 313
305 cves_unpatched = []
306 cves_ignored = []
307 cves_status = [] 314 cves_status = []
308 cves_in_recipe = False 315 cves_in_recipe = False
309 # CVE_PRODUCT can contain more than one product (eg. curl/libcurl) 316 # CVE_PRODUCT can contain more than one product (eg. curl/libcurl)
310 products = d.getVar("CVE_PRODUCT").split() 317 products = d.getVar("CVE_PRODUCT").split()
311 # If this has been unset then we're not scanning for CVEs here (for example, image recipes) 318 # If this has been unset then we're not scanning for CVEs here (for example, image recipes)
312 if not products: 319 if not products:
313 return ([], [], [], []) 320 return ([], [])
314 pv = d.getVar("CVE_VERSION").split("+git")[0] 321 pv = d.getVar("CVE_VERSION").split("+git")[0]
315 322
316 # If the recipe has been skipped/ignored we return empty lists 323 # If the recipe has been skipped/ignored we return empty lists
317 if pn in d.getVar("CVE_CHECK_SKIP_RECIPE").split(): 324 if pn in d.getVar("CVE_CHECK_SKIP_RECIPE").split():
318 bb.note("Recipe has been skipped by cve-check") 325 bb.note("Recipe has been skipped by cve-check")
319 return ([], [], [], []) 326 return ([], [])
320
321 # Convert CVE_STATUS into ignored CVEs and check validity
322 cve_ignore = []
323 for cve in (d.getVarFlags("CVE_STATUS") or {}):
324 decoded_status, _, _ = decode_cve_status(d, cve)
325 if decoded_status == "Ignored":
326 cve_ignore.append(cve)
327 327
328 import sqlite3 328 import sqlite3
329 db_file = d.expand("file:${CVE_CHECK_DB_FILE}?mode=ro") 329 db_file = d.expand("file:${CVE_CHECK_DB_FILE}?mode=ro")
@@ -342,19 +342,19 @@ def check_cves(d, patched_cves):
342 for cverow in cve_cursor: 342 for cverow in cve_cursor:
343 cve = cverow[0] 343 cve = cverow[0]
344 344
345 if cve in cve_ignore:
346 bb.note("%s-%s ignores %s" % (product, pv, cve))
347 cves_ignored.append(cve)
348 continue
349 elif cve in patched_cves:
350 bb.note("%s has been patched" % (cve))
351 continue
352 # Write status once only for each product 345 # Write status once only for each product
353 if not cves_in_product: 346 if not cves_in_product:
354 cves_status.append([product, True]) 347 cves_status.append([product, True])
355 cves_in_product = True 348 cves_in_product = True
356 cves_in_recipe = True 349 cves_in_recipe = True
357 350
351 if cve_is_ignored(d, cve_data, cve):
352 bb.note("%s-%s ignores %s" % (product, pv, cve))
353 continue
354 elif cve_is_patched(d, cve_data, cve):
355 bb.note("%s has been patched" % (cve))
356 continue
357
358 vulnerable = False 358 vulnerable = False
359 ignored = False 359 ignored = False
360 360
@@ -362,7 +362,7 @@ def check_cves(d, patched_cves):
362 for row in product_cursor: 362 for row in product_cursor:
363 (_, _, _, version_start, operator_start, version_end, operator_end) = row 363 (_, _, _, version_start, operator_start, version_end, operator_end) = row
364 #bb.debug(2, "Evaluating row " + str(row)) 364 #bb.debug(2, "Evaluating row " + str(row))
365 if cve in cve_ignore: 365 if cve_is_ignored(d, cve_data, cve):
366 ignored = True 366 ignored = True
367 367
368 version_start = convert_cve_version(version_start) 368 version_start = convert_cve_version(version_start)
@@ -401,16 +401,16 @@ def check_cves(d, patched_cves):
401 if vulnerable: 401 if vulnerable:
402 if ignored: 402 if ignored:
403 bb.note("%s is ignored in %s-%s" % (cve, pn, real_pv)) 403 bb.note("%s is ignored in %s-%s" % (cve, pn, real_pv))
404 cves_ignored.append(cve) 404 cve_update(d, cve_data, cve, {"abbrev-status": "Ignored"})
405 else: 405 else:
406 bb.note("%s-%s is vulnerable to %s" % (pn, real_pv, cve)) 406 bb.note("%s-%s is vulnerable to %s" % (pn, real_pv, cve))
407 cves_unpatched.append(cve) 407 cve_update(d, cve_data, cve, {"abbrev-status": "Unpatched", "status": "version-in-range"})
408 break 408 break
409 product_cursor.close() 409 product_cursor.close()
410 410
411 if not vulnerable: 411 if not vulnerable:
412 bb.note("%s-%s is not vulnerable to %s" % (pn, real_pv, cve)) 412 bb.note("%s-%s is not vulnerable to %s" % (pn, real_pv, cve))
413 patched_cves.add(cve) 413 cve_update(d, cve_data, cve, {"abbrev-status": "Patched", "status": "version-not-in-range"})
414 cve_cursor.close() 414 cve_cursor.close()
415 415
416 if not cves_in_product: 416 if not cves_in_product:
@@ -418,123 +418,44 @@ def check_cves(d, patched_cves):
418 cves_status.append([product, False]) 418 cves_status.append([product, False])
419 419
420 conn.close() 420 conn.close()
421 diff_ignore = list(set(cve_ignore) - set(cves_ignored))
422 if diff_ignore:
423 oe.qa.handle_error("cve_status_not_in_db", "Found CVE (%s) with CVE_STATUS set that are not found in database for this component" % " ".join(diff_ignore), d)
424 421
425 if not cves_in_recipe: 422 if not cves_in_recipe:
426 bb.note("No CVE records for products in recipe %s" % (pn)) 423 bb.note("No CVE records for products in recipe %s" % (pn))
427 424
428 return (list(cves_ignored), list(patched_cves), cves_unpatched, cves_status) 425 if d.getVar("CVE_CHECK_SHOW_WARNINGS") == "1":
426 unpatched_cves = [cve for cve in cve_data if cve_data[cve]["abbrev-status"] == "Unpatched"]
427 if unpatched_cves:
428 bb.warn("Found unpatched CVE (%s)" % " ".join(unpatched_cves))
429
430 return (cve_data, cves_status)
429 431
430def get_cve_info(d, cves): 432def get_cve_info(d, cve_data):
431 """ 433 """
432 Get CVE information from the database. 434 Get CVE information from the database.
433 """ 435 """
434 436
435 import sqlite3 437 import sqlite3
436 438
437 cve_data = {}
438 db_file = d.expand("file:${CVE_CHECK_DB_FILE}?mode=ro") 439 db_file = d.expand("file:${CVE_CHECK_DB_FILE}?mode=ro")
439 conn = sqlite3.connect(db_file, uri=True) 440 conn = sqlite3.connect(db_file, uri=True)
440 441
441 for cve in cves: 442 for cve in cve_data:
442 cursor = conn.execute("SELECT * FROM NVD WHERE ID IS ?", (cve,)) 443 cursor = conn.execute("SELECT * FROM NVD WHERE ID IS ?", (cve,))
443 for row in cursor: 444 for row in cursor:
444 cve_data[row[0]] = {} 445 # The CVE itself has been added already
445 cve_data[row[0]]["summary"] = row[1] 446 if row[0] not in cve_data:
446 cve_data[row[0]]["scorev2"] = row[2] 447 bb.note("CVE record %s not present" % row[0])
447 cve_data[row[0]]["scorev3"] = row[3] 448 continue
448 cve_data[row[0]]["modified"] = row[4] 449 #cve_data[row[0]] = {}
449 cve_data[row[0]]["vector"] = row[5] 450 cve_data[row[0]]["NVD-summary"] = row[1]
450 cve_data[row[0]]["vectorString"] = row[6] 451 cve_data[row[0]]["NVD-scorev2"] = row[2]
452 cve_data[row[0]]["NVD-scorev3"] = row[3]
453 cve_data[row[0]]["NVD-scorev4"] = row[4]
454 cve_data[row[0]]["NVD-modified"] = row[5]
455 cve_data[row[0]]["NVD-vector"] = row[6]
456 cve_data[row[0]]["NVD-vectorString"] = row[7]
451 cursor.close() 457 cursor.close()
452 conn.close() 458 conn.close()
453 return cve_data
454
455def cve_write_data_text(d, patched, unpatched, ignored, cve_data):
456 """
457 Write CVE information in WORKDIR; and to CVE_CHECK_DIR, and
458 CVE manifest if enabled.
459 """
460
461 from oe.cve_check import decode_cve_status
462
463 cve_file = d.getVar("CVE_CHECK_LOG")
464 fdir_name = d.getVar("FILE_DIRNAME")
465 layer = fdir_name.split("/")[-3]
466
467 include_layers = d.getVar("CVE_CHECK_LAYER_INCLUDELIST").split()
468 exclude_layers = d.getVar("CVE_CHECK_LAYER_EXCLUDELIST").split()
469
470 report_all = d.getVar("CVE_CHECK_REPORT_PATCHED") == "1"
471
472 if exclude_layers and layer in exclude_layers:
473 return
474
475 if include_layers and layer not in include_layers:
476 return
477
478 # Early exit, the text format does not report packages without CVEs
479 if not patched+unpatched+ignored:
480 return
481
482 nvd_link = "https://nvd.nist.gov/vuln/detail/"
483 write_string = ""
484 unpatched_cves = []
485 bb.utils.mkdirhier(os.path.dirname(cve_file))
486
487 for cve in sorted(cve_data):
488 is_patched = cve in patched
489 is_ignored = cve in ignored
490
491 status = "Unpatched"
492 if (is_patched or is_ignored) and not report_all:
493 continue
494 if is_ignored:
495 status = "Ignored"
496 elif is_patched:
497 status = "Patched"
498 else:
499 # default value of status is Unpatched
500 unpatched_cves.append(cve)
501
502 write_string += "LAYER: %s\n" % layer
503 write_string += "PACKAGE NAME: %s\n" % d.getVar("PN")
504 write_string += "PACKAGE VERSION: %s%s\n" % (d.getVar("EXTENDPE"), d.getVar("PV"))
505 write_string += "CVE: %s\n" % cve
506 write_string += "CVE STATUS: %s\n" % status
507 _, detail, description = decode_cve_status(d, cve)
508 if detail:
509 write_string += "CVE DETAIL: %s\n" % detail
510 if description:
511 write_string += "CVE DESCRIPTION: %s\n" % description
512 write_string += "CVE SUMMARY: %s\n" % cve_data[cve]["summary"]
513 write_string += "CVSS v2 BASE SCORE: %s\n" % cve_data[cve]["scorev2"]
514 write_string += "CVSS v3 BASE SCORE: %s\n" % cve_data[cve]["scorev3"]
515 write_string += "VECTOR: %s\n" % cve_data[cve]["vector"]
516 write_string += "VECTORSTRING: %s\n" % cve_data[cve]["vectorString"]
517 write_string += "MORE INFORMATION: %s%s\n\n" % (nvd_link, cve)
518
519 if unpatched_cves and d.getVar("CVE_CHECK_SHOW_WARNINGS") == "1":
520 bb.warn("Found unpatched CVE (%s), for more information check %s" % (" ".join(unpatched_cves),cve_file))
521
522 with open(cve_file, "w") as f:
523 bb.note("Writing file %s with CVE information" % cve_file)
524 f.write(write_string)
525
526 if d.getVar("CVE_CHECK_COPY_FILES") == "1":
527 deploy_file = d.getVar("CVE_CHECK_RECIPE_FILE")
528 bb.utils.mkdirhier(os.path.dirname(deploy_file))
529 with open(deploy_file, "w") as f:
530 f.write(write_string)
531
532 if d.getVar("CVE_CHECK_CREATE_MANIFEST") == "1":
533 cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR")
534 bb.utils.mkdirhier(cvelogpath)
535
536 with open(d.getVar("CVE_CHECK_TMP_FILE"), "a") as f:
537 f.write("%s" % write_string)
538 459
539def cve_check_write_json_output(d, output, direct_file, deploy_file, manifest_file): 460def cve_check_write_json_output(d, output, direct_file, deploy_file, manifest_file):
540 """ 461 """
@@ -566,13 +487,11 @@ def cve_check_write_json_output(d, output, direct_file, deploy_file, manifest_fi
566 with open(index_path, "a+") as f: 487 with open(index_path, "a+") as f:
567 f.write("%s\n" % fragment_path) 488 f.write("%s\n" % fragment_path)
568 489
569def cve_write_data_json(d, patched, unpatched, ignored, cve_data, cve_status): 490def cve_write_data_json(d, cve_data, cve_status):
570 """ 491 """
571 Prepare CVE data for the JSON format, then write it. 492 Prepare CVE data for the JSON format, then write it.
572 """ 493 """
573 494
574 from oe.cve_check import decode_cve_status
575
576 output = {"version":"1", "package": []} 495 output = {"version":"1", "package": []}
577 nvd_link = "https://nvd.nist.gov/vuln/detail/" 496 nvd_link = "https://nvd.nist.gov/vuln/detail/"
578 497
@@ -590,8 +509,6 @@ def cve_write_data_json(d, patched, unpatched, ignored, cve_data, cve_status):
590 if include_layers and layer not in include_layers: 509 if include_layers and layer not in include_layers:
591 return 510 return
592 511
593 unpatched_cves = []
594
595 product_data = [] 512 product_data = []
596 for s in cve_status: 513 for s in cve_status:
597 p = {"product": s[0], "cvesInRecord": "Yes"} 514 p = {"product": s[0], "cvesInRecord": "Yes"}
@@ -606,39 +523,33 @@ def cve_write_data_json(d, patched, unpatched, ignored, cve_data, cve_status):
606 "version" : package_version, 523 "version" : package_version,
607 "products": product_data 524 "products": product_data
608 } 525 }
526
609 cve_list = [] 527 cve_list = []
610 528
611 for cve in sorted(cve_data): 529 for cve in sorted(cve_data):
612 is_patched = cve in patched 530 if not report_all and (cve_data[cve]["abbrev-status"] == "Patched" or cve_data[cve]["abbrev-status"] == "Ignored"):
613 is_ignored = cve in ignored
614 status = "Unpatched"
615 if (is_patched or is_ignored) and not report_all:
616 continue 531 continue
617 if is_ignored:
618 status = "Ignored"
619 elif is_patched:
620 status = "Patched"
621 else:
622 # default value of status is Unpatched
623 unpatched_cves.append(cve)
624
625 issue_link = "%s%s" % (nvd_link, cve) 532 issue_link = "%s%s" % (nvd_link, cve)
626 533
627 cve_item = { 534 cve_item = {
628 "id" : cve, 535 "id" : cve,
629 "summary" : cve_data[cve]["summary"], 536 "status" : cve_data[cve]["abbrev-status"],
630 "scorev2" : cve_data[cve]["scorev2"], 537 "link": issue_link,
631 "scorev3" : cve_data[cve]["scorev3"],
632 "vector" : cve_data[cve]["vector"],
633 "vectorString" : cve_data[cve]["vectorString"],
634 "status" : status,
635 "link": issue_link
636 } 538 }
637 _, detail, description = decode_cve_status(d, cve) 539 if 'NVD-summary' in cve_data[cve]:
638 if detail: 540 cve_item["summary"] = cve_data[cve]["NVD-summary"]
639 cve_item["detail"] = detail 541 cve_item["scorev2"] = cve_data[cve]["NVD-scorev2"]
640 if description: 542 cve_item["scorev3"] = cve_data[cve]["NVD-scorev3"]
641 cve_item["description"] = description 543 cve_item["scorev4"] = cve_data[cve]["NVD-scorev4"]
544 cve_item["modified"] = cve_data[cve]["NVD-modified"]
545 cve_item["vector"] = cve_data[cve]["NVD-vector"]
546 cve_item["vectorString"] = cve_data[cve]["NVD-vectorString"]
547 if 'status' in cve_data[cve]:
548 cve_item["detail"] = cve_data[cve]["status"]
549 if 'justification' in cve_data[cve]:
550 cve_item["description"] = cve_data[cve]["justification"]
551 if 'resource' in cve_data[cve]:
552 cve_item["patch-file"] = cve_data[cve]["resource"]
642 cve_list.append(cve_item) 553 cve_list.append(cve_item)
643 554
644 package_data["issue"] = cve_list 555 package_data["issue"] = cve_list
@@ -650,12 +561,10 @@ def cve_write_data_json(d, patched, unpatched, ignored, cve_data, cve_status):
650 561
651 cve_check_write_json_output(d, output, direct_file, deploy_file, manifest_file) 562 cve_check_write_json_output(d, output, direct_file, deploy_file, manifest_file)
652 563
653def cve_write_data(d, patched, unpatched, ignored, cve_data, status): 564def cve_write_data(d, cve_data, status):
654 """ 565 """
655 Write CVE data in each enabled format. 566 Write CVE data in each enabled format.
656 """ 567 """
657 568
658 if d.getVar("CVE_CHECK_FORMAT_TEXT") == "1":
659 cve_write_data_text(d, patched, unpatched, ignored, cve_data)
660 if d.getVar("CVE_CHECK_FORMAT_JSON") == "1": 569 if d.getVar("CVE_CHECK_FORMAT_JSON") == "1":
661 cve_write_data_json(d, patched, unpatched, ignored, cve_data, status) 570 cve_write_data_json(d, cve_data, status)
diff --git a/meta/classes/devtool-source.bbclass b/meta/classes/devtool-source.bbclass
index 3e24800dcb..2e0070486b 100644
--- a/meta/classes/devtool-source.bbclass
+++ b/meta/classes/devtool-source.bbclass
@@ -92,9 +92,9 @@ python devtool_post_unpack() {
92 for fname in local_files: 92 for fname in local_files:
93 f.write('%s\n' % fname) 93 f.write('%s\n' % fname)
94 94
95 if os.path.dirname(srcsubdir) != workdir: 95 if srcsubdir.startswith(unpackdir) and os.path.dirname(srcsubdir) != unpackdir:
96 # Handle if S is set to a subdirectory of the source 96 # Handle if S is set to a subdirectory of the source
97 srcsubdir = os.path.join(workdir, os.path.relpath(srcsubdir, workdir).split(os.sep)[0]) 97 srcsubdir = os.path.normpath(os.path.join(unpackdir, os.path.relpath(srcsubdir, unpackdir).split(os.sep)[0]))
98 98
99 scriptutils.git_convert_standalone_clone(srcsubdir) 99 scriptutils.git_convert_standalone_clone(srcsubdir)
100 100
@@ -179,9 +179,9 @@ python devtool_post_patch() {
179 # (otherwise we'd likely be left with identical commits that have different hashes) 179 # (otherwise we'd likely be left with identical commits that have different hashes)
180 bb.process.run('git rebase devtool-no-overrides', cwd=srcsubdir) 180 bb.process.run('git rebase devtool-no-overrides', cwd=srcsubdir)
181 bb.process.run('git checkout %s' % devbranch, cwd=srcsubdir) 181 bb.process.run('git checkout %s' % devbranch, cwd=srcsubdir)
182 bb.process.run('git tag -f devtool-patched', cwd=srcsubdir) 182 bb.process.run('git tag -f --no-sign devtool-patched', cwd=srcsubdir)
183 if os.path.exists(os.path.join(srcsubdir, '.gitmodules')): 183 if os.path.exists(os.path.join(srcsubdir, '.gitmodules')):
184 bb.process.run('git submodule foreach --recursive "git tag -f devtool-patched"', cwd=srcsubdir) 184 bb.process.run('git submodule foreach --recursive "git tag -f --no-sign devtool-patched"', cwd=srcsubdir)
185 185
186} 186}
187 187
diff --git a/meta/classes/go-vendor.bbclass b/meta/classes/go-vendor.bbclass
index 1bbb99ac79..6ec6178add 100644
--- a/meta/classes/go-vendor.bbclass
+++ b/meta/classes/go-vendor.bbclass
@@ -70,7 +70,7 @@ python do_go_vendor() {
70 if os.path.exists(vendor_dir): 70 if os.path.exists(vendor_dir):
71 # Nothing to do except re-establish link to actual vendor folder 71 # Nothing to do except re-establish link to actual vendor folder
72 if not os.path.exists(linkname): 72 if not os.path.exists(linkname):
73 os.symlink(vendor_dir, linkname) 73 oe.path.relsymlink(vendor_dir, linkname)
74 return 74 return
75 75
76 bb.utils.mkdirhier(vendor_dir) 76 bb.utils.mkdirhier(vendor_dir)
@@ -156,7 +156,7 @@ python do_go_vendor() {
156 shutil.copy2(rootdirLicese, subdirLicense) 156 shutil.copy2(rootdirLicese, subdirLicense)
157 157
158 # Copy vendor manifest 158 # Copy vendor manifest
159 modules_txt_src = os.path.join(d.getVar('WORKDIR'), "modules.txt") 159 modules_txt_src = os.path.join(d.getVar('UNPACKDIR'), "modules.txt")
160 bb.debug(1, "cp %s --> %s" % (modules_txt_src, vendor_dir)) 160 bb.debug(1, "cp %s --> %s" % (modules_txt_src, vendor_dir))
161 shutil.copy2(modules_txt_src, vendor_dir) 161 shutil.copy2(modules_txt_src, vendor_dir)
162 162
@@ -201,11 +201,15 @@ python do_go_vendor() {
201 for vendored_name, replaced_path in replaced_paths.items(): 201 for vendored_name, replaced_path in replaced_paths.items():
202 symlink_target = os.path.join(source_dir, *['src', go_import, replaced_path]) 202 symlink_target = os.path.join(source_dir, *['src', go_import, replaced_path])
203 symlink_name = os.path.join(vendor_dir, vendored_name) 203 symlink_name = os.path.join(vendor_dir, vendored_name)
204 relative_symlink_target = os.path.relpath(symlink_target, os.path.dirname(symlink_name))
204 bb.debug(1, "vendored name %s, symlink name %s" % (vendored_name, symlink_name)) 205 bb.debug(1, "vendored name %s, symlink name %s" % (vendored_name, symlink_name))
205 os.symlink(symlink_target, symlink_name) 206
207 os.makedirs(os.path.dirname(symlink_name), exist_ok=True)
208 os.symlink(relative_symlink_target, symlink_name)
206 209
207 # Create a symlink to the actual directory 210 # Create a symlink to the actual directory
208 os.symlink(vendor_dir, linkname) 211 relative_vendor_dir = os.path.relpath(vendor_dir, os.path.dirname(linkname))
212 os.symlink(relative_vendor_dir, linkname)
209} 213}
210 214
211addtask go_vendor before do_patch after do_unpack 215addtask go_vendor before do_patch after do_unpack
diff --git a/meta/classes/icecc.bbclass b/meta/classes/icecc.bbclass
deleted file mode 100644
index 159cae20f8..0000000000
--- a/meta/classes/icecc.bbclass
+++ /dev/null
@@ -1,461 +0,0 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7# Icecream distributed compiling support
8#
9# Stages directories with symlinks from gcc/g++ to icecc, for both
10# native and cross compilers. Depending on each configure or compile,
11# the directories are added at the head of the PATH list and ICECC_CXX
12# and ICECC_CC are set.
13#
14# For the cross compiler, creates a tar.gz of our toolchain and sets
15# ICECC_VERSION accordingly.
16#
17# The class now handles all 3 different compile 'stages' (i.e native ,cross-kernel and target) creating the
18# necessary environment tar.gz file to be used by the remote machines.
19# It also supports meta-toolchain generation.
20#
21# If ICECC_PATH is not set in local.conf then the class will try to locate it using 'bb.utils.which'
22# but nothing is sure. ;)
23#
24# If ICECC_ENV_EXEC is set in local.conf, then it should point to the icecc-create-env script provided by the user
25# or the default one provided by icecc-create-env_0.1.bb will be used.
26# (NOTE that this is a modified version of the needed script and *not the one that comes with icecream*).
27#
28# User can specify if specific recipes or recipes inheriting specific classes should not use icecc to distribute
29# compile jobs to remote machines, but handle them locally by defining ICECC_CLASS_DISABLE and ICECC_RECIPE_DISABLE
30# with the appropriate values in local.conf. In addition the user can force to enable icecc for recipes
31# which set an empty PARALLEL_MAKE variable by defining ICECC_RECIPE_ENABLE.
32#
33#########################################################################################
34# Error checking is kept to minimum so double check any parameters you pass to the class
35#########################################################################################
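For reference, the class being removed here was driven from local.conf with plain variable lists, along the lines of the sketch below (paths and recipe names are placeholders):

    ICECC_PATH = "/usr/bin/icecc"
    ICECC_RECIPE_DISABLE += "some-recipe"
    ICECC_RECIPE_ENABLE += "another-recipe"
    ICECC_CLASS_DISABLE += "some-class"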
36
37BB_BASEHASH_IGNORE_VARS += "ICECC_PARALLEL_MAKE ICECC_DISABLED ICECC_RECIPE_DISABLE \
38 ICECC_CLASS_DISABLE ICECC_RECIPE_ENABLE ICECC_PATH ICECC_ENV_EXEC \
39 ICECC_CARET_WORKAROUND ICECC_CFLAGS ICECC_ENV_VERSION \
40 ICECC_DEBUG ICECC_LOGFILE ICECC_REPEAT_RATE ICECC_PREFERRED_HOST \
41 ICECC_CLANG_REMOTE_CPP ICECC_IGNORE_UNVERIFIED ICECC_TEST_SOCKET \
42 ICECC_ENV_DEBUG ICECC_REMOTE_CPP \
43 "
44
45ICECC_ENV_EXEC ?= "${STAGING_BINDIR_NATIVE}/icecc-create-env"
46
47HOSTTOOLS_NONFATAL += "icecc patchelf"
48
49# This version can be incremented when changes are made to the environment that
50# invalidate the version on the compile nodes. Changing it will cause a new
51# environment to be created.
52#
53# A useful thing to do for testing icecream changes locally is to add a
54# subversion in local.conf:
55# ICECC_ENV_VERSION:append = "-my-ver-1"
56ICECC_ENV_VERSION = "2"
57
58# Default to disabling the caret workaround, If set to "1" in local.conf, icecc
59# will locally recompile any files that have warnings, which can adversely
60# affect performance.
61#
62# See: https://github.com/icecc/icecream/issues/190
63export ICECC_CARET_WORKAROUND ??= "0"
64
65export ICECC_REMOTE_CPP ??= "0"
66
67ICECC_CFLAGS = ""
68CFLAGS += "${ICECC_CFLAGS}"
69CXXFLAGS += "${ICECC_CFLAGS}"
70
71# Debug flags when generating environments
72ICECC_ENV_DEBUG ??= ""
73
74# Disable recipe list contains a list of recipes that can not distribute
75# compile tasks for one reason or the other. When adding a new entry, please
76# document why (how it failed) so that we can re-evaluate it later e.g. when
77# there is a new version.
78#
79# libgcc-initial - fails with CPP sanity check error if host sysroot contains
80# cross gcc built for another target tune/variant.
81# pixman - prng_state: TLS reference mismatches non-TLS reference, possibly due to
82# pragma omp threadprivate(prng_state).
83# systemtap - _HelperSDT.c undefs macros and uses the identifiers in macros emitting
84# inline assembly.
85# target-sdk-provides-dummy - ${HOST_PREFIX} is empty which triggers the "NULL
86# prefix" error.
87ICECC_RECIPE_DISABLE += "\
88 libgcc-initial \
89 pixman \
90 systemtap \
91 target-sdk-provides-dummy \
92 "
93
94# Classes that should not use icecc. When adding a new entry, please
95# document why (how it failed) so that we can re-evaluate it later.
96#
97# image - images aren't compiling, but the testing framework for images captures
98# PARALLEL_MAKE as part of the test environment. Many tests won't use
99# icecream, but leaving the high level of parallelism can cause them to
100# consume an unnecessary amount of resources.
101ICECC_CLASS_DISABLE += "\
102 image \
103 "
104
105def get_icecc_dep(d):
106 # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
107 # we need that built is the responsibility of the patch function / class, not
108 # the application.
109 if not d.getVar('INHIBIT_DEFAULT_DEPS'):
110 return "icecc-create-env-native"
111 return ""
112
113DEPENDS:prepend = "${@get_icecc_dep(d)} "
114
115get_cross_kernel_cc[vardepsexclude] += "KERNEL_CC"
116def get_cross_kernel_cc(bb,d):
117 if not icecc_is_kernel(bb, d):
118 return None
119
120 # evaluate the expression by the shell if necessary
121 kernel_cc = d.getVar('KERNEL_CC')
122 if '`' in kernel_cc or '$(' in kernel_cc:
123 import subprocess
124 kernel_cc = subprocess.check_output("echo %s" % kernel_cc, shell=True).decode("utf-8")[:-1]
125
126 kernel_cc = kernel_cc.replace('ccache', '').strip()
127 kernel_cc = kernel_cc.split(' ')[0]
128 kernel_cc = kernel_cc.strip()
129 return kernel_cc
130
131def get_icecc(d):
132 return d.getVar('ICECC_PATH') or bb.utils.which(os.getenv("PATH"), "icecc")
133
134def use_icecc(bb,d):
135 if d.getVar('ICECC_DISABLED') == "1":
136 # don't even try it, when explicitly disabled
137 return "no"
138
139 # allarch recipes don't use compiler
140 if icecc_is_allarch(bb, d):
141 return "no"
142
143 if icecc_is_cross_canadian(bb, d):
144 return "no"
145
146 pn = d.getVar('PN')
147 bpn = d.getVar('BPN')
148
149 # Enable/disable checks are made against BPN, because there is a good
150 # chance that if icecc should be skipped for a recipe, it should be skipped
151 # for all the variants of that recipe. PN is still checked in case a user
152 # specified a more specific recipe.
153 check_pn = set([pn, bpn])
154
155 class_disable = (d.getVar('ICECC_CLASS_DISABLE') or "").split()
156
157 for bbclass in class_disable:
158 if bb.data.inherits_class(bbclass, d):
159 bb.debug(1, "%s: bbclass %s found in disable, disable icecc" % (pn, bbclass))
160 return "no"
161
162 disabled_recipes = (d.getVar('ICECC_RECIPE_DISABLE') or "").split()
163 enabled_recipes = (d.getVar('ICECC_RECIPE_ENABLE') or "").split()
164
165 if check_pn & set(disabled_recipes):
166 bb.debug(1, "%s: found in disable list, disable icecc" % pn)
167 return "no"
168
169 if check_pn & set(enabled_recipes):
170 bb.debug(1, "%s: found in enabled recipes list, enable icecc" % pn)
171 return "yes"
172
173 if d.getVar('PARALLEL_MAKE') == "":
174 bb.debug(1, "%s: has empty PARALLEL_MAKE, disable icecc" % pn)
175 return "no"
176
177 return "yes"
178
179def icecc_is_allarch(bb, d):
180 return d.getVar("PACKAGE_ARCH") == "all"
181
182def icecc_is_kernel(bb, d):
183 return \
184 bb.data.inherits_class("kernel", d);
185
186def icecc_is_native(bb, d):
187 return \
188 bb.data.inherits_class("cross", d) or \
189 bb.data.inherits_class("native", d);
190
191def icecc_is_cross_canadian(bb, d):
192 return bb.data.inherits_class("cross-canadian", d)
193
194def icecc_dir(bb, d):
195 return d.expand('${TMPDIR}/work-shared/ice')
196
197# Don't pollute allarch signatures with TARGET_FPU
198icecc_version[vardepsexclude] += "TARGET_FPU"
199def icecc_version(bb, d):
200 if use_icecc(bb, d) == "no":
201 return ""
202
203 parallel = d.getVar('ICECC_PARALLEL_MAKE') or ""
204 if not d.getVar('PARALLEL_MAKE') == "" and parallel:
205 d.setVar("PARALLEL_MAKE", parallel)
206
207 # Disable showing the caret in the GCC compiler output if the workaround is
208 # disabled
209 if d.getVar('ICECC_CARET_WORKAROUND') == '0':
210 d.setVar('ICECC_CFLAGS', '-fno-diagnostics-show-caret')
211
212 if icecc_is_native(bb, d):
213 archive_name = "local-host-env"
214 elif d.expand('${HOST_PREFIX}') == "":
215 bb.fatal(d.expand("${PN}"), " NULL prefix")
216 else:
217 prefix = d.expand('${HOST_PREFIX}' )
218 distro = d.expand('${DISTRO}')
219 target_sys = d.expand('${TARGET_SYS}')
220 float = d.getVar('TARGET_FPU') or "hard"
221 archive_name = prefix + distro + "-" + target_sys + "-" + float
222 if icecc_is_kernel(bb, d):
223 archive_name += "-kernel"
224
225 import socket
226 ice_dir = icecc_dir(bb, d)
227 tar_file = os.path.join(ice_dir, "{archive}-{version}-@VERSION@-{hostname}.tar.gz".format(
228 archive=archive_name,
229 version=d.getVar('ICECC_ENV_VERSION'),
230 hostname=socket.gethostname()
231 ))
232
233 return tar_file
234
235def icecc_path(bb,d):
236 if use_icecc(bb, d) == "no":
237 # don't create unnecessary directories when icecc is disabled
238 return
239
240 staging = os.path.join(d.expand('${STAGING_BINDIR}'), "ice")
241 if icecc_is_kernel(bb, d):
242 staging += "-kernel"
243
244 return staging
245
246def icecc_get_external_tool(bb, d, tool):
247 external_toolchain_bindir = d.expand('${EXTERNAL_TOOLCHAIN}${bindir_cross}')
248 target_prefix = d.expand('${TARGET_PREFIX}')
249 return os.path.join(external_toolchain_bindir, '%s%s' % (target_prefix, tool))
250
251def icecc_get_tool_link(tool, d):
252 import subprocess
253 try:
254 return subprocess.check_output("readlink -f %s" % tool, shell=True).decode("utf-8")[:-1]
255 except subprocess.CalledProcessError as e:
256 bb.note("icecc: one of the tools probably disappeared during recipe parsing, cmd readlink -f %s returned %d:\n%s" % (tool, e.returncode, e.output.decode("utf-8")))
257 return tool
258
259def icecc_get_path_tool(tool, d):
260 # This is a little ugly, but we want to make sure we add an actual
261 # compiler to the toolchain, not ccache. Some distros (e.g. Fedora)
262 # have ccache enabled by default using symlinks in PATH, meaning ccache
263 # would be found first when looking for the compiler.
264 paths = os.getenv("PATH").split(':')
265 while True:
266 p, hist = bb.utils.which(':'.join(paths), tool, history=True)
267 if not p or os.path.basename(icecc_get_tool_link(p, d)) != 'ccache':
268 return p
269 paths = paths[len(hist):]
270
271 return ""
272
273# Don't pollute native signatures with target TUNE_PKGARCH through STAGING_BINDIR_TOOLCHAIN
274icecc_get_tool[vardepsexclude] += "STAGING_BINDIR_TOOLCHAIN"
275def icecc_get_tool(bb, d, tool):
276 if icecc_is_native(bb, d):
277 return icecc_get_path_tool(tool, d)
278 elif icecc_is_kernel(bb, d):
279 return icecc_get_path_tool(get_cross_kernel_cc(bb, d), d)
280 else:
281 ice_dir = d.expand('${STAGING_BINDIR_TOOLCHAIN}')
282 target_sys = d.expand('${TARGET_SYS}')
283 for p in ice_dir.split(':'):
284 tool_bin = os.path.join(p, "%s-%s" % (target_sys, tool))
285 if os.path.isfile(tool_bin):
286 return tool_bin
287 external_tool_bin = icecc_get_external_tool(bb, d, tool)
288 if os.path.isfile(external_tool_bin):
289 return external_tool_bin
290 return ""
291
292def icecc_get_and_check_tool(bb, d, tool):
293 # Check that g++ or gcc is not a symbolic link to icecc binary in
294 # PATH or icecc-create-env script will silently create an invalid
295 # compiler environment package.
296 t = icecc_get_tool(bb, d, tool)
297 if t:
298 link_path = icecc_get_tool_link(t, d)
299 if link_path == get_icecc(d):
300 bb.error("%s is a symlink to %s in PATH and this prevents icecc from working" % (t, link_path))
301 return ""
302 else:
303 return t
304 else:
305 return t
306
307wait_for_file() {
308 local TIME_ELAPSED=0
309 local FILE_TO_TEST=$1
310 local TIMEOUT=$2
311 until [ -f "$FILE_TO_TEST" ]
312 do
313 TIME_ELAPSED=$(expr $TIME_ELAPSED + 1)
314 if [ $TIME_ELAPSED -gt $TIMEOUT ]
315 then
316 return 1
317 fi
318 sleep 1
319 done
320}
321
322def set_icecc_env():
323 # dummy python version of set_icecc_env
324 return
325
326set_icecc_env[vardepsexclude] += "KERNEL_CC"
327set_icecc_env() {
328 if [ "${@use_icecc(bb, d)}" = "no" ]
329 then
330 return
331 fi
332 ICECC_VERSION="${@icecc_version(bb, d)}"
333 if [ "x${ICECC_VERSION}" = "x" ]
334 then
335 bbwarn "Cannot use icecc: could not get ICECC_VERSION"
336 return
337 fi
338
339 ICE_PATH="${@icecc_path(bb, d)}"
340 if [ "x${ICE_PATH}" = "x" ]
341 then
342 bbwarn "Cannot use icecc: could not get ICE_PATH"
343 return
344 fi
345
346 ICECC_BIN="${@get_icecc(d)}"
347 if [ -z "${ICECC_BIN}" ]; then
348 bbwarn "Cannot use icecc: icecc binary not found"
349 return
350 fi
351 if [ -z "$(which patchelf patchelf-uninative)" ]; then
352 bbwarn "Cannot use icecc: patchelf not found"
353 return
354 fi
355
356 ICECC_CC="${@icecc_get_and_check_tool(bb, d, "gcc")}"
357 ICECC_CXX="${@icecc_get_and_check_tool(bb, d, "g++")}"
 358 # cannot use icecc_get_and_check_tool here because it assumes 'as' has no target_sys prefix
359 ICECC_WHICH_AS="${@bb.utils.which(os.getenv('PATH'), 'as')}"
360 if [ ! -x "${ICECC_CC}" -o ! -x "${ICECC_CXX}" ]
361 then
362 bbnote "Cannot use icecc: could not get ICECC_CC or ICECC_CXX"
363 return
364 fi
365
366 ICE_VERSION="$($ICECC_CC -dumpversion)"
367 ICECC_VERSION=$(echo ${ICECC_VERSION} | sed -e "s/@VERSION@/$ICE_VERSION/g")
368 if [ ! -x "${ICECC_ENV_EXEC}" ]
369 then
370 bbwarn "Cannot use icecc: invalid ICECC_ENV_EXEC"
371 return
372 fi
373
374 # Create symlinks to icecc and wrapper-scripts in the recipe-sysroot directory
375 mkdir -p $ICE_PATH/symlinks
376 if [ -n "${KERNEL_CC}" ]; then
377 compilers="${@get_cross_kernel_cc(bb,d)}"
378 else
379 compilers="${HOST_PREFIX}gcc ${HOST_PREFIX}g++"
380 fi
381 for compiler in $compilers; do
382 ln -sf $ICECC_BIN $ICE_PATH/symlinks/$compiler
383 cat <<-__EOF__ > $ICE_PATH/$compiler
384 #!/bin/sh -e
385 export ICECC_VERSION=$ICECC_VERSION
386 export ICECC_CC=$ICECC_CC
387 export ICECC_CXX=$ICECC_CXX
388 $ICE_PATH/symlinks/$compiler "\$@"
389 __EOF__
390 chmod 775 $ICE_PATH/$compiler
391 done
392
393 ICECC_AS="$(${ICECC_CC} -print-prog-name=as)"
394 # for target recipes should return something like:
395 # /OE/tmp-eglibc/sysroots/x86_64-linux/usr/libexec/arm920tt-oe-linux-gnueabi/gcc/arm-oe-linux-gnueabi/4.8.2/as
396 # and just "as" for native, if it returns "as" in current directory (for whatever reason) use "as" from PATH
397 if [ "$(dirname "${ICECC_AS}")" = "." ]
398 then
399 ICECC_AS="${ICECC_WHICH_AS}"
400 fi
401
402 if [ ! -f "${ICECC_VERSION}.done" ]
403 then
404 mkdir -p "$(dirname "${ICECC_VERSION}")"
405
406 # the ICECC_VERSION generation step must be locked by a mutex
407 # in order to prevent race conditions
408 if flock -n "${ICECC_VERSION}.lock" \
409 ${ICECC_ENV_EXEC} ${ICECC_ENV_DEBUG} "${ICECC_CC}" "${ICECC_CXX}" "${ICECC_AS}" "${ICECC_VERSION}"
410 then
411 touch "${ICECC_VERSION}.done"
412 elif ! wait_for_file "${ICECC_VERSION}.done" 30
413 then
414 # locking failed so wait for ${ICECC_VERSION}.done to appear
415 bbwarn "Timeout waiting for ${ICECC_VERSION}.done"
416 return
417 fi
418 fi
419
420 # Don't let ccache find the icecream compiler links that have been created, otherwise
421 # it can end up invoking icecream recursively.
422 export CCACHE_PATH="$PATH"
423 export CCACHE_DISABLE="1"
424
425 export PATH="$ICE_PATH:$PATH"
426
427 bbnote "Using icecc path: $ICE_PATH"
428 bbnote "Using icecc tarball: $ICECC_VERSION"
429}
430
431do_configure:prepend() {
432 set_icecc_env
433}
434
435do_compile:prepend() {
436 set_icecc_env
437}
438
439do_compile_kernelmodules:prepend() {
440 set_icecc_env
441}
442
443do_install:prepend() {
444 set_icecc_env
445}
446
447# Icecream is not (currently) supported in the extensible SDK
448ICECC_SDK_HOST_TASK = "nativesdk-icecc-toolchain"
449ICECC_SDK_HOST_TASK:task-populate-sdk-ext = ""
450
451# Don't include icecream in uninative tarball
452ICECC_SDK_HOST_TASK:pn-uninative-tarball = ""
453
454# Add the toolchain scripts to the SDK
455TOOLCHAIN_HOST_TASK:append = " ${ICECC_SDK_HOST_TASK}"
456
457python () {
458 if d.getVar('ICECC_DISABLED') != "1":
459 for task in ['do_configure', 'do_compile', 'do_compile_kernelmodules', 'do_install']:
460 d.setVarFlag(task, 'network', '1')
461}
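
set_icecc_env above generates, per cross compiler, a small wrapper in $ICE_PATH plus a symlink to the icecc binary in $ICE_PATH/symlinks, and then prepends $ICE_PATH to PATH so ordinary compiler invocations are routed through icecc. A generated wrapper might look roughly like the following (all paths are hypothetical, not taken from a real build):

    #!/bin/sh -e
    export ICECC_VERSION=/build/tmp/icecc/environment-arm.tar.gz
    export ICECC_CC=/build/recipe-sysroot-native/usr/bin/arm-poky-linux-gnueabi/arm-poky-linux-gnueabi-gcc
    export ICECC_CXX=/build/recipe-sysroot-native/usr/bin/arm-poky-linux-gnueabi/arm-poky-linux-gnueabi-g++
    # $ICE_PATH/symlinks/<compiler> is a symlink to the icecc binary itself
    /build/recipe-sysroot/usr/bin/symlinks/arm-poky-linux-gnueabi-gcc "$@"
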
diff --git a/meta/classes/migrate_localcount.bbclass b/meta/classes/migrate_localcount.bbclass
deleted file mode 100644
index 1d00c110e2..0000000000
--- a/meta/classes/migrate_localcount.bbclass
+++ /dev/null
@@ -1,52 +0,0 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7PRSERV_DUMPDIR ??= "${LOG_DIR}/db"
8LOCALCOUNT_DUMPFILE ??= "${PRSERV_DUMPDIR}/prserv-localcount-exports.inc"
9
10python migrate_localcount_handler () {
11 import bb.event
12 if not e.data:
13 return
14
15 pv = e.data.getVar('PV')
16 if not 'AUTOINC' in pv:
17 return
18
19 localcounts = bb.persist_data.persist('BB_URI_LOCALCOUNT', e.data)
20 pn = e.data.getVar('PN')
21 revs = localcounts.get_by_pattern('%%-%s_rev' % pn)
22 counts = localcounts.get_by_pattern('%%-%s_count' % pn)
23 if not revs or not counts:
24 return
25
26 if len(revs) != len(counts):
27 bb.warn("The number of revs and localcounts don't match in %s" % pn)
28 return
29
30 version = e.data.getVar('PRAUTOINX')
31 srcrev = bb.fetch2.get_srcrev(e.data)
32 base_ver = 'AUTOINC-%s' % version[:version.find(srcrev)]
33 pkgarch = e.data.getVar('PACKAGE_ARCH')
34 value = max(int(count) for count in counts)
35
36 if len(revs) == 1:
37 if srcrev != ('AUTOINC+%s' % revs[0]):
38 value += 1
39 else:
40 value += 1
41
42 bb.utils.mkdirhier(e.data.getVar('PRSERV_DUMPDIR'))
43 df = e.data.getVar('LOCALCOUNT_DUMPFILE')
44 flock = bb.utils.lockfile("%s.lock" % df)
45 with open(df, 'a') as fd:
46 fd.write('PRAUTO$%s$%s$%s = "%s"\n' %
47 (base_ver, pkgarch, srcrev, str(value)))
48 bb.utils.unlockfile(flock)
49}
50
51addhandler migrate_localcount_handler
52migrate_localcount_handler[eventmask] = "bb.event.RecipeParsed"
diff --git a/meta/classes/multilib.bbclass b/meta/classes/multilib.bbclass
index b6c09969b1..b959bbd93c 100644
--- a/meta/classes/multilib.bbclass
+++ b/meta/classes/multilib.bbclass
@@ -5,30 +5,31 @@
5# 5#
6 6
7python multilib_virtclass_handler () { 7python multilib_virtclass_handler () {
8 cls = e.data.getVar("BBEXTENDCURR") 8 cls = d.getVar("BBEXTENDCURR")
9 variant = e.data.getVar("BBEXTENDVARIANT") 9 variant = d.getVar("BBEXTENDVARIANT")
10 if cls != "multilib" or not variant: 10 if cls != "multilib" or not variant:
11 return 11 return
12 12
13 localdata = bb.data.createCopy(e.data) 13 localdata = bb.data.createCopy(d)
14 localdata.delVar('TMPDIR') 14 localdata.delVar('TMPDIR')
15 e.data.setVar('STAGING_KERNEL_DIR', localdata.getVar('STAGING_KERNEL_DIR')) 15 d.setVar('STAGING_KERNEL_DIR', localdata.getVar('STAGING_KERNEL_DIR'))
16 16
17 # There should only be one kernel in multilib configs 17 # There should only be one kernel in multilib configs
18 # We also skip multilib setup for module packages. 18 # We also skip multilib setup for module packages.
19 provides = (e.data.getVar("PROVIDES") or "").split() 19 provides = (d.getVar("PROVIDES") or "").split()
20 non_ml_recipes = d.getVar('NON_MULTILIB_RECIPES').split() 20 non_ml_recipes = d.getVar('NON_MULTILIB_RECIPES').split()
21 bpn = e.data.getVar("BPN") 21 bpn = d.getVar("BPN")
22 if "virtual/kernel" in provides or \ 22 if ("virtual/kernel" in provides
23 bb.data.inherits_class('module-base', e.data) or \ 23 or bb.data.inherits_class('module-base', d)
24 bpn in non_ml_recipes: 24 or bb.data.inherits_class('kernel-fit-image', d)
25 or bpn in non_ml_recipes):
25 raise bb.parse.SkipRecipe("We shouldn't have multilib variants for %s" % bpn) 26 raise bb.parse.SkipRecipe("We shouldn't have multilib variants for %s" % bpn)
26 27
27 save_var_name=e.data.getVar("MULTILIB_SAVE_VARNAME") or "" 28 save_var_name = d.getVar("MULTILIB_SAVE_VARNAME") or ""
28 for name in save_var_name.split(): 29 for name in save_var_name.split():
29 val=e.data.getVar(name) 30 val = d.getVar(name)
30 if val: 31 if val:
31 e.data.setVar(name + "_MULTILIB_ORIGINAL", val) 32 d.setVar(name + "_MULTILIB_ORIGINAL", val)
32 33
33 # We nearly don't need this but dependencies on NON_MULTILIB_RECIPES don't work without it 34 # We nearly don't need this but dependencies on NON_MULTILIB_RECIPES don't work without it
34 d.setVar("SSTATE_ARCHS_TUNEPKG", "${@all_multilib_tune_values(d, 'TUNE_PKGARCH')}") 35 d.setVar("SSTATE_ARCHS_TUNEPKG", "${@all_multilib_tune_values(d, 'TUNE_PKGARCH')}")
@@ -36,66 +37,67 @@ python multilib_virtclass_handler () {
36 overrides = e.data.getVar("OVERRIDES", False) 37 overrides = e.data.getVar("OVERRIDES", False)
37 pn = e.data.getVar("PN", False) 38 pn = e.data.getVar("PN", False)
38 overrides = overrides.replace("pn-${PN}", "pn-${PN}:pn-" + pn) 39 overrides = overrides.replace("pn-${PN}", "pn-${PN}:pn-" + pn)
39 e.data.setVar("OVERRIDES", overrides) 40 d.setVar("OVERRIDES", overrides)
40 41
41 if bb.data.inherits_class('image', e.data): 42 if bb.data.inherits_class('image', d):
42 e.data.setVar("MLPREFIX", variant + "-") 43 d.setVar("MLPREFIX", variant + "-")
43 e.data.setVar("PN", variant + "-" + e.data.getVar("PN", False)) 44 d.setVar("PN", variant + "-" + d.getVar("PN", False))
44 e.data.setVar('SDKTARGETSYSROOT', e.data.getVar('SDKTARGETSYSROOT')) 45 d.setVar('SDKTARGETSYSROOT', d.getVar('SDKTARGETSYSROOT'))
45 override = ":virtclass-multilib-" + variant 46 override = ":virtclass-multilib-" + variant
46 e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + override) 47 d.setVar("OVERRIDES", d.getVar("OVERRIDES", False) + override)
47 target_vendor = e.data.getVar("TARGET_VENDOR:" + "virtclass-multilib-" + variant, False) 48 target_vendor = d.getVar("TARGET_VENDOR:" + "virtclass-multilib-" + variant, False)
48 if target_vendor: 49 if target_vendor:
49 e.data.setVar("TARGET_VENDOR", target_vendor) 50 d.setVar("TARGET_VENDOR", target_vendor)
50 return 51 return
51 52
52 if bb.data.inherits_class('cross-canadian', e.data): 53 if bb.data.inherits_class('cross-canadian', d):
53 # Multilib cross-canadian should use the same nativesdk sysroot without MLPREFIX 54 # Multilib cross-canadian should use the same nativesdk sysroot without MLPREFIX
54 e.data.setVar("RECIPE_SYSROOT", "${WORKDIR}/recipe-sysroot") 55 d.setVar("RECIPE_SYSROOT", "${WORKDIR}/recipe-sysroot")
55 e.data.setVar("STAGING_DIR_TARGET", "${WORKDIR}/recipe-sysroot") 56 d.setVar("STAGING_DIR_TARGET", "${WORKDIR}/recipe-sysroot")
56 e.data.setVar("STAGING_DIR_HOST", "${WORKDIR}/recipe-sysroot") 57 d.setVar("STAGING_DIR_HOST", "${WORKDIR}/recipe-sysroot")
57 e.data.setVar("RECIPE_SYSROOT_MANIFEST_SUBDIR", "nativesdk-" + variant) 58 d.setVar("RECIPE_SYSROOT_MANIFEST_SUBDIR", "nativesdk-" + variant)
58 e.data.setVar("MLPREFIX", variant + "-") 59 d.setVar("MLPREFIX", variant + "-")
59 override = ":virtclass-multilib-" + variant 60 override = ":virtclass-multilib-" + variant
60 e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + override) 61 d.setVar("OVERRIDES", d.getVar("OVERRIDES", False) + override)
61 return 62 return
62 63
63 if bb.data.inherits_class('native', e.data): 64 if bb.data.inherits_class('native', d):
64 raise bb.parse.SkipRecipe("We can't extend native recipes") 65 raise bb.parse.SkipRecipe("We can't extend native recipes")
65 66
66 if bb.data.inherits_class('nativesdk', e.data) or bb.data.inherits_class('crosssdk', e.data): 67 if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d):
67 raise bb.parse.SkipRecipe("We can't extend nativesdk recipes") 68 raise bb.parse.SkipRecipe("We can't extend nativesdk recipes")
68 69
69 if bb.data.inherits_class('allarch', e.data) and not d.getVar('MULTILIB_VARIANTS') \ 70 if (bb.data.inherits_class('allarch', d)
70 and not bb.data.inherits_class('packagegroup', e.data): 71 and not d.getVar('MULTILIB_VARIANTS')
72 and not bb.data.inherits_class('packagegroup', d)):
71 raise bb.parse.SkipRecipe("Don't extend allarch recipes which are not packagegroups") 73 raise bb.parse.SkipRecipe("Don't extend allarch recipes which are not packagegroups")
72 74
73 # Expand this since this won't work correctly once we set a multilib into place 75 # Expand this since this won't work correctly once we set a multilib into place
74 e.data.setVar("ALL_MULTILIB_PACKAGE_ARCHS", e.data.getVar("ALL_MULTILIB_PACKAGE_ARCHS")) 76 d.setVar("ALL_MULTILIB_PACKAGE_ARCHS", d.getVar("ALL_MULTILIB_PACKAGE_ARCHS"))
75 77
76 override = ":virtclass-multilib-" + variant 78 override = ":virtclass-multilib-" + variant
77 79
78 skip_msg = e.data.getVarFlag('SKIP_RECIPE', e.data.getVar('PN')) 80 skip_msg = d.getVarFlag('SKIP_RECIPE', d.getVar('PN'))
79 if skip_msg: 81 if skip_msg:
80 pn_new = variant + "-" + e.data.getVar('PN') 82 pn_new = variant + "-" + d.getVar('PN')
81 if not e.data.getVarFlag('SKIP_RECIPE', pn_new): 83 if not d.getVarFlag('SKIP_RECIPE', pn_new):
82 e.data.setVarFlag('SKIP_RECIPE', pn_new, skip_msg) 84 d.setVarFlag('SKIP_RECIPE', pn_new, skip_msg)
83 85
84 e.data.setVar("MLPREFIX", variant + "-") 86 d.setVar("MLPREFIX", variant + "-")
85 e.data.setVar("PN", variant + "-" + e.data.getVar("PN", False)) 87 d.setVar("PN", variant + "-" + d.getVar("PN", False))
86 e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + override) 88 d.setVar("OVERRIDES", d.getVar("OVERRIDES", False) + override)
87 89
88 # Expand INCOMPATIBLE_LICENSE_EXCEPTIONS with multilib prefix 90 # Expand INCOMPATIBLE_LICENSE_EXCEPTIONS with multilib prefix
89 pkgs = e.data.getVar("INCOMPATIBLE_LICENSE_EXCEPTIONS") 91 pkgs = d.getVar("INCOMPATIBLE_LICENSE_EXCEPTIONS")
90 if pkgs: 92 if pkgs:
91 for pkg in pkgs.split(): 93 for pkg in pkgs.split():
92 pkgs += " " + variant + "-" + pkg 94 pkgs += " " + variant + "-" + pkg
93 e.data.setVar("INCOMPATIBLE_LICENSE_EXCEPTIONS", pkgs) 95 d.setVar("INCOMPATIBLE_LICENSE_EXCEPTIONS", pkgs)
94 96
95 # DEFAULTTUNE can change TARGET_ARCH override so expand this now before update_data 97 # DEFAULTTUNE can change TARGET_ARCH override so expand this now before update_data
96 newtune = e.data.getVar("DEFAULTTUNE:" + "virtclass-multilib-" + variant, False) 98 newtune = d.getVar("DEFAULTTUNE:" + "virtclass-multilib-" + variant, False)
97 if newtune: 99 if newtune:
98 e.data.setVar("DEFAULTTUNE", newtune) 100 d.setVar("DEFAULTTUNE", newtune)
99} 101}
100 102
101addhandler multilib_virtclass_handler 103addhandler multilib_virtclass_handler
diff --git a/meta/classes/multilib_global.bbclass b/meta/classes/multilib_global.bbclass
index 6095d278dd..c1d6de100c 100644
--- a/meta/classes/multilib_global.bbclass
+++ b/meta/classes/multilib_global.bbclass
@@ -155,6 +155,12 @@ def preferred_ml_updates(d):
155 extramp.append(translate_provide(pref, p)) 155 extramp.append(translate_provide(pref, p))
156 d.setVar("BB_MULTI_PROVIDER_ALLOWED", " ".join(mp + extramp)) 156 d.setVar("BB_MULTI_PROVIDER_ALLOWED", " ".join(mp + extramp))
157 157
158 virtprovs = d.getVar("BB_RECIPE_VIRTUAL_PROVIDERS").split()
159 for p in virtprovs.copy():
160 for pref in prefixes:
161 virtprovs.append(translate_provide(pref, p))
162 d.setVar("BB_RECIPE_VIRTUAL_PROVIDERS", " ".join(virtprovs))
163
158 abisafe = (d.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split() 164 abisafe = (d.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
159 extras = [] 165 extras = []
160 for p in prefixes: 166 for p in prefixes:
@@ -171,24 +177,26 @@ def preferred_ml_updates(d):
171 d.appendVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", " " + " ".join(extras)) 177 d.appendVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", " " + " ".join(extras))
172 178
173python multilib_virtclass_handler_vendor () { 179python multilib_virtclass_handler_vendor () {
174 if isinstance(e, bb.event.ConfigParsed): 180 for v in d.getVar("MULTILIB_VARIANTS").split():
175 for v in e.data.getVar("MULTILIB_VARIANTS").split(): 181 if d.getVar("TARGET_VENDOR:virtclass-multilib-" + v, False) is None:
176 if e.data.getVar("TARGET_VENDOR:virtclass-multilib-" + v, False) is None: 182 d.setVar("TARGET_VENDOR:virtclass-multilib-" + v, d.getVar("TARGET_VENDOR", False) + "ml" + v)
177 e.data.setVar("TARGET_VENDOR:virtclass-multilib-" + v, e.data.getVar("TARGET_VENDOR", False) + "ml" + v) 183 preferred_ml_updates(d)
178 preferred_ml_updates(e.data)
179} 184}
180addhandler multilib_virtclass_handler_vendor 185addhandler multilib_virtclass_handler_vendor
181multilib_virtclass_handler_vendor[eventmask] = "bb.event.ConfigParsed" 186multilib_virtclass_handler_vendor[eventmask] = "bb.event.ConfigParsed"
182 187
183python multilib_virtclass_handler_global () { 188python multilib_virtclass_handler_global () {
184 variant = e.data.getVar("BBEXTENDVARIANT") 189 variant = d.getVar("BBEXTENDVARIANT")
185 if variant: 190 if variant:
186 return 191 return
187 192
193 if bb.data.inherits_class('native', d):
194 return
195
188 non_ml_recipes = d.getVar('NON_MULTILIB_RECIPES').split() 196 non_ml_recipes = d.getVar('NON_MULTILIB_RECIPES').split()
189 197
190 if bb.data.inherits_class('kernel', e.data) or \ 198 if bb.data.inherits_class('kernel', d) or \
191 bb.data.inherits_class('module-base', e.data) or \ 199 bb.data.inherits_class('module-base', d) or \
192 d.getVar('BPN') in non_ml_recipes: 200 d.getVar('BPN') in non_ml_recipes:
193 201
194 # We need to avoid expanding KERNEL_VERSION which we can do by deleting it 202 # We need to avoid expanding KERNEL_VERSION which we can do by deleting it
@@ -197,7 +205,7 @@ python multilib_virtclass_handler_global () {
197 localdata.delVar("KERNEL_VERSION") 205 localdata.delVar("KERNEL_VERSION")
198 localdata.delVar("KERNEL_VERSION_PKG_NAME") 206 localdata.delVar("KERNEL_VERSION_PKG_NAME")
199 207
200 variants = (e.data.getVar("MULTILIB_VARIANTS") or "").split() 208 variants = (d.getVar("MULTILIB_VARIANTS") or "").split()
201 209
202 import oe.classextend 210 import oe.classextend
203 clsextends = [] 211 clsextends = []
@@ -208,22 +216,22 @@ python multilib_virtclass_handler_global () {
208 origprovs = provs = localdata.getVar("PROVIDES") or "" 216 origprovs = provs = localdata.getVar("PROVIDES") or ""
209 for clsextend in clsextends: 217 for clsextend in clsextends:
210 provs = provs + " " + clsextend.map_variable("PROVIDES", setvar=False) 218 provs = provs + " " + clsextend.map_variable("PROVIDES", setvar=False)
211 e.data.setVar("PROVIDES", provs) 219 d.setVar("PROVIDES", provs)
212 220
213 # Process RPROVIDES 221 # Process RPROVIDES
214 origrprovs = rprovs = localdata.getVar("RPROVIDES") or "" 222 origrprovs = rprovs = localdata.getVar("RPROVIDES") or ""
215 for clsextend in clsextends: 223 for clsextend in clsextends:
216 rprovs = rprovs + " " + clsextend.map_variable("RPROVIDES", setvar=False) 224 rprovs = rprovs + " " + clsextend.map_variable("RPROVIDES", setvar=False)
217 if rprovs.strip(): 225 if rprovs.strip():
218 e.data.setVar("RPROVIDES", rprovs) 226 d.setVar("RPROVIDES", rprovs)
219 227
220 # Process RPROVIDES:${PN}... 228 # Process RPROVIDES:${PN}...
221 for pkg in (e.data.getVar("PACKAGES") or "").split(): 229 for pkg in (d.getVar("PACKAGES") or "").split():
222 origrprovs = rprovs = localdata.getVar("RPROVIDES:%s" % pkg) or "" 230 origrprovs = rprovs = localdata.getVar("RPROVIDES:%s" % pkg) or ""
223 for clsextend in clsextends: 231 for clsextend in clsextends:
224 rprovs = rprovs + " " + clsextend.map_variable("RPROVIDES:%s" % pkg, setvar=False) 232 rprovs = rprovs + " " + clsextend.map_variable("RPROVIDES:%s" % pkg, setvar=False)
225 rprovs = rprovs + " " + clsextend.extname + "-" + pkg 233 rprovs = rprovs + " " + clsextend.extname + "-" + pkg
226 e.data.setVar("RPROVIDES:%s" % pkg, rprovs) 234 d.setVar("RPROVIDES:%s" % pkg, rprovs)
227} 235}
228 236
229addhandler multilib_virtclass_handler_global 237addhandler multilib_virtclass_handler_global
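
The new BB_RECIPE_VIRTUAL_PROVIDERS hunk above uses the same translate_provide() helper already applied to PREFERRED_PROVIDER and BB_MULTI_PROVIDER_ALLOWED entries. As a minimal sketch of the prefixing it performs (illustrative only, not the class's own definition):

    def translate_provide(prefix, prov):
        # Non-virtual provides simply gain the multilib prefix, e.g. lib32-gcc-runtime.
        if not prov.startswith("virtual/"):
            return prefix + "-" + prov
        # virtual/foo becomes virtual/<prefix>-foo, e.g. virtual/lib32-cross-cc.
        return "virtual/" + prefix + "-" + prov[len("virtual/"):]

    assert translate_provide("lib32", "virtual/cross-cc") == "virtual/lib32-cross-cc"
    assert translate_provide("lib32", "gcc-runtime") == "lib32-gcc-runtime"
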
diff --git a/meta/classes/report-error.bbclass b/meta/classes/report-error.bbclass
index 1452513a66..2b880c8b0c 100644
--- a/meta/classes/report-error.bbclass
+++ b/meta/classes/report-error.bbclass
@@ -81,6 +81,7 @@ python errorreport_handler () {
81 task = e.task 81 task = e.task
82 taskdata={} 82 taskdata={}
83 log = e.data.getVar('BB_LOGFILE') 83 log = e.data.getVar('BB_LOGFILE')
84 taskdata['recipe'] = e.data.expand("${PN}")
84 taskdata['package'] = e.data.expand("${PF}") 85 taskdata['package'] = e.data.expand("${PF}")
85 taskdata['task'] = task 86 taskdata['task'] = task
86 if log: 87 if log:
diff --git a/meta/classes/sign_rpm.bbclass b/meta/classes/sign_rpm.bbclass
index ee0c4808fa..b5b21b0db1 100644
--- a/meta/classes/sign_rpm.bbclass
+++ b/meta/classes/sign_rpm.bbclass
@@ -33,7 +33,7 @@
33 33
34inherit sanity 34inherit sanity
35 35
36RPM_SIGN_PACKAGES='1' 36RPM_SIGN_PACKAGES = '1'
37RPM_SIGN_FILES ?= '0' 37RPM_SIGN_FILES ?= '0'
38RPM_GPG_BACKEND ?= 'local' 38RPM_GPG_BACKEND ?= 'local'
39# SHA-256 is used by default 39# SHA-256 is used by default
diff --git a/meta/classes/siteconfig.bbclass b/meta/classes/siteconfig.bbclass
deleted file mode 100644
index 953cafd285..0000000000
--- a/meta/classes/siteconfig.bbclass
+++ /dev/null
@@ -1,39 +0,0 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7python siteconfig_do_siteconfig () {
8 shared_state = sstate_state_fromvars(d)
9 if shared_state['task'] != 'populate_sysroot':
10 return
11 if not os.path.isdir(os.path.join(d.getVar('FILE_DIRNAME'), 'site_config')):
12 bb.debug(1, "No site_config directory, skipping do_siteconfig")
13 return
14 sstate_install(shared_state, d)
15 bb.build.exec_func('do_siteconfig_gencache', d)
16 sstate_clean(shared_state, d)
17}
18
19EXTRASITECONFIG ?= ""
20
21siteconfig_do_siteconfig_gencache () {
22 mkdir -p ${WORKDIR}/site_config_${MACHINE}
23 gen-site-config ${FILE_DIRNAME}/site_config \
24 >${WORKDIR}/site_config_${MACHINE}/configure.ac
25 cd ${WORKDIR}/site_config_${MACHINE}
26 autoconf
27 rm -f ${BPN}_cache
28 CONFIG_SITE="" ${EXTRASITECONFIG} ./configure ${CONFIGUREOPTS} --cache-file ${BPN}_cache
29 sed -n -e "/ac_cv_c_bigendian/p" -e "/ac_cv_sizeof_/p" \
30 -e "/ac_cv_type_/p" -e "/ac_cv_header_/p" -e "/ac_cv_func_/p" \
31 < ${BPN}_cache > ${BPN}_config
32 mkdir -p ${SYSROOT_DESTDIR}${datadir}/${TARGET_SYS}_config_site.d
33 cp ${BPN}_config ${SYSROOT_DESTDIR}${datadir}/${TARGET_SYS}_config_site.d
34
35}
36
37do_populate_sysroot[sstate-interceptfuncs] += "do_siteconfig "
38
39EXPORT_FUNCTIONS do_siteconfig do_siteconfig_gencache
diff --git a/meta/classes/spdx-common.bbclass b/meta/classes/spdx-common.bbclass
new file mode 100644
index 0000000000..ca0416d1c7
--- /dev/null
+++ b/meta/classes/spdx-common.bbclass
@@ -0,0 +1,107 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7SPDX_VERSION ??= ""
8DEPLOY_DIR_SPDX ??= "${DEPLOY_DIR}/spdx/${SPDX_VERSION}"
9
10# The product name that the CVE database uses. Defaults to BPN, but may need to
11# be overridden per recipe (for example tiff.bb sets CVE_PRODUCT=libtiff).
12CVE_PRODUCT ??= "${BPN}"
13CVE_VERSION ??= "${PV}"
14
15SPDXDIR ??= "${WORKDIR}/spdx/${SPDX_VERSION}"
16SPDXDEPLOY = "${SPDXDIR}/deploy"
17SPDXWORK = "${SPDXDIR}/work"
18SPDXIMAGEWORK = "${SPDXDIR}/image-work"
19SPDXSDKWORK = "${SPDXDIR}/sdk-work"
20SPDXSDKEXTWORK = "${SPDXDIR}/sdk-ext-work"
21SPDXDEPS = "${SPDXDIR}/deps.json"
22
23SPDX_TOOL_NAME ??= "oe-spdx-creator"
24SPDX_TOOL_VERSION ??= "1.0"
25
26SPDXRUNTIMEDEPLOY = "${SPDXDIR}/runtime-deploy"
27
28SPDX_INCLUDE_SOURCES ??= "0"
29SPDX_INCLUDE_COMPILED_SOURCES ??= "0"
30
31SPDX_UUID_NAMESPACE ??= "sbom.openembedded.org"
32SPDX_NAMESPACE_PREFIX ??= "http://spdx.org/spdxdocs"
33SPDX_PRETTY ??= "0"
34
35SPDX_LICENSES ??= "${COREBASE}/meta/files/spdx-licenses.json"
36
37SPDX_CUSTOM_ANNOTATION_VARS ??= ""
38
39SPDX_MULTILIB_SSTATE_ARCHS ??= "${SSTATE_ARCHS}"
40
41python () {
42 from oe.cve_check import extend_cve_status
43 extend_cve_status(d)
44 if d.getVar("SPDX_INCLUDE_COMPILED_SOURCES") == "1":
45 d.setVar("SPDX_INCLUDE_SOURCES", "1")
46}
47
48def create_spdx_source_deps(d):
49 import oe.spdx_common
50
51 deps = []
52 if d.getVar("SPDX_INCLUDE_SOURCES") == "1":
53 pn = d.getVar('PN')
54 # do_unpack is a hack for now; we only need it to get the
55 # dependencies do_unpack already has so we can extract the source
56 # ourselves
57 if oe.spdx_common.has_task(d, "do_unpack"):
58 deps.append("%s:do_unpack" % pn)
59
60 if oe.spdx_common.is_work_shared_spdx(d) and \
61 oe.spdx_common.process_sources(d):
62 # For kernel source code
63 if oe.spdx_common.has_task(d, "do_shared_workdir"):
64 deps.append("%s:do_shared_workdir" % pn)
65 elif d.getVar('S') == d.getVar('STAGING_KERNEL_DIR'):
66 deps.append("virtual/kernel:do_shared_workdir")
67
68 # For gcc-source-${PV} source code
69 if oe.spdx_common.has_task(d, "do_preconfigure"):
70 deps.append("%s:do_preconfigure" % pn)
71 elif oe.spdx_common.has_task(d, "do_patch"):
72 deps.append("%s:do_patch" % pn)
73 # For gcc-cross-x86_64 source code
74 elif oe.spdx_common.has_task(d, "do_configure"):
75 deps.append("%s:do_configure" % pn)
76
77 return " ".join(deps)
78
79
80python do_collect_spdx_deps() {
81 # This task calculates the build time dependencies of the recipe, and is
82 # required because while a task can deptask on itself, those dependencies
83 # do not show up in BB_TASKDEPDATA. To work around that, this task does the
84 # deptask on do_create_spdx and writes out the dependencies it finds, then
85 # do_create_spdx reads in the found dependencies when writing the actual
86 # SPDX document
87 import json
88 import oe.spdx_common
89 from pathlib import Path
90
91 spdx_deps_file = Path(d.getVar("SPDXDEPS"))
92
93 deps = oe.spdx_common.collect_direct_deps(d, "do_create_spdx")
94
95 with spdx_deps_file.open("w") as f:
96 json.dump(deps, f)
97}
98# NOTE: depending on do_unpack is a hack that is necessary to get its dependencies in order to archive the source
99addtask do_collect_spdx_deps after do_unpack
100do_collect_spdx_deps[depends] += "${PATCHDEPENDENCY}"
101do_collect_spdx_deps[deptask] = "do_create_spdx"
102do_collect_spdx_deps[dirs] = "${SPDXDIR}"
103
104oe.spdx_common.collect_direct_deps[vardepsexclude] += "BB_TASKDEPDATA"
105oe.spdx_common.collect_direct_deps[vardeps] += "DEPENDS"
106oe.spdx_common.collect_package_providers[vardepsexclude] += "BB_TASKDEPDATA"
107oe.spdx_common.get_patched_src[vardepsexclude] += "STAGING_KERNEL_DIR"
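
do_collect_spdx_deps serialises the direct dependency list to ${SPDXDEPS} as JSON precisely so that do_create_spdx can read it back without consulting BB_TASKDEPDATA. A later python task could load it roughly like this (a sketch only; the real consumer lives in oe.spdx_common and the create-spdx classes):

    python do_report_spdx_deps() {
        import json
        from pathlib import Path

        # Read back the dependency list written by do_collect_spdx_deps.
        deps = json.loads(Path(d.getVar("SPDXDEPS")).read_text())
        bb.note("SPDX build dependencies: %s" % deps)
    }
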
diff --git a/meta/classes/toaster.bbclass b/meta/classes/toaster.bbclass
index 03c4f3a930..af7c457808 100644
--- a/meta/classes/toaster.bbclass
+++ b/meta/classes/toaster.bbclass
@@ -84,7 +84,7 @@ python toaster_layerinfo_dumpdata() {
84 84
85 llayerinfo = {} 85 llayerinfo = {}
86 86
87 for layer in { l for l in bblayers.strip().split(" ") if len(l) }: 87 for layer in { l for l in bblayers.strip().split() if len(l) }:
88 llayerinfo[layer] = _get_layer_dict(layer) 88 llayerinfo[layer] = _get_layer_dict(layer)
89 89
90 90
diff --git a/meta/classes/toolchain/clang.bbclass b/meta/classes/toolchain/clang.bbclass
new file mode 100644
index 0000000000..d7b8a3657c
--- /dev/null
+++ b/meta/classes/toolchain/clang.bbclass
@@ -0,0 +1,37 @@
1CC = "${CCACHE}${HOST_PREFIX}clang ${HOST_CC_ARCH}${TOOLCHAIN_OPTIONS}"
2CXX = "${CCACHE}${HOST_PREFIX}clang++ ${HOST_CC_ARCH}${TOOLCHAIN_OPTIONS}"
3FC = "${HOST_PREFIX}gfortran ${HOST_CC_ARCH}${TOOLCHAIN_OPTIONS}"
4CPP = "${CCACHE}${HOST_PREFIX}clang ${HOST_CC_ARCH}${TOOLCHAIN_OPTIONS} -E"
5LD = "${@bb.utils.contains('DISTRO_FEATURES', 'ld-is-lld', '${HOST_PREFIX}ld.lld${TOOLCHAIN_OPTIONS} ${HOST_LD_ARCH}', '${HOST_PREFIX}ld${TOOLCHAIN_OPTIONS} ${HOST_LD_ARCH}', d)}"
6CCLD = "${CCACHE}${HOST_PREFIX}clang ${HOST_CC_ARCH}${TOOLCHAIN_OPTIONS}"
7RANLIB = "${HOST_PREFIX}llvm-ranlib"
8AR = "${HOST_PREFIX}llvm-ar"
9AS = "${HOST_PREFIX}as ${HOST_AS_ARCH}"
10STRIP = "${HOST_PREFIX}llvm-strip"
11OBJCOPY = "${HOST_PREFIX}llvm-objcopy"
12OBJDUMP = "${HOST_PREFIX}llvm-objdump"
13STRINGS = "${HOST_PREFIX}llvm-strings"
14NM = "${HOST_PREFIX}llvm-nm"
15READELF = "${HOST_PREFIX}llvm-readelf"
16
17PREFERRED_PROVIDER_virtual/${MLPREFIX}cross-cc = "${MLPREFIX}clang-cross-${TARGET_ARCH}"
18PREFERRED_PROVIDER_virtual/${MLPREFIX}cross-c++ = "${MLPREFIX}clang-cross-${TARGET_ARCH}"
19PREFERRED_PROVIDER_virtual/${MLPREFIX}compilerlibs = "${MLPREFIX}gcc-runtime"
20PREFERRED_PROVIDER_virtual/${MLPREFIX}cross-cc:class-nativesdk = "clang-crosssdk-${SDK_SYS}"
21PREFERRED_PROVIDER_virtual/${MLPREFIX}cross-c++:class-nativesdk = "clang-crosssdk-${SDK_SYS}"
22
23PREFERRED_PROVIDER_virtual/nativesdk-cross-cc:class-crosssdk = "clang-crosssdk-${SDK_SYS}"
24PREFERRED_PROVIDER_virtual/nativesdk-cross-c++:class-crosssdk = "clang-crosssdk-${SDK_SYS}"
25
26PREFERRED_PROVIDER_virtual/nativesdk-cross-cc:class-cross-canadian = "clang-crosssdk-${SDK_SYS}"
27PREFERRED_PROVIDER_virtual/nativesdk-cross-c++:class-cross-canadian = "clang-crosssdk-${SDK_SYS}"
28
29
30BASE_DEFAULT_DEPS:append:class-target = " compiler-rt"
31
32TUNE_CCARGS += "${@bb.utils.contains("DISTRO_FEATURES", "usrmerge", " --dyld-prefix=/usr", "", d)}"
33
34LDFLAGS:append:class-nativesdk:x86-64 = " -Wl,-dynamic-linker,${base_libdir}/ld-linux-x86-64.so.2"
35LDFLAGS:append:class-nativesdk:aarch64 = " -Wl,-dynamic-linker,${base_libdir}/ld-linux-aarch64.so.1"
36
37TCOVERRIDE = "toolchain-clang"
diff --git a/meta/classes/toolchain/gcc-native.bbclass b/meta/classes/toolchain/gcc-native.bbclass
new file mode 100644
index 0000000000..a708bd0389
--- /dev/null
+++ b/meta/classes/toolchain/gcc-native.bbclass
@@ -0,0 +1,15 @@
1BUILD_CC = "${CCACHE}${BUILD_PREFIX}gcc ${BUILD_CC_ARCH}"
2BUILD_CXX = "${CCACHE}${BUILD_PREFIX}g++ ${BUILD_CC_ARCH}"
3BUILD_FC = "${BUILD_PREFIX}gfortran ${BUILD_CC_ARCH}"
4BUILD_CPP = "${BUILD_PREFIX}gcc ${BUILD_CC_ARCH} -E"
5BUILD_LD = "${BUILD_PREFIX}ld ${BUILD_LD_ARCH}"
6BUILD_CCLD = "${BUILD_PREFIX}gcc ${BUILD_CC_ARCH}"
7BUILD_AR = "${BUILD_PREFIX}ar"
8BUILD_AS = "${BUILD_PREFIX}as ${BUILD_AS_ARCH}"
9BUILD_RANLIB = "${BUILD_PREFIX}ranlib -D"
10BUILD_STRIP = "${BUILD_PREFIX}strip"
11BUILD_OBJCOPY = "${BUILD_PREFIX}objcopy"
12BUILD_OBJDUMP = "${BUILD_PREFIX}objdump"
13BUILD_NM = "${BUILD_PREFIX}nm"
14BUILD_READELF = "${BUILD_PREFIX}readelf"
15
diff --git a/meta/classes/toolchain/gcc.bbclass b/meta/classes/toolchain/gcc.bbclass
new file mode 100644
index 0000000000..a5adb5ca37
--- /dev/null
+++ b/meta/classes/toolchain/gcc.bbclass
@@ -0,0 +1,33 @@
1CC = "${CCACHE}${HOST_PREFIX}gcc ${HOST_CC_ARCH}${TOOLCHAIN_OPTIONS}"
2CXX = "${CCACHE}${HOST_PREFIX}g++ ${HOST_CC_ARCH}${TOOLCHAIN_OPTIONS}"
3FC = "${HOST_PREFIX}gfortran ${HOST_CC_ARCH}${TOOLCHAIN_OPTIONS}"
4CPP = "${HOST_PREFIX}gcc -E${TOOLCHAIN_OPTIONS} ${HOST_CC_ARCH}"
5LD = "${HOST_PREFIX}ld${TOOLCHAIN_OPTIONS} ${HOST_LD_ARCH}"
6CCLD = "${CC}"
7AR = "${HOST_PREFIX}gcc-ar"
8AS = "${HOST_PREFIX}as ${HOST_AS_ARCH}"
9RANLIB = "${HOST_PREFIX}gcc-ranlib"
10STRIP = "${HOST_PREFIX}strip"
11OBJCOPY = "${HOST_PREFIX}objcopy"
12OBJDUMP = "${HOST_PREFIX}objdump"
13STRINGS = "${HOST_PREFIX}strings"
14NM = "${HOST_PREFIX}gcc-nm"
15READELF = "${HOST_PREFIX}readelf"
16
17PREFERRED_PROVIDER_virtual/${MLPREFIX}cross-cc = "${MLPREFIX}gcc-cross-${TARGET_ARCH}"
18PREFERRED_PROVIDER_virtual/${MLPREFIX}cross-c++ = "${MLPREFIX}gcc-cross-${TARGET_ARCH}"
19PREFERRED_PROVIDER_virtual/${MLPREFIX}compilerlibs = "${MLPREFIX}gcc-runtime"
20
21PREFERRED_PROVIDER_virtual/${MLPREFIX}cross-cc:class-nativesdk = "gcc-crosssdk-${SDK_SYS}"
22PREFERRED_PROVIDER_virtual/${MLPREFIX}cross-c++:class-nativesdk = "gcc-crosssdk-${SDK_SYS}"
23PREFERRED_PROVIDER_virtual/${MLPREFIX}compilerlibs:class-nativesdk = "nativesdk-gcc-runtime"
24
25PREFERRED_PROVIDER_virtual/nativesdk-cross-cc:class-crosssdk = "gcc-crosssdk-${SDK_SYS}"
26PREFERRED_PROVIDER_virtual/nativesdk-cross-c++:class-crosssdk = "gcc-crosssdk-${SDK_SYS}"
27PREFERRED_PROVIDER_virtual/nativesdk-compilerlibs:class-crosssdk = "nativesdk-gcc-runtime"
28
29PREFERRED_PROVIDER_virtual/nativesdk-cross-cc:class-cross-canadian = "gcc-crosssdk-${SDK_SYS}"
30PREFERRED_PROVIDER_virtual/nativesdk-cross-c++:class-cross-canadian = "gcc-crosssdk-${SDK_SYS}"
31PREFERRED_PROVIDER_virtual/nativesdk-compilerlibs:class-cross-canadian = "nativesdk-gcc-runtime"
32
33TCOVERRIDE = "toolchain-gcc"
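
These toolchain/*.bbclass files only set the tool variables and preferred cross providers; how one of them gets inherited is assumed here to be a TOOLCHAIN selection variable expanded as toolchain/${TOOLCHAIN} (hypothetical usage, the exact hook lives in the core configuration):

    # local.conf sketch, assuming the selection variable is named TOOLCHAIN
    TOOLCHAIN = "clang"                  # build with toolchain/clang.bbclass
    TOOLCHAIN:pn-some-recipe = "gcc"     # hypothetical per-recipe override back to gcc
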
diff --git a/meta/classes/vex.bbclass b/meta/classes/vex.bbclass
new file mode 100644
index 0000000000..402d8e0d96
--- /dev/null
+++ b/meta/classes/vex.bbclass
@@ -0,0 +1,303 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7# This class is used to generate metadata needed by external
8# tools to check for vulnerabilities, for example CVEs.
9#
10# In order to use this class, just inherit it in the
11# local.conf file and it will add the generate_vex task for
12# every recipe. If an image is built, it will generate a report
13# in DEPLOY_DIR_IMAGE for all the packages used; it will also
14# generate a file for every recipe used in the build.
15#
16# Variables use the CVE_CHECK prefix to keep compatibility with
17# the cve-check class.
18#
19# Example:
20# bitbake -c generate_vex openssl
21# bitbake core-image-sato
22# bitbake -k -c generate_vex universe
23#
24# The product name that the CVE database uses. Defaults to BPN, but may need to
25# be overridden per recipe (for example tiff.bb sets CVE_PRODUCT=libtiff).
26CVE_PRODUCT ??= "${BPN}"
27CVE_VERSION ??= "${PV}"
28
29CVE_CHECK_SUMMARY_DIR ?= "${LOG_DIR}/cve"
30
31CVE_CHECK_SUMMARY_FILE_NAME_JSON = "cve-summary.json"
32CVE_CHECK_SUMMARY_INDEX_PATH = "${CVE_CHECK_SUMMARY_DIR}/cve-summary-index.txt"
33
34CVE_CHECK_DIR ??= "${DEPLOY_DIR}/cve"
35CVE_CHECK_RECIPE_FILE_JSON ?= "${CVE_CHECK_DIR}/${PN}_cve.json"
36CVE_CHECK_MANIFEST_JSON ?= "${IMGDEPLOYDIR}/${IMAGE_NAME}.json"
37
38# Skip CVE Check for packages (PN)
39CVE_CHECK_SKIP_RECIPE ?= ""
40
41# Replace NVD DB check status for a given CVE. Each of CVE has to be mentioned
42# separately with optional detail and description for this status.
43#
44# CVE_STATUS[CVE-1234-0001] = "not-applicable-platform: Issue only applies on Windows"
45# CVE_STATUS[CVE-1234-0002] = "fixed-version: Fixed externally"
46#
47# Setting the same status and reason for multiple CVEs is possible
48# via the CVE_STATUS_GROUPS variable.
49#
50# CVE_STATUS_GROUPS = "CVE_STATUS_WIN CVE_STATUS_PATCHED"
51#
52# CVE_STATUS_WIN = "CVE-1234-0001 CVE-1234-0003"
53# CVE_STATUS_WIN[status] = "not-applicable-platform: Issue only applies on Windows"
54# CVE_STATUS_PATCHED = "CVE-1234-0002 CVE-1234-0004"
55# CVE_STATUS_PATCHED[status] = "fixed-version: Fixed externally"
56#
57# All possible CVE statuses can be found in cve-check-map.conf
58# CVE_CHECK_STATUSMAP[not-applicable-platform] = "Ignored"
59# CVE_CHECK_STATUSMAP[fixed-version] = "Patched"
60#
61# CVE_CHECK_IGNORE is deprecated and CVE_STATUS has to be used instead.
62# Keep CVE_CHECK_IGNORE until other layers migrate to new variables
63CVE_CHECK_IGNORE ?= ""
64
65# Layers to be excluded
66CVE_CHECK_LAYER_EXCLUDELIST ??= ""
67
68# Layers to be included
69CVE_CHECK_LAYER_INCLUDELIST ??= ""
70
71
72# Set to "alphabetical" for versions that use a single alphabetical character as the incremental release
73CVE_VERSION_SUFFIX ??= ""
74
75python () {
76 if bb.data.inherits_class("cve-check", d):
77 raise bb.parse.SkipRecipe("Skipping recipe: found incompatible combination of cve-check and vex enabled at the same time.")
78
79 from oe.cve_check import extend_cve_status
80 extend_cve_status(d)
81}
82
83def generate_json_report(d, out_path, link_path):
84 if os.path.exists(d.getVar("CVE_CHECK_SUMMARY_INDEX_PATH")):
85 import json
86 from oe.cve_check import cve_check_merge_jsons, update_symlinks
87
88 bb.note("Generating JSON CVE summary")
89 index_file = d.getVar("CVE_CHECK_SUMMARY_INDEX_PATH")
90 summary = {"version":"1", "package": []}
91 with open(index_file) as f:
92 filename = f.readline()
93 while filename:
94 with open(filename.rstrip()) as j:
95 data = json.load(j)
96 cve_check_merge_jsons(summary, data)
97 filename = f.readline()
98
99 summary["package"].sort(key=lambda d: d['name'])
100
101 with open(out_path, "w") as f:
102 json.dump(summary, f, indent=2)
103
104 update_symlinks(out_path, link_path)
105
106python vex_save_summary_handler () {
107 import shutil
108 import datetime
109 from oe.cve_check import update_symlinks
110
111 cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR")
112
113 bb.utils.mkdirhier(cvelogpath)
114 timestamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
115
116 json_summary_link_name = os.path.join(cvelogpath, d.getVar("CVE_CHECK_SUMMARY_FILE_NAME_JSON"))
117 json_summary_name = os.path.join(cvelogpath, "cve-summary-%s.json" % (timestamp))
118 generate_json_report(d, json_summary_name, json_summary_link_name)
119 bb.plain("Complete CVE JSON report summary created at: %s" % json_summary_link_name)
120}
121
122addhandler vex_save_summary_handler
123vex_save_summary_handler[eventmask] = "bb.event.BuildCompleted"
124
125python do_generate_vex () {
126 """
127 Generate metadata needed for vulnerability checking for
128 the current recipe
129 """
130 from oe.cve_check import get_patched_cves
131
132 try:
133 patched_cves = get_patched_cves(d)
134 cves_status = []
135 products = d.getVar("CVE_PRODUCT").split()
136 for product in products:
137 if ":" in product:
138 _, product = product.split(":", 1)
139 cves_status.append([product, False])
140
141 except FileNotFoundError:
142 bb.fatal("Failure in searching patches")
143
144 cve_write_data_json(d, patched_cves, cves_status)
145}
146
147addtask generate_vex before do_build
148
149python vex_cleanup () {
150 """
151 Delete the file used to gather all the CVE information.
152 """
153 bb.utils.remove(e.data.getVar("CVE_CHECK_SUMMARY_INDEX_PATH"))
154}
155
156addhandler vex_cleanup
157vex_cleanup[eventmask] = "bb.event.BuildCompleted"
158
159python vex_write_rootfs_manifest () {
160 """
161 Create VEX/CVE manifest when building an image
162 """
163
164 import json
165 from oe.rootfs import image_list_installed_packages
166 from oe.cve_check import cve_check_merge_jsons, update_symlinks
167
168 deploy_file_json = d.getVar("CVE_CHECK_RECIPE_FILE_JSON")
169 if os.path.exists(deploy_file_json):
170 bb.utils.remove(deploy_file_json)
171
172 # Create a list of relevant recipes
173 recipies = set()
174 for pkg in list(image_list_installed_packages(d)):
175 pkg_info = os.path.join(d.getVar('PKGDATA_DIR'),
176 'runtime-reverse', pkg)
177 pkg_data = oe.packagedata.read_pkgdatafile(pkg_info)
178 recipies.add(pkg_data["PN"])
179
180 bb.note("Writing rootfs VEX manifest")
181 deploy_dir = d.getVar("IMGDEPLOYDIR")
182 link_name = d.getVar("IMAGE_LINK_NAME")
183
184 json_data = {"version":"1", "package": []}
185 text_data = ""
186
187 save_pn = d.getVar("PN")
188
189 for pkg in recipies:
190 # To be able to use the CVE_CHECK_RECIPE_FILE_JSON variable we have to evaluate
191 # it with the different PN names set each time.
192 d.setVar("PN", pkg)
193
194 pkgfilepath = d.getVar("CVE_CHECK_RECIPE_FILE_JSON")
195 if os.path.exists(pkgfilepath):
196 with open(pkgfilepath) as j:
197 data = json.load(j)
198 cve_check_merge_jsons(json_data, data)
199 else:
200 bb.warn("Missing cve file for %s" % pkg)
201
202 d.setVar("PN", save_pn)
203
204 link_path = os.path.join(deploy_dir, "%s.json" % link_name)
205 manifest_name = d.getVar("CVE_CHECK_MANIFEST_JSON")
206
207 with open(manifest_name, "w") as f:
208 json.dump(json_data, f, indent=2)
209
210 update_symlinks(manifest_name, link_path)
211 bb.plain("Image VEX JSON report stored in: %s" % manifest_name)
212}
213
214ROOTFS_POSTPROCESS_COMMAND:prepend = "vex_write_rootfs_manifest; "
215do_rootfs[recrdeptask] += "do_generate_vex "
216do_populate_sdk[recrdeptask] += "do_generate_vex "
217
218def cve_write_data_json(d, cve_data, cve_status):
219 """
220 Prepare CVE data for the JSON format, then write it.
221 Done for each recipe.
222 """
223
224 from oe.cve_check import get_cpe_ids
225 import json
226
227 output = {"version":"1", "package": []}
228 nvd_link = "https://nvd.nist.gov/vuln/detail/"
229
230 fdir_name = d.getVar("FILE_DIRNAME")
231 layer = fdir_name.split("/")[-3]
232
233 include_layers = d.getVar("CVE_CHECK_LAYER_INCLUDELIST").split()
234 exclude_layers = d.getVar("CVE_CHECK_LAYER_EXCLUDELIST").split()
235
236 if exclude_layers and layer in exclude_layers:
237 return
238
239 if include_layers and layer not in include_layers:
240 return
241
242 product_data = []
243 for s in cve_status:
244 p = {"product": s[0], "cvesInRecord": "Yes"}
245 if s[1] == False:
246 p["cvesInRecord"] = "No"
247 product_data.append(p)
248 product_data = list({p['product']:p for p in product_data}.values())
249
250 package_version = "%s%s" % (d.getVar("EXTENDPE"), d.getVar("PV"))
251 cpes = get_cpe_ids(d.getVar("CVE_PRODUCT"), d.getVar("CVE_VERSION"))
252 package_data = {
253 "name" : d.getVar("PN"),
254 "layer" : layer,
255 "version" : package_version,
256 "products": product_data,
257 "cpes": cpes
258 }
259
260 cve_list = []
261
262 for cve in sorted(cve_data):
263 issue_link = "%s%s" % (nvd_link, cve)
264
265 cve_item = {
266 "id" : cve,
267 "status" : cve_data[cve]["abbrev-status"],
268 "link": issue_link,
269 }
270 if 'NVD-summary' in cve_data[cve]:
271 cve_item["summary"] = cve_data[cve]["NVD-summary"]
272 cve_item["scorev2"] = cve_data[cve]["NVD-scorev2"]
273 cve_item["scorev3"] = cve_data[cve]["NVD-scorev3"]
274 cve_item["scorev4"] = cve_data[cve]["NVD-scorev4"]
275 cve_item["vector"] = cve_data[cve]["NVD-vector"]
276 cve_item["vectorString"] = cve_data[cve]["NVD-vectorString"]
277 if 'status' in cve_data[cve]:
278 cve_item["detail"] = cve_data[cve]["status"]
279 if 'justification' in cve_data[cve]:
280 cve_item["description"] = cve_data[cve]["justification"]
281 if 'resource' in cve_data[cve]:
282 cve_item["patch-file"] = cve_data[cve]["resource"]
283 cve_list.append(cve_item)
284
285 package_data["issue"] = cve_list
286 output["package"].append(package_data)
287
288 deploy_file = d.getVar("CVE_CHECK_RECIPE_FILE_JSON")
289
290 write_string = json.dumps(output, indent=2)
291
292 cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR")
293 index_path = d.getVar("CVE_CHECK_SUMMARY_INDEX_PATH")
294 bb.utils.mkdirhier(cvelogpath)
295 bb.utils.mkdirhier(os.path.dirname(deploy_file))
296 fragment_file = os.path.basename(deploy_file)
297 fragment_path = os.path.join(cvelogpath, fragment_file)
298 with open(fragment_path, "w") as f:
299 f.write(write_string)
300 with open(deploy_file, "w") as f:
301 f.write(write_string)
302 with open(index_path, "a+") as f:
303 f.write("%s\n" % fragment_path)
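
The image manifest written by vex_write_rootfs_manifest is plain JSON in the package/issue layout produced by cve_write_data_json above, so it can be post-processed directly; for example (a sketch, with a hypothetical manifest filename):

    import json

    # ${IMAGE_LINK_NAME}.json as deployed into IMGDEPLOYDIR; the name below is made up.
    with open("core-image-minimal-qemux86-64.rootfs.json") as f:
        manifest = json.load(f)

    # List issues still marked Unpatched for each recipe in the image.
    for pkg in manifest["package"]:
        for issue in pkg.get("issue", []):
            if issue["status"] == "Unpatched":
                print(pkg["name"], issue["id"], issue["link"])
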
diff --git a/meta/classes/yocto-check-layer.bbclass b/meta/classes/yocto-check-layer.bbclass
deleted file mode 100644
index 404f5fd9f2..0000000000
--- a/meta/classes/yocto-check-layer.bbclass
+++ /dev/null
@@ -1,22 +0,0 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7#
8# This class is used by yocto-check-layer script for additional per-recipe tests
9# The first test ensures that the layer has no recipes skipping 'installed-vs-shipped' QA checks
10#
11
12WARN_QA:remove = "installed-vs-shipped"
13ERROR_QA:append = " installed-vs-shipped"
14
15python () {
16 packages = set((d.getVar('PACKAGES') or '').split())
17 for package in packages:
18 skip = set((d.getVar('INSANE_SKIP') or "").split() +
19 (d.getVar('INSANE_SKIP:' + package) or "").split())
20 if 'installed-vs-shipped' in skip:
21 oe.qa.handle_error("installed-vs-shipped", 'Package %s is skipping "installed-vs-shipped" QA test.' % package, d)
22}