Diffstat (limited to 'meta/classes')
-rw-r--r--  meta/classes/archiver.bbclass        |   3
-rw-r--r--  meta/classes/create-spdx-2.2.bbclass | 261
-rw-r--r--  meta/classes/devtool-source.bbclass  |  74
-rw-r--r--  meta/classes/multilib.bbclass        |  87
-rw-r--r--  meta/classes/multilib_global.bbclass |  25
-rw-r--r--  meta/classes/siteconfig.bbclass      |  39
-rw-r--r--  meta/classes/spdx-common.bbclass     | 257
7 files changed, 338 insertions(+), 408 deletions(-)
diff --git a/meta/classes/archiver.bbclass b/meta/classes/archiver.bbclass
index 2d0bbfbd42..9d286224d6 100644
--- a/meta/classes/archiver.bbclass
+++ b/meta/classes/archiver.bbclass
@@ -473,7 +473,8 @@ def create_diff_gz(d, src_orig, src, ar_outdir):
 
 def is_work_shared(d):
     sharedworkdir = os.path.join(d.getVar('TMPDIR'), 'work-shared')
-    return d.getVar('S').startswith(sharedworkdir)
+    sourcedir = os.path.realpath(d.getVar('S'))
+    return sourcedir.startswith(sharedworkdir)
 
 # Run do_unpack and do_patch
 python do_unpack_and_patch() {
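
Note: the os.path.realpath() call added above matters because ${S} can reach TMPDIR/work-shared through a symlink, in which case a plain prefix test on the unresolved path returns False. A minimal standalone sketch of the difference, using hypothetical paths:

    import os

    tmpdir = "/build/tmp"                                   # stand-in for TMPDIR
    sharedworkdir = os.path.join(tmpdir, "work-shared")
    s = "/build/tmp/work/foo/src"                           # assume: symlink into work-shared

    # Old check: compares the unresolved path, so the symlinked case is missed
    print(s.startswith(sharedworkdir))                      # False
    # New check: resolve symlinks first, then compare
    print(os.path.realpath(s).startswith(sharedworkdir))    # True if s resolves there
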
diff --git a/meta/classes/create-spdx-2.2.bbclass b/meta/classes/create-spdx-2.2.bbclass
index 7c8a0b8b0f..3ebf92b5e1 100644
--- a/meta/classes/create-spdx-2.2.bbclass
+++ b/meta/classes/create-spdx-2.2.bbclass
@@ -4,65 +4,15 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
-DEPLOY_DIR_SPDX ??= "${DEPLOY_DIR}/spdx"
-
-# The product name that the CVE database uses. Defaults to BPN, but may need to
-# be overriden per recipe (for example tiff.bb sets CVE_PRODUCT=libtiff).
-CVE_PRODUCT ??= "${BPN}"
-CVE_VERSION ??= "${PV}"
-
-SPDXDIR ??= "${WORKDIR}/spdx"
-SPDXDEPLOY = "${SPDXDIR}/deploy"
-SPDXWORK = "${SPDXDIR}/work"
-SPDXIMAGEWORK = "${SPDXDIR}/image-work"
-SPDXSDKWORK = "${SPDXDIR}/sdk-work"
-SPDXDEPS = "${SPDXDIR}/deps.json"
-
-SPDX_TOOL_NAME ??= "oe-spdx-creator"
-SPDX_TOOL_VERSION ??= "1.0"
-
-SPDXRUNTIMEDEPLOY = "${SPDXDIR}/runtime-deploy"
-
-SPDX_INCLUDE_SOURCES ??= "0"
-SPDX_ARCHIVE_SOURCES ??= "0"
-SPDX_ARCHIVE_PACKAGED ??= "0"
-
-SPDX_UUID_NAMESPACE ??= "sbom.openembedded.org"
-SPDX_NAMESPACE_PREFIX ??= "http://spdx.org/spdxdocs"
-SPDX_PRETTY ??= "0"
-
-SPDX_LICENSES ??= "${COREBASE}/meta/files/spdx-licenses.json"
-
-SPDX_CUSTOM_ANNOTATION_VARS ??= ""
-
-SPDX_ORG ??= "OpenEmbedded ()"
-SPDX_SUPPLIER ??= "Organization: ${SPDX_ORG}"
-SPDX_SUPPLIER[doc] = "The SPDX PackageSupplier field for SPDX packages created from \
-    this recipe. For SPDX documents create using this class during the build, this \
-    is the contact information for the person or organization who is doing the \
-    build."
-
-def extract_licenses(filename):
-    import re
-
-    lic_regex = re.compile(rb'^\W*SPDX-License-Identifier:\s*([ \w\d.()+-]+?)(?:\s+\W*)?$', re.MULTILINE)
-
-    try:
-        with open(filename, 'rb') as f:
-            size = min(15000, os.stat(filename).st_size)
-            txt = f.read(size)
-            licenses = re.findall(lic_regex, txt)
-            if licenses:
-                ascii_licenses = [lic.decode('ascii') for lic in licenses]
-                return ascii_licenses
-    except Exception as e:
-        bb.warn(f"Exception reading {filename}: {e}")
-    return None
-
-def get_doc_namespace(d, doc):
+inherit spdx-common
+
+SPDX_VERSION = "2.2"
+
+def get_namespace(d, name):
     import uuid
     namespace_uuid = uuid.uuid5(uuid.NAMESPACE_DNS, d.getVar("SPDX_UUID_NAMESPACE"))
-    return "%s/%s-%s" % (d.getVar("SPDX_NAMESPACE_PREFIX"), doc.name, str(uuid.uuid5(namespace_uuid, doc.name)))
+    return "%s/%s-%s" % (d.getVar("SPDX_NAMESPACE_PREFIX"), name, str(uuid.uuid5(namespace_uuid, name)))
+
 
 def create_annotation(d, comment):
     from datetime import datetime, timezone
@@ -80,26 +30,6 @@ def recipe_spdx_is_native(d, recipe):
         a.annotator == "Tool: %s - %s" % (d.getVar("SPDX_TOOL_NAME"), d.getVar("SPDX_TOOL_VERSION")) and
         a.comment == "isNative" for a in recipe.annotations)
 
-def is_work_shared_spdx(d):
-    return bb.data.inherits_class('kernel', d) or ('work-shared' in d.getVar('WORKDIR'))
-
-def get_json_indent(d):
-    if d.getVar("SPDX_PRETTY") == "1":
-        return 2
-    return None
-
-python() {
-    import json
-    if d.getVar("SPDX_LICENSE_DATA"):
-        return
-
-    with open(d.getVar("SPDX_LICENSES"), "r") as f:
-        data = json.load(f)
-        # Transform the license array to a dictionary
-        data["licenses"] = {l["licenseId"]: l for l in data["licenses"]}
-        d.setVar("SPDX_LICENSE_DATA", data)
-}
-
 def convert_license_to_spdx(lic, document, d, existing={}):
     from pathlib import Path
     import oe.spdx
@@ -172,34 +102,6 @@ def convert_license_to_spdx(lic, document, d, existing={}):
 
     return ' '.join(convert(l) for l in lic_split)
 
-def process_sources(d):
-    pn = d.getVar('PN')
-    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
-    if pn in assume_provided:
-        for p in d.getVar("PROVIDES").split():
-            if p != pn:
-                pn = p
-                break
-
-    # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
-    # so avoid archiving source here.
-    if pn.startswith('glibc-locale'):
-        return False
-    if d.getVar('PN') == "libtool-cross":
-        return False
-    if d.getVar('PN') == "libgcc-initial":
-        return False
-    if d.getVar('PN') == "shadow-sysroot":
-        return False
-
-    # We just archive gcc-source for all the gcc related recipes
-    if d.getVar('BPN') in ['gcc', 'libgcc']:
-        bb.debug(1, 'spdx: There is bug in scan of %s is, do nothing' % pn)
-        return False
-
-    return True
-
-
 def add_package_files(d, doc, spdx_pkg, topdir, get_spdxid, get_types, *, archive=None, ignore_dirs=[], ignore_top_level_dirs=[]):
     from pathlib import Path
     import oe.spdx
@@ -348,14 +250,12 @@ def collect_dep_recipes(d, doc, spdx_recipe):
     import oe.spdx
 
     deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
-    spdx_deps_file = Path(d.getVar("SPDXDEPS"))
     package_archs = d.getVar("SSTATE_ARCHS").split()
     package_archs.reverse()
 
     dep_recipes = []
 
-    with spdx_deps_file.open("r") as f:
-        deps = json.load(f)
+    deps = get_spdx_deps(d)
 
     for dep_pn, dep_hashfn, in_taskhash in deps:
         # If this dependency is not calculated in the taskhash skip it.
@@ -468,51 +368,6 @@ def add_download_packages(d, doc, recipe):
     # but this should be sufficient for now
     doc.add_relationship(package, "BUILD_DEPENDENCY_OF", recipe)
 
-def collect_direct_deps(d, dep_task):
-    current_task = "do_" + d.getVar("BB_CURRENTTASK")
-    pn = d.getVar("PN")
-
-    taskdepdata = d.getVar("BB_TASKDEPDATA", False)
-
-    for this_dep in taskdepdata.values():
-        if this_dep[0] == pn and this_dep[1] == current_task:
-            break
-    else:
-        bb.fatal(f"Unable to find this {pn}:{current_task} in taskdepdata")
-
-    deps = set()
-    for dep_name in this_dep[3]:
-        dep_data = taskdepdata[dep_name]
-        if dep_data[1] == dep_task and dep_data[0] != pn:
-            deps.add((dep_data[0], dep_data[7], dep_name in this_dep[8]))
-
-    return sorted(deps)
-
-collect_direct_deps[vardepsexclude] += "BB_TASKDEPDATA"
-collect_direct_deps[vardeps] += "DEPENDS"
-
-python do_collect_spdx_deps() {
-    # This task calculates the build time dependencies of the recipe, and is
-    # required because while a task can deptask on itself, those dependencies
-    # do not show up in BB_TASKDEPDATA. To work around that, this task does the
-    # deptask on do_create_spdx and writes out the dependencies it finds, then
-    # do_create_spdx reads in the found dependencies when writing the actual
-    # SPDX document
-    import json
-    from pathlib import Path
-
-    spdx_deps_file = Path(d.getVar("SPDXDEPS"))
-
-    deps = collect_direct_deps(d, "do_create_spdx")
-
-    with spdx_deps_file.open("w") as f:
-        json.dump(deps, f)
-}
-# NOTE: depending on do_unpack is a hack that is necessary to get it's dependencies for archive the source
-addtask do_collect_spdx_deps after do_unpack
-do_collect_spdx_deps[depends] += "${PATCHDEPENDENCY}"
-do_collect_spdx_deps[deptask] = "do_create_spdx"
-do_collect_spdx_deps[dirs] = "${SPDXDIR}"
 
 python do_create_spdx() {
     from datetime import datetime, timezone
@@ -551,7 +406,7 @@ python do_create_spdx() {
     doc = oe.spdx.SPDXDocument()
 
     doc.name = "recipe-" + d.getVar("PN")
-    doc.documentNamespace = get_doc_namespace(d, doc)
+    doc.documentNamespace = get_namespace(d, doc.name)
     doc.creationInfo.created = creation_time
     doc.creationInfo.comment = "This document was created by analyzing recipe files during the build."
     doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
@@ -655,7 +510,7 @@ python do_create_spdx() {
         package_doc = oe.spdx.SPDXDocument()
         pkg_name = d.getVar("PKG:%s" % package) or package
         package_doc.name = pkg_name
-        package_doc.documentNamespace = get_doc_namespace(d, package_doc)
+        package_doc.documentNamespace = get_namespace(d, package_doc.name)
         package_doc.creationInfo.created = creation_time
         package_doc.creationInfo.comment = "This document was created by analyzing packages created during the build."
         package_doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
@@ -716,44 +571,6 @@ do_create_spdx[dirs] = "${SPDXWORK}"
 do_create_spdx[cleandirs] = "${SPDXDEPLOY} ${SPDXWORK}"
 do_create_spdx[depends] += "${PATCHDEPENDENCY}"
 
-def collect_package_providers(d):
-    from pathlib import Path
-    import oe.sbom
-    import oe.spdx
-    import json
-
-    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
-
-    providers = {}
-
-    deps = collect_direct_deps(d, "do_create_spdx")
-    deps.append((d.getVar("PN"), d.getVar("BB_HASHFILENAME"), True))
-
-    for dep_pn, dep_hashfn, _ in deps:
-        localdata = d
-        recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
-        if not recipe_data:
-            localdata = bb.data.createCopy(d)
-            localdata.setVar("PKGDATA_DIR", "${PKGDATA_DIR_SDK}")
-            recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
-
-        for pkg in recipe_data.get("PACKAGES", "").split():
-
-            pkg_data = oe.packagedata.read_subpkgdata_dict(pkg, localdata)
-            rprovides = set(n for n, _ in bb.utils.explode_dep_versions2(pkg_data.get("RPROVIDES", "")).items())
-            rprovides.add(pkg)
-
-            if "PKG" in pkg_data:
-                pkg = pkg_data["PKG"]
-                rprovides.add(pkg)
-
-            for r in rprovides:
-                providers[r] = (pkg, dep_hashfn)
-
-    return providers
-
-collect_package_providers[vardepsexclude] += "BB_TASKDEPDATA"
-
 python do_create_runtime_spdx() {
     from datetime import datetime, timezone
     import oe.sbom
@@ -800,7 +617,7 @@ python do_create_runtime_spdx() {
 
         runtime_doc = oe.spdx.SPDXDocument()
         runtime_doc.name = "runtime-" + pkg_name
-        runtime_doc.documentNamespace = get_doc_namespace(localdata, runtime_doc)
+        runtime_doc.documentNamespace = get_namespace(localdata, runtime_doc.name)
         runtime_doc.creationInfo.created = creation_time
         runtime_doc.creationInfo.comment = "This document was created by analyzing package runtime dependencies."
        runtime_doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
@@ -891,60 +708,6 @@ do_create_runtime_spdx[dirs] = "${SPDXRUNTIMEDEPLOY}"
 do_create_runtime_spdx[cleandirs] = "${SPDXRUNTIMEDEPLOY}"
 do_create_runtime_spdx[rdeptask] = "do_create_spdx"
 
-def spdx_get_src(d):
-    """
-    save patched source of the recipe in SPDX_WORKDIR.
-    """
-    import shutil
-    spdx_workdir = d.getVar('SPDXWORK')
-    spdx_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
-    pn = d.getVar('PN')
-
-    workdir = d.getVar("WORKDIR")
-
-    try:
-        # The kernel class functions require it to be on work-shared, so we dont change WORKDIR
-        if not is_work_shared_spdx(d):
-            # Change the WORKDIR to make do_unpack do_patch run in another dir.
-            d.setVar('WORKDIR', spdx_workdir)
-            # Restore the original path to recipe's native sysroot (it's relative to WORKDIR).
-            d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
-
-            # The changed 'WORKDIR' also caused 'B' changed, create dir 'B' for the
-            # possibly requiring of the following tasks (such as some recipes's
-            # do_patch required 'B' existed).
-            bb.utils.mkdirhier(d.getVar('B'))
-
-            bb.build.exec_func('do_unpack', d)
-        # Copy source of kernel to spdx_workdir
-        if is_work_shared_spdx(d):
-            share_src = d.getVar('WORKDIR')
-            d.setVar('WORKDIR', spdx_workdir)
-            d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
-            src_dir = spdx_workdir + "/" + d.getVar('PN')+ "-" + d.getVar('PV') + "-" + d.getVar('PR')
-            bb.utils.mkdirhier(src_dir)
-            if bb.data.inherits_class('kernel',d):
-                share_src = d.getVar('STAGING_KERNEL_DIR')
-            cmd_copy_share = "cp -rf " + share_src + "/* " + src_dir + "/"
-            cmd_copy_shared_res = os.popen(cmd_copy_share).read()
-            bb.note("cmd_copy_shared_result = " + cmd_copy_shared_res)
-
-            git_path = src_dir + "/.git"
-            if os.path.exists(git_path):
-                shutils.rmtree(git_path)
-
-        # Make sure gcc and kernel sources are patched only once
-        if not (d.getVar('SRC_URI') == "" or is_work_shared_spdx(d)):
-            bb.build.exec_func('do_patch', d)
-
-        # Some userland has no source.
-        if not os.path.exists( spdx_workdir ):
-            bb.utils.mkdirhier(spdx_workdir)
-    finally:
-        d.setVar("WORKDIR", workdir)
-
-spdx_get_src[vardepsexclude] += "STAGING_KERNEL_DIR"
-
 do_rootfs[recrdeptask] += "do_create_spdx do_create_runtime_spdx"
 do_rootfs[cleandirs] += "${SPDXIMAGEWORK}"
 
@@ -1019,7 +782,7 @@ def combine_spdx(d, rootfs_name, rootfs_deploydir, rootfs_spdxid, packages, spdx
 
     doc = oe.spdx.SPDXDocument()
     doc.name = rootfs_name
-    doc.documentNamespace = get_doc_namespace(d, doc)
+    doc.documentNamespace = get_namespace(d, doc.name)
     doc.creationInfo.created = creation_time
     doc.creationInfo.comment = "This document was created by analyzing the source of the Yocto recipe during the build."
     doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
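
Aside: the get_namespace() helper that replaces get_doc_namespace() only changes the interface (a plain name instead of a document object); the underlying scheme is two chained UUIDv5 derivations, which makes the document namespace deterministic for a given name. A standalone sketch using the class defaults and a hypothetical document name:

    import uuid

    uuid_namespace = "sbom.openembedded.org"   # SPDX_UUID_NAMESPACE default
    prefix = "http://spdx.org/spdxdocs"        # SPDX_NAMESPACE_PREFIX default
    name = "recipe-busybox"                    # example, e.g. "recipe-" + PN

    namespace_uuid = uuid.uuid5(uuid.NAMESPACE_DNS, uuid_namespace)
    print("%s/%s-%s" % (prefix, name, uuid.uuid5(namespace_uuid, name)))
    # uuid5 hashes its inputs, so rerunning always prints the same URI
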
diff --git a/meta/classes/devtool-source.bbclass b/meta/classes/devtool-source.bbclass
index 4158c20c7e..3e24800dcb 100644
--- a/meta/classes/devtool-source.bbclass
+++ b/meta/classes/devtool-source.bbclass
@@ -26,8 +26,6 @@
 
 
 DEVTOOL_TEMPDIR ?= ""
-DEVTOOL_PATCH_SRCDIR = "${DEVTOOL_TEMPDIR}/patchworkdir"
-
 
 python() {
     tempdir = d.getVar('DEVTOOL_TEMPDIR')
@@ -60,7 +58,6 @@ python() {
     else:
         unpacktask = 'do_unpack'
     d.appendVarFlag(unpacktask, 'postfuncs', ' devtool_post_unpack')
-    d.prependVarFlag('do_patch', 'prefuncs', ' devtool_pre_patch')
     d.appendVarFlag('do_patch', 'postfuncs', ' devtool_post_patch')
 
     # NOTE: in order for the patch stuff to be fully functional,
@@ -79,67 +76,23 @@ python devtool_post_unpack() {
 
     tempdir = d.getVar('DEVTOOL_TEMPDIR')
     workdir = d.getVar('WORKDIR')
+    unpackdir = d.getVar('UNPACKDIR')
     srcsubdir = d.getVar('S')
 
-    def _move_file(src, dst):
-        """Move a file. Creates all the directory components of destination path."""
-        dst_d = os.path.dirname(dst)
-        if dst_d:
-            bb.utils.mkdirhier(dst_d)
-        shutil.move(src, dst)
-
-    def _ls_tree(directory):
-        """Recursive listing of files in a directory"""
-        ret = []
-        for root, dirs, files in os.walk(directory):
-            ret.extend([os.path.relpath(os.path.join(root, fname), directory) for
-                        fname in files])
-        return ret
-
-    is_kernel_yocto = bb.data.inherits_class('kernel-yocto', d)
-    # Move local source files into separate subdir
-    recipe_patches = [os.path.basename(patch) for patch in
-                      oe.recipeutils.get_recipe_patches(d)]
+    # Add locally copied files to gitignore as we add back to the metadata directly
     local_files = oe.recipeutils.get_recipe_local_files(d)
-
-    if is_kernel_yocto:
-        for key in [f for f in local_files if f.endswith('scc')]:
-            with open(local_files[key], 'r') as sccfile:
-                for l in sccfile:
-                    line = l.split()
-                    if line and line[0] in ('kconf', 'patch'):
-                        cfg = os.path.join(os.path.dirname(local_files[key]), line[-1])
-                        if cfg not in local_files.values():
-                            local_files[line[-1]] = cfg
-                            shutil.copy2(cfg, workdir)
-
-    # Ignore local files with subdir={BP}
     srcabspath = os.path.abspath(srcsubdir)
     local_files = [fname for fname in local_files if
-                   os.path.exists(os.path.join(workdir, fname)) and
-                   (srcabspath == workdir or not
-                    os.path.join(workdir, fname).startswith(srcabspath +
-                        os.sep))]
+                   os.path.exists(os.path.join(unpackdir, fname)) and
+                   srcabspath == unpackdir]
     if local_files:
-        for fname in local_files:
-            _move_file(os.path.join(workdir, fname),
-                       os.path.join(tempdir, 'oe-local-files', fname))
-        with open(os.path.join(tempdir, 'oe-local-files', '.gitignore'),
-                  'w') as f:
-            f.write('# Ignore local files, by default. Remove this file '
-                    'if you want to commit the directory to Git\n*\n')
-
-    if srcsubdir == workdir:
-        # Find non-patch non-local sources that were "unpacked" to srctree
-        # directory
-        src_files = [fname for fname in _ls_tree(workdir) if
-                     os.path.basename(fname) not in recipe_patches]
-        srcsubdir = d.getVar('DEVTOOL_PATCH_SRCDIR')
-        # Move source files to S
-        for path in src_files:
-            _move_file(os.path.join(workdir, path),
-                       os.path.join(srcsubdir, path))
-    elif os.path.dirname(srcsubdir) != workdir:
+        with open(os.path.join(tempdir, '.gitignore'), 'a+') as f:
+            f.write('# Ignore local files, by default. Remove following lines'
+                    'if you want to commit the directory to Git\n')
+            for fname in local_files:
+                f.write('%s\n' % fname)
+
+    if os.path.dirname(srcsubdir) != workdir:
         # Handle if S is set to a subdirectory of the source
         srcsubdir = os.path.join(workdir, os.path.relpath(srcsubdir, workdir).split(os.sep)[0])
 
@@ -164,11 +117,6 @@ python devtool_post_unpack() {
         f.write(srcsubdir)
 }
 
-python devtool_pre_patch() {
-    if d.getVar('S') == d.getVar('WORKDIR'):
-        d.setVar('S', '${DEVTOOL_PATCH_SRCDIR}')
-}
-
 python devtool_post_patch() {
     import shutil
     tempdir = d.getVar('DEVTOOL_TEMPDIR')
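
Aside: with this refactor the local files stay where do_unpack put them (UNPACKDIR) and are only listed in the srctree's .gitignore, rather than being moved into an oe-local-files subdirectory. A rough sketch of the new filter, with illustrative paths and file names:

    import os

    unpackdir = "/work/foo/sources"                 # stand-in for UNPACKDIR
    srcabspath = os.path.abspath(unpackdir)         # os.path.abspath(S)
    local_files = ["defconfig", "0001-fix.patch"]   # hypothetical names

    # A file is kept only if it exists in UNPACKDIR and S is UNPACKDIR itself
    kept = [fname for fname in local_files
            if os.path.exists(os.path.join(unpackdir, fname))
            and srcabspath == unpackdir]
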
diff --git a/meta/classes/multilib.bbclass b/meta/classes/multilib.bbclass
index b6c09969b1..a4151658a6 100644
--- a/meta/classes/multilib.bbclass
+++ b/meta/classes/multilib.bbclass
@@ -5,30 +5,30 @@
 #
 
 python multilib_virtclass_handler () {
-    cls = e.data.getVar("BBEXTENDCURR")
-    variant = e.data.getVar("BBEXTENDVARIANT")
+    cls = d.getVar("BBEXTENDCURR")
+    variant = d.getVar("BBEXTENDVARIANT")
     if cls != "multilib" or not variant:
         return
 
-    localdata = bb.data.createCopy(e.data)
+    localdata = bb.data.createCopy(d)
     localdata.delVar('TMPDIR')
-    e.data.setVar('STAGING_KERNEL_DIR', localdata.getVar('STAGING_KERNEL_DIR'))
+    d.setVar('STAGING_KERNEL_DIR', localdata.getVar('STAGING_KERNEL_DIR'))
 
     # There should only be one kernel in multilib configs
     # We also skip multilib setup for module packages.
-    provides = (e.data.getVar("PROVIDES") or "").split()
+    provides = (d.getVar("PROVIDES") or "").split()
     non_ml_recipes = d.getVar('NON_MULTILIB_RECIPES').split()
-    bpn = e.data.getVar("BPN")
-    if "virtual/kernel" in provides or \
-            bb.data.inherits_class('module-base', e.data) or \
-            bpn in non_ml_recipes:
+    bpn = d.getVar("BPN")
+    if ("virtual/kernel" in provides
+            or bb.data.inherits_class('module-base', d)
+            or bpn in non_ml_recipes):
         raise bb.parse.SkipRecipe("We shouldn't have multilib variants for %s" % bpn)
 
-    save_var_name=e.data.getVar("MULTILIB_SAVE_VARNAME") or ""
+    save_var_name = d.getVar("MULTILIB_SAVE_VARNAME") or ""
     for name in save_var_name.split():
-        val=e.data.getVar(name)
+        val = d.getVar(name)
         if val:
-            e.data.setVar(name + "_MULTILIB_ORIGINAL", val)
+            d.setVar(name + "_MULTILIB_ORIGINAL", val)
 
     # We nearly don't need this but dependencies on NON_MULTILIB_RECIPES don't work without it
     d.setVar("SSTATE_ARCHS_TUNEPKG", "${@all_multilib_tune_values(d, 'TUNE_PKGARCH')}")
@@ -36,66 +36,67 @@ python multilib_virtclass_handler () {
36 overrides = e.data.getVar("OVERRIDES", False) 36 overrides = e.data.getVar("OVERRIDES", False)
37 pn = e.data.getVar("PN", False) 37 pn = e.data.getVar("PN", False)
38 overrides = overrides.replace("pn-${PN}", "pn-${PN}:pn-" + pn) 38 overrides = overrides.replace("pn-${PN}", "pn-${PN}:pn-" + pn)
39 e.data.setVar("OVERRIDES", overrides) 39 d.setVar("OVERRIDES", overrides)
40 40
41 if bb.data.inherits_class('image', e.data): 41 if bb.data.inherits_class('image', d):
42 e.data.setVar("MLPREFIX", variant + "-") 42 d.setVar("MLPREFIX", variant + "-")
43 e.data.setVar("PN", variant + "-" + e.data.getVar("PN", False)) 43 d.setVar("PN", variant + "-" + d.getVar("PN", False))
44 e.data.setVar('SDKTARGETSYSROOT', e.data.getVar('SDKTARGETSYSROOT')) 44 d.setVar('SDKTARGETSYSROOT', d.getVar('SDKTARGETSYSROOT'))
45 override = ":virtclass-multilib-" + variant 45 override = ":virtclass-multilib-" + variant
46 e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + override) 46 d.setVar("OVERRIDES", d.getVar("OVERRIDES", False) + override)
47 target_vendor = e.data.getVar("TARGET_VENDOR:" + "virtclass-multilib-" + variant, False) 47 target_vendor = d.getVar("TARGET_VENDOR:" + "virtclass-multilib-" + variant, False)
48 if target_vendor: 48 if target_vendor:
49 e.data.setVar("TARGET_VENDOR", target_vendor) 49 d.setVar("TARGET_VENDOR", target_vendor)
50 return 50 return
51 51
52 if bb.data.inherits_class('cross-canadian', e.data): 52 if bb.data.inherits_class('cross-canadian', d):
53 # Multilib cross-candian should use the same nativesdk sysroot without MLPREFIX 53 # Multilib cross-candian should use the same nativesdk sysroot without MLPREFIX
54 e.data.setVar("RECIPE_SYSROOT", "${WORKDIR}/recipe-sysroot") 54 d.setVar("RECIPE_SYSROOT", "${WORKDIR}/recipe-sysroot")
55 e.data.setVar("STAGING_DIR_TARGET", "${WORKDIR}/recipe-sysroot") 55 d.setVar("STAGING_DIR_TARGET", "${WORKDIR}/recipe-sysroot")
56 e.data.setVar("STAGING_DIR_HOST", "${WORKDIR}/recipe-sysroot") 56 d.setVar("STAGING_DIR_HOST", "${WORKDIR}/recipe-sysroot")
57 e.data.setVar("RECIPE_SYSROOT_MANIFEST_SUBDIR", "nativesdk-" + variant) 57 d.setVar("RECIPE_SYSROOT_MANIFEST_SUBDIR", "nativesdk-" + variant)
58 e.data.setVar("MLPREFIX", variant + "-") 58 d.setVar("MLPREFIX", variant + "-")
59 override = ":virtclass-multilib-" + variant 59 override = ":virtclass-multilib-" + variant
60 e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + override) 60 d.setVar("OVERRIDES", d.getVar("OVERRIDES", False) + override)
61 return 61 return
62 62
63 if bb.data.inherits_class('native', e.data): 63 if bb.data.inherits_class('native', d):
64 raise bb.parse.SkipRecipe("We can't extend native recipes") 64 raise bb.parse.SkipRecipe("We can't extend native recipes")
65 65
66 if bb.data.inherits_class('nativesdk', e.data) or bb.data.inherits_class('crosssdk', e.data): 66 if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d):
67 raise bb.parse.SkipRecipe("We can't extend nativesdk recipes") 67 raise bb.parse.SkipRecipe("We can't extend nativesdk recipes")
68 68
69 if bb.data.inherits_class('allarch', e.data) and not d.getVar('MULTILIB_VARIANTS') \ 69 if (bb.data.inherits_class('allarch', d)
70 and not bb.data.inherits_class('packagegroup', e.data): 70 and not d.getVar('MULTILIB_VARIANTS')
71 and not bb.data.inherits_class('packagegroup', d)):
71 raise bb.parse.SkipRecipe("Don't extend allarch recipes which are not packagegroups") 72 raise bb.parse.SkipRecipe("Don't extend allarch recipes which are not packagegroups")
72 73
73 # Expand this since this won't work correctly once we set a multilib into place 74 # Expand this since this won't work correctly once we set a multilib into place
74 e.data.setVar("ALL_MULTILIB_PACKAGE_ARCHS", e.data.getVar("ALL_MULTILIB_PACKAGE_ARCHS")) 75 d.setVar("ALL_MULTILIB_PACKAGE_ARCHS", d.getVar("ALL_MULTILIB_PACKAGE_ARCHS"))
75 76
76 override = ":virtclass-multilib-" + variant 77 override = ":virtclass-multilib-" + variant
77 78
78 skip_msg = e.data.getVarFlag('SKIP_RECIPE', e.data.getVar('PN')) 79 skip_msg = d.getVarFlag('SKIP_RECIPE', d.getVar('PN'))
79 if skip_msg: 80 if skip_msg:
80 pn_new = variant + "-" + e.data.getVar('PN') 81 pn_new = variant + "-" + d.getVar('PN')
81 if not e.data.getVarFlag('SKIP_RECIPE', pn_new): 82 if not d.getVarFlag('SKIP_RECIPE', pn_new):
82 e.data.setVarFlag('SKIP_RECIPE', pn_new, skip_msg) 83 d.setVarFlag('SKIP_RECIPE', pn_new, skip_msg)
83 84
84 e.data.setVar("MLPREFIX", variant + "-") 85 d.setVar("MLPREFIX", variant + "-")
85 e.data.setVar("PN", variant + "-" + e.data.getVar("PN", False)) 86 d.setVar("PN", variant + "-" + d.getVar("PN", False))
86 e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + override) 87 d.setVar("OVERRIDES", d.getVar("OVERRIDES", False) + override)
87 88
88 # Expand INCOMPATIBLE_LICENSE_EXCEPTIONS with multilib prefix 89 # Expand INCOMPATIBLE_LICENSE_EXCEPTIONS with multilib prefix
89 pkgs = e.data.getVar("INCOMPATIBLE_LICENSE_EXCEPTIONS") 90 pkgs = d.getVar("INCOMPATIBLE_LICENSE_EXCEPTIONS")
90 if pkgs: 91 if pkgs:
91 for pkg in pkgs.split(): 92 for pkg in pkgs.split():
92 pkgs += " " + variant + "-" + pkg 93 pkgs += " " + variant + "-" + pkg
93 e.data.setVar("INCOMPATIBLE_LICENSE_EXCEPTIONS", pkgs) 94 d.setVar("INCOMPATIBLE_LICENSE_EXCEPTIONS", pkgs)
94 95
95 # DEFAULTTUNE can change TARGET_ARCH override so expand this now before update_data 96 # DEFAULTTUNE can change TARGET_ARCH override so expand this now before update_data
96 newtune = e.data.getVar("DEFAULTTUNE:" + "virtclass-multilib-" + variant, False) 97 newtune = d.getVar("DEFAULTTUNE:" + "virtclass-multilib-" + variant, False)
97 if newtune: 98 if newtune:
98 e.data.setVar("DEFAULTTUNE", newtune) 99 d.setVar("DEFAULTTUNE", newtune)
99} 100}
100 101
101addhandler multilib_virtclass_handler 102addhandler multilib_virtclass_handler
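
Aside: the wholesale e.data → d substitution works because BitBake binds the triggering event's datastore to d inside event handler bodies, so handlers read like any other anonymous python function. A toy handler written under that assumption (handler name and message are made up):

    python example_handler () {
        # d here is the same datastore the old code reached via e.data
        bb.note("Finalising %s" % d.getVar("PN"))
    }
    addhandler example_handler
    example_handler[eventmask] = "bb.event.RecipePreFinalise"
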
diff --git a/meta/classes/multilib_global.bbclass b/meta/classes/multilib_global.bbclass
index 6095d278dd..973ac9130b 100644
--- a/meta/classes/multilib_global.bbclass
+++ b/meta/classes/multilib_global.bbclass
@@ -171,24 +171,23 @@ def preferred_ml_updates(d):
     d.appendVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", " " + " ".join(extras))
 
 python multilib_virtclass_handler_vendor () {
-    if isinstance(e, bb.event.ConfigParsed):
-        for v in e.data.getVar("MULTILIB_VARIANTS").split():
-            if e.data.getVar("TARGET_VENDOR:virtclass-multilib-" + v, False) is None:
-                e.data.setVar("TARGET_VENDOR:virtclass-multilib-" + v, e.data.getVar("TARGET_VENDOR", False) + "ml" + v)
-        preferred_ml_updates(e.data)
+    for v in d.getVar("MULTILIB_VARIANTS").split():
+        if d.getVar("TARGET_VENDOR:virtclass-multilib-" + v, False) is None:
+            d.setVar("TARGET_VENDOR:virtclass-multilib-" + v, d.getVar("TARGET_VENDOR", False) + "ml" + v)
+    preferred_ml_updates(d)
 }
 addhandler multilib_virtclass_handler_vendor
 multilib_virtclass_handler_vendor[eventmask] = "bb.event.ConfigParsed"
 
 python multilib_virtclass_handler_global () {
-    variant = e.data.getVar("BBEXTENDVARIANT")
+    variant = d.getVar("BBEXTENDVARIANT")
     if variant:
         return
 
     non_ml_recipes = d.getVar('NON_MULTILIB_RECIPES').split()
 
-    if bb.data.inherits_class('kernel', e.data) or \
-            bb.data.inherits_class('module-base', e.data) or \
+    if bb.data.inherits_class('kernel', d) or \
+            bb.data.inherits_class('module-base', d) or \
             d.getVar('BPN') in non_ml_recipes:
 
         # We need to avoid expanding KERNEL_VERSION which we can do by deleting it
@@ -197,7 +196,7 @@ python multilib_virtclass_handler_global () {
197 localdata.delVar("KERNEL_VERSION") 196 localdata.delVar("KERNEL_VERSION")
198 localdata.delVar("KERNEL_VERSION_PKG_NAME") 197 localdata.delVar("KERNEL_VERSION_PKG_NAME")
199 198
200 variants = (e.data.getVar("MULTILIB_VARIANTS") or "").split() 199 variants = (d.getVar("MULTILIB_VARIANTS") or "").split()
201 200
202 import oe.classextend 201 import oe.classextend
203 clsextends = [] 202 clsextends = []
@@ -208,22 +207,22 @@ python multilib_virtclass_handler_global () {
     origprovs = provs = localdata.getVar("PROVIDES") or ""
     for clsextend in clsextends:
         provs = provs + " " + clsextend.map_variable("PROVIDES", setvar=False)
-    e.data.setVar("PROVIDES", provs)
+    d.setVar("PROVIDES", provs)
 
     # Process RPROVIDES
     origrprovs = rprovs = localdata.getVar("RPROVIDES") or ""
     for clsextend in clsextends:
         rprovs = rprovs + " " + clsextend.map_variable("RPROVIDES", setvar=False)
     if rprovs.strip():
-        e.data.setVar("RPROVIDES", rprovs)
+        d.setVar("RPROVIDES", rprovs)
 
     # Process RPROVIDES:${PN}...
-    for pkg in (e.data.getVar("PACKAGES") or "").split():
+    for pkg in (d.getVar("PACKAGES") or "").split():
         origrprovs = rprovs = localdata.getVar("RPROVIDES:%s" % pkg) or ""
         for clsextend in clsextends:
             rprovs = rprovs + " " + clsextend.map_variable("RPROVIDES:%s" % pkg, setvar=False)
             rprovs = rprovs + " " + clsextend.extname + "-" + pkg
-        e.data.setVar("RPROVIDES:%s" % pkg, rprovs)
+        d.setVar("RPROVIDES:%s" % pkg, rprovs)
 }
 
 addhandler multilib_virtclass_handler_global
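
Aside: dropping the isinstance(e, bb.event.ConfigParsed) guard in multilib_virtclass_handler_vendor is safe because the eventmask flag already limits delivery to that one event class, so the body can run unconditionally. A toy handler showing the pattern (name and body are illustrative):

    python toy_vendor_handler () {
        # Only ever fires for ConfigParsed, so no isinstance() check is needed
        bb.note("variants: %s" % (d.getVar("MULTILIB_VARIANTS") or ""))
    }
    addhandler toy_vendor_handler
    toy_vendor_handler[eventmask] = "bb.event.ConfigParsed"
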
diff --git a/meta/classes/siteconfig.bbclass b/meta/classes/siteconfig.bbclass
deleted file mode 100644
index 953cafd285..0000000000
--- a/meta/classes/siteconfig.bbclass
+++ /dev/null
@@ -1,39 +0,0 @@
-#
-# Copyright OpenEmbedded Contributors
-#
-# SPDX-License-Identifier: MIT
-#
-
-python siteconfig_do_siteconfig () {
-    shared_state = sstate_state_fromvars(d)
-    if shared_state['task'] != 'populate_sysroot':
-        return
-    if not os.path.isdir(os.path.join(d.getVar('FILE_DIRNAME'), 'site_config')):
-        bb.debug(1, "No site_config directory, skipping do_siteconfig")
-        return
-    sstate_install(shared_state, d)
-    bb.build.exec_func('do_siteconfig_gencache', d)
-    sstate_clean(shared_state, d)
-}
-
-EXTRASITECONFIG ?= ""
-
-siteconfig_do_siteconfig_gencache () {
-    mkdir -p ${WORKDIR}/site_config_${MACHINE}
-    gen-site-config ${FILE_DIRNAME}/site_config \
-        >${WORKDIR}/site_config_${MACHINE}/configure.ac
-    cd ${WORKDIR}/site_config_${MACHINE}
-    autoconf
-    rm -f ${BPN}_cache
-    CONFIG_SITE="" ${EXTRASITECONFIG} ./configure ${CONFIGUREOPTS} --cache-file ${BPN}_cache
-    sed -n -e "/ac_cv_c_bigendian/p" -e "/ac_cv_sizeof_/p" \
-        -e "/ac_cv_type_/p" -e "/ac_cv_header_/p" -e "/ac_cv_func_/p" \
-        < ${BPN}_cache > ${BPN}_config
-    mkdir -p ${SYSROOT_DESTDIR}${datadir}/${TARGET_SYS}_config_site.d
-    cp ${BPN}_config ${SYSROOT_DESTDIR}${datadir}/${TARGET_SYS}_config_site.d
-
-}
-
-do_populate_sysroot[sstate-interceptfuncs] += "do_siteconfig "
-
-EXPORT_FUNCTIONS do_siteconfig do_siteconfig_gencache
diff --git a/meta/classes/spdx-common.bbclass b/meta/classes/spdx-common.bbclass
new file mode 100644
index 0000000000..03f1d0cc27
--- /dev/null
+++ b/meta/classes/spdx-common.bbclass
@@ -0,0 +1,257 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+SPDX_VERSION ??= ""
+DEPLOY_DIR_SPDX ??= "${DEPLOY_DIR}/spdx/${SPDX_VERSION}"
+
+# The product name that the CVE database uses. Defaults to BPN, but may need to
+# be overriden per recipe (for example tiff.bb sets CVE_PRODUCT=libtiff).
+CVE_PRODUCT ??= "${BPN}"
+CVE_VERSION ??= "${PV}"
+
+SPDXDIR ??= "${WORKDIR}/spdx/${SPDX_VERSION}"
+SPDXDEPLOY = "${SPDXDIR}/deploy"
+SPDXWORK = "${SPDXDIR}/work"
+SPDXIMAGEWORK = "${SPDXDIR}/image-work"
+SPDXSDKWORK = "${SPDXDIR}/sdk-work"
+SPDXDEPS = "${SPDXDIR}/deps.json"
+
+SPDX_TOOL_NAME ??= "oe-spdx-creator"
+SPDX_TOOL_VERSION ??= "1.0"
+
+SPDXRUNTIMEDEPLOY = "${SPDXDIR}/runtime-deploy"
+
+SPDX_INCLUDE_SOURCES ??= "0"
+SPDX_ARCHIVE_SOURCES ??= "0"
+SPDX_ARCHIVE_PACKAGED ??= "0"
+
+SPDX_UUID_NAMESPACE ??= "sbom.openembedded.org"
+SPDX_NAMESPACE_PREFIX ??= "http://spdx.org/spdxdocs"
+SPDX_PRETTY ??= "0"
+
+SPDX_LICENSES ??= "${COREBASE}/meta/files/spdx-licenses.json"
+
+SPDX_CUSTOM_ANNOTATION_VARS ??= ""
+
+SPDX_ORG ??= "OpenEmbedded ()"
+SPDX_SUPPLIER ??= "Organization: ${SPDX_ORG}"
+SPDX_SUPPLIER[doc] = "The SPDX PackageSupplier field for SPDX packages created from \
+    this recipe. For SPDX documents create using this class during the build, this \
+    is the contact information for the person or organization who is doing the \
+    build."
+
+def extract_licenses(filename):
+    import re
+
+    lic_regex = re.compile(rb'^\W*SPDX-License-Identifier:\s*([ \w\d.()+-]+?)(?:\s+\W*)?$', re.MULTILINE)
+
+    try:
+        with open(filename, 'rb') as f:
+            size = min(15000, os.stat(filename).st_size)
+            txt = f.read(size)
+            licenses = re.findall(lic_regex, txt)
+            if licenses:
+                ascii_licenses = [lic.decode('ascii') for lic in licenses]
+                return ascii_licenses
+    except Exception as e:
+        bb.warn(f"Exception reading {filename}: {e}")
+    return []
+
+def is_work_shared_spdx(d):
+    return bb.data.inherits_class('kernel', d) or ('work-shared' in d.getVar('WORKDIR'))
+
+def get_json_indent(d):
+    if d.getVar("SPDX_PRETTY") == "1":
+        return 2
+    return None
+
+python() {
+    import json
+    if d.getVar("SPDX_LICENSE_DATA"):
+        return
+
+    with open(d.getVar("SPDX_LICENSES"), "r") as f:
+        data = json.load(f)
+        # Transform the license array to a dictionary
+        data["licenses"] = {l["licenseId"]: l for l in data["licenses"]}
+        d.setVar("SPDX_LICENSE_DATA", data)
+}
+
+def process_sources(d):
+    pn = d.getVar('PN')
+    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
+    if pn in assume_provided:
+        for p in d.getVar("PROVIDES").split():
+            if p != pn:
+                pn = p
+                break
+
+    # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
+    # so avoid archiving source here.
+    if pn.startswith('glibc-locale'):
+        return False
+    if d.getVar('PN') == "libtool-cross":
+        return False
+    if d.getVar('PN') == "libgcc-initial":
+        return False
+    if d.getVar('PN') == "shadow-sysroot":
+        return False
+
+    # We just archive gcc-source for all the gcc related recipes
+    if d.getVar('BPN') in ['gcc', 'libgcc']:
+        bb.debug(1, 'spdx: There is bug in scan of %s is, do nothing' % pn)
+        return False
+
+    return True
+
+def collect_direct_deps(d, dep_task):
+    current_task = "do_" + d.getVar("BB_CURRENTTASK")
+    pn = d.getVar("PN")
+
+    taskdepdata = d.getVar("BB_TASKDEPDATA", False)
+
+    for this_dep in taskdepdata.values():
+        if this_dep[0] == pn and this_dep[1] == current_task:
+            break
+    else:
+        bb.fatal(f"Unable to find this {pn}:{current_task} in taskdepdata")
+
+    deps = set()
+
+    for dep_name in this_dep.deps:
+        dep_data = taskdepdata[dep_name]
+        if dep_data.taskname == dep_task and dep_data.pn != pn:
+            deps.add((dep_data.pn, dep_data.hashfn, dep_name in this_dep.taskhash_deps))
+
+    return sorted(deps)
+
+collect_direct_deps[vardepsexclude] += "BB_TASKDEPDATA"
+collect_direct_deps[vardeps] += "DEPENDS"
+
+python do_collect_spdx_deps() {
+    # This task calculates the build time dependencies of the recipe, and is
+    # required because while a task can deptask on itself, those dependencies
+    # do not show up in BB_TASKDEPDATA. To work around that, this task does the
+    # deptask on do_create_spdx and writes out the dependencies it finds, then
+    # do_create_spdx reads in the found dependencies when writing the actual
+    # SPDX document
+    import json
+    from pathlib import Path
+
+    spdx_deps_file = Path(d.getVar("SPDXDEPS"))
+
+    deps = collect_direct_deps(d, "do_create_spdx")
+
+    with spdx_deps_file.open("w") as f:
+        json.dump(deps, f)
+}
+# NOTE: depending on do_unpack is a hack that is necessary to get it's dependencies for archive the source
+addtask do_collect_spdx_deps after do_unpack
+do_collect_spdx_deps[depends] += "${PATCHDEPENDENCY}"
+do_collect_spdx_deps[deptask] = "do_create_spdx"
+do_collect_spdx_deps[dirs] = "${SPDXDIR}"
+
+def get_spdx_deps(d):
+    import json
+    from pathlib import Path
+
+    spdx_deps_file = Path(d.getVar("SPDXDEPS"))
+
+    with spdx_deps_file.open("r") as f:
+        return json.load(f)
+
+def collect_package_providers(d):
+    from pathlib import Path
+    import oe.sbom
+    import oe.spdx
+    import json
+
+    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+
+    providers = {}
+
+    deps = collect_direct_deps(d, "do_create_spdx")
+    deps.append((d.getVar("PN"), d.getVar("BB_HASHFILENAME"), True))
+
+    for dep_pn, dep_hashfn, _ in deps:
+        localdata = d
+        recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
+        if not recipe_data:
+            localdata = bb.data.createCopy(d)
+            localdata.setVar("PKGDATA_DIR", "${PKGDATA_DIR_SDK}")
+            recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
+
+        for pkg in recipe_data.get("PACKAGES", "").split():
+
+            pkg_data = oe.packagedata.read_subpkgdata_dict(pkg, localdata)
+            rprovides = set(n for n, _ in bb.utils.explode_dep_versions2(pkg_data.get("RPROVIDES", "")).items())
+            rprovides.add(pkg)
+
+            if "PKG" in pkg_data:
+                pkg = pkg_data["PKG"]
+                rprovides.add(pkg)
+
+            for r in rprovides:
+                providers[r] = (pkg, dep_hashfn)
+
+    return providers
+
+collect_package_providers[vardepsexclude] += "BB_TASKDEPDATA"
+
+def spdx_get_src(d):
+    """
+    save patched source of the recipe in SPDX_WORKDIR.
+    """
+    import shutil
+    spdx_workdir = d.getVar('SPDXWORK')
+    spdx_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
+    pn = d.getVar('PN')
+
+    workdir = d.getVar("WORKDIR")
+
+    try:
+        # The kernel class functions require it to be on work-shared, so we dont change WORKDIR
+        if not is_work_shared_spdx(d):
+            # Change the WORKDIR to make do_unpack do_patch run in another dir.
+            d.setVar('WORKDIR', spdx_workdir)
+            # Restore the original path to recipe's native sysroot (it's relative to WORKDIR).
+            d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
+
+            # The changed 'WORKDIR' also caused 'B' changed, create dir 'B' for the
+            # possibly requiring of the following tasks (such as some recipes's
+            # do_patch required 'B' existed).
+            bb.utils.mkdirhier(d.getVar('B'))
+
+            bb.build.exec_func('do_unpack', d)
+        # Copy source of kernel to spdx_workdir
+        if is_work_shared_spdx(d):
+            share_src = d.getVar('WORKDIR')
+            d.setVar('WORKDIR', spdx_workdir)
+            d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
+            src_dir = spdx_workdir + "/" + d.getVar('PN')+ "-" + d.getVar('PV') + "-" + d.getVar('PR')
+            bb.utils.mkdirhier(src_dir)
+            if bb.data.inherits_class('kernel',d):
+                share_src = d.getVar('STAGING_KERNEL_DIR')
+            cmd_copy_share = "cp -rf " + share_src + "/* " + src_dir + "/"
+            cmd_copy_shared_res = os.popen(cmd_copy_share).read()
+            bb.note("cmd_copy_shared_result = " + cmd_copy_shared_res)
+
+            git_path = src_dir + "/.git"
+            if os.path.exists(git_path):
+                shutils.rmtree(git_path)
+
+        # Make sure gcc and kernel sources are patched only once
+        if not (d.getVar('SRC_URI') == "" or is_work_shared_spdx(d)):
+            bb.build.exec_func('do_patch', d)
+
+        # Some userland has no source.
+        if not os.path.exists( spdx_workdir ):
+            bb.utils.mkdirhier(spdx_workdir)
+    finally:
+        d.setVar("WORKDIR", workdir)
+
+spdx_get_src[vardepsexclude] += "STAGING_KERNEL_DIR"
+
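
Aside: compared with the version removed from create-spdx-2.2.bbclass, collect_direct_deps() here reads BB_TASKDEPDATA entries through named fields (this_dep.deps, dep_data.taskname, dep_data.pn, dep_data.hashfn, this_dep.taskhash_deps) instead of positional indices such as this_dep[3] or dep_data[1]. A stand-in record type sketching the access pattern (the field list is an assumption for illustration, not BitBake's actual type):

    from collections import namedtuple

    # Hypothetical stand-in for one BB_TASKDEPDATA entry
    TaskDep = namedtuple("TaskDep", "pn taskname deps hashfn taskhash_deps")

    dep_data = TaskDep("zlib", "do_create_spdx", [], "abc123", [])
    print(dep_data.taskname)   # attribute access; the old code used dep_data[1]
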