-rw-r--r--  meta/classes/create-spdx-2.2.bbclass        |  23
-rw-r--r--  meta/classes/create-spdx-3.0.bbclass        |  22
-rw-r--r--  meta/classes/create-spdx-image-3.0.bbclass  |   3
-rw-r--r--  meta/classes/spdx-common.bbclass            | 197
-rw-r--r--  meta/lib/oe/sbom30.py                       |  21
-rw-r--r--  meta/lib/oe/spdx_common.py                  | 229
6 files changed, 271 insertions(+), 224 deletions(-)
diff --git a/meta/classes/create-spdx-2.2.bbclass b/meta/classes/create-spdx-2.2.bbclass
index 239a95da14..865323d66a 100644
--- a/meta/classes/create-spdx-2.2.bbclass
+++ b/meta/classes/create-spdx-2.2.bbclass
@@ -38,6 +38,12 @@ def recipe_spdx_is_native(d, recipe):
         a.annotator == "Tool: %s - %s" % (d.getVar("SPDX_TOOL_NAME"), d.getVar("SPDX_TOOL_VERSION")) and
         a.comment == "isNative" for a in recipe.annotations)
 
+def get_json_indent(d):
+    if d.getVar("SPDX_PRETTY") == "1":
+        return 2
+    return None
+
+
 def convert_license_to_spdx(lic, document, d, existing={}):
     from pathlib import Path
     import oe.spdx
@@ -113,6 +119,7 @@ def convert_license_to_spdx(lic, document, d, existing={}):
 def add_package_files(d, doc, spdx_pkg, topdir, get_spdxid, get_types, *, archive=None, ignore_dirs=[], ignore_top_level_dirs=[]):
     from pathlib import Path
     import oe.spdx
+    import oe.spdx_common
     import hashlib
 
     source_date_epoch = d.getVar("SOURCE_DATE_EPOCH")
@@ -165,7 +172,7 @@ def add_package_files(d, doc, spdx_pkg, topdir, get_spdxid, get_types, *, archiv
                 ))
 
             if "SOURCE" in spdx_file.fileTypes:
-                extracted_lics = extract_licenses(filepath)
+                extracted_lics = oe.spdx_common.extract_licenses(filepath)
                 if extracted_lics:
                     spdx_file.licenseInfoInFiles = extracted_lics
 
@@ -256,6 +263,7 @@ def collect_dep_recipes(d, doc, spdx_recipe):
     from pathlib import Path
     import oe.sbom
     import oe.spdx
+    import oe.spdx_common
 
     deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
     package_archs = d.getVar("SSTATE_ARCHS").split()
@@ -263,7 +271,7 @@
 
     dep_recipes = []
 
-    deps = get_spdx_deps(d)
+    deps = oe.spdx_common.get_spdx_deps(d)
 
     for dep_pn, dep_hashfn, in_taskhash in deps:
         # If this dependency is not calculated in the taskhash skip it.
@@ -386,6 +394,7 @@ python do_create_spdx() {
     from datetime import datetime, timezone
     import oe.sbom
     import oe.spdx
+    import oe.spdx_common
     import uuid
     from pathlib import Path
     from contextlib import contextmanager
@@ -478,10 +487,10 @@
 
     add_download_packages(d, doc, recipe)
 
-    if process_sources(d) and include_sources:
+    if oe.spdx_common.process_sources(d) and include_sources:
         recipe_archive = deploy_dir_spdx / "recipes" / (doc.name + ".tar.zst")
         with optional_tarfile(recipe_archive, archive_sources) as archive:
-            spdx_get_src(d)
+            oe.spdx_common.get_patched_src(d)
 
             add_package_files(
                 d,
@@ -588,6 +597,7 @@ python do_create_runtime_spdx() {
     from datetime import datetime, timezone
     import oe.sbom
     import oe.spdx
+    import oe.spdx_common
     import oe.packagedata
     from pathlib import Path
 
@@ -597,7 +607,7 @@
 
     creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
 
-    providers = collect_package_providers(d)
+    providers = oe.spdx_common.collect_package_providers(d)
     pkg_arch = d.getVar("SSTATE_PKGARCH")
     package_archs = d.getVar("SSTATE_ARCHS").split()
     package_archs.reverse()
@@ -778,6 +788,7 @@ def combine_spdx(d, rootfs_name, rootfs_deploydir, rootfs_spdxid, packages, spdx
     import os
     import oe.spdx
     import oe.sbom
+    import oe.spdx_common
     import io
     import json
     from datetime import timezone, datetime
@@ -785,7 +796,7 @@
     import tarfile
     import bb.compress.zstd
 
-    providers = collect_package_providers(d)
+    providers = oe.spdx_common.collect_package_providers(d)
     package_archs = d.getVar("SSTATE_ARCHS").split()
     package_archs.reverse()
 
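The `get_json_indent()` helper added above maps `SPDX_PRETTY = "1"` onto the `indent` argument of `json.dumps()`. A minimal standalone sketch of the effect; the `FakeDatastore` class is an illustrative stand-in for BitBake's datastore, not a real API:

```python
import json

class FakeDatastore:
    """Illustrative stand-in for BitBake's datastore object."""
    def __init__(self, values):
        self.values = values

    def getVar(self, name):
        return self.values.get(name)

def get_json_indent(d):
    # Mirrors the helper above: pretty-print with two-space indentation
    # when SPDX_PRETTY is enabled, otherwise emit compact single-line JSON.
    if d.getVar("SPDX_PRETTY") == "1":
        return 2
    return None

doc = {"spdxVersion": "SPDX-2.2", "name": "example"}
print(json.dumps(doc, indent=get_json_indent(FakeDatastore({"SPDX_PRETTY": "1"}))))  # multi-line
print(json.dumps(doc, indent=get_json_indent(FakeDatastore({}))))                    # one line
```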
diff --git a/meta/classes/create-spdx-3.0.bbclass b/meta/classes/create-spdx-3.0.bbclass
index 51168e4876..a930ea8115 100644
--- a/meta/classes/create-spdx-3.0.bbclass
+++ b/meta/classes/create-spdx-3.0.bbclass
@@ -350,20 +350,21 @@ def collect_dep_objsets(d, build):
     from pathlib import Path
     import oe.sbom30
     import oe.spdx30
+    import oe.spdx_common
 
-    deps = get_spdx_deps(d)
+    deps = oe.spdx_common.get_spdx_deps(d)
 
     dep_objsets = []
     dep_builds = set()
 
     dep_build_spdxids = set()
-    for dep_pn, _, in_taskhash in deps:
-        bb.debug(1, "Fetching SPDX for dependency %s" % (dep_pn))
-        dep_build, dep_objset = oe.sbom30.find_root_obj_in_jsonld(d, "recipes", dep_pn, oe.spdx30.build_Build)
+    for dep in deps:
+        bb.debug(1, "Fetching SPDX for dependency %s" % (dep.pn))
+        dep_build, dep_objset = oe.sbom30.find_root_obj_in_jsonld(d, "recipes", dep.pn, oe.spdx30.build_Build)
         # If the dependency is part of the taskhash, return it to be linked
         # against. Otherwise, it cannot be linked against because this recipe
         # will not be rebuilt if the dependency changes
-        if in_taskhash:
+        if dep.in_taskhash:
             dep_objsets.append(dep_objset)
 
         # The build _can_ be linked against (by alias)
@@ -519,6 +520,7 @@ def set_purposes(d, element, *var_names, force_purposes=[]):
 python do_create_spdx() {
     import oe.sbom30
     import oe.spdx30
+    import oe.spdx_common
     from pathlib import Path
     from contextlib import contextmanager
     import oe.cve_check
@@ -593,9 +595,9 @@
         [recipe_spdx_license],
     )
 
-    if process_sources(d) and include_sources:
+    if oe.spdx_common.process_sources(d) and include_sources:
         bb.debug(1, "Adding source files to SPDX")
-        spdx_get_src(d)
+        oe.spdx_common.get_patched_src(d)
 
         build_inputs |= add_package_files(
             d,
@@ -844,6 +846,7 @@ do_create_spdx[depends] += "${PATCHDEPENDENCY}"
 python do_create_package_spdx() {
     import oe.sbom30
     import oe.spdx30
+    import oe.spdx_common
     import oe.packagedata
     from pathlib import Path
 
@@ -851,7 +854,7 @@ python do_create_package_spdx() {
     deploydir = Path(d.getVar("SPDXRUNTIMEDEPLOY"))
     is_native = bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d)
 
-    providers = collect_package_providers(d)
+    providers = oe.spdx_common.collect_package_providers(d)
     pkg_arch = d.getVar("SSTATE_PKGARCH")
 
     if not is_native:
@@ -957,6 +960,7 @@ do_create_package_spdx[rdeptask] = "do_create_spdx"
 python spdx30_build_started_handler () {
     import oe.spdx30
     import oe.sbom30
+    import oe.spdx_common
     import os
     from pathlib import Path
     from datetime import datetime, timezone
@@ -966,7 +970,7 @@ python spdx30_build_started_handler () {
     d = e.data.createCopy()
     d.setVar("PN", "bitbake")
     d.setVar("BB_TASKHASH", "bitbake")
-    load_spdx_license_data(d)
+    oe.spdx_common.load_spdx_license_data(d)
 
     deploy_dir_spdx = Path(e.data.getVar("DEPLOY_DIR_SPDX"))
 
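The loop rewritten above switches from positional tuple unpacking to the `Dep` namedtuple that `oe.spdx_common.get_spdx_deps()` now returns. A standalone sketch contrasting the two styles; the recipe names and hashes are made up:

```python
import collections

# Same definition as the one added in meta/lib/oe/spdx_common.py
Dep = collections.namedtuple("Dep", ["pn", "hashfn", "in_taskhash"])

deps = [
    Dep("zlib", "aabbcc", True),
    Dep("ncurses", "ddeeff", False),
]

# Old style: positional unpacking, where field order is easy to get wrong
for dep_pn, _, in_taskhash in deps:
    pass

# New style: self-documenting attribute access
for dep in deps:
    if dep.in_taskhash:
        print("%s can be linked against" % dep.pn)
```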
diff --git a/meta/classes/create-spdx-image-3.0.bbclass b/meta/classes/create-spdx-image-3.0.bbclass
index bda11d54d4..467719555d 100644
--- a/meta/classes/create-spdx-image-3.0.bbclass
+++ b/meta/classes/create-spdx-image-3.0.bbclass
@@ -10,7 +10,8 @@ SPDXIMAGEDEPLOYDIR = "${SPDXDIR}/image-deploy"
 SPDXROOTFSDEPLOY = "${SPDXDIR}/rootfs-deploy"
 
 def collect_build_package_inputs(d, objset, build, packages):
-    providers = collect_package_providers(d)
+    import oe.spdx_common
+    providers = oe.spdx_common.collect_package_providers(d)
 
     build_deps = set()
 
diff --git a/meta/classes/spdx-common.bbclass b/meta/classes/spdx-common.bbclass
index 6dfc1fd9e4..d3110a9bdb 100644
--- a/meta/classes/spdx-common.bbclass
+++ b/meta/classes/spdx-common.bbclass
@@ -37,96 +37,11 @@ SPDX_LICENSES ??= "${COREBASE}/meta/files/spdx-licenses.json"
 
 SPDX_CUSTOM_ANNOTATION_VARS ??= ""
 
-def extract_licenses(filename):
-    import re
-
-    lic_regex = re.compile(rb'^\W*SPDX-License-Identifier:\s*([ \w\d.()+-]+?)(?:\s+\W*)?$', re.MULTILINE)
-
-    try:
-        with open(filename, 'rb') as f:
-            size = min(15000, os.stat(filename).st_size)
-            txt = f.read(size)
-            licenses = re.findall(lic_regex, txt)
-            if licenses:
-                ascii_licenses = [lic.decode('ascii') for lic in licenses]
-                return ascii_licenses
-    except Exception as e:
-        bb.warn(f"Exception reading {filename}: {e}")
-    return []
-
-def is_work_shared_spdx(d):
-    return bb.data.inherits_class('kernel', d) or ('work-shared' in d.getVar('WORKDIR'))
-
-def get_json_indent(d):
-    if d.getVar("SPDX_PRETTY") == "1":
-        return 2
-    return None
-
-def load_spdx_license_data(d):
-    import json
-    if d.getVar("SPDX_LICENSE_DATA"):
-        return
-
-    with open(d.getVar("SPDX_LICENSES"), "r") as f:
-        data = json.load(f)
-        # Transform the license array to a dictionary
-        data["licenses"] = {l["licenseId"]: l for l in data["licenses"]}
-        d.setVar("SPDX_LICENSE_DATA", data)
-
 python() {
-    load_spdx_license_data(d)
+    import oe.spdx_common
+    oe.spdx_common.load_spdx_license_data(d)
 }
 
-def process_sources(d):
-    pn = d.getVar('PN')
-    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
-    if pn in assume_provided:
-        for p in d.getVar("PROVIDES").split():
-            if p != pn:
-                pn = p
-                break
-
-    # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
-    # so avoid archiving source here.
-    if pn.startswith('glibc-locale'):
-        return False
-    if d.getVar('PN') == "libtool-cross":
-        return False
-    if d.getVar('PN') == "libgcc-initial":
-        return False
-    if d.getVar('PN') == "shadow-sysroot":
-        return False
-
-    # We just archive gcc-source for all the gcc related recipes
-    if d.getVar('BPN') in ['gcc', 'libgcc']:
-        bb.debug(1, 'spdx: There is bug in scan of %s is, do nothing' % pn)
-        return False
-
-    return True
-
-def collect_direct_deps(d, dep_task):
-    current_task = "do_" + d.getVar("BB_CURRENTTASK")
-    pn = d.getVar("PN")
-
-    taskdepdata = d.getVar("BB_TASKDEPDATA", False)
-
-    for this_dep in taskdepdata.values():
-        if this_dep[0] == pn and this_dep[1] == current_task:
-            break
-    else:
-        bb.fatal(f"Unable to find this {pn}:{current_task} in taskdepdata")
-
-    deps = set()
-
-    for dep_name in this_dep.deps:
-        dep_data = taskdepdata[dep_name]
-        if dep_data.taskname == dep_task and dep_data.pn != pn:
-            deps.add((dep_data.pn, dep_data.hashfn, dep_name in this_dep.taskhash_deps))
-
-    return sorted(deps)
-
-collect_direct_deps[vardepsexclude] += "BB_TASKDEPDATA"
-collect_direct_deps[vardeps] += "DEPENDS"
 
 python do_collect_spdx_deps() {
     # This task calculates the build time dependencies of the recipe, and is
@@ -136,11 +51,12 @@ python do_collect_spdx_deps() {
     # do_create_spdx reads in the found dependencies when writing the actual
     # SPDX document
     import json
+    import oe.spdx_common
     from pathlib import Path
 
     spdx_deps_file = Path(d.getVar("SPDXDEPS"))
 
-    deps = collect_direct_deps(d, "do_create_spdx")
+    deps = oe.spdx_common.collect_direct_deps(d, "do_create_spdx")
 
     with spdx_deps_file.open("w") as f:
         json.dump(deps, f)
@@ -151,104 +67,7 @@ do_collect_spdx_deps[depends] += "${PATCHDEPENDENCY}"
 do_collect_spdx_deps[deptask] = "do_create_spdx"
 do_collect_spdx_deps[dirs] = "${SPDXDIR}"
 
-def get_spdx_deps(d):
-    import json
-    from pathlib import Path
-
-    spdx_deps_file = Path(d.getVar("SPDXDEPS"))
-
-    with spdx_deps_file.open("r") as f:
-        return json.load(f)
-
-def collect_package_providers(d):
-    from pathlib import Path
-    import oe.sbom
-    import oe.spdx
-    import json
-
-    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
-
-    providers = {}
-
-    deps = collect_direct_deps(d, "do_create_spdx")
-    deps.append((d.getVar("PN"), d.getVar("BB_HASHFILENAME"), True))
-
-    for dep_pn, dep_hashfn, _ in deps:
-        localdata = d
-        recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
-        if not recipe_data:
-            localdata = bb.data.createCopy(d)
-            localdata.setVar("PKGDATA_DIR", "${PKGDATA_DIR_SDK}")
-            recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
-
-        for pkg in recipe_data.get("PACKAGES", "").split():
-
-            pkg_data = oe.packagedata.read_subpkgdata_dict(pkg, localdata)
-            rprovides = set(n for n, _ in bb.utils.explode_dep_versions2(pkg_data.get("RPROVIDES", "")).items())
-            rprovides.add(pkg)
-
-            if "PKG" in pkg_data:
-                pkg = pkg_data["PKG"]
-                rprovides.add(pkg)
-
-            for r in rprovides:
-                providers[r] = (pkg, dep_hashfn)
-
-    return providers
-
-collect_package_providers[vardepsexclude] += "BB_TASKDEPDATA"
-
-def spdx_get_src(d):
-    """
-    save patched source of the recipe in SPDX_WORKDIR.
-    """
-    import shutil
-    spdx_workdir = d.getVar('SPDXWORK')
-    spdx_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
-    pn = d.getVar('PN')
-
-    workdir = d.getVar("WORKDIR")
-
-    try:
-        # The kernel class functions require it to be on work-shared, so we dont change WORKDIR
-        if not is_work_shared_spdx(d):
-            # Change the WORKDIR to make do_unpack do_patch run in another dir.
-            d.setVar('WORKDIR', spdx_workdir)
-            # Restore the original path to recipe's native sysroot (it's relative to WORKDIR).
-            d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
-
-            # The changed 'WORKDIR' also caused 'B' changed, create dir 'B' for the
-            # possibly requiring of the following tasks (such as some recipes's
-            # do_patch required 'B' existed).
-            bb.utils.mkdirhier(d.getVar('B'))
-
-            bb.build.exec_func('do_unpack', d)
-        # Copy source of kernel to spdx_workdir
-        if is_work_shared_spdx(d):
-            share_src = d.getVar('WORKDIR')
-            d.setVar('WORKDIR', spdx_workdir)
-            d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
-            src_dir = spdx_workdir + "/" + d.getVar('PN')+ "-" + d.getVar('PV') + "-" + d.getVar('PR')
-            bb.utils.mkdirhier(src_dir)
-            if bb.data.inherits_class('kernel',d):
-                share_src = d.getVar('STAGING_KERNEL_DIR')
-            cmd_copy_share = "cp -rf " + share_src + "/* " + src_dir + "/"
-            cmd_copy_shared_res = os.popen(cmd_copy_share).read()
-            bb.note("cmd_copy_shared_result = " + cmd_copy_shared_res)
-
-            git_path = src_dir + "/.git"
-            if os.path.exists(git_path):
-                shutils.rmtree(git_path)
-
-        # Make sure gcc and kernel sources are patched only once
-        if not (d.getVar('SRC_URI') == "" or is_work_shared_spdx(d)):
-            bb.build.exec_func('do_patch', d)
-
-        # Some userland has no source.
-        if not os.path.exists( spdx_workdir ):
-            bb.utils.mkdirhier(spdx_workdir)
-    finally:
-        d.setVar("WORKDIR", workdir)
-
-spdx_get_src[vardepsexclude] += "STAGING_KERNEL_DIR"
-
+oe.spdx_common.collect_direct_deps[vardepsexclude] += "BB_TASKDEPDATA"
+oe.spdx_common.collect_direct_deps[vardeps] += "DEPENDS"
+oe.spdx_common.collect_package_providers[vardepsexclude] += "BB_TASKDEPDATA"
+oe.spdx_common.get_patched_src[vardepsexclude] += "STAGING_KERNEL_DIR"
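`do_collect_spdx_deps` above still serializes the result of `collect_direct_deps()` with `json.dump()`. A namedtuple serializes as a plain JSON array, and `get_spdx_deps()` rebuilds the `Dep` instances field by field on read. A round-trip sketch under that assumption, using a temporary file in place of the real `SPDXDEPS` path:

```python
import collections
import json
import tempfile

Dep = collections.namedtuple("Dep", ["pn", "hashfn", "in_taskhash"])

deps = [Dep("zlib", "aabbcc", True), Dep("ncurses", "ddeeff", False)]

with tempfile.NamedTemporaryFile("w+", suffix=".json") as f:
    # Writer side (do_collect_spdx_deps): namedtuples are written as arrays
    json.dump(deps, f)
    f.flush()
    f.seek(0)
    # Reader side (get_spdx_deps): rebuild each namedtuple from its array
    restored = [Dep(*dep) for dep in json.load(f)]

assert restored == deps
```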
diff --git a/meta/lib/oe/sbom30.py b/meta/lib/oe/sbom30.py
index 771e87be79..2532d19dad 100644
--- a/meta/lib/oe/sbom30.py
+++ b/meta/lib/oe/sbom30.py
@@ -12,6 +12,7 @@ import re
 import hashlib
 import uuid
 import os
+import oe.spdx_common
 from datetime import datetime, timezone
 
 OE_SPDX_BASE = "https://rdf.openembedded.org/spdx/3.0/"
@@ -205,24 +206,6 @@ def get_alias(obj):
     return None
 
 
-def extract_licenses(filename):
-    lic_regex = re.compile(
-        rb"^\W*SPDX-License-Identifier:\s*([ \w\d.()+-]+?)(?:\s+\W*)?$", re.MULTILINE
-    )
-
-    try:
-        with open(filename, "rb") as f:
-            size = min(15000, os.stat(filename).st_size)
-            txt = f.read(size)
-            licenses = re.findall(lic_regex, txt)
-            if licenses:
-                ascii_licenses = [lic.decode("ascii") for lic in licenses]
-                return ascii_licenses
-    except Exception as e:
-        bb.warn(f"Exception reading {filename}: {e}")
-    return []
-
-
 def to_list(l):
     if isinstance(l, set):
         l = sorted(list(l))
@@ -630,7 +613,7 @@ class ObjectSet(oe.spdx30.SHACLObjectSet):
             return
 
         file_licenses = set()
-        for extracted_lic in extract_licenses(filepath):
+        for extracted_lic in oe.spdx_common.extract_licenses(filepath):
             file_licenses.add(self.new_license_expression(extracted_lic))
 
         self.new_relationship(
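`oe.sbom30` now reuses the shared `oe.spdx_common.extract_licenses()`, which scans at most the first 15000 bytes of a file for `SPDX-License-Identifier:` tags. A simplified, self-contained sketch of that behavior, with the module's error handling omitted:

```python
import os
import re
import tempfile

# Same pattern as LIC_REGEX in meta/lib/oe/spdx_common.py
LIC_REGEX = re.compile(
    rb"^\W*SPDX-License-Identifier:\s*([ \w\d.()+-]+?)(?:\s+\W*)?$",
    re.MULTILINE,
)

def extract_licenses(filename):
    # Only the head of the file is read; license tags conventionally
    # live in the first comment block.
    with open(filename, "rb") as f:
        txt = f.read(15000)
    return [lic.decode("ascii") for lic in LIC_REGEX.findall(txt)]

with tempfile.NamedTemporaryFile("w", suffix=".c", delete=False) as f:
    f.write("// SPDX-License-Identifier: GPL-2.0-only\nint main(void) { return 0; }\n")
    path = f.name

print(extract_licenses(path))  # ['GPL-2.0-only']
os.unlink(path)
```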
diff --git a/meta/lib/oe/spdx_common.py b/meta/lib/oe/spdx_common.py
new file mode 100644
index 0000000000..f23100fe03
--- /dev/null
+++ b/meta/lib/oe/spdx_common.py
@@ -0,0 +1,229 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import bb
+import collections
+import json
+import oe.packagedata
+import os
+import re
+import shutil
+
+from pathlib import Path
+
+
+LIC_REGEX = re.compile(
+    rb"^\W*SPDX-License-Identifier:\s*([ \w\d.()+-]+?)(?:\s+\W*)?$",
+    re.MULTILINE,
+)
+
+
+def extract_licenses(filename):
+    """
+    Extract SPDX License identifiers from a file
+    """
+    try:
+        with open(filename, "rb") as f:
+            size = min(15000, os.stat(filename).st_size)
+            txt = f.read(size)
+            licenses = re.findall(LIC_REGEX, txt)
+            if licenses:
+                ascii_licenses = [lic.decode("ascii") for lic in licenses]
+                return ascii_licenses
+    except Exception as e:
+        bb.warn(f"Exception reading {filename}: {e}")
+    return []
+
+
+def is_work_shared_spdx(d):
+    return bb.data.inherits_class("kernel", d) or ("work-shared" in d.getVar("WORKDIR"))
+
+
+def load_spdx_license_data(d):
+    if d.getVar("SPDX_LICENSE_DATA"):
+        return
+
+    with open(d.getVar("SPDX_LICENSES"), "r") as f:
+        data = json.load(f)
+        # Transform the license array to a dictionary
+        data["licenses"] = {l["licenseId"]: l for l in data["licenses"]}
+        d.setVar("SPDX_LICENSE_DATA", data)
+
+
+def process_sources(d):
+    """
+    Returns True if the sources for this recipe should be included in the SPDX
+    or False if not
+    """
+    pn = d.getVar("PN")
+    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
+    if pn in assume_provided:
+        for p in d.getVar("PROVIDES").split():
+            if p != pn:
+                pn = p
+                break
+
+    # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
+    # so avoid archiving source here.
+    if pn.startswith("glibc-locale"):
+        return False
+    if d.getVar("PN") == "libtool-cross":
+        return False
+    if d.getVar("PN") == "libgcc-initial":
+        return False
+    if d.getVar("PN") == "shadow-sysroot":
+        return False
+
+    # We just archive gcc-source for all the gcc related recipes
+    if d.getVar("BPN") in ["gcc", "libgcc"]:
+        bb.debug(1, "spdx: There is a bug in the scan of %s, do nothing" % pn)
+        return False
+
+    return True
+
+
+Dep = collections.namedtuple("Dep", ["pn", "hashfn", "in_taskhash"])
+
+
+def collect_direct_deps(d, dep_task):
+    """
+    Find direct dependencies of current task
+
+    Returns the list of recipes that have a dep_task that the current task
+    depends on
+    """
+    current_task = "do_" + d.getVar("BB_CURRENTTASK")
+    pn = d.getVar("PN")
+
+    taskdepdata = d.getVar("BB_TASKDEPDATA", False)
+
+    for this_dep in taskdepdata.values():
+        if this_dep[0] == pn and this_dep[1] == current_task:
+            break
+    else:
+        bb.fatal(f"Unable to find this {pn}:{current_task} in taskdepdata")
+
+    deps = set()
+
+    for dep_name in this_dep.deps:
+        dep_data = taskdepdata[dep_name]
+        if dep_data.taskname == dep_task and dep_data.pn != pn:
+            deps.add((dep_data.pn, dep_data.hashfn, dep_name in this_dep.taskhash_deps))
+
+    return sorted(deps)
+
+
+def get_spdx_deps(d):
+    """
+    Reads the SPDX dependencies JSON file and returns the data
+    """
+    spdx_deps_file = Path(d.getVar("SPDXDEPS"))
+
+    deps = []
+    with spdx_deps_file.open("r") as f:
+        for dep in json.load(f):
+            deps.append(Dep(*dep))
+    return deps
+
+
+def collect_package_providers(d):
+    """
+    Returns a dictionary where each RPROVIDES is mapped to the package that
+    provides it
+    """
+    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+
+    providers = {}
+
+    deps = collect_direct_deps(d, "do_create_spdx")
+    deps.append((d.getVar("PN"), d.getVar("BB_HASHFILENAME"), True))
+
+    for dep_pn, dep_hashfn, _ in deps:
+        localdata = d
+        recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
+        if not recipe_data:
+            localdata = bb.data.createCopy(d)
+            localdata.setVar("PKGDATA_DIR", "${PKGDATA_DIR_SDK}")
+            recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
+
+        for pkg in recipe_data.get("PACKAGES", "").split():
+            pkg_data = oe.packagedata.read_subpkgdata_dict(pkg, localdata)
+            rprovides = set(
+                n
+                for n, _ in bb.utils.explode_dep_versions2(
+                    pkg_data.get("RPROVIDES", "")
+                ).items()
+            )
+            rprovides.add(pkg)
+
+            if "PKG" in pkg_data:
+                pkg = pkg_data["PKG"]
+                rprovides.add(pkg)
+
+            for r in rprovides:
+                providers[r] = (pkg, dep_hashfn)
+
+    return providers
+
+
+def get_patched_src(d):
+    """
+    Save patched source of the recipe in SPDX_WORKDIR.
+    """
+    spdx_workdir = d.getVar("SPDXWORK")
+    spdx_sysroot_native = d.getVar("STAGING_DIR_NATIVE")
+    pn = d.getVar("PN")
+
+    workdir = d.getVar("WORKDIR")
+
+    try:
+        # The kernel class functions require it to be on work-shared, so we don't change WORKDIR
+        if not is_work_shared_spdx(d):
+            # Change WORKDIR to make do_unpack and do_patch run in another dir.
+            d.setVar("WORKDIR", spdx_workdir)
+            # Restore the original path to the recipe's native sysroot (it's relative to WORKDIR).
+            d.setVar("STAGING_DIR_NATIVE", spdx_sysroot_native)
+
+            # Changing WORKDIR also changes B, so create the B directory in
+            # case a later task needs it to exist (for example, some recipes'
+            # do_patch requires B).
+            bb.utils.mkdirhier(d.getVar("B"))
+
+            bb.build.exec_func("do_unpack", d)
+        # Copy the kernel sources to spdx_workdir
+        if is_work_shared_spdx(d):
+            share_src = d.getVar("WORKDIR")
+            d.setVar("WORKDIR", spdx_workdir)
+            d.setVar("STAGING_DIR_NATIVE", spdx_sysroot_native)
+            src_dir = (
+                spdx_workdir
+                + "/"
+                + d.getVar("PN")
+                + "-"
+                + d.getVar("PV")
+                + "-"
+                + d.getVar("PR")
+            )
+            bb.utils.mkdirhier(src_dir)
+            if bb.data.inherits_class("kernel", d):
+                share_src = d.getVar("STAGING_KERNEL_DIR")
+            cmd_copy_share = "cp -rf " + share_src + "/* " + src_dir + "/"
+            cmd_copy_shared_res = os.popen(cmd_copy_share).read()
+            bb.note("cmd_copy_shared_result = " + cmd_copy_shared_res)
+
+            git_path = src_dir + "/.git"
+            if os.path.exists(git_path):
+                shutil.rmtree(git_path)
+
+        # Make sure gcc and kernel sources are patched only once
+        if not (d.getVar("SRC_URI") == "" or is_work_shared_spdx(d)):
+            bb.build.exec_func("do_patch", d)
+
+        # Some userland recipes have no source.
+        if not os.path.exists(spdx_workdir):
+            bb.utils.mkdirhier(spdx_workdir)
+    finally:
+        d.setVar("WORKDIR", workdir)
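For reference, `collect_package_providers()` flattens every runtime-provided name onto the concrete package (plus the hash filename of its provider), so runtime dependency resolution in the callers becomes a dictionary lookup. A hypothetical result for a recipe that depends on zlib; all names and hashes below are illustrative:

```python
# Shape of the mapping returned by collect_package_providers():
#   runtime-provided name -> (actual package name, BB_HASHFILENAME of provider)
providers = {
    "zlib": ("libz1", "deadbeef..."),       # main package renamed via PKG
    "libz1": ("libz1", "deadbeef..."),
    "zlib-dev": ("zlib-dev", "deadbeef..."),
}

# A runtime dependency such as an RDEPENDS on "zlib" resolves directly:
pkg, hashfn = providers["zlib"]
```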