diff options
Diffstat (limited to 'meta/classes/create-spdx-2.2.bbclass')
-rw-r--r-- | meta/classes/create-spdx-2.2.bbclass | 970 |
1 files changed, 970 insertions, 0 deletions
diff --git a/meta/classes/create-spdx-2.2.bbclass b/meta/classes/create-spdx-2.2.bbclass new file mode 100644 index 0000000000..94e0108815 --- /dev/null +++ b/meta/classes/create-spdx-2.2.bbclass | |||
@@ -0,0 +1,970 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
# SPDX 2.2 SBOM generation for recipes, packages, images and SDKs.
inherit spdx-common

SPDX_VERSION = "2.2"

# Identity recorded as the supplier of generated SPDX packages.
SPDX_ORG ??= "OpenEmbedded ()"
SPDX_SUPPLIER ??= "Organization: ${SPDX_ORG}"
SPDX_SUPPLIER[doc] = "The SPDX PackageSupplier field for SPDX packages created from \
    this recipe. For SPDX documents create using this class during the build, this \
    is the contact information for the person or organization who is doing the \
    build."

# When "1", also archive the patched sources / packaged files into
# .tar.zst archives alongside the generated SPDX documents.
SPDX_ARCHIVE_SOURCES ??= "0"
SPDX_ARCHIVE_PACKAGED ??= "0"
def get_namespace(d, name):
    """Return a deterministic SPDX document namespace URI for *name*.

    Two chained UUIDv5 derivations rooted at SPDX_UUID_NAMESPACE make the
    namespace reproducible: the same document name always yields the same
    URI for a given configuration.
    """
    import uuid

    base_uuid = uuid.uuid5(uuid.NAMESPACE_DNS, d.getVar("SPDX_UUID_NAMESPACE"))
    doc_uuid = uuid.uuid5(base_uuid, name)
    return "%s/%s-%s" % (d.getVar("SPDX_NAMESPACE_PREFIX"), name, doc_uuid)
25 | |||
# Version recorded as "versionInfo" on recipe/package/image SPDX packages.
SPDX_PACKAGE_VERSION ??= "${PV}"
SPDX_PACKAGE_VERSION[doc] = "The version of a package, versionInfo in recipe, package and image"
def create_annotation(d, comment):
    """Build an SPDXAnnotation of type OTHER carrying *comment*.

    The annotator is recorded as "Tool: <name> - <version>" from
    SPDX_TOOL_NAME/SPDX_TOOL_VERSION, timestamped with the current UTC time.
    """
    from datetime import datetime, timezone

    annotation = oe.spdx.SPDXAnnotation()
    annotation.annotationDate = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
    annotation.annotationType = "OTHER"
    annotation.annotator = "Tool: %s - %s" % (d.getVar("SPDX_TOOL_NAME"), d.getVar("SPDX_TOOL_VERSION"))
    annotation.comment = comment
    return annotation
39 | |||
def recipe_spdx_is_native(d, recipe):
    """Return True if *recipe* carries the "isNative" annotation added by this tool."""
    expected_annotator = "Tool: %s - %s" % (d.getVar("SPDX_TOOL_NAME"), d.getVar("SPDX_TOOL_VERSION"))
    for annotation in recipe.annotations:
        if (annotation.annotationType == "OTHER"
                and annotation.annotator == expected_annotator
                and annotation.comment == "isNative"):
            return True
    return False
44 | |||
def get_json_indent(d):
    """JSON indent width: 2 when SPDX_PRETTY is enabled, else None (compact output)."""
    return 2 if d.getVar("SPDX_PRETTY") == "1" else None
49 | |||
50 | |||
def convert_license_to_spdx(lic, license_data, document, d, existing=None):
    """Convert an OE LICENSE expression *lic* into an SPDX license expression.

    "&"/"|" map to "AND"/"OR" and "CLOSED" maps to "NONE". Identifiers are
    first translated through SPDXLICENSEMAP; anything still unknown to the
    SPDX license list is emitted as a "LicenseRef-" identifier and its text
    is attached to *document* as extracted licensing info.

    :param lic: OE license expression (e.g. "GPL-2.0-only & MIT")
    :param license_data: parsed SPDX license-list data (uses the "licenses" key)
    :param document: SPDXDocument receiving hasExtractedLicensingInfos entries
    :param d: the BitBake datastore
    :param existing: optional mapping of non-SPDX license name -> previously
        assigned LicenseRef identifier, reused instead of creating a new one
    :return: the converted SPDX license expression string
    """
    from pathlib import Path
    import oe.spdx

    # Fixed: "existing" was a mutable default argument ({}). It was only
    # read, never mutated, but the None-default idiom avoids the trap.
    if existing is None:
        existing = {}

    extracted = {}

    def add_extracted_license(ident, name):
        nonlocal document

        if name in extracted:
            return

        extracted_info = oe.spdx.SPDXExtractedLicensingInfo()
        extracted_info.name = name
        extracted_info.licenseId = ident
        extracted_info.extractedText = None

        if name == "PD":
            # Special-case this.
            extracted_info.extractedText = "Software released to the public domain"
        else:
            # Search for the license text in COMMON_LICENSE_DIR and LICENSE_PATH
            for directory in [d.getVar('COMMON_LICENSE_DIR')] + (d.getVar('LICENSE_PATH') or '').split():
                try:
                    with (Path(directory) / name).open(errors="replace") as f:
                        extracted_info.extractedText = f.read()
                        break
                except FileNotFoundError:
                    pass
            if extracted_info.extractedText is None:
                # If it's not SPDX or PD, then NO_GENERIC_LICENSE must be set
                entry = d.getVarFlag('NO_GENERIC_LICENSE', name).split(';')
                filename = entry[0]
                params = {i.split('=')[0]: i.split('=')[1] for i in entry[1:] if '=' in i}
                beginline = int(params.get('beginline', 1))
                endline = params.get('endline', None)
                if endline:
                    endline = int(endline)
                if filename:
                    filename = d.expand("${S}/" + filename)
                    with open(filename, errors="replace") as f:
                        # Keep only the [beginline, endline] slice of the file.
                        extracted_info.extractedText = "".join(line for idx, line in enumerate(f, 1) if beginline <= idx and idx <= (endline or idx))
                else:
                    bb.fatal("Cannot find any text for license %s" % name)

        extracted[name] = extracted_info
        document.hasExtractedLicensingInfos.append(extracted_info)

    def convert(l):
        # Structural tokens pass through; OE operators become SPDX operators.
        if l == "(" or l == ")":
            return l

        if l == "&":
            return "AND"

        if l == "|":
            return "OR"

        if l == "CLOSED":
            return "NONE"

        spdx_license = d.getVarFlag("SPDXLICENSEMAP", l) or l
        if spdx_license in license_data["licenses"]:
            return spdx_license

        # Not an SPDX license: reuse a known LicenseRef or mint a new one.
        try:
            spdx_license = existing[l]
        except KeyError:
            spdx_license = "LicenseRef-" + l
            add_extracted_license(spdx_license, l)

        return spdx_license

    # Tokenize by padding parens/operators with spaces, then splitting.
    lic_split = lic.replace("(", " ( ").replace(")", " ) ").replace("|", " | ").replace("&", " & ").split()

    return ' '.join(convert(l) for l in lic_split)
127 | |||
def add_package_files(d, doc, spdx_pkg, topdir, get_spdxid, get_types, *, archive=None, ignore_dirs=None, ignore_top_level_dirs=None):
    """Walk *topdir* and add an SPDXFile to *doc* for every regular file.

    Each file gets SHA1/SHA256 checksums and a CONTAINS relationship from
    *spdx_pkg*; files typed SOURCE additionally get detected license IDs.
    The package verification code is derived from the sorted SHA1 list.

    :param get_spdxid: callable(file_counter) -> SPDXID string for the file
    :param get_types: callable(filepath) -> list of SPDX file type strings
    :param archive: optional tarfile the files are also appended to, with
        ownership normalized to root and mtimes clamped to SOURCE_DATE_EPOCH
    :param ignore_dirs: directory names pruned everywhere during the walk
    :param ignore_top_level_dirs: directory names pruned only directly
        under *topdir*
    :return: the list of created SPDXFile objects
    """
    from pathlib import Path
    import oe.spdx
    import oe.spdx_common
    import hashlib

    # Fixed: ignore_dirs/ignore_top_level_dirs were mutable default
    # arguments ([]). They were only read, but use the None idiom.
    if ignore_dirs is None:
        ignore_dirs = []
    if ignore_top_level_dirs is None:
        ignore_top_level_dirs = []

    source_date_epoch = d.getVar("SOURCE_DATE_EPOCH")
    if source_date_epoch:
        source_date_epoch = int(source_date_epoch)

    sha1s = []
    spdx_files = []

    file_counter = 1

    check_compiled_sources = d.getVar("SPDX_INCLUDE_COMPILED_SOURCES") == "1"
    if check_compiled_sources:
        compiled_sources, types = oe.spdx_common.get_compiled_sources(d)
        bb.debug(1, f"Total compiled files: {len(compiled_sources)}")
    for subdir, dirs, files in os.walk(topdir):
        # Prune ignored directories in place so os.walk never descends into
        # them. (Renamed the loop variable from "d", which visually shadowed
        # the datastore.)
        dirs[:] = [dirname for dirname in dirs if dirname not in ignore_dirs]
        if subdir == str(topdir):
            dirs[:] = [dirname for dirname in dirs if dirname not in ignore_top_level_dirs]

        for file in files:
            filepath = Path(subdir) / file
            filename = str(filepath.relative_to(topdir))

            if not filepath.is_symlink() and filepath.is_file():
                # When restricting to compiled sources, skip files the build
                # did not actually compile.
                if check_compiled_sources:
                    if not oe.spdx_common.is_compiled_source(filename, compiled_sources, types):
                        continue
                spdx_file = oe.spdx.SPDXFile()
                spdx_file.SPDXID = get_spdxid(file_counter)
                for t in get_types(filepath):
                    spdx_file.fileTypes.append(t)
                spdx_file.fileName = filename

                if archive is not None:
                    with filepath.open("rb") as f:
                        info = archive.gettarinfo(fileobj=f)
                        info.name = filename
                        info.uid = 0
                        info.gid = 0
                        info.uname = "root"
                        info.gname = "root"

                        # Clamp timestamps for reproducible archives.
                        if source_date_epoch is not None and info.mtime > source_date_epoch:
                            info.mtime = source_date_epoch

                        archive.addfile(info, f)

                sha1 = bb.utils.sha1_file(filepath)
                sha1s.append(sha1)
                spdx_file.checksums.append(oe.spdx.SPDXChecksum(
                    algorithm="SHA1",
                    checksumValue=sha1,
                ))
                spdx_file.checksums.append(oe.spdx.SPDXChecksum(
                    algorithm="SHA256",
                    checksumValue=bb.utils.sha256_file(filepath),
                ))

                if "SOURCE" in spdx_file.fileTypes:
                    extracted_lics = oe.spdx_common.extract_licenses(filepath)
                    if extracted_lics:
                        spdx_file.licenseInfoInFiles = extracted_lics

                doc.files.append(spdx_file)
                doc.add_relationship(spdx_pkg, "CONTAINS", spdx_file)
                spdx_pkg.hasFiles.append(spdx_file.SPDXID)

                spdx_files.append(spdx_file)

                file_counter += 1

    # SPDX package verification code: SHA1 over the sorted per-file SHA1s.
    sha1s.sort()
    verifier = hashlib.sha1()
    for v in sha1s:
        verifier.update(v.encode("utf-8"))
    spdx_pkg.packageVerificationCode.packageVerificationCodeValue = verifier.hexdigest()

    return spdx_files
212 | |||
213 | |||
def add_package_sources_from_debug(d, package_doc, spdx_package, package, package_files, sources):
    """Link packaged binary files to the debug sources they were built from.

    Uses the "debugsrc" entries recorded in the package data plus the
    SHA256 -> DepSource map *sources* to emit GENERATED_FROM relationships,
    adding an external document reference when the source file lives in a
    dependency's SPDX document.
    """
    from pathlib import Path
    import hashlib
    import oe.packagedata
    import oe.spdx

    # Locations where the referenced debug sources may physically exist.
    debug_search_paths = [
        Path(d.getVar('PKGD')),
        Path(d.getVar('STAGING_DIR_TARGET')),
        Path(d.getVar('STAGING_DIR_NATIVE')),
        Path(d.getVar('STAGING_KERNEL_DIR')),
    ]

    pkg_data = oe.packagedata.read_subpkgdata_extended(package, d)

    if pkg_data is None:
        return

    for file_path, file_data in pkg_data["files_info"].items():
        if not "debugsrc" in file_data:
            continue

        # Find the SPDXFile corresponding to this packaged file path.
        for pkg_file in package_files:
            if file_path.lstrip("/") == pkg_file.fileName.lstrip("/"):
                break
        else:
            bb.fatal("No package file found for %s in %s; SPDX found: %s" % (str(file_path), package,
                " ".join(p.fileName for p in package_files)))
            continue

        for debugsrc in file_data["debugsrc"]:
            # Default when the source cannot be matched to any dependency.
            ref_id = "NOASSERTION"
            for search in debug_search_paths:
                if debugsrc.startswith("/usr/src/kernel"):
                    debugsrc_path = search / debugsrc.replace('/usr/src/kernel/', '')
                else:
                    debugsrc_path = search / debugsrc.lstrip("/")
                # We can only hash files below, skip directories, links, etc.
                if not os.path.isfile(debugsrc_path):
                    continue

                file_sha256 = bb.utils.sha256_file(debugsrc_path)

                if file_sha256 in sources:
                    source_file = sources[file_sha256]

                    # Reference the dependency's document, adding the external
                    # document ref the first time it is needed.
                    doc_ref = package_doc.find_external_document_ref(source_file.doc.documentNamespace)
                    if doc_ref is None:
                        doc_ref = oe.spdx.SPDXExternalDocumentRef()
                        doc_ref.externalDocumentId = "DocumentRef-dependency-" + source_file.doc.name
                        doc_ref.spdxDocument = source_file.doc.documentNamespace
                        doc_ref.checksum.algorithm = "SHA1"
                        doc_ref.checksum.checksumValue = source_file.doc_sha1
                        package_doc.externalDocumentRefs.append(doc_ref)

                    ref_id = "%s:%s" % (doc_ref.externalDocumentId, source_file.file.SPDXID)
                else:
                    bb.debug(1, "Debug source %s with SHA256 %s not found in any dependency" % (str(debugsrc_path), file_sha256))
                # A file was found on disk (matched or not): stop searching.
                break
            else:
                bb.debug(1, "Debug source %s not found" % debugsrc)

            package_doc.add_relationship(pkg_file, "GENERATED_FROM", ref_id, comment=debugsrc)

# Keep STAGING_KERNEL_DIR out of the task signature.
add_package_sources_from_debug[vardepsexclude] += "STAGING_KERNEL_DIR"
279 | |||
def collect_dep_recipes(d, doc, spdx_recipe):
    """Record build-dependency recipes as external references on *doc*.

    For every SPDX dependency of this recipe, load its recipe-level SPDX
    document, append an external document reference to *doc* and add a
    BUILD_DEPENDENCY_OF relationship against *spdx_recipe*.

    :return: list of oe.sbom.DepRecipe for the dependencies found
    """
    # NOTE(review): the json import appears unused here.
    import json
    from pathlib import Path
    import oe.sbom
    import oe.spdx
    import oe.spdx_common

    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
    package_archs = d.getVar("SPDX_MULTILIB_SSTATE_ARCHS").split()
    package_archs.reverse()

    dep_recipes = []

    deps = oe.spdx_common.get_spdx_deps(d)

    for dep in deps:
        # If this dependency is not calculated in the taskhash skip it.
        # Otherwise, it can result in broken links since this task won't
        # rebuild and see the new SPDX ID if the dependency changes
        if not dep.in_taskhash:
            continue

        dep_recipe_path = oe.sbom.doc_find_by_hashfn(deploy_dir_spdx, package_archs, "recipe-" + dep.pn, dep.hashfn)
        if not dep_recipe_path:
            bb.fatal("Cannot find any SPDX file for recipe %s, %s" % (dep.pn, dep.hashfn))

        spdx_dep_doc, spdx_dep_sha1 = oe.sbom.read_doc(dep_recipe_path)

        # Locate the SPDX package describing the dependency recipe itself;
        # skip documents that do not contain one.
        for pkg in spdx_dep_doc.packages:
            if pkg.name == dep.pn:
                spdx_dep_recipe = pkg
                break
        else:
            continue

        dep_recipes.append(oe.sbom.DepRecipe(spdx_dep_doc, spdx_dep_sha1, spdx_dep_recipe))

        dep_recipe_ref = oe.spdx.SPDXExternalDocumentRef()
        dep_recipe_ref.externalDocumentId = "DocumentRef-dependency-" + spdx_dep_doc.name
        dep_recipe_ref.spdxDocument = spdx_dep_doc.documentNamespace
        dep_recipe_ref.checksum.algorithm = "SHA1"
        dep_recipe_ref.checksum.checksumValue = spdx_dep_sha1

        doc.externalDocumentRefs.append(dep_recipe_ref)

        doc.add_relationship(
            "%s:%s" % (dep_recipe_ref.externalDocumentId, spdx_dep_recipe.SPDXID),
            "BUILD_DEPENDENCY_OF",
            spdx_recipe
        )

    return dep_recipes

# Keep the multilib arch list out of the task signature.
collect_dep_recipes[vardepsexclude] = "SPDX_MULTILIB_SSTATE_ARCHS"
334 | |||
def collect_dep_sources(d, dep_recipes):
    """Map SHA256 checksum -> DepSource for every SOURCE file belonging to
    a non-native dependency recipe."""
    import oe.sbom

    sources = {}
    for dep in dep_recipes:
        # Don't collect sources from native recipes as they
        # match non-native sources also.
        if recipe_spdx_is_native(d, dep.recipe):
            continue

        wanted_ids = set(dep.recipe.hasFiles)
        for spdx_file in dep.doc.files:
            if spdx_file.SPDXID not in wanted_ids:
                continue
            if "SOURCE" not in spdx_file.fileTypes:
                continue

            # Index by the first SHA256 checksum, if the file has one.
            sha256 = next(
                (c.checksumValue for c in spdx_file.checksums if c.algorithm == "SHA256"),
                None,
            )
            if sha256 is not None:
                sources[sha256] = oe.sbom.DepSource(dep.doc, dep.doc_sha1, dep.recipe, spdx_file)

    return sources
357 | |||
def add_download_packages(d, doc, recipe):
    """Describe each remote SRC_URI entry as an SPDXPackage on *doc*.

    Local "file://" entries are skipped (they are covered by the recipe
    source scan). Each remaining download records its expected fetcher
    checksums (where supported) and its download location, and is related
    to *recipe* as a BUILD_DEPENDENCY_OF.
    """
    # Fixed: removed unused imports (os.path, decodeurl, bb.process) and
    # import oe.spdx_common explicitly since fetch_data_to_uri is used below.
    from bb.fetch2 import CHECKSUM_LIST
    import oe.spdx
    import oe.sbom
    import oe.spdx_common

    for download_idx, src_uri in enumerate(d.getVar('SRC_URI').split()):
        f = bb.fetch2.FetchData(src_uri, d)

        # Skip local files before constructing the SPDX package (the
        # original built and then discarded it).
        if f.type == "file":
            continue

        package = oe.spdx.SPDXPackage()
        package.name = "%s-source-%d" % (d.getVar("PN"), download_idx + 1)
        package.SPDXID = oe.sbom.get_download_spdxid(d, download_idx + 1)

        if f.method.supports_checksum(f):
            for checksum_id in CHECKSUM_LIST:
                # SPDX only allows a fixed set of checksum algorithms.
                if checksum_id.upper() not in oe.spdx.SPDXPackage.ALLOWED_CHECKSUMS:
                    continue

                expected_checksum = getattr(f, "%s_expected" % checksum_id)
                if expected_checksum is None:
                    continue

                c = oe.spdx.SPDXChecksum()
                c.algorithm = checksum_id.upper()
                c.checksumValue = expected_checksum
                package.checksums.append(c)

        package.downloadLocation = oe.spdx_common.fetch_data_to_uri(f, f.name)
        doc.packages.append(package)
        doc.add_relationship(doc, "DESCRIBES", package)
        # In the future, we might be able to do more fancy dependencies,
        # but this should be sufficient for now
        doc.add_relationship(package, "BUILD_DEPENDENCY_OF", recipe)
395 | |||
def get_license_list_version(license_data, d):
    """Return the SPDX license-list version truncated to MAJOR.MINOR.

    Newer versions of the SPDX license list are SemVer
    ("MAJOR.MINOR.MICRO"), but SPDX 2 only uses "MAJOR.MINOR".
    """
    version_parts = license_data["licenseListVersion"].split(".")
    return ".".join(version_parts[:2])
400 | |||
401 | |||
python do_create_spdx() {
    # Generate the recipe-level SPDX document plus one document per binary
    # package, optionally archiving sources/packaged files alongside them.
    from datetime import datetime, timezone
    import oe.sbom
    import oe.spdx
    import oe.spdx_common
    import uuid
    from pathlib import Path
    from contextlib import contextmanager
    import oe.cve_check

    license_data = oe.spdx_common.load_spdx_license_data(d)

    @contextmanager
    def optional_tarfile(name, guard, mode="w"):
        # Yield a zstd-compressed tarfile at *name* when *guard* is true,
        # otherwise yield None so callers can skip archiving uniformly.
        import tarfile
        import bb.compress.zstd

        num_threads = int(d.getVar("BB_NUMBER_THREADS"))

        if guard:
            name.parent.mkdir(parents=True, exist_ok=True)
            with bb.compress.zstd.open(name, mode=mode + "b", num_threads=num_threads) as f:
                with tarfile.open(fileobj=f, mode=mode + "|") as tf:
                    yield tf
        else:
            yield None


    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
    spdx_workdir = Path(d.getVar("SPDXWORK"))
    include_sources = d.getVar("SPDX_INCLUDE_SOURCES") == "1"
    archive_sources = d.getVar("SPDX_ARCHIVE_SOURCES") == "1"
    archive_packaged = d.getVar("SPDX_ARCHIVE_PACKAGED") == "1"
    pkg_arch = d.getVar("SSTATE_PKGARCH")

    creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")

    # --- recipe-level document -------------------------------------------
    doc = oe.spdx.SPDXDocument()

    doc.name = "recipe-" + d.getVar("PN")
    doc.documentNamespace = get_namespace(d, doc.name)
    doc.creationInfo.created = creation_time
    doc.creationInfo.comment = "This document was created by analyzing recipe files during the build."
    doc.creationInfo.licenseListVersion = get_license_list_version(license_data, d)
    doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
    doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
    doc.creationInfo.creators.append("Person: N/A ()")

    recipe = oe.spdx.SPDXPackage()
    recipe.name = d.getVar("PN")
    recipe.versionInfo = d.getVar("SPDX_PACKAGE_VERSION")
    recipe.SPDXID = oe.sbom.get_recipe_spdxid(d)
    recipe.supplier = d.getVar("SPDX_SUPPLIER")
    # Mark native/cross recipes so dependents can tell them apart later
    # (see recipe_spdx_is_native).
    if bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d):
        recipe.annotations.append(create_annotation(d, "isNative"))

    homepage = d.getVar("HOMEPAGE")
    if homepage:
        recipe.homepage = homepage

    license = d.getVar("LICENSE")
    if license:
        recipe.licenseDeclared = convert_license_to_spdx(license, license_data, doc, d)

    summary = d.getVar("SUMMARY")
    if summary:
        recipe.summary = summary

    description = d.getVar("DESCRIPTION")
    if description:
        recipe.description = description

    # Record user-requested variables as annotations on the recipe package.
    if d.getVar("SPDX_CUSTOM_ANNOTATION_VARS"):
        for var in d.getVar('SPDX_CUSTOM_ANNOTATION_VARS').split():
            recipe.annotations.append(create_annotation(d, var + "=" + d.getVar(var)))

    # Some CVEs may be patched during the build process without incrementing the version number,
    # so querying for CVEs based on the CPE id can lead to false positives. To account for this,
    # save the CVEs fixed by patches to source information field in the SPDX.
    patched_cves = oe.cve_check.get_patched_cves(d)
    patched_cves = list(patched_cves)
    patched_cves = ' '.join(patched_cves)
    if patched_cves:
        recipe.sourceInfo = "CVEs fixed: " + patched_cves

    # Attach CPE identifiers as SECURITY external references.
    cpe_ids = oe.cve_check.get_cpe_ids(d.getVar("CVE_PRODUCT"), d.getVar("CVE_VERSION"))
    if cpe_ids:
        for cpe_id in cpe_ids:
            cpe = oe.spdx.SPDXExternalReference()
            cpe.referenceCategory = "SECURITY"
            cpe.referenceType = "http://spdx.org/rdf/references/cpe23Type"
            cpe.referenceLocator = cpe_id
            recipe.externalRefs.append(cpe)

    doc.packages.append(recipe)
    doc.add_relationship(doc, "DESCRIBES", recipe)

    add_download_packages(d, doc, recipe)

    # --- source file scan (optional) -------------------------------------
    if oe.spdx_common.process_sources(d) and include_sources:
        recipe_archive = deploy_dir_spdx / "recipes" / (doc.name + ".tar.zst")
        with optional_tarfile(recipe_archive, archive_sources) as archive:
            oe.spdx_common.get_patched_src(d)

            add_package_files(
                d,
                doc,
                recipe,
                spdx_workdir,
                lambda file_counter: "SPDXRef-SourceFile-%s-%d" % (d.getVar("PN"), file_counter),
                lambda filepath: ["SOURCE"],
                ignore_dirs=[".git"],
                ignore_top_level_dirs=["temp"],
                archive=archive,
            )

            if archive is not None:
                recipe.packageFileName = str(recipe_archive.name)

    dep_recipes = collect_dep_recipes(d, doc, recipe)

    # Write the recipe document and keep its SHA1 for external references.
    doc_sha1 = oe.sbom.write_doc(d, doc, pkg_arch, "recipes", indent=get_json_indent(d))
    dep_recipes.append(oe.sbom.DepRecipe(doc, doc_sha1, recipe))

    recipe_ref = oe.spdx.SPDXExternalDocumentRef()
    recipe_ref.externalDocumentId = "DocumentRef-recipe-" + recipe.name
    recipe_ref.spdxDocument = doc.documentNamespace
    recipe_ref.checksum.algorithm = "SHA1"
    recipe_ref.checksum.checksumValue = doc_sha1

    sources = collect_dep_sources(d, dep_recipes)
    # Reuse LicenseRef IDs already minted on the recipe document so the
    # package documents reference them instead of duplicating them.
    found_licenses = {license.name:recipe_ref.externalDocumentId + ":" + license.licenseId for license in doc.hasExtractedLicensingInfos}

    # --- per-package documents (skipped for native/cross recipes) --------
    if not recipe_spdx_is_native(d, recipe):
        bb.build.exec_func("read_subpackage_metadata", d)

        pkgdest = Path(d.getVar("PKGDEST"))
        for package in d.getVar("PACKAGES").split():
            if not oe.packagedata.packaged(package, d):
                continue

            package_doc = oe.spdx.SPDXDocument()
            pkg_name = d.getVar("PKG:%s" % package) or package
            package_doc.name = pkg_name
            package_doc.documentNamespace = get_namespace(d, package_doc.name)
            package_doc.creationInfo.created = creation_time
            package_doc.creationInfo.comment = "This document was created by analyzing packages created during the build."
            package_doc.creationInfo.licenseListVersion = get_license_list_version(license_data, d)
            package_doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
            package_doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
            package_doc.creationInfo.creators.append("Person: N/A ()")
            package_doc.externalDocumentRefs.append(recipe_ref)

            # Per-package LICENSE override falls back to the recipe LICENSE.
            package_license = d.getVar("LICENSE:%s" % package) or d.getVar("LICENSE")

            spdx_package = oe.spdx.SPDXPackage()

            spdx_package.SPDXID = oe.sbom.get_package_spdxid(pkg_name)
            spdx_package.name = pkg_name
            spdx_package.versionInfo = d.getVar("SPDX_PACKAGE_VERSION")
            spdx_package.licenseDeclared = convert_license_to_spdx(package_license, license_data, package_doc, d, found_licenses)
            spdx_package.supplier = d.getVar("SPDX_SUPPLIER")

            package_doc.packages.append(spdx_package)

            package_doc.add_relationship(spdx_package, "GENERATED_FROM", "%s:%s" % (recipe_ref.externalDocumentId, recipe.SPDXID))
            package_doc.add_relationship(package_doc, "DESCRIBES", spdx_package)

            package_archive = deploy_dir_spdx / "packages" / (package_doc.name + ".tar.zst")
            with optional_tarfile(package_archive, archive_packaged) as archive:
                package_files = add_package_files(
                    d,
                    package_doc,
                    spdx_package,
                    pkgdest / package,
                    lambda file_counter: oe.sbom.get_packaged_file_spdxid(pkg_name, file_counter),
                    lambda filepath: ["BINARY"],
                    ignore_top_level_dirs=['CONTROL', 'DEBIAN'],
                    archive=archive,
                )

                if archive is not None:
                    spdx_package.packageFileName = str(package_archive.name)

            add_package_sources_from_debug(d, package_doc, spdx_package, package, package_files, sources)

            oe.sbom.write_doc(d, package_doc, pkg_arch, "packages", indent=get_json_indent(d))
}
# Thread count affects compression only, not output content.
do_create_spdx[vardepsexclude] += "BB_NUMBER_THREADS"
# NOTE: depending on do_unpack is a hack that is necessary to get its
# dependencies in order to archive the source
addtask do_create_spdx after do_package do_packagedata do_unpack do_collect_spdx_deps before do_populate_sdk do_build do_rm_work

# Share the generated SPDX documents via shared state.
SSTATETASKS += "do_create_spdx"
do_create_spdx[sstate-inputdirs] = "${SPDXDEPLOY}"
do_create_spdx[sstate-outputdirs] = "${DEPLOY_DIR_SPDX}"
597 | |||
# Standard sstate setscene handler for do_create_spdx.
python do_create_spdx_setscene () {
    sstate_setscene(d)
}
addtask do_create_spdx_setscene
602 | |||
do_create_spdx[dirs] = "${SPDXWORK}"
do_create_spdx[cleandirs] = "${SPDXDEPLOY} ${SPDXWORK}"
# Source processing needs the patch tooling and per-source dependencies.
do_create_spdx[depends] += " \
    ${PATCHDEPENDENCY} \
    ${@create_spdx_source_deps(d)} \
"
609 | |||
python do_create_runtime_spdx() {
    # Generate one "runtime-<pkg>" SPDX document per packaged package,
    # amending the package document with RUNTIME_DEPENDENCY_OF
    # relationships derived from RDEPENDS.
    from datetime import datetime, timezone
    import oe.sbom
    import oe.spdx
    import oe.spdx_common
    import oe.packagedata
    from pathlib import Path

    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
    spdx_deploy = Path(d.getVar("SPDXRUNTIMEDEPLOY"))
    is_native = bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d)

    creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")

    license_data = oe.spdx_common.load_spdx_license_data(d)

    providers = oe.spdx_common.collect_package_providers(d)
    pkg_arch = d.getVar("SSTATE_PKGARCH")
    package_archs = d.getVar("SPDX_MULTILIB_SSTATE_ARCHS").split()
    package_archs.reverse()

    if not is_native:
        bb.build.exec_func("read_subpackage_metadata", d)

    # Cache dependency documents so each is loaded at most once per recipe.
    dep_package_cache = {}

    # NOTE(review): pkgdest appears unused in this task.
    pkgdest = Path(d.getVar("PKGDEST"))
    for package in d.getVar("PACKAGES").split():
        # Per-package datastore copy so OVERRIDES picks up package-specific
        # values (e.g. RDEPENDS:<pkg>).
        localdata = bb.data.createCopy(d)
        pkg_name = d.getVar("PKG:%s" % package) or package
        localdata.setVar("PKG", pkg_name)
        localdata.setVar('OVERRIDES', d.getVar("OVERRIDES", False) + ":" + package)

        if not oe.packagedata.packaged(package, localdata):
            continue

        pkg_spdx_path = oe.sbom.doc_path(deploy_dir_spdx, pkg_name, pkg_arch, "packages")

        package_doc, package_doc_sha1 = oe.sbom.read_doc(pkg_spdx_path)

        # Find the SPDXPackage for this package inside its own document.
        for p in package_doc.packages:
            if p.name == pkg_name:
                spdx_package = p
                break
        else:
            bb.fatal("Package '%s' not found in %s" % (pkg_name, pkg_spdx_path))

        runtime_doc = oe.spdx.SPDXDocument()
        runtime_doc.name = "runtime-" + pkg_name
        runtime_doc.documentNamespace = get_namespace(localdata, runtime_doc.name)
        runtime_doc.creationInfo.created = creation_time
        runtime_doc.creationInfo.comment = "This document was created by analyzing package runtime dependencies."
        runtime_doc.creationInfo.licenseListVersion = get_license_list_version(license_data, d)
        runtime_doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
        runtime_doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
        runtime_doc.creationInfo.creators.append("Person: N/A ()")

        package_ref = oe.spdx.SPDXExternalDocumentRef()
        package_ref.externalDocumentId = "DocumentRef-package-" + package
        package_ref.spdxDocument = package_doc.documentNamespace
        package_ref.checksum.algorithm = "SHA1"
        package_ref.checksum.checksumValue = package_doc_sha1

        runtime_doc.externalDocumentRefs.append(package_ref)

        # The runtime document amends the package document.
        runtime_doc.add_relationship(
            runtime_doc.SPDXID,
            "AMENDS",
            "%s:%s" % (package_ref.externalDocumentId, package_doc.SPDXID)
        )

        deps = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS") or "")
        seen_deps = set()
        for dep, _ in deps.items():
            if dep in seen_deps:
                continue

            if dep not in providers:
                continue

            (dep, dep_hashfn) = providers[dep]

            if not oe.packagedata.packaged(dep, localdata):
                continue

            dep_pkg_data = oe.packagedata.read_subpkgdata_dict(dep, d)
            dep_pkg = dep_pkg_data["PKG"]

            if dep in dep_package_cache:
                (dep_spdx_package, dep_package_ref) = dep_package_cache[dep]
            else:
                # First time this dependency is seen: load its SPDX
                # document and build the external reference once.
                dep_path = oe.sbom.doc_find_by_hashfn(deploy_dir_spdx, package_archs, dep_pkg, dep_hashfn)
                if not dep_path:
                    bb.fatal("No SPDX file found for package %s, %s" % (dep_pkg, dep_hashfn))

                spdx_dep_doc, spdx_dep_sha1 = oe.sbom.read_doc(dep_path)

                for pkg in spdx_dep_doc.packages:
                    if pkg.name == dep_pkg:
                        dep_spdx_package = pkg
                        break
                else:
                    bb.fatal("Package '%s' not found in %s" % (dep_pkg, dep_path))

                dep_package_ref = oe.spdx.SPDXExternalDocumentRef()
                dep_package_ref.externalDocumentId = "DocumentRef-runtime-dependency-" + spdx_dep_doc.name
                dep_package_ref.spdxDocument = spdx_dep_doc.documentNamespace
                dep_package_ref.checksum.algorithm = "SHA1"
                dep_package_ref.checksum.checksumValue = spdx_dep_sha1

                dep_package_cache[dep] = (dep_spdx_package, dep_package_ref)

            runtime_doc.externalDocumentRefs.append(dep_package_ref)

            runtime_doc.add_relationship(
                "%s:%s" % (dep_package_ref.externalDocumentId, dep_spdx_package.SPDXID),
                "RUNTIME_DEPENDENCY_OF",
                "%s:%s" % (package_ref.externalDocumentId, spdx_package.SPDXID)
            )
            seen_deps.add(dep)

        oe.sbom.write_doc(d, runtime_doc, pkg_arch, "runtime", spdx_deploy, indent=get_json_indent(d))
}
733 | |||
# Keep OVERRIDES and the multilib arch list out of the task signature.
do_create_runtime_spdx[vardepsexclude] += "OVERRIDES SPDX_MULTILIB_SSTATE_ARCHS"

addtask do_create_runtime_spdx after do_create_spdx before do_build do_rm_work
# Share the runtime SPDX documents via shared state.
SSTATETASKS += "do_create_runtime_spdx"
do_create_runtime_spdx[sstate-inputdirs] = "${SPDXRUNTIMEDEPLOY}"
do_create_runtime_spdx[sstate-outputdirs] = "${DEPLOY_DIR_SPDX}"
740 | |||
# Standard sstate setscene handler for do_create_runtime_spdx.
python do_create_runtime_spdx_setscene () {
    sstate_setscene(d)
}
addtask do_create_runtime_spdx_setscene
745 | |||
do_create_runtime_spdx[dirs] = "${SPDXRUNTIMEDEPLOY}"
do_create_runtime_spdx[cleandirs] = "${SPDXRUNTIMEDEPLOY}"
# Runtime dependencies need the SPDX of every package in RDEPENDS.
do_create_runtime_spdx[rdeptask] = "do_create_spdx"

# Image builds require the SPDX output of everything installed.
do_rootfs[recrdeptask] += "do_create_spdx do_create_runtime_spdx"
do_rootfs[cleandirs] += "${SPDXIMAGEWORK}"

ROOTFS_POSTUNINSTALL_COMMAND =+ "image_combine_spdx"

# SDK builds likewise combine the SPDX output for host and target parts.
do_populate_sdk[recrdeptask] += "do_create_spdx do_create_runtime_spdx"
do_populate_sdk[cleandirs] += "${SPDXSDKWORK}"
POPULATE_SDK_POST_HOST_COMMAND:append:task-populate-sdk = " sdk_host_combine_spdx"
POPULATE_SDK_POST_TARGET_COMMAND:append:task-populate-sdk = " sdk_target_combine_spdx"
759 | |||
python image_combine_spdx() {
    import os
    import oe.sbom
    from pathlib import Path
    from oe.rootfs import image_list_installed_packages

    # Combine the per-package SPDX documents of everything installed in the
    # image into a single <IMAGE_NAME>.spdx.tar.zst in the deploy directory,
    # then place the usual IMAGE_LINK_NAME symlink beside it.
    name = d.getVar("IMAGE_NAME")
    link_name = d.getVar("IMAGE_LINK_NAME")
    deploydir = Path(d.getVar("IMGDEPLOYDIR"))
    spdxid = oe.sbom.get_image_spdxid(name)
    installed = image_list_installed_packages(d)
    workdir = Path(d.getVar("SPDXIMAGEWORK"))

    combine_spdx(d, name, deploydir, spdxid, installed, workdir)

    def link_artifact(target, suffix):
        # Mirror a deployed artifact under the image link name, if one is set
        # and it does not already point at the target itself.
        if not link_name:
            return
        link = deploydir / (link_name + suffix)
        if link != target:
            link.symlink_to(os.path.relpath(target, link.parent))

    link_artifact(deploydir / (name + ".spdx.tar.zst"), ".spdx.tar.zst")
}
783 | |||
# POPULATE_SDK_POST_HOST_COMMAND hook: combine SPDX documents for the host
# half of the SDK.
python sdk_host_combine_spdx() {
    sdk_combine_spdx(d, "host")
}
787 | |||
# POPULATE_SDK_POST_TARGET_COMMAND hook: combine SPDX documents for the
# target half of the SDK.
python sdk_target_combine_spdx() {
    sdk_combine_spdx(d, "target")
}
791 | |||
def sdk_combine_spdx(d, sdk_type):
    """Combine the SPDX documents for one half of the SDK.

    d        -- the BitBake datastore
    sdk_type -- "host" or "target"; selects which installed-package list to
                combine and is appended to the output name
    """
    import oe.sbom
    from pathlib import Path
    from oe.sdk import sdk_list_installed_packages

    name = "%s-%s" % (d.getVar("TOOLCHAIN_OUTPUTNAME"), sdk_type)
    deploydir = Path(d.getVar("SDKDEPLOYDIR"))
    spdxid = oe.sbom.get_sdk_spdxid(name)
    # Only the target half lists target packages; the host half lists the
    # nativesdk packages.
    installed = sdk_list_installed_packages(d, sdk_type == "target")
    workdir = Path(d.getVar('SPDXSDKWORK'))

    combine_spdx(d, name, deploydir, spdxid, installed, workdir)
802 | |||
def combine_spdx(d, rootfs_name, rootfs_deploydir, rootfs_spdxid, packages, spdx_workdir):
    """Create the top-level SPDX document for a rootfs/SDK and archive it.

    Writes <rootfs_name>.spdx.json into spdx_workdir, then bundles that
    document together with every SPDX document it transitively references
    into <rootfs_name>.spdx.tar.zst under rootfs_deploydir, plus an
    index.json describing the archive contents.

    d                -- the BitBake datastore
    rootfs_name      -- base name for the document and the output archive
    rootfs_deploydir -- Path the .spdx.tar.zst archive is written to
    rootfs_spdxid    -- SPDXID assigned to the top-level image package
    packages         -- mapping of installed package names (only the keys
                        are used); may be empty
    spdx_workdir     -- Path scratch directory for the intermediate JSON

    Aborts the build (bb.fatal) if any referenced SPDX document cannot be
    located in DEPLOY_DIR_SPDX.
    """
    import os
    import oe.spdx
    import oe.sbom
    import oe.spdx_common
    import io
    import json
    from datetime import timezone, datetime
    from pathlib import Path
    import tarfile
    import bb.compress.zstd

    license_data = oe.spdx_common.load_spdx_license_data(d)

    # providers maps an installed package name to (provider package name,
    # task hash filename), used below to locate the per-package documents.
    providers = oe.spdx_common.collect_package_providers(d)
    package_archs = d.getVar("SPDX_MULTILIB_SSTATE_ARCHS").split()
    # Reversed so doc_find_by_hashfn/doc_find_by_namespace consider the
    # later-listed archs first — presumably most-specific first; matches the
    # search order used elsewhere in this class.
    package_archs.reverse()

    creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
    source_date_epoch = d.getVar("SOURCE_DATE_EPOCH")

    # Top-level document describing the image/SDK as a whole.
    doc = oe.spdx.SPDXDocument()
    doc.name = rootfs_name
    doc.documentNamespace = get_namespace(d, doc.name)
    doc.creationInfo.created = creation_time
    doc.creationInfo.comment = "This document was created by analyzing the source of the Yocto recipe during the build."
    doc.creationInfo.licenseListVersion = get_license_list_version(license_data, d)
    doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
    doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
    doc.creationInfo.creators.append("Person: N/A ()")

    # Single SPDX package representing the image/SDK itself; every installed
    # package is related to it via CONTAINS below.
    image = oe.spdx.SPDXPackage()
    image.name = d.getVar("PN")
    image.versionInfo = d.getVar("SPDX_PACKAGE_VERSION")
    image.SPDXID = rootfs_spdxid
    image.supplier = d.getVar("SPDX_SUPPLIER")

    doc.packages.append(image)

    if packages:
        for name in sorted(packages.keys()):
            if name not in providers:
                bb.fatal("Unable to find SPDX provider for '%s'" % name)

            pkg_name, pkg_hashfn = providers[name]

            # Locate the per-package SPDX document produced by do_create_spdx.
            pkg_spdx_path = oe.sbom.doc_find_by_hashfn(deploy_dir_spdx, package_archs, pkg_name, pkg_hashfn)
            if not pkg_spdx_path:
                bb.fatal("No SPDX file found for package %s, %s" % (pkg_name, pkg_hashfn))

            pkg_doc, pkg_doc_sha1 = oe.sbom.read_doc(pkg_spdx_path)

            # Reference the package via an external document ref and record
            # that the image CONTAINS it.
            for p in pkg_doc.packages:
                if p.name == name:
                    pkg_ref = oe.spdx.SPDXExternalDocumentRef()
                    pkg_ref.externalDocumentId = "DocumentRef-%s" % pkg_doc.name
                    pkg_ref.spdxDocument = pkg_doc.documentNamespace
                    pkg_ref.checksum.algorithm = "SHA1"
                    pkg_ref.checksum.checksumValue = pkg_doc_sha1

                    doc.externalDocumentRefs.append(pkg_ref)
                    doc.add_relationship(image, "CONTAINS", "%s:%s" % (pkg_ref.externalDocumentId, p.SPDXID))
                    break
            else:
                # for/else: no package in the document matched the name.
                bb.fatal("Unable to find package with name '%s' in SPDX file %s" % (name, pkg_spdx_path))

            # Also pull in the runtime-dependency document written by
            # do_create_runtime_spdx for this package.
            runtime_spdx_path = oe.sbom.doc_find_by_hashfn(deploy_dir_spdx, package_archs, "runtime-" + name, pkg_hashfn)
            if not runtime_spdx_path:
                bb.fatal("No runtime SPDX document found for %s, %s" % (name, pkg_hashfn))

            runtime_doc, runtime_doc_sha1 = oe.sbom.read_doc(runtime_spdx_path)

            runtime_ref = oe.spdx.SPDXExternalDocumentRef()
            runtime_ref.externalDocumentId = "DocumentRef-%s" % runtime_doc.name
            runtime_ref.spdxDocument = runtime_doc.documentNamespace
            runtime_ref.checksum.algorithm = "SHA1"
            runtime_ref.checksum.checksumValue = runtime_doc_sha1

            # "OTHER" isn't ideal here, but I can't find a relationship that makes sense
            doc.externalDocumentRefs.append(runtime_ref)
            doc.add_relationship(
                image,
                "OTHER",
                "%s:%s" % (runtime_ref.externalDocumentId, runtime_doc.SPDXID),
                comment="Runtime dependencies for %s" % name
            )
    bb.utils.mkdirhier(spdx_workdir)
    image_spdx_path = spdx_workdir / (rootfs_name + ".spdx.json")

    # Write the top-level document to the work directory; it becomes the root
    # of the recursive collection below.
    with image_spdx_path.open("wb") as f:
        doc.to_json(f, sort_keys=True, indent=get_json_indent(d))

    num_threads = int(d.getVar("BB_NUMBER_THREADS"))

    # Tracks both document paths and document namespaces already added, so
    # shared dependencies are archived only once.
    visited_docs = set()

    index = {"documents": []}

    # Stream every reachable SPDX document into a zstd-compressed tar.
    spdx_tar_path = rootfs_deploydir / (rootfs_name + ".spdx.tar.zst")
    with bb.compress.zstd.open(spdx_tar_path, "w", num_threads=num_threads) as f:
        with tarfile.open(fileobj=f, mode="w|") as tar:
            def collect_spdx_document(path):
                # Recursively add `path` and all documents it references via
                # externalDocumentRefs to the tar and to the index.
                nonlocal tar
                nonlocal deploy_dir_spdx
                nonlocal source_date_epoch
                nonlocal index

                if path in visited_docs:
                    return

                visited_docs.add(path)

                with path.open("rb") as f:
                    doc, sha1 = oe.sbom.read_doc(f)
                    # Rewind so the tar entry captures the whole file after
                    # read_doc consumed it.
                    f.seek(0)

                    # The same document can be reachable under different
                    # paths; dedup by namespace as well.
                    if doc.documentNamespace in visited_docs:
                        return

                    bb.note("Adding SPDX document %s" % path)
                    visited_docs.add(doc.documentNamespace)
                    info = tar.gettarinfo(fileobj=f)

                    # Normalize ownership for reproducible archives.
                    info.name = doc.name + ".spdx.json"
                    info.uid = 0
                    info.gid = 0
                    info.uname = "root"
                    info.gname = "root"

                    # Clamp mtimes to SOURCE_DATE_EPOCH for reproducibility.
                    if source_date_epoch is not None and info.mtime > int(source_date_epoch):
                        info.mtime = int(source_date_epoch)

                    tar.addfile(info, f)

                    index["documents"].append({
                        "filename": info.name,
                        "documentNamespace": doc.documentNamespace,
                        "sha1": sha1,
                    })

                # Recurse into every externally referenced document.
                for ref in doc.externalDocumentRefs:
                    ref_path = oe.sbom.doc_find_by_namespace(deploy_dir_spdx, package_archs, ref.spdxDocument)
                    if not ref_path:
                        bb.fatal("Cannot find any SPDX file for document %s" % ref.spdxDocument)
                    collect_spdx_document(ref_path)

            collect_spdx_document(image_spdx_path)

            # Deterministic index ordering for reproducible output.
            index["documents"].sort(key=lambda x: x["filename"])

            index_str = io.BytesIO(json.dumps(
                index,
                sort_keys=True,
                indent=get_json_indent(d),
            ).encode("utf-8"))

            # Append the index as the final archive member, with the same
            # normalized ownership as the documents.
            info = tarfile.TarInfo()
            info.name = "index.json"
            info.size = len(index_str.getvalue())
            info.uid = 0
            info.gid = 0
            info.uname = "root"
            info.gname = "root"

            tar.addfile(info, fileobj=index_str)
969 | |||
970 | combine_spdx[vardepsexclude] += "BB_NUMBER_THREADS SPDX_MULTILIB_SSTATE_ARCHS" | ||