summaryrefslogtreecommitdiffstats
path: root/meta/classes/create-spdx-2.2.bbclass
diff options
context:
space:
mode:
Diffstat (limited to 'meta/classes/create-spdx-2.2.bbclass')
-rw-r--r--meta/classes/create-spdx-2.2.bbclass1164
1 files changed, 1164 insertions, 0 deletions
diff --git a/meta/classes/create-spdx-2.2.bbclass b/meta/classes/create-spdx-2.2.bbclass
new file mode 100644
index 0000000000..7c8a0b8b0f
--- /dev/null
+++ b/meta/classes/create-spdx-2.2.bbclass
@@ -0,0 +1,1164 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7DEPLOY_DIR_SPDX ??= "${DEPLOY_DIR}/spdx"
8
# The product name that the CVE database uses. Defaults to BPN, but may need to
# be overridden per recipe (for example tiff.bb sets CVE_PRODUCT=libtiff).
11CVE_PRODUCT ??= "${BPN}"
12CVE_VERSION ??= "${PV}"
13
14SPDXDIR ??= "${WORKDIR}/spdx"
15SPDXDEPLOY = "${SPDXDIR}/deploy"
16SPDXWORK = "${SPDXDIR}/work"
17SPDXIMAGEWORK = "${SPDXDIR}/image-work"
18SPDXSDKWORK = "${SPDXDIR}/sdk-work"
19SPDXDEPS = "${SPDXDIR}/deps.json"
20
21SPDX_TOOL_NAME ??= "oe-spdx-creator"
22SPDX_TOOL_VERSION ??= "1.0"
23
24SPDXRUNTIMEDEPLOY = "${SPDXDIR}/runtime-deploy"
25
26SPDX_INCLUDE_SOURCES ??= "0"
27SPDX_ARCHIVE_SOURCES ??= "0"
28SPDX_ARCHIVE_PACKAGED ??= "0"
29
30SPDX_UUID_NAMESPACE ??= "sbom.openembedded.org"
31SPDX_NAMESPACE_PREFIX ??= "http://spdx.org/spdxdocs"
32SPDX_PRETTY ??= "0"
33
34SPDX_LICENSES ??= "${COREBASE}/meta/files/spdx-licenses.json"
35
36SPDX_CUSTOM_ANNOTATION_VARS ??= ""
37
38SPDX_ORG ??= "OpenEmbedded ()"
39SPDX_SUPPLIER ??= "Organization: ${SPDX_ORG}"
40SPDX_SUPPLIER[doc] = "The SPDX PackageSupplier field for SPDX packages created from \
41 this recipe. For SPDX documents create using this class during the build, this \
42 is the contact information for the person or organization who is doing the \
43 build."
44
def extract_licenses(filename):
    """Scan the beginning of *filename* for SPDX-License-Identifier tags.

    Only the first 15000 bytes are examined. Returns a list of license
    identifier strings, or None when none are found or the file cannot be
    read.
    """
    import re

    lic_regex = re.compile(rb'^\W*SPDX-License-Identifier:\s*([ \w\d.()+-]+?)(?:\s+\W*)?$', re.MULTILINE)

    try:
        with open(filename, 'rb') as f:
            size = min(15000, os.stat(filename).st_size)
            txt = f.read(size)
            licenses = lic_regex.findall(txt)
            if licenses:
                # The regex restricts identifiers to ASCII, so this decode
                # cannot fail.
                ascii_licenses = [lic.decode('ascii') for lic in licenses]
                return ascii_licenses
    except Exception as e:
        # Fix: report which file failed; the message previously did not
        # identify the file.
        bb.warn(f"Exception reading {filename}: {e}")
    return None
def get_doc_namespace(d, doc):
    """Return the unique SPDX document namespace URI for *doc*.

    The namespace is deterministic: a UUIDv5 derived from
    SPDX_UUID_NAMESPACE and the document name, appended to
    SPDX_NAMESPACE_PREFIX.
    """
    import uuid
    root_uuid = uuid.uuid5(uuid.NAMESPACE_DNS, d.getVar("SPDX_UUID_NAMESPACE"))
    doc_uuid = uuid.uuid5(root_uuid, doc.name)
    return "%s/%s-%s" % (d.getVar("SPDX_NAMESPACE_PREFIX"), doc.name, str(doc_uuid))
66
def create_annotation(d, comment):
    """Return an SPDXAnnotation carrying *comment*, stamped with the current
    UTC time and this tool's name/version as the annotator."""
    from datetime import datetime, timezone

    now = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
    tool_id = "Tool: %s - %s" % (d.getVar("SPDX_TOOL_NAME"), d.getVar("SPDX_TOOL_VERSION"))

    annotation = oe.spdx.SPDXAnnotation()
    annotation.annotationDate = now
    annotation.annotationType = "OTHER"
    annotation.annotator = tool_id
    annotation.comment = comment
    return annotation
77
def recipe_spdx_is_native(d, recipe):
    """Return True when *recipe* carries the "isNative" annotation that
    create_annotation() adds for native/cross recipes."""
    expected_annotator = "Tool: %s - %s" % (d.getVar("SPDX_TOOL_NAME"), d.getVar("SPDX_TOOL_VERSION"))
    for annotation in recipe.annotations:
        if annotation.annotationType != "OTHER":
            continue
        if annotation.annotator == expected_annotator and annotation.comment == "isNative":
            return True
    return False
82
def is_work_shared_spdx(d):
    """Return True when the recipe's sources live in a shared work directory
    (kernel recipes, or anything whose WORKDIR contains 'work-shared')."""
    if bb.data.inherits_class('kernel', d):
        return True
    return 'work-shared' in d.getVar('WORKDIR')
85
def get_json_indent(d):
    """Indent width for emitted SPDX JSON: 2 when SPDX_PRETTY is enabled,
    otherwise None (compact output)."""
    return 2 if d.getVar("SPDX_PRETTY") == "1" else None
90
python() {
    import json
    # Parse the SPDX license registry once and cache it in SPDX_LICENSE_DATA
    # so every task can look licenses up by id without re-reading the file.
    if d.getVar("SPDX_LICENSE_DATA"):
        return

    with open(d.getVar("SPDX_LICENSES"), "r") as f:
        data = json.load(f)
        # Transform the license array to a dictionary
        data["licenses"] = {l["licenseId"]: l for l in data["licenses"]}
        d.setVar("SPDX_LICENSE_DATA", data)
}
102
def convert_license_to_spdx(lic, document, d, existing=None):
    """Convert an OE LICENSE expression *lic* to an SPDX license expression.

    Known SPDX ids pass through; '&'/'|' become AND/OR; CLOSED becomes NONE.
    Unknown licenses are either resolved through *existing* (a mapping of
    license name to an already-known LicenseRef) or get a new
    LicenseRef-<name> whose text is attached to *document* as extracted
    licensing info.
    """
    # Fix: the default used to be a shared mutable dict ({}). It was only
    # ever read, but the mutable-default idiom is unsafe; use a None sentinel.
    if existing is None:
        existing = {}

    license_data = d.getVar("SPDX_LICENSE_DATA")
    extracted = {}

    def add_extracted_license(ident, name):
        # Attach the raw license text for a non-SPDX license to *document*.
        nonlocal document
        from pathlib import Path
        import oe.spdx

        if name in extracted:
            return

        extracted_info = oe.spdx.SPDXExtractedLicensingInfo()
        extracted_info.name = name
        extracted_info.licenseId = ident
        extracted_info.extractedText = None

        if name == "PD":
            # Special-case this.
            extracted_info.extractedText = "Software released to the public domain"
        else:
            # Search for the license in COMMON_LICENSE_DIR and LICENSE_PATH
            for directory in [d.getVar('COMMON_LICENSE_DIR')] + (d.getVar('LICENSE_PATH') or '').split():
                try:
                    with (Path(directory) / name).open(errors="replace") as f:
                        extracted_info.extractedText = f.read()
                        break
                except FileNotFoundError:
                    pass
            if extracted_info.extractedText is None:
                # If it's not SPDX or PD, then NO_GENERIC_LICENSE must be set
                filename = d.getVarFlag('NO_GENERIC_LICENSE', name)
                if filename:
                    filename = d.expand("${S}/" + filename)
                    with open(filename, errors="replace") as f:
                        extracted_info.extractedText = f.read()
                else:
                    bb.fatal("Cannot find any text for license %s" % name)

        extracted[name] = extracted_info
        document.hasExtractedLicensingInfos.append(extracted_info)

    def convert(l):
        # Map one token of the OE expression to its SPDX equivalent.
        if l == "(" or l == ")":
            return l

        if l == "&":
            return "AND"

        if l == "|":
            return "OR"

        if l == "CLOSED":
            return "NONE"

        spdx_license = d.getVarFlag("SPDXLICENSEMAP", l) or l
        if spdx_license in license_data["licenses"]:
            return spdx_license

        try:
            spdx_license = existing[l]
        except KeyError:
            spdx_license = "LicenseRef-" + l
            add_extracted_license(spdx_license, l)

        return spdx_license

    # Tokenize: pad the operators/parens with spaces, then split.
    lic_split = lic.replace("(", " ( ").replace(")", " ) ").replace("|", " | ").replace("&", " & ").split()

    return ' '.join(convert(l) for l in lic_split)
174
def process_sources(d):
    """Decide whether this recipe's sources should be unpacked and scanned.

    Returns False for recipes whose fetch/unpack/patch tasks are deleted or
    whose sources are archived elsewhere (gcc-source); True otherwise.
    """
    pn = d.getVar('PN')
    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
    if pn in assume_provided:
        # Assume-provided recipes are identified by what they PROVIDE.
        for provided in d.getVar("PROVIDES").split():
            if provided != pn:
                pn = provided
                break

    # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
    # so avoid archiving source here.
    if pn.startswith('glibc-locale'):
        return False

    # These recipes have no usable source trees of their own.
    if d.getVar('PN') in ("libtool-cross", "libgcc-initial", "shadow-sysroot"):
        return False

    # We just archive gcc-source for all the gcc related recipes
    if d.getVar('BPN') in ['gcc', 'libgcc']:
        bb.debug(1, 'spdx: There is bug in scan of %s is, do nothing' % pn)
        return False

    return True
201
202
def add_package_files(d, doc, spdx_pkg, topdir, get_spdxid, get_types, *, archive=None, ignore_dirs=None, ignore_top_level_dirs=None):
    """Walk *topdir* and add an SPDXFile to *doc* for every regular file.

    get_spdxid: callable(file_counter) -> SPDXID string for the file.
    get_types: callable(filepath) -> list of SPDX file types (e.g. ["SOURCE"]).
    archive: optional tarfile object; each file is also appended to it with
        normalized ownership and clamped mtimes for reproducibility.
    ignore_dirs / ignore_top_level_dirs: directory names pruned from the walk
        (the latter only at the top level of *topdir*).

    Side effects: appends files and CONTAINS relationships to *doc*, fills
    spdx_pkg.hasFiles and the package verification code. Returns the list of
    created SPDXFile objects.
    """
    from pathlib import Path
    import oe.spdx
    import hashlib

    # Fix: mutable default arguments ([]) replaced with None sentinels.
    if ignore_dirs is None:
        ignore_dirs = []
    if ignore_top_level_dirs is None:
        ignore_top_level_dirs = []

    source_date_epoch = d.getVar("SOURCE_DATE_EPOCH")
    if source_date_epoch:
        source_date_epoch = int(source_date_epoch)

    sha1s = []
    spdx_files = []

    file_counter = 1
    for subdir, dirs, files in os.walk(topdir):
        # Prune ignored directories in place so os.walk does not descend into
        # them. (The loop variable was previously named 'd', shadowing the
        # datastore inside the comprehension.)
        dirs[:] = [dirname for dirname in dirs if dirname not in ignore_dirs]
        if subdir == str(topdir):
            dirs[:] = [dirname for dirname in dirs if dirname not in ignore_top_level_dirs]

        for file in files:
            filepath = Path(subdir) / file
            filename = str(filepath.relative_to(topdir))

            # Symlinks are skipped; only regular files are described.
            if not filepath.is_symlink() and filepath.is_file():
                spdx_file = oe.spdx.SPDXFile()
                spdx_file.SPDXID = get_spdxid(file_counter)
                for t in get_types(filepath):
                    spdx_file.fileTypes.append(t)
                spdx_file.fileName = filename

                if archive is not None:
                    with filepath.open("rb") as f:
                        info = archive.gettarinfo(fileobj=f)
                        info.name = filename
                        # Normalize ownership so archives are reproducible.
                        info.uid = 0
                        info.gid = 0
                        info.uname = "root"
                        info.gname = "root"

                        # Clamp mtimes to SOURCE_DATE_EPOCH for reproducibility.
                        if source_date_epoch is not None and info.mtime > source_date_epoch:
                            info.mtime = source_date_epoch

                        archive.addfile(info, f)

                sha1 = bb.utils.sha1_file(filepath)
                sha1s.append(sha1)
                spdx_file.checksums.append(oe.spdx.SPDXChecksum(
                    algorithm="SHA1",
                    checksumValue=sha1,
                ))
                spdx_file.checksums.append(oe.spdx.SPDXChecksum(
                    algorithm="SHA256",
                    checksumValue=bb.utils.sha256_file(filepath),
                ))

                if "SOURCE" in spdx_file.fileTypes:
                    extracted_lics = extract_licenses(filepath)
                    if extracted_lics:
                        spdx_file.licenseInfoInFiles = extracted_lics

                doc.files.append(spdx_file)
                doc.add_relationship(spdx_pkg, "CONTAINS", spdx_file)
                spdx_pkg.hasFiles.append(spdx_file.SPDXID)

                spdx_files.append(spdx_file)

                file_counter += 1

    # SPDX package verification code: SHA1 over the sorted per-file SHA1s.
    sha1s.sort()
    verifier = hashlib.sha1()
    for v in sha1s:
        verifier.update(v.encode("utf-8"))
    spdx_pkg.packageVerificationCode.packageVerificationCodeValue = verifier.hexdigest()

    return spdx_files
277
278
def add_package_sources_from_debug(d, package_doc, spdx_package, package, package_files, sources):
    """Link packaged binaries to the source files they were built from.

    Uses the debugsrc entries recorded in pkgdata for *package* to find each
    binary's source paths on disk, hashes them, and looks the hashes up in
    *sources* (the SHA256 -> DepSource map built by collect_dep_sources).
    Each match is emitted as a GENERATED_FROM relationship into the
    corresponding dependency document; unresolved sources get NOASSERTION.
    """
    from pathlib import Path
    import hashlib
    import oe.packagedata
    import oe.spdx

    # Locations where the referenced debug sources may exist on disk.
    debug_search_paths = [
        Path(d.getVar('PKGD')),
        Path(d.getVar('STAGING_DIR_TARGET')),
        Path(d.getVar('STAGING_DIR_NATIVE')),
        Path(d.getVar('STAGING_KERNEL_DIR')),
    ]

    pkg_data = oe.packagedata.read_subpkgdata_extended(package, d)

    if pkg_data is None:
        return

    for file_path, file_data in pkg_data["files_info"].items():
        if not "debugsrc" in file_data:
            continue

        # Find the SPDXFile describing this packaged binary; the for/else
        # fires (and aborts) when no entry matches.
        for pkg_file in package_files:
            if file_path.lstrip("/") == pkg_file.fileName.lstrip("/"):
                break
        else:
            bb.fatal("No package file found for %s in %s; SPDX found: %s" % (str(file_path), package,
                " ".join(p.fileName for p in package_files)))
            continue

        for debugsrc in file_data["debugsrc"]:
            ref_id = "NOASSERTION"
            for search in debug_search_paths:
                if debugsrc.startswith("/usr/src/kernel"):
                    debugsrc_path = search / debugsrc.replace('/usr/src/kernel/', '')
                else:
                    debugsrc_path = search / debugsrc.lstrip("/")
                if not debugsrc_path.exists():
                    continue

                file_sha256 = bb.utils.sha256_file(debugsrc_path)

                if file_sha256 in sources:
                    source_file = sources[file_sha256]

                    # Reference the dependency document holding the source,
                    # adding the external document ref on first use.
                    doc_ref = package_doc.find_external_document_ref(source_file.doc.documentNamespace)
                    if doc_ref is None:
                        doc_ref = oe.spdx.SPDXExternalDocumentRef()
                        doc_ref.externalDocumentId = "DocumentRef-dependency-" + source_file.doc.name
                        doc_ref.spdxDocument = source_file.doc.documentNamespace
                        doc_ref.checksum.algorithm = "SHA1"
                        doc_ref.checksum.checksumValue = source_file.doc_sha1
                        package_doc.externalDocumentRefs.append(doc_ref)

                    ref_id = "%s:%s" % (doc_ref.externalDocumentId, source_file.file.SPDXID)
                else:
                    bb.debug(1, "Debug source %s with SHA256 %s not found in any dependency" % (str(debugsrc_path), file_sha256))
                # First existing path wins; stop searching.
                break
            else:
                bb.debug(1, "Debug source %s not found" % debugsrc)

            package_doc.add_relationship(pkg_file, "GENERATED_FROM", ref_id, comment=debugsrc)
341
342add_package_sources_from_debug[vardepsexclude] += "STAGING_KERNEL_DIR"
343
def collect_dep_recipes(d, doc, spdx_recipe):
    """Record build-dependency relationships for this recipe.

    Reads the dependency list written by do_collect_spdx_deps, locates each
    dependency's recipe-level SPDX document in DEPLOY_DIR_SPDX, adds an
    external document ref to *doc*, and emits a BUILD_DEPENDENCY_OF
    relationship against *spdx_recipe*.

    Returns a list of oe.sbom.DepRecipe entries for the found dependencies.
    """
    import json
    from pathlib import Path
    import oe.sbom
    import oe.spdx

    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
    spdx_deps_file = Path(d.getVar("SPDXDEPS"))
    # Reversed arch list controls the search order used by
    # doc_find_by_hashfn below — presumably most specific arch first;
    # verify against oe.sbom if this matters.
    package_archs = d.getVar("SSTATE_ARCHS").split()
    package_archs.reverse()

    dep_recipes = []

    with spdx_deps_file.open("r") as f:
        deps = json.load(f)

    for dep_pn, dep_hashfn, in_taskhash in deps:
        # If this dependency is not calculated in the taskhash skip it.
        # Otherwise, it can result in broken links since this task won't
        # rebuild and see the new SPDX ID if the dependency changes
        if not in_taskhash:
            continue

        dep_recipe_path = oe.sbom.doc_find_by_hashfn(deploy_dir_spdx, package_archs, "recipe-" + dep_pn, dep_hashfn)
        if not dep_recipe_path:
            bb.fatal("Cannot find any SPDX file for recipe %s, %s" % (dep_pn, dep_hashfn))

        spdx_dep_doc, spdx_dep_sha1 = oe.sbom.read_doc(dep_recipe_path)

        # Find the SPDX package that describes the dependency recipe itself;
        # for/else: skip this dependency when no such package exists.
        for pkg in spdx_dep_doc.packages:
            if pkg.name == dep_pn:
                spdx_dep_recipe = pkg
                break
        else:
            continue

        dep_recipes.append(oe.sbom.DepRecipe(spdx_dep_doc, spdx_dep_sha1, spdx_dep_recipe))

        dep_recipe_ref = oe.spdx.SPDXExternalDocumentRef()
        dep_recipe_ref.externalDocumentId = "DocumentRef-dependency-" + spdx_dep_doc.name
        dep_recipe_ref.spdxDocument = spdx_dep_doc.documentNamespace
        dep_recipe_ref.checksum.algorithm = "SHA1"
        dep_recipe_ref.checksum.checksumValue = spdx_dep_sha1

        doc.externalDocumentRefs.append(dep_recipe_ref)

        doc.add_relationship(
            "%s:%s" % (dep_recipe_ref.externalDocumentId, spdx_dep_recipe.SPDXID),
            "BUILD_DEPENDENCY_OF",
            spdx_recipe
        )

    return dep_recipes
397
398collect_dep_recipes[vardepsexclude] = "SSTATE_ARCHS"
399
def collect_dep_sources(d, dep_recipes):
    """Build a map of SHA256 checksum -> oe.sbom.DepSource for every SOURCE
    file listed by the dependency recipes in *dep_recipes*."""
    import oe.sbom

    sources = {}
    for dep in dep_recipes:
        # Don't collect sources from native recipes as they
        # match non-native sources also.
        if recipe_spdx_is_native(d, dep.recipe):
            continue

        wanted_ids = set(dep.recipe.hasFiles)
        for dep_file in dep.doc.files:
            # Only consider files that belong to the dependency's recipe
            # package and are marked as source files.
            if dep_file.SPDXID not in wanted_ids:
                continue
            if "SOURCE" not in dep_file.fileTypes:
                continue

            for checksum in dep_file.checksums:
                if checksum.algorithm == "SHA256":
                    sources[checksum.checksumValue] = oe.sbom.DepSource(dep.doc, dep.doc_sha1, dep.recipe, dep_file)
                    break

    return sources
422
def add_download_packages(d, doc, recipe):
    """Add an SPDXPackage to *doc* for each remote download in SRC_URI.

    The download location records scheme[+proto]://host/path plus the source
    revision (when the fetcher supports one) and any declared checksums.
    Each download package gets a BUILD_DEPENDENCY_OF relationship to
    *recipe*.
    """
    # Fix: dropped unused imports (os.path, bb.process, decodeurl). The
    # remaining from-import still loads bb.fetch2, which FetchData needs.
    from bb.fetch2 import CHECKSUM_LIST
    import oe.spdx
    import oe.sbom

    for download_idx, src_uri in enumerate(d.getVar('SRC_URI').split()):
        f = bb.fetch2.FetchData(src_uri, d)

        for name in f.names:
            # file:// URIs are local artifacts, not downloads. Skip before
            # constructing a package (previously one was built and discarded).
            if f.type == "file":
                continue

            package = oe.spdx.SPDXPackage()
            package.name = "%s-source-%d" % (d.getVar("PN"), download_idx + 1)
            package.SPDXID = oe.sbom.get_download_spdxid(d, download_idx + 1)

            # Reconstruct a canonical URI: scheme[+proto]://host/path[@rev]
            uri = f.type
            proto = getattr(f, "proto", None)
            if proto is not None:
                uri = uri + "+" + proto
            uri = uri + "://" + f.host + f.path

            if f.method.supports_srcrev():
                uri = uri + "@" + f.revisions[name]

            if f.method.supports_checksum(f):
                for checksum_id in CHECKSUM_LIST:
                    # Only record checksum algorithms SPDX permits.
                    if checksum_id.upper() not in oe.spdx.SPDXPackage.ALLOWED_CHECKSUMS:
                        continue

                    expected_checksum = getattr(f, "%s_expected" % checksum_id)
                    if expected_checksum is None:
                        continue

                    c = oe.spdx.SPDXChecksum()
                    c.algorithm = checksum_id.upper()
                    c.checksumValue = expected_checksum
                    package.checksums.append(c)

            package.downloadLocation = uri
            doc.packages.append(package)
            doc.add_relationship(doc, "DESCRIBES", package)
            # In the future, we might be able to do more fancy dependencies,
            # but this should be sufficient for now
            doc.add_relationship(package, "BUILD_DEPENDENCY_OF", recipe)
470
def collect_direct_deps(d, dep_task):
    """Return this recipe's direct dependencies on *dep_task*.

    Scans BB_TASKDEPDATA for the currently running task and collects, for
    each direct dependency on *dep_task* from another recipe, a tuple of
    (PN, hash filename, dependency-is-in-taskhash). Result is sorted for
    deterministic output.
    """
    current_task = "do_" + d.getVar("BB_CURRENTTASK")
    pn = d.getVar("PN")
    taskdepdata = d.getVar("BB_TASKDEPDATA", False)

    this_dep = None
    for candidate in taskdepdata.values():
        if candidate[0] == pn and candidate[1] == current_task:
            this_dep = candidate
            break
    if this_dep is None:
        bb.fatal(f"Unable to find this {pn}:{current_task} in taskdepdata")

    # Entry layout: [0]=PN, [1]=task, [3]=dep names, [7]=hashfn,
    # [8]=names included in the taskhash.
    deps = {
        (dep_data[0], dep_data[7], dep_name in this_dep[8])
        for dep_name in this_dep[3]
        for dep_data in (taskdepdata[dep_name],)
        if dep_data[1] == dep_task and dep_data[0] != pn
    }
    return sorted(deps)
490
491collect_direct_deps[vardepsexclude] += "BB_TASKDEPDATA"
492collect_direct_deps[vardeps] += "DEPENDS"
493
python do_collect_spdx_deps() {
    # This task calculates the build time dependencies of the recipe, and is
    # required because while a task can deptask on itself, those dependencies
    # do not show up in BB_TASKDEPDATA. To work around that, this task does the
    # deptask on do_create_spdx and writes out the dependencies it finds, then
    # do_create_spdx reads in the found dependencies when writing the actual
    # SPDX document
    import json
    from pathlib import Path

    spdx_deps_file = Path(d.getVar("SPDXDEPS"))

    # (PN, hashfn, in-taskhash) tuples; serialized as JSON lists.
    deps = collect_direct_deps(d, "do_create_spdx")

    with spdx_deps_file.open("w") as f:
        json.dump(deps, f)
}
# NOTE: depending on do_unpack is a hack that is necessary to get its dependencies for archiving the source
512addtask do_collect_spdx_deps after do_unpack
513do_collect_spdx_deps[depends] += "${PATCHDEPENDENCY}"
514do_collect_spdx_deps[deptask] = "do_create_spdx"
515do_collect_spdx_deps[dirs] = "${SPDXDIR}"
516
python do_create_spdx() {
    # Build the recipe-level SPDX document plus one SPDX document per
    # produced package, optionally archiving sources/packaged files, and
    # write them to the sstate-deployed SPDX output directory.
    from datetime import datetime, timezone
    import oe.sbom
    import oe.spdx
    import uuid
    from pathlib import Path
    from contextlib import contextmanager
    import oe.cve_check

    @contextmanager
    def optional_tarfile(name, guard, mode="w"):
        # Yield a zstd-compressed tar stream at *name* when *guard* is true,
        # otherwise yield None so callers can skip archiving entirely.
        import tarfile
        import bb.compress.zstd

        num_threads = int(d.getVar("BB_NUMBER_THREADS"))

        if guard:
            name.parent.mkdir(parents=True, exist_ok=True)
            with bb.compress.zstd.open(name, mode=mode + "b", num_threads=num_threads) as f:
                with tarfile.open(fileobj=f, mode=mode + "|") as tf:
                    yield tf
        else:
            yield None


    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
    spdx_workdir = Path(d.getVar("SPDXWORK"))
    include_sources = d.getVar("SPDX_INCLUDE_SOURCES") == "1"
    archive_sources = d.getVar("SPDX_ARCHIVE_SOURCES") == "1"
    archive_packaged = d.getVar("SPDX_ARCHIVE_PACKAGED") == "1"
    pkg_arch = d.getVar("SSTATE_PKGARCH")

    creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")

    # Document describing the recipe itself.
    doc = oe.spdx.SPDXDocument()

    doc.name = "recipe-" + d.getVar("PN")
    doc.documentNamespace = get_doc_namespace(d, doc)
    doc.creationInfo.created = creation_time
    doc.creationInfo.comment = "This document was created by analyzing recipe files during the build."
    doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
    doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
    doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
    doc.creationInfo.creators.append("Person: N/A ()")

    recipe = oe.spdx.SPDXPackage()
    recipe.name = d.getVar("PN")
    recipe.versionInfo = d.getVar("PV")
    recipe.SPDXID = oe.sbom.get_recipe_spdxid(d)
    recipe.supplier = d.getVar("SPDX_SUPPLIER")
    # Mark native/cross recipes so later stages can tell them apart.
    if bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d):
        recipe.annotations.append(create_annotation(d, "isNative"))

    homepage = d.getVar("HOMEPAGE")
    if homepage:
        recipe.homepage = homepage

    license = d.getVar("LICENSE")
    if license:
        recipe.licenseDeclared = convert_license_to_spdx(license, doc, d)

    summary = d.getVar("SUMMARY")
    if summary:
        recipe.summary = summary

    description = d.getVar("DESCRIPTION")
    if description:
        recipe.description = description

    if d.getVar("SPDX_CUSTOM_ANNOTATION_VARS"):
        for var in d.getVar('SPDX_CUSTOM_ANNOTATION_VARS').split():
            recipe.annotations.append(create_annotation(d, var + "=" + d.getVar(var)))

    # Some CVEs may be patched during the build process without incrementing the version number,
    # so querying for CVEs based on the CPE id can lead to false positives. To account for this,
    # save the CVEs fixed by patches to source information field in the SPDX.
    patched_cves = oe.cve_check.get_patched_cves(d)
    patched_cves = list(patched_cves)
    patched_cves = ' '.join(patched_cves)
    if patched_cves:
        recipe.sourceInfo = "CVEs fixed: " + patched_cves

    cpe_ids = oe.cve_check.get_cpe_ids(d.getVar("CVE_PRODUCT"), d.getVar("CVE_VERSION"))
    if cpe_ids:
        for cpe_id in cpe_ids:
            cpe = oe.spdx.SPDXExternalReference()
            cpe.referenceCategory = "SECURITY"
            cpe.referenceType = "http://spdx.org/rdf/references/cpe23Type"
            cpe.referenceLocator = cpe_id
            recipe.externalRefs.append(cpe)

    doc.packages.append(recipe)
    doc.add_relationship(doc, "DESCRIBES", recipe)

    add_download_packages(d, doc, recipe)

    # Optionally unpack/patch the sources into SPDXWORK and describe them.
    if process_sources(d) and include_sources:
        recipe_archive = deploy_dir_spdx / "recipes" / (doc.name + ".tar.zst")
        with optional_tarfile(recipe_archive, archive_sources) as archive:
            spdx_get_src(d)

            add_package_files(
                d,
                doc,
                recipe,
                spdx_workdir,
                lambda file_counter: "SPDXRef-SourceFile-%s-%d" % (d.getVar("PN"), file_counter),
                lambda filepath: ["SOURCE"],
                ignore_dirs=[".git"],
                ignore_top_level_dirs=["temp"],
                archive=archive,
            )

            if archive is not None:
                recipe.packageFileName = str(recipe_archive.name)

    dep_recipes = collect_dep_recipes(d, doc, recipe)

    # Write the recipe document first so its SHA1 can be referenced below.
    doc_sha1 = oe.sbom.write_doc(d, doc, pkg_arch, "recipes", indent=get_json_indent(d))
    dep_recipes.append(oe.sbom.DepRecipe(doc, doc_sha1, recipe))

    recipe_ref = oe.spdx.SPDXExternalDocumentRef()
    recipe_ref.externalDocumentId = "DocumentRef-recipe-" + recipe.name
    recipe_ref.spdxDocument = doc.documentNamespace
    recipe_ref.checksum.algorithm = "SHA1"
    recipe_ref.checksum.checksumValue = doc_sha1

    sources = collect_dep_sources(d, dep_recipes)
    found_licenses = {license.name:recipe_ref.externalDocumentId + ":" + license.licenseId for license in doc.hasExtractedLicensingInfos}

    # Per-package documents only make sense for target recipes.
    if not recipe_spdx_is_native(d, recipe):
        bb.build.exec_func("read_subpackage_metadata", d)

        pkgdest = Path(d.getVar("PKGDEST"))
        for package in d.getVar("PACKAGES").split():
            if not oe.packagedata.packaged(package, d):
                continue

            package_doc = oe.spdx.SPDXDocument()
            pkg_name = d.getVar("PKG:%s" % package) or package
            package_doc.name = pkg_name
            package_doc.documentNamespace = get_doc_namespace(d, package_doc)
            package_doc.creationInfo.created = creation_time
            package_doc.creationInfo.comment = "This document was created by analyzing packages created during the build."
            package_doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
            package_doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
            package_doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
            package_doc.creationInfo.creators.append("Person: N/A ()")
            package_doc.externalDocumentRefs.append(recipe_ref)

            package_license = d.getVar("LICENSE:%s" % package) or d.getVar("LICENSE")

            spdx_package = oe.spdx.SPDXPackage()

            spdx_package.SPDXID = oe.sbom.get_package_spdxid(pkg_name)
            spdx_package.name = pkg_name
            spdx_package.versionInfo = d.getVar("PV")
            spdx_package.licenseDeclared = convert_license_to_spdx(package_license, package_doc, d, found_licenses)
            spdx_package.supplier = d.getVar("SPDX_SUPPLIER")

            package_doc.packages.append(spdx_package)

            package_doc.add_relationship(spdx_package, "GENERATED_FROM", "%s:%s" % (recipe_ref.externalDocumentId, recipe.SPDXID))
            package_doc.add_relationship(package_doc, "DESCRIBES", spdx_package)

            package_archive = deploy_dir_spdx / "packages" / (package_doc.name + ".tar.zst")
            with optional_tarfile(package_archive, archive_packaged) as archive:
                package_files = add_package_files(
                    d,
                    package_doc,
                    spdx_package,
                    pkgdest / package,
                    lambda file_counter: oe.sbom.get_packaged_file_spdxid(pkg_name, file_counter),
                    lambda filepath: ["BINARY"],
                    ignore_top_level_dirs=['CONTROL', 'DEBIAN'],
                    archive=archive,
                )

                if archive is not None:
                    spdx_package.packageFileName = str(package_archive.name)

            add_package_sources_from_debug(d, package_doc, spdx_package, package, package_files, sources)

            oe.sbom.write_doc(d, package_doc, pkg_arch, "packages", indent=get_json_indent(d))
}
702do_create_spdx[vardepsexclude] += "BB_NUMBER_THREADS"
# NOTE: depending on do_unpack is a hack that is necessary to get its dependencies for archiving the source
704addtask do_create_spdx after do_package do_packagedata do_unpack do_collect_spdx_deps before do_populate_sdk do_build do_rm_work
705
706SSTATETASKS += "do_create_spdx"
707do_create_spdx[sstate-inputdirs] = "${SPDXDEPLOY}"
708do_create_spdx[sstate-outputdirs] = "${DEPLOY_DIR_SPDX}"
709
python do_create_spdx_setscene () {
    # Restore do_create_spdx output from shared state instead of rebuilding.
    sstate_setscene(d)
}
713addtask do_create_spdx_setscene
714
715do_create_spdx[dirs] = "${SPDXWORK}"
716do_create_spdx[cleandirs] = "${SPDXDEPLOY} ${SPDXWORK}"
717do_create_spdx[depends] += "${PATCHDEPENDENCY}"
718
def collect_package_providers(d):
    """Map every runtime provide to the package that supplies it.

    Considers the packages of this recipe plus its direct do_create_spdx
    dependencies. Returns {rprovide: (pkg_name, hashfn)} where hashfn is the
    provider's BB_HASHFILENAME (used later to locate its SPDX documents).
    """
    from pathlib import Path
    import oe.sbom
    import oe.spdx
    import json

    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))

    providers = {}

    deps = collect_direct_deps(d, "do_create_spdx")
    # Include this recipe's own packages as providers too.
    deps.append((d.getVar("PN"), d.getVar("BB_HASHFILENAME"), True))

    for dep_pn, dep_hashfn, _ in deps:
        localdata = d
        recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
        if not recipe_data:
            # Fall back to the SDK pkgdata when the target pkgdata is absent.
            localdata = bb.data.createCopy(d)
            localdata.setVar("PKGDATA_DIR", "${PKGDATA_DIR_SDK}")
            recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)

        for pkg in recipe_data.get("PACKAGES", "").split():

            pkg_data = oe.packagedata.read_subpkgdata_dict(pkg, localdata)
            rprovides = set(n for n, _ in bb.utils.explode_dep_versions2(pkg_data.get("RPROVIDES", "")).items())
            rprovides.add(pkg)

            # Prefer the renamed package name (PKG) when one exists.
            if "PKG" in pkg_data:
                pkg = pkg_data["PKG"]
                rprovides.add(pkg)

            for r in rprovides:
                providers[r] = (pkg, dep_hashfn)

    return providers
754
755collect_package_providers[vardepsexclude] += "BB_TASKDEPDATA"
756
python do_create_runtime_spdx() {
    # For each produced package, create a "runtime-<pkg>" SPDX document that
    # amends the package document with RUNTIME_DEPENDENCY_OF relationships
    # derived from RDEPENDS.
    from datetime import datetime, timezone
    import oe.sbom
    import oe.spdx
    import oe.packagedata
    from pathlib import Path

    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
    spdx_deploy = Path(d.getVar("SPDXRUNTIMEDEPLOY"))
    is_native = bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d)

    creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")

    providers = collect_package_providers(d)
    pkg_arch = d.getVar("SSTATE_PKGARCH")
    package_archs = d.getVar("SSTATE_ARCHS").split()
    package_archs.reverse()

    # Native/cross recipes produce no runtime packages.
    if not is_native:
        bb.build.exec_func("read_subpackage_metadata", d)

        # Cache resolved dependency documents so each is read only once.
        dep_package_cache = {}

        pkgdest = Path(d.getVar("PKGDEST"))
        for package in d.getVar("PACKAGES").split():
            localdata = bb.data.createCopy(d)
            pkg_name = d.getVar("PKG:%s" % package) or package
            localdata.setVar("PKG", pkg_name)
            localdata.setVar('OVERRIDES', d.getVar("OVERRIDES", False) + ":" + package)

            if not oe.packagedata.packaged(package, localdata):
                continue

            pkg_spdx_path = oe.sbom.doc_path(deploy_dir_spdx, pkg_name, pkg_arch, "packages")

            package_doc, package_doc_sha1 = oe.sbom.read_doc(pkg_spdx_path)

            # Locate the SPDXPackage for this package; for/else aborts when
            # the document does not contain it.
            for p in package_doc.packages:
                if p.name == pkg_name:
                    spdx_package = p
                    break
            else:
                bb.fatal("Package '%s' not found in %s" % (pkg_name, pkg_spdx_path))

            runtime_doc = oe.spdx.SPDXDocument()
            runtime_doc.name = "runtime-" + pkg_name
            runtime_doc.documentNamespace = get_doc_namespace(localdata, runtime_doc)
            runtime_doc.creationInfo.created = creation_time
            runtime_doc.creationInfo.comment = "This document was created by analyzing package runtime dependencies."
            runtime_doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
            runtime_doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
            runtime_doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
            runtime_doc.creationInfo.creators.append("Person: N/A ()")

            package_ref = oe.spdx.SPDXExternalDocumentRef()
            package_ref.externalDocumentId = "DocumentRef-package-" + package
            package_ref.spdxDocument = package_doc.documentNamespace
            package_ref.checksum.algorithm = "SHA1"
            package_ref.checksum.checksumValue = package_doc_sha1

            runtime_doc.externalDocumentRefs.append(package_ref)

            runtime_doc.add_relationship(
                runtime_doc.SPDXID,
                "AMENDS",
                "%s:%s" % (package_ref.externalDocumentId, package_doc.SPDXID)
            )

            deps = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS") or "")
            seen_deps = set()
            for dep, _ in deps.items():
                if dep in seen_deps:
                    continue

                # Resolve the rprovide to its actual provider package.
                if dep not in providers:
                    continue

                (dep, dep_hashfn) = providers[dep]

                if not oe.packagedata.packaged(dep, localdata):
                    continue

                dep_pkg_data = oe.packagedata.read_subpkgdata_dict(dep, d)
                dep_pkg = dep_pkg_data["PKG"]

                if dep in dep_package_cache:
                    (dep_spdx_package, dep_package_ref) = dep_package_cache[dep]
                else:
                    dep_path = oe.sbom.doc_find_by_hashfn(deploy_dir_spdx, package_archs, dep_pkg, dep_hashfn)
                    if not dep_path:
                        bb.fatal("No SPDX file found for package %s, %s" % (dep_pkg, dep_hashfn))

                    spdx_dep_doc, spdx_dep_sha1 = oe.sbom.read_doc(dep_path)

                    for pkg in spdx_dep_doc.packages:
                        if pkg.name == dep_pkg:
                            dep_spdx_package = pkg
                            break
                    else:
                        bb.fatal("Package '%s' not found in %s" % (dep_pkg, dep_path))

                    dep_package_ref = oe.spdx.SPDXExternalDocumentRef()
                    dep_package_ref.externalDocumentId = "DocumentRef-runtime-dependency-" + spdx_dep_doc.name
                    dep_package_ref.spdxDocument = spdx_dep_doc.documentNamespace
                    dep_package_ref.checksum.algorithm = "SHA1"
                    dep_package_ref.checksum.checksumValue = spdx_dep_sha1

                    dep_package_cache[dep] = (dep_spdx_package, dep_package_ref)

                runtime_doc.externalDocumentRefs.append(dep_package_ref)

                runtime_doc.add_relationship(
                    "%s:%s" % (dep_package_ref.externalDocumentId, dep_spdx_package.SPDXID),
                    "RUNTIME_DEPENDENCY_OF",
                    "%s:%s" % (package_ref.externalDocumentId, spdx_package.SPDXID)
                )
                seen_deps.add(dep)

            oe.sbom.write_doc(d, runtime_doc, pkg_arch, "runtime", spdx_deploy, indent=get_json_indent(d))
}
877
878do_create_runtime_spdx[vardepsexclude] += "OVERRIDES SSTATE_ARCHS"
879
880addtask do_create_runtime_spdx after do_create_spdx before do_build do_rm_work
881SSTATETASKS += "do_create_runtime_spdx"
882do_create_runtime_spdx[sstate-inputdirs] = "${SPDXRUNTIMEDEPLOY}"
883do_create_runtime_spdx[sstate-outputdirs] = "${DEPLOY_DIR_SPDX}"
884
python do_create_runtime_spdx_setscene () {
    # Restore do_create_runtime_spdx output from shared state.
    sstate_setscene(d)
}
888addtask do_create_runtime_spdx_setscene
889
890do_create_runtime_spdx[dirs] = "${SPDXRUNTIMEDEPLOY}"
891do_create_runtime_spdx[cleandirs] = "${SPDXRUNTIMEDEPLOY}"
892do_create_runtime_spdx[rdeptask] = "do_create_spdx"
893
def spdx_get_src(d):
    """
    Save the patched source of the recipe in SPDXWORK.

    Temporarily redirects WORKDIR so do_unpack/do_patch run into the SPDX
    work directory; for work-shared recipes (e.g. kernel) the shared source
    tree is copied instead of re-unpacked. WORKDIR is always restored.
    """
    import shutil
    spdx_workdir = d.getVar('SPDXWORK')
    spdx_sysroot_native = d.getVar('STAGING_DIR_NATIVE')

    workdir = d.getVar("WORKDIR")

    try:
        # The kernel class functions require it to be on work-shared, so we dont change WORKDIR
        if not is_work_shared_spdx(d):
            # Change the WORKDIR to make do_unpack do_patch run in another dir.
            d.setVar('WORKDIR', spdx_workdir)
            # Restore the original path to recipe's native sysroot (it's relative to WORKDIR).
            d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)

            # The changed 'WORKDIR' also caused 'B' changed, create dir 'B' for the
            # possibly requiring of the following tasks (such as some recipes's
            # do_patch required 'B' existed).
            bb.utils.mkdirhier(d.getVar('B'))

            bb.build.exec_func('do_unpack', d)
        # Copy source of kernel to spdx_workdir
        if is_work_shared_spdx(d):
            share_src = d.getVar('WORKDIR')
            d.setVar('WORKDIR', spdx_workdir)
            d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
            src_dir = spdx_workdir + "/" + d.getVar('PN') + "-" + d.getVar('PV') + "-" + d.getVar('PR')
            bb.utils.mkdirhier(src_dir)
            if bb.data.inherits_class('kernel', d):
                share_src = d.getVar('STAGING_KERNEL_DIR')
            cmd_copy_share = "cp -rf " + share_src + "/* " + src_dir + "/"
            cmd_copy_shared_res = os.popen(cmd_copy_share).read()
            bb.note("cmd_copy_shared_result = " + cmd_copy_shared_res)

            git_path = src_dir + "/.git"
            if os.path.exists(git_path):
                # Fix: was 'shutils.rmtree' — a NameError at runtime since the
                # module is imported as 'shutil'.
                shutil.rmtree(git_path)

        # Make sure gcc and kernel sources are patched only once
        if not (d.getVar('SRC_URI') == "" or is_work_shared_spdx(d)):
            bb.build.exec_func('do_patch', d)

        # Some userland has no source.
        if not os.path.exists(spdx_workdir):
            bb.utils.mkdirhier(spdx_workdir)
    finally:
        # Always restore the original WORKDIR, even on failure.
        d.setVar("WORKDIR", workdir)
945
# STAGING_KERNEL_DIR is machine/build specific; exclude it from the
# signature of functions that reference it.
spdx_get_src[vardepsexclude] += "STAGING_KERNEL_DIR"

# Image builds need SPDX documents for everything installed, recursively.
do_rootfs[recrdeptask] += "do_create_spdx do_create_runtime_spdx"
do_rootfs[cleandirs] += "${SPDXIMAGEWORK}"

# "=+" prepends, so the SBOM is combined before other post-uninstall
# commands run on the image.
ROOTFS_POSTUNINSTALL_COMMAND =+ "image_combine_spdx"

# SDK builds likewise need the full recursive set of SPDX documents, and
# combine separate SBOMs for the host and target halves of the SDK.
do_populate_sdk[recrdeptask] += "do_create_spdx do_create_runtime_spdx"
do_populate_sdk[cleandirs] += "${SPDXSDKWORK}"
POPULATE_SDK_POST_HOST_COMMAND:append:task-populate-sdk = " sdk_host_combine_spdx"
POPULATE_SDK_POST_TARGET_COMMAND:append:task-populate-sdk = " sdk_target_combine_spdx"
957
958python image_combine_spdx() {
959 import os
960 import oe.sbom
961 from pathlib import Path
962 from oe.rootfs import image_list_installed_packages
963
964 image_name = d.getVar("IMAGE_NAME")
965 image_link_name = d.getVar("IMAGE_LINK_NAME")
966 imgdeploydir = Path(d.getVar("IMGDEPLOYDIR"))
967 img_spdxid = oe.sbom.get_image_spdxid(image_name)
968 packages = image_list_installed_packages(d)
969
970 combine_spdx(d, image_name, imgdeploydir, img_spdxid, packages, Path(d.getVar("SPDXIMAGEWORK")))
971
972 def make_image_link(target_path, suffix):
973 if image_link_name:
974 link = imgdeploydir / (image_link_name + suffix)
975 if link != target_path:
976 link.symlink_to(os.path.relpath(target_path, link.parent))
977
978 spdx_tar_path = imgdeploydir / (image_name + ".spdx.tar.zst")
979 make_image_link(spdx_tar_path, ".spdx.tar.zst")
980}
981
python sdk_host_combine_spdx() {
    # Combine the SBOM for the host (build machine) half of the SDK.
    sdk_combine_spdx(d, "host")
}
985
python sdk_target_combine_spdx() {
    # Combine the SBOM for the target half of the SDK.
    sdk_combine_spdx(d, "target")
}
989
def sdk_combine_spdx(d, sdk_type):
    """Combine SPDX documents for one half of an SDK.

    :param d:        BitBake datastore
    :param sdk_type: "host" or "target"; selects which installed-package
                     list is queried and is appended to the SBOM name.
    """
    import oe.sbom
    from pathlib import Path
    from oe.sdk import sdk_list_installed_packages

    name = "%s-%s" % (d.getVar("TOOLCHAIN_OUTPUTNAME"), sdk_type)
    deploydir = Path(d.getVar("SDKDEPLOYDIR"))
    workdir = Path(d.getVar('SPDXSDKWORK'))
    spdxid = oe.sbom.get_sdk_spdxid(name)
    installed = sdk_list_installed_packages(d, sdk_type == "target")

    combine_spdx(d, name, deploydir, spdxid, installed, workdir)
1000
def combine_spdx(d, rootfs_name, rootfs_deploydir, rootfs_spdxid, packages, spdx_workdir):
    """
    Build one combined SPDX document for an image or SDK and archive it.

    Creates a top-level SPDX document named *rootfs_name* whose package
    CONTAINS every installed package (by external document reference), writes
    it to *spdx_workdir*, then streams that document plus every transitively
    referenced SPDX document into <rootfs_name>.spdx.tar.zst under
    *rootfs_deploydir*, together with an index.json listing the contents.

    :param d:                BitBake datastore
    :param rootfs_name:      name for the combined document and archive
    :param rootfs_deploydir: Path the .spdx.tar.zst archive is written into
    :param rootfs_spdxid:    SPDXID for the top-level package
    :param packages:         mapping of installed package names (keys used)
    :param spdx_workdir:     Path scratch dir for the combined .spdx.json
    """
    import os
    import oe.spdx
    import oe.sbom
    import io
    import json
    from datetime import timezone, datetime
    from pathlib import Path
    import tarfile
    import bb.compress.zstd

    providers = collect_package_providers(d)
    # Search most specific package arch directories first.
    package_archs = d.getVar("SSTATE_ARCHS").split()
    package_archs.reverse()

    creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
    source_date_epoch = d.getVar("SOURCE_DATE_EPOCH")

    doc = oe.spdx.SPDXDocument()
    doc.name = rootfs_name
    doc.documentNamespace = get_doc_namespace(d, doc)
    doc.creationInfo.created = creation_time
    doc.creationInfo.comment = "This document was created by analyzing the source of the Yocto recipe during the build."
    doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
    doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
    doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
    doc.creationInfo.creators.append("Person: N/A ()")

    # Top-level package describing the image/SDK itself. NOTE(review): the
    # name comes from PN rather than rootfs_name -- presumably intentional,
    # as rootfs_name carries machine/date decorations.
    image = oe.spdx.SPDXPackage()
    image.name = d.getVar("PN")
    image.versionInfo = d.getVar("PV")
    image.SPDXID = rootfs_spdxid
    image.supplier = d.getVar("SPDX_SUPPLIER")

    doc.packages.append(image)

    # Reference each installed package's SPDX document and relate it to the
    # image; iteration is sorted so the output is reproducible.
    for name in sorted(packages.keys()):
        if name not in providers:
            bb.fatal("Unable to find SPDX provider for '%s'" % name)

        pkg_name, pkg_hashfn = providers[name]

        pkg_spdx_path = oe.sbom.doc_find_by_hashfn(deploy_dir_spdx, package_archs, pkg_name, pkg_hashfn)
        if not pkg_spdx_path:
            bb.fatal("No SPDX file found for package %s, %s" % (pkg_name, pkg_hashfn))

        pkg_doc, pkg_doc_sha1 = oe.sbom.read_doc(pkg_spdx_path)

        for p in pkg_doc.packages:
            if p.name == name:
                pkg_ref = oe.spdx.SPDXExternalDocumentRef()
                pkg_ref.externalDocumentId = "DocumentRef-%s" % pkg_doc.name
                pkg_ref.spdxDocument = pkg_doc.documentNamespace
                pkg_ref.checksum.algorithm = "SHA1"
                pkg_ref.checksum.checksumValue = pkg_doc_sha1

                doc.externalDocumentRefs.append(pkg_ref)
                doc.add_relationship(image, "CONTAINS", "%s:%s" % (pkg_ref.externalDocumentId, p.SPDXID))
                break
        else:
            # for/else: no package in the document matched the name.
            bb.fatal("Unable to find package with name '%s' in SPDX file %s" % (name, pkg_spdx_path))

        # Also pull in the runtime-dependency document produced by
        # do_create_runtime_spdx for this package.
        runtime_spdx_path = oe.sbom.doc_find_by_hashfn(deploy_dir_spdx, package_archs, "runtime-" + name, pkg_hashfn)
        if not runtime_spdx_path:
            bb.fatal("No runtime SPDX document found for %s, %s" % (name, pkg_hashfn))

        runtime_doc, runtime_doc_sha1 = oe.sbom.read_doc(runtime_spdx_path)

        runtime_ref = oe.spdx.SPDXExternalDocumentRef()
        runtime_ref.externalDocumentId = "DocumentRef-%s" % runtime_doc.name
        runtime_ref.spdxDocument = runtime_doc.documentNamespace
        runtime_ref.checksum.algorithm = "SHA1"
        runtime_ref.checksum.checksumValue = runtime_doc_sha1

        # "OTHER" isn't ideal here, but I can't find a relationship that makes sense
        doc.externalDocumentRefs.append(runtime_ref)
        doc.add_relationship(
            image,
            "OTHER",
            "%s:%s" % (runtime_ref.externalDocumentId, runtime_doc.SPDXID),
            comment="Runtime dependencies for %s" % name
        )
    bb.utils.mkdirhier(spdx_workdir)
    image_spdx_path = spdx_workdir / (rootfs_name + ".spdx.json")

    with image_spdx_path.open("wb") as f:
        doc.to_json(f, sort_keys=True, indent=get_json_indent(d))

    num_threads = int(d.getVar("BB_NUMBER_THREADS"))

    # Tracks both file paths and document namespaces already archived, so
    # documents reachable via multiple reference chains are added only once.
    visited_docs = set()

    index = {"documents": []}

    # Stream every reachable SPDX document into a zstd-compressed tar,
    # followed by an index.json describing the archive contents.
    spdx_tar_path = rootfs_deploydir / (rootfs_name + ".spdx.tar.zst")
    with bb.compress.zstd.open(spdx_tar_path, "w", num_threads=num_threads) as f:
        with tarfile.open(fileobj=f, mode="w|") as tar:
            def collect_spdx_document(path):
                # Recursively add `path` and every document it references.
                # NOTE: the local `f` below intentionally shadows the outer
                # compressed stream for the duration of the `with` block.
                nonlocal tar
                nonlocal deploy_dir_spdx
                nonlocal source_date_epoch
                nonlocal index

                if path in visited_docs:
                    return

                visited_docs.add(path)

                with path.open("rb") as f:
                    doc, sha1 = oe.sbom.read_doc(f)
                    # read_doc consumed the stream; rewind so the raw bytes
                    # can be copied into the tar below.
                    f.seek(0)

                    if doc.documentNamespace in visited_docs:
                        return

                    bb.note("Adding SPDX document %s" % path)
                    visited_docs.add(doc.documentNamespace)
                    info = tar.gettarinfo(fileobj=f)

                    # Normalize ownership so the archive is reproducible.
                    info.name = doc.name + ".spdx.json"
                    info.uid = 0
                    info.gid = 0
                    info.uname = "root"
                    info.gname = "root"

                    # Clamp mtimes to SOURCE_DATE_EPOCH for reproducibility.
                    if source_date_epoch is not None and info.mtime > int(source_date_epoch):
                        info.mtime = int(source_date_epoch)

                    tar.addfile(info, f)

                    index["documents"].append({
                        "filename": info.name,
                        "documentNamespace": doc.documentNamespace,
                        "sha1": sha1,
                    })

                for ref in doc.externalDocumentRefs:
                    ref_path = oe.sbom.doc_find_by_namespace(deploy_dir_spdx, package_archs, ref.spdxDocument)
                    if not ref_path:
                        bb.fatal("Cannot find any SPDX file for document %s" % ref.spdxDocument)
                    collect_spdx_document(ref_path)

            collect_spdx_document(image_spdx_path)

            # Deterministic index ordering for reproducible archives.
            index["documents"].sort(key=lambda x: x["filename"])

            index_str = io.BytesIO(json.dumps(
                index,
                sort_keys=True,
                indent=get_json_indent(d),
            ).encode("utf-8"))

            info = tarfile.TarInfo()
            info.name = "index.json"
            info.size = len(index_str.getvalue())
            info.uid = 0
            info.gid = 0
            info.uname = "root"
            info.gname = "root"

            tar.addfile(info, fileobj=index_str)
1163
1164combine_spdx[vardepsexclude] += "BB_NUMBER_THREADS SSTATE_ARCHS"