| author | Joshua Watt <JPEWhacker@gmail.com> | 2024-06-10 15:41:48 -0600 |
|---|---|---|
| committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2024-06-12 16:16:03 +0100 |
| commit | 92b202c2460b6d82df585a47ed56f4ed818a76c0 (patch) | |
| tree | bd5234b04562ecb3d61a1d6f3cc4d9e8fdd83c6b | |
| parent | a43f15565b18f818d5519376e54849b604156e38 (diff) | |
| download | poky-92b202c2460b6d82df585a47ed56f4ed818a76c0.tar.gz | |
classes/spdx-common: Move common SPDX to new class
Moves SPDX code that can be shared between different SPDX versions into
a common class
(From OE-Core rev: 769a390adc9fc0b52978abe0f19f885967af0117)
Signed-off-by: Joshua Watt <JPEWhacker@gmail.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
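
The net effect of the split is that a version-specific SPDX class now just inherits the shared definitions instead of redeclaring them (see the `inherit spdx-common` line in the first hunk below). As a minimal sketch of the pattern this enables (a hypothetical consumer class, not part of this commit), another SPDX class could reuse the moved helpers like so:

```
# example-spdx-consumer.bbclass -- hypothetical sketch, not part of this commit
inherit spdx-common

python do_show_spdx_deps() {
    import json
    # get_spdx_deps() and get_json_indent() are among the helpers moved into
    # spdx-common.bbclass; the deps file is written by do_collect_spdx_deps.
    deps = get_spdx_deps(d)
    bb.note("SPDX deps for %s: %s" % (d.getVar("PN"),
            json.dumps(deps, indent=get_json_indent(d))))
}
addtask do_show_spdx_deps after do_collect_spdx_deps
```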
| -rw-r--r-- | meta/classes/create-spdx-2.2.bbclass | 259 |
| -rw-r--r-- | meta/classes/spdx-common.bbclass | 256 |

2 files changed, 266 insertions, 249 deletions
diff --git a/meta/classes/create-spdx-2.2.bbclass b/meta/classes/create-spdx-2.2.bbclass
index 7c8a0b8b0f..94a172fbc9 100644
--- a/meta/classes/create-spdx-2.2.bbclass
+++ b/meta/classes/create-spdx-2.2.bbclass
@@ -4,65 +4,13 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
-DEPLOY_DIR_SPDX ??= "${DEPLOY_DIR}/spdx"
+inherit spdx-common
 
-# The product name that the CVE database uses. Defaults to BPN, but may need to
-# be overriden per recipe (for example tiff.bb sets CVE_PRODUCT=libtiff).
-CVE_PRODUCT ??= "${BPN}"
-CVE_VERSION ??= "${PV}"
-
-SPDXDIR ??= "${WORKDIR}/spdx"
-SPDXDEPLOY = "${SPDXDIR}/deploy"
-SPDXWORK = "${SPDXDIR}/work"
-SPDXIMAGEWORK = "${SPDXDIR}/image-work"
-SPDXSDKWORK = "${SPDXDIR}/sdk-work"
-SPDXDEPS = "${SPDXDIR}/deps.json"
-
-SPDX_TOOL_NAME ??= "oe-spdx-creator"
-SPDX_TOOL_VERSION ??= "1.0"
-
-SPDXRUNTIMEDEPLOY = "${SPDXDIR}/runtime-deploy"
-
-SPDX_INCLUDE_SOURCES ??= "0"
-SPDX_ARCHIVE_SOURCES ??= "0"
-SPDX_ARCHIVE_PACKAGED ??= "0"
-
-SPDX_UUID_NAMESPACE ??= "sbom.openembedded.org"
-SPDX_NAMESPACE_PREFIX ??= "http://spdx.org/spdxdocs"
-SPDX_PRETTY ??= "0"
-
-SPDX_LICENSES ??= "${COREBASE}/meta/files/spdx-licenses.json"
-
-SPDX_CUSTOM_ANNOTATION_VARS ??= ""
-
-SPDX_ORG ??= "OpenEmbedded ()"
-SPDX_SUPPLIER ??= "Organization: ${SPDX_ORG}"
-SPDX_SUPPLIER[doc] = "The SPDX PackageSupplier field for SPDX packages created from \
-    this recipe. For SPDX documents create using this class during the build, this \
-    is the contact information for the person or organization who is doing the \
-    build."
-
-def extract_licenses(filename):
-    import re
-
-    lic_regex = re.compile(rb'^\W*SPDX-License-Identifier:\s*([ \w\d.()+-]+?)(?:\s+\W*)?$', re.MULTILINE)
-
-    try:
-        with open(filename, 'rb') as f:
-            size = min(15000, os.stat(filename).st_size)
-            txt = f.read(size)
-            licenses = re.findall(lic_regex, txt)
-            if licenses:
-                ascii_licenses = [lic.decode('ascii') for lic in licenses]
-                return ascii_licenses
-    except Exception as e:
-        bb.warn(f"Exception reading {filename}: {e}")
-    return None
-
-def get_doc_namespace(d, doc):
+def get_namespace(d, name):
     import uuid
     namespace_uuid = uuid.uuid5(uuid.NAMESPACE_DNS, d.getVar("SPDX_UUID_NAMESPACE"))
-    return "%s/%s-%s" % (d.getVar("SPDX_NAMESPACE_PREFIX"), doc.name, str(uuid.uuid5(namespace_uuid, doc.name)))
+    return "%s/%s-%s" % (d.getVar("SPDX_NAMESPACE_PREFIX"), name, str(uuid.uuid5(namespace_uuid, name)))
+
 
 def create_annotation(d, comment):
     from datetime import datetime, timezone
@@ -80,26 +28,6 @@ def recipe_spdx_is_native(d, recipe):
         a.annotator == "Tool: %s - %s" % (d.getVar("SPDX_TOOL_NAME"), d.getVar("SPDX_TOOL_VERSION")) and
         a.comment == "isNative" for a in recipe.annotations)
 
-def is_work_shared_spdx(d):
-    return bb.data.inherits_class('kernel', d) or ('work-shared' in d.getVar('WORKDIR'))
-
-def get_json_indent(d):
-    if d.getVar("SPDX_PRETTY") == "1":
-        return 2
-    return None
-
-python() {
-    import json
-    if d.getVar("SPDX_LICENSE_DATA"):
-        return
-
-    with open(d.getVar("SPDX_LICENSES"), "r") as f:
-        data = json.load(f)
-        # Transform the license array to a dictionary
-        data["licenses"] = {l["licenseId"]: l for l in data["licenses"]}
-        d.setVar("SPDX_LICENSE_DATA", data)
-}
-
 def convert_license_to_spdx(lic, document, d, existing={}):
     from pathlib import Path
     import oe.spdx
@@ -172,34 +100,6 @@ def convert_license_to_spdx(lic, document, d, existing={}):
 
     return ' '.join(convert(l) for l in lic_split)
 
-def process_sources(d):
-    pn = d.getVar('PN')
-    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
-    if pn in assume_provided:
-        for p in d.getVar("PROVIDES").split():
-            if p != pn:
-                pn = p
-                break
-
-    # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
-    # so avoid archiving source here.
-    if pn.startswith('glibc-locale'):
-        return False
-    if d.getVar('PN') == "libtool-cross":
-        return False
-    if d.getVar('PN') == "libgcc-initial":
-        return False
-    if d.getVar('PN') == "shadow-sysroot":
-        return False
-
-    # We just archive gcc-source for all the gcc related recipes
-    if d.getVar('BPN') in ['gcc', 'libgcc']:
-        bb.debug(1, 'spdx: There is bug in scan of %s is, do nothing' % pn)
-        return False
-
-    return True
-
-
 def add_package_files(d, doc, spdx_pkg, topdir, get_spdxid, get_types, *, archive=None, ignore_dirs=[], ignore_top_level_dirs=[]):
     from pathlib import Path
     import oe.spdx
@@ -348,14 +248,12 @@ def collect_dep_recipes(d, doc, spdx_recipe):
     import oe.spdx
 
     deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
-    spdx_deps_file = Path(d.getVar("SPDXDEPS"))
     package_archs = d.getVar("SSTATE_ARCHS").split()
     package_archs.reverse()
 
     dep_recipes = []
 
-    with spdx_deps_file.open("r") as f:
-        deps = json.load(f)
+    deps = get_spdx_deps(d)
 
     for dep_pn, dep_hashfn, in_taskhash in deps:
         # If this dependency is not calculated in the taskhash skip it.
@@ -468,51 +366,6 @@ def add_download_packages(d, doc, recipe):
             # but this should be sufficient for now
             doc.add_relationship(package, "BUILD_DEPENDENCY_OF", recipe)
 
-def collect_direct_deps(d, dep_task):
-    current_task = "do_" + d.getVar("BB_CURRENTTASK")
-    pn = d.getVar("PN")
-
-    taskdepdata = d.getVar("BB_TASKDEPDATA", False)
-
-    for this_dep in taskdepdata.values():
-        if this_dep[0] == pn and this_dep[1] == current_task:
-            break
-    else:
-        bb.fatal(f"Unable to find this {pn}:{current_task} in taskdepdata")
-
-    deps = set()
-    for dep_name in this_dep[3]:
-        dep_data = taskdepdata[dep_name]
-        if dep_data[1] == dep_task and dep_data[0] != pn:
-            deps.add((dep_data[0], dep_data[7], dep_name in this_dep[8]))
-
-    return sorted(deps)
-
-collect_direct_deps[vardepsexclude] += "BB_TASKDEPDATA"
-collect_direct_deps[vardeps] += "DEPENDS"
-
-python do_collect_spdx_deps() {
-    # This task calculates the build time dependencies of the recipe, and is
-    # required because while a task can deptask on itself, those dependencies
-    # do not show up in BB_TASKDEPDATA. To work around that, this task does the
-    # deptask on do_create_spdx and writes out the dependencies it finds, then
-    # do_create_spdx reads in the found dependencies when writing the actual
-    # SPDX document
-    import json
-    from pathlib import Path
-
-    spdx_deps_file = Path(d.getVar("SPDXDEPS"))
-
-    deps = collect_direct_deps(d, "do_create_spdx")
-
-    with spdx_deps_file.open("w") as f:
-        json.dump(deps, f)
-}
-# NOTE: depending on do_unpack is a hack that is necessary to get it's dependencies for archive the source
-addtask do_collect_spdx_deps after do_unpack
-do_collect_spdx_deps[depends] += "${PATCHDEPENDENCY}"
-do_collect_spdx_deps[deptask] = "do_create_spdx"
-do_collect_spdx_deps[dirs] = "${SPDXDIR}"
 
 python do_create_spdx() {
     from datetime import datetime, timezone
@@ -551,7 +404,7 @@ python do_create_spdx() {
     doc = oe.spdx.SPDXDocument()
 
     doc.name = "recipe-" + d.getVar("PN")
-    doc.documentNamespace = get_doc_namespace(d, doc)
+    doc.documentNamespace = get_namespace(d, doc.name)
     doc.creationInfo.created = creation_time
     doc.creationInfo.comment = "This document was created by analyzing recipe files during the build."
     doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
@@ -655,7 +508,7 @@ python do_create_spdx() {
         package_doc = oe.spdx.SPDXDocument()
         pkg_name = d.getVar("PKG:%s" % package) or package
         package_doc.name = pkg_name
-        package_doc.documentNamespace = get_doc_namespace(d, package_doc)
+        package_doc.documentNamespace = get_namespace(d, package_doc.name)
        package_doc.creationInfo.created = creation_time
        package_doc.creationInfo.comment = "This document was created by analyzing packages created during the build."
        package_doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
@@ -716,44 +569,6 @@ do_create_spdx[dirs] = "${SPDXWORK}"
 do_create_spdx[cleandirs] = "${SPDXDEPLOY} ${SPDXWORK}"
 do_create_spdx[depends] += "${PATCHDEPENDENCY}"
 
-def collect_package_providers(d):
-    from pathlib import Path
-    import oe.sbom
-    import oe.spdx
-    import json
-
-    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
-
-    providers = {}
-
-    deps = collect_direct_deps(d, "do_create_spdx")
-    deps.append((d.getVar("PN"), d.getVar("BB_HASHFILENAME"), True))
-
-    for dep_pn, dep_hashfn, _ in deps:
-        localdata = d
-        recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
-        if not recipe_data:
-            localdata = bb.data.createCopy(d)
-            localdata.setVar("PKGDATA_DIR", "${PKGDATA_DIR_SDK}")
-            recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
-
-        for pkg in recipe_data.get("PACKAGES", "").split():
-
-            pkg_data = oe.packagedata.read_subpkgdata_dict(pkg, localdata)
-            rprovides = set(n for n, _ in bb.utils.explode_dep_versions2(pkg_data.get("RPROVIDES", "")).items())
-            rprovides.add(pkg)
-
-            if "PKG" in pkg_data:
-                pkg = pkg_data["PKG"]
-                rprovides.add(pkg)
-
-            for r in rprovides:
-                providers[r] = (pkg, dep_hashfn)
-
-    return providers
-
-collect_package_providers[vardepsexclude] += "BB_TASKDEPDATA"
-
 python do_create_runtime_spdx() {
     from datetime import datetime, timezone
     import oe.sbom
@@ -800,7 +615,7 @@ python do_create_runtime_spdx() {
 
             runtime_doc = oe.spdx.SPDXDocument()
             runtime_doc.name = "runtime-" + pkg_name
-            runtime_doc.documentNamespace = get_doc_namespace(localdata, runtime_doc)
+            runtime_doc.documentNamespace = get_namespace(localdata, runtime_doc.name)
             runtime_doc.creationInfo.created = creation_time
             runtime_doc.creationInfo.comment = "This document was created by analyzing package runtime dependencies."
             runtime_doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
@@ -891,60 +706,6 @@ do_create_runtime_spdx[dirs] = "${SPDXRUNTIMEDEPLOY}"
 do_create_runtime_spdx[cleandirs] = "${SPDXRUNTIMEDEPLOY}"
 do_create_runtime_spdx[rdeptask] = "do_create_spdx"
 
-def spdx_get_src(d):
-    """
-    save patched source of the recipe in SPDX_WORKDIR.
-    """
-    import shutil
-    spdx_workdir = d.getVar('SPDXWORK')
-    spdx_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
-    pn = d.getVar('PN')
-
-    workdir = d.getVar("WORKDIR")
-
-    try:
-        # The kernel class functions require it to be on work-shared, so we dont change WORKDIR
-        if not is_work_shared_spdx(d):
-            # Change the WORKDIR to make do_unpack do_patch run in another dir.
-            d.setVar('WORKDIR', spdx_workdir)
-            # Restore the original path to recipe's native sysroot (it's relative to WORKDIR).
-            d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
-
-            # The changed 'WORKDIR' also caused 'B' changed, create dir 'B' for the
-            # possibly requiring of the following tasks (such as some recipes's
-            # do_patch required 'B' existed).
-            bb.utils.mkdirhier(d.getVar('B'))
-
-        bb.build.exec_func('do_unpack', d)
-        # Copy source of kernel to spdx_workdir
-        if is_work_shared_spdx(d):
-            share_src = d.getVar('WORKDIR')
-            d.setVar('WORKDIR', spdx_workdir)
-            d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
-            src_dir = spdx_workdir + "/" + d.getVar('PN')+ "-" + d.getVar('PV') + "-" + d.getVar('PR')
-            bb.utils.mkdirhier(src_dir)
-            if bb.data.inherits_class('kernel',d):
-                share_src = d.getVar('STAGING_KERNEL_DIR')
-            cmd_copy_share = "cp -rf " + share_src + "/* " + src_dir + "/"
-            cmd_copy_shared_res = os.popen(cmd_copy_share).read()
-            bb.note("cmd_copy_shared_result = " + cmd_copy_shared_res)
-
-            git_path = src_dir + "/.git"
-            if os.path.exists(git_path):
-                shutils.rmtree(git_path)
-
-        # Make sure gcc and kernel sources are patched only once
-        if not (d.getVar('SRC_URI') == "" or is_work_shared_spdx(d)):
-            bb.build.exec_func('do_patch', d)
-
-        # Some userland has no source.
-        if not os.path.exists( spdx_workdir ):
-            bb.utils.mkdirhier(spdx_workdir)
-    finally:
-        d.setVar("WORKDIR", workdir)
-
-spdx_get_src[vardepsexclude] += "STAGING_KERNEL_DIR"
-
 do_rootfs[recrdeptask] += "do_create_spdx do_create_runtime_spdx"
 do_rootfs[cleandirs] += "${SPDXIMAGEWORK}"
 
@@ -1019,7 +780,7 @@ def combine_spdx(d, rootfs_name, rootfs_deploydir, rootfs_spdxid, packages, spdx
 
     doc = oe.spdx.SPDXDocument()
     doc.name = rootfs_name
-    doc.documentNamespace = get_doc_namespace(d, doc)
+    doc.documentNamespace = get_namespace(d, doc.name)
     doc.creationInfo.created = creation_time
     doc.creationInfo.comment = "This document was created by analyzing the source of the Yocto recipe during the build."
     doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
diff --git a/meta/classes/spdx-common.bbclass b/meta/classes/spdx-common.bbclass
new file mode 100644
index 0000000000..468a11ca3e
--- /dev/null
+++ b/meta/classes/spdx-common.bbclass
@@ -0,0 +1,256 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+DEPLOY_DIR_SPDX ??= "${DEPLOY_DIR}/spdx"
+
+# The product name that the CVE database uses. Defaults to BPN, but may need to
+# be overriden per recipe (for example tiff.bb sets CVE_PRODUCT=libtiff).
+CVE_PRODUCT ??= "${BPN}"
+CVE_VERSION ??= "${PV}"
+
+SPDXDIR ??= "${WORKDIR}/spdx"
+SPDXDEPLOY = "${SPDXDIR}/deploy"
+SPDXWORK = "${SPDXDIR}/work"
+SPDXIMAGEWORK = "${SPDXDIR}/image-work"
+SPDXSDKWORK = "${SPDXDIR}/sdk-work"
+SPDXDEPS = "${SPDXDIR}/deps.json"
+
+SPDX_TOOL_NAME ??= "oe-spdx-creator"
+SPDX_TOOL_VERSION ??= "1.0"
+
+SPDXRUNTIMEDEPLOY = "${SPDXDIR}/runtime-deploy"
+
+SPDX_INCLUDE_SOURCES ??= "0"
+SPDX_ARCHIVE_SOURCES ??= "0"
+SPDX_ARCHIVE_PACKAGED ??= "0"
+
+SPDX_UUID_NAMESPACE ??= "sbom.openembedded.org"
+SPDX_NAMESPACE_PREFIX ??= "http://spdx.org/spdxdocs"
+SPDX_PRETTY ??= "0"
+
+SPDX_LICENSES ??= "${COREBASE}/meta/files/spdx-licenses.json"
+
+SPDX_CUSTOM_ANNOTATION_VARS ??= ""
+
+SPDX_ORG ??= "OpenEmbedded ()"
+SPDX_SUPPLIER ??= "Organization: ${SPDX_ORG}"
+SPDX_SUPPLIER[doc] = "The SPDX PackageSupplier field for SPDX packages created from \
+    this recipe. For SPDX documents create using this class during the build, this \
+    is the contact information for the person or organization who is doing the \
+    build."
+
+def extract_licenses(filename):
+    import re
+
+    lic_regex = re.compile(rb'^\W*SPDX-License-Identifier:\s*([ \w\d.()+-]+?)(?:\s+\W*)?$', re.MULTILINE)
+
+    try:
+        with open(filename, 'rb') as f:
+            size = min(15000, os.stat(filename).st_size)
+            txt = f.read(size)
+            licenses = re.findall(lic_regex, txt)
+            if licenses:
+                ascii_licenses = [lic.decode('ascii') for lic in licenses]
+                return ascii_licenses
+    except Exception as e:
+        bb.warn(f"Exception reading {filename}: {e}")
+    return None
+
+def is_work_shared_spdx(d):
+    return bb.data.inherits_class('kernel', d) or ('work-shared' in d.getVar('WORKDIR'))
+
+def get_json_indent(d):
+    if d.getVar("SPDX_PRETTY") == "1":
+        return 2
+    return None
+
+python() {
+    import json
+    if d.getVar("SPDX_LICENSE_DATA"):
+        return
+
+    with open(d.getVar("SPDX_LICENSES"), "r") as f:
+        data = json.load(f)
+        # Transform the license array to a dictionary
+        data["licenses"] = {l["licenseId"]: l for l in data["licenses"]}
+        d.setVar("SPDX_LICENSE_DATA", data)
+}
+
+def process_sources(d):
+    pn = d.getVar('PN')
+    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
+    if pn in assume_provided:
+        for p in d.getVar("PROVIDES").split():
+            if p != pn:
+                pn = p
+                break
+
+    # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
+    # so avoid archiving source here.
+    if pn.startswith('glibc-locale'):
+        return False
+    if d.getVar('PN') == "libtool-cross":
+        return False
+    if d.getVar('PN') == "libgcc-initial":
+        return False
+    if d.getVar('PN') == "shadow-sysroot":
+        return False
+
+    # We just archive gcc-source for all the gcc related recipes
+    if d.getVar('BPN') in ['gcc', 'libgcc']:
+        bb.debug(1, 'spdx: There is bug in scan of %s is, do nothing' % pn)
+        return False
+
+    return True
+
+def collect_direct_deps(d, dep_task):
+    current_task = "do_" + d.getVar("BB_CURRENTTASK")
+    pn = d.getVar("PN")
+
+    taskdepdata = d.getVar("BB_TASKDEPDATA", False)
+
+    for this_dep in taskdepdata.values():
+        if this_dep[0] == pn and this_dep[1] == current_task:
+            break
+    else:
+        bb.fatal(f"Unable to find this {pn}:{current_task} in taskdepdata")
+
+    deps = set()
+
+    for dep_name in this_dep.deps:
+        dep_data = taskdepdata[dep_name]
+        if dep_data.taskname == dep_task and dep_data.pn != pn:
+            deps.add((dep_data.pn, dep_data.hashfn, dep_name in this_dep.taskhash_deps))
+
+    return sorted(deps)
+
+collect_direct_deps[vardepsexclude] += "BB_TASKDEPDATA"
+collect_direct_deps[vardeps] += "DEPENDS"
+
+python do_collect_spdx_deps() {
+    # This task calculates the build time dependencies of the recipe, and is
+    # required because while a task can deptask on itself, those dependencies
+    # do not show up in BB_TASKDEPDATA. To work around that, this task does the
+    # deptask on do_create_spdx and writes out the dependencies it finds, then
+    # do_create_spdx reads in the found dependencies when writing the actual
+    # SPDX document
+    import json
+    from pathlib import Path
+
+    spdx_deps_file = Path(d.getVar("SPDXDEPS"))
+
+    deps = collect_direct_deps(d, "do_create_spdx")
+
+    with spdx_deps_file.open("w") as f:
+        json.dump(deps, f)
+}
+# NOTE: depending on do_unpack is a hack that is necessary to get it's dependencies for archive the source
+addtask do_collect_spdx_deps after do_unpack
+do_collect_spdx_deps[depends] += "${PATCHDEPENDENCY}"
+do_collect_spdx_deps[deptask] = "do_create_spdx"
+do_collect_spdx_deps[dirs] = "${SPDXDIR}"
+
+def get_spdx_deps(d):
+    import json
+    from pathlib import Path
+
+    spdx_deps_file = Path(d.getVar("SPDXDEPS"))
+
+    with spdx_deps_file.open("r") as f:
+        return json.load(f)
+
+def collect_package_providers(d):
+    from pathlib import Path
+    import oe.sbom
+    import oe.spdx
+    import json
+
+    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+
+    providers = {}
+
+    deps = collect_direct_deps(d, "do_create_spdx")
+    deps.append((d.getVar("PN"), d.getVar("BB_HASHFILENAME"), True))
+
+    for dep_pn, dep_hashfn, _ in deps:
+        localdata = d
+        recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
+        if not recipe_data:
+            localdata = bb.data.createCopy(d)
+            localdata.setVar("PKGDATA_DIR", "${PKGDATA_DIR_SDK}")
+            recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
+
+        for pkg in recipe_data.get("PACKAGES", "").split():
+
+            pkg_data = oe.packagedata.read_subpkgdata_dict(pkg, localdata)
+            rprovides = set(n for n, _ in bb.utils.explode_dep_versions2(pkg_data.get("RPROVIDES", "")).items())
+            rprovides.add(pkg)
+
+            if "PKG" in pkg_data:
+                pkg = pkg_data["PKG"]
+                rprovides.add(pkg)
+
+            for r in rprovides:
+                providers[r] = (pkg, dep_hashfn)
+
+    return providers
+
+collect_package_providers[vardepsexclude] += "BB_TASKDEPDATA"
+
+def spdx_get_src(d):
+    """
+    save patched source of the recipe in SPDX_WORKDIR.
+    """
+    import shutil
+    spdx_workdir = d.getVar('SPDXWORK')
+    spdx_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
+    pn = d.getVar('PN')
+
+    workdir = d.getVar("WORKDIR")
+
+    try:
+        # The kernel class functions require it to be on work-shared, so we dont change WORKDIR
+        if not is_work_shared_spdx(d):
+            # Change the WORKDIR to make do_unpack do_patch run in another dir.
+            d.setVar('WORKDIR', spdx_workdir)
+            # Restore the original path to recipe's native sysroot (it's relative to WORKDIR).
+            d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
+
+            # The changed 'WORKDIR' also caused 'B' changed, create dir 'B' for the
+            # possibly requiring of the following tasks (such as some recipes's
+            # do_patch required 'B' existed).
+            bb.utils.mkdirhier(d.getVar('B'))
+
+        bb.build.exec_func('do_unpack', d)
+        # Copy source of kernel to spdx_workdir
+        if is_work_shared_spdx(d):
+            share_src = d.getVar('WORKDIR')
+            d.setVar('WORKDIR', spdx_workdir)
+            d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
+            src_dir = spdx_workdir + "/" + d.getVar('PN')+ "-" + d.getVar('PV') + "-" + d.getVar('PR')
+            bb.utils.mkdirhier(src_dir)
+            if bb.data.inherits_class('kernel',d):
+                share_src = d.getVar('STAGING_KERNEL_DIR')
+            cmd_copy_share = "cp -rf " + share_src + "/* " + src_dir + "/"
+            cmd_copy_shared_res = os.popen(cmd_copy_share).read()
+            bb.note("cmd_copy_shared_result = " + cmd_copy_shared_res)
+
+            git_path = src_dir + "/.git"
+            if os.path.exists(git_path):
+                shutils.rmtree(git_path)
+
+        # Make sure gcc and kernel sources are patched only once
+        if not (d.getVar('SRC_URI') == "" or is_work_shared_spdx(d)):
+            bb.build.exec_func('do_patch', d)
+
+        # Some userland has no source.
+        if not os.path.exists( spdx_workdir ):
+            bb.utils.mkdirhier(spdx_workdir)
+    finally:
+        d.setVar("WORKDIR", workdir)
+
+spdx_get_src[vardepsexclude] += "STAGING_KERNEL_DIR"
+
