diff options
| author | Joshua Watt <JPEWhacker@gmail.com> | 2024-07-12 09:58:13 -0600 |
|---|---|---|
| committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2024-07-16 14:55:53 +0100 |
| commit | 8426e027e8b157fc39ff3e4e5d3eae7239081a2a (patch) | |
| tree | 3e21369dfdbb93d879c8f753fff544953502616e /meta/classes | |
| parent | 28c9ac0d4f320b9fba261757b0a1c34b5e3b593b (diff) | |
| download | poky-8426e027e8b157fc39ff3e4e5d3eae7239081a2a.tar.gz | |
classes/create-spdx-3.0: Add classes
Adds a class to generate SPDX 3.0 output and an image class that is used
when generating images
(From OE-Core rev: b63f6f50458fc6898e4deda5d6739e7bf3639c15)
Signed-off-by: Joshua Watt <JPEWhacker@gmail.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'meta/classes')
| -rw-r--r-- | meta/classes/create-spdx-3.0.bbclass | 1043 | ||||
| -rw-r--r-- | meta/classes/create-spdx-image-3.0.bbclass | 415 | ||||
| -rw-r--r-- | meta/classes/spdx-common.bbclass | 6 |
3 files changed, 1463 insertions, 1 deletions
diff --git a/meta/classes/create-spdx-3.0.bbclass b/meta/classes/create-spdx-3.0.bbclass new file mode 100644 index 0000000000..51168e4876 --- /dev/null +++ b/meta/classes/create-spdx-3.0.bbclass | |||
| @@ -0,0 +1,1043 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
inherit spdx-common

# Version of the SPDX specification that generated documents conform to
SPDX_VERSION = "3.0.0"

# The list of SPDX profiles generated documents will conform to
SPDX_PROFILES ?= "core build software simpleLicensing security"

SPDX_INCLUDE_BUILD_VARIABLES ??= "0"
SPDX_INCLUDE_BUILD_VARIABLES[doc] = "If set to '1', the bitbake variables for a \
    recipe will be included in the Build object. This will most likely result \
    in non-reproducible SPDX output"

SPDX_INCLUDE_BITBAKE_PARENT_BUILD ??= "0"
SPDX_INCLUDE_BITBAKE_PARENT_BUILD[doc] = "Report the parent invocation of bitbake \
    for each Build object. This allows you to know who invoked bitbake to perform \
    a build, but will result in non-reproducible SPDX output."

SPDX_PACKAGE_ADDITIONAL_PURPOSE ?= ""
SPDX_PACKAGE_ADDITIONAL_PURPOSE[doc] = "The list of additional purposes to assign to \
    the generated packages for a recipe. The primary purpose is always `install`. \
    Packages overrides are allowed to override the additional purposes for \
    individual packages."

SPDX_IMAGE_PURPOSE ?= "filesystemImage"
SPDX_IMAGE_PURPOSE[doc] = "The list of purposes to assign to the generated images. \
    The first listed item will be the Primary Purpose and all additional items will \
    be added as additional purposes"

SPDX_SDK_PURPOSE ?= "install"
SPDX_SDK_PURPOSE[doc] = "The list of purposes to assign to the generate SDK installer. \
    The first listed item will be the Primary Purpose and all additional items will \
    be added as additional purposes"

SPDX_INCLUDE_VEX ??= "current"
SPDX_INCLUDE_VEX[doc] = "Controls what VEX information is in the output. Set to \
    'none' to disable all VEX data. Set to 'current' to only include VEX data \
    for vulnerabilities not already fixed in the upstream source code \
    (recommended). Set to 'all' to get all known historical vulnerabilities, \
    including those already fixed upstream (warning: This can be large and \
    slow)."

SPDX_INCLUDE_TIMESTAMPS ?= "0"
SPDX_INCLUDE_TIMESTAMPS[doc] = "Include time stamps in SPDX output. This is \
    useful if you want to know when artifacts were produced and when builds \
    occurred, but will result in non-reproducible SPDX output"

SPDX_IMPORTS ??= ""
SPDX_IMPORTS[doc] = "SPDX_IMPORTS is the base variable that describes how to \
    reference external SPDX ids. Each import is defined as a key in this \
    variable with a suffix to describe to as a suffix to look up more \
    information about the import. Each key can have the following variables: \
        SPDX_IMPORTS_<key>_spdxid: The Fully qualified SPDX ID of the object \
        SPDX_IMPORTS_<key>_uri: The URI where the SPDX Document that contains \
            the external object can be found. Optional but recommended \
        SPDX_IMPORTS_<key>_hash_<hash>: The Checksum of the SPDX Document that \
            contains the External ID. <hash> must be one the valid SPDX hashing \
            algorithms, as described by the HashAlgorithm vocabulary in the\
            SPDX 3 spec. Optional but recommended"

# Agents
# Bitbake variables can be used to describe an SPDX Agent that may be used
# during the build. An Agent is specified using a set of variables which all
# start with some common base name:
#
#   <BASE>_name: The name of the Agent (required)
#   <BASE>_type: The type of Agent. Must be one of "person", "organization",
#       "software", or "agent" (the default if not specified)
#   <BASE>_comment: The comment for the Agent (optional)
#   <BASE>_id_<ID>: An External Identifier for the Agent. <ID> must be a valid
#       ExternalIdentifierType from the SPDX 3 spec. Commonly, an E-mail address
#       can be specified with <BASE>_id_email
#
# Alternatively, an Agent can be an external reference by referencing a key
# in SPDX_IMPORTS like so:
#
#   <BASE>_import = "<key>"
#
# Finally, the same agent described by another set of agent variables can be
# referenced by specifying the basename of the variable that should be
# referenced:
#
#   SPDX_PACKAGE_SUPPLIER_ref = "SPDX_AUTHORS_openembedded"

SPDX_AUTHORS ??= "openembedded"
SPDX_AUTHORS[doc] = "A space separated list of the document authors. Each item \
    is used to name a base variable like SPDX_AUTHORS_<AUTHOR> that \
    describes the author."

# Default document author: the OpenEmbedded organization
SPDX_AUTHORS_openembedded_name = "OpenEmbedded"
SPDX_AUTHORS_openembedded_type = "organization"

SPDX_BUILD_HOST[doc] = "The base variable name to describe the build host on \
    which a build is running. Must be an SPDX_IMPORTS key. Requires \
    SPDX_INCLUDE_BITBAKE_PARENT_BUILD. NOTE: Setting this will result in \
    non-reproducible SPDX output"

SPDX_INVOKED_BY[doc] = "The base variable name to describe the Agent that \
    invoked the build, which builds will link to if specified. Requires \
    SPDX_INCLUDE_BITBAKE_PARENT_BUILD. NOTE: Setting this will likely result in \
    non-reproducible SPDX output"

SPDX_ON_BEHALF_OF[doc] = "The base variable name to describe the Agent on who's \
    behalf the invoking Agent (SPDX_INVOKED_BY) is running the build. Requires \
    SPDX_INCLUDE_BITBAKE_PARENT_BUILD. NOTE: Setting this will likely result in \
    non-reproducible SPDX output"

SPDX_PACKAGE_SUPPLIER[doc] = "The base variable name to describe the Agent who \
    is supplying artifacts produced by the build"


# Images inherit the companion image class so image SPDX is also generated
IMAGE_CLASSES:append = " create-spdx-image-3.0"
| 118 | |||
def set_timestamp_now(d, o, prop):
    """Stamp *prop* on SPDX object *o* with the current UTC time.

    Honors SPDX_INCLUDE_TIMESTAMPS: when it is not "1" the property is
    deleted instead, keeping the output reproducible.
    """
    from datetime import datetime, timezone

    include_timestamps = d.getVar("SPDX_INCLUDE_TIMESTAMPS") == "1"
    if not include_timestamps:
        # Deleting (rather than ignoring) the property validates that it
        # actually exists on the object and that it is not mandatory
        delattr(o, prop)
        return

    setattr(o, prop, datetime.now(timezone.utc))
| 129 | set_timestamp_now[vardepsexclude] = "SPDX_INCLUDE_TIMESTAMPS" | ||
| 130 | |||
def add_license_expression(d, objset, license_expression):
    """Convert a bitbake LICENSE expression into an SPDX license expression.

    Tokenizes *license_expression*, maps bitbake operators (&, |, CLOSED)
    to SPDX ones (AND, OR, NONE), translates license names through
    SPDXLICENSEMAP and the known SPDX license list, and creates
    "LicenseRef-" entries carrying the full license text for anything not
    in the SPDX list. Returns the license expression object added to
    *objset*.
    """
    from pathlib import Path
    import oe.spdx30
    import oe.sbom30

    license_data = d.getVar("SPDX_LICENSE_DATA")
    simple_license_text = {}
    license_text_map = {}
    # NOTE(review): license_ref_idx appears unused — confirm before removing
    license_ref_idx = 0

    def add_license_text(name):
        # Find or create a SimpleLicensingText element holding the full
        # text of license *name*; results are memoized per call
        nonlocal objset
        nonlocal simple_license_text

        if name in simple_license_text:
            return simple_license_text[name]

        lic = objset.find_filter(
            oe.spdx30.simplelicensing_SimpleLicensingText,
            name=name,
        )

        if lic is not None:
            simple_license_text[name] = lic
            return lic

        lic = objset.add(oe.spdx30.simplelicensing_SimpleLicensingText(
            _id=objset.new_spdxid("license-text", name),
            creationInfo=objset.doc.creationInfo,
            name=name,
        ))
        simple_license_text[name] = lic

        if name == "PD":
            # Public domain has no license file; use a fixed description
            lic.simplelicensing_licenseText = "Software released to the public domain"
            return lic

        # Search for the license in COMMON_LICENSE_DIR and LICENSE_PATH
        for directory in [d.getVar('COMMON_LICENSE_DIR')] + (d.getVar('LICENSE_PATH') or '').split():
            try:
                with (Path(directory) / name).open(errors="replace") as f:
                    lic.simplelicensing_licenseText = f.read()
                    return lic

            except FileNotFoundError:
                pass

        # If it's not SPDX or PD, then NO_GENERIC_LICENSE must be set
        filename = d.getVarFlag('NO_GENERIC_LICENSE', name)
        if filename:
            filename = d.expand("${S}/" + filename)
            with open(filename, errors="replace") as f:
                lic.simplelicensing_licenseText = f.read()
                return lic
        else:
            bb.fatal("Cannot find any text for license %s" % name)

    def convert(l):
        # Map one token of the OE license expression to its SPDX equivalent
        nonlocal license_text_map
        nonlocal license_ref_idx

        if l == "(" or l == ")":
            return l

        if l == "&":
            return "AND"

        if l == "|":
            return "OR"

        if l == "CLOSED":
            return "NONE"

        spdx_license = d.getVarFlag("SPDXLICENSEMAP", l) or l
        if spdx_license in license_data["licenses"]:
            return spdx_license

        # Not a known SPDX license; emit a LicenseRef backed by license text
        spdx_license = "LicenseRef-" + l
        if spdx_license not in license_text_map:
            license_text_map[spdx_license] = add_license_text(l)._id

        return spdx_license

    # Pad operators/parens with spaces so split() yields clean tokens
    lic_split = license_expression.replace("(", " ( ").replace(")", " ) ").replace("|", " | ").replace("&", " & ").split()
    spdx_license_expression = ' '.join(convert(l) for l in lic_split)

    return objset.new_license_expression(spdx_license_expression, license_text_map)
| 218 | |||
| 219 | |||
def add_package_files(d, objset, topdir, get_spdxid, get_purposes, *, archive=None, ignore_dirs=None, ignore_top_level_dirs=None):
    """Create SPDX file elements for every regular file under *topdir*.

    Symlinks and non-regular files are skipped. Each file gets an SPDX ID
    from get_spdxid(counter) and purposes from get_purposes(path); files
    with the "source" purpose additionally have their declared licenses
    scanned. If *archive* (a tarfile open for writing) is given, each file
    is appended to it with ownership normalized to root and mtime clamped
    to SOURCE_DATE_EPOCH so the archive is reproducible.

    ignore_dirs / ignore_top_level_dirs default to "ignore nothing"
    (None is treated as an empty list; avoids mutable default arguments).

    Returns the set of created SPDX file objects.
    """
    from pathlib import Path
    import oe.spdx30
    import oe.sbom30

    ignore_dirs = ignore_dirs or []
    ignore_top_level_dirs = ignore_top_level_dirs or []

    source_date_epoch = d.getVar("SOURCE_DATE_EPOCH")
    if source_date_epoch:
        source_date_epoch = int(source_date_epoch)

    spdx_files = set()

    file_counter = 1
    top = str(topdir)
    for subdir, dirs, files in os.walk(topdir):
        # Prune ignored directories in place so os.walk never descends
        # into them (do not shadow the datastore parameter 'd')
        dirs[:] = [dirname for dirname in dirs if dirname not in ignore_dirs]
        if subdir == top:
            dirs[:] = [dirname for dirname in dirs if dirname not in ignore_top_level_dirs]

        for file in files:
            filepath = Path(subdir) / file
            if filepath.is_symlink() or not filepath.is_file():
                continue

            bb.debug(1, "Adding file %s to %s" % (filepath, objset.doc._id))

            filename = str(filepath.relative_to(topdir))
            file_purposes = get_purposes(filepath)

            spdx_file = objset.new_file(
                get_spdxid(file_counter),
                filename,
                filepath,
                purposes=file_purposes,
            )
            spdx_files.add(spdx_file)

            if oe.spdx30.software_SoftwarePurpose.source in file_purposes:
                objset.scan_declared_licenses(spdx_file, filepath)

            if archive is not None:
                with filepath.open("rb") as f:
                    info = archive.gettarinfo(fileobj=f)
                    # Normalize metadata so the archive is reproducible
                    info.name = filename
                    info.uid = 0
                    info.gid = 0
                    info.uname = "root"
                    info.gname = "root"

                    if source_date_epoch is not None and info.mtime > source_date_epoch:
                        info.mtime = source_date_epoch

                    archive.addfile(info, f)

            file_counter += 1

    return spdx_files
| 275 | |||
| 276 | |||
def get_package_sources_from_debug(d, package, package_files, sources, source_hash_cache):
    """Map debug source references of *package* back to SPDX source files.

    Reads the package's files_info from pkgdata; for each debugsrc entry
    the debug search paths (PKGD, target/native sysroots, kernel dir) are
    probed and the file's SHA256 is matched against *sources* (hash ->
    SPDX source element, as built by collect_dep_sources).
    *source_hash_cache* memoizes per-path hashes across calls (None marks
    a path that does not exist).

    Returns the set of matched SPDX source elements, or None when no
    pkgdata exists for *package*.
    """
    from pathlib import Path
    import oe.packagedata
    import oe.sbom30  # required for OEFileNameAliasExtension below

    def file_path_match(file_path, pkg_file):
        # A pkgdata path matches an SPDX file if it equals the file's name
        # or any recorded alias (leading slashes ignored)
        if file_path.lstrip("/") == pkg_file.name.lstrip("/"):
            return True

        for e in pkg_file.extension:
            if isinstance(e, oe.sbom30.OEFileNameAliasExtension):
                for a in e.aliases:
                    if file_path.lstrip("/") == a.lstrip("/"):
                        return True

        return False

    debug_search_paths = [
        Path(d.getVar('PKGD')),
        Path(d.getVar('STAGING_DIR_TARGET')),
        Path(d.getVar('STAGING_DIR_NATIVE')),
        Path(d.getVar('STAGING_KERNEL_DIR')),
    ]

    pkg_data = oe.packagedata.read_subpkgdata_extended(package, d)

    if pkg_data is None:
        return

    dep_source_files = set()

    for file_path, file_data in pkg_data["files_info"].items():
        if "debugsrc" not in file_data:
            continue

        if not any(file_path_match(file_path, pkg_file) for pkg_file in package_files):
            # bb.fatal() raises, so no 'continue' is needed after it
            bb.fatal("No package file found for %s in %s; SPDX found: %s" % (str(file_path), package,
                " ".join(p.name for p in package_files)))

        for debugsrc in file_data["debugsrc"]:
            for search in debug_search_paths:
                # Kernel debug sources are recorded under /usr/src/kernel
                # but live directly in STAGING_KERNEL_DIR
                if debugsrc.startswith("/usr/src/kernel"):
                    debugsrc_path = search / debugsrc.replace('/usr/src/kernel/', '')
                else:
                    debugsrc_path = search / debugsrc.lstrip("/")

                if debugsrc_path in source_hash_cache:
                    file_sha256 = source_hash_cache[debugsrc_path]
                    if file_sha256 is None:
                        continue
                else:
                    if not debugsrc_path.exists():
                        source_hash_cache[debugsrc_path] = None
                        continue

                    file_sha256 = bb.utils.sha256_file(debugsrc_path)
                    source_hash_cache[debugsrc_path] = file_sha256

                if file_sha256 in sources:
                    source_file = sources[file_sha256]
                    dep_source_files.add(source_file)
                else:
                    bb.debug(1, "Debug source %s with SHA256 %s not found in any dependency" % (str(debugsrc_path), file_sha256))
                break
            else:
                # No search path contained this debug source at all
                bb.debug(1, "Debug source %s not found" % debugsrc)

    return dep_source_files
| 345 | |||
| 346 | get_package_sources_from_debug[vardepsexclude] += "STAGING_KERNEL_DIR" | ||
| 347 | |||
def collect_dep_objsets(d, build):
    """Load the SPDX object sets of this recipe's SPDX dependencies.

    Every dependency contributes its Build element; full object sets are
    only collected for dependencies that are part of the taskhash, since
    this recipe will not be rebuilt when a non-taskhash dependency changes
    and thus cannot safely link against its contents.

    Returns (dep_objsets, dep_builds): a list of object sets and a set of
    Build elements.
    """
    import oe.sbom30
    import oe.spdx30

    deps = get_spdx_deps(d)

    dep_objsets = []
    dep_builds = set()

    for dep_pn, _, in_taskhash in deps:
        bb.debug(1, "Fetching SPDX for dependency %s" % (dep_pn))
        dep_build, dep_objset = oe.sbom30.find_root_obj_in_jsonld(d, "recipes", dep_pn, oe.spdx30.build_Build)
        # If the dependency is part of the taskhash, return it to be linked
        # against. Otherwise, it cannot be linked against because this recipe
        # will not be rebuilt if the dependency changes
        if in_taskhash:
            dep_objsets.append(dep_objset)

        # The build _can_ be linked against (by alias)
        dep_builds.add(dep_build)

    return dep_objsets, dep_builds
| 373 | |||
| 374 | collect_dep_objsets[vardepsexclude] = "SSTATE_ARCHS" | ||
| 375 | |||
def collect_dep_sources(dep_objsets):
    """Collect dependency source files, keyed by their SHA256 hash.

    Scans each dependency object set for hasInputs relationships that
    originate from its Build element and gathers every input
    software_File whose primary purpose is "source". Native recipes are
    skipped because their sources would also match non-native ones.

    Returns a dict mapping SHA256 hash value -> SPDX source file element.
    """
    import oe.spdx30
    import oe.sbom30

    sources = {}
    for objset in dep_objsets:
        # Don't collect sources from native recipes as they
        # match non-native sources also.
        if objset.is_native():
            continue

        bb.debug(1, "Fetching Sources for dependency %s" % (objset.doc.name))

        dep_build = objset.find_root(oe.spdx30.build_Build)
        if not dep_build:
            bb.fatal("Unable to find a build")

        for e in objset.foreach_type(oe.spdx30.Relationship):
            # Only relationships from this dependency's own Build element
            # matter (identity comparison is intentional)
            if dep_build is not e.from_:
                continue

            if e.relationshipType != oe.spdx30.RelationshipType.hasInputs:
                continue

            for to in e.to:
                if not isinstance(to, oe.spdx30.software_File):
                    continue

                if to.software_primaryPurpose != oe.spdx30.software_SoftwarePurpose.source:
                    continue

                # Index the source file by SHA256; a missing SHA256 hash
                # is a hard error
                for v in to.verifiedUsing:
                    if v.algorithm == oe.spdx30.HashAlgorithm.sha256:
                        sources[v.hashValue] = to
                        break
                else:
                    bb.fatal("No SHA256 found for %s in %s" % (to.name, objset.doc.name))

    return sources
| 415 | |||
def add_download_files(d, objset):
    """Add SPDX elements for everything fetched via SRC_URI.

    Local "file://" entries become SPDX file elements (directories are
    walked and each contained non-symlink file added individually); remote
    downloads become software_Package elements carrying the reconstructed
    download URI and, when available, the expected checksums. Entries that
    are patches get the "patch" purpose, everything else "source".

    Returns the set of created elements (used as build inputs).
    """
    import oe.patch
    import oe.spdx30
    import os

    inputs = set()

    urls = d.getVar("SRC_URI").split()
    fetch = bb.fetch2.Fetch(urls, d)

    for download_idx, src_uri in enumerate(urls):
        fd = fetch.ud[src_uri]

        for name in fd.names:
            file_name = os.path.basename(fetch.localpath(src_uri))
            if oe.patch.patch_path(src_uri, fetch, '', expand=False):
                primary_purpose = oe.spdx30.software_SoftwarePurpose.patch
            else:
                primary_purpose = oe.spdx30.software_SoftwarePurpose.source

            if fd.type == "file":
                if os.path.isdir(fd.localpath):
                    # Local directory: add each contained file individually
                    walk_idx = 1
                    for root, dirs, files in os.walk(fd.localpath):
                        for f in files:
                            f_path = os.path.join(root, f)
                            if os.path.islink(f_path):
                                # TODO: SPDX doesn't support symlinks yet
                                continue

                            file = objset.new_file(
                                objset.new_spdxid("source", str(download_idx + 1), str(walk_idx)),
                                os.path.join(file_name, os.path.relpath(f_path, fd.localpath)),
                                f_path,
                                purposes=[primary_purpose],
                            )

                            inputs.add(file)
                            walk_idx += 1

                else:
                    # Single local file
                    file = objset.new_file(
                        objset.new_spdxid("source", str(download_idx + 1)),
                        file_name,
                        fd.localpath,
                        purposes=[primary_purpose],
                    )
                    inputs.add(file)

            else:
                # Remote download: reconstruct the canonical download URI
                uri = fd.type
                proto = getattr(fd, "proto", None)
                if proto is not None:
                    uri = uri + "+" + proto
                uri = uri + "://" + fd.host + fd.path

                if fd.method.supports_srcrev():
                    uri = uri + "@" + fd.revisions[name]

                # NOTE(review): the SPDX ID encodes only download_idx, so a
                # src_uri with multiple names would reuse one ID — confirm
                dl = objset.add(oe.spdx30.software_Package(
                    _id=objset.new_spdxid("source", str(download_idx + 1)),
                    creationInfo=objset.doc.creationInfo,
                    name=file_name,
                    software_primaryPurpose=primary_purpose,
                    software_downloadLocation=uri,
                ))

                if fd.method.supports_checksum(fd):
                    # TODO Need something better than hard coding this
                    for checksum_id in ["sha256", "sha1"]:
                        expected_checksum = getattr(fd, "%s_expected" % checksum_id, None)
                        if expected_checksum is None:
                            continue

                        dl.verifiedUsing.append(
                            oe.spdx30.Hash(
                                algorithm=getattr(oe.spdx30.HashAlgorithm, checksum_id),
                                hashValue=expected_checksum,
                            )
                        )

                inputs.add(dl)

    return inputs
| 500 | |||
| 501 | |||
def set_purposes(d, element, *var_names, force_purposes=None):
    """Set the software purpose fields on SPDX *element*.

    The purpose list is *force_purposes* (if given) followed by the
    whitespace-split value of the first non-empty bitbake variable in
    *var_names*. The first purpose becomes software_primaryPurpose and the
    rest software_additionalPurpose. Warns and leaves *element* untouched
    when no purposes are found.

    force_purposes defaults to None (treated as empty) to avoid a mutable
    default argument; passing a list still works as before.
    """
    # Import locally like the other helpers in this class so the function
    # works outside contexts where oe.spdx30 is already in scope
    import oe.spdx30

    purposes = list(force_purposes or [])

    for var_name in var_names:
        val = d.getVar(var_name)
        if val:
            purposes.extend(val.split())
            # Only the first variable that is set contributes purposes
            break

    if not purposes:
        bb.warn("No SPDX purposes found in %s" % " ".join(var_names))
        return

    element.software_primaryPurpose = getattr(oe.spdx30.software_SoftwarePurpose, purposes[0])
    element.software_additionalPurpose = [getattr(oe.spdx30.software_SoftwarePurpose, p) for p in purposes[1:]]
| 517 | |||
| 518 | |||
| 519 | python do_create_spdx() { | ||
| 520 | import oe.sbom30 | ||
| 521 | import oe.spdx30 | ||
| 522 | from pathlib import Path | ||
| 523 | from contextlib import contextmanager | ||
| 524 | import oe.cve_check | ||
| 525 | from datetime import datetime | ||
| 526 | |||
| 527 | def set_var_field(var, obj, name, package=None): | ||
| 528 | val = None | ||
| 529 | if package: | ||
| 530 | val = d.getVar("%s:%s" % (var, package)) | ||
| 531 | |||
| 532 | if not val: | ||
| 533 | val = d.getVar(var) | ||
| 534 | |||
| 535 | if val: | ||
| 536 | setattr(obj, name, val) | ||
| 537 | |||
| 538 | deploydir = Path(d.getVar("SPDXDEPLOY")) | ||
| 539 | deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX")) | ||
| 540 | spdx_workdir = Path(d.getVar("SPDXWORK")) | ||
| 541 | include_sources = d.getVar("SPDX_INCLUDE_SOURCES") == "1" | ||
| 542 | pkg_arch = d.getVar("SSTATE_PKGARCH") | ||
| 543 | is_native = bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d) | ||
| 544 | include_vex = d.getVar("SPDX_INCLUDE_VEX") | ||
| 545 | if not include_vex in ("none", "current", "all"): | ||
| 546 | bb.fatal("SPDX_INCLUDE_VEX must be one of 'none', 'current', 'all'") | ||
| 547 | |||
| 548 | build_objset = oe.sbom30.ObjectSet.new_objset(d, d.getVar("PN")) | ||
| 549 | |||
| 550 | build = build_objset.new_task_build("recipe", "recipe") | ||
| 551 | build_objset.doc.rootElement.append(build) | ||
| 552 | |||
| 553 | build_objset.set_is_native(is_native) | ||
| 554 | |||
| 555 | for var in (d.getVar('SPDX_CUSTOM_ANNOTATION_VARS') or "").split(): | ||
| 556 | new_annotation( | ||
| 557 | d, | ||
| 558 | build_objset, | ||
| 559 | build, | ||
| 560 | "%s=%s" % (var, d.getVar(var)), | ||
| 561 | oe.spdx30.AnnotationType.other | ||
| 562 | ) | ||
| 563 | |||
| 564 | build_inputs = set() | ||
| 565 | |||
| 566 | # Add CVEs | ||
| 567 | cve_by_status = {} | ||
| 568 | if include_vex != "none": | ||
| 569 | for cve in (d.getVarFlags("CVE_STATUS") or {}): | ||
| 570 | status, detail, description = oe.cve_check.decode_cve_status(d, cve) | ||
| 571 | |||
| 572 | # If this CVE is fixed upstream, skip it unless all CVEs are | ||
| 573 | # specified. | ||
| 574 | if include_vex != "all" and detail in ("fixed-version", "cpe-stable-backport"): | ||
| 575 | bb.debug(1, "Skipping %s since it is already fixed upstream" % cve) | ||
| 576 | continue | ||
| 577 | |||
| 578 | cve_by_status.setdefault(status, {})[cve] = ( | ||
| 579 | build_objset.new_cve_vuln(cve), | ||
| 580 | detail, | ||
| 581 | description, | ||
| 582 | ) | ||
| 583 | |||
| 584 | cpe_ids = oe.cve_check.get_cpe_ids(d.getVar("CVE_PRODUCT"), d.getVar("CVE_VERSION")) | ||
| 585 | |||
| 586 | source_files = add_download_files(d, build_objset) | ||
| 587 | build_inputs |= source_files | ||
| 588 | |||
| 589 | recipe_spdx_license = add_license_expression(d, build_objset, d.getVar("LICENSE")) | ||
| 590 | build_objset.new_relationship( | ||
| 591 | source_files, | ||
| 592 | oe.spdx30.RelationshipType.hasConcludedLicense, | ||
| 593 | [recipe_spdx_license], | ||
| 594 | ) | ||
| 595 | |||
| 596 | if process_sources(d) and include_sources: | ||
| 597 | bb.debug(1, "Adding source files to SPDX") | ||
| 598 | spdx_get_src(d) | ||
| 599 | |||
| 600 | build_inputs |= add_package_files( | ||
| 601 | d, | ||
| 602 | build_objset, | ||
| 603 | spdx_workdir, | ||
| 604 | lambda file_counter: build_objset.new_spdxid("sourcefile", str(file_counter)), | ||
| 605 | lambda filepath: [oe.spdx30.software_SoftwarePurpose.source], | ||
| 606 | ignore_dirs=[".git"], | ||
| 607 | ignore_top_level_dirs=["temp"], | ||
| 608 | archive=None, | ||
| 609 | ) | ||
| 610 | |||
| 611 | |||
| 612 | dep_objsets, dep_builds = collect_dep_objsets(d, build) | ||
| 613 | if dep_builds: | ||
| 614 | build_objset.new_scoped_relationship( | ||
| 615 | [build], | ||
| 616 | oe.spdx30.RelationshipType.dependsOn, | ||
| 617 | oe.spdx30.LifecycleScopeType.build, | ||
| 618 | sorted(oe.sbom30.get_element_link_id(b) for b in dep_builds), | ||
| 619 | ) | ||
| 620 | |||
| 621 | debug_source_ids = set() | ||
| 622 | source_hash_cache = {} | ||
| 623 | |||
| 624 | # Write out the package SPDX data now. It is not complete as we cannot | ||
| 625 | # write the runtime data, so write it to a staging area and a later task | ||
| 626 | # will write out the final collection | ||
| 627 | |||
| 628 | # TODO: Handle native recipe output | ||
| 629 | if not is_native: | ||
| 630 | bb.debug(1, "Collecting Dependency sources files") | ||
| 631 | sources = collect_dep_sources(dep_objsets) | ||
| 632 | |||
| 633 | bb.build.exec_func("read_subpackage_metadata", d) | ||
| 634 | |||
| 635 | pkgdest = Path(d.getVar("PKGDEST")) | ||
| 636 | for package in d.getVar("PACKAGES").split(): | ||
| 637 | if not oe.packagedata.packaged(package, d): | ||
| 638 | continue | ||
| 639 | |||
| 640 | pkg_name = d.getVar("PKG:%s" % package) or package | ||
| 641 | |||
| 642 | bb.debug(1, "Creating SPDX for package %s" % pkg_name) | ||
| 643 | |||
| 644 | pkg_objset = oe.sbom30.ObjectSet.new_objset(d, pkg_name) | ||
| 645 | |||
| 646 | spdx_package = pkg_objset.add_root(oe.spdx30.software_Package( | ||
| 647 | _id=pkg_objset.new_spdxid("package", pkg_name), | ||
| 648 | creationInfo=pkg_objset.doc.creationInfo, | ||
| 649 | name=pkg_name, | ||
| 650 | software_packageVersion=d.getVar("PV"), | ||
| 651 | )) | ||
| 652 | set_timestamp_now(d, spdx_package, "builtTime") | ||
| 653 | |||
| 654 | set_purposes( | ||
| 655 | d, | ||
| 656 | spdx_package, | ||
| 657 | "SPDX_PACKAGE_ADDITIONAL_PURPOSE:%s" % package, | ||
| 658 | "SPDX_PACKAGE_ADDITIONAL_PURPOSE", | ||
| 659 | force_purposes=["install"], | ||
| 660 | ) | ||
| 661 | |||
| 662 | |||
| 663 | supplier = build_objset.new_agent("SPDX_PACKAGE_SUPPLIER") | ||
| 664 | if supplier is not None: | ||
| 665 | spdx_package.supplier = supplier if isinstance(supplier, str) else supplier._id | ||
| 666 | |||
| 667 | set_var_field("HOMEPAGE", spdx_package, "software_homePage", package=package) | ||
| 668 | set_var_field("SUMMARY", spdx_package, "summary", package=package) | ||
| 669 | set_var_field("DESCRIPTION", spdx_package, "description", package=package) | ||
| 670 | |||
| 671 | pkg_objset.new_scoped_relationship( | ||
| 672 | [build._id], | ||
| 673 | oe.spdx30.RelationshipType.hasOutputs, | ||
| 674 | oe.spdx30.LifecycleScopeType.build, | ||
| 675 | [spdx_package], | ||
| 676 | ) | ||
| 677 | |||
| 678 | for cpe_id in cpe_ids: | ||
| 679 | spdx_package.externalIdentifier.append( | ||
| 680 | oe.spdx30.ExternalIdentifier( | ||
| 681 | externalIdentifierType=oe.spdx30.ExternalIdentifierType.cpe23, | ||
| 682 | identifier=cpe_id, | ||
| 683 | )) | ||
| 684 | |||
| 685 | # TODO: Generate a file for each actual IPK/DEB/RPM/TGZ file | ||
| 686 | # generated and link it to the package | ||
| 687 | #spdx_package_file = pkg_objset.add(oe.spdx30.software_File( | ||
| 688 | # _id=pkg_objset.new_spdxid("distribution", pkg_name), | ||
| 689 | # creationInfo=pkg_objset.doc.creationInfo, | ||
| 690 | # name=pkg_name, | ||
| 691 | # software_primaryPurpose=spdx_package.software_primaryPurpose, | ||
| 692 | # software_additionalPurpose=spdx_package.software_additionalPurpose, | ||
| 693 | #)) | ||
| 694 | #set_timestamp_now(d, spdx_package_file, "builtTime") | ||
| 695 | |||
| 696 | ## TODO add hashes | ||
| 697 | #pkg_objset.new_relationship( | ||
| 698 | # [spdx_package], | ||
| 699 | # oe.spdx30.RelationshipType.hasDistributionArtifact, | ||
| 700 | # [spdx_package_file], | ||
| 701 | #) | ||
| 702 | |||
| 703 | # NOTE: licenses live in the recipe collection and are referenced | ||
| 704 | # by ID in the package collection(s). This helps reduce duplication | ||
| 705 | # (since a lot of packages will have the same license), and also | ||
| 706 | # prevents duplicate license SPDX IDs in the packages | ||
| 707 | package_license = d.getVar("LICENSE:%s" % package) | ||
| 708 | if package_license and package_license != d.getVar("LICENSE"): | ||
| 709 | package_spdx_license = add_license_expression(d, build_objset, package_license) | ||
| 710 | else: | ||
| 711 | package_spdx_license = recipe_spdx_license | ||
| 712 | |||
| 713 | pkg_objset.new_relationship( | ||
| 714 | [spdx_package], | ||
| 715 | oe.spdx30.RelationshipType.hasConcludedLicense, | ||
| 716 | [package_spdx_license._id], | ||
| 717 | ) | ||
| 718 | |||
| 719 | # NOTE: CVE Elements live in the recipe collection | ||
| 720 | all_cves = set() | ||
| 721 | for status, cves in cve_by_status.items(): | ||
| 722 | for cve, items in cves.items(): | ||
| 723 | spdx_cve, detail, description = items | ||
| 724 | |||
| 725 | all_cves.add(spdx_cve._id) | ||
| 726 | |||
| 727 | if status == "Patched": | ||
| 728 | pkg_objset.new_vex_patched_relationship([spdx_cve._id], [spdx_package]) | ||
| 729 | elif status == "Unpatched": | ||
| 730 | pkg_objset.new_vex_unpatched_relationship([spdx_cve._id], [spdx_package]) | ||
| 731 | elif status == "Ignored": | ||
| 732 | spdx_vex = pkg_objset.new_vex_ignored_relationship( | ||
| 733 | [spdx_cve._id], | ||
| 734 | [spdx_package], | ||
| 735 | impact_statement=description, | ||
| 736 | ) | ||
| 737 | |||
| 738 | if detail in ("ignored", "cpe-incorrect", "disputed", "upstream-wontfix"): | ||
| 739 | # VEX doesn't have justifications for this | ||
| 740 | pass | ||
| 741 | elif detail in ("not-applicable-config", "not-applicable-platform"): | ||
| 742 | for v in spdx_vex: | ||
| 743 | v.security_justificationType = oe.spdx30.security_VexJustificationType.vulnerableCodeNotPresent | ||
| 744 | else: | ||
| 745 | bb.fatal(f"Unknown detail '{detail}' for ignored {cve}") | ||
| 746 | else: | ||
| 747 | bb.fatal(f"Unknown CVE status {status}") | ||
| 748 | |||
| 749 | if all_cves: | ||
| 750 | pkg_objset.new_relationship( | ||
| 751 | [spdx_package], | ||
| 752 | oe.spdx30.RelationshipType.hasAssociatedVulnerability, | ||
| 753 | sorted(list(all_cves)), | ||
| 754 | ) | ||
| 755 | |||
| 756 | bb.debug(1, "Adding package files to SPDX for package %s" % pkg_name) | ||
| 757 | package_files = add_package_files( | ||
| 758 | d, | ||
| 759 | pkg_objset, | ||
| 760 | pkgdest / package, | ||
| 761 | lambda file_counter: pkg_objset.new_spdxid("package", pkg_name, "file", str(file_counter)), | ||
| 762 | # TODO: Can we know the purpose here? | ||
| 763 | lambda filepath: [], | ||
| 764 | ignore_top_level_dirs=['CONTROL', 'DEBIAN'], | ||
| 765 | archive=None, | ||
| 766 | ) | ||
| 767 | |||
| 768 | if package_files: | ||
| 769 | pkg_objset.new_relationship( | ||
| 770 | [spdx_package], | ||
| 771 | oe.spdx30.RelationshipType.contains, | ||
| 772 | sorted(list(package_files)), | ||
| 773 | ) | ||
| 774 | |||
| 775 | if include_sources: | ||
| 776 | debug_sources = get_package_sources_from_debug(d, package, package_files, sources, source_hash_cache) | ||
| 777 | debug_source_ids |= set(oe.sbom30.get_element_link_id(d) for d in debug_sources) | ||
| 778 | |||
| 779 | oe.sbom30.write_recipe_jsonld_doc(d, pkg_objset, "packages-staging", deploydir, create_spdx_id_links=False) | ||
| 780 | |||
| 781 | if include_sources: | ||
| 782 | bb.debug(1, "Adding sysroot files to SPDX") | ||
| 783 | sysroot_files = add_package_files( | ||
| 784 | d, | ||
| 785 | build_objset, | ||
| 786 | d.expand("${COMPONENTS_DIR}/${PACKAGE_ARCH}/${PN}"), | ||
| 787 | lambda file_counter: build_objset.new_spdxid("sysroot", str(file_counter)), | ||
| 788 | lambda filepath: [], | ||
| 789 | archive=None, | ||
| 790 | ) | ||
| 791 | |||
| 792 | if sysroot_files: | ||
| 793 | build_objset.new_scoped_relationship( | ||
| 794 | [build], | ||
| 795 | oe.spdx30.RelationshipType.hasOutputs, | ||
| 796 | oe.spdx30.LifecycleScopeType.build, | ||
| 797 | sorted(list(sysroot_files)), | ||
| 798 | ) | ||
| 799 | |||
| 800 | if build_inputs or debug_source_ids: | ||
| 801 | build_objset.new_scoped_relationship( | ||
| 802 | [build], | ||
| 803 | oe.spdx30.RelationshipType.hasInputs, | ||
| 804 | oe.spdx30.LifecycleScopeType.build, | ||
| 805 | sorted(list(build_inputs)) + sorted(list(debug_source_ids)), | ||
| 806 | ) | ||
| 807 | |||
| 808 | oe.sbom30.write_recipe_jsonld_doc(d, build_objset, "recipes", deploydir) | ||
| 809 | } | ||
# The task signature must not depend on the build host's thread count
do_create_spdx[vardepsexclude] += "BB_NUMBER_THREADS"
# Run once all build and packaging metadata is available;
# create_spdx_source_deps() (below) conditionally adds the dependencies
# needed for source extraction
addtask do_create_spdx after \
    do_collect_spdx_deps \
    do_deploy_source_date_epoch \
    do_populate_sysroot do_package do_packagedata \
    ${create_spdx_source_deps(d)} \
    before do_populate_sdk do_populate_sdk_ext do_build do_rm_work
| 817 | |||
def create_spdx_source_deps(d):
    """Return the extra inter-task dependencies (as a space-separated
    string) needed when SPDX_INCLUDE_SOURCES is enabled, otherwise an
    empty string."""
    if d.getVar("SPDX_INCLUDE_SOURCES") != "1":
        return ""
    # do_unpack is a hack for now; we only need it to get the
    # dependencies do_unpack already has so we can extract the source
    # ourselves.
    # do_shared_workdir is required for kernel source code.
    return " ".join(("do_unpack", "do_shared_workdir"))
| 830 | |||
# Make the per-recipe SPDX output shareable via shared state: results are
# produced in SPDXDEPLOY and installed into DEPLOY_DIR_SPDX
SSTATETASKS += "do_create_spdx"
do_create_spdx[sstate-inputdirs] = "${SPDXDEPLOY}"
do_create_spdx[sstate-outputdirs] = "${DEPLOY_DIR_SPDX}"

python do_create_spdx_setscene () {
    sstate_setscene(d)
}
addtask do_create_spdx_setscene

do_create_spdx[dirs] = "${SPDXWORK}"
# Start from clean staging/work directories on every run
do_create_spdx[cleandirs] = "${SPDXDEPLOY} ${SPDXWORK}"
do_create_spdx[depends] += "${PATCHDEPENDENCY}"
| 843 | |||
# Generate the runtime ("packages") SPDX documents: for each packaged output
# of this recipe, load the staged package document written by do_create_spdx,
# add runtime dependsOn relationships derived from RDEPENDS, and write the
# final per-package document to SPDXRUNTIMEDEPLOY.
python do_create_package_spdx() {
    import oe.sbom30
    import oe.spdx30
    import oe.packagedata
    from pathlib import Path

    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
    deploydir = Path(d.getVar("SPDXRUNTIMEDEPLOY"))
    is_native = bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d)

    providers = collect_package_providers(d)
    pkg_arch = d.getVar("SSTATE_PKGARCH")

    # native/cross recipes produce no runtime packages, so there is nothing
    # to do for them
    if not is_native:
        bb.build.exec_func("read_subpackage_metadata", d)

        # Cache of dependency name -> SPDX package, so each provider's
        # document is only searched once across all packages of this recipe
        dep_package_cache = {}

        # Any element common to all packages that need to be referenced by ID
        # should be written into this objset set
        common_objset = oe.sbom30.ObjectSet.new_objset(d, "%s-package-common" % d.getVar("PN"))

        pkgdest = Path(d.getVar("PKGDEST"))
        for package in d.getVar("PACKAGES").split():
            localdata = bb.data.createCopy(d)
            pkg_name = d.getVar("PKG:%s" % package) or package
            localdata.setVar("PKG", pkg_name)
            localdata.setVar('OVERRIDES', d.getVar("OVERRIDES", False) + ":" + package)

            # Skip packages that were not actually produced
            if not oe.packagedata.packaged(package, localdata):
                continue

            spdx_package, pkg_objset = oe.sbom30.load_obj_in_jsonld(
                d,
                pkg_arch,
                "packages-staging",
                pkg_name,
                oe.spdx30.software_Package,
                software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.install,
            )

            # We will write out a new collection, so link it to the new
            # creation info in the common package data. The old creation info
            # should still exist and be referenced by all the existing elements
            # in the package
            pkg_objset.creationInfo = pkg_objset.copy_creation_info(common_objset.doc.creationInfo)

            runtime_spdx_deps = set()

            deps = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS") or "")
            seen_deps = set()
            for dep, _ in deps.items():
                if dep in seen_deps:
                    continue

                if dep not in providers:
                    continue

                # Resolve the dependency name to its actual provider package
                (dep, _) = providers[dep]

                if not oe.packagedata.packaged(dep, localdata):
                    continue

                dep_pkg_data = oe.packagedata.read_subpkgdata_dict(dep, d)
                dep_pkg = dep_pkg_data["PKG"]

                if dep in dep_package_cache:
                    dep_spdx_package = dep_package_cache[dep]
                else:
                    bb.debug(1, "Searching for %s" % dep_pkg)
                    dep_spdx_package, _ = oe.sbom30.find_root_obj_in_jsonld(
                        d,
                        "packages-staging",
                        dep_pkg,
                        oe.spdx30.software_Package,
                        software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.install,
                    )
                    dep_package_cache[dep] = dep_spdx_package

                runtime_spdx_deps.add(dep_spdx_package)
                seen_deps.add(dep)

            if runtime_spdx_deps:
                pkg_objset.new_scoped_relationship(
                    [spdx_package],
                    oe.spdx30.RelationshipType.dependsOn,
                    oe.spdx30.LifecycleScopeType.runtime,
                    [oe.sbom30.get_element_link_id(dep) for dep in runtime_spdx_deps],
                )

            oe.sbom30.write_recipe_jsonld_doc(d, pkg_objset, "packages", deploydir)

        oe.sbom30.write_recipe_jsonld_doc(d, common_objset, "common-package", deploydir)
}
| 938 | |||
# OVERRIDES is rewritten per-package inside the task and SSTATE_ARCHS varies
# between configurations — exclude both from the task signature
do_create_package_spdx[vardepsexclude] += "OVERRIDES SSTATE_ARCHS"

addtask do_create_package_spdx after do_create_spdx before do_build do_rm_work
# Shared-state handling: results staged in SPDXRUNTIMEDEPLOY, installed into
# DEPLOY_DIR_SPDX
SSTATETASKS += "do_create_package_spdx"
do_create_package_spdx[sstate-inputdirs] = "${SPDXRUNTIMEDEPLOY}"
do_create_package_spdx[sstate-outputdirs] = "${DEPLOY_DIR_SPDX}"

python do_create_package_spdx_setscene () {
    sstate_setscene(d)
}
addtask do_create_package_spdx_setscene

do_create_package_spdx[dirs] = "${SPDXRUNTIMEDEPLOY}"
do_create_package_spdx[cleandirs] = "${SPDXRUNTIMEDEPLOY}"
# Resolving RDEPENDS requires the SPDX output of every runtime dependency
do_create_package_spdx[rdeptask] = "do_create_spdx"
| 954 | |||
| 955 | |||
| 956 | |||
# Event handler fired once per bitbake invocation (bb.event.BuildStarted).
# Writes bitbake.spdx.json into DEPLOY_DIR_SPDX, optionally describing the
# parent build (host machine, invoking agent, delegating agent) when
# SPDX_INCLUDE_BITBAKE_PARENT_BUILD is enabled.
python spdx30_build_started_handler () {
    import oe.spdx30
    import oe.sbom30
    import os
    from pathlib import Path
    from datetime import datetime, timezone

    # Create a copy of the datastore. Set PN to "bitbake" so that SPDX IDs can
    # be generated
    d = e.data.createCopy()
    d.setVar("PN", "bitbake")
    d.setVar("BB_TASKHASH", "bitbake")
    load_spdx_license_data(d)

    deploy_dir_spdx = Path(e.data.getVar("DEPLOY_DIR_SPDX"))

    objset = oe.sbom30.ObjectSet.new_objset(d, "bitbake", False)

    host_import_key = d.getVar("SPDX_BUILD_HOST")
    # add=False: agents are only added to the document if actually used below
    invoked_by = objset.new_agent("SPDX_INVOKED_BY", add=False)
    on_behalf_of = objset.new_agent("SPDX_ON_BEHALF_OF", add=False)

    if d.getVar("SPDX_INCLUDE_BITBAKE_PARENT_BUILD") == "1":
        # Since the Build objects are unique, we may as well set the creation
        # time to the current time instead of the fallback SDE
        objset.doc.creationInfo.created = datetime.now(timezone.utc)

        # Each invocation of bitbake should have a unique ID since it is a
        # unique build
        nonce = os.urandom(16).hex()

        build = objset.add_root(oe.spdx30.build_Build(
            _id=objset.new_spdxid(nonce, include_unihash=False),
            creationInfo=objset.doc.creationInfo,
            build_buildType=oe.sbom30.SPDX_BUILD_TYPE,
        ))
        set_timestamp_now(d, build, "build_buildStartTime")

        if host_import_key:
            objset.new_scoped_relationship(
                [build],
                oe.spdx30.RelationshipType.hasHost,
                oe.spdx30.LifecycleScopeType.build,
                [objset.new_import("SPDX_BUILD_HOST")],
            )

        if invoked_by:
            objset.add(invoked_by)
            invoked_by_spdx = objset.new_scoped_relationship(
                [build],
                oe.spdx30.RelationshipType.invokedBy,
                oe.spdx30.LifecycleScopeType.build,
                [invoked_by],
            )

            if on_behalf_of:
                objset.add(on_behalf_of)
                objset.new_scoped_relationship(
                    [on_behalf_of],
                    oe.spdx30.RelationshipType.delegatedTo,
                    oe.spdx30.LifecycleScopeType.build,
                    invoked_by_spdx,
                )

        elif on_behalf_of:
            # delegatedTo is only meaningful relative to an invoking agent
            bb.warn("SPDX_ON_BEHALF_OF has no effect if SPDX_INVOKED_BY is not set")

    else:
        if host_import_key:
            bb.warn("SPDX_BUILD_HOST has no effect if SPDX_INCLUDE_BITBAKE_PARENT_BUILD is not set")

        if invoked_by:
            bb.warn("SPDX_INVOKED_BY has no effect if SPDX_INCLUDE_BITBAKE_PARENT_BUILD is not set")

        if on_behalf_of:
            bb.warn("SPDX_ON_BEHALF_OF has no effect if SPDX_INCLUDE_BITBAKE_PARENT_BUILD is not set")

    # This document is written directly (not via the recipe deploy helpers),
    # so mark every element as not requiring SPDX-ID link resolution
    for obj in objset.foreach_type(oe.spdx30.Element):
        obj.extension.append(oe.sbom30.OELinkExtension(link_spdx_id=False))
        obj.extension.append(oe.sbom30.OEIdAliasExtension())

    oe.sbom30.write_jsonld_doc(d, objset, deploy_dir_spdx / "bitbake.spdx.json")
}

addhandler spdx30_build_started_handler
spdx30_build_started_handler[eventmask] = "bb.event.BuildStarted"
| 1043 | |||
diff --git a/meta/classes/create-spdx-image-3.0.bbclass b/meta/classes/create-spdx-image-3.0.bbclass new file mode 100644 index 0000000000..bda11d54d4 --- /dev/null +++ b/meta/classes/create-spdx-image-3.0.bbclass | |||
| @@ -0,0 +1,415 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | # SPDX image tasks | ||
| 7 | |||
# Staging locations used by the image SPDX tasks.
# JSON dump of the packages installed in the rootfs (captured by the
# ROOTFS_POSTUNINSTALL_COMMAND hook below)
SPDX_ROOTFS_PACKAGES = "${SPDXDIR}/rootfs-packages.json"
SPDXIMAGEDEPLOYDIR = "${SPDXDIR}/image-deploy"
SPDXROOTFSDEPLOY = "${SPDXDIR}/rootfs-deploy"
| 11 | |||
def collect_build_package_inputs(d, objset, build, packages):
    """Record every installed package as a build input of *build*.

    Looks up the SPDX software_Package element for each entry in
    *packages* (via its provider) and links them all to *build* with a
    single hasInputs relationship. Fatal error if a package has no SPDX
    provider.
    """
    providers = collect_package_providers(d)

    input_ids = set()
    for name in sorted(packages.keys()):
        if name not in providers:
            bb.fatal("Unable to find SPDX provider for '%s'" % name)

        pkg_name, pkg_hashfn = providers[name]

        # Copy all of the package SPDX files into the Sbom elements
        pkg_spdx, _ = oe.sbom30.find_root_obj_in_jsonld(
            d,
            "packages",
            pkg_name,
            oe.spdx30.software_Package,
            software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.install,
        )
        input_ids.add(pkg_spdx._id)

    if not input_ids:
        return

    objset.new_scoped_relationship(
        [build],
        oe.spdx30.RelationshipType.hasInputs,
        oe.spdx30.LifecycleScopeType.build,
        sorted(list(input_ids)),
    )
| 40 | |||
| 41 | |||
# Rootfs post-uninstall hook: snapshot the installed package list to
# SPDX_ROOTFS_PACKAGES so do_create_rootfs_spdx can consume it later.
python spdx_collect_rootfs_packages() {
    import json
    from pathlib import Path
    from oe.rootfs import image_list_installed_packages

    out_path = Path(d.getVar("SPDX_ROOTFS_PACKAGES"))

    # Normalize an empty/None result to an empty dict so the JSON file is
    # always valid
    installed = image_list_installed_packages(d) or {}

    out_path.parent.mkdir(parents=True, exist_ok=True)
    with out_path.open("w") as f:
        json.dump(installed, f)
}
ROOTFS_POSTUNINSTALL_COMMAND =+ "spdx_collect_rootfs_packages"
| 58 | |||
# Create the SPDX document for the assembled rootfs: a software_Package root
# element for the rootfs archive, a Build element for the rootfs task (linked
# via hasOutputs), and hasInputs relationships to every installed package.
# Fix: dropped the unused "from datetime import datetime" import.
python do_create_rootfs_spdx() {
    import json
    from pathlib import Path
    import oe.spdx30
    import oe.sbom30

    # NOTE(review): deploy_dir_spdx is currently unused in this task body;
    # kept so the task signature still depends on DEPLOY_DIR_SPDX — confirm
    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
    deploydir = Path(d.getVar("SPDXROOTFSDEPLOY"))
    root_packages_file = Path(d.getVar("SPDX_ROOTFS_PACKAGES"))
    image_basename = d.getVar("IMAGE_BASENAME")
    machine = d.getVar("MACHINE")

    # Package list captured by spdx_collect_rootfs_packages during do_rootfs
    with root_packages_file.open("r") as f:
        packages = json.load(f)

    objset = oe.sbom30.ObjectSet.new_objset(d, "%s-%s" % (image_basename, machine))

    rootfs = objset.add_root(oe.spdx30.software_Package(
        _id=objset.new_spdxid("rootfs", image_basename),
        creationInfo=objset.doc.creationInfo,
        name=image_basename,
        software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.archive,
    ))
    set_timestamp_now(d, rootfs, "builtTime")

    rootfs_build = objset.add_root(objset.new_task_build("rootfs", "rootfs"))
    set_timestamp_now(d, rootfs_build, "build_buildEndTime")

    objset.new_scoped_relationship(
        [rootfs_build],
        oe.spdx30.RelationshipType.hasOutputs,
        oe.spdx30.LifecycleScopeType.build,
        [rootfs],
    )

    # Every installed package is an input to the rootfs build
    collect_build_package_inputs(d, objset, rootfs_build, packages)

    oe.sbom30.write_recipe_jsonld_doc(d, objset, "rootfs", deploydir)
}
addtask do_create_rootfs_spdx after do_rootfs before do_image
# Shared-state: results staged in SPDXROOTFSDEPLOY, installed into
# DEPLOY_DIR_SPDX
SSTATETASKS += "do_create_rootfs_spdx"
do_create_rootfs_spdx[sstate-inputdirs] = "${SPDXROOTFSDEPLOY}"
do_create_rootfs_spdx[sstate-outputdirs] = "${DEPLOY_DIR_SPDX}"
# Needs the SPDX documents of everything (transitively) installed in the rootfs
do_create_rootfs_spdx[recrdeptask] += "do_create_spdx do_create_package_spdx"
do_create_rootfs_spdx[cleandirs] += "${SPDXROOTFSDEPLOY}"

python do_create_rootfs_spdx_setscene() {
    sstate_setscene(d)
}
addtask do_create_rootfs_spdx_setscene
| 110 | |||
# Create SPDX Build/File elements for each image artifact listed in
# IMAGE_OUTPUT_MANIFEST: one Build per image task, one software_File (with
# sha256) per produced image file, with the rootfs SPDX package as input.
python do_create_image_spdx() {
    import oe.spdx30
    import oe.sbom30
    import json
    from pathlib import Path

    image_deploy_dir = Path(d.getVar('IMGDEPLOYDIR'))
    manifest_path = Path(d.getVar("IMAGE_OUTPUT_MANIFEST"))
    spdx_work_dir = Path(d.getVar('SPDXIMAGEWORK'))

    image_basename = d.getVar('IMAGE_BASENAME')
    machine = d.getVar("MACHINE")

    objset = oe.sbom30.ObjectSet.new_objset(d, "%s-%s" % (image_basename, machine))

    # Manifest written by the image creation code: one entry per image task,
    # each listing the files it produced
    with manifest_path.open("r") as f:
        manifest = json.load(f)

    builds = []
    for task in manifest:
        imagetype = task["imagetype"]
        taskname = task["taskname"]

        image_build = objset.add_root(objset.new_task_build(taskname, "image/%s" % imagetype))
        set_timestamp_now(d, image_build, "build_buildEndTime")
        builds.append(image_build)

        artifacts = []

        for image in task["images"]:
            image_filename = image["filename"]
            image_path = image_deploy_dir / image_filename
            a = objset.add_root(oe.spdx30.software_File(
                _id=objset.new_spdxid("image", image_filename),
                creationInfo=objset.doc.creationInfo,
                name=image_filename,
                verifiedUsing=[
                    oe.spdx30.Hash(
                        algorithm=oe.spdx30.HashAlgorithm.sha256,
                        hashValue=bb.utils.sha256_file(image_path),
                    )
                ]
            ))
            # Purpose may be set per image type, falling back to the generic
            # SPDX_IMAGE_PURPOSE default
            set_purposes(d, a, "SPDX_IMAGE_PURPOSE:%s" % imagetype, "SPDX_IMAGE_PURPOSE")
            set_timestamp_now(d, a, "builtTime")

            artifacts.append(a)

        if artifacts:
            objset.new_scoped_relationship(
                [image_build],
                oe.spdx30.RelationshipType.hasOutputs,
                oe.spdx30.LifecycleScopeType.build,
                artifacts,
            )

    if builds:
        # All image builds take the rootfs package (written by
        # do_create_rootfs_spdx) as their input
        rootfs_image, _ = oe.sbom30.find_root_obj_in_jsonld(
            d,
            "rootfs",
            "%s-%s" % (image_basename, machine),
            oe.spdx30.software_Package,
            # TODO: Should use a purpose to filter here?
        )
        objset.new_scoped_relationship(
            builds,
            oe.spdx30.RelationshipType.hasInputs,
            oe.spdx30.LifecycleScopeType.build,
            [rootfs_image._id],
        )

    objset.add_aliases()
    objset.link()
    oe.sbom30.write_recipe_jsonld_doc(d, objset, "image", spdx_work_dir)
}
addtask do_create_image_spdx after do_image_complete do_create_rootfs_spdx before do_build
SSTATETASKS += "do_create_image_spdx"
# NOTE(review): the override "task-combine-image-type-spdx" does not match
# this task's name (do_create_image_spdx -> task-create-image-spdx), so this
# flag appears to have no effect — confirm the intended task name
SSTATE_SKIP_CREATION:task-combine-image-type-spdx = "1"
do_create_image_spdx[sstate-inputdirs] = "${SPDXIMAGEWORK}"
do_create_image_spdx[sstate-outputdirs] = "${DEPLOY_DIR_SPDX}"
do_create_image_spdx[cleandirs] = "${SPDXIMAGEWORK}"
do_create_image_spdx[dirs] = "${SPDXIMAGEWORK}"

python do_create_image_spdx_setscene() {
    sstate_setscene(d)
}
addtask do_create_image_spdx_setscene
| 198 | |||
| 199 | |||
# Assemble the final per-image SBOM document: collects the rootfs package and
# every image software_File as root elements, writes IMAGE_NAME.spdx.json to
# the image deploy directory, and creates the IMAGE_LINK_NAME symlink.
python do_create_image_sbom_spdx() {
    import os
    from pathlib import Path
    import oe.spdx30
    import oe.sbom30

    image_name = d.getVar("IMAGE_NAME")
    image_basename = d.getVar("IMAGE_BASENAME")
    image_link_name = d.getVar("IMAGE_LINK_NAME")
    imgdeploydir = Path(d.getVar("SPDXIMAGEDEPLOYDIR"))
    machine = d.getVar("MACHINE")

    spdx_path = imgdeploydir / (image_name + ".spdx.json")

    root_elements = []

    # TODO: Do we need to add the rootfs or are the image files sufficient?
    rootfs_image, _ = oe.sbom30.find_root_obj_in_jsonld(
        d,
        "rootfs",
        "%s-%s" % (image_basename, machine),
        oe.spdx30.software_Package,
        # TODO: Should use a purpose here?
    )
    root_elements.append(rootfs_image._id)

    # Every image file produced by do_create_image_spdx becomes a root element
    image_objset, _ = oe.sbom30.find_jsonld(d, "image", "%s-%s" % (image_basename, machine), required=True)
    for o in image_objset.foreach_root(oe.spdx30.software_File):
        root_elements.append(o._id)

    objset, sbom = oe.sbom30.create_sbom(d, image_name, root_elements)

    oe.sbom30.write_jsonld_doc(d, objset, spdx_path)

    # Mirror the usual image symlink convention (IMAGE_LINK_NAME -> versioned
    # file) for the SBOM document
    def make_image_link(target_path, suffix):
        if image_link_name:
            link = imgdeploydir / (image_link_name + suffix)
            if link != target_path:
                link.symlink_to(os.path.relpath(target_path, link.parent))

    make_image_link(spdx_path, ".spdx.json")
}
addtask do_create_image_sbom_spdx after do_create_rootfs_spdx do_create_image_spdx before do_build
SSTATETASKS += "do_create_image_sbom_spdx"
# NOTE(review): the override "task-create-image-sbom" does not match this
# task's name (do_create_image_sbom_spdx -> task-create-image-sbom-spdx), so
# this flag appears to have no effect — confirm the intended task name
SSTATE_SKIP_CREATION:task-create-image-sbom = "1"
do_create_image_sbom_spdx[sstate-inputdirs] = "${SPDXIMAGEDEPLOYDIR}"
# The SBOM is deployed next to the image artifacts, not under DEPLOY_DIR_SPDX
do_create_image_sbom_spdx[sstate-outputdirs] = "${DEPLOY_DIR_IMAGE}"
do_create_image_sbom_spdx[stamp-extra-info] = "${MACHINE_ARCH}"
do_create_image_sbom_spdx[cleandirs] = "${SPDXIMAGEDEPLOYDIR}"
do_create_image_sbom_spdx[recrdeptask] += "do_create_spdx do_create_package_spdx"

python do_create_image_sbom_spdx_setscene() {
    sstate_setscene(d)
}
addtask do_create_image_sbom_spdx_setscene
| 255 | |||
# Hook SDK SBOM generation into do_populate_sdk (standard SDK) and
# do_populate_sdk_ext (eSDK): per-half document creation runs as a
# POPULATE_SDK_POST_* command, and the final SBOM is assembled as a postfunc
do_populate_sdk[recrdeptask] += "do_create_spdx do_create_package_spdx"
do_populate_sdk[cleandirs] += "${SPDXSDKWORK}"
do_populate_sdk[postfuncs] += "sdk_create_sbom"
POPULATE_SDK_POST_HOST_COMMAND:append:task-populate-sdk = " sdk_host_create_spdx"
POPULATE_SDK_POST_TARGET_COMMAND:append:task-populate-sdk = " sdk_target_create_spdx"

do_populate_sdk_ext[recrdeptask] += "do_create_spdx do_create_package_spdx"
do_populate_sdk_ext[cleandirs] += "${SPDXSDKEXTWORK}"
do_populate_sdk_ext[postfuncs] += "sdk_ext_create_sbom"
POPULATE_SDK_POST_HOST_COMMAND:append:task-populate-sdk-ext = " sdk_ext_host_create_spdx"
POPULATE_SDK_POST_TARGET_COMMAND:append:task-populate-sdk-ext = " sdk_ext_target_create_spdx"
| 267 | |||
# Thin wrappers dispatching to sdk_create_spdx() for each SDK half
# (host/target) and SDK flavor (standard/extensible).

python sdk_host_create_spdx() {
    from pathlib import Path
    spdx_work_dir = Path(d.getVar('SPDXSDKWORK'))

    sdk_create_spdx(d, "host", spdx_work_dir, d.getVar("TOOLCHAIN_OUTPUTNAME"))
}

python sdk_target_create_spdx() {
    from pathlib import Path
    spdx_work_dir = Path(d.getVar('SPDXSDKWORK'))

    sdk_create_spdx(d, "target", spdx_work_dir, d.getVar("TOOLCHAIN_OUTPUTNAME"))
}

python sdk_ext_host_create_spdx() {
    from pathlib import Path
    spdx_work_dir = Path(d.getVar('SPDXSDKEXTWORK'))

    # TODO: This doesn't seem to work
    sdk_create_spdx(d, "host", spdx_work_dir, d.getVar("TOOLCHAINEXT_OUTPUTNAME"))
}

python sdk_ext_target_create_spdx() {
    from pathlib import Path
    spdx_work_dir = Path(d.getVar('SPDXSDKEXTWORK'))

    # TODO: This doesn't seem to work
    sdk_create_spdx(d, "target", spdx_work_dir, d.getVar("TOOLCHAINEXT_OUTPUTNAME"))
}
| 297 | |||
def sdk_create_spdx(d, sdk_type, spdx_work_dir, toolchain_outputname):
    """Write the SPDX rootfs document for one half of an SDK.

    Creates a software_Package root for the SDK rootfs and a Build element
    for the sdk-rootfs step, links them with hasOutputs, and records every
    installed SDK package as a build input. The object set is written to
    sdk-rootfs.spdx.json in *spdx_work_dir* for create_sdk_sbom() to load.

    :param sdk_type: "host" or "target"; selects which package list is read
    :param toolchain_outputname: SDK base name used to derive the SPDX name

    Fix: dropped the unused "from datetime import datetime" import.
    """
    from pathlib import Path
    from oe.sdk import sdk_list_installed_packages
    import oe.spdx30
    import oe.sbom30

    sdk_name = toolchain_outputname + "-" + sdk_type
    # True selects the target package database, False the host (nativesdk) one
    sdk_packages = sdk_list_installed_packages(d, sdk_type == "target")

    objset = oe.sbom30.ObjectSet.new_objset(d, sdk_name)

    sdk_rootfs = objset.add_root(oe.spdx30.software_Package(
        _id=objset.new_spdxid("sdk-rootfs", sdk_name),
        creationInfo=objset.doc.creationInfo,
        name=sdk_name,
        software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.archive,
    ))
    set_timestamp_now(d, sdk_rootfs, "builtTime")

    sdk_build = objset.add_root(objset.new_task_build("sdk-rootfs", "sdk-rootfs"))
    set_timestamp_now(d, sdk_build, "build_buildEndTime")

    objset.new_scoped_relationship(
        [sdk_build],
        oe.spdx30.RelationshipType.hasOutputs,
        oe.spdx30.LifecycleScopeType.build,
        [sdk_rootfs],
    )

    collect_build_package_inputs(d, objset, sdk_build, sdk_packages)

    objset.add_aliases()
    oe.sbom30.write_jsonld_doc(d, objset, spdx_work_dir / "sdk-rootfs.spdx.json")
| 332 | |||
# postfunc of do_populate_sdk: assemble the final SBOM for the standard SDK
python sdk_create_sbom() {
    from pathlib import Path
    sdk_deploydir = Path(d.getVar("SDKDEPLOYDIR"))
    spdx_work_dir = Path(d.getVar('SPDXSDKWORK'))

    create_sdk_sbom(d, sdk_deploydir, spdx_work_dir, d.getVar("TOOLCHAIN_OUTPUTNAME"))
}

# postfunc of do_populate_sdk_ext: assemble the final SBOM for the eSDK
python sdk_ext_create_sbom() {
    from pathlib import Path
    sdk_deploydir = Path(d.getVar("SDKEXTDEPLOYDIR"))
    spdx_work_dir = Path(d.getVar('SPDXSDKEXTWORK'))

    create_sdk_sbom(d, sdk_deploydir, spdx_work_dir, d.getVar("TOOLCHAINEXT_OUTPUTNAME"))
}
| 348 | |||
def create_sdk_sbom(d, sdk_deploydir, spdx_work_dir, toolchain_outputname):
    """Assemble the final SBOM for an SDK installer.

    Loads the sdk-rootfs document written by sdk_create_spdx(), adds a
    "sdk-populate" Build with the rootfs as input and every deployed SDK
    file (hashed, with a purpose) as output, then writes
    <toolchain_outputname>.spdx.json into *sdk_deploydir*.

    Fixes: explicitly import "os" (the original relied on the global that
    BitBake injects into function namespaces, which breaks if this helper
    is ever moved to a plain Python library); dropped the unused
    "from datetime import datetime" import.
    """
    import os
    import oe.spdx30
    import oe.sbom30
    from pathlib import Path

    # Load the document written earlier
    rootfs_objset = oe.sbom30.load_jsonld(d, spdx_work_dir / "sdk-rootfs.spdx.json", required=True)

    # Create a new build for the SDK installer
    sdk_build = rootfs_objset.new_task_build("sdk-populate", "sdk-populate")
    set_timestamp_now(d, sdk_build, "build_buildEndTime")

    rootfs = rootfs_objset.find_root(oe.spdx30.software_Package)
    if rootfs is None:
        bb.fatal("Unable to find rootfs artifact")

    rootfs_objset.new_scoped_relationship(
        [sdk_build],
        oe.spdx30.RelationshipType.hasInputs,
        oe.spdx30.LifecycleScopeType.build,
        [rootfs]
    )

    files = set()
    # NOTE(review): root_files is collected but never used below — possibly
    # intended as the root-element list for create_sbom(); confirm
    root_files = []

    # NOTE: os.walk() doesn't return symlinks
    for dirpath, dirnames, filenames in os.walk(sdk_deploydir):
        for fn in filenames:
            fpath = Path(dirpath) / fn
            if not fpath.is_file() or fpath.is_symlink():
                continue

            relpath = str(fpath.relative_to(sdk_deploydir))

            f = rootfs_objset.new_file(
                rootfs_objset.new_spdxid("sdk-installer", relpath),
                relpath,
                fpath,
            )
            set_timestamp_now(d, f, "builtTime")

            # Well-known auxiliary files get fixed purposes; everything else
            # (the installer itself) uses the configurable SPDX_SDK_PURPOSE
            if fn.endswith(".manifest"):
                f.software_primaryPurpose = oe.spdx30.software_SoftwarePurpose.manifest
            elif fn.endswith(".testdata.json"):
                f.software_primaryPurpose = oe.spdx30.software_SoftwarePurpose.configuration
            else:
                set_purposes(d, f, "SPDX_SDK_PURPOSE")
                root_files.append(f)

            files.add(f)

    if files:
        rootfs_objset.new_scoped_relationship(
            [sdk_build],
            oe.spdx30.RelationshipType.hasOutputs,
            oe.spdx30.LifecycleScopeType.build,
            files,
        )
    else:
        bb.warn(f"No SDK output files found in {sdk_deploydir}")

    objset, sbom = oe.sbom30.create_sbom(d, toolchain_outputname, sorted(list(files)), [rootfs_objset])

    oe.sbom30.write_jsonld_doc(d, objset, sdk_deploydir / (toolchain_outputname + ".spdx.json"))
| 415 | |||
diff --git a/meta/classes/spdx-common.bbclass b/meta/classes/spdx-common.bbclass index 18254c36aa..6dfc1fd9e4 100644 --- a/meta/classes/spdx-common.bbclass +++ b/meta/classes/spdx-common.bbclass | |||
| @@ -17,6 +17,7 @@ SPDXDEPLOY = "${SPDXDIR}/deploy" | |||
| 17 | SPDXWORK = "${SPDXDIR}/work" | 17 | SPDXWORK = "${SPDXDIR}/work" |
| 18 | SPDXIMAGEWORK = "${SPDXDIR}/image-work" | 18 | SPDXIMAGEWORK = "${SPDXDIR}/image-work" |
| 19 | SPDXSDKWORK = "${SPDXDIR}/sdk-work" | 19 | SPDXSDKWORK = "${SPDXDIR}/sdk-work" |
| 20 | SPDXSDKEXTWORK = "${SPDXDIR}/sdk-ext-work" | ||
| 20 | SPDXDEPS = "${SPDXDIR}/deps.json" | 21 | SPDXDEPS = "${SPDXDIR}/deps.json" |
| 21 | 22 | ||
| 22 | SPDX_TOOL_NAME ??= "oe-spdx-creator" | 23 | SPDX_TOOL_NAME ??= "oe-spdx-creator" |
| @@ -61,7 +62,7 @@ def get_json_indent(d): | |||
| 61 | return 2 | 62 | return 2 |
| 62 | return None | 63 | return None |
| 63 | 64 | ||
| 64 | python() { | 65 | def load_spdx_license_data(d): |
| 65 | import json | 66 | import json |
| 66 | if d.getVar("SPDX_LICENSE_DATA"): | 67 | if d.getVar("SPDX_LICENSE_DATA"): |
| 67 | return | 68 | return |
| @@ -71,6 +72,9 @@ python() { | |||
| 71 | # Transform the license array to a dictionary | 72 | # Transform the license array to a dictionary |
| 72 | data["licenses"] = {l["licenseId"]: l for l in data["licenses"]} | 73 | data["licenses"] = {l["licenseId"]: l for l in data["licenses"]} |
| 73 | d.setVar("SPDX_LICENSE_DATA", data) | 74 | d.setVar("SPDX_LICENSE_DATA", data) |
| 75 | |||
| 76 | python() { | ||
| 77 | load_spdx_license_data(d) | ||
| 74 | } | 78 | } |
| 75 | 79 | ||
| 76 | def process_sources(d): | 80 | def process_sources(d): |
