diff options
author | Joshua Watt <JPEWhacker@gmail.com> | 2024-06-10 15:41:48 -0600 |
---|---|---|
committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2024-06-12 16:16:03 +0100 |
commit | 92b202c2460b6d82df585a47ed56f4ed818a76c0 (patch) | |
tree | bd5234b04562ecb3d61a1d6f3cc4d9e8fdd83c6b /meta/classes/spdx-common.bbclass | |
parent | a43f15565b18f818d5519376e54849b604156e38 (diff) | |
download | poky-92b202c2460b6d82df585a47ed56f4ed818a76c0.tar.gz |
classes/spdx-common: Move common SPDX to new class
Moves SPDX code that can be shared between different SPDX versions into
a common class
(From OE-Core rev: 769a390adc9fc0b52978abe0f19f885967af0117)
Signed-off-by: Joshua Watt <JPEWhacker@gmail.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'meta/classes/spdx-common.bbclass')
-rw-r--r-- | meta/classes/spdx-common.bbclass | 256 |
1 file changed, 256 insertions, 0 deletions
diff --git a/meta/classes/spdx-common.bbclass b/meta/classes/spdx-common.bbclass new file mode 100644 index 0000000000..468a11ca3e --- /dev/null +++ b/meta/classes/spdx-common.bbclass | |||
@@ -0,0 +1,256 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
# Base directory into which per-recipe SPDX documents are deployed.
DEPLOY_DIR_SPDX ??= "${DEPLOY_DIR}/spdx"

# The product name that the CVE database uses. Defaults to BPN, but may need to
# be overridden per recipe (for example tiff.bb sets CVE_PRODUCT=libtiff).
CVE_PRODUCT ??= "${BPN}"
CVE_VERSION ??= "${PV}"

# Scratch area used while generating SPDX output for a recipe.
SPDXDIR ??= "${WORKDIR}/spdx"
SPDXDEPLOY = "${SPDXDIR}/deploy"
SPDXWORK = "${SPDXDIR}/work"
SPDXIMAGEWORK = "${SPDXDIR}/image-work"
SPDXSDKWORK = "${SPDXDIR}/sdk-work"
# JSON file where do_collect_spdx_deps records this recipe's direct deps.
SPDXDEPS = "${SPDXDIR}/deps.json"

# Tool identification recorded in the generated SPDX documents.
SPDX_TOOL_NAME ??= "oe-spdx-creator"
SPDX_TOOL_VERSION ??= "1.0"

SPDXRUNTIMEDEPLOY = "${SPDXDIR}/runtime-deploy"

# Set to "1" to describe patched sources in the SBOM and/or archive them.
SPDX_INCLUDE_SOURCES ??= "0"
SPDX_ARCHIVE_SOURCES ??= "0"
SPDX_ARCHIVE_PACKAGED ??= "0"

# Inputs used when constructing SPDX document namespaces/identifiers.
SPDX_UUID_NAMESPACE ??= "sbom.openembedded.org"
SPDX_NAMESPACE_PREFIX ??= "http://spdx.org/spdxdocs"
# Set to "1" to pretty-print (indent) the emitted JSON documents.
SPDX_PRETTY ??= "0"

# License database shipped with the metadata; parsed once into
# SPDX_LICENSE_DATA by the anonymous python function below.
SPDX_LICENSES ??= "${COREBASE}/meta/files/spdx-licenses.json"

SPDX_CUSTOM_ANNOTATION_VARS ??= ""

SPDX_ORG ??= "OpenEmbedded ()"
SPDX_SUPPLIER ??= "Organization: ${SPDX_ORG}"
SPDX_SUPPLIER[doc] = "The SPDX PackageSupplier field for SPDX packages created from \
    this recipe. For SPDX documents create using this class during the build, this \
    is the contact information for the person or organization who is doing the \
    build."
44 | |||
def extract_licenses(filename):
    """
    Scan the beginning of *filename* for SPDX-License-Identifier tags.

    Returns the identifiers found as a list of ASCII strings, or None if
    none were found or the file could not be read.
    """
    import re

    # Matches e.g. "// SPDX-License-Identifier: GPL-2.0-only" on its own line
    lic_regex = re.compile(rb'^\W*SPDX-License-Identifier:\s*([ \w\d.()+-]+?)(?:\s+\W*)?$', re.MULTILINE)

    try:
        with open(filename, 'rb') as f:
            # License tags conventionally sit at the top of a file, so
            # reading at most the first 15000 bytes is sufficient.
            size = min(15000, os.stat(filename).st_size)
            txt = f.read(size)
            licenses = re.findall(lic_regex, txt)
            if licenses:
                ascii_licenses = [lic.decode('ascii') for lic in licenses]
                return ascii_licenses
    except Exception as e:
        # Name the file that failed; the original message printed a
        # "(unknown)" placeholder instead of the actual filename.
        bb.warn(f"Exception reading {filename}: {e}")
    return None
61 | |||
def is_work_shared_spdx(d):
    """Return True when this recipe builds out of a shared source tree:
    any kernel recipe, or any recipe whose WORKDIR is under work-shared."""
    if bb.data.inherits_class('kernel', d):
        return True
    return 'work-shared' in d.getVar('WORKDIR')
64 | |||
def get_json_indent(d):
    """Return the JSON indent level: 2 when SPDX_PRETTY is "1",
    otherwise None (compact single-line output)."""
    pretty = d.getVar("SPDX_PRETTY") == "1"
    return 2 if pretty else None
69 | |||
# Anonymous python: runs at parse time. Loads the SPDX license JSON once and
# caches it in SPDX_LICENSE_DATA, re-keying the license list by licenseId so
# later lookups are O(1).
python() {
    import json
    if d.getVar("SPDX_LICENSE_DATA"):
        # Already loaded (e.g. by another class inheriting this one)
        return

    with open(d.getVar("SPDX_LICENSES"), "r") as f:
        data = json.load(f)
        # Transform the license array to a dictionary
        data["licenses"] = {l["licenseId"]: l for l in data["licenses"]}
        d.setVar("SPDX_LICENSE_DATA", data)
}
81 | |||
def process_sources(d):
    """
    Decide whether this recipe's sources should be processed for SPDX.

    Returns False for recipes whose sources cannot (or should not) be
    archived or scanned; True otherwise.
    """
    pn = d.getVar('PN')
    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
    if pn in assume_provided:
        # Use the first alternate PROVIDES name instead of the assumed one
        for candidate in d.getVar("PROVIDES").split():
            if candidate != pn:
                pn = candidate
                break

    # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
    # so avoid archiving source here.
    if pn.startswith('glibc-locale'):
        return False

    # These recipes have no source tree of their own worth processing.
    if d.getVar('PN') in ("libtool-cross", "libgcc-initial", "shadow-sysroot"):
        return False

    # We just archive gcc-source for all the gcc related recipes
    if d.getVar('BPN') in ['gcc', 'libgcc']:
        bb.debug(1, 'spdx: There is bug in scan of %s is, do nothing' % pn)
        return False

    return True
108 | |||
def collect_direct_deps(d, dep_task):
    # Return a sorted list of (pn, hashfn, in_taskhash) tuples: one entry for
    # each *other* recipe whose dep_task this recipe's current task directly
    # depends on.
    current_task = "do_" + d.getVar("BB_CURRENTTASK")
    pn = d.getVar("PN")

    taskdepdata = d.getVar("BB_TASKDEPDATA", False)

    # Locate this recipe's own entry for the currently-executing task.
    # NOTE(review): entries are accessed both by index ([0], [1]) and by
    # attribute (.deps, .taskname) below — assumes taskdepdata values support
    # both; confirm against the bitbake version in use.
    for this_dep in taskdepdata.values():
        if this_dep[0] == pn and this_dep[1] == current_task:
            break
    else:
        bb.fatal(f"Unable to find this {pn}:{current_task} in taskdepdata")

    deps = set()

    # Collect direct dependencies that run dep_task in a different recipe.
    for dep_name in this_dep.deps:
        dep_data = taskdepdata[dep_name]
        if dep_data.taskname == dep_task and dep_data.pn != pn:
            # Third element: whether this dep contributes to the task hash
            deps.add((dep_data.pn, dep_data.hashfn, dep_name in this_dep.taskhash_deps))

    return sorted(deps)

# BB_TASKDEPDATA varies from build to build, so keep it out of the task
# signature; DEPENDS genuinely affects the computed dependency list.
collect_direct_deps[vardepsexclude] += "BB_TASKDEPDATA"
collect_direct_deps[vardeps] += "DEPENDS"
132 | |||
python do_collect_spdx_deps() {
    # This task calculates the build time dependencies of the recipe, and is
    # required because while a task can deptask on itself, those dependencies
    # do not show up in BB_TASKDEPDATA. To work around that, this task does the
    # deptask on do_create_spdx and writes out the dependencies it finds, then
    # do_create_spdx reads in the found dependencies when writing the actual
    # SPDX document
    import json
    from pathlib import Path

    spdx_deps_file = Path(d.getVar("SPDXDEPS"))

    deps = collect_direct_deps(d, "do_create_spdx")

    # Persist as JSON so get_spdx_deps() can re-read the list later
    with spdx_deps_file.open("w") as f:
        json.dump(deps, f)
}
# NOTE: depending on do_unpack is a hack that is necessary to get its dependencies for archiving the source
addtask do_collect_spdx_deps after do_unpack
do_collect_spdx_deps[depends] += "${PATCHDEPENDENCY}"
do_collect_spdx_deps[deptask] = "do_create_spdx"
do_collect_spdx_deps[dirs] = "${SPDXDIR}"
155 | |||
def get_spdx_deps(d):
    """Load and return the dependency list previously written by
    do_collect_spdx_deps (a list of [pn, hashfn, in_taskhash] entries)."""
    import json
    from pathlib import Path

    deps_path = Path(d.getVar("SPDXDEPS"))
    return json.loads(deps_path.read_text())
164 | |||
def collect_package_providers(d):
    """
    Map every runtime-provided name to the (package, hashfn) that supplies
    it, covering this recipe and its direct do_create_spdx dependencies.
    """
    from pathlib import Path
    import oe.sbom
    import oe.spdx
    import json

    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))

    providers = {}

    deps = collect_direct_deps(d, "do_create_spdx")
    # Include this recipe itself alongside its dependencies
    deps.append((d.getVar("PN"), d.getVar("BB_HASHFILENAME"), True))

    for dep_pn, dep_hashfn, _ in deps:
        localdata = d
        recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
        if not recipe_data:
            # Fall back to the SDK pkgdata when target pkgdata is absent
            localdata = bb.data.createCopy(d)
            localdata.setVar("PKGDATA_DIR", "${PKGDATA_DIR_SDK}")
            recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)

        for pkg in recipe_data.get("PACKAGES", "").split():

            pkg_data = oe.packagedata.read_subpkgdata_dict(pkg, localdata)
            # Names this package RPROVIDES, plus its own name
            rprovides = set(n for n, _ in bb.utils.explode_dep_versions2(pkg_data.get("RPROVIDES", "")).items())
            rprovides.add(pkg)

            # Prefer the renamed on-target package name (PKG) when present
            if "PKG" in pkg_data:
                pkg = pkg_data["PKG"]
                rprovides.add(pkg)

            for r in rprovides:
                providers[r] = (pkg, dep_hashfn)

    return providers

# BB_TASKDEPDATA (read indirectly via collect_direct_deps) changes every
# build, so keep it out of the task signature.
collect_package_providers[vardepsexclude] += "BB_TASKDEPDATA"
202 | |||
def spdx_get_src(d):
    """
    Save the patched source of the recipe into SPDXWORK.

    Temporarily redirects WORKDIR so do_unpack/do_patch run into the SPDX
    scratch area; for work-shared recipes (e.g. the kernel) the shared
    source is copied instead of re-unpacked. WORKDIR is always restored.
    """
    import shutil
    spdx_workdir = d.getVar('SPDXWORK')
    spdx_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
    pn = d.getVar('PN')

    workdir = d.getVar("WORKDIR")

    try:
        # The kernel class functions require it to be on work-shared, so we dont change WORKDIR
        if not is_work_shared_spdx(d):
            # Change the WORKDIR to make do_unpack do_patch run in another dir.
            d.setVar('WORKDIR', spdx_workdir)
            # Restore the original path to recipe's native sysroot (it's relative to WORKDIR).
            d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)

            # The changed 'WORKDIR' also caused 'B' changed, create dir 'B' for the
            # possibly requiring of the following tasks (such as some recipes's
            # do_patch required 'B' existed).
            bb.utils.mkdirhier(d.getVar('B'))

            bb.build.exec_func('do_unpack', d)
        # Copy source of kernel to spdx_workdir
        if is_work_shared_spdx(d):
            share_src = d.getVar('WORKDIR')
            d.setVar('WORKDIR', spdx_workdir)
            d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
            src_dir = spdx_workdir + "/" + d.getVar('PN') + "-" + d.getVar('PV') + "-" + d.getVar('PR')
            bb.utils.mkdirhier(src_dir)
            if bb.data.inherits_class('kernel', d):
                share_src = d.getVar('STAGING_KERNEL_DIR')
            cmd_copy_share = "cp -rf " + share_src + "/* " + src_dir + "/"
            cmd_copy_shared_res = os.popen(cmd_copy_share).read()
            bb.note("cmd_copy_shared_result = " + cmd_copy_shared_res)

            git_path = src_dir + "/.git"
            if os.path.exists(git_path):
                # BUGFIX: the module is "shutil"; the original called the
                # undefined name "shutils", raising NameError whenever the
                # copied source contained a .git directory.
                shutil.rmtree(git_path)

        # Make sure gcc and kernel sources are patched only once
        if not (d.getVar('SRC_URI') == "" or is_work_shared_spdx(d)):
            bb.build.exec_func('do_patch', d)

        # Some userland has no source.
        if not os.path.exists(spdx_workdir):
            bb.utils.mkdirhier(spdx_workdir)
    finally:
        # Always restore the caller's WORKDIR, even if unpack/patch failed
        d.setVar("WORKDIR", workdir)

# STAGING_KERNEL_DIR points into a shared build area whose path must not
# affect this task's signature.
spdx_get_src[vardepsexclude] += "STAGING_KERNEL_DIR"
256 | |||