Diffstat (limited to 'scripts/contrib')
-rwxr-xr-x  scripts/contrib/convert-spdx-licenses.py                            |   2
-rwxr-xr-x  scripts/contrib/improve_kernel_cve_report.py                        | 467
-rwxr-xr-x  scripts/contrib/make-spdx-bindings.sh                               |  12
-rw-r--r--  scripts/contrib/oe-image-files-spdx/.gitignore                      |   8
-rw-r--r--  scripts/contrib/oe-image-files-spdx/README.md                       |  24
-rw-r--r--  scripts/contrib/oe-image-files-spdx/pyproject.toml                  |  23
-rw-r--r--  scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py  |   1
-rw-r--r--  scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py      |  86
-rw-r--r--  scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py   |   1
-rwxr-xr-x  scripts/contrib/patchreview.py                                      |  11
10 files changed, 630 insertions, 5 deletions
diff --git a/scripts/contrib/convert-spdx-licenses.py b/scripts/contrib/convert-spdx-licenses.py
index 4e194dee3f..13cf12a33f 100755
--- a/scripts/contrib/convert-spdx-licenses.py
+++ b/scripts/contrib/convert-spdx-licenses.py
@@ -93,7 +93,7 @@ license_map = {
     "Nauman" : "Naumen",
     "tcl" : "TCL",
     "vim" : "Vim",
-    "SGIv1" : "SGI-1",
+    "SGIv1" : "SGI-OpenGL",
 }
 
 def processfile(fn):
diff --git a/scripts/contrib/improve_kernel_cve_report.py b/scripts/contrib/improve_kernel_cve_report.py
new file mode 100755
index 0000000000..829cc4cd30
--- /dev/null
+++ b/scripts/contrib/improve_kernel_cve_report.py
@@ -0,0 +1,467 @@
1 | #! /usr/bin/env python3 | ||
2 | # | ||
3 | # Copyright OpenEmbedded Contributors | ||
4 | # | ||
5 | # The script uses an additional source of CVE information from linux-vulns | ||
6 | # to enrich the cve-summary produced by cve-check or VEX. | ||
7 | # It can also use the list of compiled files from the kernel SPDX to ignore CVEs | ||
8 | # that do not apply because the affected files are not compiled. | ||
9 | # | ||
10 | # It creates a new json file with updated CVE information | ||
11 | # | ||
12 | # Compiled files can be extracted adding the following in local.conf | ||
13 | # SPDX_INCLUDE_COMPILED_SOURCES:pn-linux-yocto = "1" | ||
14 | # | ||
15 | # Tested with the following CVE sources: | ||
16 | # - https://git.kernel.org/pub/scm/linux/security/vulns.git | ||
17 | # - https://github.com/CVEProject/cvelistV5 | ||
18 | # | ||
19 | # Example: | ||
20 | # python3 ./openembedded-core/scripts/contrib/improve_kernel_cve_report.py --spdx tmp/deploy/spdx/3.0.1/qemux86_64/recipes/recipe-linux-yocto.spdx.json --kernel-version 6.12.27 --datadir ./vulns | ||
21 | # python3 ./openembedded-core/scripts/contrib/improve_kernel_cve_report.py --spdx tmp/deploy/spdx/3.0.1/qemux86_64/recipes/recipe-linux-yocto.spdx.json --datadir ./vulns --old-cve-report build/tmp/log/cve/cve-summary.json | ||
22 | # | ||
23 | # SPDX-License-Identifier: GPLv2 | ||
24 | |||
25 | import argparse | ||
26 | import json | ||
27 | import sys | ||
28 | import logging | ||
29 | import glob | ||
30 | import os | ||
31 | import pathlib | ||
32 | from packaging.version import Version | ||
33 | |||
34 | def is_linux_cve(cve_info): | ||
35 | '''Return True if the CVE belongs to Linux''' | ||
36 | if not "affected" in cve_info["containers"]["cna"]: | ||
37 | return False | ||
38 | for affected in cve_info["containers"]["cna"]["affected"]: | ||
39 | if not "product" in affected: | ||
40 | return False | ||
41 | if affected["product"] == "Linux" and affected["vendor"] == "Linux": | ||
42 | return True | ||
43 | return False | ||
44 | |||
45 | def get_kernel_cves(datadir, compiled_files, version): | ||
46 | """ | ||
47 | Get CVEs for the kernel | ||
48 | """ | ||
49 | cves = {} | ||
50 | |||
51 | check_config = len(compiled_files) > 0 | ||
52 | |||
53 | base_version = Version(f"{version.major}.{version.minor}") | ||
54 | |||
55 | # Check all CVES from kernel vulns | ||
56 | pattern = os.path.join(datadir, '**', "CVE-*.json") | ||
57 | cve_files = glob.glob(pattern, recursive=True) | ||
58 | not_applicable_config = 0 | ||
59 | fixed_as_later_backport = 0 | ||
60 | vulnerable = 0 | ||
61 | not_vulnerable = 0 | ||
62 | for cve_file in sorted(cve_files): | ||
63 | cve_info = {} | ||
64 | with open(cve_file, "r", encoding='ISO-8859-1') as f: | ||
65 | cve_info = json.load(f) | ||
66 | |||
67 | if len(cve_info) == 0: | ||
68 | logging.error("Not valid data in %s. Aborting", cve_file) | ||
69 | break | ||
70 | |||
71 | if not is_linux_cve(cve_info): | ||
72 | continue | ||
73 | cve_id = os.path.basename(cve_file)[:-5] | ||
74 | description = cve_info["containers"]["cna"]["descriptions"][0]["value"] | ||
75 | if cve_file.find("rejected") >= 0: | ||
76 | logging.debug("%s is rejected by the CNA", cve_id) | ||
77 | cves[cve_id] = { | ||
78 | "id": cve_id, | ||
79 | "status": "Ignored", | ||
80 | "detail": "rejected", | ||
81 | "summary": description, | ||
82 | "description": f"Rejected by CNA" | ||
83 | } | ||
84 | continue | ||
85 | if any(elem in cve_file for elem in ["review", "reserved", "testing"]): | ||
86 | continue | ||
87 | |||
88 | is_vulnerable, first_affected, last_affected, better_match_first, better_match_last, affected_versions = get_cpe_applicability(cve_info, version) | ||
89 | |||
90 | logging.debug("%s: %s (%s - %s) (%s - %s)", cve_id, is_vulnerable, better_match_first, better_match_last, first_affected, last_affected) | ||
91 | |||
92 | if is_vulnerable is None: | ||
93 | logging.warning("%s doesn't have good metadata", cve_id) | ||
94 | if is_vulnerable: | ||
95 | is_affected = True | ||
96 | affected_files = [] | ||
97 | if check_config: | ||
98 | is_affected, affected_files = check_kernel_compiled_files(compiled_files, cve_info) | ||
99 | |||
100 | if not is_affected and len(affected_files) > 0: | ||
101 | logging.debug( | ||
102 | "%s - not applicable configuration since affected files not compiled: %s", | ||
103 | cve_id, affected_files) | ||
104 | cves[cve_id] = { | ||
105 | "id": cve_id, | ||
106 | "status": "Ignored", | ||
107 | "detail": "not-applicable-config", | ||
108 | "summary": description, | ||
109 | "description": f"Source code not compiled by config. {affected_files}" | ||
110 | } | ||
111 | not_applicable_config +=1 | ||
112 | # Check if we have backport | ||
113 | else: | ||
114 | if not better_match_last: | ||
115 | fixed_in = last_affected | ||
116 | else: | ||
117 | fixed_in = better_match_last | ||
118 | logging.debug("%s needs backporting (fixed from %s)", cve_id, fixed_in) | ||
119 | cves[cve_id] = { | ||
120 | "id": cve_id, | ||
121 | "status": "Unpatched", | ||
122 | "detail": "version-in-range", | ||
123 | "summary": description, | ||
124 | "description": f"Needs backporting (fixed from {fixed_in})" | ||
125 | } | ||
126 | vulnerable += 1 | ||
127 | if (better_match_last and | ||
128 | Version(f"{better_match_last.major}.{better_match_last.minor}") == base_version): | ||
129 | fixed_as_later_backport += 1 | ||
130 | # Not vulnerable | ||
131 | else: | ||
132 | if not first_affected: | ||
133 | logging.debug("%s - not known affected %s", | ||
134 | cve_id, | ||
135 | better_match_last) | ||
136 | cves[cve_id] = { | ||
137 | "id": cve_id, | ||
138 | "status": "Patched", | ||
139 | "detail": "version-not-in-range", | ||
140 | "summary": description, | ||
141 | "description": "No CPE match" | ||
142 | } | ||
143 | not_vulnerable += 1 | ||
144 | continue | ||
145 | backport_base = Version(f"{better_match_last.major}.{better_match_last.minor}") | ||
146 | if version < first_affected: | ||
147 | logging.debug('%s - fixed-version: only affects %s onwards', | ||
148 | cve_id, | ||
149 | first_affected) | ||
150 | cves[cve_id] = { | ||
151 | "id": cve_id, | ||
152 | "status": "Patched", | ||
153 | "detail": "fixed-version", | ||
154 | "summary": description, | ||
155 | "description": f"only affects {first_affected} onwards" | ||
156 | } | ||
157 | not_vulnerable += 1 | ||
158 | elif last_affected <= version: | ||
159 | logging.debug("%s - fixed-version: Fixed from version %s", | ||
160 | cve_id, | ||
161 | last_affected) | ||
162 | cves[cve_id] = { | ||
163 | "id": cve_id, | ||
164 | "status": "Patched", | ||
165 | "detail": "fixed-version", | ||
166 | "summary": description, | ||
167 | "description": f"fixed-version: Fixed from version {last_affected}" | ||
168 | } | ||
169 | not_vulnerable += 1 | ||
170 | elif backport_base == base_version: | ||
171 | logging.debug("%s - cpe-stable-backport: Backported in %s", | ||
172 | cve_id, | ||
173 | better_match_last) | ||
174 | cves[cve_id] = { | ||
175 | "id": cve_id, | ||
176 | "status": "Patched", | ||
177 | "detail": "cpe-stable-backport", | ||
178 | "summary": description, | ||
179 | "description": f"Backported in {better_match_last}" | ||
180 | } | ||
181 | not_vulnerable += 1 | ||
182 | else: | ||
183 | logging.debug("%s - version not affected %s", cve_id, str(affected_versions)) | ||
184 | cves[cve_id] = { | ||
185 | "id": cve_id, | ||
186 | "status": "Patched", | ||
187 | "detail": "version-not-in-range", | ||
188 | "summary": description, | ||
189 | "description": f"Range {affected_versions}" | ||
190 | } | ||
191 | not_vulnerable += 1 | ||
192 | |||
193 | logging.info("Total CVEs ignored due to not applicable config: %d", not_applicable_config) | ||
194 | logging.info("Total CVEs not vulnerable due version-not-in-range: %d", not_vulnerable) | ||
195 | logging.info("Total vulnerable CVEs: %d", vulnerable) | ||
196 | |||
197 | logging.info("Total CVEs already backported in %s: %s", base_version, | ||
198 | fixed_as_later_backport) | ||
199 | return cves | ||
200 | |||
201 | def read_spdx(spdx_file): | ||
202 | '''Open SPDX file and extract compiled files''' | ||
203 | with open(spdx_file, 'r', encoding='ISO-8859-1') as f: | ||
204 | spdx = json.load(f) | ||
205 | if "spdxVersion" in spdx: | ||
206 | if spdx["spdxVersion"] == "SPDX-2.2": | ||
207 | return read_spdx2(spdx) | ||
208 | if "@graph" in spdx: | ||
209 | return read_spdx3(spdx) | ||
210 | return [] | ||
211 | |||
212 | def read_spdx2(spdx): | ||
213 | ''' | ||
214 | Read spdx2 compiled files from spdx | ||
215 | ''' | ||
216 | cfiles = set() | ||
217 | if 'files' not in spdx: | ||
218 | return cfiles | ||
219 | for item in spdx['files']: | ||
220 | for ftype in item['fileTypes']: | ||
221 | if ftype == "SOURCE": | ||
222 | filename = item["fileName"][item["fileName"].find("/")+1:] | ||
223 | cfiles.add(filename) | ||
224 | return cfiles | ||
225 | |||
226 | def read_spdx3(spdx): | ||
227 | ''' | ||
228 | Read spdx3 compiled files from spdx | ||
229 | ''' | ||
230 | cfiles = set() | ||
231 | for item in spdx["@graph"]: | ||
232 | if "software_primaryPurpose" not in item: | ||
233 | continue | ||
234 | if item["software_primaryPurpose"] == "source": | ||
235 | filename = item['name'][item['name'].find("/")+1:] | ||
236 | cfiles.add(filename) | ||
237 | return cfiles | ||
238 | |||
239 | def check_kernel_compiled_files(compiled_files, cve_info): | ||
240 | """ | ||
241 | Return if a CVE affected us depending on compiled files | ||
242 | """ | ||
243 | files_affected = set() | ||
244 | is_affected = False | ||
245 | |||
246 | for item in cve_info['containers']['cna']['affected']: | ||
247 | if "programFiles" in item: | ||
248 | for f in item['programFiles']: | ||
249 | if f not in files_affected: | ||
250 | files_affected.add(f) | ||
251 | |||
252 | if len(files_affected) > 0: | ||
253 | for f in files_affected: | ||
254 | if f in compiled_files: | ||
255 | logging.debug("File match: %s", f) | ||
256 | is_affected = True | ||
257 | return is_affected, files_affected | ||
258 | |||
259 | def get_cpe_applicability(cve_info, v): | ||
260 | ''' | ||
261 | Check if version is affected and return affected versions | ||
262 | ''' | ||
263 | base_branch = Version(f"{v.major}.{v.minor}") | ||
264 | affected = [] | ||
265 | if not 'cpeApplicability' in cve_info["containers"]["cna"]: | ||
266 | return None, None, None, None, None, None | ||
267 | |||
268 | for nodes in cve_info["containers"]["cna"]["cpeApplicability"]: | ||
269 | for node in nodes.values(): | ||
270 | vulnerable = False | ||
271 | matched_branch = False | ||
272 | first_affected = Version("5000") | ||
273 | last_affected = Version("0") | ||
274 | better_match_first = Version("0") | ||
275 | better_match_last = Version("5000") | ||
276 | |||
277 | if len(node[0]['cpeMatch']) == 0: | ||
278 | first_affected = None | ||
279 | last_affected = None | ||
280 | better_match_first = None | ||
281 | better_match_last = None | ||
282 | |||
283 | for cpe_match in node[0]['cpeMatch']: | ||
284 | version_start_including = Version("0") | ||
285 | version_end_excluding = Version("0") | ||
286 | if 'versionStartIncluding' in cpe_match: | ||
287 | version_start_including = Version(cpe_match['versionStartIncluding']) | ||
288 | else: | ||
289 | version_start_including = Version("0") | ||
290 | # if versionEndExcluding is missing we are in a branch, which is not fixed. | ||
291 | if "versionEndExcluding" in cpe_match: | ||
292 | version_end_excluding = Version(cpe_match["versionEndExcluding"]) | ||
293 | else: | ||
294 | # if versionEndExcluding is missing we are in a branch, which is not fixed. | ||
295 | version_end_excluding = Version( | ||
296 | f"{version_start_including.major}.{version_start_including.minor}.5000" | ||
297 | ) | ||
298 | affected.append(f" {version_start_including}-{version_end_excluding}") | ||
299 | # Detect if versionEnd is in fixed in base branch. It has precedence over the rest | ||
300 | branch_end = Version(f"{version_end_excluding.major}.{version_end_excluding.minor}") | ||
301 | if branch_end == base_branch: | ||
302 | if version_start_including <= v < version_end_excluding: | ||
303 | vulnerable = cpe_match['vulnerable'] | ||
304 | # If we don't match in our branch, we are not vulnerable, | ||
305 | # since we have a backport | ||
306 | matched_branch = True | ||
307 | better_match_first = version_start_including | ||
308 | better_match_last = version_end_excluding | ||
309 | if version_start_including <= v < version_end_excluding and not matched_branch: | ||
310 | if version_end_excluding < better_match_last: | ||
311 | better_match_first = max(version_start_including, better_match_first) | ||
312 | better_match_last = min(better_match_last, version_end_excluding) | ||
313 | vulnerable = cpe_match['vulnerable'] | ||
314 | matched_branch = True | ||
315 | |||
316 | first_affected = min(version_start_including, first_affected) | ||
317 | last_affected = max(version_end_excluding, last_affected) | ||
318 | # Not a better match, we use the first and last affected instead of the fake .5000 | ||
319 | if vulnerable and better_match_last == Version(f"{base_branch}.5000"): | ||
320 | better_match_last = last_affected | ||
321 | better_match_first = first_affected | ||
322 | return vulnerable, first_affected, last_affected, better_match_first, better_match_last, affected | ||
323 | |||
324 | def copy_data(old, new): | ||
325 | '''Update dictionary with new entries, while keeping the old ones''' | ||
326 | for k in new.keys(): | ||
327 | old[k] = new[k] | ||
328 | return old | ||
329 | |||
330 | # Function taken from cve_check.bbclass. Adapted to cve fields | ||
331 | def cve_update(cve_data, cve, entry): | ||
332 | # If no entry, just add it | ||
333 | if cve not in cve_data: | ||
334 | cve_data[cve] = entry | ||
335 | return | ||
336 | # If we are updating, there might be change in the status | ||
337 | if cve_data[cve]['status'] == "Unknown": | ||
338 | cve_data[cve] = copy_data(cve_data[cve], entry) | ||
339 | return | ||
340 | if cve_data[cve]['status'] == entry['status']: | ||
341 | return | ||
342 | if entry['status'] == "Unpatched" and cve_data[cve]['status'] == "Patched": | ||
343 | logging.warning("CVE entry %s update from Patched to Unpatched from the scan result", cve) | ||
344 | cve_data[cve] = copy_data(cve_data[cve], entry) | ||
345 | return | ||
346 | if entry['status'] == "Patched" and cve_data[cve]['status'] == "Unpatched": | ||
347 | logging.warning("CVE entry %s update from Unpatched to Patched from the scan result", cve) | ||
348 | cve_data[cve] = copy_data(cve_data[cve], entry) | ||
349 | return | ||
350 | # If we have an "Ignored", it has a priority | ||
351 | if cve_data[cve]['status'] == "Ignored": | ||
352 | logging.debug("CVE %s not updating because Ignored", cve) | ||
353 | return | ||
354 | # If we have an "Ignored", it has a priority | ||
355 | if entry['status'] == "Ignored": | ||
356 | cve_data[cve] = copy_data(cve_data[cve], entry) | ||
357 | logging.debug("CVE entry %s updated from Unpatched to Ignored", cve) | ||
358 | return | ||
359 | logging.warning("Unhandled CVE entry update for %s %s from %s %s to %s", | ||
360 | cve, cve_data[cve]['status'], cve_data[cve]['detail'], entry['status'], entry['detail']) | ||
361 | |||
362 | def main(): | ||
363 | parser = argparse.ArgumentParser( | ||
364 | description="Update cve-summary with kernel compiled files and kernel CVE information" | ||
365 | ) | ||
366 | parser.add_argument( | ||
367 | "-s", | ||
368 | "--spdx", | ||
369 | help="SPDX2/3 for the kernel. Needs to include compiled sources", | ||
370 | ) | ||
371 | parser.add_argument( | ||
372 | "--datadir", | ||
373 | type=pathlib.Path, | ||
374 | help="Directory where CVE data is", | ||
375 | required=True | ||
376 | ) | ||
377 | parser.add_argument( | ||
378 | "--old-cve-report", | ||
379 | help="CVE report to update. (Optional)", | ||
380 | ) | ||
381 | parser.add_argument( | ||
382 | "--kernel-version", | ||
383 | help="Kernel version. Needed if old cve_report is not provided (Optional)", | ||
384 | type=Version | ||
385 | ) | ||
386 | parser.add_argument( | ||
387 | "--new-cve-report", | ||
388 | help="Output file", | ||
389 | default="cve-summary-enhance.json" | ||
390 | ) | ||
391 | parser.add_argument( | ||
392 | "-D", | ||
393 | "--debug", | ||
394 | help='Enable debug ', | ||
395 | action="store_true") | ||
396 | |||
397 | args = parser.parse_args() | ||
398 | |||
399 | if args.debug: | ||
400 | log_level=logging.DEBUG | ||
401 | else: | ||
402 | log_level=logging.INFO | ||
403 | logging.basicConfig(format='[%(filename)s:%(lineno)d] %(message)s', level=log_level) | ||
404 | |||
405 | if not args.kernel_version and not args.old_cve_report: | ||
406 | parser.error("either --kernel-version or --old-cve-report is needed") | ||
407 | return -1 | ||
408 | |||
409 | # by default we don't check the compiled files, unless provided | ||
410 | compiled_files = [] | ||
411 | if args.spdx: | ||
412 | compiled_files = read_spdx(args.spdx) | ||
413 | logging.info("Total compiled files %d", len(compiled_files)) | ||
414 | |||
415 | if args.old_cve_report: | ||
416 | with open(args.old_cve_report, encoding='ISO-8859-1') as f: | ||
417 | cve_report = json.load(f) | ||
418 | else: | ||
419 | #If summary not provided, we create one | ||
420 | cve_report = { | ||
421 | "version": "1", | ||
422 | "package": [ | ||
423 | { | ||
424 | "name": "linux-yocto", | ||
425 | "version": str(args.kernel_version), | ||
426 | "products": [ | ||
427 | { | ||
428 | "product": "linux_kernel", | ||
429 | "cvesInRecord": "Yes" | ||
430 | } | ||
431 | ], | ||
432 | "issue": [] | ||
433 | } | ||
434 | ] | ||
435 | } | ||
436 | |||
437 | for pkg in cve_report['package']: | ||
438 | is_kernel = False | ||
439 | for product in pkg['products']: | ||
440 | if product['product'] == "linux_kernel": | ||
441 | is_kernel=True | ||
442 | if not is_kernel: | ||
443 | continue | ||
444 | |||
445 | kernel_cves = get_kernel_cves(args.datadir, | ||
446 | compiled_files, | ||
447 | Version(pkg["version"])) | ||
448 | logging.info("Total kernel cves from kernel CNA: %s", len(kernel_cves)) | ||
449 | cves = {issue["id"]: issue for issue in pkg["issue"]} | ||
450 | logging.info("Total kernel before processing cves: %s", len(cves)) | ||
451 | |||
452 | for cve in kernel_cves: | ||
453 | cve_update(cves, cve, kernel_cves[cve]) | ||
454 | |||
455 | pkg["issue"] = [] | ||
456 | for cve in sorted(cves): | ||
457 | pkg["issue"].extend([cves[cve]]) | ||
458 | logging.info("Total kernel cves after processing: %s", len(pkg['issue'])) | ||
459 | |||
460 | with open(args.new_cve_report, "w", encoding='ISO-8859-1') as f: | ||
461 | json.dump(cve_report, f, indent=2) | ||
462 | |||
463 | return 0 | ||
464 | |||
465 | if __name__ == "__main__": | ||
466 | sys.exit(main()) | ||
467 | |||
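For context, the report that the script writes keeps the cve-summary layout built in main() above: a top-level "package" list whose entries carry "name", "version" and an "issue" list of per-CVE dictionaries with "id", "status", "detail", "summary" and "description". A minimal sketch of a consumer of that output, assuming the script's default output file name cve-summary-enhance.json:

```
# Minimal sketch: summarise the enriched report written by
# improve_kernel_cve_report.py (assumes its default output file name).
import json
from collections import Counter

with open("cve-summary-enhance.json", encoding="ISO-8859-1") as f:
    report = json.load(f)

for pkg in report["package"]:
    # Every issue written by the script carries "status" and "detail";
    # issues copied from an existing cve-summary may lack "detail".
    counts = Counter(
        (issue["status"], issue.get("detail", "")) for issue in pkg["issue"]
    )
    print(f"{pkg['name']} {pkg['version']}")
    for (status, detail), n in sorted(counts.items()):
        print(f"  {status} / {detail}: {n}")
```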
diff --git a/scripts/contrib/make-spdx-bindings.sh b/scripts/contrib/make-spdx-bindings.sh
new file mode 100755
index 0000000000..31caaf339d
--- /dev/null
+++ b/scripts/contrib/make-spdx-bindings.sh
@@ -0,0 +1,12 @@
1 | #! /bin/sh | ||
2 | # | ||
3 | # SPDX-License-Identifier: MIT | ||
4 | |||
5 | THIS_DIR="$(dirname "$0")" | ||
6 | |||
7 | VERSION="3.0.1" | ||
8 | |||
9 | shacl2code generate --input https://spdx.org/rdf/$VERSION/spdx-model.ttl \ | ||
10 | --input https://spdx.org/rdf/$VERSION/spdx-json-serialize-annotations.ttl \ | ||
11 | --context https://spdx.org/rdf/$VERSION/spdx-context.jsonld \ | ||
12 | python -o $THIS_DIR/../../meta/lib/oe/spdx30.py | ||
diff --git a/scripts/contrib/oe-image-files-spdx/.gitignore b/scripts/contrib/oe-image-files-spdx/.gitignore
new file mode 100644
index 0000000000..285851c984
--- /dev/null
+++ b/scripts/contrib/oe-image-files-spdx/.gitignore
@@ -0,0 +1,8 @@
1 | *.spdx.json | ||
2 | *.pyc | ||
3 | *.bak | ||
4 | *.swp | ||
5 | *.swo | ||
6 | *.swn | ||
7 | venv/* | ||
8 | .venv/* | ||
diff --git a/scripts/contrib/oe-image-files-spdx/README.md b/scripts/contrib/oe-image-files-spdx/README.md
new file mode 100644
index 0000000000..44f76eacd8
--- /dev/null
+++ b/scripts/contrib/oe-image-files-spdx/README.md
@@ -0,0 +1,24 @@
1 | # OE Image Files from SBoM | ||
2 | |||
3 | This is an example python script that will list the packaged files with their | ||
4 | checksums based on the SPDX 3.0.1 SBoM. | ||
5 | |||
6 | It can be used as a template for other programs to investigate output based on | ||
7 | OE SPDX SBoMs. | ||
8 | |||
9 | ## Installation | ||
10 | |||
11 | This project can be installed using a virtual environment: | ||
12 | ``` | ||
13 | python3 -m venv .venv | ||
14 | source .venv/bin/activate | ||
15 | python3 -m pip install -e '.[dev]' | ||
16 | ``` | ||
17 | |||
18 | ## Usage | ||
19 | |||
20 | After installing, the `oe-image-files` program can be used to show the files, e.g.: | ||
21 | |||
22 | ``` | ||
23 | oe-image-files core-image-minimal-qemux86-64.rootfs.spdx.json | ||
24 | ``` | ||
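For reference, `oe-image-files` prints one line per packaged file in the form used by main.py later in this patch: the file name, a dash, then the SHA-256 value (left empty when no hash is recorded). The path and the truncated digest below are made up for illustration:

```
./usr/bin/busybox.nosuid - 0b240ab26c16...
./etc/hostname - 
```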
diff --git a/scripts/contrib/oe-image-files-spdx/pyproject.toml b/scripts/contrib/oe-image-files-spdx/pyproject.toml
new file mode 100644
index 0000000000..3fab5dd605
--- /dev/null
+++ b/scripts/contrib/oe-image-files-spdx/pyproject.toml
@@ -0,0 +1,23 @@
1 | [project] | ||
2 | name = "oe-image-files" | ||
3 | description = "Displays all packaged files on the root file system" | ||
4 | dynamic = ["version"] | ||
5 | requires-python = ">= 3.8" | ||
6 | readme = "README.md" | ||
7 | |||
8 | dependencies = [ | ||
9 | "spdx_python_model @ git+https://github.com/spdx/spdx-python-model.git@aa40861f11d1b5d20edba7101835341a70d91179", | ||
10 | ] | ||
11 | |||
12 | [project.scripts] | ||
13 | oe-image-files = "oe_image_files:main" | ||
14 | |||
15 | [build-system] | ||
16 | requires = ["hatchling"] | ||
17 | build-backend = "hatchling.build" | ||
18 | |||
19 | [tool.hatch.version] | ||
20 | path = "src/oe_image_files/version.py" | ||
21 | |||
22 | [tool.hatch.metadata] | ||
23 | allow-direct-references = true | ||
diff --git a/scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py
new file mode 100644
index 0000000000..c28a133f2d
--- /dev/null
+++ b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py
@@ -0,0 +1 @@
from .main import main | |||
diff --git a/scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py
new file mode 100644
index 0000000000..8476bf6369
--- /dev/null
+++ b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py
@@ -0,0 +1,86 @@
1 | # SPDX-License-Identifier: MIT | ||
2 | |||
3 | import argparse | ||
4 | from pathlib import Path | ||
5 | |||
6 | |||
7 | from spdx_python_model import v3_0_1 as spdx_3_0_1 | ||
8 | from .version import VERSION | ||
9 | |||
10 | |||
11 | def main(): | ||
12 | parser = argparse.ArgumentParser( | ||
13 | description="Show the packaged files and checksums in an OE image from the SPDX SBoM" | ||
14 | ) | ||
15 | parser.add_argument("file", help="SPDX 3 input file", type=Path) | ||
16 | parser.add_argument("--version", "-V", action="version", version=VERSION) | ||
17 | |||
18 | args = parser.parse_args() | ||
19 | |||
20 | # Load SPDX data from file into a new object set | ||
21 | objset = spdx_3_0_1.SHACLObjectSet() | ||
22 | with args.file.open("r") as f: | ||
23 | d = spdx_3_0_1.JSONLDDeserializer() | ||
24 | d.read(f, objset) | ||
25 | |||
26 | # Find the top level SPDX Document object | ||
27 | for o in objset.foreach_type(spdx_3_0_1.SpdxDocument): | ||
28 | doc = o | ||
29 | break | ||
30 | else: | ||
31 | print("ERROR: No SPDX Document found!") | ||
32 | return 1 | ||
33 | |||
34 | # Find the root SBoM in the document | ||
35 | for o in doc.rootElement: | ||
36 | if isinstance(o, spdx_3_0_1.software_Sbom): | ||
37 | sbom = o | ||
38 | break | ||
39 | else: | ||
40 | print("ERROR: SBoM not found in document") | ||
41 | return 1 | ||
42 | |||
43 | # Find the root file system package in the SBoM | ||
44 | for o in sbom.rootElement: | ||
45 | if ( | ||
46 | isinstance(o, spdx_3_0_1.software_Package) | ||
47 | and o.software_primaryPurpose == spdx_3_0_1.software_SoftwarePurpose.archive | ||
48 | ): | ||
49 | root_package = o | ||
50 | break | ||
51 | else: | ||
52 | print("ERROR: Package not found in document") | ||
53 | return 1 | ||
54 | |||
55 | # Find all relationships of type "contains" that go FROM the root file | ||
56 | # system | ||
57 | files = [] | ||
58 | for rel in objset.foreach_type(spdx_3_0_1.Relationship): | ||
59 | if not rel.relationshipType == spdx_3_0_1.RelationshipType.contains: | ||
60 | continue | ||
61 | |||
62 | if not rel.from_ is root_package: | ||
63 | continue | ||
64 | |||
65 | # Iterate over all files in the TO of the relationship | ||
66 | for o in rel.to: | ||
67 | if not isinstance(o, spdx_3_0_1.software_File): | ||
68 | continue | ||
69 | |||
70 | # Find the SHA 256 hash of the file (if any) | ||
71 | for h in o.verifiedUsing: | ||
72 | if ( | ||
73 | isinstance(h, spdx_3_0_1.Hash) | ||
74 | and h.algorithm == spdx_3_0_1.HashAlgorithm.sha256 | ||
75 | ): | ||
76 | files.append((o.name, h.hashValue)) | ||
77 | break | ||
78 | else: | ||
79 | files.append((o.name, "")) | ||
80 | |||
81 | # Print files | ||
82 | files.sort(key=lambda x: x[0]) | ||
83 | for name, hash_val in files: | ||
84 | print(f"{name} - {hash_val}") | ||
85 | |||
86 | return 0 | ||
diff --git a/scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py
new file mode 100644
index 0000000000..901e5110b2
--- /dev/null
+++ b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py
@@ -0,0 +1 @@
VERSION = "0.0.1" | |||
diff --git a/scripts/contrib/patchreview.py b/scripts/contrib/patchreview.py
index bceae06561..d8d7b214e5 100755
--- a/scripts/contrib/patchreview.py
+++ b/scripts/contrib/patchreview.py
@@ -14,6 +14,10 @@ import pathlib
 import re
 import subprocess
 
+import sys
+sys.path.append(os.path.join(sys.path[0], '../../meta/lib'))
+import oe.qa
+
 # TODO
 # - option to just list all broken files
 # - test suite
@@ -47,7 +51,7 @@ def blame_patch(patch):
     return subprocess.check_output(("git", "log",
                                     "--follow", "--find-renames", "--diff-filter=A",
                                     "--format=%s (%aN <%aE>)",
-                                    "--", patch)).decode("utf-8").splitlines()
+                                    "--", patch), cwd=os.path.dirname(patch)).decode("utf-8").splitlines()
 
 def patchreview(patches):
 
@@ -78,12 +82,11 @@ def patchreview(patches):
         else:
             result.missing_sob = True
 
-
         # Find the Upstream-Status tag
         match = status_re.search(content)
         if match:
-            value = match.group(1)
-            if value != "Upstream-Status:":
+            value = oe.qa.check_upstream_status(patch)
+            if value:
                 result.malformed_upstream_status = value
 
             value = match.group(2).lower()
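The patchreview.py change delegates Upstream-Status validation to oe.qa.check_upstream_status() from meta/lib/oe (hence the sys.path addition in the first hunk). For orientation, the tag it validates follows the usual OpenEmbedded patch convention, roughly of this shape; the values below are illustrative and the authoritative rules live in meta/lib/oe/qa.py and the patch guidelines:

```
Upstream-Status: Pending
Upstream-Status: Submitted [https://lore.kernel.org/...]
Upstream-Status: Backport [commit or URL]
Upstream-Status: Inappropriate [embedded specific]
```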