Diffstat (limited to 'meta/lib')
-rw-r--r--  meta/lib/bblayers/machines.py | 37
-rw-r--r--  meta/lib/bblayers/makesetup.py | 9
-rw-r--r--  meta/lib/bblayers/setupwriters/oe-setup-layers.py | 5
-rw-r--r--  meta/lib/oe/__init__.py | 6
-rw-r--r--  meta/lib/oe/bootfiles.py | 57
-rw-r--r--  meta/lib/oe/buildcfg.py | 2
-rw-r--r--  meta/lib/oe/copy_buildsystem.py | 10
-rw-r--r--  meta/lib/oe/cve_check.py | 72
-rw-r--r--  meta/lib/oe/package.py | 85
-rw-r--r--  meta/lib/oe/package_manager/__init__.py | 9
-rw-r--r--  meta/lib/oe/package_manager/common_deb_ipk.py | 97
-rw-r--r--  meta/lib/oe/package_manager/deb/__init__.py | 85
-rw-r--r--  meta/lib/oe/package_manager/ipk/__init__.py | 89
-rw-r--r--  meta/lib/oe/qa.py | 22
-rw-r--r--  meta/lib/oe/recipeutils.py | 64
-rw-r--r--  meta/lib/oe/reproducible.py | 19
-rw-r--r--  meta/lib/oe/rootfs.py | 4
-rw-r--r--  meta/lib/oe/sbom30.py | 1121
-rw-r--r--  meta/lib/oe/spdx30.py | 6020
-rw-r--r--  meta/lib/oe/spdx30_tasks.py | 1243
-rw-r--r--  meta/lib/oe/spdx_common.py | 227
-rw-r--r--  meta/lib/oe/sstatesig.py | 54
-rw-r--r--  meta/lib/oe/utils.py | 2
-rw-r--r--  meta/lib/oeqa/core/target/serial.py | 315
-rw-r--r--  meta/lib/oeqa/core/target/ssh.py | 16
-rw-r--r--  meta/lib/oeqa/core/tests/common.py | 1
-rw-r--r--  meta/lib/oeqa/manual/crops.json | 294
-rw-r--r--  meta/lib/oeqa/manual/eclipse-plugin.json | 322
-rw-r--r--  meta/lib/oeqa/runtime/cases/ltp.py | 2
-rw-r--r--  meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt | 6
-rw-r--r--  meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt | 8
-rw-r--r--  meta/lib/oeqa/runtime/cases/parselogs.py | 2
-rw-r--r--  meta/lib/oeqa/runtime/cases/scp.py | 2
-rw-r--r--  meta/lib/oeqa/runtime/cases/ssh.py | 31
-rw-r--r--  meta/lib/oeqa/runtime/cases/systemd.py | 23
-rw-r--r--  meta/lib/oeqa/runtime/context.py | 12
-rw-r--r--  meta/lib/oeqa/sdk/case.py | 9
-rw-r--r--  meta/lib/oeqa/sdk/cases/autotools.py (renamed from meta/lib/oeqa/sdk/cases/buildcpio.py) | 7
-rw-r--r--  meta/lib/oeqa/sdk/cases/cmake.py (renamed from meta/lib/oeqa/sdk/cases/assimp.py) | 16
-rw-r--r--  meta/lib/oeqa/sdk/cases/gcc.py | 4
-rw-r--r--  meta/lib/oeqa/sdk/cases/gtk3.py (renamed from meta/lib/oeqa/sdk/cases/buildgalculator.py) | 6
-rw-r--r--  meta/lib/oeqa/sdk/cases/kmod.py | 41
-rw-r--r--  meta/lib/oeqa/sdk/cases/makefile.py (renamed from meta/lib/oeqa/sdk/cases/buildlzip.py) | 10
-rw-r--r--  meta/lib/oeqa/sdk/cases/maturin.py | 1
-rw-r--r--  meta/lib/oeqa/sdk/cases/meson.py (renamed from meta/lib/oeqa/sdk/cases/buildepoxy.py) | 8
-rw-r--r--  meta/lib/oeqa/sdk/cases/python.py | 2
-rw-r--r--  meta/lib/oeqa/sdk/cases/rust.py | 1
-rw-r--r--  meta/lib/oeqa/sdkext/cases/devtool.py | 7
-rw-r--r--  meta/lib/oeqa/selftest/cases/bbclasses.py | 106
-rw-r--r--  meta/lib/oeqa/selftest/cases/binutils.py | 2
-rw-r--r--  meta/lib/oeqa/selftest/cases/buildoptions.py | 10
-rw-r--r--  meta/lib/oeqa/selftest/cases/cve_check.py | 51
-rw-r--r--  meta/lib/oeqa/selftest/cases/debuginfod.py | 14
-rw-r--r--  meta/lib/oeqa/selftest/cases/devtool.py | 109
-rw-r--r--  meta/lib/oeqa/selftest/cases/distrodata.py | 20
-rw-r--r--  meta/lib/oeqa/selftest/cases/fitimage.py | 264
-rw-r--r--  meta/lib/oeqa/selftest/cases/gcc.py | 2
-rw-r--r--  meta/lib/oeqa/selftest/cases/imagefeatures.py | 15
-rw-r--r--  meta/lib/oeqa/selftest/cases/incompatible_lic.py | 6
-rw-r--r--  meta/lib/oeqa/selftest/cases/layerappend.py | 2
-rw-r--r--  meta/lib/oeqa/selftest/cases/locales.py | 2
-rw-r--r--  meta/lib/oeqa/selftest/cases/meta_ide.py | 4
-rw-r--r--  meta/lib/oeqa/selftest/cases/minidebuginfo.py | 16
-rw-r--r--  meta/lib/oeqa/selftest/cases/oescripts.py | 2
-rw-r--r--  meta/lib/oeqa/selftest/cases/overlayfs.py | 41
-rw-r--r--  meta/lib/oeqa/selftest/cases/package.py | 26
-rw-r--r--  meta/lib/oeqa/selftest/cases/picolibc.py | 18
-rw-r--r--  meta/lib/oeqa/selftest/cases/recipetool.py | 21
-rw-r--r--  meta/lib/oeqa/selftest/cases/recipeutils.py | 4
-rw-r--r--  meta/lib/oeqa/selftest/cases/reproducible.py | 11
-rw-r--r--  meta/lib/oeqa/selftest/cases/retain.py | 241
-rw-r--r--  meta/lib/oeqa/selftest/cases/runtime_test.py | 7
-rw-r--r--  meta/lib/oeqa/selftest/cases/rust.py | 118
-rw-r--r--  meta/lib/oeqa/selftest/cases/spdx.py | 134
-rw-r--r--  meta/lib/oeqa/selftest/cases/sstatetests.py | 47
-rw-r--r--  meta/lib/oeqa/selftest/context.py | 5
-rw-r--r--  meta/lib/oeqa/utils/__init__.py | 8
-rw-r--r--  meta/lib/oeqa/utils/commands.py | 18
-rw-r--r--  meta/lib/oeqa/utils/postactions.py | 69
-rw-r--r--  meta/lib/patchtest/patch.py | 19
-rw-r--r--  meta/lib/patchtest/repo.py | 88
-rw-r--r--  meta/lib/patchtest/requirements.txt | 1
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.fail | 57
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.pass | 56
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.fail | 56
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.pass | 56
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.fail | 78
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.pass | 78
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.fail | 68
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.pass | 72
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.1.fail | 36
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.2.fail | 35
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.fail | 66
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.pass | 79
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMbox.test_series_merge_on_head.1.skip | 2
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.fail | 53
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.pass | 53
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.fail | 53
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.pass | 53
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.1.fail | 60
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.2.fail | 62
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.pass | 60
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.fail | 25
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.pass | 26
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.fail | 23
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.pass | 25
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.fail | 65
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.pass | 65
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.fail | 23
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.pass | 25
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.fail | 64
-rw-r--r--  meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.pass | 68
-rw-r--r--  meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.fail | 57
-rw-r--r--  meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.pass | 53
-rw-r--r--  meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.fail | 60
-rw-r--r--  meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.pass | 60
-rw-r--r--  meta/lib/patchtest/tests/test_mbox.py | 8
-rw-r--r--  meta/lib/patchtest/utils.py | 129
118 files changed, 11552 insertions, 2343 deletions
diff --git a/meta/lib/bblayers/machines.py b/meta/lib/bblayers/machines.py
new file mode 100644
index 0000000000..5fd970af0e
--- /dev/null
+++ b/meta/lib/bblayers/machines.py
@@ -0,0 +1,37 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import logging
8import pathlib
9
10from bblayers.common import LayerPlugin
11
12logger = logging.getLogger('bitbake-layers')
13
14def plugin_init(plugins):
15 return ShowMachinesPlugin()
16
17class ShowMachinesPlugin(LayerPlugin):
18 def do_show_machines(self, args):
19 """List the machines available in the currently configured layers."""
20
21 for layer_dir in self.bblayers:
22 layer_name = self.get_layer_name(layer_dir)
23
24 if args.layer and args.layer != layer_name:
25 continue
26
27 for p in sorted(pathlib.Path(layer_dir).glob("conf/machine/*.conf")):
28 if args.bare:
29 logger.plain("%s" % (p.stem))
30 else:
31 logger.plain("%s (%s)" % (p.stem, layer_name))
32
33
34 def register_commands(self, sp):
35 parser_show_machines = self.add_command(sp, "show-machines", self.do_show_machines)
36 parser_show_machines.add_argument('-b', '--bare', help='output just the machine names, not the source layer', action='store_true')
37 parser_show_machines.add_argument('-l', '--layer', help='Limit to machines in the specified layer')
diff --git a/meta/lib/bblayers/makesetup.py b/meta/lib/bblayers/makesetup.py
index 99d5973760..4199b5f069 100644
--- a/meta/lib/bblayers/makesetup.py
+++ b/meta/lib/bblayers/makesetup.py
@@ -48,8 +48,9 @@ class MakeSetupPlugin(LayerPlugin):
48 if l_name == 'workspace': 48 if l_name == 'workspace':
49 continue 49 continue
50 if l_ismodified: 50 if l_ismodified:
51 logger.error("Layer {name} in {path} has uncommitted modifications or is not in a git repository.".format(name=l_name,path=l_path)) 51 e = "Layer {name} in {path} has uncommitted modifications or is not in a git repository.".format(name=l_name,path=l_path)
52 return 52 logger.error(e)
53 raise Exception(e)
53 repo_path = oe.buildcfg.get_metadata_git_toplevel(l_path) 54 repo_path = oe.buildcfg.get_metadata_git_toplevel(l_path)
54 55
55 if self._is_submodule(repo_path): 56 if self._is_submodule(repo_path):
@@ -62,9 +63,6 @@ class MakeSetupPlugin(LayerPlugin):
62 'describe':oe.buildcfg.get_metadata_git_describe(repo_path)}} 63 'describe':oe.buildcfg.get_metadata_git_describe(repo_path)}}
63 if repo_path == destdir_repo: 64 if repo_path == destdir_repo:
64 repos[repo_path]['contains_this_file'] = True 65 repos[repo_path]['contains_this_file'] = True
65 if not repos[repo_path]['git-remote']['remotes'] and not repos[repo_path]['contains_this_file']:
66 logger.error("Layer repository in {path} does not have any remotes configured. Please add at least one with 'git remote add'.".format(path=repo_path))
67 return
68 66
69 top_path = os.path.commonpath([os.path.dirname(r) for r in repos.keys()]) 67 top_path = os.path.commonpath([os.path.dirname(r) for r in repos.keys()])
70 68
@@ -74,6 +72,7 @@ class MakeSetupPlugin(LayerPlugin):
74 repos_nopaths[r_nopath] = repos[r] 72 repos_nopaths[r_nopath] = repos[r]
75 r_relpath = os.path.relpath(r, top_path) 73 r_relpath = os.path.relpath(r, top_path)
76 repos_nopaths[r_nopath]['path'] = r_relpath 74 repos_nopaths[r_nopath]['path'] = r_relpath
75 repos_nopaths[r_nopath]['originpath'] = r
77 return repos_nopaths 76 return repos_nopaths
78 77
79 def do_make_setup(self, args): 78 def do_make_setup(self, args):
diff --git a/meta/lib/bblayers/setupwriters/oe-setup-layers.py b/meta/lib/bblayers/setupwriters/oe-setup-layers.py
index 59ca968ff3..8faeabfabc 100644
--- a/meta/lib/bblayers/setupwriters/oe-setup-layers.py
+++ b/meta/lib/bblayers/setupwriters/oe-setup-layers.py
@@ -85,6 +85,11 @@ class OeSetupLayersWriter():
85 if not os.path.exists(args.destdir): 85 if not os.path.exists(args.destdir):
86 os.makedirs(args.destdir) 86 os.makedirs(args.destdir)
87 repos = parent.make_repo_config(args.destdir) 87 repos = parent.make_repo_config(args.destdir)
88 for r in repos.values():
89 if not r['git-remote']['remotes'] and not r.get('contains_this_file', False):
90 e = "Layer repository in {path} does not have any remotes configured. Please add at least one with 'git remote add'.".format(path=r['originpath'])
91 raise Exception(e)
92 del r['originpath']
88 json = {"version":"1.0","sources":repos} 93 json = {"version":"1.0","sources":repos}
89 if not repos: 94 if not repos:
90 err = "Could not determine layer sources" 95 err = "Could not determine layer sources"
diff --git a/meta/lib/oe/__init__.py b/meta/lib/oe/__init__.py
index 6eb536ad28..d760481283 100644
--- a/meta/lib/oe/__init__.py
+++ b/meta/lib/oe/__init__.py
@@ -7,6 +7,8 @@
7from pkgutil import extend_path 7from pkgutil import extend_path
8__path__ = extend_path(__path__, __name__) 8__path__ = extend_path(__path__, __name__)
9 9
10BBIMPORTS = ["data", "path", "utils", "types", "package", "packagedata", \ 10# Modules with visitorcode need to go first else anything depending on them won't be
11# processed correctly (e.g. qa)
12BBIMPORTS = ["qa", "data", "path", "utils", "types", "package", "packagedata", \
11 "packagegroup", "sstatesig", "lsb", "cachedpath", "license", \ 13 "packagegroup", "sstatesig", "lsb", "cachedpath", "license", \
12 "qa", "reproducible", "rust", "buildcfg", "go"] 14 "reproducible", "rust", "buildcfg", "go"]
diff --git a/meta/lib/oe/bootfiles.py b/meta/lib/oe/bootfiles.py
new file mode 100644
index 0000000000..155fe742db
--- /dev/null
+++ b/meta/lib/oe/bootfiles.py
@@ -0,0 +1,57 @@
1#
2# SPDX-License-Identifier: MIT
3#
4# Copyright (C) 2024 Marcus Folkesson
5# Author: Marcus Folkesson <marcus.folkesson@gmail.com>
6#
7# Utility functions handling boot files
8#
9# Look into deploy_dir and search for boot_files.
10# Returns a list of tuples with (original filepath relative to
11# deploy_dir, desired filepath renaming)
12#
13# Heavily inspired by bootimg-partition.py
14#
15def get_boot_files(deploy_dir, boot_files):
16 import re
17 import os
18 from glob import glob
19
20 if boot_files is None:
21 return None
22
23 # list of tuples (src_name, dst_name)
24 deploy_files = []
25 for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files):
26 if ';' in src_entry:
27 dst_entry = tuple(src_entry.split(';'))
28 if not dst_entry[0] or not dst_entry[1]:
29 raise ValueError('Malformed boot file entry: %s' % src_entry)
30 else:
31 dst_entry = (src_entry, src_entry)
32
33 deploy_files.append(dst_entry)
34
35 install_files = []
36 for deploy_entry in deploy_files:
37 src, dst = deploy_entry
38 if '*' in src:
39 # by default install files under their basename
40 entry_name_fn = os.path.basename
41 if dst != src:
42 # unless a target name was given, then treat name
43 # as a directory and append a basename
44 entry_name_fn = lambda name: \
45 os.path.join(dst,
46 os.path.basename(name))
47
48 srcs = glob(os.path.join(deploy_dir, src))
49
50 for entry in srcs:
51 src = os.path.relpath(entry, deploy_dir)
52 entry_dst_name = entry_name_fn(entry)
53 install_files.append((src, entry_dst_name))
54 else:
55 install_files.append((src, dst))
56
57 return install_files
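
The new get_boot_files() helper parses an IMAGE_BOOT_FILES-style string in which an entry may rename its target with "src;dst" and may use globs whose matches are installed under their basename. A minimal usage sketch (not part of the patch; the deploy directory path and file names are hypothetical):

    from oe.bootfiles import get_boot_files

    # "u-boot.bin;u-boot.itb" is installed under a new name, while "*.dtb" is
    # expanded with glob() and each match is installed under its basename
    entries = get_boot_files("/path/to/deploy", "zImage u-boot.bin;u-boot.itb *.dtb")
    # e.g. [('zImage', 'zImage'), ('u-boot.bin', 'u-boot.itb'),
    #       ('my-board.dtb', 'my-board.dtb')]
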
diff --git a/meta/lib/oe/buildcfg.py b/meta/lib/oe/buildcfg.py
index 27b059b834..4b22f18f36 100644
--- a/meta/lib/oe/buildcfg.py
+++ b/meta/lib/oe/buildcfg.py
@@ -52,7 +52,7 @@ def get_metadata_git_remote_url(path, remote):
52 52
53def get_metadata_git_describe(path): 53def get_metadata_git_describe(path):
54 try: 54 try:
55 describe, _ = bb.process.run('git describe --tags', cwd=path) 55 describe, _ = bb.process.run('git describe --tags --dirty', cwd=path)
56 except bb.process.ExecutionError: 56 except bb.process.ExecutionError:
57 return "" 57 return ""
58 return describe.strip() 58 return describe.strip()
diff --git a/meta/lib/oe/copy_buildsystem.py b/meta/lib/oe/copy_buildsystem.py
index 81abfbf9e2..ced751b835 100644
--- a/meta/lib/oe/copy_buildsystem.py
+++ b/meta/lib/oe/copy_buildsystem.py
@@ -193,13 +193,17 @@ def prune_lockedsigs(excluded_tasks, excluded_targets, lockedsigs, onlynative, p
193 else: 193 else:
194 f.write(line) 194 f.write(line)
195 invalue = False 195 invalue = False
196 elif line.startswith('SIGGEN_LOCKEDSIGS'): 196 elif line.startswith('SIGGEN_LOCKEDSIGS_t'):
197 invalue = True 197 invalue = True
198 f.write(line) 198 f.write(line)
199 else:
200 invalue = False
201 f.write(line)
199 202
200def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_output, copy_output=None): 203def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_output, copy_output=None):
201 merged = {} 204 merged = {}
202 arch_order = [] 205 arch_order = []
206 otherdata = []
203 with open(lockedsigs_main, 'r') as f: 207 with open(lockedsigs_main, 'r') as f:
204 invalue = None 208 invalue = None
205 for line in f: 209 for line in f:
@@ -212,6 +216,9 @@ def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_outpu
212 invalue = line[18:].split('=', 1)[0].rstrip() 216 invalue = line[18:].split('=', 1)[0].rstrip()
213 merged[invalue] = [] 217 merged[invalue] = []
214 arch_order.append(invalue) 218 arch_order.append(invalue)
219 else:
220 invalue = None
221 otherdata.append(line)
215 222
216 with open(lockedsigs_extra, 'r') as f: 223 with open(lockedsigs_extra, 'r') as f:
217 invalue = None 224 invalue = None
@@ -246,6 +253,7 @@ def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_outpu
246 f.write(' "\n') 253 f.write(' "\n')
247 fulltypes.append(typename) 254 fulltypes.append(typename)
248 f.write('SIGGEN_LOCKEDSIGS_TYPES = "%s"\n' % ' '.join(fulltypes)) 255 f.write('SIGGEN_LOCKEDSIGS_TYPES = "%s"\n' % ' '.join(fulltypes))
256 f.write('\n' + ''.join(otherdata))
249 257
250 if copy_output: 258 if copy_output:
251 write_sigs_file(copy_output, list(tocopy.keys()), tocopy) 259 write_sigs_file(copy_output, list(tocopy.keys()), tocopy)
diff --git a/meta/lib/oe/cve_check.py b/meta/lib/oe/cve_check.py
index ed5c714cb8..487f30dc25 100644
--- a/meta/lib/oe/cve_check.py
+++ b/meta/lib/oe/cve_check.py
@@ -88,7 +88,7 @@ def get_patched_cves(d):
88 # (cve_match regular expression) 88 # (cve_match regular expression)
89 cve_file_name_match = re.compile(r".*(CVE-\d{4}-\d+)", re.IGNORECASE) 89 cve_file_name_match = re.compile(r".*(CVE-\d{4}-\d+)", re.IGNORECASE)
90 90
91 patched_cves = set() 91 patched_cves = {}
92 patches = oe.patch.src_patches(d) 92 patches = oe.patch.src_patches(d)
93 bb.debug(2, "Scanning %d patches for CVEs" % len(patches)) 93 bb.debug(2, "Scanning %d patches for CVEs" % len(patches))
94 for url in patches: 94 for url in patches:
@@ -98,7 +98,7 @@ def get_patched_cves(d):
98 fname_match = cve_file_name_match.search(patch_file) 98 fname_match = cve_file_name_match.search(patch_file)
99 if fname_match: 99 if fname_match:
100 cve = fname_match.group(1).upper() 100 cve = fname_match.group(1).upper()
101 patched_cves.add(cve) 101 patched_cves[cve] = {"abbrev-status": "Patched", "status": "fix-file-included", "resource": patch_file}
102 bb.debug(2, "Found %s from patch file name %s" % (cve, patch_file)) 102 bb.debug(2, "Found %s from patch file name %s" % (cve, patch_file))
103 103
104 # Remote patches won't be present and compressed patches won't be 104 # Remote patches won't be present and compressed patches won't be
@@ -124,7 +124,7 @@ def get_patched_cves(d):
124 cves = patch_text[match.start()+5:match.end()] 124 cves = patch_text[match.start()+5:match.end()]
125 for cve in cves.split(): 125 for cve in cves.split():
126 bb.debug(2, "Patch %s solves %s" % (patch_file, cve)) 126 bb.debug(2, "Patch %s solves %s" % (patch_file, cve))
127 patched_cves.add(cve) 127 patched_cves[cve] = {"abbrev-status": "Patched", "status": "fix-file-included", "resource": patch_file}
128 text_match = True 128 text_match = True
129 129
130 if not fname_match and not text_match: 130 if not fname_match and not text_match:
@@ -132,10 +132,16 @@ def get_patched_cves(d):
132 132
133 # Search for additional patched CVEs 133 # Search for additional patched CVEs
134 for cve in (d.getVarFlags("CVE_STATUS") or {}): 134 for cve in (d.getVarFlags("CVE_STATUS") or {}):
135 decoded_status, _, _ = decode_cve_status(d, cve) 135 decoded_status = decode_cve_status(d, cve)
136 if decoded_status == "Patched": 136 products = d.getVar("CVE_PRODUCT")
137 bb.debug(2, "CVE %s is additionally patched" % cve) 137 if has_cve_product_match(decoded_status, products) == True:
138 patched_cves.add(cve) 138 patched_cves[cve] = {
139 "abbrev-status": decoded_status["mapping"],
140 "status": decoded_status["detail"],
141 "justification": decoded_status["description"],
142 "affected-vendor": decoded_status["vendor"],
143 "affected-product": decoded_status["product"]
144 }
139 145
140 return patched_cves 146 return patched_cves
141 147
@@ -227,19 +233,57 @@ def convert_cve_version(version):
227 233
228def decode_cve_status(d, cve): 234def decode_cve_status(d, cve):
229 """ 235 """
230 Convert CVE_STATUS into status, detail and description. 236 Convert CVE_STATUS into status, vendor, product, detail and description.
231 """ 237 """
232 status = d.getVarFlag("CVE_STATUS", cve) 238 status = d.getVarFlag("CVE_STATUS", cve)
233 if not status: 239 if not status:
234 return ("", "", "") 240 return {}
241
242 status_split = status.split(':', 5)
243 status_out = {}
244 status_out["detail"] = status_split[0]
245 product = "*"
246 vendor = "*"
247 description = ""
248 if len(status_split) >= 4 and status_split[1].strip() == "cpe":
249 # Both vendor and product are mandatory if cpe: present, the syntax is then:
250 # detail: cpe:vendor:product:description
251 vendor = status_split[2].strip()
252 product = status_split[3].strip()
253 description = status_split[4].strip()
254 elif len(status_split) >= 2 and status_split[1].strip() == "cpe":
255 # Malformed CPE
256 bb.warn('Invalid CPE information for CVE_STATUS[%s] = "%s", not setting CPE' % (cve, status))
257 else:
258 # Other case: no CPE, the syntax is then:
259 # detail: description
260 description = status_split[len(status_split)-1].strip() if (len(status_split) > 1) else ""
235 261
236 status_split = status.split(':', 1) 262 status_out["vendor"] = vendor
237 detail = status_split[0] 263 status_out["product"] = product
238 description = status_split[1].strip() if (len(status_split) > 1) else "" 264 status_out["description"] = description
239 265
240 status_mapping = d.getVarFlag("CVE_CHECK_STATUSMAP", detail) 266 status_mapping = d.getVarFlag("CVE_CHECK_STATUSMAP", status_out['detail'])
241 if status_mapping is None: 267 if status_mapping is None:
242 bb.warn('Invalid detail "%s" for CVE_STATUS[%s] = "%s", fallback to Unpatched' % (detail, cve, status)) 268 bb.warn('Invalid detail "%s" for CVE_STATUS[%s] = "%s", fallback to Unpatched' % (detail, cve, status))
243 status_mapping = "Unpatched" 269 status_mapping = "Unpatched"
270 status_out["mapping"] = status_mapping
271
272 return status_out
273
274def has_cve_product_match(detailed_status, products):
275 """
276 Check product/vendor match between detailed_status from decode_cve_status and a string of
277 products (like from CVE_PRODUCT)
278 """
279 for product in products.split():
280 vendor = "*"
281 if ":" in product:
282 vendor, product = product.split(":", 1)
283
284 if (vendor == detailed_status["vendor"] or detailed_status["vendor"] == "*") and \
285 (product == detailed_status["product"] or detailed_status["product"] == "*"):
286 return True
244 287
245 return (status_mapping, detail, description) 288 #if no match, return False
289 return False
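
decode_cve_status() now returns a dictionary and understands an optional "cpe:<vendor>:<product>:" qualifier, and get_patched_cves() only records the entry when has_cve_product_match() finds a matching CVE_PRODUCT value. A small sketch of the data shapes involved (not part of the patch; the CVE id, vendor and product are hypothetical, and the 'Ignored' mapping assumes the default CVE_CHECK_STATUSMAP):

    from oe.cve_check import has_cve_product_match

    # Shape returned by decode_cve_status(d, "CVE-2024-12345") for a recipe with
    #   CVE_STATUS[CVE-2024-12345] = "not-applicable-platform: cpe:acme:widget: only affects another OS"
    status = {
        "detail": "not-applicable-platform",
        "vendor": "acme",
        "product": "widget",
        "description": "only affects another OS",
        "mapping": "Ignored",   # assuming the default CVE_CHECK_STATUSMAP
    }

    # get_patched_cves() records the entry only when vendor/product match CVE_PRODUCT
    print(has_cve_product_match(status, "acme:widget bar"))   # True
    print(has_cve_product_match(status, "othervendor:tool"))  # False
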
diff --git a/meta/lib/oe/package.py b/meta/lib/oe/package.py
index 1511ba47c4..c213a9a3ca 100644
--- a/meta/lib/oe/package.py
+++ b/meta/lib/oe/package.py
@@ -14,6 +14,7 @@ import glob
14import stat 14import stat
15import mmap 15import mmap
16import subprocess 16import subprocess
17import shutil
17 18
18import oe.cachedpath 19import oe.cachedpath
19 20
@@ -195,14 +196,28 @@ def strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, max_process, qa_alre
195 196
196 oe.utils.multiprocess_launch_mp(runstrip, sfiles, max_process) 197 oe.utils.multiprocess_launch_mp(runstrip, sfiles, max_process)
197 198
199TRANSLATE = (
200 ("@", "@at@"),
201 (" ", "@space@"),
202 ("\t", "@tab@"),
203 ("[", "@openbrace@"),
204 ("]", "@closebrace@"),
205 ("_", "@underscore@"),
206 (":", "@colon@"),
207)
198 208
199def file_translate(file): 209def file_translate(file):
200 ft = file.replace("@", "@at@") 210 ft = file
201 ft = ft.replace(" ", "@space@") 211 for s, replace in TRANSLATE:
202 ft = ft.replace("\t", "@tab@") 212 ft = ft.replace(s, replace)
203 ft = ft.replace("[", "@openbrace@") 213
204 ft = ft.replace("]", "@closebrace@") 214 return ft
205 ft = ft.replace("_", "@underscore@") 215
216def file_reverse_translate(file):
217 ft = file
218 for s, replace in reversed(TRANSLATE):
219 ft = ft.replace(replace, s)
220
206 return ft 221 return ft
207 222
208def filedeprunner(arg): 223def filedeprunner(arg):
@@ -1064,6 +1079,7 @@ def process_split_and_strip_files(d):
1064 d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'): 1079 d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
1065 checkelf = {} 1080 checkelf = {}
1066 checkelflinks = {} 1081 checkelflinks = {}
1082 checkstatic = {}
1067 for root, dirs, files in cpath.walk(dvar): 1083 for root, dirs, files in cpath.walk(dvar):
1068 for f in files: 1084 for f in files:
1069 file = os.path.join(root, f) 1085 file = os.path.join(root, f)
@@ -1077,10 +1093,6 @@ def process_split_and_strip_files(d):
1077 if file in skipfiles: 1093 if file in skipfiles:
1078 continue 1094 continue
1079 1095
1080 if oe.package.is_static_lib(file):
1081 staticlibs.append(file)
1082 continue
1083
1084 try: 1096 try:
1085 ltarget = cpath.realpath(file, dvar, False) 1097 ltarget = cpath.realpath(file, dvar, False)
1086 s = cpath.lstat(ltarget) 1098 s = cpath.lstat(ltarget)
@@ -1092,6 +1104,13 @@ def process_split_and_strip_files(d):
1092 continue 1104 continue
1093 if not s: 1105 if not s:
1094 continue 1106 continue
1107
1108 if oe.package.is_static_lib(file):
1109 # Use a reference of device ID and inode number to identify files
1110 file_reference = "%d_%d" % (s.st_dev, s.st_ino)
1111 checkstatic[file] = (file, file_reference)
1112 continue
1113
1095 # Check its an executable 1114 # Check its an executable
1096 if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \ 1115 if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \
1097 or (s[stat.ST_MODE] & stat.S_IXOTH) \ 1116 or (s[stat.ST_MODE] & stat.S_IXOTH) \
@@ -1156,6 +1175,27 @@ def process_split_and_strip_files(d):
1156 # Modified the file so clear the cache 1175 # Modified the file so clear the cache
1157 cpath.updatecache(file) 1176 cpath.updatecache(file)
1158 1177
1178 # Do the same hardlink processing as above, but for static libraries
1179 results = list(checkstatic.keys())
1180
1181 # As above, sort the results.
1182 results.sort(key=lambda x: x[0])
1183
1184 for file in results:
1185 # Use a reference of device ID and inode number to identify files
1186 file_reference = checkstatic[file][1]
1187 if file_reference in inodes:
1188 os.unlink(file)
1189 os.link(inodes[file_reference][0], file)
1190 inodes[file_reference].append(file)
1191 else:
1192 inodes[file_reference] = [file]
1193 # break hardlink
1194 bb.utils.break_hardlinks(file)
1195 staticlibs.append(file)
1196 # Modified the file so clear the cache
1197 cpath.updatecache(file)
1198
1159 def strip_pkgd_prefix(f): 1199 def strip_pkgd_prefix(f):
1160 nonlocal dvar 1200 nonlocal dvar
1161 1201
@@ -1194,11 +1234,24 @@ def process_split_and_strip_files(d):
1194 dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"] 1234 dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
1195 fpath = dvar + dest 1235 fpath = dvar + dest
1196 ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"] 1236 ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
1197 bb.utils.mkdirhier(os.path.dirname(fpath)) 1237 if os.access(ftarget, os.R_OK):
1198 # Only one hardlink of separated debug info file in each directory 1238 bb.utils.mkdirhier(os.path.dirname(fpath))
1199 if not os.access(fpath, os.R_OK): 1239 # Only one hardlink of separated debug info file in each directory
1200 #bb.note("Link %s -> %s" % (fpath, ftarget)) 1240 if not os.access(fpath, os.R_OK):
1201 os.link(ftarget, fpath) 1241 #bb.note("Link %s -> %s" % (fpath, ftarget))
1242 os.link(ftarget, fpath)
1243 elif (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
1244 deststatic = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(file) + dv["staticappend"]
1245 fpath = dvar + deststatic
1246 ftarget = dvar + dv["staticlibdir"] + os.path.dirname(target) + dv["staticdir"] + "/" + os.path.basename(target) + dv["staticappend"]
1247 if os.access(ftarget, os.R_OK):
1248 bb.utils.mkdirhier(os.path.dirname(fpath))
1249 # Only one hardlink of separated debug info file in each directory
1250 if not os.access(fpath, os.R_OK):
1251 #bb.note("Link %s -> %s" % (fpath, ftarget))
1252 os.link(ftarget, fpath)
1253 else:
1254 bb.note("Unable to find inode link target %s" % (target))
1202 1255
1203 # Create symlinks for all cases we were able to split symbols 1256 # Create symlinks for all cases we were able to split symbols
1204 for file in symlinks: 1257 for file in symlinks:
@@ -1839,7 +1892,7 @@ def process_pkgconfig(pkgfiles, d):
1839 if m: 1892 if m:
1840 hdr = m.group(1) 1893 hdr = m.group(1)
1841 exp = pd.expand(m.group(2)) 1894 exp = pd.expand(m.group(2))
1842 if hdr == 'Requires': 1895 if hdr == 'Requires' or hdr == 'Requires.private':
1843 pkgconfig_needed[pkg] += exp.replace(',', ' ').split() 1896 pkgconfig_needed[pkg] += exp.replace(',', ' ').split()
1844 continue 1897 continue
1845 m = var_re.match(l) 1898 m = var_re.match(l)
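
Regarding the file name escaping above: the translation table is now shared between file_translate() and the new file_reverse_translate(), which applies the replacements in reverse order so that the '@' escaping round-trips cleanly. A quick sketch (not part of the patch; the file name is made up):

    from oe.package import file_translate, file_reverse_translate

    name = "/usr/lib/my pkg/weird_[file]:1"   # made-up path with characters needing escaping
    encoded = file_translate(name)
    # '/usr/lib/my@space@pkg/weird@underscore@@openbrace@file@closebrace@@colon@1'
    assert file_reverse_translate(encoded) == name
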
diff --git a/meta/lib/oe/package_manager/__init__.py b/meta/lib/oe/package_manager/__init__.py
index 6774cdb794..d3b2317894 100644
--- a/meta/lib/oe/package_manager/__init__.py
+++ b/meta/lib/oe/package_manager/__init__.py
@@ -449,7 +449,7 @@ class PackageManager(object, metaclass=ABCMeta):
449 return res 449 return res
450 return _append(uris, base_paths) 450 return _append(uris, base_paths)
451 451
452def create_packages_dir(d, subrepo_dir, deploydir, taskname, filterbydependencies): 452def create_packages_dir(d, subrepo_dir, deploydir, taskname, filterbydependencies, include_self=False):
453 """ 453 """
454 Go through our do_package_write_X dependencies and hardlink the packages we depend 454 Go through our do_package_write_X dependencies and hardlink the packages we depend
455 upon into the repo directory. This prevents us seeing other packages that may 455 upon into the repo directory. This prevents us seeing other packages that may
@@ -486,14 +486,17 @@ def create_packages_dir(d, subrepo_dir, deploydir, taskname, filterbydependencie
486 bb.fatal("Couldn't find ourself in BB_TASKDEPDATA?") 486 bb.fatal("Couldn't find ourself in BB_TASKDEPDATA?")
487 pkgdeps = set() 487 pkgdeps = set()
488 start = [start] 488 start = [start]
489 seen = set(start) 489 if include_self:
490 seen = set()
491 else:
492 seen = set(start)
490 # Support direct dependencies (do_rootfs -> do_package_write_X) 493 # Support direct dependencies (do_rootfs -> do_package_write_X)
491 # or indirect dependencies within PN (do_populate_sdk_ext -> do_rootfs -> do_package_write_X) 494 # or indirect dependencies within PN (do_populate_sdk_ext -> do_rootfs -> do_package_write_X)
492 while start: 495 while start:
493 next = [] 496 next = []
494 for dep2 in start: 497 for dep2 in start:
495 for dep in taskdepdata[dep2][3]: 498 for dep in taskdepdata[dep2][3]:
496 if taskdepdata[dep][0] != pn: 499 if include_self or taskdepdata[dep][0] != pn:
497 if "do_" + taskname in dep: 500 if "do_" + taskname in dep:
498 pkgdeps.add(dep) 501 pkgdeps.add(dep)
499 elif dep not in seen: 502 elif dep not in seen:
diff --git a/meta/lib/oe/package_manager/common_deb_ipk.py b/meta/lib/oe/package_manager/common_deb_ipk.py
new file mode 100644
index 0000000000..6a1e28ee6f
--- /dev/null
+++ b/meta/lib/oe/package_manager/common_deb_ipk.py
@@ -0,0 +1,97 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import glob
8import os
9import subprocess
10import tempfile
11
12import bb
13
14from oe.package_manager import opkg_query, PackageManager
15
16class OpkgDpkgPM(PackageManager):
17 def __init__(self, d, target_rootfs):
18 """
19 This is an abstract class. Do not instantiate this directly.
20 """
21 super(OpkgDpkgPM, self).__init__(d, target_rootfs)
22
23 def package_info(self, pkg):
24 """
25 Returns a dictionary with the package info.
26 """
27 raise NotImplementedError
28
29 def _common_package_info(self, cmd):
30 """
31 "Returns a dictionary with the package info.
32
33 This method extracts the common parts for Opkg and Dpkg
34 """
35
36 proc = subprocess.run(cmd, capture_output=True, encoding="utf-8", shell=True)
37 if proc.returncode:
38 bb.fatal("Unable to list available packages. Command '%s' "
39 "returned %d:\n%s" % (cmd, proc.returncode, proc.stderr))
40 elif proc.stderr:
41 bb.note("Command '%s' returned stderr: %s" % (cmd, proc.stderr))
42
43 return opkg_query(proc.stdout)
44
45 def extract(self, pkg):
46 """
47 Returns the path to a tmpdir containing the extracted contents of a package.
48
49 Deleting the tmpdir is the responsibility of the caller.
50 """
51 pkg_info = self.package_info(pkg)
52 if not pkg_info:
53 bb.fatal("Unable to get information for package '%s' while "
54 "trying to extract the package." % pkg)
55
56 ar_cmd = bb.utils.which(os.getenv("PATH"), "ar")
57 tar_cmd = bb.utils.which(os.getenv("PATH"), "tar")
58 pkg_path = pkg_info[pkg]["filepath"]
59
60 if not os.path.isfile(pkg_path):
61 bb.fatal("Unable to extract package for '%s'."
62 "File %s doesn't exists" % (pkg, pkg_path))
63
64 tmp_dir = tempfile.mkdtemp()
65 current_dir = os.getcwd()
66 os.chdir(tmp_dir)
67
68 try:
69 cmd = [ar_cmd, 'x', pkg_path]
70 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
71 data_tar = glob.glob("data.tar.*")
72 if len(data_tar) != 1:
73 bb.fatal("Unable to extract %s package. Failed to identify "
74 "data tarball (found tarballs '%s').",
75 pkg_path, data_tar)
76 data_tar = data_tar[0]
77 cmd = [tar_cmd, 'xf', data_tar]
78 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
79 except subprocess.CalledProcessError as e:
80 bb.utils.remove(tmp_dir, recurse=True)
81 bb.fatal("Unable to extract %s package. Command '%s' "
82 "returned %d:\n%s" % (pkg_path, ' '.join(cmd), e.returncode, e.output.decode("utf-8")))
83 except OSError as e:
84 bb.utils.remove(tmp_dir, recurse=True)
85 bb.fatal("Unable to extract %s package. Command '%s' "
86 "returned %d:\n%s at %s" % (pkg_path, ' '.join(cmd), e.errno, e.strerror, e.filename))
87
88 bb.note("Extracted %s to %s" % (pkg_path, tmp_dir))
89 bb.utils.remove(os.path.join(tmp_dir, "debian-binary"))
90 bb.utils.remove(os.path.join(tmp_dir, "control.tar.gz"))
91 bb.utils.remove(os.path.join(tmp_dir, data_tar))
92 os.chdir(current_dir)
93
94 return tmp_dir
95
96 def _handle_intercept_failure(self, registered_pkgs):
97 self.mark_packages("unpacked", registered_pkgs.split())
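
The new OpkgDpkgPM base class consolidates the extract()/package_info() helpers previously duplicated in the deb and ipk backends, and locates the data tarball with a glob instead of hard-coding data.tar.xz or data.tar.zst. A rough standalone sketch of the extraction it performs (not part of the patch; the package path is hypothetical, and the real method resolves the archive through package_info() and the bitbake datastore):

    import glob
    import os
    import subprocess
    import tempfile

    pkg_path = "/path/to/deploy-ipk/core2-64/example_1.0-r0_core2-64.ipk"   # hypothetical
    tmp_dir = tempfile.mkdtemp()
    # .deb and .ipk files are both 'ar' archives holding control.tar.* and data.tar.*
    subprocess.check_output(["ar", "x", pkg_path], cwd=tmp_dir, stderr=subprocess.STDOUT)
    data_tar = glob.glob(os.path.join(tmp_dir, "data.tar.*"))[0]   # .xz for deb, .zst for ipk, ...
    subprocess.check_output(["tar", "xf", data_tar], cwd=tmp_dir, stderr=subprocess.STDOUT)
    # tmp_dir now contains the package's file tree; removing it is up to the caller
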
diff --git a/meta/lib/oe/package_manager/deb/__init__.py b/meta/lib/oe/package_manager/deb/__init__.py
index 0c23c884c1..e09e81e490 100644
--- a/meta/lib/oe/package_manager/deb/__init__.py
+++ b/meta/lib/oe/package_manager/deb/__init__.py
@@ -7,6 +7,7 @@
7import re 7import re
8import subprocess 8import subprocess
9from oe.package_manager import * 9from oe.package_manager import *
10from oe.package_manager.common_deb_ipk import OpkgDpkgPM
10 11
11class DpkgIndexer(Indexer): 12class DpkgIndexer(Indexer):
12 def _create_configs(self): 13 def _create_configs(self):
@@ -111,72 +112,6 @@ class PMPkgsList(PkgsList):
111 112
112 return opkg_query(cmd_output) 113 return opkg_query(cmd_output)
113 114
114class OpkgDpkgPM(PackageManager):
115 def __init__(self, d, target_rootfs):
116 """
117 This is an abstract class. Do not instantiate this directly.
118 """
119 super(OpkgDpkgPM, self).__init__(d, target_rootfs)
120
121 def package_info(self, pkg, cmd):
122 """
123 Returns a dictionary with the package info.
124
125 This method extracts the common parts for Opkg and Dpkg
126 """
127
128 try:
129 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8")
130 except subprocess.CalledProcessError as e:
131 bb.fatal("Unable to list available packages. Command '%s' "
132 "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8")))
133 return opkg_query(output)
134
135 def extract(self, pkg, pkg_info):
136 """
137 Returns the path to a tmpdir where resides the contents of a package.
138
139 Deleting the tmpdir is responsability of the caller.
140
141 This method extracts the common parts for Opkg and Dpkg
142 """
143
144 ar_cmd = bb.utils.which(os.getenv("PATH"), "ar")
145 tar_cmd = bb.utils.which(os.getenv("PATH"), "tar")
146 pkg_path = pkg_info[pkg]["filepath"]
147
148 if not os.path.isfile(pkg_path):
149 bb.fatal("Unable to extract package for '%s'."
150 "File %s doesn't exists" % (pkg, pkg_path))
151
152 tmp_dir = tempfile.mkdtemp()
153 current_dir = os.getcwd()
154 os.chdir(tmp_dir)
155 data_tar = 'data.tar.xz'
156
157 try:
158 cmd = [ar_cmd, 'x', pkg_path]
159 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
160 cmd = [tar_cmd, 'xf', data_tar]
161 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
162 except subprocess.CalledProcessError as e:
163 bb.utils.remove(tmp_dir, recurse=True)
164 bb.fatal("Unable to extract %s package. Command '%s' "
165 "returned %d:\n%s" % (pkg_path, ' '.join(cmd), e.returncode, e.output.decode("utf-8")))
166 except OSError as e:
167 bb.utils.remove(tmp_dir, recurse=True)
168 bb.fatal("Unable to extract %s package. Command '%s' "
169 "returned %d:\n%s at %s" % (pkg_path, ' '.join(cmd), e.errno, e.strerror, e.filename))
170
171 bb.note("Extracted %s to %s" % (pkg_path, tmp_dir))
172 bb.utils.remove(os.path.join(tmp_dir, "debian-binary"))
173 bb.utils.remove(os.path.join(tmp_dir, "control.tar.gz"))
174 os.chdir(current_dir)
175
176 return tmp_dir
177
178 def _handle_intercept_failure(self, registered_pkgs):
179 self.mark_packages("unpacked", registered_pkgs.split())
180 115
181class DpkgPM(OpkgDpkgPM): 116class DpkgPM(OpkgDpkgPM):
182 def __init__(self, d, target_rootfs, archs, base_archs, apt_conf_dir=None, deb_repo_workdir="oe-rootfs-repo", filterbydependencies=True): 117 def __init__(self, d, target_rootfs, archs, base_archs, apt_conf_dir=None, deb_repo_workdir="oe-rootfs-repo", filterbydependencies=True):
@@ -496,7 +431,7 @@ class DpkgPM(OpkgDpkgPM):
496 Returns a dictionary with the package info. 431 Returns a dictionary with the package info.
497 """ 432 """
498 cmd = "%s show %s" % (self.apt_cache_cmd, pkg) 433 cmd = "%s show %s" % (self.apt_cache_cmd, pkg)
499 pkg_info = super(DpkgPM, self).package_info(pkg, cmd) 434 pkg_info = self._common_package_info(cmd)
500 435
501 pkg_arch = pkg_info[pkg]["pkgarch"] 436 pkg_arch = pkg_info[pkg]["pkgarch"]
502 pkg_filename = pkg_info[pkg]["filename"] 437 pkg_filename = pkg_info[pkg]["filename"]
@@ -504,19 +439,3 @@ class DpkgPM(OpkgDpkgPM):
504 os.path.join(self.deploy_dir, pkg_arch, pkg_filename) 439 os.path.join(self.deploy_dir, pkg_arch, pkg_filename)
505 440
506 return pkg_info 441 return pkg_info
507
508 def extract(self, pkg):
509 """
510 Returns the path to a tmpdir where resides the contents of a package.
511
512 Deleting the tmpdir is responsability of the caller.
513 """
514 pkg_info = self.package_info(pkg)
515 if not pkg_info:
516 bb.fatal("Unable to get information for package '%s' while "
517 "trying to extract the package." % pkg)
518
519 tmp_dir = super(DpkgPM, self).extract(pkg, pkg_info)
520 bb.utils.remove(os.path.join(tmp_dir, "data.tar.xz"))
521
522 return tmp_dir
diff --git a/meta/lib/oe/package_manager/ipk/__init__.py b/meta/lib/oe/package_manager/ipk/__init__.py
index 8cc9953a02..3d998e52ff 100644
--- a/meta/lib/oe/package_manager/ipk/__init__.py
+++ b/meta/lib/oe/package_manager/ipk/__init__.py
@@ -8,6 +8,7 @@ import re
8import shutil 8import shutil
9import subprocess 9import subprocess
10from oe.package_manager import * 10from oe.package_manager import *
11from oe.package_manager.common_deb_ipk import OpkgDpkgPM
11 12
12class OpkgIndexer(Indexer): 13class OpkgIndexer(Indexer):
13 def write_index(self): 14 def write_index(self):
@@ -90,76 +91,6 @@ class PMPkgsList(PkgsList):
90 return opkg_query(cmd_output) 91 return opkg_query(cmd_output)
91 92
92 93
93
94class OpkgDpkgPM(PackageManager):
95 def __init__(self, d, target_rootfs):
96 """
97 This is an abstract class. Do not instantiate this directly.
98 """
99 super(OpkgDpkgPM, self).__init__(d, target_rootfs)
100
101 def package_info(self, pkg, cmd):
102 """
103 Returns a dictionary with the package info.
104
105 This method extracts the common parts for Opkg and Dpkg
106 """
107
108 proc = subprocess.run(cmd, capture_output=True, encoding="utf-8", shell=True)
109 if proc.returncode:
110 bb.fatal("Unable to list available packages. Command '%s' "
111 "returned %d:\n%s" % (cmd, proc.returncode, proc.stderr))
112 elif proc.stderr:
113 bb.note("Command '%s' returned stderr: %s" % (cmd, proc.stderr))
114
115 return opkg_query(proc.stdout)
116
117 def extract(self, pkg, pkg_info):
118 """
119 Returns the path to a tmpdir where resides the contents of a package.
120
121 Deleting the tmpdir is responsability of the caller.
122
123 This method extracts the common parts for Opkg and Dpkg
124 """
125
126 ar_cmd = bb.utils.which(os.getenv("PATH"), "ar")
127 tar_cmd = bb.utils.which(os.getenv("PATH"), "tar")
128 pkg_path = pkg_info[pkg]["filepath"]
129
130 if not os.path.isfile(pkg_path):
131 bb.fatal("Unable to extract package for '%s'."
132 "File %s doesn't exists" % (pkg, pkg_path))
133
134 tmp_dir = tempfile.mkdtemp()
135 current_dir = os.getcwd()
136 os.chdir(tmp_dir)
137 data_tar = 'data.tar.zst'
138
139 try:
140 cmd = [ar_cmd, 'x', pkg_path]
141 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
142 cmd = [tar_cmd, 'xf', data_tar]
143 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
144 except subprocess.CalledProcessError as e:
145 bb.utils.remove(tmp_dir, recurse=True)
146 bb.fatal("Unable to extract %s package. Command '%s' "
147 "returned %d:\n%s" % (pkg_path, ' '.join(cmd), e.returncode, e.output.decode("utf-8")))
148 except OSError as e:
149 bb.utils.remove(tmp_dir, recurse=True)
150 bb.fatal("Unable to extract %s package. Command '%s' "
151 "returned %d:\n%s at %s" % (pkg_path, ' '.join(cmd), e.errno, e.strerror, e.filename))
152
153 bb.note("Extracted %s to %s" % (pkg_path, tmp_dir))
154 bb.utils.remove(os.path.join(tmp_dir, "debian-binary"))
155 bb.utils.remove(os.path.join(tmp_dir, "control.tar.gz"))
156 os.chdir(current_dir)
157
158 return tmp_dir
159
160 def _handle_intercept_failure(self, registered_pkgs):
161 self.mark_packages("unpacked", registered_pkgs.split())
162
163class OpkgPM(OpkgDpkgPM): 94class OpkgPM(OpkgDpkgPM):
164 def __init__(self, d, target_rootfs, config_file, archs, task_name='target', ipk_repo_workdir="oe-rootfs-repo", filterbydependencies=True, prepare_index=True): 95 def __init__(self, d, target_rootfs, config_file, archs, task_name='target', ipk_repo_workdir="oe-rootfs-repo", filterbydependencies=True, prepare_index=True):
165 super(OpkgPM, self).__init__(d, target_rootfs) 96 super(OpkgPM, self).__init__(d, target_rootfs)
@@ -485,7 +416,7 @@ class OpkgPM(OpkgDpkgPM):
485 Returns a dictionary with the package info. 416 Returns a dictionary with the package info.
486 """ 417 """
487 cmd = "%s %s info %s" % (self.opkg_cmd, self.opkg_args, pkg) 418 cmd = "%s %s info %s" % (self.opkg_cmd, self.opkg_args, pkg)
488 pkg_info = super(OpkgPM, self).package_info(pkg, cmd) 419 pkg_info = self._common_package_info(cmd)
489 420
490 pkg_arch = pkg_info[pkg]["arch"] 421 pkg_arch = pkg_info[pkg]["arch"]
491 pkg_filename = pkg_info[pkg]["filename"] 422 pkg_filename = pkg_info[pkg]["filename"]
@@ -493,19 +424,3 @@ class OpkgPM(OpkgDpkgPM):
493 os.path.join(self.deploy_dir, pkg_arch, pkg_filename) 424 os.path.join(self.deploy_dir, pkg_arch, pkg_filename)
494 425
495 return pkg_info 426 return pkg_info
496
497 def extract(self, pkg):
498 """
499 Returns the path to a tmpdir where resides the contents of a package.
500
501 Deleting the tmpdir is responsability of the caller.
502 """
503 pkg_info = self.package_info(pkg)
504 if not pkg_info:
505 bb.fatal("Unable to get information for package '%s' while "
506 "trying to extract the package." % pkg)
507
508 tmp_dir = super(OpkgPM, self).extract(pkg, pkg_info)
509 bb.utils.remove(os.path.join(tmp_dir, "data.tar.zst"))
510
511 return tmp_dir
diff --git a/meta/lib/oe/qa.py b/meta/lib/oe/qa.py
index f8ae3c743f..cd36cb5070 100644
--- a/meta/lib/oe/qa.py
+++ b/meta/lib/oe/qa.py
@@ -4,6 +4,7 @@
4# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
5# 5#
6 6
7import ast
7import os, struct, mmap 8import os, struct, mmap
8 9
9class NotELFFileError(Exception): 10class NotELFFileError(Exception):
@@ -186,6 +187,20 @@ def write_error(type, error, d):
186 with open(logfile, "a+") as f: 187 with open(logfile, "a+") as f:
187 f.write("%s: %s [%s]\n" % (p, error, type)) 188 f.write("%s: %s [%s]\n" % (p, error, type))
188 189
190def handle_error_visitorcode(name, args):
191 execs = set()
192 contains = {}
193 warn = None
194 if isinstance(args[0], ast.Constant) and isinstance(args[0].value, str):
195 for i in ["ERROR_QA", "WARN_QA"]:
196 if i not in contains:
197 contains[i] = set()
198 contains[i].add(args[0].value)
199 else:
200 warn = args[0]
201 execs.add(name)
202 return contains, execs, warn
203
189def handle_error(error_class, error_msg, d): 204def handle_error(error_class, error_msg, d):
190 if error_class in (d.getVar("ERROR_QA") or "").split(): 205 if error_class in (d.getVar("ERROR_QA") or "").split():
191 write_error(error_class, error_msg, d) 206 write_error(error_class, error_msg, d)
@@ -198,12 +213,7 @@ def handle_error(error_class, error_msg, d):
198 else: 213 else:
199 bb.note("QA Issue: %s [%s]" % (error_msg, error_class)) 214 bb.note("QA Issue: %s [%s]" % (error_msg, error_class))
200 return True 215 return True
201 216handle_error.visitorcode = handle_error_visitorcode
202def add_message(messages, section, new_msg):
203 if section not in messages:
204 messages[section] = new_msg
205 else:
206 messages[section] = messages[section] + "\n" + new_msg
207 217
208def exit_with_message_if_errors(message, d): 218def exit_with_message_if_errors(message, d):
209 qa_fatal_errors = bb.utils.to_boolean(d.getVar("QA_ERRORS_FOUND"), False) 219 qa_fatal_errors = bb.utils.to_boolean(d.getVar("QA_ERRORS_FOUND"), False)
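
handle_error() now carries a visitorcode hook so that bitbake's python code visitor can record, without executing the function, which QA classes a snippet may raise: a literal first argument is tracked against ERROR_QA and WARN_QA, anything else is returned as a warning expression. A tiny sketch of what the hook returns (not part of the patch; calling it directly like this is purely illustrative):

    import ast
    from oe.qa import handle_error_visitorcode

    # What the visitor would pass for: oe.qa.handle_error("ldflags", msg, d)
    contains, execs, warn = handle_error_visitorcode("oe.qa.handle_error",
                                                     [ast.Constant(value="ldflags")])
    # contains -> {'ERROR_QA': {'ldflags'}, 'WARN_QA': {'ldflags'}}
    # execs    -> {'oe.qa.handle_error'}
    # warn     -> None (a non-literal first argument would be returned here instead)
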
diff --git a/meta/lib/oe/recipeutils.py b/meta/lib/oe/recipeutils.py
index de1fbdd3a8..56be75dc9c 100644
--- a/meta/lib/oe/recipeutils.py
+++ b/meta/lib/oe/recipeutils.py
@@ -818,7 +818,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
818 instdirline = 'install -d ${D}%s' % os.path.dirname(instdestpath) 818 instdirline = 'install -d ${D}%s' % os.path.dirname(instdestpath)
819 if not instdirline in instfunclines: 819 if not instdirline in instfunclines:
820 instfunclines.append(instdirline) 820 instfunclines.append(instdirline)
821 instfunclines.append('install -m %s ${WORKDIR}/%s ${D}%s' % (perms, os.path.basename(srcfile), instdestpath)) 821 instfunclines.append('install -m %s ${UNPACKDIR}/%s ${D}%s' % (perms, os.path.basename(srcfile), instdestpath))
822 if instfunclines: 822 if instfunclines:
823 bbappendlines.append(('do_install:append%s()' % appendoverride, '', instfunclines)) 823 bbappendlines.append(('do_install:append%s()' % appendoverride, '', instfunclines))
824 824
@@ -1112,7 +1112,7 @@ def _get_recipe_upgrade_status(data):
1112 maintainer = data.getVar('RECIPE_MAINTAINER') 1112 maintainer = data.getVar('RECIPE_MAINTAINER')
1113 no_upgrade_reason = data.getVar('RECIPE_NO_UPDATE_REASON') 1113 no_upgrade_reason = data.getVar('RECIPE_NO_UPDATE_REASON')
1114 1114
1115 return (pn, status, cur_ver, next_ver, maintainer, revision, no_upgrade_reason) 1115 return {'pn':pn, 'status':status, 'cur_ver':cur_ver, 'next_ver':next_ver, 'maintainer':maintainer, 'revision':revision, 'no_upgrade_reason':no_upgrade_reason}
1116 1116
1117def get_recipe_upgrade_status(recipes=None): 1117def get_recipe_upgrade_status(recipes=None):
1118 pkgs_list = [] 1118 pkgs_list = []
@@ -1154,6 +1154,7 @@ def get_recipe_upgrade_status(recipes=None):
1154 if not recipes: 1154 if not recipes:
1155 recipes = tinfoil.all_recipe_files(variants=False) 1155 recipes = tinfoil.all_recipe_files(variants=False)
1156 1156
1157 recipeincludes = {}
1157 for fn in recipes: 1158 for fn in recipes:
1158 try: 1159 try:
1159 if fn.startswith("/"): 1160 if fn.startswith("/"):
@@ -1178,8 +1179,65 @@ def get_recipe_upgrade_status(recipes=None):
1178 1179
1179 data_copy_list.append(data_copy) 1180 data_copy_list.append(data_copy)
1180 1181
1182 recipeincludes[data.getVar('FILE')] = {'bbincluded':data.getVar('BBINCLUDED').split(),'pn':data.getVar('PN')}
1183
1181 from concurrent.futures import ProcessPoolExecutor 1184 from concurrent.futures import ProcessPoolExecutor
1182 with ProcessPoolExecutor(max_workers=utils.cpu_count()) as executor: 1185 with ProcessPoolExecutor(max_workers=utils.cpu_count()) as executor:
1183 pkgs_list = executor.map(_get_recipe_upgrade_status, data_copy_list) 1186 pkgs_list = executor.map(_get_recipe_upgrade_status, data_copy_list)
1184 1187
1185 return pkgs_list 1188 return _group_recipes(pkgs_list, _get_common_include_recipes(recipeincludes))
1189
1190def get_common_include_recipes():
1191 with bb.tinfoil.Tinfoil() as tinfoil:
1192 tinfoil.prepare(config_only=False)
1193
1194 recipes = tinfoil.all_recipe_files(variants=False)
1195
1196 recipeincludes = {}
1197 for fn in recipes:
1198 data = tinfoil.parse_recipe_file(fn)
1199 recipeincludes[fn] = {'bbincluded':data.getVar('BBINCLUDED').split(),'pn':data.getVar('PN')}
1200 return _get_common_include_recipes(recipeincludes)
1201
1202def _get_common_include_recipes(recipeincludes_all):
1203 recipeincludes = {}
1204 for fn,data in recipeincludes_all.items():
1205 bbincluded_filtered = [i for i in data['bbincluded'] if os.path.dirname(i) == os.path.dirname(fn) and i != fn]
1206 if bbincluded_filtered:
1207 recipeincludes[data['pn']] = bbincluded_filtered
1208
1209 recipeincludes_inverted = {}
1210 for k,v in recipeincludes.items():
1211 for i in v:
1212 recipeincludes_inverted.setdefault(i,set()).add(k)
1213
1214 recipeincludes_inverted_filtered = {k:v for k,v in recipeincludes_inverted.items() if len(v) > 1}
1215
1216 recipes_with_shared_includes = list()
1217 for v in recipeincludes_inverted_filtered.values():
1218 recipeset = v
1219 for v1 in recipeincludes_inverted_filtered.values():
1220 if recipeset.intersection(v1):
1221 recipeset.update(v1)
1222 if recipeset not in recipes_with_shared_includes:
1223 recipes_with_shared_includes.append(recipeset)
1224
1225 return recipes_with_shared_includes
1226
1227def _group_recipes(recipes, groups):
1228 recipedict = {}
1229 for r in recipes:
1230 recipedict[r['pn']] = r
1231
1232 recipegroups = []
1233 for g in groups:
1234 recipeset = []
1235 for r in g:
1236 if r in recipedict.keys():
1237 recipeset.append(recipedict[r])
1238 del recipedict[r]
1239 recipegroups.append(recipeset)
1240
1241 for r in recipedict.values():
1242 recipegroups.append([r])
1243 return recipegroups
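
get_recipe_upgrade_status() now returns per-recipe dictionaries grouped by shared local include files, computed by _get_common_include_recipes() from each recipe's BBINCLUDED. A small sketch of the grouping (not part of the patch; the layer paths and recipe names are hypothetical):

    from oe.recipeutils import _get_common_include_recipes

    # Hypothetical parse data: recipe file -> the files it included (BBINCLUDED) and its PN
    recipeincludes = {
        "/layer/recipes-foo/foo/foo_1.0.bb":        {"bbincluded": ["/layer/recipes-foo/foo/foo.inc"], "pn": "foo"},
        "/layer/recipes-foo/foo/foo-native_1.0.bb": {"bbincluded": ["/layer/recipes-foo/foo/foo.inc"], "pn": "foo-native"},
        "/layer/recipes-bar/bar/bar_2.0.bb":        {"bbincluded": ["/layer/classes/bar.bbclass"],     "pn": "bar"},
    }
    print(_get_common_include_recipes(recipeincludes))
    # -> [{'foo', 'foo-native'}]: recipes sharing a .inc in their own directory form one group
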
diff --git a/meta/lib/oe/reproducible.py b/meta/lib/oe/reproducible.py
index a9f717159e..1957c97434 100644
--- a/meta/lib/oe/reproducible.py
+++ b/meta/lib/oe/reproducible.py
@@ -75,10 +75,10 @@ def get_source_date_epoch_from_known_files(d, sourcedir):
75 return source_date_epoch 75 return source_date_epoch
76 76
77def find_git_folder(d, sourcedir): 77def find_git_folder(d, sourcedir):
78 # First guess: WORKDIR/git 78 # First guess: UNPACKDIR/git
79 # This is the default git fetcher unpack path 79 # This is the default git fetcher unpack path
80 workdir = d.getVar('WORKDIR') 80 unpackdir = d.getVar('UNPACKDIR')
81 gitpath = os.path.join(workdir, "git/.git") 81 gitpath = os.path.join(unpackdir, "git/.git")
82 if os.path.isdir(gitpath): 82 if os.path.isdir(gitpath):
83 return gitpath 83 return gitpath
84 84
@@ -88,15 +88,16 @@ def find_git_folder(d, sourcedir):
88 return gitpath 88 return gitpath
89 89
90 # Perhaps there was a subpath or destsuffix specified. 90 # Perhaps there was a subpath or destsuffix specified.
91 # Go looking in the WORKDIR 91 # Go looking in the UNPACKDIR
92 exclude = set(["build", "image", "license-destdir", "patches", "pseudo", 92 for root, dirs, files in os.walk(unpackdir, topdown=True):
93 "recipe-sysroot", "recipe-sysroot-native", "sysroot-destdir", "temp"])
94 for root, dirs, files in os.walk(workdir, topdown=True):
95 dirs[:] = [d for d in dirs if d not in exclude]
96 if '.git' in dirs: 93 if '.git' in dirs:
97 return os.path.join(root, ".git") 94 return os.path.join(root, ".git")
98 95
99 bb.warn("Failed to find a git repository in WORKDIR: %s" % workdir) 96 for root, dirs, files in os.walk(sourcedir, topdown=True):
97 if '.git' in dirs:
98 return os.path.join(root, ".git")
99
100 bb.warn("Failed to find a git repository in UNPACKDIR: %s" % unpackdir)
100 return None 101 return None
101 102
102def get_source_date_epoch_from_git(d, sourcedir): 103def get_source_date_epoch_from_git(d, sourcedir):
diff --git a/meta/lib/oe/rootfs.py b/meta/lib/oe/rootfs.py
index 8cd48f9450..5abce4ad7d 100644
--- a/meta/lib/oe/rootfs.py
+++ b/meta/lib/oe/rootfs.py
@@ -269,7 +269,11 @@ class Rootfs(object, metaclass=ABCMeta):
269 self.pm.remove(["run-postinsts"]) 269 self.pm.remove(["run-postinsts"])
270 270
271 image_rorfs = bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs", 271 image_rorfs = bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs",
272 True, False, self.d) and \
273 not bb.utils.contains("IMAGE_FEATURES",
274 "read-only-rootfs-delayed-postinsts",
272 True, False, self.d) 275 True, False, self.d)
276
273 image_rorfs_force = self.d.getVar('FORCE_RO_REMOVE') 277 image_rorfs_force = self.d.getVar('FORCE_RO_REMOVE')
274 278
275 if image_rorfs or image_rorfs_force == "1": 279 if image_rorfs or image_rorfs_force == "1":
diff --git a/meta/lib/oe/sbom30.py b/meta/lib/oe/sbom30.py
new file mode 100644
index 0000000000..76bfb752ef
--- /dev/null
+++ b/meta/lib/oe/sbom30.py
@@ -0,0 +1,1121 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7from pathlib import Path
8
9import oe.spdx30
10import bb
11import re
12import hashlib
13import uuid
14import os
15import oe.spdx_common
16from datetime import datetime, timezone
17
18OE_SPDX_BASE = "https://rdf.openembedded.org/spdx/3.0/"
19
20VEX_VERSION = "1.0.0"
21
22SPDX_BUILD_TYPE = "http://openembedded.org/bitbake"
23
24
25@oe.spdx30.register(OE_SPDX_BASE + "link-extension")
26class OELinkExtension(oe.spdx30.extension_Extension):
27 """
28 This custom extension controls if an Element creates a symlink based on
29 its SPDX ID in the deploy directory. Some elements may not be able to be
30 linked because they are duplicated in multiple documents (e.g. the bitbake
31 Build Element). Those elements can add this extension and set link_spdx_id
32 to False
33
34 It is an internal extension that should be removed when writing out a final
35 SBoM
36 """
37
38 CLOSED = True
39 INTERNAL = True
40
41 @classmethod
42 def _register_props(cls):
43 super()._register_props()
44 cls._add_property(
45 "link_spdx_id",
46 oe.spdx30.BooleanProp(),
47 OE_SPDX_BASE + "link-spdx-id",
48 min_count=1,
49 max_count=1,
50 )
51
52 # The symlinks written to the deploy directory are based on the hash of
53 # the SPDX ID. While this makes it easy to look them up, it can be
54 # difficult to trace an Element to the hashed symlink name. As a
55 # debugging aid, this property is set to the basename of the symlink
56 # when the symlink is created to make it easier to trace
57 cls._add_property(
58 "link_name",
59 oe.spdx30.StringProp(),
60 OE_SPDX_BASE + "link-name",
61 max_count=1,
62 )
63
64
65@oe.spdx30.register(OE_SPDX_BASE + "id-alias")
66class OEIdAliasExtension(oe.spdx30.extension_Extension):
67 """
68 This extension allows an Element to provide an internal alias for the SPDX
69 ID. Since SPDX requires unique URIs for each SPDX ID, most of the objects
70 created have a unique UUID namespace and the unihash of the task encoded in
71 their SPDX ID. However, this causes a problem for referencing documents
72 across recipes, since the taskhash of a dependency may not factor into the
73 taskhash of the current task and thus the current task won't rebuild and
74 see the new SPDX ID when the dependency changes (e.g. ABI safe recipes and
75 tasks).
76
77 To help work around this, this extension provides a non-unique alias for an
78 Element by which it can be referenced from other tasks/recipes. When a
79 final SBoM is created, references to these aliases will be replaced with
80 the actual unique SPDX ID.
81
82 Most Elements will automatically get an alias created when they are written
83 out if they do not already have one. To suppress the creation of an alias,
84 add an extension with a blank `alias` property.
85
86
87    It is an internal extension that should be removed when writing out a final
88 SBoM
89 """
90
91 CLOSED = True
92 INTERNAL = True
93
94 @classmethod
95 def _register_props(cls):
96 super()._register_props()
97 cls._add_property(
98 "alias",
99 oe.spdx30.StringProp(),
100 OE_SPDX_BASE + "alias",
101 max_count=1,
102 )
103
104 cls._add_property(
105 "link_name",
106 oe.spdx30.StringProp(),
107 OE_SPDX_BASE + "link-name",
108 max_count=1,
109 )
110
111
112@oe.spdx30.register(OE_SPDX_BASE + "file-name-alias")
113class OEFileNameAliasExtension(oe.spdx30.extension_Extension):
114 CLOSED = True
115 INTERNAL = True
116
117 @classmethod
118 def _register_props(cls):
119 super()._register_props()
120 cls._add_property(
121 "aliases",
122 oe.spdx30.ListProp(oe.spdx30.StringProp()),
123 OE_SPDX_BASE + "filename-alias",
124 )
125
126
127@oe.spdx30.register(OE_SPDX_BASE + "license-scanned")
128class OELicenseScannedExtension(oe.spdx30.extension_Extension):
129 """
130 The presence of this extension means the file has already been scanned for
131 license information
132 """
133
134 CLOSED = True
135 INTERNAL = True
136
137
138@oe.spdx30.register(OE_SPDX_BASE + "document-extension")
139class OEDocumentExtension(oe.spdx30.extension_Extension):
140 """
141 This extension is added to a SpdxDocument to indicate various useful bits
142 of information about its contents
143 """
144
145 CLOSED = True
146
147 @classmethod
148 def _register_props(cls):
149 super()._register_props()
150 cls._add_property(
151 "is_native",
152 oe.spdx30.BooleanProp(),
153 OE_SPDX_BASE + "is-native",
154 max_count=1,
155 )
156
157
158def spdxid_hash(*items):
159 h = hashlib.md5()
160 for i in items:
161 if isinstance(i, oe.spdx30.Element):
162 h.update(i._id.encode("utf-8"))
163 else:
164 h.update(i.encode("utf-8"))
165 return h.hexdigest()
166
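# Illustrative note (not part of the patch): spdxid_hash() reduces any mix of
# Elements and plain strings to a stable md5 hex digest, e.g.
# spdxid_hash("annotation", "review") returns the same 32-character string on
# every run, which keeps the SPDX IDs derived from it reproducible.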
167
168def spdx_sde(d):
169 sde = d.getVar("SOURCE_DATE_EPOCH")
170 if not sde:
171 return datetime.now(timezone.utc)
172
173 return datetime.fromtimestamp(int(sde), timezone.utc)
174
175
176def get_element_link_id(e):
177 """
178 Get the string ID which should be used to link to an Element. If the
179 element has an alias, that will be preferred, otherwise its SPDX ID will be
180 used.
181 """
182 ext = get_alias(e)
183 if ext is not None and ext.alias:
184 return ext.alias
185 return e._id
186
187
188def set_alias(obj, alias):
189 for ext in obj.extension:
190 if not isinstance(ext, OEIdAliasExtension):
191 continue
192 ext.alias = alias
193 return ext
194
195 ext = OEIdAliasExtension(alias=alias)
196 obj.extension.append(ext)
197 return ext
198
199
200def get_alias(obj):
201 for ext in obj.extension:
202 if not isinstance(ext, OEIdAliasExtension):
203 continue
204 return ext
205
206 return None
207
208
209def to_list(l):
210 if isinstance(l, set):
211 l = sorted(list(l))
212
213 if not isinstance(l, (list, tuple)):
214 raise TypeError("Must be a list or tuple. Got %s" % type(l))
215
216 return l
217
218
219class ObjectSet(oe.spdx30.SHACLObjectSet):
220 def __init__(self, d):
221 super().__init__()
222 self.d = d
223
224 def create_index(self):
225 self.by_sha256_hash = {}
226 super().create_index()
227
228 def add_index(self, obj):
229 # Check that all elements are given an ID before being inserted
230 if isinstance(obj, oe.spdx30.Element):
231 if not obj._id:
232 raise ValueError("Element missing ID")
233 for ext in obj.extension:
234 if not isinstance(ext, OEIdAliasExtension):
235 continue
236 if ext.alias:
237 self.obj_by_id[ext.alias] = obj
238
239 for v in obj.verifiedUsing:
240 if not isinstance(v, oe.spdx30.Hash):
241 continue
242
243                if v.algorithm != oe.spdx30.HashAlgorithm.sha256:
244 continue
245
246 self.by_sha256_hash.setdefault(v.hashValue, set()).add(obj)
247
248 super().add_index(obj)
249 if isinstance(obj, oe.spdx30.SpdxDocument):
250 self.doc = obj
251
252 def __filter_obj(self, obj, attr_filter):
253 return all(getattr(obj, k) == v for k, v in attr_filter.items())
254
255 def foreach_filter(self, typ, *, match_subclass=True, **attr_filter):
256 for obj in self.foreach_type(typ, match_subclass=match_subclass):
257 if self.__filter_obj(obj, attr_filter):
258 yield obj
259
260 def find_filter(self, typ, *, match_subclass=True, **attr_filter):
261 for obj in self.foreach_filter(
262 typ, match_subclass=match_subclass, **attr_filter
263 ):
264 return obj
265 return None
266
267 def foreach_root(self, typ, **attr_filter):
268 for obj in self.doc.rootElement:
269 if not isinstance(obj, typ):
270 continue
271
272 if self.__filter_obj(obj, attr_filter):
273 yield obj
274
275 def find_root(self, typ, **attr_filter):
276 for obj in self.foreach_root(typ, **attr_filter):
277 return obj
278 return None
279
280 def add_root(self, obj):
281 self.add(obj)
282 self.doc.rootElement.append(obj)
283 return obj
284
285 def is_native(self):
286 for e in self.doc.extension:
287 if not isinstance(e, oe.sbom30.OEDocumentExtension):
288 continue
289
290 if e.is_native is not None:
291 return e.is_native
292
293 return False
294
295 def set_is_native(self, is_native):
296 for e in self.doc.extension:
297 if not isinstance(e, oe.sbom30.OEDocumentExtension):
298 continue
299
300 e.is_native = is_native
301 return
302
303 if is_native:
304 self.doc.extension.append(oe.sbom30.OEDocumentExtension(is_native=True))
305
306 def add_aliases(self):
307 for o in self.foreach_type(oe.spdx30.Element):
308 if not o._id or o._id.startswith("_:"):
309 continue
310
311 alias_ext = get_alias(o)
312 if alias_ext is None:
313 unihash = self.d.getVar("BB_UNIHASH")
314 namespace = self.get_namespace()
315 if unihash not in o._id:
316 bb.warn(f"Unihash {unihash} not found in {o._id}")
317 elif namespace not in o._id:
318 bb.warn(f"Namespace {namespace} not found in {o._id}")
319 else:
320 alias_ext = set_alias(
321 o,
322 o._id.replace(unihash, "UNIHASH").replace(
323 namespace, self.d.getVar("PN")
324 ),
325 )
326
327 def remove_internal_extensions(self):
328 def remove(o):
329 o.extension = [e for e in o.extension if not getattr(e, "INTERNAL", False)]
330
331 for o in self.foreach_type(oe.spdx30.Element):
332 remove(o)
333
334 if self.doc:
335 remove(self.doc)
336
337 def get_namespace(self):
338 namespace_uuid = uuid.uuid5(
339 uuid.NAMESPACE_DNS, self.d.getVar("SPDX_UUID_NAMESPACE")
340 )
341 pn = self.d.getVar("PN")
342 return "%s/%s-%s" % (
343 self.d.getVar("SPDX_NAMESPACE_PREFIX"),
344 pn,
345 str(uuid.uuid5(namespace_uuid, pn)),
346 )
347
348 def new_spdxid(self, *suffix, include_unihash=True):
349 items = [self.get_namespace()]
350 if include_unihash:
351 unihash = self.d.getVar("BB_UNIHASH")
352 items.append(unihash)
353 items.extend(re.sub(r"[^a-zA-Z0-9_-]", "_", s) for s in suffix)
354 return "/".join(items)
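    # Illustrative example (hypothetical values, not part of the patch): with
    # SPDX_NAMESPACE_PREFIX "http://spdx.org/spdxdocs", PN "busybox" and a
    # unihash of "abc123...", new_spdxid("package", "busybox") produces roughly
    #   http://spdx.org/spdxdocs/busybox-<uuid5>/abc123.../package/busybox
    # i.e. the namespace, then the unihash, then the sanitized suffix parts.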
355
356 def new_import(self, key):
357 base = f"SPDX_IMPORTS_{key}"
358 spdxid = self.d.getVar(f"{base}_spdxid")
359 if not spdxid:
360 bb.fatal(f"{key} is not a valid SPDX_IMPORTS key")
361
362        for i in self.doc.imports:
363 if i.externalSpdxId == spdxid:
364 # Already imported
365 return spdxid
366
367 m = oe.spdx30.ExternalMap(externalSpdxId=spdxid)
368
369 uri = self.d.getVar(f"{base}_uri")
370 if uri:
371 m.locationHint = uri
372
373 for pyname, algorithm in oe.spdx30.HashAlgorithm.NAMED_INDIVIDUALS.items():
374 value = self.d.getVar(f"{base}_hash_{pyname}")
375 if value:
376 m.verifiedUsing.append(
377 oe.spdx30.Hash(
378 algorithm=algorithm,
379 hashValue=value,
380 )
381 )
382
383 self.doc.imports.append(m)
384 return spdxid
385
386 def new_agent(self, varname, *, creation_info=None, add=True):
387 ref_varname = self.d.getVar(f"{varname}_ref")
388 if ref_varname:
389 if ref_varname == varname:
390 bb.fatal(f"{varname} cannot reference itself")
391            return self.new_agent(ref_varname, creation_info=creation_info)
392
393 import_key = self.d.getVar(f"{varname}_import")
394 if import_key:
395 return self.new_import(import_key)
396
397 name = self.d.getVar(f"{varname}_name")
398 if not name:
399 return None
400
401 spdxid = self.new_spdxid("agent", name)
402 agent = self.find_by_id(spdxid)
403 if agent is not None:
404 return agent
405
406 agent_type = self.d.getVar("%s_type" % varname)
407 if agent_type == "person":
408 agent = oe.spdx30.Person()
409 elif agent_type == "software":
410 agent = oe.spdx30.SoftwareAgent()
411 elif agent_type == "organization":
412 agent = oe.spdx30.Organization()
413 elif not agent_type or agent_type == "agent":
414 agent = oe.spdx30.Agent()
415 else:
416 bb.fatal("Unknown agent type '%s' in %s_type" % (agent_type, varname))
417
418 agent._id = spdxid
419 agent.creationInfo = creation_info or self.doc.creationInfo
420 agent.name = name
421
422 comment = self.d.getVar("%s_comment" % varname)
423 if comment:
424 agent.comment = comment
425
426 for (
427 pyname,
428 idtype,
429 ) in oe.spdx30.ExternalIdentifierType.NAMED_INDIVIDUALS.items():
430 value = self.d.getVar("%s_id_%s" % (varname, pyname))
431 if value:
432 agent.externalIdentifier.append(
433 oe.spdx30.ExternalIdentifier(
434 externalIdentifierType=idtype,
435 identifier=value,
436 )
437 )
438
439 if add:
440 self.add(agent)
441
442 return agent
443
444 def new_creation_info(self):
445 creation_info = oe.spdx30.CreationInfo()
446
447 name = "%s %s" % (
448 self.d.getVar("SPDX_TOOL_NAME"),
449 self.d.getVar("SPDX_TOOL_VERSION"),
450 )
451 tool = self.add(
452 oe.spdx30.Tool(
453 _id=self.new_spdxid("tool", name),
454 creationInfo=creation_info,
455 name=name,
456 )
457 )
458
459 authors = []
460 for a in self.d.getVar("SPDX_AUTHORS").split():
461 varname = "SPDX_AUTHORS_%s" % a
462 author = self.new_agent(varname, creation_info=creation_info)
463
464 if not author:
465 bb.fatal("Unable to find or create author %s" % a)
466
467 authors.append(author)
468
469 creation_info.created = spdx_sde(self.d)
470 creation_info.specVersion = self.d.getVar("SPDX_VERSION")
471 creation_info.createdBy = authors
472 creation_info.createdUsing = [tool]
473
474 return creation_info
475
476 def copy_creation_info(self, copy):
477 c = oe.spdx30.CreationInfo(
478 created=spdx_sde(self.d),
479 specVersion=self.d.getVar("SPDX_VERSION"),
480 )
481
482 for author in copy.createdBy:
483 if isinstance(author, str):
484 c.createdBy.append(author)
485 else:
486 c.createdBy.append(author._id)
487
488 for tool in copy.createdUsing:
489 if isinstance(tool, str):
490 c.createdUsing.append(tool)
491 else:
492 c.createdUsing.append(tool._id)
493
494 return c
495
496 def new_annotation(self, subject, comment, typ):
497 return self.add(
498 oe.spdx30.Annotation(
499 _id=self.new_spdxid("annotation", spdxid_hash(comment, typ)),
500 creationInfo=self.doc.creationInfo,
501 annotationType=typ,
502 subject=subject,
503 statement=comment,
504 )
505 )
506
507 def _new_relationship(
508 self,
509 cls,
510 from_,
511 typ,
512 to,
513 *,
514 spdxid_name="relationship",
515 **props,
516 ):
517 from_ = to_list(from_)
518 to = to_list(to)
519
520 if not from_:
521 return []
522
523 if not to:
524 # TODO: Switch to the code constant once SPDX 3.0.1 is released
525 to = ["https://spdx.org/rdf/3.0.0/terms/Core/NoneElement"]
526
527 ret = []
528
529 for f in from_:
530 hash_args = [typ, f]
531 for k in sorted(props.keys()):
532 hash_args.append(props[k])
533 hash_args.extend(to)
534
535 relationship = self.add(
536 cls(
537 _id=self.new_spdxid(spdxid_name, spdxid_hash(*hash_args)),
538 creationInfo=self.doc.creationInfo,
539 from_=f,
540 relationshipType=typ,
541 to=to,
542 **props,
543 )
544 )
545 ret.append(relationship)
546
547 return ret
548
549 def new_relationship(self, from_, typ, to):
550 return self._new_relationship(oe.spdx30.Relationship, from_, typ, to)
551
552 def new_scoped_relationship(self, from_, typ, scope, to):
553 return self._new_relationship(
554 oe.spdx30.LifecycleScopedRelationship,
555 from_,
556 typ,
557 to,
558 scope=scope,
559 )
560
561 def new_license_expression(self, license_expression, license_data, license_text_map={}):
562 license_list_version = license_data["licenseListVersion"]
563 # SPDX 3 requires that the license list version be a semver
564 # MAJOR.MINOR.MICRO, but the actual license version might be
565 # MAJOR.MINOR on some older versions. As such, manually append a .0
566        # micro version if it's missing to keep SPDX happy
567 if license_list_version.count(".") < 2:
568 license_list_version += ".0"
569
570 spdxid = [
571 "license",
572 license_list_version,
573 re.sub(r"[^a-zA-Z0-9_-]", "_", license_expression),
574 ]
575
576        license_text = [
577            (k, license_text_map[k]) for k in sorted(license_text_map.keys())
578        ]
579
580 if not license_text:
581 lic = self.find_filter(
582 oe.spdx30.simplelicensing_LicenseExpression,
583 simplelicensing_licenseExpression=license_expression,
584 simplelicensing_licenseListVersion=license_list_version,
585 )
586 if lic is not None:
587 return lic
588 else:
589 spdxid.append(spdxid_hash(*(v for _, v in license_text)))
590 lic = self.find_by_id(self.new_spdxid(*spdxid))
591 if lic is not None:
592 return lic
593
594 lic = self.add(
595 oe.spdx30.simplelicensing_LicenseExpression(
596 _id=self.new_spdxid(*spdxid),
597 creationInfo=self.doc.creationInfo,
598 simplelicensing_licenseExpression=license_expression,
599 simplelicensing_licenseListVersion=license_list_version,
600 )
601 )
602
603 for key, value in license_text:
604 lic.simplelicensing_customIdToUri.append(
605 oe.spdx30.DictionaryEntry(key=key, value=value)
606 )
607
608 return lic
609
610 def scan_declared_licenses(self, spdx_file, filepath, license_data):
611 for e in spdx_file.extension:
612 if isinstance(e, OELicenseScannedExtension):
613 return
614
615 file_licenses = set()
616 for extracted_lic in oe.spdx_common.extract_licenses(filepath):
617 file_licenses.add(self.new_license_expression(extracted_lic, license_data))
618
619 self.new_relationship(
620 [spdx_file],
621 oe.spdx30.RelationshipType.hasDeclaredLicense,
622 file_licenses,
623 )
624 spdx_file.extension.append(OELicenseScannedExtension())
625
626 def new_file(self, _id, name, path, *, purposes=[]):
627 sha256_hash = bb.utils.sha256_file(path)
628
629 for f in self.by_sha256_hash.get(sha256_hash, []):
630            if not isinstance(f, oe.spdx30.software_File):
631 continue
632
633 if purposes:
634 new_primary = purposes[0]
635 new_additional = []
636
637 if f.software_primaryPurpose:
638 new_additional.append(f.software_primaryPurpose)
639 new_additional.extend(f.software_additionalPurpose)
640
641 new_additional = sorted(
642 list(set(p for p in new_additional if p != new_primary))
643 )
644
645 f.software_primaryPurpose = new_primary
646 f.software_additionalPurpose = new_additional
647
648 if f.name != name:
649 for e in f.extension:
650 if isinstance(e, OEFileNameAliasExtension):
651 e.aliases.append(name)
652 break
653 else:
654 f.extension.append(OEFileNameAliasExtension(aliases=[name]))
655
656 return f
657
658 spdx_file = oe.spdx30.software_File(
659 _id=_id,
660 creationInfo=self.doc.creationInfo,
661 name=name,
662 )
663 if purposes:
664 spdx_file.software_primaryPurpose = purposes[0]
665 spdx_file.software_additionalPurpose = purposes[1:]
666
667 spdx_file.verifiedUsing.append(
668 oe.spdx30.Hash(
669 algorithm=oe.spdx30.HashAlgorithm.sha256,
670 hashValue=sha256_hash,
671 )
672 )
673
674 return self.add(spdx_file)
675
676 def new_cve_vuln(self, cve):
677 v = oe.spdx30.security_Vulnerability()
678 v._id = self.new_spdxid("vulnerability", cve)
679 v.creationInfo = self.doc.creationInfo
680
681 v.externalIdentifier.append(
682 oe.spdx30.ExternalIdentifier(
683 externalIdentifierType=oe.spdx30.ExternalIdentifierType.cve,
684 identifier=cve,
685 identifierLocator=[
686 f"https://cveawg.mitre.org/api/cve/{cve}",
687 f"https://www.cve.org/CVERecord?id={cve}",
688 ],
689 )
690 )
691 return self.add(v)
692
693 def new_vex_patched_relationship(self, from_, to):
694 return self._new_relationship(
695 oe.spdx30.security_VexFixedVulnAssessmentRelationship,
696 from_,
697 oe.spdx30.RelationshipType.fixedIn,
698 to,
699 spdxid_name="vex-fixed",
700 security_vexVersion=VEX_VERSION,
701 )
702
703 def new_vex_unpatched_relationship(self, from_, to):
704 return self._new_relationship(
705 oe.spdx30.security_VexAffectedVulnAssessmentRelationship,
706 from_,
707 oe.spdx30.RelationshipType.affects,
708 to,
709 spdxid_name="vex-affected",
710 security_vexVersion=VEX_VERSION,
711 )
712
713 def new_vex_ignored_relationship(self, from_, to, *, impact_statement):
714 return self._new_relationship(
715 oe.spdx30.security_VexNotAffectedVulnAssessmentRelationship,
716 from_,
717 oe.spdx30.RelationshipType.doesNotAffect,
718 to,
719 spdxid_name="vex-not-affected",
720 security_vexVersion=VEX_VERSION,
721 security_impactStatement=impact_statement,
722 )
723
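    # Illustrative usage (not part of the patch): given a package element pkg
    # and v = new_cve_vuln("CVE-2024-12345"), a CVE status would typically be
    # recorded with one of:
    #   new_vex_patched_relationship([v], [pkg])
    #   new_vex_unpatched_relationship([v], [pkg])
    #   new_vex_ignored_relationship([v], [pkg], impact_statement="why ignored")
    # i.e. the vulnerability on the from_ side, matching the fixedIn / affects /
    # doesNotAffect relationship types used above.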
724 def import_bitbake_build_objset(self):
725 deploy_dir_spdx = Path(self.d.getVar("DEPLOY_DIR_SPDX"))
726 bb_objset = load_jsonld(
727 self.d, deploy_dir_spdx / "bitbake.spdx.json", required=True
728 )
729 self.doc.imports.extend(bb_objset.doc.imports)
730 self.update(bb_objset.objects)
731
732 return bb_objset
733
734 def import_bitbake_build(self):
735 def find_bitbake_build(objset):
736 return objset.find_filter(
737 oe.spdx30.build_Build,
738 build_buildType=SPDX_BUILD_TYPE,
739 )
740
741 build = find_bitbake_build(self)
742 if build:
743 return build
744
745 bb_objset = self.import_bitbake_build_objset()
746 build = find_bitbake_build(bb_objset)
747 if build is None:
748 bb.fatal(f"No build found in {deploy_dir_spdx}")
749
750 return build
751
752 def new_task_build(self, name, typ):
753 current_task = self.d.getVar("BB_CURRENTTASK")
754 pn = self.d.getVar("PN")
755
756 build = self.add(
757 oe.spdx30.build_Build(
758 _id=self.new_spdxid("build", name),
759 creationInfo=self.doc.creationInfo,
760 name=f"{pn}:do_{current_task}:{name}",
761 build_buildType=f"{SPDX_BUILD_TYPE}/do_{current_task}/{typ}",
762 )
763 )
764
765 if self.d.getVar("SPDX_INCLUDE_BITBAKE_PARENT_BUILD") == "1":
766 bitbake_build = self.import_bitbake_build()
767
768 self.new_relationship(
769 [bitbake_build],
770 oe.spdx30.RelationshipType.ancestorOf,
771 [build],
772 )
773
774 if self.d.getVar("SPDX_INCLUDE_BUILD_VARIABLES") == "1":
775 for varname in sorted(self.d.keys()):
776 if varname.startswith("__"):
777 continue
778
779 value = self.d.getVar(varname, expand=False)
780
781 # TODO: Deal with non-string values
782 if not isinstance(value, str):
783 continue
784
785 build.build_parameters.append(
786 oe.spdx30.DictionaryEntry(key=varname, value=value)
787 )
788
789 return build
790
791 def new_archive(self, archive_name):
792 return self.add(
793 oe.spdx30.software_File(
794 _id=self.new_spdxid("archive", str(archive_name)),
795 creationInfo=self.doc.creationInfo,
796 name=str(archive_name),
797 software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.archive,
798 )
799 )
800
801 @classmethod
802 def new_objset(cls, d, name, copy_from_bitbake_doc=True):
803 objset = cls(d)
804
805 document = oe.spdx30.SpdxDocument(
806 _id=objset.new_spdxid("document", name),
807 name=name,
808 )
809 document.extension.append(OEIdAliasExtension())
810 document.extension.append(OELinkExtension(link_spdx_id=False))
811 objset.doc = document
812
813 if copy_from_bitbake_doc:
814 bb_objset = objset.import_bitbake_build_objset()
815 document.creationInfo = objset.copy_creation_info(
816 bb_objset.doc.creationInfo
817 )
818 else:
819 document.creationInfo = objset.new_creation_info()
820
821 return objset
822
823 def expand_collection(self, *, add_objectsets=[]):
824 """
825 Expands a collection to pull in all missing elements
826
827 Returns the set of ids that could not be found to link into the document
828 """
829 missing_spdxids = set()
830 imports = {e.externalSpdxId: e for e in self.doc.imports}
831
832 def merge_doc(other):
833 nonlocal imports
834
835 for e in other.doc.imports:
836                if e.externalSpdxId not in imports:
837 imports[e.externalSpdxId] = e
838
839 self.objects |= other.objects
840
841 for o in add_objectsets:
842 merge_doc(o)
843
844 needed_spdxids = self.link()
845 provided_spdxids = set(self.obj_by_id.keys())
846
847 while True:
848 import_spdxids = set(imports.keys())
849 searching_spdxids = (
850 needed_spdxids - provided_spdxids - missing_spdxids - import_spdxids
851 )
852 if not searching_spdxids:
853 break
854
855 spdxid = searching_spdxids.pop()
856 bb.debug(
857 1,
858 f"Searching for {spdxid}. Remaining: {len(searching_spdxids)}, Total: {len(provided_spdxids)}, Missing: {len(missing_spdxids)}, Imports: {len(import_spdxids)}",
859 )
860 dep_objset, dep_path = find_by_spdxid(self.d, spdxid)
861
862 if dep_objset:
863 dep_provided = set(dep_objset.obj_by_id.keys())
864 if spdxid not in dep_provided:
865 bb.fatal(f"{spdxid} not found in {dep_path}")
866 provided_spdxids |= dep_provided
867 needed_spdxids |= dep_objset.missing_ids
868 merge_doc(dep_objset)
869 else:
870 missing_spdxids.add(spdxid)
871
872 bb.debug(1, "Linking...")
873 missing = self.link()
874 if missing != missing_spdxids:
875 bb.fatal(
876 f"Linked document doesn't match missing SPDX ID list. Got: {missing}\nExpected: {missing_spdxids}"
877 )
878
879 self.doc.imports = sorted(imports.values(), key=lambda e: e.externalSpdxId)
880
881 return missing_spdxids
882
883
884def load_jsonld(d, path, required=False):
885 deserializer = oe.spdx30.JSONLDDeserializer()
886 objset = ObjectSet(d)
887 try:
888 with path.open("rb") as f:
889 deserializer.read(f, objset)
890 except FileNotFoundError:
891 if required:
892 bb.fatal("No SPDX document named %s found" % path)
893 return None
894
895 if not objset.doc:
896 bb.fatal("SPDX Document %s has no SPDXDocument element" % path)
897 return None
898
899 objset.objects.remove(objset.doc)
900 return objset
901
902
903def jsonld_arch_path(d, arch, subdir, name, deploydir=None):
904 if deploydir is None:
905 deploydir = Path(d.getVar("DEPLOY_DIR_SPDX"))
906 return deploydir / arch / subdir / (name + ".spdx.json")
907
908
909def jsonld_hash_path(_id):
910 h = hashlib.sha256(_id.encode("utf-8")).hexdigest()
911
912 return Path("by-spdxid-hash") / h[:2], h
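# Illustrative example (not part of the patch): for an SPDX ID whose sha256 hex
# digest starts with "ab12...", jsonld_hash_path() returns the tuple
# (Path("by-spdxid-hash/ab"), "ab12..."), so the corresponding symlink ends up
# at <DEPLOY_DIR_SPDX>/<arch>/by-spdxid-hash/ab/ab12... .spdx.json (see
# jsonld_arch_path() above and write_recipe_jsonld_doc() below).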
913
914
915def load_jsonld_by_arch(d, arch, subdir, name, *, required=False):
916 path = jsonld_arch_path(d, arch, subdir, name)
917 objset = load_jsonld(d, path, required=required)
918 if objset is not None:
919 return (objset, path)
920 return (None, None)
921
922
923def find_jsonld(d, subdir, name, *, required=False):
924 package_archs = d.getVar("SPDX_MULTILIB_SSTATE_ARCHS").split()
925 package_archs.reverse()
926
927 for arch in package_archs:
928 objset, path = load_jsonld_by_arch(d, arch, subdir, name)
929 if objset is not None:
930 return (objset, path)
931
932 if required:
933 bb.fatal("Could not find a %s SPDX document named %s" % (subdir, name))
934
935 return (None, None)
936
937
938def write_jsonld_doc(d, objset, dest):
939 if not isinstance(objset, ObjectSet):
940 bb.fatal("Only an ObjsetSet can be serialized")
941 return
942
943 if not objset.doc:
944 bb.fatal("ObjectSet is missing a SpdxDocument")
945 return
946
947 objset.doc.rootElement = sorted(list(set(objset.doc.rootElement)))
948 objset.doc.profileConformance = sorted(
949 list(
950 getattr(oe.spdx30.ProfileIdentifierType, p)
951 for p in d.getVar("SPDX_PROFILES").split()
952 )
953 )
954
955 dest.parent.mkdir(exist_ok=True, parents=True)
956
957 if d.getVar("SPDX_PRETTY") == "1":
958 serializer = oe.spdx30.JSONLDSerializer(
959 indent=2,
960 )
961 else:
962 serializer = oe.spdx30.JSONLDInlineSerializer()
963
964 objset.objects.add(objset.doc)
965 with dest.open("wb") as f:
966 serializer.write(objset, f, force_at_graph=True)
967 objset.objects.remove(objset.doc)
968
969
970def write_recipe_jsonld_doc(
971 d,
972 objset,
973 subdir,
974 deploydir,
975 *,
976 create_spdx_id_links=True,
977):
978 pkg_arch = d.getVar("SSTATE_PKGARCH")
979
980 dest = jsonld_arch_path(d, pkg_arch, subdir, objset.doc.name, deploydir=deploydir)
981
982 def link_id(_id):
983 hash_path = jsonld_hash_path(_id)
984
985 link_name = jsonld_arch_path(
986 d,
987 pkg_arch,
988 *hash_path,
989 deploydir=deploydir,
990 )
991 try:
992 link_name.parent.mkdir(exist_ok=True, parents=True)
993 link_name.symlink_to(os.path.relpath(dest, link_name.parent))
994        except Exception:
995 target = link_name.readlink()
996 bb.warn(
997 f"Unable to link {_id} in {dest} as {link_name}. Already points to {target}"
998 )
999 raise
1000
1001 return hash_path[-1]
1002
1003 objset.add_aliases()
1004
1005 try:
1006 if create_spdx_id_links:
1007 for o in objset.foreach_type(oe.spdx30.Element):
1008 if not o._id or o._id.startswith("_:"):
1009 continue
1010
1011 ext = None
1012 for e in o.extension:
1013 if not isinstance(e, OELinkExtension):
1014 continue
1015
1016 ext = e
1017 break
1018
1019 if ext is None:
1020 ext = OELinkExtension(link_spdx_id=True)
1021 o.extension.append(ext)
1022
1023 if ext.link_spdx_id:
1024 ext.link_name = link_id(o._id)
1025
1026 alias_ext = get_alias(o)
1027 if alias_ext is not None and alias_ext.alias:
1028 alias_ext.link_name = link_id(alias_ext.alias)
1029
1030 finally:
1031 # It is really helpful for debugging if the JSON document is written
1032 # out, so always do that even if there is an error making the links
1033 write_jsonld_doc(d, objset, dest)
1034
1035
1036def find_root_obj_in_jsonld(d, subdir, fn_name, obj_type, **attr_filter):
1037 objset, fn = find_jsonld(d, subdir, fn_name, required=True)
1038
1039 spdx_obj = objset.find_root(obj_type, **attr_filter)
1040 if not spdx_obj:
1041 bb.fatal("No root %s found in %s" % (obj_type.__name__, fn))
1042
1043 return spdx_obj, objset
1044
1045
1046def load_obj_in_jsonld(d, arch, subdir, fn_name, obj_type, **attr_filter):
1047 objset, fn = load_jsonld_by_arch(d, arch, subdir, fn_name, required=True)
1048
1049 spdx_obj = objset.find_filter(obj_type, **attr_filter)
1050 if not spdx_obj:
1051 bb.fatal("No %s found in %s" % (obj_type.__name__, fn))
1052
1053 return spdx_obj, objset
1054
1055
1056def find_by_spdxid(d, spdxid, *, required=False):
1057 return find_jsonld(d, *jsonld_hash_path(spdxid), required=required)
1058
1059
1060def create_sbom(d, name, root_elements, add_objectsets=[]):
1061 objset = ObjectSet.new_objset(d, name)
1062
1063 sbom = objset.add(
1064 oe.spdx30.software_Sbom(
1065 _id=objset.new_spdxid("sbom", name),
1066 name=name,
1067 creationInfo=objset.doc.creationInfo,
1068 software_sbomType=[oe.spdx30.software_SbomType.build],
1069 rootElement=root_elements,
1070 )
1071 )
1072
1073 missing_spdxids = objset.expand_collection(add_objectsets=add_objectsets)
1074 if missing_spdxids:
1075 bb.warn(
1076 "The following SPDX IDs were unable to be resolved:\n "
1077 + "\n ".join(sorted(list(missing_spdxids)))
1078 )
1079
1080 # Filter out internal extensions from final SBoMs
1081 objset.remove_internal_extensions()
1082
1083 # SBoM should be the only root element of the document
1084 objset.doc.rootElement = [sbom]
1085
1086 # De-duplicate licenses
1087 unique = set()
1088 dedup = {}
1089 for lic in objset.foreach_type(oe.spdx30.simplelicensing_LicenseExpression):
1090 for u in unique:
1091 if (
1092 u.simplelicensing_licenseExpression
1093 == lic.simplelicensing_licenseExpression
1094 and u.simplelicensing_licenseListVersion
1095 == lic.simplelicensing_licenseListVersion
1096 ):
1097 dedup[lic] = u
1098 break
1099 else:
1100 unique.add(lic)
1101
1102 if dedup:
1103 for rel in objset.foreach_filter(
1104 oe.spdx30.Relationship,
1105 relationshipType=oe.spdx30.RelationshipType.hasDeclaredLicense,
1106 ):
1107 rel.to = [dedup.get(to, to) for to in rel.to]
1108
1109 for rel in objset.foreach_filter(
1110 oe.spdx30.Relationship,
1111 relationshipType=oe.spdx30.RelationshipType.hasConcludedLicense,
1112 ):
1113 rel.to = [dedup.get(to, to) for to in rel.to]
1114
1115 for k, v in dedup.items():
1116 bb.debug(1, f"Removing duplicate License {k._id} -> {v._id}")
1117 objset.objects.remove(k)
1118
1119 objset.create_index()
1120
1121 return objset, sbom
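Taken together, the intended flow for a recipe task is: create an ObjectSet bound to the BitBake datastore, add elements and relationships, then write the document under DEPLOY_DIR_SPDX so the by-spdxid-hash symlinks above are created. A minimal sketch of that flow, assuming it runs inside a BitBake task after the shared bitbake.spdx.json document has been written and with the usual SPDX_* variables set; the file path, the "recipes" subdirectory and the hasInput relationship type are illustrative choices, not something this patch prescribes:

    import oe.sbom30
    import oe.spdx30

    def write_example_sbom(d, deploydir):
        # New document; creationInfo is copied from the shared bitbake document
        objset = oe.sbom30.ObjectSet.new_objset(d, "recipe-" + d.getVar("PN"))

        # A build element for the current task, plus one file it consumed
        build = objset.new_task_build("example", "example")
        src = objset.new_file(
            objset.new_spdxid("sourcefile", "hello.c"),
            "hello.c",
            d.getVar("S") + "/hello.c",   # hypothetical input file
            purposes=[oe.spdx30.software_SoftwarePurpose.source],
        )
        objset.new_relationship(
            [build], oe.spdx30.RelationshipType.hasInput, [src]
        )

        # Serialize and create the hash-based symlinks in the deploy directory
        oe.sbom30.write_recipe_jsonld_doc(d, objset, "recipes", deploydir)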
diff --git a/meta/lib/oe/spdx30.py b/meta/lib/oe/spdx30.py
new file mode 100644
index 0000000000..ae74ce36f4
--- /dev/null
+++ b/meta/lib/oe/spdx30.py
@@ -0,0 +1,6020 @@
1#! /usr/bin/env python3
2#
3# Generated Python bindings from a SHACL model
4#
5# This file was automatically generated by shacl2code. DO NOT MANUALLY MODIFY IT
6#
7# SPDX-License-Identifier: MIT
8
9import functools
10import hashlib
11import json
12import re
13import sys
14import threading
15import time
16from contextlib import contextmanager
17from datetime import datetime, timezone, timedelta
18from enum import Enum
19from abc import ABC, abstractmethod
20
21
22def check_type(obj, types):
23 if not isinstance(obj, types):
24 if isinstance(types, (list, tuple)):
25 raise TypeError(
26 f"Value must be one of type: {', '.join(t.__name__ for t in types)}. Got {type(obj)}"
27 )
28 raise TypeError(f"Value must be of type {types.__name__}. Got {type(obj)}")
29
30
31class Property(ABC):
32 """
33 A generic SHACL object property. The different types will derive from this
34 class
35 """
36
37 def __init__(self, *, pattern=None):
38 self.pattern = pattern
39
40 def init(self):
41 return None
42
43 def validate(self, value):
44 check_type(value, self.VALID_TYPES)
45 if self.pattern is not None and not re.search(
46 self.pattern, self.to_string(value)
47 ):
48 raise ValueError(
49 f"Value is not correctly formatted. Got '{self.to_string(value)}'"
50 )
51
52 def set(self, value):
53 return value
54
55 def check_min_count(self, value, min_count):
56 return min_count == 1
57
58 def check_max_count(self, value, max_count):
59 return max_count == 1
60
61 def elide(self, value):
62 return value is None
63
64 def walk(self, value, callback, path):
65 callback(value, path)
66
67 def iter_objects(self, value, recursive, visited):
68 return []
69
70 def link_prop(self, value, objectset, missing, visited):
71 return value
72
73 def to_string(self, value):
74 return str(value)
75
76 @abstractmethod
77 def encode(self, encoder, value, state):
78 pass
79
80 @abstractmethod
81 def decode(self, decoder, *, objectset=None):
82 pass
83
84
85class StringProp(Property):
86 """
87 A scalar string property for an SHACL object
88 """
89
90 VALID_TYPES = str
91
92 def set(self, value):
93 return str(value)
94
95 def encode(self, encoder, value, state):
96 encoder.write_string(value)
97
98 def decode(self, decoder, *, objectset=None):
99 return decoder.read_string()
100
101
102class AnyURIProp(StringProp):
103 def encode(self, encoder, value, state):
104 encoder.write_iri(value)
105
106 def decode(self, decoder, *, objectset=None):
107 return decoder.read_iri()
108
109
110class DateTimeProp(Property):
111 """
112 A Date/Time Object with optional timezone
113 """
114
115 VALID_TYPES = datetime
116 UTC_FORMAT_STR = "%Y-%m-%dT%H:%M:%SZ"
117 REGEX = r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(Z|[+-]\d{2}:\d{2})?$"
118
119 def set(self, value):
120 return self._normalize(value)
121
122 def encode(self, encoder, value, state):
123 encoder.write_datetime(self.to_string(value))
124
125 def decode(self, decoder, *, objectset=None):
126 s = decoder.read_datetime()
127 if s is None:
128 return None
129 v = self.from_string(s)
130 return self._normalize(v)
131
132 def _normalize(self, value):
133 if value.utcoffset() is None:
134 value = value.astimezone()
135 offset = value.utcoffset()
136 if offset % timedelta(minutes=1):
137 offset = offset - (offset % timedelta(minutes=1))
138 value = value.replace(tzinfo=timezone(offset))
139 value = value.replace(microsecond=0)
140 return value
141
142 def to_string(self, value):
143 value = self._normalize(value)
144 if value.tzinfo == timezone.utc:
145 return value.strftime(self.UTC_FORMAT_STR)
146 return value.isoformat()
147
148 def from_string(self, value):
149 if not re.match(self.REGEX, value):
150 raise ValueError(f"'{value}' is not a correctly formatted datetime")
151 if "Z" in value:
152 d = datetime(
153 *(time.strptime(value, self.UTC_FORMAT_STR)[0:6]),
154 tzinfo=timezone.utc,
155 )
156 else:
157 d = datetime.fromisoformat(value)
158
159 return self._normalize(d)
160
161
162class DateTimeStampProp(DateTimeProp):
163 """
164 A Date/Time Object with required timestamp
165 """
166
167 REGEX = r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(Z|[+-]\d{2}:\d{2})$"
168
169
170class IntegerProp(Property):
171 VALID_TYPES = int
172
173 def set(self, value):
174 return int(value)
175
176 def encode(self, encoder, value, state):
177 encoder.write_integer(value)
178
179 def decode(self, decoder, *, objectset=None):
180 return decoder.read_integer()
181
182
183class PositiveIntegerProp(IntegerProp):
184 def validate(self, value):
185 super().validate(value)
186 if value < 1:
187 raise ValueError(f"Value must be >=1. Got {value}")
188
189
190class NonNegativeIntegerProp(IntegerProp):
191 def validate(self, value):
192 super().validate(value)
193 if value < 0:
194 raise ValueError(f"Value must be >= 0. Got {value}")
195
196
197class BooleanProp(Property):
198 VALID_TYPES = bool
199
200 def set(self, value):
201 return bool(value)
202
203 def encode(self, encoder, value, state):
204 encoder.write_bool(value)
205
206 def decode(self, decoder, *, objectset=None):
207 return decoder.read_bool()
208
209
210class FloatProp(Property):
211 VALID_TYPES = (float, int)
212
213 def set(self, value):
214 return float(value)
215
216 def encode(self, encoder, value, state):
217 encoder.write_float(value)
218
219 def decode(self, decoder, *, objectset=None):
220 return decoder.read_float()
221
222
223class ObjectProp(Property):
224 """
225 A scalar SHACL object property of a SHACL object
226 """
227
228 def __init__(self, cls, required):
229 super().__init__()
230 self.cls = cls
231 self.required = required
232
233 def init(self):
234 if self.required and not self.cls.IS_ABSTRACT:
235 return self.cls()
236 return None
237
238 def validate(self, value):
239 check_type(value, (self.cls, str))
240
241 def walk(self, value, callback, path):
242 if value is None:
243 return
244
245 if not isinstance(value, str):
246 value.walk(callback, path)
247 else:
248 callback(value, path)
249
250 def iter_objects(self, value, recursive, visited):
251 if value is None or isinstance(value, str):
252 return
253
254 if value not in visited:
255 visited.add(value)
256 yield value
257
258 if recursive:
259 for c in value.iter_objects(recursive=True, visited=visited):
260 yield c
261
262 def encode(self, encoder, value, state):
263 if value is None:
264 raise ValueError("Object cannot be None")
265
266 if isinstance(value, str):
267 value = _NI_ENCODE_CONTEXT.get(value, value)
268 encoder.write_iri(value)
269 return
270
271 return value.encode(encoder, state)
272
273 def decode(self, decoder, *, objectset=None):
274 iri = decoder.read_iri()
275 if iri is None:
276 return self.cls.decode(decoder, objectset=objectset)
277
278 iri = _NI_DECODE_CONTEXT.get(iri, iri)
279
280 if objectset is None:
281 return iri
282
283 obj = objectset.find_by_id(iri)
284 if obj is None:
285 return iri
286
287 self.validate(obj)
288 return obj
289
290 def link_prop(self, value, objectset, missing, visited):
291 if value is None:
292 return value
293
294 if isinstance(value, str):
295 o = objectset.find_by_id(value)
296 if o is not None:
297 self.validate(o)
298 return o
299
300 if missing is not None:
301 missing.add(value)
302
303 return value
304
305 # De-duplicate IDs
306 if value._id:
307 value = objectset.find_by_id(value._id, value)
308 self.validate(value)
309
310 value.link_helper(objectset, missing, visited)
311 return value
312
313
314class ListProxy(object):
315 def __init__(self, prop, data=None):
316 if data is None:
317 self.__data = []
318 else:
319 self.__data = data
320 self.__prop = prop
321
322 def append(self, value):
323 self.__prop.validate(value)
324 self.__data.append(self.__prop.set(value))
325
326 def insert(self, idx, value):
327 self.__prop.validate(value)
328 self.__data.insert(idx, self.__prop.set(value))
329
330 def extend(self, items):
331 for i in items:
332 self.append(i)
333
334 def sort(self, *args, **kwargs):
335 self.__data.sort(*args, **kwargs)
336
337 def __getitem__(self, key):
338 return self.__data[key]
339
340 def __setitem__(self, key, value):
341 if isinstance(key, slice):
342 for v in value:
343 self.__prop.validate(v)
344 self.__data[key] = [self.__prop.set(v) for v in value]
345 else:
346 self.__prop.validate(value)
347 self.__data[key] = self.__prop.set(value)
348
349 def __delitem__(self, key):
350 del self.__data[key]
351
352 def __contains__(self, item):
353 return item in self.__data
354
355 def __iter__(self):
356 return iter(self.__data)
357
358 def __len__(self):
359 return len(self.__data)
360
361 def __str__(self):
362 return str(self.__data)
363
364 def __repr__(self):
365 return repr(self.__data)
366
367 def __eq__(self, other):
368 if isinstance(other, ListProxy):
369 return self.__data == other.__data
370
371 return self.__data == other
372
373
374class ListProp(Property):
375 """
376 A list of SHACL properties
377 """
378
379 VALID_TYPES = (list, ListProxy)
380
381 def __init__(self, prop):
382 super().__init__()
383 self.prop = prop
384
385 def init(self):
386 return ListProxy(self.prop)
387
388 def validate(self, value):
389 super().validate(value)
390
391 for i in value:
392 self.prop.validate(i)
393
394 def set(self, value):
395 if isinstance(value, ListProxy):
396 return value
397
398 return ListProxy(self.prop, [self.prop.set(d) for d in value])
399
400 def check_min_count(self, value, min_count):
401 check_type(value, ListProxy)
402 return len(value) >= min_count
403
404 def check_max_count(self, value, max_count):
405 check_type(value, ListProxy)
406 return len(value) <= max_count
407
408 def elide(self, value):
409 check_type(value, ListProxy)
410 return len(value) == 0
411
412 def walk(self, value, callback, path):
413 callback(value, path)
414 for idx, v in enumerate(value):
415 self.prop.walk(v, callback, path + [f"[{idx}]"])
416
417 def iter_objects(self, value, recursive, visited):
418 for v in value:
419 for c in self.prop.iter_objects(v, recursive, visited):
420 yield c
421
422 def link_prop(self, value, objectset, missing, visited):
423 if isinstance(value, ListProxy):
424 data = [self.prop.link_prop(v, objectset, missing, visited) for v in value]
425 else:
426 data = [self.prop.link_prop(v, objectset, missing, visited) for v in value]
427
428 return ListProxy(self.prop, data=data)
429
430 def encode(self, encoder, value, state):
431 check_type(value, ListProxy)
432
433 with encoder.write_list() as list_s:
434 for v in value:
435 with list_s.write_list_item() as item_s:
436 self.prop.encode(item_s, v, state)
437
438 def decode(self, decoder, *, objectset=None):
439 data = []
440 for val_d in decoder.read_list():
441 v = self.prop.decode(val_d, objectset=objectset)
442 self.prop.validate(v)
443 data.append(v)
444
445 return ListProxy(self.prop, data=data)
446
447
448class EnumProp(Property):
449 VALID_TYPES = str
450
451 def __init__(self, values, *, pattern=None):
452 super().__init__(pattern=pattern)
453 self.values = values
454
455 def validate(self, value):
456 super().validate(value)
457
458 valid_values = (iri for iri, _ in self.values)
459 if value not in valid_values:
460 raise ValueError(
461 f"'{value}' is not a valid value. Choose one of {' '.join(valid_values)}"
462 )
463
464 def encode(self, encoder, value, state):
465 for iri, compact in self.values:
466 if iri == value:
467 encoder.write_enum(value, self, compact)
468 return
469
470 encoder.write_enum(value, self)
471
472 def decode(self, decoder, *, objectset=None):
473 v = decoder.read_enum(self)
474 for iri, compact in self.values:
475 if v == compact:
476 return iri
477 return v
478
479
480class NodeKind(Enum):
481 BlankNode = 1
482 IRI = 2
483 BlankNodeOrIRI = 3
484
485
486def is_IRI(s):
487 if not isinstance(s, str):
488 return False
489 if s.startswith("_:"):
490 return False
491 if ":" not in s:
492 return False
493 return True
494
495
496def is_blank_node(s):
497 if not isinstance(s, str):
498 return False
499 if not s.startswith("_:"):
500 return False
501 return True
502
503
504def register(type_iri, *, compact_type=None, abstract=False):
505 def add_class(key, c):
506 assert (
507 key not in SHACLObject.CLASSES
508 ), f"{key} already registered to {SHACLObject.CLASSES[key].__name__}"
509 SHACLObject.CLASSES[key] = c
510
511 def decorator(c):
512 global NAMED_INDIVIDUALS
513
514 assert issubclass(
515 c, SHACLObject
516 ), f"{c.__name__} is not derived from SHACLObject"
517
518 c._OBJ_TYPE = type_iri
519 c.IS_ABSTRACT = abstract
520 add_class(type_iri, c)
521
522 c._OBJ_COMPACT_TYPE = compact_type
523 if compact_type:
524 add_class(compact_type, c)
525
526 NAMED_INDIVIDUALS |= set(c.NAMED_INDIVIDUALS.values())
527
528 # Registration is deferred until the first instance of class is created
529 # so that it has access to any other defined class
530 c._NEEDS_REG = True
531 return c
532
533 return decorator
534
535
536register_lock = threading.Lock()
537NAMED_INDIVIDUALS = set()
538
539
540@functools.total_ordering
541class SHACLObject(object):
542 CLASSES = {}
543 NODE_KIND = NodeKind.BlankNodeOrIRI
544 ID_ALIAS = None
545 IS_ABSTRACT = True
546
547 def __init__(self, **kwargs):
548 if self._is_abstract():
549 raise NotImplementedError(
550 f"{self.__class__.__name__} is abstract and cannot be implemented"
551 )
552
553 with register_lock:
554 cls = self.__class__
555 if cls._NEEDS_REG:
556 cls._OBJ_PROPERTIES = {}
557 cls._OBJ_IRIS = {}
558 cls._register_props()
559 cls._NEEDS_REG = False
560
561 self.__dict__["_obj_data"] = {}
562 self.__dict__["_obj_metadata"] = {}
563
564 for iri, prop, _, _, _, _ in self.__iter_props():
565 self.__dict__["_obj_data"][iri] = prop.init()
566
567 for k, v in kwargs.items():
568 setattr(self, k, v)
569
570 def _is_abstract(self):
571 return self.__class__.IS_ABSTRACT
572
573 @classmethod
574 def _register_props(cls):
575 cls._add_property("_id", StringProp(), iri="@id")
576
577 @classmethod
578 def _add_property(
579 cls,
580 pyname,
581 prop,
582 iri,
583 min_count=None,
584 max_count=None,
585 compact=None,
586 ):
587 if pyname in cls._OBJ_IRIS:
588 raise KeyError(f"'{pyname}' is already defined for '{cls.__name__}'")
589 if iri in cls._OBJ_PROPERTIES:
590 raise KeyError(f"'{iri}' is already defined for '{cls.__name__}'")
591
592 while hasattr(cls, pyname):
593 pyname = pyname + "_"
594
595 pyname = sys.intern(pyname)
596 iri = sys.intern(iri)
597
598 cls._OBJ_IRIS[pyname] = iri
599 cls._OBJ_PROPERTIES[iri] = (prop, min_count, max_count, pyname, compact)
600
601 def __setattr__(self, name, value):
602 if name == self.ID_ALIAS:
603 self["@id"] = value
604 return
605
606 try:
607 iri = self._OBJ_IRIS[name]
608 self[iri] = value
609 except KeyError:
610 raise AttributeError(
611 f"'{name}' is not a valid property of {self.__class__.__name__}"
612 )
613
614 def __getattr__(self, name):
615 if name in self._OBJ_IRIS:
616 return self.__dict__["_obj_data"][self._OBJ_IRIS[name]]
617
618 if name == self.ID_ALIAS:
619 return self.__dict__["_obj_data"]["@id"]
620
621 if name == "_metadata":
622 return self.__dict__["_obj_metadata"]
623
624 if name == "_IRI":
625 return self._OBJ_IRIS
626
627 if name == "TYPE":
628 return self.__class__._OBJ_TYPE
629
630 if name == "COMPACT_TYPE":
631 return self.__class__._OBJ_COMPACT_TYPE
632
633 raise AttributeError(
634 f"'{name}' is not a valid property of {self.__class__.__name__}"
635 )
636
637 def __delattr__(self, name):
638 if name == self.ID_ALIAS:
639 del self["@id"]
640 return
641
642 try:
643 iri = self._OBJ_IRIS[name]
644 del self[iri]
645 except KeyError:
646 raise AttributeError(
647 f"'{name}' is not a valid property of {self.__class__.__name__}"
648 )
649
650 def __get_prop(self, iri):
651 if iri not in self._OBJ_PROPERTIES:
652 raise KeyError(
653 f"'{iri}' is not a valid property of {self.__class__.__name__}"
654 )
655
656 return self._OBJ_PROPERTIES[iri]
657
658 def __iter_props(self):
659 for iri, v in self._OBJ_PROPERTIES.items():
660 yield iri, *v
661
662 def __getitem__(self, iri):
663 return self.__dict__["_obj_data"][iri]
664
665 def __setitem__(self, iri, value):
666 if iri == "@id":
667 if self.NODE_KIND == NodeKind.BlankNode:
668 if not is_blank_node(value):
669 raise ValueError(
670 f"{self.__class__.__name__} ({id(self)}) can only have local reference. Property '{iri}' cannot be set to '{value}' and must start with '_:'"
671 )
672 elif self.NODE_KIND == NodeKind.IRI:
673 if not is_IRI(value):
674 raise ValueError(
675 f"{self.__class__.__name__} ({id(self)}) can only have an IRI value. Property '{iri}' cannot be set to '{value}'"
676 )
677 else:
678 if not is_blank_node(value) and not is_IRI(value):
679 raise ValueError(
680 f"{self.__class__.__name__} ({id(self)}) Has invalid Property '{iri}' '{value}'. Must be a blank node or IRI"
681 )
682
683 prop, _, _, _, _ = self.__get_prop(iri)
684 prop.validate(value)
685 self.__dict__["_obj_data"][iri] = prop.set(value)
686
687 def __delitem__(self, iri):
688 prop, _, _, _, _ = self.__get_prop(iri)
689 self.__dict__["_obj_data"][iri] = prop.init()
690
691 def __iter__(self):
692 return self._OBJ_PROPERTIES.keys()
693
694 def walk(self, callback, path=None):
695 """
696 Walk object tree, invoking the callback for each item
697
698 Callback has the form:
699
700 def callback(object, path):
701 """
702 if path is None:
703 path = ["."]
704
705 if callback(self, path):
706 for iri, prop, _, _, _, _ in self.__iter_props():
707 prop.walk(self.__dict__["_obj_data"][iri], callback, path + [f".{iri}"])
708
709 def property_keys(self):
710 for iri, _, _, _, pyname, compact in self.__iter_props():
711 if iri == "@id":
712 compact = self.ID_ALIAS
713 yield pyname, iri, compact
714
715 def iter_objects(self, *, recursive=False, visited=None):
716 """
717        Iterate over all objects that are children of this one
718 """
719 if visited is None:
720 visited = set()
721
722 for iri, prop, _, _, _, _ in self.__iter_props():
723 for c in prop.iter_objects(
724 self.__dict__["_obj_data"][iri], recursive=recursive, visited=visited
725 ):
726 yield c
727
728 def encode(self, encoder, state):
729 idname = self.ID_ALIAS or self._OBJ_IRIS["_id"]
730 if not self._id and self.NODE_KIND == NodeKind.IRI:
731 raise ValueError(
732 f"{self.__class__.__name__} ({id(self)}) must have a IRI for property '{idname}'"
733 )
734
735 if state.is_written(self):
736 encoder.write_iri(state.get_object_id(self))
737 return
738
739 state.add_written(self)
740
741 with encoder.write_object(
742 self,
743 state.get_object_id(self),
744 bool(self._id) or state.is_refed(self),
745 ) as obj_s:
746 self._encode_properties(obj_s, state)
747
748 def _encode_properties(self, encoder, state):
749 for iri, prop, min_count, max_count, pyname, compact in self.__iter_props():
750 value = self.__dict__["_obj_data"][iri]
751 if prop.elide(value):
752 if min_count:
753 raise ValueError(
754 f"Property '{pyname}' in {self.__class__.__name__} ({id(self)}) is required (currently {value!r})"
755 )
756 continue
757
758 if min_count is not None:
759 if not prop.check_min_count(value, min_count):
760 raise ValueError(
761 f"Property '{pyname}' in {self.__class__.__name__} ({id(self)}) requires a minimum of {min_count} elements"
762 )
763
764 if max_count is not None:
765 if not prop.check_max_count(value, max_count):
766 raise ValueError(
767 f"Property '{pyname}' in {self.__class__.__name__} ({id(self)}) requires a maximum of {max_count} elements"
768 )
769
770 if iri == self._OBJ_IRIS["_id"]:
771 continue
772
773 with encoder.write_property(iri, compact) as prop_s:
774 prop.encode(prop_s, value, state)
775
776 @classmethod
777 def _make_object(cls, typ):
778 if typ not in cls.CLASSES:
779 raise TypeError(f"Unknown type {typ}")
780
781 return cls.CLASSES[typ]()
782
783 @classmethod
784 def decode(cls, decoder, *, objectset=None):
785 typ, obj_d = decoder.read_object()
786 if typ is None:
787 raise TypeError("Unable to determine type for object")
788
789 obj = cls._make_object(typ)
790 for key in (obj.ID_ALIAS, obj._OBJ_IRIS["_id"]):
791 with obj_d.read_property(key) as prop_d:
792 if prop_d is None:
793 continue
794
795 _id = prop_d.read_iri()
796 if _id is None:
797 raise TypeError(f"Object key '{key}' is the wrong type")
798
799 obj._id = _id
800 break
801
802 if obj.NODE_KIND == NodeKind.IRI and not obj._id:
803 raise ValueError("Object is missing required IRI")
804
805 if objectset is not None:
806 if obj._id:
807 v = objectset.find_by_id(_id)
808 if v is not None:
809 return v
810
811 obj._decode_properties(obj_d, objectset=objectset)
812
813 if objectset is not None:
814 objectset.add_index(obj)
815 return obj
816
817 def _decode_properties(self, decoder, objectset=None):
818 for key in decoder.object_keys():
819 if not self._decode_prop(decoder, key, objectset=objectset):
820 raise KeyError(f"Unknown property '{key}'")
821
822 def _decode_prop(self, decoder, key, objectset=None):
823 if key in (self._OBJ_IRIS["_id"], self.ID_ALIAS):
824 return True
825
826 for iri, prop, _, _, _, compact in self.__iter_props():
827 if compact == key:
828 read_key = compact
829 elif iri == key:
830 read_key = iri
831 else:
832 continue
833
834 with decoder.read_property(read_key) as prop_d:
835 v = prop.decode(prop_d, objectset=objectset)
836 prop.validate(v)
837 self.__dict__["_obj_data"][iri] = v
838 return True
839
840 return False
841
842 def link_helper(self, objectset, missing, visited):
843 if self in visited:
844 return
845
846 visited.add(self)
847
848 for iri, prop, _, _, _, _ in self.__iter_props():
849 self.__dict__["_obj_data"][iri] = prop.link_prop(
850 self.__dict__["_obj_data"][iri],
851 objectset,
852 missing,
853 visited,
854 )
855
856 def __str__(self):
857 parts = [
858 f"{self.__class__.__name__}(",
859 ]
860 if self._id:
861 parts.append(f"@id='{self._id}'")
862 parts.append(")")
863 return "".join(parts)
864
865 def __hash__(self):
866 return super().__hash__()
867
868 def __eq__(self, other):
869 return super().__eq__(other)
870
871 def __lt__(self, other):
872 def sort_key(obj):
873 if isinstance(obj, str):
874 return (obj, "", "", "")
875 return (
876 obj._id or "",
877 obj.TYPE,
878 getattr(obj, "name", None) or "",
879 id(obj),
880 )
881
882 return sort_key(self) < sort_key(other)
883
884
885class SHACLExtensibleObject(object):
886 CLOSED = False
887
888 def __init__(self, typ=None, **kwargs):
889 if typ:
890 self.__dict__["_obj_TYPE"] = (typ, None)
891 else:
892 self.__dict__["_obj_TYPE"] = (self._OBJ_TYPE, self._OBJ_COMPACT_TYPE)
893 super().__init__(**kwargs)
894
895 def _is_abstract(self):
896 # Unknown classes are assumed to not be abstract so that they can be
897 # deserialized
898 typ = self.__dict__["_obj_TYPE"][0]
899 if typ in self.__class__.CLASSES:
900 return self.__class__.CLASSES[typ].IS_ABSTRACT
901
902 return False
903
904 @classmethod
905 def _make_object(cls, typ):
906 # Check for a known type, and if so, deserialize as that instead
907 if typ in cls.CLASSES:
908 return cls.CLASSES[typ]()
909
910 obj = cls(typ)
911 return obj
912
913 def _decode_properties(self, decoder, objectset=None):
914 if self.CLOSED:
915 super()._decode_properties(decoder, objectset=objectset)
916 return
917
918 for key in decoder.object_keys():
919 if self._decode_prop(decoder, key, objectset=objectset):
920 continue
921
922 if not is_IRI(key):
923 raise KeyError(
924 f"Extensible object properties must be IRIs. Got '{key}'"
925 )
926
927 with decoder.read_property(key) as prop_d:
928 self.__dict__["_obj_data"][key] = prop_d.read_value()
929
930 def _encode_properties(self, encoder, state):
931 def encode_value(encoder, v):
932 if isinstance(v, bool):
933 encoder.write_bool(v)
934 elif isinstance(v, str):
935 encoder.write_string(v)
936 elif isinstance(v, int):
937 encoder.write_integer(v)
938 elif isinstance(v, float):
939 encoder.write_float(v)
940 else:
941 raise TypeError(
942 f"Unsupported serialized type {type(v)} with value '{v}'"
943 )
944
945 super()._encode_properties(encoder, state)
946 if self.CLOSED:
947 return
948
949 for iri, value in self.__dict__["_obj_data"].items():
950 if iri in self._OBJ_PROPERTIES:
951 continue
952
953 with encoder.write_property(iri) as prop_s:
954 encode_value(prop_s, value)
955
956 def __setitem__(self, iri, value):
957 try:
958 super().__setitem__(iri, value)
959 except KeyError:
960 if self.CLOSED:
961 raise
962
963 if not is_IRI(iri):
964 raise KeyError(f"Key '{iri}' must be an IRI")
965 self.__dict__["_obj_data"][iri] = value
966
967 def __delitem__(self, iri):
968 try:
969 super().__delitem__(iri)
970 except KeyError:
971 if self.CLOSED:
972 raise
973
974 if not is_IRI(iri):
975 raise KeyError(f"Key '{iri}' must be an IRI")
976 del self.__dict__["_obj_data"][iri]
977
978 def __getattr__(self, name):
979 if name == "TYPE":
980 return self.__dict__["_obj_TYPE"][0]
981 if name == "COMPACT_TYPE":
982 return self.__dict__["_obj_TYPE"][1]
983 return super().__getattr__(name)
984
985 def property_keys(self):
986 iris = set()
987 for pyname, iri, compact in super().property_keys():
988 iris.add(iri)
989 yield pyname, iri, compact
990
991 if self.CLOSED:
992 return
993
994 for iri in self.__dict__["_obj_data"].keys():
995 if iri not in iris:
996 yield None, iri, None
997
998
999class SHACLObjectSet(object):
1000 def __init__(self, objects=[], *, link=False):
1001 self.objects = set()
1002 self.missing_ids = set()
1003 for o in objects:
1004 self.objects.add(o)
1005 self.create_index()
1006 if link:
1007 self._link()
1008
1009 def create_index(self):
1010 """
1011 (re)Create object index
1012
1013 Creates or recreates the indices for the object set to enable fast
1014 lookup. All objects and their children are walked and indexed
1015 """
1016 self.obj_by_id = {}
1017 self.obj_by_type = {}
1018 for o in self.foreach():
1019 self.add_index(o)
1020
1021 def add_index(self, obj):
1022 """
1023 Add object to index
1024
1025 Adds the object to all appropriate indices
1026 """
1027
1028 def reg_type(typ, compact, o, exact):
1029 self.obj_by_type.setdefault(typ, set()).add((exact, o))
1030 if compact:
1031 self.obj_by_type.setdefault(compact, set()).add((exact, o))
1032
1033 if not isinstance(obj, SHACLObject):
1034 raise TypeError("Object is not of type SHACLObject")
1035
1036 for typ in SHACLObject.CLASSES.values():
1037 if isinstance(obj, typ):
1038 reg_type(
1039 typ._OBJ_TYPE, typ._OBJ_COMPACT_TYPE, obj, obj.__class__ is typ
1040 )
1041
1042 # This covers custom extensions
1043 reg_type(obj.TYPE, obj.COMPACT_TYPE, obj, True)
1044
1045 if not obj._id:
1046 return
1047
1048 self.missing_ids.discard(obj._id)
1049
1050 if obj._id in self.obj_by_id:
1051 return
1052
1053 self.obj_by_id[obj._id] = obj
1054
1055 def add(self, obj):
1056 """
1057 Add object to object set
1058
1059 Adds a SHACLObject to the object set and index it.
1060
1061        NOTE: Child objects of the attached object are not indexed
1062 """
1063 if not isinstance(obj, SHACLObject):
1064 raise TypeError("Object is not of type SHACLObject")
1065
1066 if obj not in self.objects:
1067 self.objects.add(obj)
1068 self.add_index(obj)
1069 return obj
1070
1071 def update(self, *others):
1072 """
1073 Update object set adding all objects in each other iterable
1074 """
1075 for o in others:
1076 for obj in o:
1077 self.add(obj)
1078
1079 def __contains__(self, item):
1080 """
1081 Returns True if the item is in the object set
1082 """
1083 return item in self.objects
1084
1085 def link(self):
1086 """
1087 Link object set
1088
1089 Links the object in the object set by replacing string object
1090 references with references to the objects themselves. e.g.
1091 a property that references object "https://foo/bar" by a string
1092 reference will be replaced with an actual reference to the object in
1093 the object set with the same ID if it exists in the object set
1094
1095 If multiple objects with the same ID are found, the duplicates are
1096 eliminated
1097 """
1098 self.create_index()
1099 return self._link()
1100
1101 def _link(self):
1102 global NAMED_INDIVIDUALS
1103
1104 self.missing_ids = set()
1105 visited = set()
1106
1107 new_objects = set()
1108
1109 for o in self.objects:
1110 if o._id:
1111 o = self.find_by_id(o._id, o)
1112 o.link_helper(self, self.missing_ids, visited)
1113 new_objects.add(o)
1114
1115 self.objects = new_objects
1116
1117 # Remove blank nodes
1118 obj_by_id = {}
1119 for _id, obj in self.obj_by_id.items():
1120 if _id.startswith("_:"):
1121 del obj._id
1122 else:
1123 obj_by_id[_id] = obj
1124 self.obj_by_id = obj_by_id
1125
1126 # Named individuals aren't considered missing
1127 self.missing_ids -= NAMED_INDIVIDUALS
1128
1129 return self.missing_ids
1130
1131 def find_by_id(self, _id, default=None):
1132 """
1133 Find object by ID
1134
1135 Returns objects that match the specified ID, or default if there is no
1136 object with the specified ID
1137 """
1138 if _id not in self.obj_by_id:
1139 return default
1140 return self.obj_by_id[_id]
1141
1142 def foreach(self):
1143 """
1144 Iterate over every object in the object set, and all child objects
1145 """
1146 visited = set()
1147 for o in self.objects:
1148 if o not in visited:
1149 yield o
1150 visited.add(o)
1151
1152 for child in o.iter_objects(recursive=True, visited=visited):
1153 yield child
1154
1155 def foreach_type(self, typ, *, match_subclass=True):
1156 """
1157 Iterate over each object of a specified type (or subclass thereof)
1158
1159 If match_subclass is True, any class derived from typ will also match
1160 (similar to isinstance()). If False, only exact matches will be
1161 returned
1162 """
1163 if not isinstance(typ, str):
1164 if not issubclass(typ, SHACLObject):
1165 raise TypeError(f"Type must be derived from SHACLObject, got {typ}")
1166 typ = typ._OBJ_TYPE
1167
1168 if typ not in self.obj_by_type:
1169 return
1170
1171 for exact, o in self.obj_by_type[typ]:
1172 if match_subclass or exact:
1173 yield o
1174
1175 def merge(self, *objectsets):
1176 """
1177 Merge object sets
1178
1179 Returns a new object set that is the combination of this object set and
1180 all provided arguments
1181 """
1182 new_objects = set()
1183 new_objects |= self.objects
1184 for d in objectsets:
1185 new_objects |= d.objects
1186
1187 return SHACLObjectSet(new_objects, link=True)
1188
1189 def encode(self, encoder, force_list=False):
1190 """
1191 Serialize a list of objects to a serialization encoder
1192
1193 If force_list is True, a list will always be written using the encoder.
1194 """
1195 ref_counts = {}
1196 state = EncodeState()
1197
1198 def walk_callback(value, path):
1199 nonlocal state
1200 nonlocal ref_counts
1201
1202 if not isinstance(value, SHACLObject):
1203 return True
1204
1205 # Remove blank node ID for re-assignment
1206 if value._id and value._id.startswith("_:"):
1207 del value._id
1208
1209 if value._id:
1210 state.add_refed(value)
1211
1212 # If the object is referenced more than once, add it to the set of
1213 # referenced objects
1214 ref_counts.setdefault(value, 0)
1215 ref_counts[value] += 1
1216 if ref_counts[value] > 1:
1217 state.add_refed(value)
1218 return False
1219
1220 return True
1221
1222 for o in self.objects:
1223 if o._id:
1224 state.add_refed(o)
1225 o.walk(walk_callback)
1226
1227 use_list = force_list or len(self.objects) > 1
1228
1229 if use_list:
1230 # If we are making a list, add all the objects referred to by
1231 # reference to the list
1232 objects = list(self.objects | state.ref_objects)
1233 else:
1234 objects = list(self.objects)
1235
1236 objects.sort()
1237
1238 if use_list:
1239 # Ensure top level objects are only written in the top level graph
1240 # node, and referenced by ID everywhere else. This is done by setting
1241 # the flag that indicates this object has been written for all the top
1242 # level objects, then clearing it right before serializing the object.
1243 #
1244 # In this way, if an object is referenced before it is supposed to be
1245 # serialized into the @graph, it will serialize as a string instead of
1246 # the actual object
1247 for o in objects:
1248 state.written_objects.add(o)
1249
1250 with encoder.write_list() as list_s:
1251 for o in objects:
1252 # Allow this specific object to be written now
1253 state.written_objects.remove(o)
1254 with list_s.write_list_item() as item_s:
1255 o.encode(item_s, state)
1256
1257 else:
1258 objects[0].encode(encoder, state)
1259
1260 def decode(self, decoder):
1261 self.create_index()
1262
1263 for obj_d in decoder.read_list():
1264 o = SHACLObject.decode(obj_d, objectset=self)
1265 self.objects.add(o)
1266
1267 self._link()
1268
1269
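# Illustrative usage sketch (not part of the generated API; the argument names
# below are hypothetical placeholders supplied by a caller). It shows the
# typical flow of building an object set from SHACLObject instances, resolving
# string references, and querying by ID and by type.
def _example_objectset_usage(objects, object_id, object_type):
    # Build the set and resolve string references into object references
    objectset = SHACLObjectSet(objects, link=True)

    # IDs that were referenced but never defined anywhere in the set
    missing = objectset.missing_ids

    # Look up a single object by its fully qualified ID
    obj = objectset.find_by_id(object_id)

    # Iterate over every object of a given class (subclasses included)
    matches = list(objectset.foreach_type(object_type))

    return missing, obj, matches
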
1270class EncodeState(object):
1271 def __init__(self):
1272 self.ref_objects = set()
1273 self.written_objects = set()
1274 self.blank_objects = {}
1275
1276 def get_object_id(self, o):
1277 if o._id:
1278 return o._id
1279
1280 if o not in self.blank_objects:
1281 _id = f"_:{o.__class__.__name__}{len(self.blank_objects)}"
1282 self.blank_objects[o] = _id
1283
1284 return self.blank_objects[o]
1285
1286 def is_refed(self, o):
1287 return o in self.ref_objects
1288
1289 def add_refed(self, o):
1290 self.ref_objects.add(o)
1291
1292 def is_written(self, o):
1293 return o in self.written_objects
1294
1295 def add_written(self, o):
1296 self.written_objects.add(o)
1297
1298
1299class Decoder(ABC):
1300 @abstractmethod
1301 def read_value(self):
1302 """
1303 Consume next item
1304
1305 Consumes the next item of any type
1306 """
1307 pass
1308
1309 @abstractmethod
1310 def read_string(self):
1311 """
1312 Consume the next item as a string.
1313
1314 Returns the string value of the next item, or `None` if the next item
1315 is not a string
1316 """
1317 pass
1318
1319 @abstractmethod
1320 def read_datetime(self):
1321 """
1322 Consumes the next item as a date & time string
1323
1324 Returns the string value of the next item, if it is an ISO datetime, or
1325 `None` if the next item is not an ISO datetime string.
1326
1327 Note that validation of the string is done by the caller, so a minimal
1328 implementation can just check if the next item is a string without
1329 worrying about the format
1330 """
1331 pass
1332
1333 @abstractmethod
1334 def read_integer(self):
1335 """
1336 Consumes the next item as an integer
1337
1338 Returns the integer value of the next item, or `None` if the next item
1339 is not an integer
1340 """
1341 pass
1342
1343 @abstractmethod
1344 def read_iri(self):
1345 """
1346 Consumes the next item as an IRI string
1347
1348 Returns the string value of the next item as an IRI, or `None` if the next
1349 item is not an IRI.
1350
1351 The returned string should be either a fully-qualified IRI, or a blank
1352 node ID
1353 """
1354 pass
1355
1356 @abstractmethod
1357 def read_enum(self, e):
1358 """
1359 Consumes the next item as an Enum value string
1360
1361 Returns the fully qualified IRI of the next enum item, or `None` if the
1362 next item is not an enum value.
1363
1364 The caller is responsible for validating that the returned IRI is
1365 actually a member of the specified Enum, so the `Decoder` does not need
1366 to check that, but can if it wishes
1367 """
1368 pass
1369
1370 @abstractmethod
1371 def read_bool(self):
1372 """
1373 Consume the next item as a boolean value
1374
1375 Returns the boolean value of the next item, or `None` if the next item
1376 is not a boolean
1377 """
1378 pass
1379
1380 @abstractmethod
1381 def read_float(self):
1382 """
1383 Consume the next item as a float value
1384
1385 Returns the float value of the next item, or `None` if the next item is
1386 not a float
1387 """
1388 pass
1389
1390 @abstractmethod
1391 def read_list(self):
1392 """
1393 Consume the next item as a list generator
1394
1395 This should generate a `Decoder` object for each item in the list. The
1396 generated `Decoder` can be used to read the corresponding item from the
1397 list
1398 """
1399 pass
1400
1401 @abstractmethod
1402 def read_object(self):
1403 """
1404 Consume next item as an object
1405
1406 A context manager that "enters" the next item as an object and yields a
1407 `Decoder` that can read properties from it. If the next item is not an
1408 object, yields `None`
1409
1410 Properties will be read out of the object using `read_property` and
1411 `read_object_id`
1412 """
1413 pass
1414
1415 @abstractmethod
1416 @contextmanager
1417 def read_property(self, key):
1418 """
1419 Read property from object
1420
1421 A context manager that yields a `Decoder` that can be used to read the
1422 value of the property with the given key in the current object, or `None`
1423 if the property does not exist in the current object.
1424 """
1425 pass
1426
1427 @abstractmethod
1428 def object_keys(self):
1429 """
1430 Read property keys from an object
1431
1432 Iterates over all the serialized keys for the current object
1433 """
1434 pass
1435
1436 @abstractmethod
1437 def read_object_id(self, alias=None):
1438 """
1439 Read current object ID property
1440
1441 Returns the ID of the current object if one is defined, or `None` if
1442 the current object has no ID.
1443
1444 The ID must be a fully qualified IRI or a blank node
1445
1446 If `alias` is provided, it is a hint as to another name by which the ID
1447 might be found, if the `Decoder` supports aliases for an ID
1448 """
1449 pass
1450
1451
1452class JSONLDDecoder(Decoder):
1453 def __init__(self, data, root=False):
1454 self.data = data
1455 self.root = root
1456
1457 def read_value(self):
1458 if isinstance(self.data, str):
1459 try:
1460 return float(self.data)
1461 except ValueError:
1462 pass
1463 return self.data
1464
1465 def read_string(self):
1466 if isinstance(self.data, str):
1467 return self.data
1468 return None
1469
1470 def read_datetime(self):
1471 return self.read_string()
1472
1473 def read_integer(self):
1474 if isinstance(self.data, int):
1475 return self.data
1476 return None
1477
1478 def read_bool(self):
1479 if isinstance(self.data, bool):
1480 return self.data
1481 return None
1482
1483 def read_float(self):
1484 if isinstance(self.data, (int, float, str)):
1485 return float(self.data)
1486 return None
1487
1488 def read_iri(self):
1489 if isinstance(self.data, str):
1490 return self.data
1491 return None
1492
1493 def read_enum(self, e):
1494 if isinstance(self.data, str):
1495 return self.data
1496 return None
1497
1498 def read_list(self):
1499 if isinstance(self.data, (list, tuple, set)):
1500 for v in self.data:
1501 yield self.__class__(v)
1502 else:
1503 yield self
1504
1505 def __get_value(self, *keys):
1506 for k in keys:
1507 if k and k in self.data:
1508 return self.data[k]
1509 return None
1510
1511 @contextmanager
1512 def read_property(self, key):
1513 v = self.__get_value(key)
1514 if v is not None:
1515 yield self.__class__(v)
1516 else:
1517 yield None
1518
1519 def object_keys(self):
1520 for key in self.data.keys():
1521 if key in ("@type", "type"):
1522 continue
1523 if self.root and key == "@context":
1524 continue
1525 yield key
1526
1527 def read_object(self):
1528 typ = self.__get_value("@type", "type")
1529 if typ is not None:
1530 return typ, self
1531
1532 return None, self
1533
1534 def read_object_id(self, alias=None):
1535 return self.__get_value(alias, "@id")
1536
1537
1538class JSONLDDeserializer(object):
1539 def deserialize_data(self, data, objectset: SHACLObjectSet):
1540 if "@graph" in data:
1541 h = JSONLDDecoder(data["@graph"], True)
1542 else:
1543 h = JSONLDDecoder(data, True)
1544
1545 objectset.decode(h)
1546
1547 def read(self, f, objectset: SHACLObjectSet):
1548 data = json.load(f)
1549 self.deserialize_data(data, objectset)
1550
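# Illustrative sketch (assumption, not generated code): reading a JSON-LD
# document from disk into a SHACLObjectSet. The path is a placeholder.
def _example_read_jsonld(path):
    objectset = SHACLObjectSet()
    with open(path, "r") as f:
        JSONLDDeserializer().read(f, objectset)
    return objectset
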
1551
1552class Encoder(ABC):
1553 @abstractmethod
1554 def write_string(self, v):
1555 """
1556 Write a string value
1557
1558 Encodes the value as a string in the output
1559 """
1560 pass
1561
1562 @abstractmethod
1563 def write_datetime(self, v):
1564 """
1565 Write a date & time string
1566
1567 Encodes the value as an ISO datetime string
1568
1569 Note: The provided string is already correctly encoded as an ISO datetime
1570 """
1571 pass
1572
1573 @abstractmethod
1574 def write_integer(self, v):
1575 """
1576 Write an integer value
1577
1578 Encodes the value as an integer in the output
1579 """
1580 pass
1581
1582 @abstractmethod
1583 def write_iri(self, v, compact=None):
1584 """
1585 Write IRI
1586
1587 Encodes the string as an IRI. Note that the string will be either a
1588 fully qualified IRI or a blank node ID. If `compact` is provided and
1589 the serialization supports compacted IRIs, it should be preferred to
1590 the full IRI
1591 """
1592 pass
1593
1594 @abstractmethod
1595 def write_enum(self, v, e, compact=None):
1596 """
1597 Write enum value IRI
1598
1599 Encodes the string enum value IRI. Note that the string will be a fully
1600 qualified IRI. If `compact` is provided and the serialization supports
1601 compacted IRIs, it should be preferred to the full IRI.
1602 """
1603 pass
1604
1605 @abstractmethod
1606 def write_bool(self, v):
1607 """
1608 Write boolean
1609
1610 Encodes the value as a boolean in the output
1611 """
1612 pass
1613
1614 @abstractmethod
1615 def write_float(self, v):
1616 """
1617 Write float
1618
1619 Encodes the value as a floating point number in the output
1620 """
1621 pass
1622
1623 @abstractmethod
1624 @contextmanager
1625 def write_object(self, o, _id, needs_id):
1626 """
1627 Write object
1628
1629 A context manager that yields an `Encoder` that can be used to encode
1630 the given object properties.
1631
1632 The provided ID will always be a valid ID (even if o._id is `None`), in
1633 case the `Encoder` _must_ have an ID. `needs_id` is a hint to indicate
1634 to the `Encoder` if an ID must be written or not (if that is even an
1635 option). If it is `True`, the `Encoder` must encode an ID for the
1636 object. If `False`, the encoder is not required to encode an ID and may
1637 omit it.
1638
1639 The ID will be either a fully qualified IRI, or a blank node IRI.
1640
1641 Properties will be written to the object using `write_property`
1642 """
1643 pass
1644
1645 @abstractmethod
1646 @contextmanager
1647 def write_property(self, iri, compact=None):
1648 """
1649 Write object property
1650
1651 A context manager that yields an `Encoder` that can be used to encode
1652 the value for the property with the given IRI in the current object
1653
1654 Note that the IRI will be fully qualified. If `compact` is provided and
1655 the serialization supports compacted IRIs, it should be preferred to
1656 the full IRI.
1657 """
1658 pass
1659
1660 @abstractmethod
1661 @contextmanager
1662 def write_list(self):
1663 """
1664 Write list
1665
1666 A context manager that yields an `Encoder` that can be used to encode a
1667 list.
1668
1669 Each item of the list will be added using `write_list_item`
1670 """
1671 pass
1672
1673 @abstractmethod
1674 @contextmanager
1675 def write_list_item(self):
1676 """
1677 Write list item
1678
1679 A context manager that yields an `Encoder` that can be used to encode
1680 the value for a list item
1681 """
1682 pass
1683
1684
1685class JSONLDEncoder(Encoder):
1686 def __init__(self, data=None):
1687 self.data = data
1688
1689 def write_string(self, v):
1690 self.data = v
1691
1692 def write_datetime(self, v):
1693 self.data = v
1694
1695 def write_integer(self, v):
1696 self.data = v
1697
1698 def write_iri(self, v, compact=None):
1699 self.write_string(compact or v)
1700
1701 def write_enum(self, v, e, compact=None):
1702 self.write_string(compact or v)
1703
1704 def write_bool(self, v):
1705 self.data = v
1706
1707 def write_float(self, v):
1708 self.data = str(v)
1709
1710 @contextmanager
1711 def write_property(self, iri, compact=None):
1712 s = self.__class__(None)
1713 yield s
1714 if s.data is not None:
1715 self.data[compact or iri] = s.data
1716
1717 @contextmanager
1718 def write_object(self, o, _id, needs_id):
1719 self.data = {
1720 "type": o.COMPACT_TYPE or o.TYPE,
1721 }
1722 if needs_id:
1723 self.data[o.ID_ALIAS or "@id"] = _id
1724 yield self
1725
1726 @contextmanager
1727 def write_list(self):
1728 self.data = []
1729 yield self
1730 if not self.data:
1731 self.data = None
1732
1733 @contextmanager
1734 def write_list_item(self):
1735 s = self.__class__(None)
1736 yield s
1737 if s.data is not None:
1738 self.data.append(s.data)
1739
1740
1741class JSONLDSerializer(object):
1742 def __init__(self, **args):
1743 self.args = args
1744
1745 def serialize_data(
1746 self,
1747 objectset: SHACLObjectSet,
1748 force_at_graph=False,
1749 ):
1750 h = JSONLDEncoder()
1751 objectset.encode(h, force_at_graph)
1752 data = {}
1753 if len(CONTEXT_URLS) == 1:
1754 data["@context"] = CONTEXT_URLS[0]
1755 elif CONTEXT_URLS:
1756 data["@context"] = CONTEXT_URLS
1757
1758 if isinstance(h.data, list):
1759 data["@graph"] = h.data
1760 else:
1761 for k, v in h.data.items():
1762 data[k] = v
1763
1764 return data
1765
1766 def write(
1767 self,
1768 objectset: SHACLObjectSet,
1769 f,
1770 force_at_graph=False,
1771 **kwargs,
1772 ):
1773 """
1774 Write a SHACLObjectSet to a JSON-LD file
1775
1776 If force_at_graph is True, a @graph node will always be written
1777 """
1778 data = self.serialize_data(objectset, force_at_graph)
1779
1780 args = {**self.args, **kwargs}
1781
1782 sha1 = hashlib.sha1()
1783 for chunk in json.JSONEncoder(**args).iterencode(data):
1784 chunk = chunk.encode("utf-8")
1785 f.write(chunk)
1786 sha1.update(chunk)
1787
1788 return sha1.hexdigest()
1789
1790
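# Illustrative sketch (assumption, not generated code): writing an object set
# as JSON-LD. The serializer emits UTF-8 encoded chunks, so the file must be
# opened in binary mode; keyword arguments (e.g. indent) are passed through to
# json.JSONEncoder. The path is a placeholder.
def _example_write_jsonld(objectset, path):
    with open(path, "wb") as f:
        # Returns the SHA-1 hex digest of the serialized document
        return JSONLDSerializer(indent=2).write(objectset, f)
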
1791class JSONLDInlineEncoder(Encoder):
1792 def __init__(self, f, sha1):
1793 self.f = f
1794 self.comma = False
1795 self.sha1 = sha1
1796
1797 def write(self, s):
1798 s = s.encode("utf-8")
1799 self.f.write(s)
1800 self.sha1.update(s)
1801
1802 def _write_comma(self):
1803 if self.comma:
1804 self.write(",")
1805 self.comma = False
1806
1807 def write_string(self, v):
1808 self.write(json.dumps(v))
1809
1810 def write_datetime(self, v):
1811 self.write_string(v)
1812
1813 def write_integer(self, v):
1814 self.write(f"{v}")
1815
1816 def write_iri(self, v, compact=None):
1817 self.write_string(compact or v)
1818
1819 def write_enum(self, v, e, compact=None):
1820 self.write_iri(v, compact)
1821
1822 def write_bool(self, v):
1823 if v:
1824 self.write("true")
1825 else:
1826 self.write("false")
1827
1828 def write_float(self, v):
1829 self.write(json.dumps(str(v)))
1830
1831 @contextmanager
1832 def write_property(self, iri, compact=None):
1833 self._write_comma()
1834 self.write_string(compact or iri)
1835 self.write(":")
1836 yield self
1837 self.comma = True
1838
1839 @contextmanager
1840 def write_object(self, o, _id, needs_id):
1841 self._write_comma()
1842
1843 self.write("{")
1844 self.write_string("type")
1845 self.write(":")
1846 self.write_string(o.COMPACT_TYPE or o.TYPE)
1847 self.comma = True
1848
1849 if needs_id:
1850 self._write_comma()
1851 self.write_string(o.ID_ALIAS or "@id")
1852 self.write(":")
1853 self.write_string(_id)
1854 self.comma = True
1855
1856 self.comma = True
1857 yield self
1858
1859 self.write("}")
1860 self.comma = True
1861
1862 @contextmanager
1863 def write_list(self):
1864 self._write_comma()
1865 self.write("[")
1866 yield self.__class__(self.f, self.sha1)
1867 self.write("]")
1868 self.comma = True
1869
1870 @contextmanager
1871 def write_list_item(self):
1872 self._write_comma()
1873 yield self.__class__(self.f, self.sha1)
1874 self.comma = True
1875
1876
1877class JSONLDInlineSerializer(object):
1878 def write(
1879 self,
1880 objectset: SHACLObjectSet,
1881 f,
1882 force_at_graph=False,
1883 ):
1884 """
1885 Write a SHACLObjectSet to a JSON-LD file
1886
1887 Note: force_at_graph is included for compatibility, but ignored. This
1888 serializer always writes out a graph
1889 """
1890 sha1 = hashlib.sha1()
1891 h = JSONLDInlineEncoder(f, sha1)
1892 h.write('{"@context":')
1893 if len(CONTEXT_URLS) == 1:
1894 h.write(f'"{CONTEXT_URLS[0]}"')
1895 elif CONTEXT_URLS:
1896 h.write('["')
1897 h.write('","'.join(CONTEXT_URLS))
1898 h.write('"]')
1899 h.write(",")
1900
1901 h.write('"@graph":')
1902
1903 objectset.encode(h, True)
1904 h.write("}")
1905 return sha1.hexdigest()
1906
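# Illustrative sketch (assumption, not generated code): the inline serializer
# streams its output chunk by chunk and always writes an "@graph" array, so the
# fully encoded document never has to be held in memory. It also expects a
# binary-mode file object. The path is a placeholder.
def _example_write_jsonld_inline(objectset, path):
    with open(path, "wb") as f:
        return JSONLDInlineSerializer().write(objectset, f)
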
1907
1908def print_tree(objects, all_fields=False):
1909 """
1910 Print object tree
1911 """
1912 seen = set()
1913
1914 def callback(value, path):
1915 nonlocal seen
1916
1917 s = (" " * (len(path) - 1)) + f"{path[-1]}"
1918 if isinstance(value, SHACLObject):
1919 s += f" {value} ({id(value)})"
1920 is_empty = False
1921 elif isinstance(value, ListProxy):
1922 is_empty = len(value) == 0
1923 if is_empty:
1924 s += " []"
1925 else:
1926 s += f" {value!r}"
1927 is_empty = value is None
1928
1929 if all_fields or not is_empty:
1930 print(s)
1931
1932 if isinstance(value, SHACLObject):
1933 if value in seen:
1934 return False
1935 seen.add(value)
1936 return True
1937
1938 return True
1939
1940 for o in objects:
1941 o.walk(callback)
1942
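# Illustrative sketch (assumption, not generated code): dumping an object set
# for debugging, including properties that are unset or empty.
def _example_dump(objectset):
    print_tree(objectset.objects, all_fields=True)
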
1943
1944# fmt: off
1945"""Format Guard"""
1946
1947
1948CONTEXT_URLS = [
1949 "https://spdx.org/rdf/3.0.0/spdx-context.jsonld",
1950]
1951
1952_NI_ENCODE_CONTEXT = {
1953 "https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/kilowattHour": "ai_EnergyUnitType:kilowattHour",
1954 "https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/megajoule": "ai_EnergyUnitType:megajoule",
1955 "https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/other": "ai_EnergyUnitType:other",
1956 "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/high": "ai_SafetyRiskAssessmentType:high",
1957 "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/low": "ai_SafetyRiskAssessmentType:low",
1958 "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/medium": "ai_SafetyRiskAssessmentType:medium",
1959 "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/serious": "ai_SafetyRiskAssessmentType:serious",
1960 "https://spdx.org/rdf/3.0.0/terms/Core/AnnotationType/other": "AnnotationType:other",
1961 "https://spdx.org/rdf/3.0.0/terms/Core/AnnotationType/review": "AnnotationType:review",
1962 "https://spdx.org/rdf/3.0.0/terms/Core/NoAssertionElement": "spdx:Core/NoAssertionElement",
1963 "https://spdx.org/rdf/3.0.0/terms/Core/NoneElement": "spdx:Core/NoneElement",
1964 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cpe22": "ExternalIdentifierType:cpe22",
1965 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cpe23": "ExternalIdentifierType:cpe23",
1966 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cve": "ExternalIdentifierType:cve",
1967 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/email": "ExternalIdentifierType:email",
1968 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/gitoid": "ExternalIdentifierType:gitoid",
1969 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/other": "ExternalIdentifierType:other",
1970 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/packageUrl": "ExternalIdentifierType:packageUrl",
1971 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/securityOther": "ExternalIdentifierType:securityOther",
1972 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/swhid": "ExternalIdentifierType:swhid",
1973 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/swid": "ExternalIdentifierType:swid",
1974 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/urlScheme": "ExternalIdentifierType:urlScheme",
1975 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/altDownloadLocation": "ExternalRefType:altDownloadLocation",
1976 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/altWebPage": "ExternalRefType:altWebPage",
1977 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/binaryArtifact": "ExternalRefType:binaryArtifact",
1978 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/bower": "ExternalRefType:bower",
1979 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/buildMeta": "ExternalRefType:buildMeta",
1980 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/buildSystem": "ExternalRefType:buildSystem",
1981 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/certificationReport": "ExternalRefType:certificationReport",
1982 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/chat": "ExternalRefType:chat",
1983 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/componentAnalysisReport": "ExternalRefType:componentAnalysisReport",
1984 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/cwe": "ExternalRefType:cwe",
1985 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/documentation": "ExternalRefType:documentation",
1986 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/dynamicAnalysisReport": "ExternalRefType:dynamicAnalysisReport",
1987 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/eolNotice": "ExternalRefType:eolNotice",
1988 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/exportControlAssessment": "ExternalRefType:exportControlAssessment",
1989 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/funding": "ExternalRefType:funding",
1990 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/issueTracker": "ExternalRefType:issueTracker",
1991 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/license": "ExternalRefType:license",
1992 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/mailingList": "ExternalRefType:mailingList",
1993 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/mavenCentral": "ExternalRefType:mavenCentral",
1994 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/metrics": "ExternalRefType:metrics",
1995 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/npm": "ExternalRefType:npm",
1996 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/nuget": "ExternalRefType:nuget",
1997 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/other": "ExternalRefType:other",
1998 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/privacyAssessment": "ExternalRefType:privacyAssessment",
1999 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/productMetadata": "ExternalRefType:productMetadata",
2000 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/purchaseOrder": "ExternalRefType:purchaseOrder",
2001 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/qualityAssessmentReport": "ExternalRefType:qualityAssessmentReport",
2002 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/releaseHistory": "ExternalRefType:releaseHistory",
2003 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/releaseNotes": "ExternalRefType:releaseNotes",
2004 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/riskAssessment": "ExternalRefType:riskAssessment",
2005 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/runtimeAnalysisReport": "ExternalRefType:runtimeAnalysisReport",
2006 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/secureSoftwareAttestation": "ExternalRefType:secureSoftwareAttestation",
2007 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityAdversaryModel": "ExternalRefType:securityAdversaryModel",
2008 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityAdvisory": "ExternalRefType:securityAdvisory",
2009 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityFix": "ExternalRefType:securityFix",
2010 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityOther": "ExternalRefType:securityOther",
2011 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityPenTestReport": "ExternalRefType:securityPenTestReport",
2012 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityPolicy": "ExternalRefType:securityPolicy",
2013 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityThreatModel": "ExternalRefType:securityThreatModel",
2014 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/socialMedia": "ExternalRefType:socialMedia",
2015 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/sourceArtifact": "ExternalRefType:sourceArtifact",
2016 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/staticAnalysisReport": "ExternalRefType:staticAnalysisReport",
2017 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/support": "ExternalRefType:support",
2018 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vcs": "ExternalRefType:vcs",
2019 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vulnerabilityDisclosureReport": "ExternalRefType:vulnerabilityDisclosureReport",
2020 "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vulnerabilityExploitabilityAssessment": "ExternalRefType:vulnerabilityExploitabilityAssessment",
2021 "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b256": "HashAlgorithm:blake2b256",
2022 "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b384": "HashAlgorithm:blake2b384",
2023 "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b512": "HashAlgorithm:blake2b512",
2024 "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake3": "HashAlgorithm:blake3",
2025 "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/crystalsDilithium": "HashAlgorithm:crystalsDilithium",
2026 "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/crystalsKyber": "HashAlgorithm:crystalsKyber",
2027 "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/falcon": "HashAlgorithm:falcon",
2028 "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md2": "HashAlgorithm:md2",
2029 "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md4": "HashAlgorithm:md4",
2030 "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md5": "HashAlgorithm:md5",
2031 "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md6": "HashAlgorithm:md6",
2032 "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/other": "HashAlgorithm:other",
2033 "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha1": "HashAlgorithm:sha1",
2034 "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha224": "HashAlgorithm:sha224",
2035 "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha256": "HashAlgorithm:sha256",
2036 "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha384": "HashAlgorithm:sha384",
2037 "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_224": "HashAlgorithm:sha3_224",
2038 "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_256": "HashAlgorithm:sha3_256",
2039 "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_384": "HashAlgorithm:sha3_384",
2040 "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_512": "HashAlgorithm:sha3_512",
2041 "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha512": "HashAlgorithm:sha512",
2042 "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/build": "LifecycleScopeType:build",
2043 "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/design": "LifecycleScopeType:design",
2044 "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/development": "LifecycleScopeType:development",
2045 "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/other": "LifecycleScopeType:other",
2046 "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/runtime": "LifecycleScopeType:runtime",
2047 "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/test": "LifecycleScopeType:test",
2048 "https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/no": "PresenceType:no",
2049 "https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/noAssertion": "PresenceType:noAssertion",
2050 "https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/yes": "PresenceType:yes",
2051 "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/ai": "ProfileIdentifierType:ai",
2052 "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/build": "ProfileIdentifierType:build",
2053 "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/core": "ProfileIdentifierType:core",
2054 "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/dataset": "ProfileIdentifierType:dataset",
2055 "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/expandedLicensing": "ProfileIdentifierType:expandedLicensing",
2056 "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/extension": "ProfileIdentifierType:extension",
2057 "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/lite": "ProfileIdentifierType:lite",
2058 "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/security": "ProfileIdentifierType:security",
2059 "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/simpleLicensing": "ProfileIdentifierType:simpleLicensing",
2060 "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/software": "ProfileIdentifierType:software",
2061 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/complete": "RelationshipCompleteness:complete",
2062 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/incomplete": "RelationshipCompleteness:incomplete",
2063 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/noAssertion": "RelationshipCompleteness:noAssertion",
2064 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/affects": "RelationshipType:affects",
2065 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/amendedBy": "RelationshipType:amendedBy",
2066 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/ancestorOf": "RelationshipType:ancestorOf",
2067 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/availableFrom": "RelationshipType:availableFrom",
2068 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/configures": "RelationshipType:configures",
2069 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/contains": "RelationshipType:contains",
2070 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/coordinatedBy": "RelationshipType:coordinatedBy",
2071 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/copiedTo": "RelationshipType:copiedTo",
2072 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/delegatedTo": "RelationshipType:delegatedTo",
2073 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/dependsOn": "RelationshipType:dependsOn",
2074 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/descendantOf": "RelationshipType:descendantOf",
2075 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/describes": "RelationshipType:describes",
2076 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/doesNotAffect": "RelationshipType:doesNotAffect",
2077 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/expandsTo": "RelationshipType:expandsTo",
2078 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/exploitCreatedBy": "RelationshipType:exploitCreatedBy",
2079 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/fixedBy": "RelationshipType:fixedBy",
2080 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/fixedIn": "RelationshipType:fixedIn",
2081 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/foundBy": "RelationshipType:foundBy",
2082 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/generates": "RelationshipType:generates",
2083 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAddedFile": "RelationshipType:hasAddedFile",
2084 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAssessmentFor": "RelationshipType:hasAssessmentFor",
2085 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAssociatedVulnerability": "RelationshipType:hasAssociatedVulnerability",
2086 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasConcludedLicense": "RelationshipType:hasConcludedLicense",
2087 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDataFile": "RelationshipType:hasDataFile",
2088 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDeclaredLicense": "RelationshipType:hasDeclaredLicense",
2089 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDeletedFile": "RelationshipType:hasDeletedFile",
2090 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDependencyManifest": "RelationshipType:hasDependencyManifest",
2091 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDistributionArtifact": "RelationshipType:hasDistributionArtifact",
2092 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDocumentation": "RelationshipType:hasDocumentation",
2093 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDynamicLink": "RelationshipType:hasDynamicLink",
2094 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasEvidence": "RelationshipType:hasEvidence",
2095 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasExample": "RelationshipType:hasExample",
2096 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasHost": "RelationshipType:hasHost",
2097 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasInputs": "RelationshipType:hasInputs",
2098 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasMetadata": "RelationshipType:hasMetadata",
2099 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOptionalComponent": "RelationshipType:hasOptionalComponent",
2100 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOptionalDependency": "RelationshipType:hasOptionalDependency",
2101 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOutputs": "RelationshipType:hasOutputs",
2102 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasPrerequsite": "RelationshipType:hasPrerequsite",
2103 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasProvidedDependency": "RelationshipType:hasProvidedDependency",
2104 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasRequirement": "RelationshipType:hasRequirement",
2105 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasSpecification": "RelationshipType:hasSpecification",
2106 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasStaticLink": "RelationshipType:hasStaticLink",
2107 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasTest": "RelationshipType:hasTest",
2108 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasTestCase": "RelationshipType:hasTestCase",
2109 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasVariant": "RelationshipType:hasVariant",
2110 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/invokedBy": "RelationshipType:invokedBy",
2111 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/modifiedBy": "RelationshipType:modifiedBy",
2112 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/other": "RelationshipType:other",
2113 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/packagedBy": "RelationshipType:packagedBy",
2114 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/patchedBy": "RelationshipType:patchedBy",
2115 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/publishedBy": "RelationshipType:publishedBy",
2116 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/reportedBy": "RelationshipType:reportedBy",
2117 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/republishedBy": "RelationshipType:republishedBy",
2118 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/serializedInArtifact": "RelationshipType:serializedInArtifact",
2119 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/testedOn": "RelationshipType:testedOn",
2120 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/trainedOn": "RelationshipType:trainedOn",
2121 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/underInvestigationFor": "RelationshipType:underInvestigationFor",
2122 "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/usesTool": "RelationshipType:usesTool",
2123 "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/deployed": "SupportType:deployed",
2124 "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/development": "SupportType:development",
2125 "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/endOfSupport": "SupportType:endOfSupport",
2126 "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/limitedSupport": "SupportType:limitedSupport",
2127 "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/noAssertion": "SupportType:noAssertion",
2128 "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/noSupport": "SupportType:noSupport",
2129 "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/support": "SupportType:support",
2130 "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/amber": "dataset_ConfidentialityLevelType:amber",
2131 "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/clear": "dataset_ConfidentialityLevelType:clear",
2132 "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/green": "dataset_ConfidentialityLevelType:green",
2133 "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/red": "dataset_ConfidentialityLevelType:red",
2134 "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/clickthrough": "dataset_DatasetAvailabilityType:clickthrough",
2135 "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/directDownload": "dataset_DatasetAvailabilityType:directDownload",
2136 "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/query": "dataset_DatasetAvailabilityType:query",
2137 "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/registration": "dataset_DatasetAvailabilityType:registration",
2138 "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/scrapingScript": "dataset_DatasetAvailabilityType:scrapingScript",
2139 "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/audio": "dataset_DatasetType:audio",
2140 "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/categorical": "dataset_DatasetType:categorical",
2141 "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/graph": "dataset_DatasetType:graph",
2142 "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/image": "dataset_DatasetType:image",
2143 "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/noAssertion": "dataset_DatasetType:noAssertion",
2144 "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/numeric": "dataset_DatasetType:numeric",
2145 "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/other": "dataset_DatasetType:other",
2146 "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/sensor": "dataset_DatasetType:sensor",
2147 "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/structured": "dataset_DatasetType:structured",
2148 "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/syntactic": "dataset_DatasetType:syntactic",
2149 "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/text": "dataset_DatasetType:text",
2150 "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/timeseries": "dataset_DatasetType:timeseries",
2151 "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/timestamp": "dataset_DatasetType:timestamp",
2152 "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/video": "dataset_DatasetType:video",
2153 "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/critical": "security_CvssSeverityType:critical",
2154 "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/high": "security_CvssSeverityType:high",
2155 "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/low": "security_CvssSeverityType:low",
2156 "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/medium": "security_CvssSeverityType:medium",
2157 "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/none": "security_CvssSeverityType:none",
2158 "https://spdx.org/rdf/3.0.0/terms/Security/ExploitCatalogType/kev": "security_ExploitCatalogType:kev",
2159 "https://spdx.org/rdf/3.0.0/terms/Security/ExploitCatalogType/other": "security_ExploitCatalogType:other",
2160 "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/act": "security_SsvcDecisionType:act",
2161 "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/attend": "security_SsvcDecisionType:attend",
2162 "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/track": "security_SsvcDecisionType:track",
2163 "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/trackStar": "security_SsvcDecisionType:trackStar",
2164 "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/componentNotPresent": "security_VexJustificationType:componentNotPresent",
2165 "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/inlineMitigationsAlreadyExist": "security_VexJustificationType:inlineMitigationsAlreadyExist",
2166 "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeCannotBeControlledByAdversary": "security_VexJustificationType:vulnerableCodeCannotBeControlledByAdversary",
2167 "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeNotInExecutePath": "security_VexJustificationType:vulnerableCodeNotInExecutePath",
2168 "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeNotPresent": "security_VexJustificationType:vulnerableCodeNotPresent",
2169 "https://spdx.org/rdf/3.0.0/terms/Software/ContentIdentifierType/gitoid": "software_ContentIdentifierType:gitoid",
2170 "https://spdx.org/rdf/3.0.0/terms/Software/ContentIdentifierType/swhid": "software_ContentIdentifierType:swhid",
2171 "https://spdx.org/rdf/3.0.0/terms/Software/FileKindType/directory": "software_FileKindType:directory",
2172 "https://spdx.org/rdf/3.0.0/terms/Software/FileKindType/file": "software_FileKindType:file",
2173 "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/analyzed": "software_SbomType:analyzed",
2174 "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/build": "software_SbomType:build",
2175 "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/deployed": "software_SbomType:deployed",
2176 "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/design": "software_SbomType:design",
2177 "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/runtime": "software_SbomType:runtime",
2178 "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/source": "software_SbomType:source",
2179 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/application": "software_SoftwarePurpose:application",
2180 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/archive": "software_SoftwarePurpose:archive",
2181 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/bom": "software_SoftwarePurpose:bom",
2182 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/configuration": "software_SoftwarePurpose:configuration",
2183 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/container": "software_SoftwarePurpose:container",
2184 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/data": "software_SoftwarePurpose:data",
2185 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/device": "software_SoftwarePurpose:device",
2186 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/deviceDriver": "software_SoftwarePurpose:deviceDriver",
2187 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/diskImage": "software_SoftwarePurpose:diskImage",
2188 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/documentation": "software_SoftwarePurpose:documentation",
2189 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/evidence": "software_SoftwarePurpose:evidence",
2190 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/executable": "software_SoftwarePurpose:executable",
2191 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/file": "software_SoftwarePurpose:file",
2192 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/filesystemImage": "software_SoftwarePurpose:filesystemImage",
2193 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/firmware": "software_SoftwarePurpose:firmware",
2194 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/framework": "software_SoftwarePurpose:framework",
2195 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/install": "software_SoftwarePurpose:install",
2196 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/library": "software_SoftwarePurpose:library",
2197 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/manifest": "software_SoftwarePurpose:manifest",
2198 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/model": "software_SoftwarePurpose:model",
2199 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/module": "software_SoftwarePurpose:module",
2200 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/operatingSystem": "software_SoftwarePurpose:operatingSystem",
2201 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/other": "software_SoftwarePurpose:other",
2202 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/patch": "software_SoftwarePurpose:patch",
2203 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/platform": "software_SoftwarePurpose:platform",
2204 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/requirement": "software_SoftwarePurpose:requirement",
2205 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/source": "software_SoftwarePurpose:source",
2206 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/specification": "software_SoftwarePurpose:specification",
2207 "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/test": "software_SoftwarePurpose:test",
2208 "https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/NoAssertionLicense": "spdx:ExpandedLicensing/NoAssertionLicense",
2209 "https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/NoneLicense": "spdx:ExpandedLicensing/NoneLicense",
2210}
2211
2212_NI_DECODE_CONTEXT = {
2213 "ai_EnergyUnitType:kilowattHour": "https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/kilowattHour",
2214 "ai_EnergyUnitType:megajoule": "https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/megajoule",
2215 "ai_EnergyUnitType:other": "https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/other",
2216 "ai_SafetyRiskAssessmentType:high": "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/high",
2217 "ai_SafetyRiskAssessmentType:low": "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/low",
2218 "ai_SafetyRiskAssessmentType:medium": "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/medium",
2219 "ai_SafetyRiskAssessmentType:serious": "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/serious",
2220 "AnnotationType:other": "https://spdx.org/rdf/3.0.0/terms/Core/AnnotationType/other",
2221 "AnnotationType:review": "https://spdx.org/rdf/3.0.0/terms/Core/AnnotationType/review",
2222 "spdx:Core/NoAssertionElement": "https://spdx.org/rdf/3.0.0/terms/Core/NoAssertionElement",
2223 "spdx:Core/NoneElement": "https://spdx.org/rdf/3.0.0/terms/Core/NoneElement",
2224 "ExternalIdentifierType:cpe22": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cpe22",
2225 "ExternalIdentifierType:cpe23": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cpe23",
2226 "ExternalIdentifierType:cve": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cve",
2227 "ExternalIdentifierType:email": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/email",
2228 "ExternalIdentifierType:gitoid": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/gitoid",
2229 "ExternalIdentifierType:other": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/other",
2230 "ExternalIdentifierType:packageUrl": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/packageUrl",
2231 "ExternalIdentifierType:securityOther": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/securityOther",
2232 "ExternalIdentifierType:swhid": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/swhid",
2233 "ExternalIdentifierType:swid": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/swid",
2234 "ExternalIdentifierType:urlScheme": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/urlScheme",
2235 "ExternalRefType:altDownloadLocation": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/altDownloadLocation",
2236 "ExternalRefType:altWebPage": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/altWebPage",
2237 "ExternalRefType:binaryArtifact": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/binaryArtifact",
2238 "ExternalRefType:bower": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/bower",
2239 "ExternalRefType:buildMeta": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/buildMeta",
2240 "ExternalRefType:buildSystem": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/buildSystem",
2241 "ExternalRefType:certificationReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/certificationReport",
2242 "ExternalRefType:chat": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/chat",
2243 "ExternalRefType:componentAnalysisReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/componentAnalysisReport",
2244 "ExternalRefType:cwe": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/cwe",
2245 "ExternalRefType:documentation": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/documentation",
2246 "ExternalRefType:dynamicAnalysisReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/dynamicAnalysisReport",
2247 "ExternalRefType:eolNotice": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/eolNotice",
2248 "ExternalRefType:exportControlAssessment": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/exportControlAssessment",
2249 "ExternalRefType:funding": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/funding",
2250 "ExternalRefType:issueTracker": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/issueTracker",
2251 "ExternalRefType:license": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/license",
2252 "ExternalRefType:mailingList": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/mailingList",
2253 "ExternalRefType:mavenCentral": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/mavenCentral",
2254 "ExternalRefType:metrics": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/metrics",
2255 "ExternalRefType:npm": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/npm",
2256 "ExternalRefType:nuget": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/nuget",
2257 "ExternalRefType:other": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/other",
2258 "ExternalRefType:privacyAssessment": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/privacyAssessment",
2259 "ExternalRefType:productMetadata": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/productMetadata",
2260 "ExternalRefType:purchaseOrder": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/purchaseOrder",
2261 "ExternalRefType:qualityAssessmentReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/qualityAssessmentReport",
2262 "ExternalRefType:releaseHistory": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/releaseHistory",
2263 "ExternalRefType:releaseNotes": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/releaseNotes",
2264 "ExternalRefType:riskAssessment": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/riskAssessment",
2265 "ExternalRefType:runtimeAnalysisReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/runtimeAnalysisReport",
2266 "ExternalRefType:secureSoftwareAttestation": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/secureSoftwareAttestation",
2267 "ExternalRefType:securityAdversaryModel": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityAdversaryModel",
2268 "ExternalRefType:securityAdvisory": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityAdvisory",
2269 "ExternalRefType:securityFix": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityFix",
2270 "ExternalRefType:securityOther": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityOther",
2271 "ExternalRefType:securityPenTestReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityPenTestReport",
2272 "ExternalRefType:securityPolicy": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityPolicy",
2273 "ExternalRefType:securityThreatModel": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityThreatModel",
2274 "ExternalRefType:socialMedia": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/socialMedia",
2275 "ExternalRefType:sourceArtifact": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/sourceArtifact",
2276 "ExternalRefType:staticAnalysisReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/staticAnalysisReport",
2277 "ExternalRefType:support": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/support",
2278 "ExternalRefType:vcs": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vcs",
2279 "ExternalRefType:vulnerabilityDisclosureReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vulnerabilityDisclosureReport",
2280 "ExternalRefType:vulnerabilityExploitabilityAssessment": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vulnerabilityExploitabilityAssessment",
2281 "HashAlgorithm:blake2b256": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b256",
2282 "HashAlgorithm:blake2b384": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b384",
2283 "HashAlgorithm:blake2b512": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b512",
2284 "HashAlgorithm:blake3": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake3",
2285 "HashAlgorithm:crystalsDilithium": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/crystalsDilithium",
2286 "HashAlgorithm:crystalsKyber": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/crystalsKyber",
2287 "HashAlgorithm:falcon": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/falcon",
2288 "HashAlgorithm:md2": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md2",
2289 "HashAlgorithm:md4": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md4",
2290 "HashAlgorithm:md5": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md5",
2291 "HashAlgorithm:md6": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md6",
2292 "HashAlgorithm:other": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/other",
2293 "HashAlgorithm:sha1": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha1",
2294 "HashAlgorithm:sha224": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha224",
2295 "HashAlgorithm:sha256": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha256",
2296 "HashAlgorithm:sha384": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha384",
2297 "HashAlgorithm:sha3_224": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_224",
2298 "HashAlgorithm:sha3_256": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_256",
2299 "HashAlgorithm:sha3_384": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_384",
2300 "HashAlgorithm:sha3_512": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_512",
2301 "HashAlgorithm:sha512": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha512",
2302 "LifecycleScopeType:build": "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/build",
2303 "LifecycleScopeType:design": "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/design",
2304 "LifecycleScopeType:development": "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/development",
2305 "LifecycleScopeType:other": "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/other",
2306 "LifecycleScopeType:runtime": "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/runtime",
2307 "LifecycleScopeType:test": "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/test",
2308 "PresenceType:no": "https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/no",
2309 "PresenceType:noAssertion": "https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/noAssertion",
2310 "PresenceType:yes": "https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/yes",
2311 "ProfileIdentifierType:ai": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/ai",
2312 "ProfileIdentifierType:build": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/build",
2313 "ProfileIdentifierType:core": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/core",
2314 "ProfileIdentifierType:dataset": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/dataset",
2315 "ProfileIdentifierType:expandedLicensing": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/expandedLicensing",
2316 "ProfileIdentifierType:extension": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/extension",
2317 "ProfileIdentifierType:lite": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/lite",
2318 "ProfileIdentifierType:security": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/security",
2319 "ProfileIdentifierType:simpleLicensing": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/simpleLicensing",
2320 "ProfileIdentifierType:software": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/software",
2321 "RelationshipCompleteness:complete": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/complete",
2322 "RelationshipCompleteness:incomplete": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/incomplete",
2323 "RelationshipCompleteness:noAssertion": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/noAssertion",
2324 "RelationshipType:affects": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/affects",
2325 "RelationshipType:amendedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/amendedBy",
2326 "RelationshipType:ancestorOf": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/ancestorOf",
2327 "RelationshipType:availableFrom": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/availableFrom",
2328 "RelationshipType:configures": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/configures",
2329 "RelationshipType:contains": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/contains",
2330 "RelationshipType:coordinatedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/coordinatedBy",
2331 "RelationshipType:copiedTo": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/copiedTo",
2332 "RelationshipType:delegatedTo": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/delegatedTo",
2333 "RelationshipType:dependsOn": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/dependsOn",
2334 "RelationshipType:descendantOf": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/descendantOf",
2335 "RelationshipType:describes": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/describes",
2336 "RelationshipType:doesNotAffect": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/doesNotAffect",
2337 "RelationshipType:expandsTo": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/expandsTo",
2338 "RelationshipType:exploitCreatedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/exploitCreatedBy",
2339 "RelationshipType:fixedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/fixedBy",
2340 "RelationshipType:fixedIn": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/fixedIn",
2341 "RelationshipType:foundBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/foundBy",
2342 "RelationshipType:generates": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/generates",
2343 "RelationshipType:hasAddedFile": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAddedFile",
2344 "RelationshipType:hasAssessmentFor": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAssessmentFor",
2345 "RelationshipType:hasAssociatedVulnerability": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAssociatedVulnerability",
2346 "RelationshipType:hasConcludedLicense": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasConcludedLicense",
2347 "RelationshipType:hasDataFile": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDataFile",
2348 "RelationshipType:hasDeclaredLicense": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDeclaredLicense",
2349 "RelationshipType:hasDeletedFile": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDeletedFile",
2350 "RelationshipType:hasDependencyManifest": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDependencyManifest",
2351 "RelationshipType:hasDistributionArtifact": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDistributionArtifact",
2352 "RelationshipType:hasDocumentation": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDocumentation",
2353 "RelationshipType:hasDynamicLink": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDynamicLink",
2354 "RelationshipType:hasEvidence": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasEvidence",
2355 "RelationshipType:hasExample": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasExample",
2356 "RelationshipType:hasHost": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasHost",
2357 "RelationshipType:hasInputs": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasInputs",
2358 "RelationshipType:hasMetadata": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasMetadata",
2359 "RelationshipType:hasOptionalComponent": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOptionalComponent",
2360 "RelationshipType:hasOptionalDependency": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOptionalDependency",
2361 "RelationshipType:hasOutputs": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOutputs",
2362 "RelationshipType:hasPrerequsite": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasPrerequsite",
2363 "RelationshipType:hasProvidedDependency": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasProvidedDependency",
2364 "RelationshipType:hasRequirement": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasRequirement",
2365 "RelationshipType:hasSpecification": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasSpecification",
2366 "RelationshipType:hasStaticLink": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasStaticLink",
2367 "RelationshipType:hasTest": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasTest",
2368 "RelationshipType:hasTestCase": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasTestCase",
2369 "RelationshipType:hasVariant": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasVariant",
2370 "RelationshipType:invokedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/invokedBy",
2371 "RelationshipType:modifiedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/modifiedBy",
2372 "RelationshipType:other": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/other",
2373 "RelationshipType:packagedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/packagedBy",
2374 "RelationshipType:patchedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/patchedBy",
2375 "RelationshipType:publishedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/publishedBy",
2376 "RelationshipType:reportedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/reportedBy",
2377 "RelationshipType:republishedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/republishedBy",
2378 "RelationshipType:serializedInArtifact": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/serializedInArtifact",
2379 "RelationshipType:testedOn": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/testedOn",
2380 "RelationshipType:trainedOn": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/trainedOn",
2381 "RelationshipType:underInvestigationFor": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/underInvestigationFor",
2382 "RelationshipType:usesTool": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/usesTool",
2383 "SupportType:deployed": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/deployed",
2384 "SupportType:development": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/development",
2385 "SupportType:endOfSupport": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/endOfSupport",
2386 "SupportType:limitedSupport": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/limitedSupport",
2387 "SupportType:noAssertion": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/noAssertion",
2388 "SupportType:noSupport": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/noSupport",
2389 "SupportType:support": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/support",
2390 "dataset_ConfidentialityLevelType:amber": "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/amber",
2391 "dataset_ConfidentialityLevelType:clear": "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/clear",
2392 "dataset_ConfidentialityLevelType:green": "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/green",
2393 "dataset_ConfidentialityLevelType:red": "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/red",
2394 "dataset_DatasetAvailabilityType:clickthrough": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/clickthrough",
2395 "dataset_DatasetAvailabilityType:directDownload": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/directDownload",
2396 "dataset_DatasetAvailabilityType:query": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/query",
2397 "dataset_DatasetAvailabilityType:registration": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/registration",
2398 "dataset_DatasetAvailabilityType:scrapingScript": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/scrapingScript",
2399 "dataset_DatasetType:audio": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/audio",
2400 "dataset_DatasetType:categorical": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/categorical",
2401 "dataset_DatasetType:graph": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/graph",
2402 "dataset_DatasetType:image": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/image",
2403 "dataset_DatasetType:noAssertion": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/noAssertion",
2404 "dataset_DatasetType:numeric": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/numeric",
2405 "dataset_DatasetType:other": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/other",
2406 "dataset_DatasetType:sensor": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/sensor",
2407 "dataset_DatasetType:structured": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/structured",
2408 "dataset_DatasetType:syntactic": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/syntactic",
2409 "dataset_DatasetType:text": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/text",
2410 "dataset_DatasetType:timeseries": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/timeseries",
2411 "dataset_DatasetType:timestamp": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/timestamp",
2412 "dataset_DatasetType:video": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/video",
2413 "security_CvssSeverityType:critical": "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/critical",
2414 "security_CvssSeverityType:high": "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/high",
2415 "security_CvssSeverityType:low": "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/low",
2416 "security_CvssSeverityType:medium": "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/medium",
2417 "security_CvssSeverityType:none": "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/none",
2418 "security_ExploitCatalogType:kev": "https://spdx.org/rdf/3.0.0/terms/Security/ExploitCatalogType/kev",
2419 "security_ExploitCatalogType:other": "https://spdx.org/rdf/3.0.0/terms/Security/ExploitCatalogType/other",
2420 "security_SsvcDecisionType:act": "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/act",
2421 "security_SsvcDecisionType:attend": "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/attend",
2422 "security_SsvcDecisionType:track": "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/track",
2423 "security_SsvcDecisionType:trackStar": "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/trackStar",
2424 "security_VexJustificationType:componentNotPresent": "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/componentNotPresent",
2425 "security_VexJustificationType:inlineMitigationsAlreadyExist": "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/inlineMitigationsAlreadyExist",
2426 "security_VexJustificationType:vulnerableCodeCannotBeControlledByAdversary": "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeCannotBeControlledByAdversary",
2427 "security_VexJustificationType:vulnerableCodeNotInExecutePath": "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeNotInExecutePath",
2428 "security_VexJustificationType:vulnerableCodeNotPresent": "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeNotPresent",
2429 "software_ContentIdentifierType:gitoid": "https://spdx.org/rdf/3.0.0/terms/Software/ContentIdentifierType/gitoid",
2430 "software_ContentIdentifierType:swhid": "https://spdx.org/rdf/3.0.0/terms/Software/ContentIdentifierType/swhid",
2431 "software_FileKindType:directory": "https://spdx.org/rdf/3.0.0/terms/Software/FileKindType/directory",
2432 "software_FileKindType:file": "https://spdx.org/rdf/3.0.0/terms/Software/FileKindType/file",
2433 "software_SbomType:analyzed": "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/analyzed",
2434 "software_SbomType:build": "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/build",
2435 "software_SbomType:deployed": "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/deployed",
2436 "software_SbomType:design": "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/design",
2437 "software_SbomType:runtime": "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/runtime",
2438 "software_SbomType:source": "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/source",
2439 "software_SoftwarePurpose:application": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/application",
2440 "software_SoftwarePurpose:archive": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/archive",
2441 "software_SoftwarePurpose:bom": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/bom",
2442 "software_SoftwarePurpose:configuration": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/configuration",
2443 "software_SoftwarePurpose:container": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/container",
2444 "software_SoftwarePurpose:data": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/data",
2445 "software_SoftwarePurpose:device": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/device",
2446 "software_SoftwarePurpose:deviceDriver": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/deviceDriver",
2447 "software_SoftwarePurpose:diskImage": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/diskImage",
2448 "software_SoftwarePurpose:documentation": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/documentation",
2449 "software_SoftwarePurpose:evidence": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/evidence",
2450 "software_SoftwarePurpose:executable": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/executable",
2451 "software_SoftwarePurpose:file": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/file",
2452 "software_SoftwarePurpose:filesystemImage": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/filesystemImage",
2453 "software_SoftwarePurpose:firmware": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/firmware",
2454 "software_SoftwarePurpose:framework": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/framework",
2455 "software_SoftwarePurpose:install": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/install",
2456 "software_SoftwarePurpose:library": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/library",
2457 "software_SoftwarePurpose:manifest": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/manifest",
2458 "software_SoftwarePurpose:model": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/model",
2459 "software_SoftwarePurpose:module": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/module",
2460 "software_SoftwarePurpose:operatingSystem": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/operatingSystem",
2461 "software_SoftwarePurpose:other": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/other",
2462 "software_SoftwarePurpose:patch": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/patch",
2463 "software_SoftwarePurpose:platform": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/platform",
2464 "software_SoftwarePurpose:requirement": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/requirement",
2465 "software_SoftwarePurpose:source": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/source",
2466 "software_SoftwarePurpose:specification": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/specification",
2467 "software_SoftwarePurpose:test": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/test",
2468 "spdx:ExpandedLicensing/NoAssertionLicense": "https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/NoAssertionLicense",
2469 "spdx:ExpandedLicensing/NoneLicense": "https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/NoneLicense",
2470}
2471
2472
2473# CLASSES
2474# The class that contains properties to describe energy consumption incurred
2475# by an AI model in different stages of its lifecycle.
2476@register("https://spdx.org/rdf/3.0.0/terms/AI/EnergyConsumption", compact_type="ai_EnergyConsumption", abstract=False)
2477class ai_EnergyConsumption(SHACLObject):
2478 NODE_KIND = NodeKind.BlankNodeOrIRI
2479 NAMED_INDIVIDUALS = {
2480 }
2481
2482 @classmethod
2483 def _register_props(cls):
2484 super()._register_props()
2485 # Specifies the amount of energy consumed when finetuning the AI model that is
2486 # being used in the AI system.
2487 cls._add_property(
2488 "ai_finetuningEnergyConsumption",
2489 ListProp(ObjectProp(ai_EnergyConsumptionDescription, False)),
2490 iri="https://spdx.org/rdf/3.0.0/terms/AI/finetuningEnergyConsumption",
2491 compact="ai_finetuningEnergyConsumption",
2492 )
2493 # Specifies the amount of energy consumed during inference time by an AI model
2494 # that is being used in the AI system.
2495 cls._add_property(
2496 "ai_inferenceEnergyConsumption",
2497 ListProp(ObjectProp(ai_EnergyConsumptionDescription, False)),
2498 iri="https://spdx.org/rdf/3.0.0/terms/AI/inferenceEnergyConsumption",
2499 compact="ai_inferenceEnergyConsumption",
2500 )
2501 # Specifies the amount of energy consumed when training the AI model that is
2502 # being used in the AI system.
2503 cls._add_property(
2504 "ai_trainingEnergyConsumption",
2505 ListProp(ObjectProp(ai_EnergyConsumptionDescription, False)),
2506 iri="https://spdx.org/rdf/3.0.0/terms/AI/trainingEnergyConsumption",
2507 compact="ai_trainingEnergyConsumption",
2508 )
2509
2510
2511# The class that helps note down the quantity of energy consumption and the unit
2512# used for measurement.
2513@register("https://spdx.org/rdf/3.0.0/terms/AI/EnergyConsumptionDescription", compact_type="ai_EnergyConsumptionDescription", abstract=False)
2514class ai_EnergyConsumptionDescription(SHACLObject):
2515 NODE_KIND = NodeKind.BlankNodeOrIRI
2516 NAMED_INDIVIDUALS = {
2517 }
2518
2519 @classmethod
2520 def _register_props(cls):
2521 super()._register_props()
2522 # Represents the energy quantity.
2523 cls._add_property(
2524 "ai_energyQuantity",
2525 FloatProp(),
2526 iri="https://spdx.org/rdf/3.0.0/terms/AI/energyQuantity",
2527 min_count=1,
2528 compact="ai_energyQuantity",
2529 )
2530 # Specifies the unit in which energy is measured.
2531 cls._add_property(
2532 "ai_energyUnit",
2533 EnumProp([
2534 ("https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/kilowattHour", "kilowattHour"),
2535 ("https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/megajoule", "megajoule"),
2536 ("https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/other", "other"),
2537 ]),
2538 iri="https://spdx.org/rdf/3.0.0/terms/AI/energyUnit",
2539 min_count=1,
2540 compact="ai_energyUnit",
2541 )
2542
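For illustration, a minimal sketch of how the two AI energy classes above might be used once this module is importable as oe.spdx30 (meta/lib on sys.path, as in BitBake tasks); it assumes the shacl2code-generated constructors accept registered property names as keyword arguments and that list-valued properties accept plain Python lists, and the quantity is a made-up placeholder.

import oe.spdx30 as spdx30

# One description per lifecycle stage: a float quantity plus one of the
# ai_EnergyUnitType IRIs (both mandatory, min_count=1).
training = spdx30.ai_EnergyConsumptionDescription(
    ai_energyQuantity=1250.0,
    ai_energyUnit=spdx30.ai_EnergyUnitType.kilowattHour,
)

# The container groups descriptions by lifecycle stage; each property is
# list-valued (ListProp), so a list of descriptions is assigned or appended.
consumption = spdx30.ai_EnergyConsumption()
consumption.ai_trainingEnergyConsumption = [training]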
2543
2544# Specifies the unit of energy consumption.
2545@register("https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType", compact_type="ai_EnergyUnitType", abstract=False)
2546class ai_EnergyUnitType(SHACLObject):
2547 NODE_KIND = NodeKind.BlankNodeOrIRI
2548 NAMED_INDIVIDUALS = {
2549 "kilowattHour": "https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/kilowattHour",
2550 "megajoule": "https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/megajoule",
2551 "other": "https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/other",
2552 }
2553 # Kilowatt-hour.
2554 kilowattHour = "https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/kilowattHour"
2555 # Megajoule.
2556 megajoule = "https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/megajoule"
2557 # Any other units of energy measurement.
2558 other = "https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/other"
2559
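The enum-style class attributes above are plain IRI strings identical to the values in NAMED_INDIVIDUALS, so they can be compared or assigned directly wherever an EnumProp-backed property expects one of these IRIs; for example (assuming the module imports as oe.spdx30):

import oe.spdx30 as spdx30

# The constant is simply the full SPDX 3.0.0 vocabulary IRI.
assert spdx30.ai_EnergyUnitType.megajoule == \
    "https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/megajoule"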
2560
2561# Specifies the safety risk level.
2562@register("https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType", compact_type="ai_SafetyRiskAssessmentType", abstract=False)
2563class ai_SafetyRiskAssessmentType(SHACLObject):
2564 NODE_KIND = NodeKind.BlankNodeOrIRI
2565 NAMED_INDIVIDUALS = {
2566 "high": "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/high",
2567 "low": "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/low",
2568 "medium": "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/medium",
2569 "serious": "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/serious",
2570 }
2571 # The second-highest level of risk posed by an AI system.
2572 high = "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/high"
2573 # Low/no risk is posed by an AI system.
2574 low = "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/low"
2575 # The third-highest level of risk posed by an AI system.
2576 medium = "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/medium"
2577 # The highest level of risk posed by an AI system.
2578 serious = "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/serious"
2579
2580
2581# Specifies the type of an annotation.
2582@register("https://spdx.org/rdf/3.0.0/terms/Core/AnnotationType", compact_type="AnnotationType", abstract=False)
2583class AnnotationType(SHACLObject):
2584 NODE_KIND = NodeKind.BlankNodeOrIRI
2585 NAMED_INDIVIDUALS = {
2586 "other": "https://spdx.org/rdf/3.0.0/terms/Core/AnnotationType/other",
2587 "review": "https://spdx.org/rdf/3.0.0/terms/Core/AnnotationType/review",
2588 }
2589 # Used to store extra information about an Element which is not part of a Review (e.g. extra information provided during the creation of the Element).
2590 other = "https://spdx.org/rdf/3.0.0/terms/Core/AnnotationType/other"
2591 # Used when someone reviews the Element.
2592 review = "https://spdx.org/rdf/3.0.0/terms/Core/AnnotationType/review"
2593
2594
2595# Provides information about the creation of the Element.
2596@register("https://spdx.org/rdf/3.0.0/terms/Core/CreationInfo", compact_type="CreationInfo", abstract=False)
2597class CreationInfo(SHACLObject):
2598 NODE_KIND = NodeKind.BlankNodeOrIRI
2599 NAMED_INDIVIDUALS = {
2600 }
2601
2602 @classmethod
2603 def _register_props(cls):
2604 super()._register_props()
2605 # Provide consumers with comments by the creator of the Element about the
2606 # Element.
2607 cls._add_property(
2608 "comment",
2609 StringProp(),
2610 iri="https://spdx.org/rdf/3.0.0/terms/Core/comment",
2611 compact="comment",
2612 )
2613 # Identifies when the Element was originally created.
2614 cls._add_property(
2615 "created",
2616 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
2617 iri="https://spdx.org/rdf/3.0.0/terms/Core/created",
2618 min_count=1,
2619 compact="created",
2620 )
2621 # Identifies who or what created the Element.
2622 cls._add_property(
2623 "createdBy",
2624 ListProp(ObjectProp(Agent, False)),
2625 iri="https://spdx.org/rdf/3.0.0/terms/Core/createdBy",
2626 min_count=1,
2627 compact="createdBy",
2628 )
2629 # Identifies the tooling that was used during the creation of the Element.
2630 cls._add_property(
2631 "createdUsing",
2632 ListProp(ObjectProp(Tool, False)),
2633 iri="https://spdx.org/rdf/3.0.0/terms/Core/createdUsing",
2634 compact="createdUsing",
2635 )
2636 # Provides a reference number that can be used to understand how to parse and interpret an Element.
2637 cls._add_property(
2638 "specVersion",
2639 StringProp(pattern=r"^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$",),
2640 iri="https://spdx.org/rdf/3.0.0/terms/Core/specVersion",
2641 min_count=1,
2642 compact="specVersion",
2643 )
2644
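A hedged sketch of filling in CreationInfo, whose created, createdBy and specVersion properties are all mandatory (min_count=1); it assumes DateTimeStampProp accepts a timezone-aware datetime and serializes it to the Z-suffixed pattern above, that keyword-argument construction works as in the earlier examples, and that the concrete Agent class defined later in this module is a valid creator; the spdxId URL is a placeholder.

from datetime import datetime, timezone
import oe.spdx30 as spdx30

ci = spdx30.CreationInfo(
    created=datetime.now(timezone.utc),  # rendered as YYYY-MM-DDTHH:MM:SSZ
    specVersion="3.0.0",
)

# createdBy is a mandatory list of Agents; the creating Agent conventionally
# reuses the same CreationInfo object.
agent = spdx30.Agent(_id="http://example.com/my-agent", creationInfo=ci)
ci.createdBy = [agent]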
2645
2646# A key with an associated value.
2647@register("https://spdx.org/rdf/3.0.0/terms/Core/DictionaryEntry", compact_type="DictionaryEntry", abstract=False)
2648class DictionaryEntry(SHACLObject):
2649 NODE_KIND = NodeKind.BlankNodeOrIRI
2650 NAMED_INDIVIDUALS = {
2651 }
2652
2653 @classmethod
2654 def _register_props(cls):
2655 super()._register_props()
2656 # A key used in a generic key-value pair.
2657 cls._add_property(
2658 "key",
2659 StringProp(),
2660 iri="https://spdx.org/rdf/3.0.0/terms/Core/key",
2661 min_count=1,
2662 compact="key",
2663 )
2664 # A value used in a generic key-value pair.
2665 cls._add_property(
2666 "value",
2667 StringProp(),
2668 iri="https://spdx.org/rdf/3.0.0/terms/Core/value",
2669 compact="value",
2670 )
2671
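A DictionaryEntry is just a key with an optional value (only key carries min_count=1); a minimal sketch with placeholder strings, under the same keyword-argument assumption as above:

import oe.spdx30 as spdx30

entry = spdx30.DictionaryEntry(key="layer", value="meta")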
2672
2673# Base domain class from which all other SPDX-3.0 domain classes derive.
2674@register("https://spdx.org/rdf/3.0.0/terms/Core/Element", compact_type="Element", abstract=True)
2675class Element(SHACLObject):
2676 NODE_KIND = NodeKind.BlankNodeOrIRI
2677 ID_ALIAS = "spdxId"
2678 NAMED_INDIVIDUALS = {
2679 "NoAssertionElement": "https://spdx.org/rdf/3.0.0/terms/Core/NoAssertionElement",
2680 "NoneElement": "https://spdx.org/rdf/3.0.0/terms/Core/NoneElement",
2681 }
2682 # An Individual Value for Element representing a set of Elements of unknown
2683    # identity or cardinality (number).
2684 NoAssertionElement = "https://spdx.org/rdf/3.0.0/terms/Core/NoAssertionElement"
2685 # An Individual Value for Element representing a set of Elements with
2686 # cardinality (number/count) of zero.
2687 NoneElement = "https://spdx.org/rdf/3.0.0/terms/Core/NoneElement"
2688
2689 @classmethod
2690 def _register_props(cls):
2691 super()._register_props()
2692 # Provide consumers with comments by the creator of the Element about the
2693 # Element.
2694 cls._add_property(
2695 "comment",
2696 StringProp(),
2697 iri="https://spdx.org/rdf/3.0.0/terms/Core/comment",
2698 compact="comment",
2699 )
2700 # Provides information about the creation of the Element.
2701 cls._add_property(
2702 "creationInfo",
2703 ObjectProp(CreationInfo, True),
2704 iri="https://spdx.org/rdf/3.0.0/terms/Core/creationInfo",
2705 min_count=1,
2706 compact="creationInfo",
2707 )
2708 # Provides a detailed description of the Element.
2709 cls._add_property(
2710 "description",
2711 StringProp(),
2712 iri="https://spdx.org/rdf/3.0.0/terms/Core/description",
2713 compact="description",
2714 )
2715 # Specifies an Extension characterization of some aspect of an Element.
2716 cls._add_property(
2717 "extension",
2718 ListProp(ObjectProp(extension_Extension, False)),
2719 iri="https://spdx.org/rdf/3.0.0/terms/Core/extension",
2720 compact="extension",
2721 )
2722 # Provides a reference to a resource outside the scope of SPDX-3.0 content
2723 # that uniquely identifies an Element.
2724 cls._add_property(
2725 "externalIdentifier",
2726 ListProp(ObjectProp(ExternalIdentifier, False)),
2727 iri="https://spdx.org/rdf/3.0.0/terms/Core/externalIdentifier",
2728 compact="externalIdentifier",
2729 )
2730 # Points to a resource outside the scope of the SPDX-3.0 content
2731 # that provides additional characteristics of an Element.
2732 cls._add_property(
2733 "externalRef",
2734 ListProp(ObjectProp(ExternalRef, False)),
2735 iri="https://spdx.org/rdf/3.0.0/terms/Core/externalRef",
2736 compact="externalRef",
2737 )
2738 # Identifies the name of an Element as designated by the creator.
2739 cls._add_property(
2740 "name",
2741 StringProp(),
2742 iri="https://spdx.org/rdf/3.0.0/terms/Core/name",
2743 compact="name",
2744 )
2745 # A short description of an Element.
2746 cls._add_property(
2747 "summary",
2748 StringProp(),
2749 iri="https://spdx.org/rdf/3.0.0/terms/Core/summary",
2750 compact="summary",
2751 )
2752 # Provides an IntegrityMethod with which the integrity of an Element can be
2753 # asserted.
2754 cls._add_property(
2755 "verifiedUsing",
2756 ListProp(ObjectProp(IntegrityMethod, False)),
2757 iri="https://spdx.org/rdf/3.0.0/terms/Core/verifiedUsing",
2758 compact="verifiedUsing",
2759 )
2760
2761
2762# A collection of Elements, not necessarily with unifying context.
2763@register("https://spdx.org/rdf/3.0.0/terms/Core/ElementCollection", compact_type="ElementCollection", abstract=True)
2764class ElementCollection(Element):
2765 NODE_KIND = NodeKind.BlankNodeOrIRI
2766 ID_ALIAS = "spdxId"
2767 NAMED_INDIVIDUALS = {
2768 }
2769
2770 @classmethod
2771 def _register_props(cls):
2772 super()._register_props()
2773 # Refers to one or more Elements that are part of an ElementCollection.
2774 cls._add_property(
2775 "element",
2776 ListProp(ObjectProp(Element, False)),
2777 iri="https://spdx.org/rdf/3.0.0/terms/Core/element",
2778 compact="element",
2779 )
2780        # Describes a profile which the creator of this ElementCollection intends to
2781        # conform to.
2782 cls._add_property(
2783 "profileConformance",
2784 ListProp(EnumProp([
2785 ("https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/ai", "ai"),
2786 ("https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/build", "build"),
2787 ("https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/core", "core"),
2788 ("https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/dataset", "dataset"),
2789 ("https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/expandedLicensing", "expandedLicensing"),
2790 ("https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/extension", "extension"),
2791 ("https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/lite", "lite"),
2792 ("https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/security", "security"),
2793 ("https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/simpleLicensing", "simpleLicensing"),
2794 ("https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/software", "software"),
2795 ])),
2796 iri="https://spdx.org/rdf/3.0.0/terms/Core/profileConformance",
2797 compact="profileConformance",
2798 )
2799 # This property is used to denote the root Element(s) of a tree of elements contained in a BOM.
2800 cls._add_property(
2801 "rootElement",
2802 ListProp(ObjectProp(Element, False)),
2803 iri="https://spdx.org/rdf/3.0.0/terms/Core/rootElement",
2804 compact="rootElement",
2805 )
2806
2807
2808# A reference to a resource identifier defined outside the scope of SPDX-3.0 content that uniquely identifies an Element.
2809@register("https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifier", compact_type="ExternalIdentifier", abstract=False)
2810class ExternalIdentifier(SHACLObject):
2811 NODE_KIND = NodeKind.BlankNodeOrIRI
2812 NAMED_INDIVIDUALS = {
2813 }
2814
2815 @classmethod
2816 def _register_props(cls):
2817 super()._register_props()
2818 # Provide consumers with comments by the creator of the Element about the
2819 # Element.
2820 cls._add_property(
2821 "comment",
2822 StringProp(),
2823 iri="https://spdx.org/rdf/3.0.0/terms/Core/comment",
2824 compact="comment",
2825 )
2826 # Specifies the type of the external identifier.
2827 cls._add_property(
2828 "externalIdentifierType",
2829 EnumProp([
2830 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cpe22", "cpe22"),
2831 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cpe23", "cpe23"),
2832 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cve", "cve"),
2833 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/email", "email"),
2834 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/gitoid", "gitoid"),
2835 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/other", "other"),
2836 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/packageUrl", "packageUrl"),
2837 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/securityOther", "securityOther"),
2838 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/swhid", "swhid"),
2839 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/swid", "swid"),
2840 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/urlScheme", "urlScheme"),
2841 ]),
2842 iri="https://spdx.org/rdf/3.0.0/terms/Core/externalIdentifierType",
2843 min_count=1,
2844 compact="externalIdentifierType",
2845 )
2846 # Uniquely identifies an external element.
2847 cls._add_property(
2848 "identifier",
2849 StringProp(),
2850 iri="https://spdx.org/rdf/3.0.0/terms/Core/identifier",
2851 min_count=1,
2852 compact="identifier",
2853 )
2854 # Provides the location for more information regarding an external identifier.
2855 cls._add_property(
2856 "identifierLocator",
2857 ListProp(AnyURIProp()),
2858 iri="https://spdx.org/rdf/3.0.0/terms/Core/identifierLocator",
2859 compact="identifierLocator",
2860 )
2861 # An entity that is authorized to issue identification credentials.
2862 cls._add_property(
2863 "issuingAuthority",
2864 StringProp(),
2865 iri="https://spdx.org/rdf/3.0.0/terms/Core/issuingAuthority",
2866 compact="issuingAuthority",
2867 )
2868
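A sketch of an ExternalIdentifier carrying a package URL; externalIdentifierType and identifier are the two mandatory properties, the purl below is a made-up placeholder, and the same keyword-argument assumption applies. Such an object would typically be appended to an Element's externalIdentifier list.

import oe.spdx30 as spdx30

ext_id = spdx30.ExternalIdentifier(
    externalIdentifierType=spdx30.ExternalIdentifierType.packageUrl,
    identifier="pkg:generic/busybox@1.36.1",
)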
2869
2870# Specifies the type of an external identifier.
2871@register("https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType", compact_type="ExternalIdentifierType", abstract=False)
2872class ExternalIdentifierType(SHACLObject):
2873 NODE_KIND = NodeKind.BlankNodeOrIRI
2874 NAMED_INDIVIDUALS = {
2875 "cpe22": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cpe22",
2876 "cpe23": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cpe23",
2877 "cve": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cve",
2878 "email": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/email",
2879 "gitoid": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/gitoid",
2880 "other": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/other",
2881 "packageUrl": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/packageUrl",
2882 "securityOther": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/securityOther",
2883 "swhid": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/swhid",
2884 "swid": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/swid",
2885 "urlScheme": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/urlScheme",
2886 }
2887 # https://cpe.mitre.org/files/cpe-specification_2.2.pdf
2888 cpe22 = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cpe22"
2889 # https://nvlpubs.nist.gov/nistpubs/Legacy/IR/nistir7695.pdf
2890 cpe23 = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cpe23"
2891 # An identifier for a specific software flaw defined within the official CVE Dictionary and that conforms to the CVE specification as defined by https://csrc.nist.gov/glossary/term/cve_id.
2892 cve = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cve"
2893 # https://datatracker.ietf.org/doc/html/rfc3696#section-3
2894 email = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/email"
2895 # https://www.iana.org/assignments/uri-schemes/prov/gitoid Gitoid stands for [Git Object ID](https://git-scm.com/book/en/v2/Git-Internals-Git-Objects) and a gitoid of type blob is a unique hash of a binary artifact. A gitoid may represent the software [Artifact ID](https://github.com/omnibor/spec/blob/main/spec/SPEC.md#artifact-id) or the [OmniBOR Identifier](https://github.com/omnibor/spec/blob/main/spec/SPEC.md#omnibor-identifier) for the software artifact's associated [OmniBOR Document](https://github.com/omnibor/spec/blob/main/spec/SPEC.md#omnibor-document); this ambiguity exists because the OmniBOR Document is itself an artifact, and the gitoid of that artifact is its valid identifier. Omnibor is a minimalistic schema to describe software [Artifact Dependency Graphs](https://github.com/omnibor/spec/blob/main/spec/SPEC.md#artifact-dependency-graph-adg). Gitoids calculated on software artifacts (Snippet, File, or Package Elements) should be recorded in the SPDX 3.0 SoftwareArtifact's ContentIdentifier property. Gitoids calculated on the OmniBOR Document (OmniBOR Identifiers) should be recorded in the SPDX 3.0 Element's ExternalIdentifier property.
2896 gitoid = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/gitoid"
2897 # Used when the type doesn't match any of the other options.
2898 other = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/other"
2899 # https://github.com/package-url/purl-spec
2900 packageUrl = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/packageUrl"
2901 # Used when there is a security related identifier of unspecified type.
2902 securityOther = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/securityOther"
2903 # SoftWare Hash IDentifier, persistent intrinsic identifiers for digital artifacts, such as files, trees (also known as directories or folders), commits, and other objects typically found in version control systems. The syntax of the identifiers is defined in the [SWHID specification](https://www.swhid.org/specification/v1.1/4.Syntax) and they typically look like `swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2`.
2904 swhid = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/swhid"
2905 # https://www.ietf.org/archive/id/draft-ietf-sacm-coswid-21.html#section-2.3
2906 swid = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/swid"
2907    # The scheme used in order to locate a resource: https://www.iana.org/assignments/uri-schemes/uri-schemes.xhtml
2908 urlScheme = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/urlScheme"
2909
2910
2911# A map of Element identifiers that are used within a Document but defined external to that Document.
2912@register("https://spdx.org/rdf/3.0.0/terms/Core/ExternalMap", compact_type="ExternalMap", abstract=False)
2913class ExternalMap(SHACLObject):
2914 NODE_KIND = NodeKind.BlankNodeOrIRI
2915 NAMED_INDIVIDUALS = {
2916 }
2917
2918 @classmethod
2919 def _register_props(cls):
2920 super()._register_props()
2921 # Artifact representing a serialization instance of SPDX data containing the
2922 # definition of a particular Element.
2923 cls._add_property(
2924 "definingArtifact",
2925 ObjectProp(Artifact, False),
2926 iri="https://spdx.org/rdf/3.0.0/terms/Core/definingArtifact",
2927 compact="definingArtifact",
2928 )
2929 # Identifies an external Element used within a Document but defined external to
2930 # that Document.
2931 cls._add_property(
2932 "externalSpdxId",
2933 AnyURIProp(),
2934 iri="https://spdx.org/rdf/3.0.0/terms/Core/externalSpdxId",
2935 min_count=1,
2936 compact="externalSpdxId",
2937 )
2938 # Provides an indication of where to retrieve an external Element.
2939 cls._add_property(
2940 "locationHint",
2941 AnyURIProp(),
2942 iri="https://spdx.org/rdf/3.0.0/terms/Core/locationHint",
2943 compact="locationHint",
2944 )
2945 # Provides an IntegrityMethod with which the integrity of an Element can be
2946 # asserted.
2947 cls._add_property(
2948 "verifiedUsing",
2949 ListProp(ObjectProp(IntegrityMethod, False)),
2950 iri="https://spdx.org/rdf/3.0.0/terms/Core/verifiedUsing",
2951 compact="verifiedUsing",
2952 )
2953
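A sketch of an ExternalMap entry pointing at an Element defined in another SPDX serialization; only externalSpdxId is mandatory, both URLs are hypothetical placeholders, and keyword-argument construction is assumed as above.

import oe.spdx30 as spdx30

ext_map = spdx30.ExternalMap(
    externalSpdxId="http://spdx.example.com/other-doc/some-element",
    locationHint="http://spdx.example.com/other-doc.spdx.json",
)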
2954
2955# A reference to a resource outside the scope of SPDX-3.0 content related to an Element.
2956@register("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRef", compact_type="ExternalRef", abstract=False)
2957class ExternalRef(SHACLObject):
2958 NODE_KIND = NodeKind.BlankNodeOrIRI
2959 NAMED_INDIVIDUALS = {
2960 }
2961
2962 @classmethod
2963 def _register_props(cls):
2964 super()._register_props()
2965 # Provide consumers with comments by the creator of the Element about the
2966 # Element.
2967 cls._add_property(
2968 "comment",
2969 StringProp(),
2970 iri="https://spdx.org/rdf/3.0.0/terms/Core/comment",
2971 compact="comment",
2972 )
2973 # Specifies the media type of an Element or Property.
2974 cls._add_property(
2975 "contentType",
2976 StringProp(pattern=r"^[^\/]+\/[^\/]+$",),
2977 iri="https://spdx.org/rdf/3.0.0/terms/Core/contentType",
2978 compact="contentType",
2979 )
2980 # Specifies the type of the external reference.
2981 cls._add_property(
2982 "externalRefType",
2983 EnumProp([
2984 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/altDownloadLocation", "altDownloadLocation"),
2985 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/altWebPage", "altWebPage"),
2986 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/binaryArtifact", "binaryArtifact"),
2987 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/bower", "bower"),
2988 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/buildMeta", "buildMeta"),
2989 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/buildSystem", "buildSystem"),
2990 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/certificationReport", "certificationReport"),
2991 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/chat", "chat"),
2992 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/componentAnalysisReport", "componentAnalysisReport"),
2993 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/cwe", "cwe"),
2994 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/documentation", "documentation"),
2995 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/dynamicAnalysisReport", "dynamicAnalysisReport"),
2996 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/eolNotice", "eolNotice"),
2997 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/exportControlAssessment", "exportControlAssessment"),
2998 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/funding", "funding"),
2999 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/issueTracker", "issueTracker"),
3000 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/license", "license"),
3001 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/mailingList", "mailingList"),
3002 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/mavenCentral", "mavenCentral"),
3003 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/metrics", "metrics"),
3004 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/npm", "npm"),
3005 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/nuget", "nuget"),
3006 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/other", "other"),
3007 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/privacyAssessment", "privacyAssessment"),
3008 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/productMetadata", "productMetadata"),
3009 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/purchaseOrder", "purchaseOrder"),
3010 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/qualityAssessmentReport", "qualityAssessmentReport"),
3011 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/releaseHistory", "releaseHistory"),
3012 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/releaseNotes", "releaseNotes"),
3013 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/riskAssessment", "riskAssessment"),
3014 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/runtimeAnalysisReport", "runtimeAnalysisReport"),
3015 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/secureSoftwareAttestation", "secureSoftwareAttestation"),
3016 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityAdversaryModel", "securityAdversaryModel"),
3017 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityAdvisory", "securityAdvisory"),
3018 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityFix", "securityFix"),
3019 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityOther", "securityOther"),
3020 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityPenTestReport", "securityPenTestReport"),
3021 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityPolicy", "securityPolicy"),
3022 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityThreatModel", "securityThreatModel"),
3023 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/socialMedia", "socialMedia"),
3024 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/sourceArtifact", "sourceArtifact"),
3025 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/staticAnalysisReport", "staticAnalysisReport"),
3026 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/support", "support"),
3027 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vcs", "vcs"),
3028 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vulnerabilityDisclosureReport", "vulnerabilityDisclosureReport"),
3029 ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vulnerabilityExploitabilityAssessment", "vulnerabilityExploitabilityAssessment"),
3030 ]),
3031 iri="https://spdx.org/rdf/3.0.0/terms/Core/externalRefType",
3032 compact="externalRefType",
3033 )
3034 # Provides the location of an external reference.
3035 cls._add_property(
3036 "locator",
3037 ListProp(StringProp()),
3038 iri="https://spdx.org/rdf/3.0.0/terms/Core/locator",
3039 compact="locator",
3040 )
3041
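A sketch of an ExternalRef pointing at an upstream version control repository; externalRefType takes one of the ExternalRefType IRIs defined just below, locator is list-valued, the repository URL is a placeholder, and keyword-argument construction is assumed as in the earlier examples.

import oe.spdx30 as spdx30

ref = spdx30.ExternalRef(
    externalRefType=spdx30.ExternalRefType.vcs,
    locator=["https://git.example.com/project.git"],
)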
3042
3043# Specifies the type of an external reference.
3044@register("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType", compact_type="ExternalRefType", abstract=False)
3045class ExternalRefType(SHACLObject):
3046 NODE_KIND = NodeKind.BlankNodeOrIRI
3047 NAMED_INDIVIDUALS = {
3048 "altDownloadLocation": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/altDownloadLocation",
3049 "altWebPage": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/altWebPage",
3050 "binaryArtifact": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/binaryArtifact",
3051 "bower": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/bower",
3052 "buildMeta": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/buildMeta",
3053 "buildSystem": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/buildSystem",
3054 "certificationReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/certificationReport",
3055 "chat": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/chat",
3056 "componentAnalysisReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/componentAnalysisReport",
3057 "cwe": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/cwe",
3058 "documentation": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/documentation",
3059 "dynamicAnalysisReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/dynamicAnalysisReport",
3060 "eolNotice": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/eolNotice",
3061 "exportControlAssessment": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/exportControlAssessment",
3062 "funding": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/funding",
3063 "issueTracker": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/issueTracker",
3064 "license": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/license",
3065 "mailingList": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/mailingList",
3066 "mavenCentral": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/mavenCentral",
3067 "metrics": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/metrics",
3068 "npm": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/npm",
3069 "nuget": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/nuget",
3070 "other": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/other",
3071 "privacyAssessment": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/privacyAssessment",
3072 "productMetadata": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/productMetadata",
3073 "purchaseOrder": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/purchaseOrder",
3074 "qualityAssessmentReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/qualityAssessmentReport",
3075 "releaseHistory": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/releaseHistory",
3076 "releaseNotes": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/releaseNotes",
3077 "riskAssessment": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/riskAssessment",
3078 "runtimeAnalysisReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/runtimeAnalysisReport",
3079 "secureSoftwareAttestation": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/secureSoftwareAttestation",
3080 "securityAdversaryModel": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityAdversaryModel",
3081 "securityAdvisory": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityAdvisory",
3082 "securityFix": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityFix",
3083 "securityOther": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityOther",
3084 "securityPenTestReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityPenTestReport",
3085 "securityPolicy": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityPolicy",
3086 "securityThreatModel": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityThreatModel",
3087 "socialMedia": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/socialMedia",
3088 "sourceArtifact": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/sourceArtifact",
3089 "staticAnalysisReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/staticAnalysisReport",
3090 "support": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/support",
3091 "vcs": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vcs",
3092 "vulnerabilityDisclosureReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vulnerabilityDisclosureReport",
3093 "vulnerabilityExploitabilityAssessment": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vulnerabilityExploitabilityAssessment",
3094 }
3095 # A reference to an alternative download location.
3096 altDownloadLocation = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/altDownloadLocation"
3097 # A reference to an alternative web page.
3098 altWebPage = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/altWebPage"
3099 # A reference to binary artifacts related to a package.
3100 binaryArtifact = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/binaryArtifact"
3101 # A reference to a bower package.
3102 bower = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/bower"
3103    # A reference to build metadata related to a published package.
3104 buildMeta = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/buildMeta"
3105    # A reference to the build system used to create or publish the package.
3106 buildSystem = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/buildSystem"
3107 # A reference to a certification report for a package from an accredited/independent body.
3108 certificationReport = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/certificationReport"
3109 # A reference to the instant messaging system used by the maintainer for a package.
3110 chat = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/chat"
3111 # A reference to a Software Composition Analysis (SCA) report.
3112 componentAnalysisReport = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/componentAnalysisReport"
3113    # A reference to a source of a software flaw defined within the official CWE Dictionary that conforms to the CWE specification as defined by https://csrc.nist.gov/glossary/term/common_weakness_enumeration.
3114 cwe = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/cwe"
3115 # A reference to the documentation for a package.
3116 documentation = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/documentation"
3117 # A reference to a dynamic analysis report for a package.
3118 dynamicAnalysisReport = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/dynamicAnalysisReport"
3119 # A reference to the End Of Sale (EOS) and/or End Of Life (EOL) information related to a package.
3120 eolNotice = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/eolNotice"
3121    # A reference to an export control assessment for a package.
3122 exportControlAssessment = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/exportControlAssessment"
3123 # A reference to funding information related to a package.
3124 funding = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/funding"
3125 # A reference to the issue tracker for a package.
3126 issueTracker = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/issueTracker"
3127 # A reference to additional license information related to an artifact.
3128 license = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/license"
3129 # A reference to the mailing list used by the maintainer for a package.
3130 mailingList = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/mailingList"
3131 # A reference to a maven repository artifact.
3132 mavenCentral = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/mavenCentral"
3133    # A reference to metrics related to a package, such as OpenSSF scorecards.
3134 metrics = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/metrics"
3135 # A reference to an npm package.
3136 npm = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/npm"
3137 # A reference to a nuget package.
3138 nuget = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/nuget"
3139 # Used when the type doesn't match any of the other options.
3140 other = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/other"
3141 # A reference to a privacy assessment for a package.
3142 privacyAssessment = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/privacyAssessment"
3143    # A reference to additional product metadata, such as a reference within an organization's product catalog.
3144 productMetadata = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/productMetadata"
3145 # A reference to a purchase order for a package.
3146 purchaseOrder = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/purchaseOrder"
3147 # A reference to a quality assessment for a package.
3148 qualityAssessmentReport = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/qualityAssessmentReport"
3149 # A reference to a published list of releases for a package.
3150 releaseHistory = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/releaseHistory"
3151 # A reference to the release notes for a package.
3152 releaseNotes = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/releaseNotes"
3153 # A reference to a risk assessment for a package.
3154 riskAssessment = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/riskAssessment"
3155 # A reference to a runtime analysis report for a package.
3156 runtimeAnalysisReport = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/runtimeAnalysisReport"
3157 # A reference to information assuring that the software is developed using security practices as defined by [NIST SP 800-218 Secure Software Development Framework (SSDF) Version 1.1](https://csrc.nist.gov/pubs/sp/800/218/final) or [CISA Secure Software Development Attestation Form](https://www.cisa.gov/resources-tools/resources/secure-software-development-attestation-form).
3158 secureSoftwareAttestation = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/secureSoftwareAttestation"
3159 # A reference to the security adversary model for a package.
3160 securityAdversaryModel = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityAdversaryModel"
3161 # A reference to a published security advisory (where advisory as defined per ISO 29147:2018) that may affect one or more elements, e.g., vendor advisories or specific NVD entries.
3162 securityAdvisory = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityAdvisory"
3163 # A reference to the patch or source code that fixes a vulnerability.
3164 securityFix = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityFix"
3165 # A reference to related security information of unspecified type.
3166 securityOther = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityOther"
3167 # A reference to a [penetration test](https://en.wikipedia.org/wiki/Penetration_test) report for a package.
3168 securityPenTestReport = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityPenTestReport"
3169 # A reference to instructions for reporting newly discovered security vulnerabilities for a package.
3170 securityPolicy = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityPolicy"
3171 # A reference to the [security threat model](https://en.wikipedia.org/wiki/Threat_model) for a package.
3172 securityThreatModel = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityThreatModel"
3173 # A reference to a social media channel for a package.
3174 socialMedia = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/socialMedia"
3175 # A reference to an artifact containing the sources for a package.
3176 sourceArtifact = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/sourceArtifact"
3177 # A reference to a static analysis report for a package.
3178 staticAnalysisReport = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/staticAnalysisReport"
3179 # A reference to the software support channel or other support information for a package.
3180 support = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/support"
3181 # A reference to a version control system related to a software artifact.
3182 vcs = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vcs"
3183 # A reference to a Vulnerability Disclosure Report (VDR) which provides the software supplier's analysis and findings describing the impact (or lack of impact) that reported vulnerabilities have on packages or products in the supplier's SBOM as defined in [NIST SP 800-161](https://csrc.nist.gov/pubs/sp/800/161/r1/final).
3184 vulnerabilityDisclosureReport = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vulnerabilityDisclosureReport"
3185 # A reference to a Vulnerability Exploitability eXchange (VEX) statement which provides information on whether a product is impacted by a specific vulnerability in an included package and, if affected, whether there are actions recommended to remediate. See also [NTIA VEX one-page summary](https://ntia.gov/files/ntia/publications/vex_one-page_summary.pdf).
3186 vulnerabilityExploitabilityAssessment = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vulnerabilityExploitabilityAssessment"
3187
3188
3189# A mathematical algorithm that maps data of arbitrary size to a bit string.
3190@register("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm", compact_type="HashAlgorithm", abstract=False)
3191class HashAlgorithm(SHACLObject):
3192 NODE_KIND = NodeKind.BlankNodeOrIRI
3193 NAMED_INDIVIDUALS = {
3194 "blake2b256": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b256",
3195 "blake2b384": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b384",
3196 "blake2b512": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b512",
3197 "blake3": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake3",
3198 "crystalsDilithium": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/crystalsDilithium",
3199 "crystalsKyber": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/crystalsKyber",
3200 "falcon": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/falcon",
3201 "md2": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md2",
3202 "md4": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md4",
3203 "md5": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md5",
3204 "md6": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md6",
3205 "other": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/other",
3206 "sha1": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha1",
3207 "sha224": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha224",
3208 "sha256": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha256",
3209 "sha384": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha384",
3210 "sha3_224": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_224",
3211 "sha3_256": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_256",
3212 "sha3_384": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_384",
3213 "sha3_512": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_512",
3214 "sha512": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha512",
3215 }
3216 # blake2b algorithm with a digest size of 256 https://datatracker.ietf.org/doc/html/rfc7693#section-4
3217 blake2b256 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b256"
3218 # blake2b algorithm with a digest size of 384 https://datatracker.ietf.org/doc/html/rfc7693#section-4
3219 blake2b384 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b384"
3220 # blake2b algorithm with a digest size of 512 https://datatracker.ietf.org/doc/html/rfc7693#section-4
3221 blake2b512 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b512"
3222 # https://github.com/BLAKE3-team/BLAKE3-specs/blob/master/blake3.pdf
3223 blake3 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake3"
3224 # https://pq-crystals.org/dilithium/index.shtml
3225 crystalsDilithium = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/crystalsDilithium"
3226 # https://pq-crystals.org/kyber/index.shtml
3227 crystalsKyber = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/crystalsKyber"
3228 # https://falcon-sign.info/falcon.pdf
3229 falcon = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/falcon"
3230 # https://datatracker.ietf.org/doc/rfc1319/
3231 md2 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md2"
3232 # https://datatracker.ietf.org/doc/html/rfc1186
3233 md4 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md4"
3234 # https://datatracker.ietf.org/doc/html/rfc1321
3235 md5 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md5"
3236 # https://people.csail.mit.edu/rivest/pubs/RABCx08.pdf
3237 md6 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md6"
3238 # any hashing algorithm that does not exist in this list of entries
3239 other = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/other"
3240 # https://datatracker.ietf.org/doc/html/rfc3174
3241 sha1 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha1"
3242 # secure hashing algorithm with a digest length of 224 https://datatracker.ietf.org/doc/html/draft-ietf-pkix-sha224-01
3243 sha224 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha224"
3244 # secure hashing algorithm with a digest length of 256 https://www.rfc-editor.org/rfc/rfc4634
3245 sha256 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha256"
3246 # secure hashing algorithm with a digest length of 384 https://www.rfc-editor.org/rfc/rfc4634
3247 sha384 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha384"
3248 # sha3 with a digest length of 224 https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf
3249 sha3_224 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_224"
3250 # sha3 with a digest length of 256 https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf
3251 sha3_256 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_256"
3252 # sha3 with a digest length of 384 https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf
3253 sha3_384 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_384"
3254 # sha3 with a digest length of 512 https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf
3255 sha3_512 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_512"
3256 # secure hashing algorithm with a digest length of 512 https://www.rfc-editor.org/rfc/rfc4634
3257 sha512 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha512"
3258
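# Example (illustrative sketch, not part of the generated bindings): the
# enum-style classes above expose each named individual both as a class
# attribute and through NAMED_INDIVIDUALS, so a compact name taken from build
# metadata can be resolved to its full IRI, e.g.:
#
#   algo_name = "sha256"
#   algo_iri = HashAlgorithm.NAMED_INDIVIDUALS[algo_name]
#   assert algo_iri == HashAlgorithm.sha256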
3259
3260# Provides an independently reproducible mechanism that permits verification of a specific Element.
3261@register("https://spdx.org/rdf/3.0.0/terms/Core/IntegrityMethod", compact_type="IntegrityMethod", abstract=True)
3262class IntegrityMethod(SHACLObject):
3263 NODE_KIND = NodeKind.BlankNodeOrIRI
3264 NAMED_INDIVIDUALS = {
3265 }
3266
3267 @classmethod
3268 def _register_props(cls):
3269 super()._register_props()
3270 # Provide consumers with comments by the creator of the Element about the
3271 # Element.
3272 cls._add_property(
3273 "comment",
3274 StringProp(),
3275 iri="https://spdx.org/rdf/3.0.0/terms/Core/comment",
3276 compact="comment",
3277 )
3278
3279
3280# Provide an enumerated set of lifecycle phases that can provide context to relationships.
3281@register("https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType", compact_type="LifecycleScopeType", abstract=False)
3282class LifecycleScopeType(SHACLObject):
3283 NODE_KIND = NodeKind.BlankNodeOrIRI
3284 NAMED_INDIVIDUALS = {
3285 "build": "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/build",
3286 "design": "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/design",
3287 "development": "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/development",
3288 "other": "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/other",
3289 "runtime": "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/runtime",
3290 "test": "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/test",
3291 }
3292 # A relationship has specific context implications during an element's build phase, during development.
3293 build = "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/build"
3294 # A relationship has specific context implications during an element's design.
3295 design = "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/design"
3297 # A relationship has specific context implications during the development phase of an element.
3297 development = "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/development"
3298 # A relationship has other specific context information necessary to capture that the above set of enumerations does not handle.
3299 other = "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/other"
3300 # A relationship has specific context implications during the execution phase of an element.
3301 runtime = "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/runtime"
3302 # A relationship has specific context implications during an element's testing phase, during development.
3303 test = "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/test"
3304
3305
3306# A mapping between prefixes and namespace partial URIs.
3307@register("https://spdx.org/rdf/3.0.0/terms/Core/NamespaceMap", compact_type="NamespaceMap", abstract=False)
3308class NamespaceMap(SHACLObject):
3309 NODE_KIND = NodeKind.BlankNodeOrIRI
3310 NAMED_INDIVIDUALS = {
3311 }
3312
3313 @classmethod
3314 def _register_props(cls):
3315 super()._register_props()
3316 # Provides an unambiguous mechanism for conveying a URI fragment portion of an
3317 # ElementID.
3318 cls._add_property(
3319 "namespace",
3320 AnyURIProp(),
3321 iri="https://spdx.org/rdf/3.0.0/terms/Core/namespace",
3322 min_count=1,
3323 compact="namespace",
3324 )
3325 # A substitute for a URI.
3326 cls._add_property(
3327 "prefix",
3328 StringProp(),
3329 iri="https://spdx.org/rdf/3.0.0/terms/Core/prefix",
3330 min_count=1,
3331 compact="prefix",
3332 )
3333
3334
3335# An SPDX version 2.X compatible verification method for software packages.
3336@register("https://spdx.org/rdf/3.0.0/terms/Core/PackageVerificationCode", compact_type="PackageVerificationCode", abstract=False)
3337class PackageVerificationCode(IntegrityMethod):
3338 NODE_KIND = NodeKind.BlankNodeOrIRI
3339 NAMED_INDIVIDUALS = {
3340 }
3341
3342 @classmethod
3343 def _register_props(cls):
3344 super()._register_props()
3345 # Specifies the algorithm used for calculating the hash value.
3346 cls._add_property(
3347 "algorithm",
3348 EnumProp([
3349 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b256", "blake2b256"),
3350 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b384", "blake2b384"),
3351 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b512", "blake2b512"),
3352 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake3", "blake3"),
3353 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/crystalsDilithium", "crystalsDilithium"),
3354 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/crystalsKyber", "crystalsKyber"),
3355 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/falcon", "falcon"),
3356 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md2", "md2"),
3357 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md4", "md4"),
3358 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md5", "md5"),
3359 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md6", "md6"),
3360 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/other", "other"),
3361 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha1", "sha1"),
3362 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha224", "sha224"),
3363 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha256", "sha256"),
3364 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha384", "sha384"),
3365 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_224", "sha3_224"),
3366 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_256", "sha3_256"),
3367 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_384", "sha3_384"),
3368 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_512", "sha3_512"),
3369 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha512", "sha512"),
3370 ]),
3371 iri="https://spdx.org/rdf/3.0.0/terms/Core/algorithm",
3372 min_count=1,
3373 compact="algorithm",
3374 )
3375 # The result of applying a hash algorithm to an Element.
3376 cls._add_property(
3377 "hashValue",
3378 StringProp(),
3379 iri="https://spdx.org/rdf/3.0.0/terms/Core/hashValue",
3380 min_count=1,
3381 compact="hashValue",
3382 )
3383 # The relative file name of a file to be excluded from the
3384 # `PackageVerificationCode`.
3385 cls._add_property(
3386 "packageVerificationCodeExcludedFile",
3387 ListProp(StringProp()),
3388 iri="https://spdx.org/rdf/3.0.0/terms/Core/packageVerificationCodeExcludedFile",
3389 compact="packageVerificationCodeExcludedFile",
3390 )
3391
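# Example (illustrative sketch): building a PackageVerificationCode, assuming
# the SHACLObject base class accepts the registered property names as
# constructor keyword arguments; the hash value below is a made-up placeholder:
#
#   pvc = PackageVerificationCode(
#       algorithm=HashAlgorithm.sha1,
#       hashValue="d6a770ba38583ed4bb4525bd96e50461655d2758",
#       packageVerificationCodeExcludedFile=["./package.spdx"],
#   )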
3392
3393# A tuple of two positive integers that define a range.
3394@register("https://spdx.org/rdf/3.0.0/terms/Core/PositiveIntegerRange", compact_type="PositiveIntegerRange", abstract=False)
3395class PositiveIntegerRange(SHACLObject):
3396 NODE_KIND = NodeKind.BlankNodeOrIRI
3397 NAMED_INDIVIDUALS = {
3398 }
3399
3400 @classmethod
3401 def _register_props(cls):
3402 super()._register_props()
3403 # Defines the beginning of a range.
3404 cls._add_property(
3405 "beginIntegerRange",
3406 PositiveIntegerProp(),
3407 iri="https://spdx.org/rdf/3.0.0/terms/Core/beginIntegerRange",
3408 min_count=1,
3409 compact="beginIntegerRange",
3410 )
3411 # Defines the end of a range.
3412 cls._add_property(
3413 "endIntegerRange",
3414 PositiveIntegerProp(),
3415 iri="https://spdx.org/rdf/3.0.0/terms/Core/endIntegerRange",
3416 min_count=1,
3417 compact="endIntegerRange",
3418 )
3419
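# Example (illustrative sketch, same keyword-construction assumption as
# above): a PositiveIntegerRange covering positions 1 through 420 of some
# artifact:
#
#   snippet_range = PositiveIntegerRange(beginIntegerRange=1, endIntegerRange=420)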
3420
3421# Categories of presence or absence.
3422@register("https://spdx.org/rdf/3.0.0/terms/Core/PresenceType", compact_type="PresenceType", abstract=False)
3423class PresenceType(SHACLObject):
3424 NODE_KIND = NodeKind.BlankNodeOrIRI
3425 NAMED_INDIVIDUALS = {
3426 "no": "https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/no",
3427 "noAssertion": "https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/noAssertion",
3428 "yes": "https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/yes",
3429 }
3430 # Indicates absence of the field.
3431 no = "https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/no"
3432 # Makes no assertion about the field.
3433 noAssertion = "https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/noAssertion"
3434 # Indicates presence of the field.
3435 yes = "https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/yes"
3436
3437
3438# Enumeration of the valid profiles.
3439@register("https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType", compact_type="ProfileIdentifierType", abstract=False)
3440class ProfileIdentifierType(SHACLObject):
3441 NODE_KIND = NodeKind.BlankNodeOrIRI
3442 NAMED_INDIVIDUALS = {
3443 "ai": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/ai",
3444 "build": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/build",
3445 "core": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/core",
3446 "dataset": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/dataset",
3447 "expandedLicensing": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/expandedLicensing",
3448 "extension": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/extension",
3449 "lite": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/lite",
3450 "security": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/security",
3451 "simpleLicensing": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/simpleLicensing",
3452 "software": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/software",
3453 }
3454 # the element follows the AI profile specification
3455 ai = "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/ai"
3456 # the element follows the Build profile specification
3457 build = "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/build"
3458 # the element follows the Core profile specification
3459 core = "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/core"
3460 # the element follows the Dataset profile specification
3461 dataset = "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/dataset"
3462 # the element follows the expanded Licensing profile
3463 expandedLicensing = "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/expandedLicensing"
3464 # the element follows the Extension profile specification
3465 extension = "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/extension"
3466 # the element follows the Lite profile specification
3467 lite = "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/lite"
3468 # the element follows the Security profile specification
3469 security = "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/security"
3470 # the element follows the simple Licensing profile
3471 simpleLicensing = "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/simpleLicensing"
3472 # the element follows the Software profile specification
3473 software = "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/software"
3474
3475
3476# Describes a relationship between one or more elements.
3477@register("https://spdx.org/rdf/3.0.0/terms/Core/Relationship", compact_type="Relationship", abstract=False)
3478class Relationship(Element):
3479 NODE_KIND = NodeKind.BlankNodeOrIRI
3480 ID_ALIAS = "spdxId"
3481 NAMED_INDIVIDUALS = {
3482 }
3483
3484 @classmethod
3485 def _register_props(cls):
3486 super()._register_props()
3487 # Provides information about the completeness of relationships.
3488 cls._add_property(
3489 "completeness",
3490 EnumProp([
3491 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/complete", "complete"),
3492 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/incomplete", "incomplete"),
3493 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/noAssertion", "noAssertion"),
3494 ]),
3495 iri="https://spdx.org/rdf/3.0.0/terms/Core/completeness",
3496 compact="completeness",
3497 )
3498 # Specifies the time from which an element is no longer applicable / valid.
3499 cls._add_property(
3500 "endTime",
3501 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
3502 iri="https://spdx.org/rdf/3.0.0/terms/Core/endTime",
3503 compact="endTime",
3504 )
3505 # References the Element on the left-hand side of a relationship.
3506 cls._add_property(
3507 "from_",
3508 ObjectProp(Element, True),
3509 iri="https://spdx.org/rdf/3.0.0/terms/Core/from",
3510 min_count=1,
3511 compact="from",
3512 )
3513 # Information about the relationship between two Elements.
3514 cls._add_property(
3515 "relationshipType",
3516 EnumProp([
3517 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/affects", "affects"),
3518 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/amendedBy", "amendedBy"),
3519 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/ancestorOf", "ancestorOf"),
3520 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/availableFrom", "availableFrom"),
3521 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/configures", "configures"),
3522 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/contains", "contains"),
3523 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/coordinatedBy", "coordinatedBy"),
3524 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/copiedTo", "copiedTo"),
3525 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/delegatedTo", "delegatedTo"),
3526 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/dependsOn", "dependsOn"),
3527 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/descendantOf", "descendantOf"),
3528 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/describes", "describes"),
3529 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/doesNotAffect", "doesNotAffect"),
3530 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/expandsTo", "expandsTo"),
3531 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/exploitCreatedBy", "exploitCreatedBy"),
3532 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/fixedBy", "fixedBy"),
3533 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/fixedIn", "fixedIn"),
3534 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/foundBy", "foundBy"),
3535 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/generates", "generates"),
3536 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAddedFile", "hasAddedFile"),
3537 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAssessmentFor", "hasAssessmentFor"),
3538 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAssociatedVulnerability", "hasAssociatedVulnerability"),
3539 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasConcludedLicense", "hasConcludedLicense"),
3540 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDataFile", "hasDataFile"),
3541 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDeclaredLicense", "hasDeclaredLicense"),
3542 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDeletedFile", "hasDeletedFile"),
3543 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDependencyManifest", "hasDependencyManifest"),
3544 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDistributionArtifact", "hasDistributionArtifact"),
3545 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDocumentation", "hasDocumentation"),
3546 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDynamicLink", "hasDynamicLink"),
3547 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasEvidence", "hasEvidence"),
3548 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasExample", "hasExample"),
3549 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasHost", "hasHost"),
3550 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasInputs", "hasInputs"),
3551 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasMetadata", "hasMetadata"),
3552 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOptionalComponent", "hasOptionalComponent"),
3553 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOptionalDependency", "hasOptionalDependency"),
3554 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOutputs", "hasOutputs"),
3555 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasPrerequsite", "hasPrerequsite"),
3556 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasProvidedDependency", "hasProvidedDependency"),
3557 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasRequirement", "hasRequirement"),
3558 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasSpecification", "hasSpecification"),
3559 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasStaticLink", "hasStaticLink"),
3560 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasTest", "hasTest"),
3561 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasTestCase", "hasTestCase"),
3562 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasVariant", "hasVariant"),
3563 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/invokedBy", "invokedBy"),
3564 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/modifiedBy", "modifiedBy"),
3565 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/other", "other"),
3566 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/packagedBy", "packagedBy"),
3567 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/patchedBy", "patchedBy"),
3568 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/publishedBy", "publishedBy"),
3569 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/reportedBy", "reportedBy"),
3570 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/republishedBy", "republishedBy"),
3571 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/serializedInArtifact", "serializedInArtifact"),
3572 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/testedOn", "testedOn"),
3573 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/trainedOn", "trainedOn"),
3574 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/underInvestigationFor", "underInvestigationFor"),
3575 ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/usesTool", "usesTool"),
3576 ]),
3577 iri="https://spdx.org/rdf/3.0.0/terms/Core/relationshipType",
3578 min_count=1,
3579 compact="relationshipType",
3580 )
3581 # Specifies the time from which an element is applicable / valid.
3582 cls._add_property(
3583 "startTime",
3584 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
3585 iri="https://spdx.org/rdf/3.0.0/terms/Core/startTime",
3586 compact="startTime",
3587 )
3588 # References an Element on the right-hand side of a relationship.
3589 cls._add_property(
3590 "to",
3591 ListProp(ObjectProp(Element, False)),
3592 iri="https://spdx.org/rdf/3.0.0/terms/Core/to",
3593 min_count=1,
3594 compact="to",
3595 )
3596
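# Example (illustrative sketch): relating two hypothetical Elements, again
# assuming keyword construction. Note that the Python property is "from_"
# (because "from" is a Python keyword) but it compacts to "from" when
# serialized:
#
#   rel = Relationship(
#       from_=pkg_element,
#       relationshipType=RelationshipType.contains,
#       to=[src_element],
#       completeness=RelationshipCompleteness.complete,
#   )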
3597
3598# Indicates whether a relationship is known to be complete, incomplete, or if no assertion is made with respect to relationship completeness.
3599@register("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness", compact_type="RelationshipCompleteness", abstract=False)
3600class RelationshipCompleteness(SHACLObject):
3601 NODE_KIND = NodeKind.BlankNodeOrIRI
3602 NAMED_INDIVIDUALS = {
3603 "complete": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/complete",
3604 "incomplete": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/incomplete",
3605 "noAssertion": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/noAssertion",
3606 }
3607 # The relationship is known to be exhaustive.
3608 complete = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/complete"
3609 # The relationship is known not to be exhaustive.
3610 incomplete = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/incomplete"
3611 # No assertion can be made about the completeness of the relationship.
3612 noAssertion = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/noAssertion"
3613
3614
3615# Information about the relationship between two Elements.
3616@register("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType", compact_type="RelationshipType", abstract=False)
3617class RelationshipType(SHACLObject):
3618 NODE_KIND = NodeKind.BlankNodeOrIRI
3619 NAMED_INDIVIDUALS = {
3620 "affects": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/affects",
3621 "amendedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/amendedBy",
3622 "ancestorOf": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/ancestorOf",
3623 "availableFrom": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/availableFrom",
3624 "configures": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/configures",
3625 "contains": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/contains",
3626 "coordinatedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/coordinatedBy",
3627 "copiedTo": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/copiedTo",
3628 "delegatedTo": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/delegatedTo",
3629 "dependsOn": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/dependsOn",
3630 "descendantOf": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/descendantOf",
3631 "describes": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/describes",
3632 "doesNotAffect": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/doesNotAffect",
3633 "expandsTo": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/expandsTo",
3634 "exploitCreatedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/exploitCreatedBy",
3635 "fixedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/fixedBy",
3636 "fixedIn": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/fixedIn",
3637 "foundBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/foundBy",
3638 "generates": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/generates",
3639 "hasAddedFile": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAddedFile",
3640 "hasAssessmentFor": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAssessmentFor",
3641 "hasAssociatedVulnerability": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAssociatedVulnerability",
3642 "hasConcludedLicense": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasConcludedLicense",
3643 "hasDataFile": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDataFile",
3644 "hasDeclaredLicense": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDeclaredLicense",
3645 "hasDeletedFile": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDeletedFile",
3646 "hasDependencyManifest": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDependencyManifest",
3647 "hasDistributionArtifact": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDistributionArtifact",
3648 "hasDocumentation": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDocumentation",
3649 "hasDynamicLink": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDynamicLink",
3650 "hasEvidence": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasEvidence",
3651 "hasExample": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasExample",
3652 "hasHost": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasHost",
3653 "hasInputs": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasInputs",
3654 "hasMetadata": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasMetadata",
3655 "hasOptionalComponent": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOptionalComponent",
3656 "hasOptionalDependency": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOptionalDependency",
3657 "hasOutputs": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOutputs",
3658 "hasPrerequsite": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasPrerequsite",
3659 "hasProvidedDependency": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasProvidedDependency",
3660 "hasRequirement": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasRequirement",
3661 "hasSpecification": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasSpecification",
3662 "hasStaticLink": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasStaticLink",
3663 "hasTest": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasTest",
3664 "hasTestCase": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasTestCase",
3665 "hasVariant": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasVariant",
3666 "invokedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/invokedBy",
3667 "modifiedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/modifiedBy",
3668 "other": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/other",
3669 "packagedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/packagedBy",
3670 "patchedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/patchedBy",
3671 "publishedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/publishedBy",
3672 "reportedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/reportedBy",
3673 "republishedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/republishedBy",
3674 "serializedInArtifact": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/serializedInArtifact",
3675 "testedOn": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/testedOn",
3676 "trainedOn": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/trainedOn",
3677 "underInvestigationFor": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/underInvestigationFor",
3678 "usesTool": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/usesTool",
3679 }
3680 # (Security/VEX) The `from` Vulnerability affects each `to` Element
3681 affects = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/affects"
3682 # The `from` Element is amended by each `to` Element
3683 amendedBy = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/amendedBy"
3684 # The `from` Element is an ancestor of each `to` Element
3685 ancestorOf = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/ancestorOf"
3686 # The `from` Element is available from the additional supplier described by each `to` Element
3687 availableFrom = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/availableFrom"
3688 # The `from` Element is a configuration applied to each `to` Element during a LifecycleScopeType period
3689 configures = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/configures"
3690 # The `from` Element contains each `to` Element
3691 contains = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/contains"
3692 # (Security) The `from` Vulnerability is coordinatedBy the `to` Agent(s) (vendor, researcher, or consumer agent)
3693 coordinatedBy = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/coordinatedBy"
3694 # The `from` Element has been copied to each `to` Element
3695 copiedTo = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/copiedTo"
3696 # The `from` Agent is delegating an action to the Agent of the `to` Relationship (which must be of type invokedBy) during a LifecycleScopeType. (e.g. the `to` invokedBy Relationship is being done on behalf of `from`)
3697 delegatedTo = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/delegatedTo"
3698 # The `from` Element depends on each `to` Element during a LifecycleScopeType period.
3699 dependsOn = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/dependsOn"
3700 # The `from` Element is a descendant of each `to` Element
3701 descendantOf = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/descendantOf"
3702 # The `from` Element describes each `to` Element. To denote the root(s) of a tree of elements in a collection, the rootElement property should be used.
3703 describes = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/describes"
3704 # (Security/VEX) The `from` Vulnerability has no impact on each `to` Element
3705 doesNotAffect = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/doesNotAffect"
3706 # The `from` archive expands out as an artifact described by each `to` Element
3707 expandsTo = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/expandsTo"
3708 # (Security) The `from` Vulnerability has had an exploit created against it by each `to` Agent
3709 exploitCreatedBy = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/exploitCreatedBy"
3710 # (Security) Designates a `from` Vulnerability has been fixed by the `to` Agent(s)
3711 fixedBy = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/fixedBy"
3712 # (Security/VEX) A `from` Vulnerability has been fixed in each of the `to` Element(s)
3713 fixedIn = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/fixedIn"
3714 # (Security) Designates a `from` Vulnerability was originally discovered by the `to` Agent(s)
3715 foundBy = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/foundBy"
3716 # The `from` Element generates each `to` Element
3717 generates = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/generates"
3718 # Every `to` Element is a file added to the `from` Element (`from` hasAddedFile `to`)
3719 hasAddedFile = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAddedFile"
3720 # (Security) Relates a `from` Vulnerability and each `to` Element(s) with a security assessment. To be used with `VulnAssessmentRelationship` types
3721 hasAssessmentFor = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAssessmentFor"
3722 # (Security) Used to associate a `from` Artifact with each `to` Vulnerability
3723 hasAssociatedVulnerability = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAssociatedVulnerability"
3724 # The `from` Software Artifact is concluded by the SPDX data creator to be governed by each `to` license
3725 hasConcludedLicense = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasConcludedLicense"
3726 # The `from` Element treats each `to` Element as a data file
3727 hasDataFile = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDataFile"
3728 # The `from` Software Artifact was discovered to actually contain each `to` license, for example as detected by use of automated tooling.
3729 hasDeclaredLicense = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDeclaredLicense"
3730 # Every `to` Element is a file deleted from the `from` Element (`from` hasDeletedFile `to`)
3731 hasDeletedFile = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDeletedFile"
3732 # The `from` Element has manifest files that contain dependency information in each `to` Element
3733 hasDependencyManifest = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDependencyManifest"
3734 # The `from` Element is distributed as an artifact in each Element `to`, (e.g. an RPM or archive file)
3735 hasDistributionArtifact = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDistributionArtifact"
3736 # The `from` Element is documented by each `to` Element
3737 hasDocumentation = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDocumentation"
3738 # The `from` Element dynamically links in each `to` Element, during a LifecycleScopeType period.
3739 hasDynamicLink = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDynamicLink"
3740 # (Dataset) Every `to` Element is considered as evidence for the `from` Element (`from` hasEvidence `to`)
3741 hasEvidence = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasEvidence"
3742 # Every `to` Element is an example for the `from` Element (`from` hasExample `to`)
3743 hasExample = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasExample"
3744 # The `from` Build was run on the `to` Element during a LifecycleScopeType period (e.g. The host that the build runs on)
3745 hasHost = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasHost"
3747 # The `from` Build has each `to` Element as an input during a LifecycleScopeType period.
3747 hasInputs = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasInputs"
3748 # Every `to` Element is metadata about the `from` Element (`from` hasMetadata `to`)
3749 hasMetadata = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasMetadata"
3750 # Every `to` Element is an optional component of the `from` Element (`from` hasOptionalComponent `to`)
3751 hasOptionalComponent = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOptionalComponent"
3752 # The `from` Element optionally depends on each `to` Element during a LifecycleScopeType period
3753 hasOptionalDependency = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOptionalDependency"
3754 # The `from` Build element generates each `to` Element as an output during a LifecycleScopeType period.
3755 hasOutputs = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOutputs"
3756 # The `from` Element has a prerequisite on each `to` Element, during a LifecycleScopeType period
3757 hasPrerequsite = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasPrerequsite"
3758 # The `from` Element has a dependency on each `to` Element that is not included in the distributed artifact and is instead assumed to be provided, during a LifecycleScopeType period
3759 hasProvidedDependency = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasProvidedDependency"
3760 # The `from` Element has a requirement on each `to` Element, during a LifecycleScopeType period
3761 hasRequirement = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasRequirement"
3762 # Every `to` Element is a specification for the `from` Element (`from` hasSpecification `to`), during a LifecycleScopeType period
3763 hasSpecification = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasSpecification"
3764 # The `from` Element statically links in each `to` Element, during a LifecycleScopeType period
3765 hasStaticLink = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasStaticLink"
3766 # Every `to` Element is a test artifact for the `from` Element (`from` hasTest `to`), during a LifecycleScopeType period
3767 hasTest = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasTest"
3768 # Every `to` Element is a test case for the `from` Element (`from` hasTestCase `to`)
3769 hasTestCase = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasTestCase"
3770 # Every `to` Element is a variant of the `from` Element (`from` hasVariant `to`)
3771 hasVariant = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasVariant"
3772 # The `from` Element was invoked by the `to` Agent during a LifecycleScopeType period (for example, a Build element that describes a build step)
3773 invokedBy = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/invokedBy"
3774 # The `from` Element is modified by each `to` Element
3775 modifiedBy = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/modifiedBy"
3776 # Every `to` Element is related to the `from` Element where the relationship type is not described by any of the SPDX relationship types (this relationship is directionless)
3777 other = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/other"
3778 # Every `to` Element is a packaged instance of the `from` Element (`from` packagedBy `to`)
3779 packagedBy = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/packagedBy"
3780 # Every `to` Element is a patch for the `from` Element (`from` patchedBy `to`)
3781 patchedBy = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/patchedBy"
3782 # (Security) Designates a `from` Vulnerability was made available for public use or reference by each `to` Agent
3783 publishedBy = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/publishedBy"
3784 # (Security) Designates a `from` Vulnerability was first reported to a project, vendor, or tracking database for formal identification by each `to` Agent
3785 reportedBy = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/reportedBy"
3786 # (Security) Designates a `from` Vulnerability's details were tracked, aggregated, and/or enriched to improve context (i.e. NVD) by a `to` Agent(s)
3787 republishedBy = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/republishedBy"
3788 # The `from` SPDXDocument can be found in a serialized form in each `to` Artifact
3789 serializedInArtifact = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/serializedInArtifact"
3790 # (AI, Dataset) The `from` Element has been tested on the `to` Element
3791 testedOn = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/testedOn"
3792 # (AI, Dataset) The `from` Element has been trained by the `to` Element(s)
3793 trainedOn = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/trainedOn"
3794 # (Security/VEX) The `from` Vulnerability impact is being investigated for each `to` Element
3795 underInvestigationFor = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/underInvestigationFor"
3796 # The `from` Element uses each `to` Element as a tool during a LifecycleScopeType period.
3797 usesTool = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/usesTool"
3798
3799
3800# A collection of SPDX Elements that could potentially be serialized.
3801@register("https://spdx.org/rdf/3.0.0/terms/Core/SpdxDocument", compact_type="SpdxDocument", abstract=False)
3802class SpdxDocument(ElementCollection):
3803 NODE_KIND = NodeKind.BlankNodeOrIRI
3804 ID_ALIAS = "spdxId"
3805 NAMED_INDIVIDUALS = {
3806 }
3807
3808 @classmethod
3809 def _register_props(cls):
3810 super()._register_props()
3811 # Provides the license under which the SPDX documentation of the Element can be
3812 # used.
3813 cls._add_property(
3814 "dataLicense",
3815 ObjectProp(simplelicensing_AnyLicenseInfo, False),
3816 iri="https://spdx.org/rdf/3.0.0/terms/Core/dataLicense",
3817 compact="dataLicense",
3818 )
3819 # Provides an ExternalMap of Element identifiers.
3820 cls._add_property(
3821 "imports",
3822 ListProp(ObjectProp(ExternalMap, False)),
3823 iri="https://spdx.org/rdf/3.0.0/terms/Core/imports",
3824 compact="imports",
3825 )
3826 # Provides a NamespaceMap of prefixes and associated namespace partial URIs applicable to an SpdxDocument and independent of any specific serialization format or instance.
3827 cls._add_property(
3828 "namespaceMap",
3829 ListProp(ObjectProp(NamespaceMap, False)),
3830 iri="https://spdx.org/rdf/3.0.0/terms/Core/namespaceMap",
3831 compact="namespaceMap",
3832 )
3833
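# Example (illustrative sketch): an SpdxDocument carrying a NamespaceMap so
# that prefixed ElementIDs can be expanded. The prefix and namespace URI are
# hypothetical, and the usual required Element properties (e.g. creationInfo)
# are omitted for brevity:
#
#   doc = SpdxDocument(
#       namespaceMap=[
#           NamespaceMap(prefix="oe", namespace="http://spdx.example.org/oe/"),
#       ],
#   )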
3834
3835# Indicates the type of support that is associated with an artifact.
3836@register("https://spdx.org/rdf/3.0.0/terms/Core/SupportType", compact_type="SupportType", abstract=False)
3837class SupportType(SHACLObject):
3838 NODE_KIND = NodeKind.BlankNodeOrIRI
3839 NAMED_INDIVIDUALS = {
3840 "deployed": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/deployed",
3841 "development": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/development",
3842 "endOfSupport": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/endOfSupport",
3843 "limitedSupport": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/limitedSupport",
3844 "noAssertion": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/noAssertion",
3845 "noSupport": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/noSupport",
3846 "support": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/support",
3847 }
3848 # in addition to being supported by the supplier, the software is known to have been deployed and is in use. For a software as a service provider, this implies the software is now available as a service.
3849 deployed = "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/deployed"
3850 # the artifact is in active development and is not considered ready for formal support from the supplier.
3851 development = "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/development"
3852 # there is a defined end of support for the artifact from the supplier. This may also be referred to as end of life. There is a validUntilDate that can be used to signal when support ends for the artifact.
3853 endOfSupport = "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/endOfSupport"
3854 # the artifact has been released, and there is limited support available from the supplier. There is a validUntilDate that can provide additional information about the duration of support.
3855 limitedSupport = "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/limitedSupport"
3856 # no assertion about the type of support is made. This is considered the default if no other support type is used.
3857 noAssertion = "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/noAssertion"
3858 # there is no support for the artifact from the supplier; the consumer assumes any support obligations.
3859 noSupport = "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/noSupport"
3860 # the artifact has been released, and is supported from the supplier. There is a validUntilDate that can provide additional information about the duration of support.
3861 support = "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/support"
3862
3863
3864# An element of hardware and/or software utilized to carry out a particular function.
3865@register("https://spdx.org/rdf/3.0.0/terms/Core/Tool", compact_type="Tool", abstract=False)
3866class Tool(Element):
3867 NODE_KIND = NodeKind.BlankNodeOrIRI
3868 ID_ALIAS = "spdxId"
3869 NAMED_INDIVIDUALS = {
3870 }
3871
3872
3873# Categories of confidentiality level.
3874@register("https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType", compact_type="dataset_ConfidentialityLevelType", abstract=False)
3875class dataset_ConfidentialityLevelType(SHACLObject):
3876 NODE_KIND = NodeKind.BlankNodeOrIRI
3877 NAMED_INDIVIDUALS = {
3878 "amber": "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/amber",
3879 "clear": "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/clear",
3880 "green": "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/green",
3881 "red": "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/red",
3882 }
3883 # Data points in the dataset can be shared only with specific organizations and their clients on a need-to-know basis.
3884 amber = "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/amber"
3885 # Dataset may be distributed freely, without restriction.
3886 clear = "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/clear"
3887 # Dataset can be shared within a community of peers and partners.
3888 green = "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/green"
3889 # Data points in the dataset are highly confidential and can only be shared with named recipients.
3890 red = "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/red"
3891
3892
3893# Availability of dataset.
3894@register("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType", compact_type="dataset_DatasetAvailabilityType", abstract=False)
3895class dataset_DatasetAvailabilityType(SHACLObject):
3896 NODE_KIND = NodeKind.BlankNodeOrIRI
3897 NAMED_INDIVIDUALS = {
3898 "clickthrough": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/clickthrough",
3899 "directDownload": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/directDownload",
3900 "query": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/query",
3901 "registration": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/registration",
3902 "scrapingScript": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/scrapingScript",
3903 }
3904 # the dataset is not publicly available and can only be accessed after affirmatively accepting terms on a clickthrough web page.
3905 clickthrough = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/clickthrough"
3906 # the dataset is publicly available and can be downloaded
3907 directDownload = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/directDownload"
3908 # the dataset is publicly available, but not all at once, and can only be accessed through queries that return parts of the dataset.
3909 query = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/query"
3910 # the dataset is not publicly available and an email registration is required before it can be accessed.
3911 registration = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/registration"
3912 # the dataset provider is not making available the underlying data directly; the dataset must be reassembled, typically using a provided scraping script.
3913 scrapingScript = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/scrapingScript"
3914
3915
3916# Enumeration of dataset types.
3917@register("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType", compact_type="dataset_DatasetType", abstract=False)
3918class dataset_DatasetType(SHACLObject):
3919 NODE_KIND = NodeKind.BlankNodeOrIRI
3920 NAMED_INDIVIDUALS = {
3921 "audio": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/audio",
3922 "categorical": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/categorical",
3923 "graph": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/graph",
3924 "image": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/image",
3925 "noAssertion": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/noAssertion",
3926 "numeric": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/numeric",
3927 "other": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/other",
3928 "sensor": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/sensor",
3929 "structured": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/structured",
3930 "syntactic": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/syntactic",
3931 "text": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/text",
3932 "timeseries": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/timeseries",
3933 "timestamp": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/timestamp",
3934 "video": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/video",
3935 }
3936 # data is audio based, such as a collection of music from the 80s.
3937 audio = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/audio"
3938 # data that is classified into a discrete number of categories, such as the eye color of a population of people.
3939 categorical = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/categorical"
3940 # data is in the form of a graph where entries are somehow related to each other through edges, such as a social network of friends.
3941 graph = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/graph"
3942 # data is a collection of images such as pictures of animals.
3943 image = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/image"
3944 # data type is not known.
3945 noAssertion = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/noAssertion"
3946 # data consists only of numeric entries.
3947 numeric = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/numeric"
3948 # data is of a type not included in this list.
3949 other = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/other"
3950 # data is recorded from a physical sensor, such as a thermometer reading or a biometric device.
3951 sensor = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/sensor"
3952 # data is stored in tabular format or retrieved from a relational database.
3953 structured = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/structured"
3954 # data describes the syntax or semantics of a language or text, such as a parse tree used for natural language processing.
3955 syntactic = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/syntactic"
3956 # data consists of unstructured text, such as a book, Wikipedia article, or transcript.
3957 text = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/text"
3958 # data is recorded in an ordered sequence of timestamped entries, such as the price of a stock over the course of a day.
3959 timeseries = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/timeseries"
3960 # data is recorded with a timestamp for each entry, but not necessarily in an ordered sequence or at regular intervals, such as when a taxi ride starts and ends.
3961 timestamp = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/timestamp"
3962 # data is video based, such as a collection of movie clips featuring Tom Hanks.
3963 video = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/video"
3964
3965
3966# Abstract class for additional text intended to be added to a License, but
3967# which is not itself a standalone License.
3968@register("https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/LicenseAddition", compact_type="expandedlicensing_LicenseAddition", abstract=True)
3969class expandedlicensing_LicenseAddition(Element):
3970 NODE_KIND = NodeKind.BlankNodeOrIRI
3971 ID_ALIAS = "spdxId"
3972 NAMED_INDIVIDUALS = {
3973 }
3974
3975 @classmethod
3976 def _register_props(cls):
3977 super()._register_props()
3978 # Identifies the full text of a LicenseAddition.
3979 cls._add_property(
3980 "expandedlicensing_additionText",
3981 StringProp(),
3982 iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/additionText",
3983 min_count=1,
3984 compact="expandedlicensing_additionText",
3985 )
3986 # Specifies whether an additional text identifier has been marked as deprecated.
3987 cls._add_property(
3988 "expandedlicensing_isDeprecatedAdditionId",
3989 BooleanProp(),
3990 iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/isDeprecatedAdditionId",
3991 compact="expandedlicensing_isDeprecatedAdditionId",
3992 )
3993 # Identifies all the text and metadata associated with a license in the license
3994 # XML format.
3995 cls._add_property(
3996 "expandedlicensing_licenseXml",
3997 StringProp(),
3998 iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/licenseXml",
3999 compact="expandedlicensing_licenseXml",
4000 )
4001 # Specifies the licenseId that is preferred to be used in place of a deprecated
4002 # License or LicenseAddition.
4003 cls._add_property(
4004 "expandedlicensing_obsoletedBy",
4005 StringProp(),
4006 iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/obsoletedBy",
4007 compact="expandedlicensing_obsoletedBy",
4008 )
4009 # Contains a URL where the License or LicenseAddition can be found in use.
4010 cls._add_property(
4011 "expandedlicensing_seeAlso",
4012 ListProp(AnyURIProp()),
4013 iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/seeAlso",
4014 compact="expandedlicensing_seeAlso",
4015 )
4016 # Identifies the full text of a LicenseAddition, in SPDX templating format.
4017 cls._add_property(
4018 "expandedlicensing_standardAdditionTemplate",
4019 StringProp(),
4020 iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/standardAdditionTemplate",
4021 compact="expandedlicensing_standardAdditionTemplate",
4022 )
4023
4024
4025# A license exception that is listed on the SPDX Exceptions list.
4026@register("https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/ListedLicenseException", compact_type="expandedlicensing_ListedLicenseException", abstract=False)
4027class expandedlicensing_ListedLicenseException(expandedlicensing_LicenseAddition):
4028 NODE_KIND = NodeKind.BlankNodeOrIRI
4029 ID_ALIAS = "spdxId"
4030 NAMED_INDIVIDUALS = {
4031 }
4032
4033 @classmethod
4034 def _register_props(cls):
4035 super()._register_props()
4036 # Specifies the SPDX License List version in which this license or exception
4037 # identifier was deprecated.
4038 cls._add_property(
4039 "expandedlicensing_deprecatedVersion",
4040 StringProp(),
4041 iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/deprecatedVersion",
4042 compact="expandedlicensing_deprecatedVersion",
4043 )
4044 # Specifies the SPDX License List version in which this ListedLicense or
4045 # ListedLicenseException identifier was first added.
4046 cls._add_property(
4047 "expandedlicensing_listVersionAdded",
4048 StringProp(),
4049 iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/listVersionAdded",
4050 compact="expandedlicensing_listVersionAdded",
4051 )
4052
4053
4054# A property name with an associated value.
4055@register("https://spdx.org/rdf/3.0.0/terms/Extension/CdxPropertyEntry", compact_type="extension_CdxPropertyEntry", abstract=False)
4056class extension_CdxPropertyEntry(SHACLObject):
4057 NODE_KIND = NodeKind.BlankNodeOrIRI
4058 NAMED_INDIVIDUALS = {
4059 }
4060
4061 @classmethod
4062 def _register_props(cls):
4063 super()._register_props()
4064 # A name used in a CdxExtension name-value pair.
4065 cls._add_property(
4066 "extension_cdxPropName",
4067 StringProp(),
4068 iri="https://spdx.org/rdf/3.0.0/terms/Extension/cdxPropName",
4069 min_count=1,
4070 compact="extension_cdxPropName",
4071 )
4072 # A value used in a CdxExtension name-value pair.
4073 cls._add_property(
4074 "extension_cdxPropValue",
4075 StringProp(),
4076 iri="https://spdx.org/rdf/3.0.0/terms/Extension/cdxPropValue",
4077 compact="extension_cdxPropValue",
4078 )
4079
4080
4081# A characterization of some aspect of an Element that is associated with the Element in a generalized fashion.
4082@register("https://spdx.org/rdf/3.0.0/terms/Extension/Extension", compact_type="extension_Extension", abstract=True)
4083class extension_Extension(SHACLExtensibleObject, SHACLObject):
4084 NODE_KIND = NodeKind.BlankNodeOrIRI
4085 NAMED_INDIVIDUALS = {
4086 }
4087
4088
4089# Specifies the CVSS base, temporal, threat, or environmental severity type.
4090@register("https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType", compact_type="security_CvssSeverityType", abstract=False)
4091class security_CvssSeverityType(SHACLObject):
4092 NODE_KIND = NodeKind.BlankNodeOrIRI
4093 NAMED_INDIVIDUALS = {
4094 "critical": "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/critical",
4095 "high": "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/high",
4096 "low": "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/low",
4097 "medium": "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/medium",
4098 "none": "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/none",
4099 }
4100 # When a CVSS score is between 9.0 and 10.0
4101 critical = "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/critical"
4102 # When a CVSS score is between 7.0 and 8.9
4103 high = "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/high"
4104 # When a CVSS score is between 0.1 and 3.9
4105 low = "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/low"
4106 # When a CVSS score is between 4.0 and 6.9
4107 medium = "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/medium"
4108 # When a CVSS score is 0
4109 none = "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/none"
4110
4111
4112# Specifies the exploit catalog type.
4113@register("https://spdx.org/rdf/3.0.0/terms/Security/ExploitCatalogType", compact_type="security_ExploitCatalogType", abstract=False)
4114class security_ExploitCatalogType(SHACLObject):
4115 NODE_KIND = NodeKind.BlankNodeOrIRI
4116 NAMED_INDIVIDUALS = {
4117 "kev": "https://spdx.org/rdf/3.0.0/terms/Security/ExploitCatalogType/kev",
4118 "other": "https://spdx.org/rdf/3.0.0/terms/Security/ExploitCatalogType/other",
4119 }
4120 # CISA's Known Exploited Vulnerability (KEV) Catalog
4121 kev = "https://spdx.org/rdf/3.0.0/terms/Security/ExploitCatalogType/kev"
4122 # Other exploit catalogs
4123 other = "https://spdx.org/rdf/3.0.0/terms/Security/ExploitCatalogType/other"
4124
4125
4126# Specifies the SSVC decision type.
4127@register("https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType", compact_type="security_SsvcDecisionType", abstract=False)
4128class security_SsvcDecisionType(SHACLObject):
4129 NODE_KIND = NodeKind.BlankNodeOrIRI
4130 NAMED_INDIVIDUALS = {
4131 "act": "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/act",
4132 "attend": "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/attend",
4133 "track": "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/track",
4134 "trackStar": "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/trackStar",
4135 }
4136 # The vulnerability requires attention from the organization's internal, supervisory-level and leadership-level individuals. Necessary actions include requesting assistance or information about the vulnerability, as well as publishing a notification either internally and/or externally. Typically, internal groups would meet to determine the overall response and then execute agreed upon actions. CISA recommends remediating Act vulnerabilities as soon as possible.
4137 act = "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/act"
4138 # The vulnerability requires attention from the organization's internal, supervisory-level individuals. Necessary actions include requesting assistance or information about the vulnerability, and may involve publishing a notification either internally and/or externally. CISA recommends remediating Attend vulnerabilities sooner than standard update timelines.
4139 attend = "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/attend"
4140 # The vulnerability does not require action at this time. The organization would continue to track the vulnerability and reassess it if new information becomes available. CISA recommends remediating Track vulnerabilities within standard update timelines.
4141 track = "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/track"
4142 # ("Track*" in the SSVC spec) The vulnerability contains specific characteristics that may require closer monitoring for changes. CISA recommends remediating Track* vulnerabilities within standard update timelines.
4143 trackStar = "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/trackStar"
4144
4145
4146# Specifies the VEX justification type.
4147@register("https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType", compact_type="security_VexJustificationType", abstract=False)
4148class security_VexJustificationType(SHACLObject):
4149 NODE_KIND = NodeKind.BlankNodeOrIRI
4150 NAMED_INDIVIDUALS = {
4151 "componentNotPresent": "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/componentNotPresent",
4152 "inlineMitigationsAlreadyExist": "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/inlineMitigationsAlreadyExist",
4153 "vulnerableCodeCannotBeControlledByAdversary": "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeCannotBeControlledByAdversary",
4154 "vulnerableCodeNotInExecutePath": "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeNotInExecutePath",
4155 "vulnerableCodeNotPresent": "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeNotPresent",
4156 }
4157 # The software is not affected because the vulnerable component is not in the product.
4158 componentNotPresent = "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/componentNotPresent"
4159 # Built-in inline controls or mitigations prevent an adversary from leveraging the vulnerability.
4160 inlineMitigationsAlreadyExist = "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/inlineMitigationsAlreadyExist"
4161 # The vulnerable component is present, and the component contains the vulnerable code. However, vulnerable code is used in such a way that an attacker cannot mount any anticipated attack.
4162 vulnerableCodeCannotBeControlledByAdversary = "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeCannotBeControlledByAdversary"
4163 # The affected code is not reachable through the execution of the code, including non-anticipated states of the product.
4164 vulnerableCodeNotInExecutePath = "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeNotInExecutePath"
4165 # The product is not affected because the code underlying the vulnerability is not present in the product.
4166 vulnerableCodeNotPresent = "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeNotPresent"
4167
4168
4169# Abstract ancestor class for all vulnerability assessments
4170@register("https://spdx.org/rdf/3.0.0/terms/Security/VulnAssessmentRelationship", compact_type="security_VulnAssessmentRelationship", abstract=True)
4171class security_VulnAssessmentRelationship(Relationship):
4172 NODE_KIND = NodeKind.BlankNodeOrIRI
4173 ID_ALIAS = "spdxId"
4174 NAMED_INDIVIDUALS = {
4175 }
4176
4177 @classmethod
4178 def _register_props(cls):
4179 super()._register_props()
4180 # Identifies who or what supplied the artifact or VulnAssessmentRelationship
4181 # referenced by the Element.
4182 cls._add_property(
4183 "suppliedBy",
4184 ObjectProp(Agent, False),
4185 iri="https://spdx.org/rdf/3.0.0/terms/Core/suppliedBy",
4186 compact="suppliedBy",
4187 )
4188 # Specifies an Element contained in a piece of software where a vulnerability was
4189 # found.
4190 cls._add_property(
4191 "security_assessedElement",
4192 ObjectProp(Element, False),
4193 iri="https://spdx.org/rdf/3.0.0/terms/Security/assessedElement",
4194 compact="security_assessedElement",
4195 )
4196 # Specifies a time when a vulnerability assessment was modified
4197 cls._add_property(
4198 "security_modifiedTime",
4199 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
4200 iri="https://spdx.org/rdf/3.0.0/terms/Security/modifiedTime",
4201 compact="security_modifiedTime",
4202 )
4203 # Specifies the time when a vulnerability was published.
4204 cls._add_property(
4205 "security_publishedTime",
4206 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
4207 iri="https://spdx.org/rdf/3.0.0/terms/Security/publishedTime",
4208 compact="security_publishedTime",
4209 )
4210 # Specifies the time and date when a vulnerability was withdrawn.
4211 cls._add_property(
4212 "security_withdrawnTime",
4213 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
4214 iri="https://spdx.org/rdf/3.0.0/terms/Security/withdrawnTime",
4215 compact="security_withdrawnTime",
4216 )
4217
4218
4219# Abstract class representing a license combination consisting of one or more
4220# licenses (optionally including additional text), which may be combined
4221# according to the SPDX license expression syntax.
4222@register("https://spdx.org/rdf/3.0.0/terms/SimpleLicensing/AnyLicenseInfo", compact_type="simplelicensing_AnyLicenseInfo", abstract=True)
4223class simplelicensing_AnyLicenseInfo(Element):
4224 NODE_KIND = NodeKind.BlankNodeOrIRI
4225 ID_ALIAS = "spdxId"
4226 NAMED_INDIVIDUALS = {
4227 }
4228
4229
4230# An SPDX Element containing an SPDX license expression string.
4231@register("https://spdx.org/rdf/3.0.0/terms/SimpleLicensing/LicenseExpression", compact_type="simplelicensing_LicenseExpression", abstract=False)
4232class simplelicensing_LicenseExpression(simplelicensing_AnyLicenseInfo):
4233 NODE_KIND = NodeKind.BlankNodeOrIRI
4234 ID_ALIAS = "spdxId"
4235 NAMED_INDIVIDUALS = {
4236 }
4237
4238 @classmethod
4239 def _register_props(cls):
4240 super()._register_props()
4241 # Maps a LicenseRef or AdditionRef string for a Custom License or a Custom
4242 # License Addition to its URI ID.
4243 cls._add_property(
4244 "simplelicensing_customIdToUri",
4245 ListProp(ObjectProp(DictionaryEntry, False)),
4246 iri="https://spdx.org/rdf/3.0.0/terms/SimpleLicensing/customIdToUri",
4247 compact="simplelicensing_customIdToUri",
4248 )
4249 # A string in the license expression format.
4250 cls._add_property(
4251 "simplelicensing_licenseExpression",
4252 StringProp(),
4253 iri="https://spdx.org/rdf/3.0.0/terms/SimpleLicensing/licenseExpression",
4254 min_count=1,
4255 compact="simplelicensing_licenseExpression",
4256 )
4257 # The version of the SPDX License List used in the license expression.
4258 cls._add_property(
4259 "simplelicensing_licenseListVersion",
4260 StringProp(pattern=r"^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$",),
4261 iri="https://spdx.org/rdf/3.0.0/terms/SimpleLicensing/licenseListVersion",
4262 compact="simplelicensing_licenseListVersion",
4263 )
4264
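# Usage sketch for the class above (illustrative only; assumes the
# keyword-argument constructor provided by the SHACLObject base class earlier
# in this module; a complete SPDX document would also set _id and creationInfo):
def _example_license_expression():
    return simplelicensing_LicenseExpression(
        # Required: an SPDX license expression string.
        simplelicensing_licenseExpression="MIT OR Apache-2.0",
        # Optional: the SPDX License List version, constrained by the
        # semantic-version pattern declared above.
        simplelicensing_licenseListVersion="3.23.0",
    )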
4265
4266# A license or addition that is not listed on the SPDX License List.
4267@register("https://spdx.org/rdf/3.0.0/terms/SimpleLicensing/SimpleLicensingText", compact_type="simplelicensing_SimpleLicensingText", abstract=False)
4268class simplelicensing_SimpleLicensingText(Element):
4269 NODE_KIND = NodeKind.BlankNodeOrIRI
4270 ID_ALIAS = "spdxId"
4271 NAMED_INDIVIDUALS = {
4272 }
4273
4274 @classmethod
4275 def _register_props(cls):
4276 super()._register_props()
4277 # Identifies the full text of a License or Addition.
4278 cls._add_property(
4279 "simplelicensing_licenseText",
4280 StringProp(),
4281 iri="https://spdx.org/rdf/3.0.0/terms/SimpleLicensing/licenseText",
4282 min_count=1,
4283 compact="simplelicensing_licenseText",
4284 )
4285
4286
4287# A canonical, unique, immutable identifier
4288@register("https://spdx.org/rdf/3.0.0/terms/Software/ContentIdentifier", compact_type="software_ContentIdentifier", abstract=False)
4289class software_ContentIdentifier(IntegrityMethod):
4290 NODE_KIND = NodeKind.BlankNodeOrIRI
4291 NAMED_INDIVIDUALS = {
4292 }
4293
4294 @classmethod
4295 def _register_props(cls):
4296 super()._register_props()
4297 # Specifies the type of the content identifier.
4298 cls._add_property(
4299 "software_contentIdentifierType",
4300 EnumProp([
4301 ("https://spdx.org/rdf/3.0.0/terms/Software/ContentIdentifierType/gitoid", "gitoid"),
4302 ("https://spdx.org/rdf/3.0.0/terms/Software/ContentIdentifierType/swhid", "swhid"),
4303 ]),
4304 iri="https://spdx.org/rdf/3.0.0/terms/Software/contentIdentifierType",
4305 min_count=1,
4306 compact="software_contentIdentifierType",
4307 )
4308 # Specifies the value of the content identifier.
4309 cls._add_property(
4310 "software_contentIdentifierValue",
4311 AnyURIProp(),
4312 iri="https://spdx.org/rdf/3.0.0/terms/Software/contentIdentifierValue",
4313 min_count=1,
4314 compact="software_contentIdentifierValue",
4315 )
4316
4317
4318# Specifies the type of a content identifier.
4319@register("https://spdx.org/rdf/3.0.0/terms/Software/ContentIdentifierType", compact_type="software_ContentIdentifierType", abstract=False)
4320class software_ContentIdentifierType(SHACLObject):
4321 NODE_KIND = NodeKind.BlankNodeOrIRI
4322 NAMED_INDIVIDUALS = {
4323 "gitoid": "https://spdx.org/rdf/3.0.0/terms/Software/ContentIdentifierType/gitoid",
4324 "swhid": "https://spdx.org/rdf/3.0.0/terms/Software/ContentIdentifierType/swhid",
4325 }
4326 # Gitoid stands for [Git Object ID](https://git-scm.com/book/en/v2/Git-Internals-Git-Objects) and a gitoid of type blob is a unique hash of a binary artifact. A gitoid may represent the software [Artifact ID](https://github.com/omnibor/spec/blob/main/spec/SPEC.md#artifact-id) or the [OmniBOR Identifier](https://github.com/omnibor/spec/blob/main/spec/SPEC.md#omnibor-identifier) for the software artifact's associated [OmniBOR Document](https://github.com/omnibor/spec/blob/main/spec/SPEC.md#omnibor-document).
4327 gitoid = "https://spdx.org/rdf/3.0.0/terms/Software/ContentIdentifierType/gitoid"
4328 # SoftWare Hash IDentifier, persistent intrinsic identifiers for digital artifacts. The syntax of the identifiers is defined in the [SWHID specification](https://www.swhid.org/specification/v1.1/4.Syntax) and in the case of files they typically look like `swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2`.
4329 swhid = "https://spdx.org/rdf/3.0.0/terms/Software/ContentIdentifierType/swhid"
4330
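# Usage sketch combining the two classes above (illustrative only; the helper
# name is hypothetical, and the SWHID value is the example quoted in the
# comment above):
def _example_content_identifier():
    return software_ContentIdentifier(
        # The type is one of the IRIs enumerated by software_ContentIdentifierType.
        software_contentIdentifierType=software_ContentIdentifierType.swhid,
        # The identifier itself is carried as a URI-valued string.
        software_contentIdentifierValue="swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2",
    )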
4331
4332# Enumeration of the different kinds of SPDX file.
4333@register("https://spdx.org/rdf/3.0.0/terms/Software/FileKindType", compact_type="software_FileKindType", abstract=False)
4334class software_FileKindType(SHACLObject):
4335 NODE_KIND = NodeKind.BlankNodeOrIRI
4336 NAMED_INDIVIDUALS = {
4337 "directory": "https://spdx.org/rdf/3.0.0/terms/Software/FileKindType/directory",
4338 "file": "https://spdx.org/rdf/3.0.0/terms/Software/FileKindType/file",
4339 }
4340 # The file represents a directory and all content stored in that directory.
4341 directory = "https://spdx.org/rdf/3.0.0/terms/Software/FileKindType/directory"
4342 # The file represents a single file (default).
4343 file = "https://spdx.org/rdf/3.0.0/terms/Software/FileKindType/file"
4344
4345
4346# Provides a set of values to be used to describe the common types of SBOMs that
4347# tools may create.
4348@register("https://spdx.org/rdf/3.0.0/terms/Software/SbomType", compact_type="software_SbomType", abstract=False)
4349class software_SbomType(SHACLObject):
4350 NODE_KIND = NodeKind.BlankNodeOrIRI
4351 NAMED_INDIVIDUALS = {
4352 "analyzed": "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/analyzed",
4353 "build": "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/build",
4354 "deployed": "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/deployed",
4355 "design": "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/design",
4356 "runtime": "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/runtime",
4357 "source": "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/source",
4358 }
4359 # SBOM generated through analysis of artifacts (e.g., executables, packages, containers, and virtual machine images) after its build. Such analysis generally requires a variety of heuristics. In some contexts, this may also be referred to as a "3rd party" SBOM.
4360 analyzed = "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/analyzed"
4361 # SBOM generated as part of the process of building the software to create a releasable artifact (e.g., executable or package) from data such as source files, dependencies, built components, build process ephemeral data, and other SBOMs.
4362 build = "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/build"
4363 # SBOM provides an inventory of software that is present on a system. This may be an assembly of other SBOMs that combines analysis of configuration options, and examination of execution behavior in a (potentially simulated) deployment environment.
4364 deployed = "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/deployed"
4365 # SBOM of intended, planned software project or product with included components (some of which may not yet exist) for a new software artifact.
4366 design = "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/design"
4367 # SBOM generated through instrumenting the system running the software, to capture only components present in the system, as well as external call-outs or dynamically loaded components. In some contexts, this may also be referred to as an "Instrumented" or "Dynamic" SBOM.
4368 runtime = "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/runtime"
4369 # SBOM created directly from the development environment, source files, and included dependencies used to build a product artifact.
4370 source = "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/source"
4371
4372
4373# Provides information about the primary purpose of an Element.
4374@register("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose", compact_type="software_SoftwarePurpose", abstract=False)
4375class software_SoftwarePurpose(SHACLObject):
4376 NODE_KIND = NodeKind.BlankNodeOrIRI
4377 NAMED_INDIVIDUALS = {
4378 "application": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/application",
4379 "archive": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/archive",
4380 "bom": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/bom",
4381 "configuration": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/configuration",
4382 "container": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/container",
4383 "data": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/data",
4384 "device": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/device",
4385 "deviceDriver": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/deviceDriver",
4386 "diskImage": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/diskImage",
4387 "documentation": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/documentation",
4388 "evidence": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/evidence",
4389 "executable": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/executable",
4390 "file": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/file",
4391 "filesystemImage": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/filesystemImage",
4392 "firmware": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/firmware",
4393 "framework": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/framework",
4394 "install": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/install",
4395 "library": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/library",
4396 "manifest": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/manifest",
4397 "model": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/model",
4398 "module": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/module",
4399 "operatingSystem": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/operatingSystem",
4400 "other": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/other",
4401 "patch": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/patch",
4402 "platform": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/platform",
4403 "requirement": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/requirement",
4404 "source": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/source",
4405 "specification": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/specification",
4406 "test": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/test",
4407 }
4408 # the Element is a software application
4409 application = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/application"
4410 # the Element is an archived collection of one or more files (.tar, .zip, etc)
4411 archive = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/archive"
4412 # Element is a bill of materials
4413 bom = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/bom"
4414 # Element is configuration data
4415 configuration = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/configuration"
4416 # the Element is a container image which can be used by a container runtime application
4417 container = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/container"
4418 # Element is data
4419 data = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/data"
4420 # the Element refers to a chipset, processor, or electronic board
4421 device = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/device"
4422 # Element represents software that controls hardware devices
4423 deviceDriver = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/deviceDriver"
4424 # the Element refers to a disk image that can be written to a disk, booted in a VM, etc. A disk image typically contains most or all of the components necessary to boot, such as bootloaders, kernels, firmware, userspace, etc.
4425 diskImage = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/diskImage"
4426 # Element is documentation
4427 documentation = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/documentation"
4428 # the Element is the evidence that a specification or requirement has been fulfilled
4429 evidence = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/evidence"
4430 # Element is an Artifact that can be run on a computer
4431 executable = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/executable"
4432 # the Element is a single file which can be independently distributed (configuration file, statically linked binary, Kubernetes deployment, etc)
4433 file = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/file"
4434 # the Element is a file system image that can be written to a disk (or virtual) partition
4435 filesystemImage = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/filesystemImage"
4436 # the Element provides low level control over a device's hardware
4437 firmware = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/firmware"
4438 # the Element is a software framework
4439 framework = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/framework"
4440 # the Element is used to install software on disk
4441 install = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/install"
4442 # the Element is a software library
4443 library = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/library"
4444 # the Element is a software manifest
4445 manifest = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/manifest"
4446 # the Element is a machine learning or artificial intelligence model
4447 model = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/model"
4448 # the Element is a module of a piece of software
4449 module = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/module"
4450 # the Element is an operating system
4451 operatingSystem = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/operatingSystem"
4452 # the Element doesn't fit into any of the other categories
4453 other = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/other"
4454 # Element contains a set of changes to update, fix, or improve another Element
4455 patch = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/patch"
4456 # Element represents a runtime environment
4457 platform = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/platform"
4458 # the Element provides a requirement needed as input for another Element
4459 requirement = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/requirement"
4460 # the Element is a single or a collection of source files
4461 source = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/source"
4462 # the Element is a plan, guideline, or strategy for how to create, perform, or analyse an application
4463 specification = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/specification"
4464 # the Element is a test used to verify functionality on a software element
4465 test = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/test"
4466
4467
4468# Class that describes a build instance of software/artifacts.
4469@register("https://spdx.org/rdf/3.0.0/terms/Build/Build", compact_type="build_Build", abstract=False)
4470class build_Build(Element):
4471 NODE_KIND = NodeKind.BlankNodeOrIRI
4472 ID_ALIAS = "spdxId"
4473 NAMED_INDIVIDUALS = {
4474 }
4475
4476 @classmethod
4477 def _register_props(cls):
4478 super()._register_props()
4479 # Property that describes the time at which a build stops.
4480 cls._add_property(
4481 "build_buildEndTime",
4482 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
4483 iri="https://spdx.org/rdf/3.0.0/terms/Build/buildEndTime",
4484 compact="build_buildEndTime",
4485 )
4486 # A buildId is a locally unique identifier used by a builder to identify a unique
4487 # instance of a build produced by it.
4488 cls._add_property(
4489 "build_buildId",
4490 StringProp(),
4491 iri="https://spdx.org/rdf/3.0.0/terms/Build/buildId",
4492 compact="build_buildId",
4493 )
4494 # Property describing the start time of a build.
4495 cls._add_property(
4496 "build_buildStartTime",
4497 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
4498 iri="https://spdx.org/rdf/3.0.0/terms/Build/buildStartTime",
4499 compact="build_buildStartTime",
4500 )
4501 # A buildType is a hint that is used to indicate the toolchain, platform, or
4502 # infrastructure that the build was invoked on.
4503 cls._add_property(
4504 "build_buildType",
4505 AnyURIProp(),
4506 iri="https://spdx.org/rdf/3.0.0/terms/Build/buildType",
4507 min_count=1,
4508 compact="build_buildType",
4509 )
4510 # Property that describes the digest of the build configuration file used to
4511 # invoke a build.
4512 cls._add_property(
4513 "build_configSourceDigest",
4514 ListProp(ObjectProp(Hash, False)),
4515 iri="https://spdx.org/rdf/3.0.0/terms/Build/configSourceDigest",
4516 compact="build_configSourceDigest",
4517 )
4518 # Property describes the invocation entrypoint of a build.
4519 cls._add_property(
4520 "build_configSourceEntrypoint",
4521 ListProp(StringProp()),
4522 iri="https://spdx.org/rdf/3.0.0/terms/Build/configSourceEntrypoint",
4523 compact="build_configSourceEntrypoint",
4524 )
4525 # Property that describes the URI of the build configuration source file.
4526 cls._add_property(
4527 "build_configSourceUri",
4528 ListProp(AnyURIProp()),
4529 iri="https://spdx.org/rdf/3.0.0/terms/Build/configSourceUri",
4530 compact="build_configSourceUri",
4531 )
4532 # Property describing the session in which a build is invoked.
4533 cls._add_property(
4534 "build_environment",
4535 ListProp(ObjectProp(DictionaryEntry, False)),
4536 iri="https://spdx.org/rdf/3.0.0/terms/Build/environment",
4537 compact="build_environment",
4538 )
4539 # Property describing the parameters used in an instance of a build.
4540 cls._add_property(
4541 "build_parameters",
4542 ListProp(ObjectProp(DictionaryEntry, False)),
4543 iri="https://spdx.org/rdf/3.0.0/terms/Build/parameters",
4544 compact="build_parameters",
4545 )
4546
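# Usage sketch for build_Build (illustrative only; the buildType URI and
# buildId below are hypothetical placeholders, and a real document would also
# set _id and creationInfo on the Element):
def _example_build():
    return build_Build(
        # build_buildType is the only property above with min_count=1.
        build_buildType="https://example.org/build-types/bitbake",
        build_buildId="build-0001",
    )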
4547
4548# Agent represents anything with the potential to act on a system.
4549@register("https://spdx.org/rdf/3.0.0/terms/Core/Agent", compact_type="Agent", abstract=False)
4550class Agent(Element):
4551 NODE_KIND = NodeKind.BlankNodeOrIRI
4552 ID_ALIAS = "spdxId"
4553 NAMED_INDIVIDUALS = {
4554 }
4555
4556
4557# An assertion made in relation to one or more elements.
4558@register("https://spdx.org/rdf/3.0.0/terms/Core/Annotation", compact_type="Annotation", abstract=False)
4559class Annotation(Element):
4560 NODE_KIND = NodeKind.BlankNodeOrIRI
4561 ID_ALIAS = "spdxId"
4562 NAMED_INDIVIDUALS = {
4563 }
4564
4565 @classmethod
4566 def _register_props(cls):
4567 super()._register_props()
4568 # Describes the type of annotation.
4569 cls._add_property(
4570 "annotationType",
4571 EnumProp([
4572 ("https://spdx.org/rdf/3.0.0/terms/Core/AnnotationType/other", "other"),
4573 ("https://spdx.org/rdf/3.0.0/terms/Core/AnnotationType/review", "review"),
4574 ]),
4575 iri="https://spdx.org/rdf/3.0.0/terms/Core/annotationType",
4576 min_count=1,
4577 compact="annotationType",
4578 )
4579 # Specifies the media type of an Element or Property.
4580 cls._add_property(
4581 "contentType",
4582 StringProp(pattern=r"^[^\/]+\/[^\/]+$",),
4583 iri="https://spdx.org/rdf/3.0.0/terms/Core/contentType",
4584 compact="contentType",
4585 )
4586 # Commentary on an assertion that an annotator has made.
4587 cls._add_property(
4588 "statement",
4589 StringProp(),
4590 iri="https://spdx.org/rdf/3.0.0/terms/Core/statement",
4591 compact="statement",
4592 )
4593 # An Element an annotator has made an assertion about.
4594 cls._add_property(
4595 "subject",
4596 ObjectProp(Element, True),
4597 iri="https://spdx.org/rdf/3.0.0/terms/Core/subject",
4598 min_count=1,
4599 compact="subject",
4600 )
4601
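# Usage sketch for Annotation (illustrative only; 'target' stands for any
# previously created Element, and the annotationType value is one of the IRIs
# accepted by the EnumProp above):
def _example_annotation(target):
    return Annotation(
        annotationType="https://spdx.org/rdf/3.0.0/terms/Core/AnnotationType/review",
        statement="Reviewed for license compliance.",
        subject=target,
    )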
4602
4603# A distinct article or unit within the digital domain.
4604@register("https://spdx.org/rdf/3.0.0/terms/Core/Artifact", compact_type="Artifact", abstract=True)
4605class Artifact(Element):
4606 NODE_KIND = NodeKind.BlankNodeOrIRI
4607 ID_ALIAS = "spdxId"
4608 NAMED_INDIVIDUALS = {
4609 }
4610
4611 @classmethod
4612 def _register_props(cls):
4613 super()._register_props()
4614 # Specifies the time an artifact was built.
4615 cls._add_property(
4616 "builtTime",
4617 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
4618 iri="https://spdx.org/rdf/3.0.0/terms/Core/builtTime",
4619 compact="builtTime",
4620 )
4621 # Identifies from where or whom the Element originally came.
4622 cls._add_property(
4623 "originatedBy",
4624 ListProp(ObjectProp(Agent, False)),
4625 iri="https://spdx.org/rdf/3.0.0/terms/Core/originatedBy",
4626 compact="originatedBy",
4627 )
4628 # Specifies the time an artifact was released.
4629 cls._add_property(
4630 "releaseTime",
4631 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
4632 iri="https://spdx.org/rdf/3.0.0/terms/Core/releaseTime",
4633 compact="releaseTime",
4634 )
4635 # The name of a relevant standard that may apply to an artifact.
4636 cls._add_property(
4637 "standardName",
4638 ListProp(StringProp()),
4639 iri="https://spdx.org/rdf/3.0.0/terms/Core/standardName",
4640 compact="standardName",
4641 )
4642 # Identifies who or what supplied the artifact or VulnAssessmentRelationship
4643 # referenced by the Element.
4644 cls._add_property(
4645 "suppliedBy",
4646 ObjectProp(Agent, False),
4647 iri="https://spdx.org/rdf/3.0.0/terms/Core/suppliedBy",
4648 compact="suppliedBy",
4649 )
4650 # Specifies the level of support associated with an artifact.
4651 cls._add_property(
4652 "supportLevel",
4653 ListProp(EnumProp([
4654 ("https://spdx.org/rdf/3.0.0/terms/Core/SupportType/deployed", "deployed"),
4655 ("https://spdx.org/rdf/3.0.0/terms/Core/SupportType/development", "development"),
4656 ("https://spdx.org/rdf/3.0.0/terms/Core/SupportType/endOfSupport", "endOfSupport"),
4657 ("https://spdx.org/rdf/3.0.0/terms/Core/SupportType/limitedSupport", "limitedSupport"),
4658 ("https://spdx.org/rdf/3.0.0/terms/Core/SupportType/noAssertion", "noAssertion"),
4659 ("https://spdx.org/rdf/3.0.0/terms/Core/SupportType/noSupport", "noSupport"),
4660 ("https://spdx.org/rdf/3.0.0/terms/Core/SupportType/support", "support"),
4661 ])),
4662 iri="https://spdx.org/rdf/3.0.0/terms/Core/supportLevel",
4663 compact="supportLevel",
4664 )
4665 # Specifies until when the artifact can be used before its usage needs to be
4666 # reassessed.
4667 cls._add_property(
4668 "validUntilTime",
4669 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
4670 iri="https://spdx.org/rdf/3.0.0/terms/Core/validUntilTime",
4671 compact="validUntilTime",
4672 )
4673
4674
4675# A collection of Elements that have a shared context.
4676@register("https://spdx.org/rdf/3.0.0/terms/Core/Bundle", compact_type="Bundle", abstract=False)
4677class Bundle(ElementCollection):
4678 NODE_KIND = NodeKind.BlankNodeOrIRI
4679 ID_ALIAS = "spdxId"
4680 NAMED_INDIVIDUALS = {
4681 }
4682
4683 @classmethod
4684 def _register_props(cls):
4685 super()._register_props()
4686 # Gives information about the circumstances or unifying properties
4687 # that Elements of the bundle have been assembled under.
4688 cls._add_property(
4689 "context",
4690 StringProp(),
4691 iri="https://spdx.org/rdf/3.0.0/terms/Core/context",
4692 compact="context",
4693 )
4694
4695
4696# A mathematically calculated representation of a grouping of data.
4697@register("https://spdx.org/rdf/3.0.0/terms/Core/Hash", compact_type="Hash", abstract=False)
4698class Hash(IntegrityMethod):
4699 NODE_KIND = NodeKind.BlankNodeOrIRI
4700 NAMED_INDIVIDUALS = {
4701 }
4702
4703 @classmethod
4704 def _register_props(cls):
4705 super()._register_props()
4706 # Specifies the algorithm used for calculating the hash value.
4707 cls._add_property(
4708 "algorithm",
4709 EnumProp([
4710 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b256", "blake2b256"),
4711 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b384", "blake2b384"),
4712 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b512", "blake2b512"),
4713 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake3", "blake3"),
4714 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/crystalsDilithium", "crystalsDilithium"),
4715 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/crystalsKyber", "crystalsKyber"),
4716 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/falcon", "falcon"),
4717 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md2", "md2"),
4718 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md4", "md4"),
4719 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md5", "md5"),
4720 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md6", "md6"),
4721 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/other", "other"),
4722 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha1", "sha1"),
4723 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha224", "sha224"),
4724 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha256", "sha256"),
4725 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha384", "sha384"),
4726 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_224", "sha3_224"),
4727 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_256", "sha3_256"),
4728 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_384", "sha3_384"),
4729 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_512", "sha3_512"),
4730 ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha512", "sha512"),
4731 ]),
4732 iri="https://spdx.org/rdf/3.0.0/terms/Core/algorithm",
4733 min_count=1,
4734 compact="algorithm",
4735 )
4736 # The result of applying a hash algorithm to an Element.
4737 cls._add_property(
4738 "hashValue",
4739 StringProp(),
4740 iri="https://spdx.org/rdf/3.0.0/terms/Core/hashValue",
4741 min_count=1,
4742 compact="hashValue",
4743 )
4744
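# Usage sketch for Hash (illustrative only): compute a SHA-256 digest of some
# bytes and wrap it in a Hash integrity method, using the algorithm IRI listed
# in the EnumProp above.
def _example_sha256_hash(data):
    import hashlib

    return Hash(
        algorithm="https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha256",
        hashValue=hashlib.sha256(data).hexdigest(),
    )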
4745
4746# Provide context for a relationship that occurs in the lifecycle.
4747@register("https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopedRelationship", compact_type="LifecycleScopedRelationship", abstract=False)
4748class LifecycleScopedRelationship(Relationship):
4749 NODE_KIND = NodeKind.BlankNodeOrIRI
4750 ID_ALIAS = "spdxId"
4751 NAMED_INDIVIDUALS = {
4752 }
4753
4754 @classmethod
4755 def _register_props(cls):
4756 super()._register_props()
4757 # Capture the scope of information about a specific relationship between elements.
4758 cls._add_property(
4759 "scope",
4760 EnumProp([
4761 ("https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/build", "build"),
4762 ("https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/design", "design"),
4763 ("https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/development", "development"),
4764 ("https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/other", "other"),
4765 ("https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/runtime", "runtime"),
4766 ("https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/test", "test"),
4767 ]),
4768 iri="https://spdx.org/rdf/3.0.0/terms/Core/scope",
4769 compact="scope",
4770 )
4771
4772
4773# A group of people who work together in an organized way for a shared purpose.
4774@register("https://spdx.org/rdf/3.0.0/terms/Core/Organization", compact_type="Organization", abstract=False)
4775class Organization(Agent):
4776 NODE_KIND = NodeKind.BlankNodeOrIRI
4777 ID_ALIAS = "spdxId"
4778 NAMED_INDIVIDUALS = {
4779 }
4780
4781
4782# An individual human being.
4783@register("https://spdx.org/rdf/3.0.0/terms/Core/Person", compact_type="Person", abstract=False)
4784class Person(Agent):
4785 NODE_KIND = NodeKind.BlankNodeOrIRI
4786 ID_ALIAS = "spdxId"
4787 NAMED_INDIVIDUALS = {
4788 }
4789
4790
4791# A software agent.
4792@register("https://spdx.org/rdf/3.0.0/terms/Core/SoftwareAgent", compact_type="SoftwareAgent", abstract=False)
4793class SoftwareAgent(Agent):
4794 NODE_KIND = NodeKind.BlankNodeOrIRI
4795 ID_ALIAS = "spdxId"
4796 NAMED_INDIVIDUALS = {
4797 }
4798
4799
4800# Portion of an AnyLicenseInfo representing a set of licensing information
4801# where all elements apply.
4802@register("https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/ConjunctiveLicenseSet", compact_type="expandedlicensing_ConjunctiveLicenseSet", abstract=False)
4803class expandedlicensing_ConjunctiveLicenseSet(simplelicensing_AnyLicenseInfo):
4804 NODE_KIND = NodeKind.BlankNodeOrIRI
4805 ID_ALIAS = "spdxId"
4806 NAMED_INDIVIDUALS = {
4807 }
4808
4809 @classmethod
4810 def _register_props(cls):
4811 super()._register_props()
4812 # A license expression participating in a license set.
4813 cls._add_property(
4814 "expandedlicensing_member",
4815 ListProp(ObjectProp(simplelicensing_AnyLicenseInfo, False)),
4816 iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/member",
4817 min_count=2,
4818 compact="expandedlicensing_member",
4819 )
4820
4821
4822# A license addition that is not listed on the SPDX Exceptions List.
4823@register("https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/CustomLicenseAddition", compact_type="expandedlicensing_CustomLicenseAddition", abstract=False)
4824class expandedlicensing_CustomLicenseAddition(expandedlicensing_LicenseAddition):
4825 NODE_KIND = NodeKind.BlankNodeOrIRI
4826 ID_ALIAS = "spdxId"
4827 NAMED_INDIVIDUALS = {
4828 }
4829
4830
4831# Portion of an AnyLicenseInfo representing a set of licensing information where
4832# only one of the elements applies.
4833@register("https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/DisjunctiveLicenseSet", compact_type="expandedlicensing_DisjunctiveLicenseSet", abstract=False)
4834class expandedlicensing_DisjunctiveLicenseSet(simplelicensing_AnyLicenseInfo):
4835 NODE_KIND = NodeKind.BlankNodeOrIRI
4836 ID_ALIAS = "spdxId"
4837 NAMED_INDIVIDUALS = {
4838 }
4839
4840 @classmethod
4841 def _register_props(cls):
4842 super()._register_props()
4843 # A license expression participating in a license set.
4844 cls._add_property(
4845 "expandedlicensing_member",
4846 ListProp(ObjectProp(simplelicensing_AnyLicenseInfo, False)),
4847 iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/member",
4848 min_count=2,
4849 compact="expandedlicensing_member",
4850 )
4851
4852
4853# Abstract class representing a License or an OrLaterOperator.
4854@register("https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/ExtendableLicense", compact_type="expandedlicensing_ExtendableLicense", abstract=True)
4855class expandedlicensing_ExtendableLicense(simplelicensing_AnyLicenseInfo):
4856 NODE_KIND = NodeKind.BlankNodeOrIRI
4857 ID_ALIAS = "spdxId"
4858 NAMED_INDIVIDUALS = {
4859 }
4860
4861
4862# A concrete subclass of AnyLicenseInfo used by Individuals in the
4863# ExpandedLicensing profile.
4864@register("https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/IndividualLicensingInfo", compact_type="expandedlicensing_IndividualLicensingInfo", abstract=False)
4865class expandedlicensing_IndividualLicensingInfo(simplelicensing_AnyLicenseInfo):
4866 NODE_KIND = NodeKind.BlankNodeOrIRI
4867 ID_ALIAS = "spdxId"
4868 NAMED_INDIVIDUALS = {
4869 "NoAssertionLicense": "https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/NoAssertionLicense",
4870 "NoneLicense": "https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/NoneLicense",
4871 }
4872 # An Individual Value for License when no assertion can be made about its actual
4873 # value.
4874 NoAssertionLicense = "https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/NoAssertionLicense"
4875 # An Individual Value for License where the SPDX data creator determines that no
4876 # license is present.
4877 NoneLicense = "https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/NoneLicense"
4878
4879
4880# Abstract class for the portion of an AnyLicenseInfo representing a license.
4881@register("https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/License", compact_type="expandedlicensing_License", abstract=True)
4882class expandedlicensing_License(expandedlicensing_ExtendableLicense):
4883 NODE_KIND = NodeKind.BlankNodeOrIRI
4884 ID_ALIAS = "spdxId"
4885 NAMED_INDIVIDUALS = {
4886 }
4887
4888 @classmethod
4889 def _register_props(cls):
4890 super()._register_props()
4891 # Specifies whether a license or additional text identifier has been marked as
4892 # deprecated.
4893 cls._add_property(
4894 "expandedlicensing_isDeprecatedLicenseId",
4895 BooleanProp(),
4896 iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/isDeprecatedLicenseId",
4897 compact="expandedlicensing_isDeprecatedLicenseId",
4898 )
4899 # Specifies whether the License is listed as free by the
4900 # [Free Software Foundation (FSF)](https://fsf.org).
4901 cls._add_property(
4902 "expandedlicensing_isFsfLibre",
4903 BooleanProp(),
4904 iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/isFsfLibre",
4905 compact="expandedlicensing_isFsfLibre",
4906 )
4907 # Specifies whether the License is listed as approved by the
4908 # [Open Source Initiative (OSI)](https://opensource.org).
4909 cls._add_property(
4910 "expandedlicensing_isOsiApproved",
4911 BooleanProp(),
4912 iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/isOsiApproved",
4913 compact="expandedlicensing_isOsiApproved",
4914 )
4915 # Identifies all the text and metadata associated with a license in the license
4916 # XML format.
4917 cls._add_property(
4918 "expandedlicensing_licenseXml",
4919 StringProp(),
4920 iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/licenseXml",
4921 compact="expandedlicensing_licenseXml",
4922 )
4923 # Specifies the licenseId that is preferred to be used in place of a deprecated
4924 # License or LicenseAddition.
4925 cls._add_property(
4926 "expandedlicensing_obsoletedBy",
4927 StringProp(),
4928 iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/obsoletedBy",
4929 compact="expandedlicensing_obsoletedBy",
4930 )
4931 # Contains a URL where the License or LicenseAddition can be found in use.
4932 cls._add_property(
4933 "expandedlicensing_seeAlso",
4934 ListProp(AnyURIProp()),
4935 iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/seeAlso",
4936 compact="expandedlicensing_seeAlso",
4937 )
4938 # Provides a License author's preferred text to indicate that a file is covered
4939 # by the License.
4940 cls._add_property(
4941 "expandedlicensing_standardLicenseHeader",
4942 StringProp(),
4943 iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/standardLicenseHeader",
4944 compact="expandedlicensing_standardLicenseHeader",
4945 )
4946 # Identifies the full text of a License, in SPDX templating format.
4947 cls._add_property(
4948 "expandedlicensing_standardLicenseTemplate",
4949 StringProp(),
4950 iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/standardLicenseTemplate",
4951 compact="expandedlicensing_standardLicenseTemplate",
4952 )
4953 # Identifies the full text of a License or Addition.
4954 cls._add_property(
4955 "simplelicensing_licenseText",
4956 StringProp(),
4957 iri="https://spdx.org/rdf/3.0.0/terms/SimpleLicensing/licenseText",
4958 min_count=1,
4959 compact="simplelicensing_licenseText",
4960 )
4961
4962
4963# A license that is listed on the SPDX License List.
4964@register("https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/ListedLicense", compact_type="expandedlicensing_ListedLicense", abstract=False)
4965class expandedlicensing_ListedLicense(expandedlicensing_License):
4966 NODE_KIND = NodeKind.BlankNodeOrIRI
4967 ID_ALIAS = "spdxId"
4968 NAMED_INDIVIDUALS = {
4969 }
4970
4971 @classmethod
4972 def _register_props(cls):
4973 super()._register_props()
4974 # Specifies the SPDX License List version in which this license or exception
4975 # identifier was deprecated.
4976 cls._add_property(
4977 "expandedlicensing_deprecatedVersion",
4978 StringProp(),
4979 iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/deprecatedVersion",
4980 compact="expandedlicensing_deprecatedVersion",
4981 )
4982 # Specifies the SPDX License List version in which this ListedLicense or
4983 # ListedLicenseException identifier was first added.
4984 cls._add_property(
4985 "expandedlicensing_listVersionAdded",
4986 StringProp(),
4987 iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/listVersionAdded",
4988 compact="expandedlicensing_listVersionAdded",
4989 )
4990
4991
4992# Portion of an AnyLicenseInfo representing this version, or any later version,
4993# of the indicated License.
4994@register("https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/OrLaterOperator", compact_type="expandedlicensing_OrLaterOperator", abstract=False)
4995class expandedlicensing_OrLaterOperator(expandedlicensing_ExtendableLicense):
4996 NODE_KIND = NodeKind.BlankNodeOrIRI
4997 ID_ALIAS = "spdxId"
4998 NAMED_INDIVIDUALS = {
4999 }
5000
5001 @classmethod
5002 def _register_props(cls):
5003 super()._register_props()
5004 # A License participating in an 'or later' model.
5005 cls._add_property(
5006 "expandedlicensing_subjectLicense",
5007 ObjectProp(expandedlicensing_License, True),
5008 iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/subjectLicense",
5009 min_count=1,
5010 compact="expandedlicensing_subjectLicense",
5011 )
5012
5013
5014# Portion of an AnyLicenseInfo representing a License which has additional
5015# text applied to it.
5016@register("https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/WithAdditionOperator", compact_type="expandedlicensing_WithAdditionOperator", abstract=False)
5017class expandedlicensing_WithAdditionOperator(simplelicensing_AnyLicenseInfo):
5018 NODE_KIND = NodeKind.BlankNodeOrIRI
5019 ID_ALIAS = "spdxId"
5020 NAMED_INDIVIDUALS = {
5021 }
5022
5023 @classmethod
5024 def _register_props(cls):
5025 super()._register_props()
5026 # A LicenseAddition participating in a 'with addition' model.
5027 cls._add_property(
5028 "expandedlicensing_subjectAddition",
5029 ObjectProp(expandedlicensing_LicenseAddition, True),
5030 iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/subjectAddition",
5031 min_count=1,
5032 compact="expandedlicensing_subjectAddition",
5033 )
5034 # A License participating in a 'with addition' model.
5035 cls._add_property(
5036 "expandedlicensing_subjectExtendableLicense",
5037 ObjectProp(expandedlicensing_ExtendableLicense, True),
5038 iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/subjectExtendableLicense",
5039 min_count=1,
5040 compact="expandedlicensing_subjectExtendableLicense",
5041 )
5042
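# Usage sketch composing the licensing classes above into the equivalent of the
# license expression "GPL-2.0-or-later WITH Classpath-exception-2.0"
# (illustrative only; the _id values and the abbreviated license/addition texts
# are hypothetical placeholders):
def _example_license_with_exception():
    gpl2 = expandedlicensing_ListedLicense(
        _id="http://spdx.org/licenses/GPL-2.0-only",
        simplelicensing_licenseText="GNU GENERAL PUBLIC LICENSE Version 2 ...",
    )
    classpath = expandedlicensing_ListedLicenseException(
        _id="http://spdx.org/licenses/Classpath-exception-2.0",
        expandedlicensing_additionText="Linking this library statically or dynamically ...",
    )
    # OrLaterOperator wraps the base license; WithAdditionOperator then
    # attaches the exception text to that extendable license.
    or_later = expandedlicensing_OrLaterOperator(expandedlicensing_subjectLicense=gpl2)
    return expandedlicensing_WithAdditionOperator(
        expandedlicensing_subjectExtendableLicense=or_later,
        expandedlicensing_subjectAddition=classpath,
    )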
5043
5044# A type of extension consisting of a list of name value pairs.
5045@register("https://spdx.org/rdf/3.0.0/terms/Extension/CdxPropertiesExtension", compact_type="extension_CdxPropertiesExtension", abstract=False)
5046class extension_CdxPropertiesExtension(extension_Extension):
5047 NODE_KIND = NodeKind.BlankNodeOrIRI
5048 NAMED_INDIVIDUALS = {
5049 }
5050
5051 @classmethod
5052 def _register_props(cls):
5053 super()._register_props()
5054 # Provides a map of property names to values.
5055 cls._add_property(
5056 "extension_cdxProperty",
5057 ListProp(ObjectProp(extension_CdxPropertyEntry, False)),
5058 iri="https://spdx.org/rdf/3.0.0/terms/Extension/cdxProperty",
5059 min_count=1,
5060 compact="extension_cdxProperty",
5061 )
5062
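# Usage sketch pairing CdxPropertiesExtension with the CdxPropertyEntry class
# defined earlier in this module (illustrative only; the name/value pair is a
# hypothetical example):
def _example_cdx_properties_extension():
    entry = extension_CdxPropertyEntry(
        extension_cdxPropName="internal:reproducible",
        extension_cdxPropValue="true",
    )
    # extension_cdxProperty is a ListProp, so it takes a list of entries.
    return extension_CdxPropertiesExtension(extension_cdxProperty=[entry])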
5063
5064# Provides a CVSS version 2.0 assessment for a vulnerability.
5065@register("https://spdx.org/rdf/3.0.0/terms/Security/CvssV2VulnAssessmentRelationship", compact_type="security_CvssV2VulnAssessmentRelationship", abstract=False)
5066class security_CvssV2VulnAssessmentRelationship(security_VulnAssessmentRelationship):
5067 NODE_KIND = NodeKind.BlankNodeOrIRI
5068 ID_ALIAS = "spdxId"
5069 NAMED_INDIVIDUALS = {
5070 }
5071
5072 @classmethod
5073 def _register_props(cls):
5074 super()._register_props()
5075 # Provides a numerical (0-10) representation of the severity of a vulnerability.
5076 cls._add_property(
5077 "security_score",
5078 FloatProp(),
5079 iri="https://spdx.org/rdf/3.0.0/terms/Security/score",
5080 min_count=1,
5081 compact="security_score",
5082 )
5083 # Specifies the CVSS vector string for a vulnerability.
5084 cls._add_property(
5085 "security_vectorString",
5086 StringProp(),
5087 iri="https://spdx.org/rdf/3.0.0/terms/Security/vectorString",
5088 min_count=1,
5089 compact="security_vectorString",
5090 )
5091
5092
5093# Provides a CVSS version 3 assessment for a vulnerability.
5094@register("https://spdx.org/rdf/3.0.0/terms/Security/CvssV3VulnAssessmentRelationship", compact_type="security_CvssV3VulnAssessmentRelationship", abstract=False)
5095class security_CvssV3VulnAssessmentRelationship(security_VulnAssessmentRelationship):
5096 NODE_KIND = NodeKind.BlankNodeOrIRI
5097 ID_ALIAS = "spdxId"
5098 NAMED_INDIVIDUALS = {
5099 }
5100
5101 @classmethod
5102 def _register_props(cls):
5103 super()._register_props()
5104 # Provides a numerical (0-10) representation of the severity of a vulnerability.
5105 cls._add_property(
5106 "security_score",
5107 FloatProp(),
5108 iri="https://spdx.org/rdf/3.0.0/terms/Security/score",
5109 min_count=1,
5110 compact="security_score",
5111 )
5112 # Specifies the CVSS qualitative severity rating of a vulnerability in relation to a piece of software.
5113 cls._add_property(
5114 "security_severity",
5115 EnumProp([
5116 ("https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/critical", "critical"),
5117 ("https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/high", "high"),
5118 ("https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/low", "low"),
5119 ("https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/medium", "medium"),
5120 ("https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/none", "none"),
5121 ]),
5122 iri="https://spdx.org/rdf/3.0.0/terms/Security/severity",
5123 min_count=1,
5124 compact="security_severity",
5125 )
5126 # Specifies the CVSS vector string for a vulnerability.
5127 cls._add_property(
5128 "security_vectorString",
5129 StringProp(),
5130 iri="https://spdx.org/rdf/3.0.0/terms/Security/vectorString",
5131 min_count=1,
5132 compact="security_vectorString",
5133 )
5134
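# Usage sketch for a CVSS v3 assessment (illustrative only; the inherited
# Relationship fields that tie the assessment to a vulnerability Element and to
# the assessed software are set the same way as for any other Relationship in
# this module and are omitted here):
def _example_cvss_v3_assessment():
    return security_CvssV3VulnAssessmentRelationship(
        security_score=9.8,
        # The severity is one of the security_CvssSeverityType IRIs.
        security_severity=security_CvssSeverityType.critical,
        security_vectorString="CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
    )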
5135
5136# Provides a CVSS version 4 assessment for a vulnerability.
5137@register("https://spdx.org/rdf/3.0.0/terms/Security/CvssV4VulnAssessmentRelationship", compact_type="security_CvssV4VulnAssessmentRelationship", abstract=False)
5138class security_CvssV4VulnAssessmentRelationship(security_VulnAssessmentRelationship):
5139 NODE_KIND = NodeKind.BlankNodeOrIRI
5140 ID_ALIAS = "spdxId"
5141 NAMED_INDIVIDUALS = {
5142 }
5143
5144 @classmethod
5145 def _register_props(cls):
5146 super()._register_props()
5147 # Provides a numerical (0-10) representation of the severity of a vulnerability.
5148 cls._add_property(
5149 "security_score",
5150 FloatProp(),
5151 iri="https://spdx.org/rdf/3.0.0/terms/Security/score",
5152 min_count=1,
5153 compact="security_score",
5154 )
5155 # Specifies the CVSS qualitative severity rating of a vulnerability in relation to a piece of software.
5156 cls._add_property(
5157 "security_severity",
5158 EnumProp([
5159 ("https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/critical", "critical"),
5160 ("https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/high", "high"),
5161 ("https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/low", "low"),
5162 ("https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/medium", "medium"),
5163 ("https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/none", "none"),
5164 ]),
5165 iri="https://spdx.org/rdf/3.0.0/terms/Security/severity",
5166 min_count=1,
5167 compact="security_severity",
5168 )
5169 # Specifies the CVSS vector string for a vulnerability.
5170 cls._add_property(
5171 "security_vectorString",
5172 StringProp(),
5173 iri="https://spdx.org/rdf/3.0.0/terms/Security/vectorString",
5174 min_count=1,
5175 compact="security_vectorString",
5176 )
5177
5178
5179# Provides an EPSS assessment for a vulnerability.
5180@register("https://spdx.org/rdf/3.0.0/terms/Security/EpssVulnAssessmentRelationship", compact_type="security_EpssVulnAssessmentRelationship", abstract=False)
5181class security_EpssVulnAssessmentRelationship(security_VulnAssessmentRelationship):
5182 NODE_KIND = NodeKind.BlankNodeOrIRI
5183 ID_ALIAS = "spdxId"
5184 NAMED_INDIVIDUALS = {
5185 }
5186
5187 @classmethod
5188 def _register_props(cls):
5189 super()._register_props()
5190 # The percentile of the current probability score.
5191 cls._add_property(
5192 "security_percentile",
5193 FloatProp(),
5194 iri="https://spdx.org/rdf/3.0.0/terms/Security/percentile",
5195 min_count=1,
5196 compact="security_percentile",
5197 )
5198 # A probability score between 0 and 1 of a vulnerability being exploited.
5199 cls._add_property(
5200 "security_probability",
5201 FloatProp(),
5202 iri="https://spdx.org/rdf/3.0.0/terms/Security/probability",
5203 min_count=1,
5204 compact="security_probability",
5205 )
5206 # Specifies the time when a vulnerability was published.
5207 cls._add_property(
5208 "security_publishedTime",
5209 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
5210 iri="https://spdx.org/rdf/3.0.0/terms/Security/publishedTime",
5211 min_count=1,
5212 compact="security_publishedTime",
5213 )
5214
5215
5216# Provides an exploit assessment of a vulnerability.
5217@register("https://spdx.org/rdf/3.0.0/terms/Security/ExploitCatalogVulnAssessmentRelationship", compact_type="security_ExploitCatalogVulnAssessmentRelationship", abstract=False)
5218class security_ExploitCatalogVulnAssessmentRelationship(security_VulnAssessmentRelationship):
5219 NODE_KIND = NodeKind.BlankNodeOrIRI
5220 ID_ALIAS = "spdxId"
5221 NAMED_INDIVIDUALS = {
5222 }
5223
5224 @classmethod
5225 def _register_props(cls):
5226 super()._register_props()
5227 # Specifies the exploit catalog type.
5228 cls._add_property(
5229 "security_catalogType",
5230 EnumProp([
5231 ("https://spdx.org/rdf/3.0.0/terms/Security/ExploitCatalogType/kev", "kev"),
5232 ("https://spdx.org/rdf/3.0.0/terms/Security/ExploitCatalogType/other", "other"),
5233 ]),
5234 iri="https://spdx.org/rdf/3.0.0/terms/Security/catalogType",
5235 min_count=1,
5236 compact="security_catalogType",
5237 )
5238 # Describes that a CVE is known to have an exploit because it has been listed in an exploit catalog.
5239 cls._add_property(
5240 "security_exploited",
5241 BooleanProp(),
5242 iri="https://spdx.org/rdf/3.0.0/terms/Security/exploited",
5243 min_count=1,
5244 compact="security_exploited",
5245 )
5246 # Provides the location of an exploit catalog.
5247 cls._add_property(
5248 "security_locator",
5249 AnyURIProp(),
5250 iri="https://spdx.org/rdf/3.0.0/terms/Security/locator",
5251 min_count=1,
5252 compact="security_locator",
5253 )
5254
5255
5256# Provides an SSVC assessment for a vulnerability.
5257@register("https://spdx.org/rdf/3.0.0/terms/Security/SsvcVulnAssessmentRelationship", compact_type="security_SsvcVulnAssessmentRelationship", abstract=False)
5258class security_SsvcVulnAssessmentRelationship(security_VulnAssessmentRelationship):
5259 NODE_KIND = NodeKind.BlankNodeOrIRI
5260 ID_ALIAS = "spdxId"
5261 NAMED_INDIVIDUALS = {
5262 }
5263
5264 @classmethod
5265 def _register_props(cls):
5266 super()._register_props()
5267 # Provide the enumeration of possible decisions in the Stakeholder-Specific Vulnerability Categorization (SSVC) decision tree [https://www.cisa.gov/sites/default/files/publications/cisa-ssvc-guide%20508c.pdf](https://www.cisa.gov/sites/default/files/publications/cisa-ssvc-guide%20508c.pdf)
5268 cls._add_property(
5269 "security_decisionType",
5270 EnumProp([
5271 ("https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/act", "act"),
5272 ("https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/attend", "attend"),
5273 ("https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/track", "track"),
5274 ("https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/trackStar", "trackStar"),
5275 ]),
5276 iri="https://spdx.org/rdf/3.0.0/terms/Security/decisionType",
5277 min_count=1,
5278 compact="security_decisionType",
5279 )
5280
5281
5282# Abstract ancestor class for all VEX relationships
5283@register("https://spdx.org/rdf/3.0.0/terms/Security/VexVulnAssessmentRelationship", compact_type="security_VexVulnAssessmentRelationship", abstract=True)
5284class security_VexVulnAssessmentRelationship(security_VulnAssessmentRelationship):
5285 NODE_KIND = NodeKind.BlankNodeOrIRI
5286 ID_ALIAS = "spdxId"
5287 NAMED_INDIVIDUALS = {
5288 }
5289
5290 @classmethod
5291 def _register_props(cls):
5292 super()._register_props()
5293 # Conveys information about how VEX status was determined.
5294 cls._add_property(
5295 "security_statusNotes",
5296 StringProp(),
5297 iri="https://spdx.org/rdf/3.0.0/terms/Security/statusNotes",
5298 compact="security_statusNotes",
5299 )
5300 # Specifies the version of a VEX statement.
5301 cls._add_property(
5302 "security_vexVersion",
5303 StringProp(),
5304 iri="https://spdx.org/rdf/3.0.0/terms/Security/vexVersion",
5305 compact="security_vexVersion",
5306 )
5307
5308
5309# Specifies a vulnerability and its associated information.
5310@register("https://spdx.org/rdf/3.0.0/terms/Security/Vulnerability", compact_type="security_Vulnerability", abstract=False)
5311class security_Vulnerability(Artifact):
5312 NODE_KIND = NodeKind.BlankNodeOrIRI
5313 ID_ALIAS = "spdxId"
5314 NAMED_INDIVIDUALS = {
5315 }
5316
5317 @classmethod
5318 def _register_props(cls):
5319 super()._register_props()
5320 # Specifies a time when a vulnerability assessment was modified.
5321 cls._add_property(
5322 "security_modifiedTime",
5323 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
5324 iri="https://spdx.org/rdf/3.0.0/terms/Security/modifiedTime",
5325 compact="security_modifiedTime",
5326 )
5327 # Specifies the time when a vulnerability was published.
5328 cls._add_property(
5329 "security_publishedTime",
5330 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
5331 iri="https://spdx.org/rdf/3.0.0/terms/Security/publishedTime",
5332 compact="security_publishedTime",
5333 )
5334 # Specifies the time and date when a vulnerability was withdrawn.
5335 cls._add_property(
5336 "security_withdrawnTime",
5337 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
5338 iri="https://spdx.org/rdf/3.0.0/terms/Security/withdrawnTime",
5339 compact="security_withdrawnTime",
5340 )
5341
5342
5343# A distinct article or unit related to Software.
5344@register("https://spdx.org/rdf/3.0.0/terms/Software/SoftwareArtifact", compact_type="software_SoftwareArtifact", abstract=True)
5345class software_SoftwareArtifact(Artifact):
5346 NODE_KIND = NodeKind.BlankNodeOrIRI
5347 ID_ALIAS = "spdxId"
5348 NAMED_INDIVIDUALS = {
5349 }
5350
5351 @classmethod
5352 def _register_props(cls):
5353 super()._register_props()
5354 # Provides additional purpose information of the software artifact.
5355 cls._add_property(
5356 "software_additionalPurpose",
5357 ListProp(EnumProp([
5358 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/application", "application"),
5359 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/archive", "archive"),
5360 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/bom", "bom"),
5361 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/configuration", "configuration"),
5362 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/container", "container"),
5363 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/data", "data"),
5364 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/device", "device"),
5365 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/deviceDriver", "deviceDriver"),
5366 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/diskImage", "diskImage"),
5367 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/documentation", "documentation"),
5368 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/evidence", "evidence"),
5369 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/executable", "executable"),
5370 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/file", "file"),
5371 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/filesystemImage", "filesystemImage"),
5372 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/firmware", "firmware"),
5373 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/framework", "framework"),
5374 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/install", "install"),
5375 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/library", "library"),
5376 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/manifest", "manifest"),
5377 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/model", "model"),
5378 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/module", "module"),
5379 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/operatingSystem", "operatingSystem"),
5380 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/other", "other"),
5381 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/patch", "patch"),
5382 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/platform", "platform"),
5383 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/requirement", "requirement"),
5384 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/source", "source"),
5385 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/specification", "specification"),
5386 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/test", "test"),
5387 ])),
5388 iri="https://spdx.org/rdf/3.0.0/terms/Software/additionalPurpose",
5389 compact="software_additionalPurpose",
5390 )
5391 # Provides a place for the SPDX data creator to record acknowledgement text for
5392 # a software Package, File or Snippet.
5393 cls._add_property(
5394 "software_attributionText",
5395 ListProp(StringProp()),
5396 iri="https://spdx.org/rdf/3.0.0/terms/Software/attributionText",
5397 compact="software_attributionText",
5398 )
5399 # A canonical, unique, immutable identifier of the artifact content that may be
5400 # used for verifying its identity and/or integrity.
5401 cls._add_property(
5402 "software_contentIdentifier",
5403 ListProp(ObjectProp(software_ContentIdentifier, False)),
5404 iri="https://spdx.org/rdf/3.0.0/terms/Software/contentIdentifier",
5405 compact="software_contentIdentifier",
5406 )
5407 # Identifies the text of one or more copyright notices for a software Package,
5408 # File or Snippet, if any.
5409 cls._add_property(
5410 "software_copyrightText",
5411 StringProp(),
5412 iri="https://spdx.org/rdf/3.0.0/terms/Software/copyrightText",
5413 compact="software_copyrightText",
5414 )
5415 # Provides information about the primary purpose of the software artifact.
5416 cls._add_property(
5417 "software_primaryPurpose",
5418 EnumProp([
5419 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/application", "application"),
5420 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/archive", "archive"),
5421 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/bom", "bom"),
5422 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/configuration", "configuration"),
5423 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/container", "container"),
5424 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/data", "data"),
5425 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/device", "device"),
5426 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/deviceDriver", "deviceDriver"),
5427 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/diskImage", "diskImage"),
5428 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/documentation", "documentation"),
5429 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/evidence", "evidence"),
5430 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/executable", "executable"),
5431 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/file", "file"),
5432 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/filesystemImage", "filesystemImage"),
5433 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/firmware", "firmware"),
5434 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/framework", "framework"),
5435 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/install", "install"),
5436 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/library", "library"),
5437 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/manifest", "manifest"),
5438 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/model", "model"),
5439 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/module", "module"),
5440 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/operatingSystem", "operatingSystem"),
5441 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/other", "other"),
5442 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/patch", "patch"),
5443 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/platform", "platform"),
5444 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/requirement", "requirement"),
5445 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/source", "source"),
5446 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/specification", "specification"),
5447 ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/test", "test"),
5448 ]),
5449 iri="https://spdx.org/rdf/3.0.0/terms/Software/primaryPurpose",
5450 compact="software_primaryPurpose",
5451 )
5452
5453
5454# A container for a grouping of SPDX-3.0 content characterizing details
5455# (provenance, composition, licensing, etc.) about a product.
5456@register("https://spdx.org/rdf/3.0.0/terms/Core/Bom", compact_type="Bom", abstract=False)
5457class Bom(Bundle):
5458 NODE_KIND = NodeKind.BlankNodeOrIRI
5459 ID_ALIAS = "spdxId"
5460 NAMED_INDIVIDUALS = {
5461 }
5462
5463
5464# A license that is not listed on the SPDX License List.
5465@register("https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/CustomLicense", compact_type="expandedlicensing_CustomLicense", abstract=False)
5466class expandedlicensing_CustomLicense(expandedlicensing_License):
5467 NODE_KIND = NodeKind.BlankNodeOrIRI
5468 ID_ALIAS = "spdxId"
5469 NAMED_INDIVIDUALS = {
5470 }
5471
5472
5473# Connects a vulnerability and an element designating the element as a product
5474# affected by the vulnerability.
5475@register("https://spdx.org/rdf/3.0.0/terms/Security/VexAffectedVulnAssessmentRelationship", compact_type="security_VexAffectedVulnAssessmentRelationship", abstract=False)
5476class security_VexAffectedVulnAssessmentRelationship(security_VexVulnAssessmentRelationship):
5477 NODE_KIND = NodeKind.BlankNodeOrIRI
5478 ID_ALIAS = "spdxId"
5479 NAMED_INDIVIDUALS = {
5480 }
5481
5482 @classmethod
5483 def _register_props(cls):
5484 super()._register_props()
5485 # Provides advice on how to mitigate or remediate a vulnerability when a VEX product
5486 # is affected by it.
5487 cls._add_property(
5488 "security_actionStatement",
5489 StringProp(),
5490 iri="https://spdx.org/rdf/3.0.0/terms/Security/actionStatement",
5491 compact="security_actionStatement",
5492 )
5493 # Records the time when a recommended action was communicated in a VEX statement
5494 # to mitigate a vulnerability.
5495 cls._add_property(
5496 "security_actionStatementTime",
5497 ListProp(DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",)),
5498 iri="https://spdx.org/rdf/3.0.0/terms/Security/actionStatementTime",
5499 compact="security_actionStatementTime",
5500 )
5501
5502
5503# Links a vulnerability and elements representing products (in the VEX sense) where
5504# a fix has been applied and are no longer affected.
5505@register("https://spdx.org/rdf/3.0.0/terms/Security/VexFixedVulnAssessmentRelationship", compact_type="security_VexFixedVulnAssessmentRelationship", abstract=False)
5506class security_VexFixedVulnAssessmentRelationship(security_VexVulnAssessmentRelationship):
5507 NODE_KIND = NodeKind.BlankNodeOrIRI
5508 ID_ALIAS = "spdxId"
5509 NAMED_INDIVIDUALS = {
5510 }
5511
5512
5513# Links a vulnerability and one or more elements designating the latter as products
5514# not affected by the vulnerability.
5515@register("https://spdx.org/rdf/3.0.0/terms/Security/VexNotAffectedVulnAssessmentRelationship", compact_type="security_VexNotAffectedVulnAssessmentRelationship", abstract=False)
5516class security_VexNotAffectedVulnAssessmentRelationship(security_VexVulnAssessmentRelationship):
5517 NODE_KIND = NodeKind.BlankNodeOrIRI
5518 ID_ALIAS = "spdxId"
5519 NAMED_INDIVIDUALS = {
5520 }
5521
5522 @classmethod
5523 def _register_props(cls):
5524 super()._register_props()
5525 # Explains why a VEX product is not affected by a vulnerability. It is an
5526 # alternative in VexNotAffectedVulnAssessmentRelationship to the machine-readable
5527 # justification label.
5528 cls._add_property(
5529 "security_impactStatement",
5530 StringProp(),
5531 iri="https://spdx.org/rdf/3.0.0/terms/Security/impactStatement",
5532 compact="security_impactStatement",
5533 )
5534 # Timestamp of impact statement.
5535 cls._add_property(
5536 "security_impactStatementTime",
5537 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
5538 iri="https://spdx.org/rdf/3.0.0/terms/Security/impactStatementTime",
5539 compact="security_impactStatementTime",
5540 )
5541 # Impact justification label to be used when linking a vulnerability to an element
5542 # representing a VEX product with a VexNotAffectedVulnAssessmentRelationship
5543 # relationship.
5544 cls._add_property(
5545 "security_justificationType",
5546 EnumProp([
5547 ("https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/componentNotPresent", "componentNotPresent"),
5548 ("https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/inlineMitigationsAlreadyExist", "inlineMitigationsAlreadyExist"),
5549 ("https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeCannotBeControlledByAdversary", "vulnerableCodeCannotBeControlledByAdversary"),
5550 ("https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeNotInExecutePath", "vulnerableCodeNotInExecutePath"),
5551 ("https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeNotPresent", "vulnerableCodeNotPresent"),
5552 ]),
5553 iri="https://spdx.org/rdf/3.0.0/terms/Security/justificationType",
5554 compact="security_justificationType",
5555 )
5556
5557
5558# Designates elements as products where the impact of a vulnerability is being
5559# investigated.
5560@register("https://spdx.org/rdf/3.0.0/terms/Security/VexUnderInvestigationVulnAssessmentRelationship", compact_type="security_VexUnderInvestigationVulnAssessmentRelationship", abstract=False)
5561class security_VexUnderInvestigationVulnAssessmentRelationship(security_VexVulnAssessmentRelationship):
5562 NODE_KIND = NodeKind.BlankNodeOrIRI
5563 ID_ALIAS = "spdxId"
5564 NAMED_INDIVIDUALS = {
5565 }
5566
5567
5568# Refers to any object that stores content on a computer.
5569@register("https://spdx.org/rdf/3.0.0/terms/Software/File", compact_type="software_File", abstract=False)
5570class software_File(software_SoftwareArtifact):
5571 NODE_KIND = NodeKind.BlankNodeOrIRI
5572 ID_ALIAS = "spdxId"
5573 NAMED_INDIVIDUALS = {
5574 }
5575
5576 @classmethod
5577 def _register_props(cls):
5578 super()._register_props()
5579 # Provides information about the content type of an Element.
5580 cls._add_property(
5581 "software_contentType",
5582 StringProp(pattern=r"^[^\/]+\/[^\/]+$",),
5583 iri="https://spdx.org/rdf/3.0.0/terms/Software/contentType",
5584 compact="software_contentType",
5585 )
5586 # Describes if a given file is a directory or non-directory kind of file.
5587 cls._add_property(
5588 "software_fileKind",
5589 EnumProp([
5590 ("https://spdx.org/rdf/3.0.0/terms/Software/FileKindType/directory", "directory"),
5591 ("https://spdx.org/rdf/3.0.0/terms/Software/FileKindType/file", "file"),
5592 ]),
5593 iri="https://spdx.org/rdf/3.0.0/terms/Software/fileKind",
5594 compact="software_fileKind",
5595 )
5596
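# Note for the class above: software_contentType is constrained by the pattern
# to a MIME-type-like "type/subtype" string, e.g. "text/plain" or
# "application/octet-stream" (example values only, not an exhaustive list).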
5597
5598# Refers to any unit of content that can be associated with a distribution of
5599# software.
5600@register("https://spdx.org/rdf/3.0.0/terms/Software/Package", compact_type="software_Package", abstract=False)
5601class software_Package(software_SoftwareArtifact):
5602 NODE_KIND = NodeKind.BlankNodeOrIRI
5603 ID_ALIAS = "spdxId"
5604 NAMED_INDIVIDUALS = {
5605 }
5606
5607 @classmethod
5608 def _register_props(cls):
5609 super()._register_props()
5610 # Identifies the download Uniform Resource Identifier for the package at the time
5611 # that the document was created.
5612 cls._add_property(
5613 "software_downloadLocation",
5614 AnyURIProp(),
5615 iri="https://spdx.org/rdf/3.0.0/terms/Software/downloadLocation",
5616 compact="software_downloadLocation",
5617 )
5618 # A place for the SPDX document creator to record a website that serves as the
5619 # package's home page.
5620 cls._add_property(
5621 "software_homePage",
5622 AnyURIProp(),
5623 iri="https://spdx.org/rdf/3.0.0/terms/Software/homePage",
5624 compact="software_homePage",
5625 )
5626 # Provides a place for the SPDX data creator to record the package URL string
5627 # (in accordance with the
5628 # [package URL spec](https://github.com/package-url/purl-spec/blob/master/PURL-SPECIFICATION.rst))
5629 # for a software Package.
5630 cls._add_property(
5631 "software_packageUrl",
5632 AnyURIProp(),
5633 iri="https://spdx.org/rdf/3.0.0/terms/Software/packageUrl",
5634 compact="software_packageUrl",
5635 )
5636 # Identifies the version of a package.
5637 cls._add_property(
5638 "software_packageVersion",
5639 StringProp(),
5640 iri="https://spdx.org/rdf/3.0.0/terms/Software/packageVersion",
5641 compact="software_packageVersion",
5642 )
5643 # Records any relevant background information or additional comments
5644 # about the origin of the package.
5645 cls._add_property(
5646 "software_sourceInfo",
5647 StringProp(),
5648 iri="https://spdx.org/rdf/3.0.0/terms/Software/sourceInfo",
5649 compact="software_sourceInfo",
5650 )
5651
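# A minimal usage sketch for the class above (illustrative only; the spdxId,
# name, version and URL are hypothetical, and real callers such as
# spdx30_tasks.py below also pass creationInfo from their object set):
#
#   pkg = software_Package(
#       _id="https://example.com/spdx/example-package",
#       name="example-package",
#       software_packageVersion="1.0",
#       software_downloadLocation="https://example.com/downloads/example-1.0.tar.gz",
#   )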
5652
5653# A collection of SPDX Elements describing a single package.
5654@register("https://spdx.org/rdf/3.0.0/terms/Software/Sbom", compact_type="software_Sbom", abstract=False)
5655class software_Sbom(Bom):
5656 NODE_KIND = NodeKind.BlankNodeOrIRI
5657 ID_ALIAS = "spdxId"
5658 NAMED_INDIVIDUALS = {
5659 }
5660
5661 @classmethod
5662 def _register_props(cls):
5663 super()._register_props()
5664 # Provides information about the type of an SBOM.
5665 cls._add_property(
5666 "software_sbomType",
5667 ListProp(EnumProp([
5668 ("https://spdx.org/rdf/3.0.0/terms/Software/SbomType/analyzed", "analyzed"),
5669 ("https://spdx.org/rdf/3.0.0/terms/Software/SbomType/build", "build"),
5670 ("https://spdx.org/rdf/3.0.0/terms/Software/SbomType/deployed", "deployed"),
5671 ("https://spdx.org/rdf/3.0.0/terms/Software/SbomType/design", "design"),
5672 ("https://spdx.org/rdf/3.0.0/terms/Software/SbomType/runtime", "runtime"),
5673 ("https://spdx.org/rdf/3.0.0/terms/Software/SbomType/source", "source"),
5674 ])),
5675 iri="https://spdx.org/rdf/3.0.0/terms/Software/sbomType",
5676 compact="software_sbomType",
5677 )
5678
5679
5680# Describes a certain part of a file.
5681@register("https://spdx.org/rdf/3.0.0/terms/Software/Snippet", compact_type="software_Snippet", abstract=False)
5682class software_Snippet(software_SoftwareArtifact):
5683 NODE_KIND = NodeKind.BlankNodeOrIRI
5684 ID_ALIAS = "spdxId"
5685 NAMED_INDIVIDUALS = {
5686 }
5687
5688 @classmethod
5689 def _register_props(cls):
5690 super()._register_props()
5691 # Defines the byte range in the original host file that the snippet information
5692 # applies to.
5693 cls._add_property(
5694 "software_byteRange",
5695 ObjectProp(PositiveIntegerRange, False),
5696 iri="https://spdx.org/rdf/3.0.0/terms/Software/byteRange",
5697 compact="software_byteRange",
5698 )
5699 # Defines the line range in the original host file that the snippet information
5700 # applies to.
5701 cls._add_property(
5702 "software_lineRange",
5703 ObjectProp(PositiveIntegerRange, False),
5704 iri="https://spdx.org/rdf/3.0.0/terms/Software/lineRange",
5705 compact="software_lineRange",
5706 )
5707 # Defines the original host file that the snippet information applies to.
5708 cls._add_property(
5709 "software_snippetFromFile",
5710 ObjectProp(software_File, True),
5711 iri="https://spdx.org/rdf/3.0.0/terms/Software/snippetFromFile",
5712 min_count=1,
5713 compact="software_snippetFromFile",
5714 )
5715
5716
5717# Specifies an AI package and its associated information.
5718@register("https://spdx.org/rdf/3.0.0/terms/AI/AIPackage", compact_type="ai_AIPackage", abstract=False)
5719class ai_AIPackage(software_Package):
5720 NODE_KIND = NodeKind.BlankNodeOrIRI
5721 ID_ALIAS = "spdxId"
5722 NAMED_INDIVIDUALS = {
5723 }
5724
5725 @classmethod
5726 def _register_props(cls):
5727 super()._register_props()
5728 # States if a human is involved in the decisions of the AI software.
5729 cls._add_property(
5730 "ai_autonomyType",
5731 EnumProp([
5732 ("https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/no", "no"),
5733 ("https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/noAssertion", "noAssertion"),
5734 ("https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/yes", "yes"),
5735 ]),
5736 iri="https://spdx.org/rdf/3.0.0/terms/AI/autonomyType",
5737 compact="ai_autonomyType",
5738 )
5739 # Captures the domain in which the AI package can be used.
5740 cls._add_property(
5741 "ai_domain",
5742 ListProp(StringProp()),
5743 iri="https://spdx.org/rdf/3.0.0/terms/AI/domain",
5744 compact="ai_domain",
5745 )
5746 # Indicates the amount of energy consumed to train the AI model.
5747 cls._add_property(
5748 "ai_energyConsumption",
5749 ObjectProp(ai_EnergyConsumption, False),
5750 iri="https://spdx.org/rdf/3.0.0/terms/AI/energyConsumption",
5751 compact="ai_energyConsumption",
5752 )
5753 # Records a hyperparameter used to build the AI model contained in the AI
5754 # package.
5755 cls._add_property(
5756 "ai_hyperparameter",
5757 ListProp(ObjectProp(DictionaryEntry, False)),
5758 iri="https://spdx.org/rdf/3.0.0/terms/AI/hyperparameter",
5759 compact="ai_hyperparameter",
5760 )
5761 # Provides relevant information about the AI software, not including the model
5762 # description.
5763 cls._add_property(
5764 "ai_informationAboutApplication",
5765 StringProp(),
5766 iri="https://spdx.org/rdf/3.0.0/terms/AI/informationAboutApplication",
5767 compact="ai_informationAboutApplication",
5768 )
5769 # Describes relevant information about different steps of the training process.
5770 cls._add_property(
5771 "ai_informationAboutTraining",
5772 StringProp(),
5773 iri="https://spdx.org/rdf/3.0.0/terms/AI/informationAboutTraining",
5774 compact="ai_informationAboutTraining",
5775 )
5776 # Captures a limitation of the AI software.
5777 cls._add_property(
5778 "ai_limitation",
5779 StringProp(),
5780 iri="https://spdx.org/rdf/3.0.0/terms/AI/limitation",
5781 compact="ai_limitation",
5782 )
5783 # Records the measurement of prediction quality of the AI model.
5784 cls._add_property(
5785 "ai_metric",
5786 ListProp(ObjectProp(DictionaryEntry, False)),
5787 iri="https://spdx.org/rdf/3.0.0/terms/AI/metric",
5788 compact="ai_metric",
5789 )
5790 # Captures the threshold that was used for computation of a metric described in
5791 # the metric field.
5792 cls._add_property(
5793 "ai_metricDecisionThreshold",
5794 ListProp(ObjectProp(DictionaryEntry, False)),
5795 iri="https://spdx.org/rdf/3.0.0/terms/AI/metricDecisionThreshold",
5796 compact="ai_metricDecisionThreshold",
5797 )
5798 # Describes all the preprocessing steps applied to the training data before the
5799 # model training.
5800 cls._add_property(
5801 "ai_modelDataPreprocessing",
5802 ListProp(StringProp()),
5803 iri="https://spdx.org/rdf/3.0.0/terms/AI/modelDataPreprocessing",
5804 compact="ai_modelDataPreprocessing",
5805 )
5806 # Describes methods that can be used to explain the model.
5807 cls._add_property(
5808 "ai_modelExplainability",
5809 ListProp(StringProp()),
5810 iri="https://spdx.org/rdf/3.0.0/terms/AI/modelExplainability",
5811 compact="ai_modelExplainability",
5812 )
5813 # Records the results of general safety risk assessment of the AI system.
5814 cls._add_property(
5815 "ai_safetyRiskAssessment",
5816 EnumProp([
5817 ("https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/high", "high"),
5818 ("https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/low", "low"),
5819 ("https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/medium", "medium"),
5820 ("https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/serious", "serious"),
5821 ]),
5822 iri="https://spdx.org/rdf/3.0.0/terms/AI/safetyRiskAssessment",
5823 compact="ai_safetyRiskAssessment",
5824 )
5825 # Captures a standard that is being complied with.
5826 cls._add_property(
5827 "ai_standardCompliance",
5828 ListProp(StringProp()),
5829 iri="https://spdx.org/rdf/3.0.0/terms/AI/standardCompliance",
5830 compact="ai_standardCompliance",
5831 )
5832 # Records the type of the model used in the AI software.
5833 cls._add_property(
5834 "ai_typeOfModel",
5835 ListProp(StringProp()),
5836 iri="https://spdx.org/rdf/3.0.0/terms/AI/typeOfModel",
5837 compact="ai_typeOfModel",
5838 )
5839 # Records if sensitive personal information is used during model training or
5840 # could be used during inference.
5841 cls._add_property(
5842 "ai_useSensitivePersonalInformation",
5843 EnumProp([
5844 ("https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/no", "no"),
5845 ("https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/noAssertion", "noAssertion"),
5846 ("https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/yes", "yes"),
5847 ]),
5848 iri="https://spdx.org/rdf/3.0.0/terms/AI/useSensitivePersonalInformation",
5849 compact="ai_useSensitivePersonalInformation",
5850 )
5851
5852
5853# Specifies a data package and its associated information.
5854@register("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetPackage", compact_type="dataset_DatasetPackage", abstract=False)
5855class dataset_DatasetPackage(software_Package):
5856 NODE_KIND = NodeKind.BlankNodeOrIRI
5857 ID_ALIAS = "spdxId"
5858 NAMED_INDIVIDUALS = {
5859 }
5860
5861 @classmethod
5862 def _register_props(cls):
5863 super()._register_props()
5864 # Describes the anonymization methods used.
5865 cls._add_property(
5866 "dataset_anonymizationMethodUsed",
5867 ListProp(StringProp()),
5868 iri="https://spdx.org/rdf/3.0.0/terms/Dataset/anonymizationMethodUsed",
5869 compact="dataset_anonymizationMethodUsed",
5870 )
5871 # Describes the confidentiality level of the data points contained in the dataset.
5872 cls._add_property(
5873 "dataset_confidentialityLevel",
5874 EnumProp([
5875 ("https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/amber", "amber"),
5876 ("https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/clear", "clear"),
5877 ("https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/green", "green"),
5878 ("https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/red", "red"),
5879 ]),
5880 iri="https://spdx.org/rdf/3.0.0/terms/Dataset/confidentialityLevel",
5881 compact="dataset_confidentialityLevel",
5882 )
5883 # Describes how the dataset was collected.
5884 cls._add_property(
5885 "dataset_dataCollectionProcess",
5886 StringProp(),
5887 iri="https://spdx.org/rdf/3.0.0/terms/Dataset/dataCollectionProcess",
5888 compact="dataset_dataCollectionProcess",
5889 )
5890 # Describes the preprocessing steps that were applied to the raw data to create the given dataset.
5891 cls._add_property(
5892 "dataset_dataPreprocessing",
5893 ListProp(StringProp()),
5894 iri="https://spdx.org/rdf/3.0.0/terms/Dataset/dataPreprocessing",
5895 compact="dataset_dataPreprocessing",
5896 )
5897 # The field describes the availability of a dataset.
5898 cls._add_property(
5899 "dataset_datasetAvailability",
5900 EnumProp([
5901 ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/clickthrough", "clickthrough"),
5902 ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/directDownload", "directDownload"),
5903 ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/query", "query"),
5904 ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/registration", "registration"),
5905 ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/scrapingScript", "scrapingScript"),
5906 ]),
5907 iri="https://spdx.org/rdf/3.0.0/terms/Dataset/datasetAvailability",
5908 compact="dataset_datasetAvailability",
5909 )
5910 # Describes potentially noisy elements of the dataset.
5911 cls._add_property(
5912 "dataset_datasetNoise",
5913 StringProp(),
5914 iri="https://spdx.org/rdf/3.0.0/terms/Dataset/datasetNoise",
5915 compact="dataset_datasetNoise",
5916 )
5917 # Captures the size of the dataset.
5918 cls._add_property(
5919 "dataset_datasetSize",
5920 NonNegativeIntegerProp(),
5921 iri="https://spdx.org/rdf/3.0.0/terms/Dataset/datasetSize",
5922 compact="dataset_datasetSize",
5923 )
5924 # Describes the type of the given dataset.
5925 cls._add_property(
5926 "dataset_datasetType",
5927 ListProp(EnumProp([
5928 ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/audio", "audio"),
5929 ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/categorical", "categorical"),
5930 ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/graph", "graph"),
5931 ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/image", "image"),
5932 ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/noAssertion", "noAssertion"),
5933 ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/numeric", "numeric"),
5934 ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/other", "other"),
5935 ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/sensor", "sensor"),
5936 ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/structured", "structured"),
5937 ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/syntactic", "syntactic"),
5938 ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/text", "text"),
5939 ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/timeseries", "timeseries"),
5940 ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/timestamp", "timestamp"),
5941 ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/video", "video"),
5942 ])),
5943 iri="https://spdx.org/rdf/3.0.0/terms/Dataset/datasetType",
5944 min_count=1,
5945 compact="dataset_datasetType",
5946 )
5947 # Describes a mechanism to update the dataset.
5948 cls._add_property(
5949 "dataset_datasetUpdateMechanism",
5950 StringProp(),
5951 iri="https://spdx.org/rdf/3.0.0/terms/Dataset/datasetUpdateMechanism",
5952 compact="dataset_datasetUpdateMechanism",
5953 )
5954 # Describes if any sensitive personal information is present in the dataset.
5955 cls._add_property(
5956 "dataset_hasSensitivePersonalInformation",
5957 EnumProp([
5958 ("https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/no", "no"),
5959 ("https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/noAssertion", "noAssertion"),
5960 ("https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/yes", "yes"),
5961 ]),
5962 iri="https://spdx.org/rdf/3.0.0/terms/Dataset/hasSensitivePersonalInformation",
5963 compact="dataset_hasSensitivePersonalInformation",
5964 )
5965 # Describes what the given dataset should be used for.
5966 cls._add_property(
5967 "dataset_intendedUse",
5968 StringProp(),
5969 iri="https://spdx.org/rdf/3.0.0/terms/Dataset/intendedUse",
5970 compact="dataset_intendedUse",
5971 )
5972 # Records the biases that the dataset is known to encompass.
5973 cls._add_property(
5974 "dataset_knownBias",
5975 ListProp(StringProp()),
5976 iri="https://spdx.org/rdf/3.0.0/terms/Dataset/knownBias",
5977 compact="dataset_knownBias",
5978 )
5979 # Describes a sensor used for collecting the data.
5980 cls._add_property(
5981 "dataset_sensor",
5982 ListProp(ObjectProp(DictionaryEntry, False)),
5983 iri="https://spdx.org/rdf/3.0.0/terms/Dataset/sensor",
5984 compact="dataset_sensor",
5985 )
5986
5987
5988"""Format Guard"""
5989# fmt: on
5990
5991
5992def main():
5993 import argparse
5994 from pathlib import Path
5995
5996 parser = argparse.ArgumentParser(description="Python SHACL model test")
5997 parser.add_argument("infile", type=Path, help="Input file")
5998 parser.add_argument("--print", action="store_true", help="Print object tree")
5999 parser.add_argument("--outfile", type=Path, help="Output file")
6000
6001 args = parser.parse_args()
6002
6003 objectset = SHACLObjectSet()
6004 with args.infile.open("r") as f:
6005 d = JSONLDDeserializer()
6006 d.read(f, objectset)
6007
6008 if args.print:
6009 print_tree(objectset.objects)
6010
6011 if args.outfile:
6012 with args.outfile.open("wb") as f:
6013 s = JSONLDSerializer()
6014 s.write(objectset, f)
6015
6016 return 0
6017
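# Example invocation of the self-test entry point above (illustrative; the
# file names are hypothetical). It deserializes a JSON-LD document, optionally
# prints the object tree, and re-serializes it:
#
#   python3 spdx30.py input.spdx.json --print --outfile roundtrip.spdx.json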
6018
6019if __name__ == "__main__":
6020 sys.exit(main())
diff --git a/meta/lib/oe/spdx30_tasks.py b/meta/lib/oe/spdx30_tasks.py
new file mode 100644
index 0000000000..6a2858c665
--- /dev/null
+++ b/meta/lib/oe/spdx30_tasks.py
@@ -0,0 +1,1243 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import json
8import oe.cve_check
9import oe.packagedata
10import oe.patch
11import oe.sbom30
12import oe.spdx30
13import oe.spdx_common
14import oe.sdk
15import os
16
17from contextlib import contextmanager
18from datetime import datetime, timezone
19from pathlib import Path
20
21
22def set_timestamp_now(d, o, prop):
23 if d.getVar("SPDX_INCLUDE_TIMESTAMPS") == "1":
24 setattr(o, prop, datetime.now(timezone.utc))
25 else:
26 # Doing this helps to validate that the property actually exists, and
27 # also that it is not mandatory.
28 delattr(o, prop)
29
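# Example (mirroring how create_spdx() below uses this helper): stamp the
# build time on a package element only when SPDX_INCLUDE_TIMESTAMPS is
# enabled, otherwise drop the optional property:
#
#   set_timestamp_now(d, spdx_package, "builtTime")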
30
31def add_license_expression(d, objset, license_expression, license_data):
32 simple_license_text = {}
33 license_text_map = {}
34 license_ref_idx = 0
35
36 def add_license_text(name):
37 nonlocal objset
38 nonlocal simple_license_text
39
40 if name in simple_license_text:
41 return simple_license_text[name]
42
43 lic = objset.find_filter(
44 oe.spdx30.simplelicensing_SimpleLicensingText,
45 name=name,
46 )
47
48 if lic is not None:
49 simple_license_text[name] = lic
50 return lic
51
52 lic = objset.add(
53 oe.spdx30.simplelicensing_SimpleLicensingText(
54 _id=objset.new_spdxid("license-text", name),
55 creationInfo=objset.doc.creationInfo,
56 name=name,
57 )
58 )
59 simple_license_text[name] = lic
60
61 if name == "PD":
62 lic.simplelicensing_licenseText = "Software released to the public domain"
63 return lic
64
65 # Search for the license in COMMON_LICENSE_DIR and LICENSE_PATH
66 for directory in [d.getVar("COMMON_LICENSE_DIR")] + (
67 d.getVar("LICENSE_PATH") or ""
68 ).split():
69 try:
70 with (Path(directory) / name).open(errors="replace") as f:
71 lic.simplelicensing_licenseText = f.read()
72 return lic
73
74 except FileNotFoundError:
75 pass
76
77 # If it's not SPDX or PD, then NO_GENERIC_LICENSE must be set
78 filename = d.getVarFlag("NO_GENERIC_LICENSE", name)
79 if filename:
80 filename = d.expand("${S}/" + filename)
81 with open(filename, errors="replace") as f:
82 lic.simplelicensing_licenseText = f.read()
83 return lic
84 else:
85 bb.fatal("Cannot find any text for license %s" % name)
86
87 def convert(l):
88 nonlocal license_text_map
89 nonlocal license_ref_idx
90
91 if l == "(" or l == ")":
92 return l
93
94 if l == "&":
95 return "AND"
96
97 if l == "|":
98 return "OR"
99
100 if l == "CLOSED":
101 return "NONE"
102
103 spdx_license = d.getVarFlag("SPDXLICENSEMAP", l) or l
104 if spdx_license in license_data["licenses"]:
105 return spdx_license
106
107 spdx_license = "LicenseRef-" + l
108 if spdx_license not in license_text_map:
109 license_text_map[spdx_license] = add_license_text(l)._id
110
111 return spdx_license
112
113 lic_split = (
114 license_expression.replace("(", " ( ")
115 .replace(")", " ) ")
116 .replace("|", " | ")
117 .replace("&", " & ")
118 .split()
119 )
120 spdx_license_expression = " ".join(convert(l) for l in lic_split)
121
122 return objset.new_license_expression(spdx_license_expression, license_data, license_text_map)
123
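# Worked example of the conversion above (illustrative, assuming the names are
# present in the loaded SPDX license data and not remapped by SPDXLICENSEMAP):
# a recipe LICENSE of
#
#   "GPL-2.0-only & (MIT | BSD-3-Clause)"
#
# is tokenized and re-joined as the SPDX license expression
#
#   "GPL-2.0-only AND ( MIT OR BSD-3-Clause )"
#
# "CLOSED" maps to "NONE", and any license name not found in the SPDX license
# list is emitted as "LicenseRef-<name>" with its text attached via
# add_license_text().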
124
125def add_package_files(
126 d,
127 objset,
128 topdir,
129 get_spdxid,
130 get_purposes,
131 license_data,
132 *,
133 archive=None,
134 ignore_dirs=[],
135 ignore_top_level_dirs=[],
136):
137 source_date_epoch = d.getVar("SOURCE_DATE_EPOCH")
138 if source_date_epoch:
139 source_date_epoch = int(source_date_epoch)
140
141 spdx_files = set()
142
143 file_counter = 1
144 for subdir, dirs, files in os.walk(topdir):
145 dirs[:] = [d for d in dirs if d not in ignore_dirs]
146 if subdir == str(topdir):
147 dirs[:] = [d for d in dirs if d not in ignore_top_level_dirs]
148
149 for file in files:
150 filepath = Path(subdir) / file
151 if filepath.is_symlink() or not filepath.is_file():
152 continue
153
154 bb.debug(1, "Adding file %s to %s" % (filepath, objset.doc._id))
155
156 filename = str(filepath.relative_to(topdir))
157 file_purposes = get_purposes(filepath)
158
159 spdx_file = objset.new_file(
160 get_spdxid(file_counter),
161 filename,
162 filepath,
163 purposes=file_purposes,
164 )
165 spdx_files.add(spdx_file)
166
167 if oe.spdx30.software_SoftwarePurpose.source in file_purposes:
168 objset.scan_declared_licenses(spdx_file, filepath, license_data)
169
170 if archive is not None:
171 with filepath.open("rb") as f:
172 info = archive.gettarinfo(fileobj=f)
173 info.name = filename
174 info.uid = 0
175 info.gid = 0
176 info.uname = "root"
177 info.gname = "root"
178
179 if source_date_epoch is not None and info.mtime > source_date_epoch:
180 info.mtime = source_date_epoch
181
182 archive.addfile(info, f)
183
184 file_counter += 1
185
186 return spdx_files
187
188
189def get_package_sources_from_debug(
190 d, package, package_files, sources, source_hash_cache
191):
192 def file_path_match(file_path, pkg_file):
193 if file_path.lstrip("/") == pkg_file.name.lstrip("/"):
194 return True
195
196 for e in pkg_file.extension:
197 if isinstance(e, oe.sbom30.OEFileNameAliasExtension):
198 for a in e.aliases:
199 if file_path.lstrip("/") == a.lstrip("/"):
200 return True
201
202 return False
203
204 debug_search_paths = [
205 Path(d.getVar("PKGD")),
206 Path(d.getVar("STAGING_DIR_TARGET")),
207 Path(d.getVar("STAGING_DIR_NATIVE")),
208 Path(d.getVar("STAGING_KERNEL_DIR")),
209 ]
210
211 pkg_data = oe.packagedata.read_subpkgdata_extended(package, d)
212
213 if pkg_data is None:
214 return
215
216 dep_source_files = set()
217
218 for file_path, file_data in pkg_data["files_info"].items():
218 if "debugsrc" not in file_data:
220 continue
221
222 if not any(file_path_match(file_path, pkg_file) for pkg_file in package_files):
223 bb.fatal(
224 "No package file found for %s in %s; SPDX found: %s"
225 % (str(file_path), package, " ".join(p.name for p in package_files))
226 )
227 continue
228
229 for debugsrc in file_data["debugsrc"]:
230 for search in debug_search_paths:
231 if debugsrc.startswith("/usr/src/kernel"):
232 debugsrc_path = search / debugsrc.replace("/usr/src/kernel/", "")
233 else:
234 debugsrc_path = search / debugsrc.lstrip("/")
235
236 if debugsrc_path in source_hash_cache:
237 file_sha256 = source_hash_cache[debugsrc_path]
238 if file_sha256 is None:
239 continue
240 else:
241 # We can only hash files below, skip directories, links, etc.
242 if not debugsrc_path.is_file():
243 source_hash_cache[debugsrc_path] = None
244 continue
245
246 file_sha256 = bb.utils.sha256_file(debugsrc_path)
247 source_hash_cache[debugsrc_path] = file_sha256
248
249 if file_sha256 in sources:
250 source_file = sources[file_sha256]
251 dep_source_files.add(source_file)
252 else:
253 bb.debug(
254 1,
255 "Debug source %s with SHA256 %s not found in any dependency"
256 % (str(debugsrc_path), file_sha256),
257 )
258 break
259 else:
260 bb.debug(1, "Debug source %s not found" % debugsrc)
261
262 return dep_source_files
263
264
265def collect_dep_objsets(d, build):
266 deps = oe.spdx_common.get_spdx_deps(d)
267
268 dep_objsets = []
269 dep_builds = set()
270
271 dep_build_spdxids = set()
272 for dep in deps:
273 bb.debug(1, "Fetching SPDX for dependency %s" % (dep.pn))
274 dep_build, dep_objset = oe.sbom30.find_root_obj_in_jsonld(
275 d, "recipes", dep.pn, oe.spdx30.build_Build
276 )
277 # If the dependency is part of the taskhash, return it to be linked
278 # against. Otherwise, it cannot be linked against because this recipe
279 # will not be rebuilt if the dependency changes
280 if dep.in_taskhash:
281 dep_objsets.append(dep_objset)
282
283 # The build _can_ be linked against (by alias)
284 dep_builds.add(dep_build)
285
286 return dep_objsets, dep_builds
287
288
289def collect_dep_sources(dep_objsets):
290 sources = {}
291 for objset in dep_objsets:
292 # Don't collect sources from native recipes as they
293 # also match non-native sources.
294 if objset.is_native():
295 continue
296
297 bb.debug(1, "Fetching Sources for dependency %s" % (objset.doc.name))
298
299 dep_build = objset.find_root(oe.spdx30.build_Build)
300 if not dep_build:
301 bb.fatal("Unable to find a build")
302
303 for e in objset.foreach_type(oe.spdx30.Relationship):
304 if dep_build is not e.from_:
305 continue
306
307 if e.relationshipType != oe.spdx30.RelationshipType.hasInputs:
308 continue
309
310 for to in e.to:
311 if not isinstance(to, oe.spdx30.software_File):
312 continue
313
314 if (
315 to.software_primaryPurpose
316 != oe.spdx30.software_SoftwarePurpose.source
317 ):
318 continue
319
320 for v in to.verifiedUsing:
321 if v.algorithm == oe.spdx30.HashAlgorithm.sha256:
322 sources[v.hashValue] = to
323 break
324 else:
325 bb.fatal(
326 "No SHA256 found for %s in %s" % (to.name, objset.doc.name)
327 )
328
329 return sources
330
331
332def add_download_files(d, objset):
333 inputs = set()
334
335 urls = d.getVar("SRC_URI").split()
336 fetch = bb.fetch2.Fetch(urls, d)
337
338 for download_idx, src_uri in enumerate(urls):
339 fd = fetch.ud[src_uri]
340
341 for name in fd.names:
342 file_name = os.path.basename(fetch.localpath(src_uri))
343 if oe.patch.patch_path(src_uri, fetch, "", expand=False):
344 primary_purpose = oe.spdx30.software_SoftwarePurpose.patch
345 else:
346 primary_purpose = oe.spdx30.software_SoftwarePurpose.source
347
348 if fd.type == "file":
349 if os.path.isdir(fd.localpath):
350 walk_idx = 1
351 for root, dirs, files in os.walk(fd.localpath):
352 for f in files:
353 f_path = os.path.join(root, f)
354 if os.path.islink(f_path):
355 # TODO: SPDX doesn't support symlinks yet
356 continue
357
358 file = objset.new_file(
359 objset.new_spdxid(
360 "source", str(download_idx + 1), str(walk_idx)
361 ),
362 os.path.join(
363 file_name, os.path.relpath(f_path, fd.localpath)
364 ),
365 f_path,
366 purposes=[primary_purpose],
367 )
368
369 inputs.add(file)
370 walk_idx += 1
371
372 else:
373 file = objset.new_file(
374 objset.new_spdxid("source", str(download_idx + 1)),
375 file_name,
376 fd.localpath,
377 purposes=[primary_purpose],
378 )
379 inputs.add(file)
380
381 else:
382 uri = fd.type
383 proto = getattr(fd, "proto", None)
384 if proto is not None:
385 uri = uri + "+" + proto
386 uri = uri + "://" + fd.host + fd.path
387
388 if fd.method.supports_srcrev():
389 uri = uri + "@" + fd.revisions[name]
390
391 dl = objset.add(
392 oe.spdx30.software_Package(
393 _id=objset.new_spdxid("source", str(download_idx + 1)),
394 creationInfo=objset.doc.creationInfo,
395 name=file_name,
396 software_primaryPurpose=primary_purpose,
397 software_downloadLocation=uri,
398 )
399 )
400
401 if fd.method.supports_checksum(fd):
402 # TODO Need something better than hard coding this
403 for checksum_id in ["sha256", "sha1"]:
404 expected_checksum = getattr(
405 fd, "%s_expected" % checksum_id, None
406 )
407 if expected_checksum is None:
408 continue
409
410 dl.verifiedUsing.append(
411 oe.spdx30.Hash(
412 algorithm=getattr(oe.spdx30.HashAlgorithm, checksum_id),
413 hashValue=expected_checksum,
414 )
415 )
416
417 inputs.add(dl)
418
419 return inputs
420
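# Illustrative summary of the helper above: local "file://" SRC_URI entries are
# recorded as software_File objects (recursing into directories), while remote
# entries become a software_Package whose software_downloadLocation is
# reassembled from the fetcher data; e.g. a hypothetical
# "git://example.com/repo.git;protocol=https" entry ends up roughly as
# "git+https://example.com/repo.git@<srcrev>".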
421
422def set_purposes(d, element, *var_names, force_purposes=[]):
423 purposes = force_purposes[:]
424
425 for var_name in var_names:
426 val = d.getVar(var_name)
427 if val:
428 purposes.extend(val.split())
429 break
430
431 if not purposes:
432 bb.warn("No SPDX purposes found in %s" % " ".join(var_names))
433 return
434
435 element.software_primaryPurpose = getattr(
436 oe.spdx30.software_SoftwarePurpose, purposes[0]
437 )
438 element.software_additionalPurpose = [
439 getattr(oe.spdx30.software_SoftwarePurpose, p) for p in purposes[1:]
440 ]
441
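# Example (matching the call in create_spdx() below): force "install" as the
# primary purpose of a binary package, with any values from
# SPDX_PACKAGE_ADDITIONAL_PURPOSE (per-package or global) recorded as
# additional purposes:
#
#   set_purposes(
#       d,
#       spdx_package,
#       "SPDX_PACKAGE_ADDITIONAL_PURPOSE:%s" % package,
#       "SPDX_PACKAGE_ADDITIONAL_PURPOSE",
#       force_purposes=["install"],
#   )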
442
443def create_spdx(d):
444 def set_var_field(var, obj, name, package=None):
445 val = None
446 if package:
447 val = d.getVar("%s:%s" % (var, package))
448
449 if not val:
450 val = d.getVar(var)
451
452 if val:
453 setattr(obj, name, val)
454
455 license_data = oe.spdx_common.load_spdx_license_data(d)
456
457 deploydir = Path(d.getVar("SPDXDEPLOY"))
458 deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
459 spdx_workdir = Path(d.getVar("SPDXWORK"))
460 include_sources = d.getVar("SPDX_INCLUDE_SOURCES") == "1"
461 pkg_arch = d.getVar("SSTATE_PKGARCH")
462 is_native = bb.data.inherits_class("native", d) or bb.data.inherits_class(
463 "cross", d
464 )
465 include_vex = d.getVar("SPDX_INCLUDE_VEX")
466 if include_vex not in ("none", "current", "all"):
467 bb.fatal("SPDX_INCLUDE_VEX must be one of 'none', 'current', 'all'")
468
469 build_objset = oe.sbom30.ObjectSet.new_objset(d, d.getVar("PN"))
470
471 build = build_objset.new_task_build("recipe", "recipe")
472 build_objset.doc.rootElement.append(build)
473
474 build_objset.set_is_native(is_native)
475
476 for var in (d.getVar("SPDX_CUSTOM_ANNOTATION_VARS") or "").split():
477 new_annotation(
478 d,
479 build_objset,
480 build,
481 "%s=%s" % (var, d.getVar(var)),
482 oe.spdx30.AnnotationType.other,
483 )
484
485 build_inputs = set()
486
487 # Add CVEs
488 cve_by_status = {}
489 if include_vex != "none":
490 for cve in d.getVarFlags("CVE_STATUS") or {}:
491 decoded_status = oe.cve_check.decode_cve_status(d, cve)
492
493 # If this CVE is fixed upstream, skip it unless all CVEs are
494 # specified.
495 if include_vex != "all" and 'detail' in decoded_status and \
496 decoded_status['detail'] in (
497 "fixed-version",
498 "cpe-stable-backport",
499 ):
500 bb.debug(1, "Skipping %s since it is already fixed upstream" % cve)
501 continue
502
503 cve_by_status.setdefault(decoded_status['mapping'], {})[cve] = (
504 build_objset.new_cve_vuln(cve),
505 decoded_status['detail'],
506 decoded_status['description'],
507 )
508
509 cpe_ids = oe.cve_check.get_cpe_ids(d.getVar("CVE_PRODUCT"), d.getVar("CVE_VERSION"))
510
511 source_files = add_download_files(d, build_objset)
512 build_inputs |= source_files
513
514 recipe_spdx_license = add_license_expression(d, build_objset, d.getVar("LICENSE"), license_data)
515 build_objset.new_relationship(
516 source_files,
517 oe.spdx30.RelationshipType.hasConcludedLicense,
518 [recipe_spdx_license],
519 )
520
521 if oe.spdx_common.process_sources(d) and include_sources:
522 bb.debug(1, "Adding source files to SPDX")
523 oe.spdx_common.get_patched_src(d)
524
525 build_inputs |= add_package_files(
526 d,
527 build_objset,
528 spdx_workdir,
529 lambda file_counter: build_objset.new_spdxid(
530 "sourcefile", str(file_counter)
531 ),
532 lambda filepath: [oe.spdx30.software_SoftwarePurpose.source],
533 license_data,
534 ignore_dirs=[".git"],
535 ignore_top_level_dirs=["temp"],
536 archive=None,
537 )
538
539 dep_objsets, dep_builds = collect_dep_objsets(d, build)
540 if dep_builds:
541 build_objset.new_scoped_relationship(
542 [build],
543 oe.spdx30.RelationshipType.dependsOn,
544 oe.spdx30.LifecycleScopeType.build,
545 sorted(oe.sbom30.get_element_link_id(b) for b in dep_builds),
546 )
547
548 debug_source_ids = set()
549 source_hash_cache = {}
550
551 # Write out the package SPDX data now. It is not complete as we cannot
552 # write the runtime data, so write it to a staging area and a later task
553 # will write out the final collection
554
555 # TODO: Handle native recipe output
556 if not is_native:
557 bb.debug(1, "Collecting Dependency sources files")
558 sources = collect_dep_sources(dep_objsets)
559
560 bb.build.exec_func("read_subpackage_metadata", d)
561
562 pkgdest = Path(d.getVar("PKGDEST"))
563 for package in d.getVar("PACKAGES").split():
564 if not oe.packagedata.packaged(package, d):
565 continue
566
567 pkg_name = d.getVar("PKG:%s" % package) or package
568
569 bb.debug(1, "Creating SPDX for package %s" % pkg_name)
570
571 pkg_objset = oe.sbom30.ObjectSet.new_objset(d, pkg_name)
572
573 spdx_package = pkg_objset.add_root(
574 oe.spdx30.software_Package(
575 _id=pkg_objset.new_spdxid("package", pkg_name),
576 creationInfo=pkg_objset.doc.creationInfo,
577 name=pkg_name,
578 software_packageVersion=d.getVar("PV"),
579 )
580 )
581 set_timestamp_now(d, spdx_package, "builtTime")
582
583 set_purposes(
584 d,
585 spdx_package,
586 "SPDX_PACKAGE_ADDITIONAL_PURPOSE:%s" % package,
587 "SPDX_PACKAGE_ADDITIONAL_PURPOSE",
588 force_purposes=["install"],
589 )
590
591 supplier = build_objset.new_agent("SPDX_PACKAGE_SUPPLIER")
592 if supplier is not None:
593 spdx_package.supplier = (
594 supplier if isinstance(supplier, str) else supplier._id
595 )
596
597 set_var_field(
598 "HOMEPAGE", spdx_package, "software_homePage", package=package
599 )
600 set_var_field("SUMMARY", spdx_package, "summary", package=package)
601 set_var_field("DESCRIPTION", spdx_package, "description", package=package)
602
603 pkg_objset.new_scoped_relationship(
604 [build._id],
605 oe.spdx30.RelationshipType.hasOutputs,
606 oe.spdx30.LifecycleScopeType.build,
607 [spdx_package],
608 )
609
610 for cpe_id in cpe_ids:
611 spdx_package.externalIdentifier.append(
612 oe.spdx30.ExternalIdentifier(
613 externalIdentifierType=oe.spdx30.ExternalIdentifierType.cpe23,
614 identifier=cpe_id,
615 )
616 )
617
618 # TODO: Generate a file for each actual IPK/DEB/RPM/TGZ file
619 # generated and link it to the package
620 # spdx_package_file = pkg_objset.add(oe.spdx30.software_File(
621 # _id=pkg_objset.new_spdxid("distribution", pkg_name),
622 # creationInfo=pkg_objset.doc.creationInfo,
623 # name=pkg_name,
624 # software_primaryPurpose=spdx_package.software_primaryPurpose,
625 # software_additionalPurpose=spdx_package.software_additionalPurpose,
626 # ))
627 # set_timestamp_now(d, spdx_package_file, "builtTime")
628
629 ## TODO add hashes
630 # pkg_objset.new_relationship(
631 # [spdx_package],
632 # oe.spdx30.RelationshipType.hasDistributionArtifact,
633 # [spdx_package_file],
634 # )
635
636 # NOTE: licenses live in the recipe collection and are referenced
637 # by ID in the package collection(s). This helps reduce duplication
638 # (since a lot of packages will have the same license), and also
639 # prevents duplicate license SPDX IDs in the packages
640 package_license = d.getVar("LICENSE:%s" % package)
641 if package_license and package_license != d.getVar("LICENSE"):
642 package_spdx_license = add_license_expression(
643 d, build_objset, package_license, license_data
644 )
645 else:
646 package_spdx_license = recipe_spdx_license
647
648 pkg_objset.new_relationship(
649 [spdx_package],
650 oe.spdx30.RelationshipType.hasConcludedLicense,
651 [package_spdx_license._id],
652 )
653
654 # NOTE: CVE Elements live in the recipe collection
655 all_cves = set()
656 for status, cves in cve_by_status.items():
657 for cve, items in cves.items():
658 spdx_cve, detail, description = items
659
660 all_cves.add(spdx_cve._id)
661
662 if status == "Patched":
663 pkg_objset.new_vex_patched_relationship(
664 [spdx_cve._id], [spdx_package]
665 )
666 elif status == "Unpatched":
667 pkg_objset.new_vex_unpatched_relationship(
668 [spdx_cve._id], [spdx_package]
669 )
670 elif status == "Ignored":
671 spdx_vex = pkg_objset.new_vex_ignored_relationship(
672 [spdx_cve._id],
673 [spdx_package],
674 impact_statement=description,
675 )
676
677 if detail in (
678 "ignored",
679 "cpe-incorrect",
680 "disputed",
681 "upstream-wontfix",
682 ):
683 # VEX doesn't have justifications for this
684 pass
685 elif detail in (
686 "not-applicable-config",
687 "not-applicable-platform",
688 ):
689 for v in spdx_vex:
690 v.security_justificationType = (
691 oe.spdx30.security_VexJustificationType.vulnerableCodeNotPresent
692 )
693 else:
694 bb.fatal(f"Unknown detail '{detail}' for ignored {cve}")
695 else:
696 bb.fatal(f"Unknown {cve} status '{status}'")
697
698 if all_cves:
699 pkg_objset.new_relationship(
700 [spdx_package],
701 oe.spdx30.RelationshipType.hasAssociatedVulnerability,
702 sorted(list(all_cves)),
703 )
704
705 bb.debug(1, "Adding package files to SPDX for package %s" % pkg_name)
706 package_files = add_package_files(
707 d,
708 pkg_objset,
709 pkgdest / package,
710 lambda file_counter: pkg_objset.new_spdxid(
711 "package", pkg_name, "file", str(file_counter)
712 ),
713 # TODO: Can we know the purpose here?
714 lambda filepath: [],
715 license_data,
716 ignore_top_level_dirs=["CONTROL", "DEBIAN"],
717 archive=None,
718 )
719
720 if package_files:
721 pkg_objset.new_relationship(
722 [spdx_package],
723 oe.spdx30.RelationshipType.contains,
724 sorted(list(package_files)),
725 )
726
727 if include_sources:
728 debug_sources = get_package_sources_from_debug(
729 d, package, package_files, sources, source_hash_cache
730 )
731 debug_source_ids |= set(
732 oe.sbom30.get_element_link_id(d) for d in debug_sources
733 )
734
735 oe.sbom30.write_recipe_jsonld_doc(
736 d, pkg_objset, "packages-staging", deploydir, create_spdx_id_links=False
737 )
738
739 if include_sources:
740 bb.debug(1, "Adding sysroot files to SPDX")
741 sysroot_files = add_package_files(
742 d,
743 build_objset,
744 d.expand("${COMPONENTS_DIR}/${PACKAGE_ARCH}/${PN}"),
745 lambda file_counter: build_objset.new_spdxid("sysroot", str(file_counter)),
746 lambda filepath: [],
747 license_data,
748 archive=None,
749 )
750
751 if sysroot_files:
752 build_objset.new_scoped_relationship(
753 [build],
754 oe.spdx30.RelationshipType.hasOutputs,
755 oe.spdx30.LifecycleScopeType.build,
756 sorted(list(sysroot_files)),
757 )
758
759 if build_inputs or debug_source_ids:
760 build_objset.new_scoped_relationship(
761 [build],
762 oe.spdx30.RelationshipType.hasInputs,
763 oe.spdx30.LifecycleScopeType.build,
764 sorted(list(build_inputs)) + sorted(list(debug_source_ids)),
765 )
766
767 oe.sbom30.write_recipe_jsonld_doc(d, build_objset, "recipes", deploydir)
768
769
770def create_package_spdx(d):
771 deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
772 deploydir = Path(d.getVar("SPDXRUNTIMEDEPLOY"))
773 is_native = bb.data.inherits_class("native", d) or bb.data.inherits_class(
774 "cross", d
775 )
776
777 providers = oe.spdx_common.collect_package_providers(d)
778 pkg_arch = d.getVar("SSTATE_PKGARCH")
779
780 if is_native:
781 return
782
783 bb.build.exec_func("read_subpackage_metadata", d)
784
785 dep_package_cache = {}
786
787    # Any element common to all packages that needs to be referenced by ID
788    # should be written into this object set
789 common_objset = oe.sbom30.ObjectSet.new_objset(
790 d, "%s-package-common" % d.getVar("PN")
791 )
792
793 pkgdest = Path(d.getVar("PKGDEST"))
794 for package in d.getVar("PACKAGES").split():
795 localdata = bb.data.createCopy(d)
796 pkg_name = d.getVar("PKG:%s" % package) or package
797 localdata.setVar("PKG", pkg_name)
798 localdata.setVar("OVERRIDES", d.getVar("OVERRIDES", False) + ":" + package)
799
800 if not oe.packagedata.packaged(package, localdata):
801 continue
802
803 spdx_package, pkg_objset = oe.sbom30.load_obj_in_jsonld(
804 d,
805 pkg_arch,
806 "packages-staging",
807 pkg_name,
808 oe.spdx30.software_Package,
809 software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.install,
810 )
811
812 # We will write out a new collection, so link it to the new
813 # creation info in the common package data. The old creation info
814 # should still exist and be referenced by all the existing elements
815 # in the package
816 pkg_objset.creationInfo = pkg_objset.copy_creation_info(
817 common_objset.doc.creationInfo
818 )
819
820 runtime_spdx_deps = set()
821
822 deps = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS") or "")
823 seen_deps = set()
824 for dep, _ in deps.items():
825 if dep in seen_deps:
826 continue
827
828 if dep not in providers:
829 continue
830
831 (dep, _) = providers[dep]
832
833 if not oe.packagedata.packaged(dep, localdata):
834 continue
835
836 dep_pkg_data = oe.packagedata.read_subpkgdata_dict(dep, d)
837 dep_pkg = dep_pkg_data["PKG"]
838
839 if dep in dep_package_cache:
840 dep_spdx_package = dep_package_cache[dep]
841 else:
842 bb.debug(1, "Searching for %s" % dep_pkg)
843 dep_spdx_package, _ = oe.sbom30.find_root_obj_in_jsonld(
844 d,
845 "packages-staging",
846 dep_pkg,
847 oe.spdx30.software_Package,
848 software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.install,
849 )
850 dep_package_cache[dep] = dep_spdx_package
851
852 runtime_spdx_deps.add(dep_spdx_package)
853 seen_deps.add(dep)
854
855 if runtime_spdx_deps:
856 pkg_objset.new_scoped_relationship(
857 [spdx_package],
858 oe.spdx30.RelationshipType.dependsOn,
859 oe.spdx30.LifecycleScopeType.runtime,
860 [oe.sbom30.get_element_link_id(dep) for dep in runtime_spdx_deps],
861 )
862
863 oe.sbom30.write_recipe_jsonld_doc(d, pkg_objset, "packages", deploydir)
864
865 oe.sbom30.write_recipe_jsonld_doc(d, common_objset, "common-package", deploydir)
866
867
868def write_bitbake_spdx(d):
869 # Set PN to "bitbake" so that SPDX IDs can be generated
870 d.setVar("PN", "bitbake")
871 d.setVar("BB_TASKHASH", "bitbake")
872 oe.spdx_common.load_spdx_license_data(d)
873
874 deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
875
876 objset = oe.sbom30.ObjectSet.new_objset(d, "bitbake", False)
877
878 host_import_key = d.getVar("SPDX_BUILD_HOST")
879 invoked_by = objset.new_agent("SPDX_INVOKED_BY", add=False)
880 on_behalf_of = objset.new_agent("SPDX_ON_BEHALF_OF", add=False)
881
882 if d.getVar("SPDX_INCLUDE_BITBAKE_PARENT_BUILD") == "1":
883 # Since the Build objects are unique, we may as well set the creation
884 # time to the current time instead of the fallback SDE
885 objset.doc.creationInfo.created = datetime.now(timezone.utc)
886
887 # Each invocation of bitbake should have a unique ID since it is a
888 # unique build
889 nonce = os.urandom(16).hex()
890
891 build = objset.add_root(
892 oe.spdx30.build_Build(
893 _id=objset.new_spdxid(nonce, include_unihash=False),
894 creationInfo=objset.doc.creationInfo,
895 build_buildType=oe.sbom30.SPDX_BUILD_TYPE,
896 )
897 )
898 set_timestamp_now(d, build, "build_buildStartTime")
899
900 if host_import_key:
901 objset.new_scoped_relationship(
902 [build],
903 oe.spdx30.RelationshipType.hasHost,
904 oe.spdx30.LifecycleScopeType.build,
905 [objset.new_import("SPDX_BUILD_HOST")],
906 )
907
908 if invoked_by:
909 objset.add(invoked_by)
910 invoked_by_spdx = objset.new_scoped_relationship(
911 [build],
912 oe.spdx30.RelationshipType.invokedBy,
913 oe.spdx30.LifecycleScopeType.build,
914 [invoked_by],
915 )
916
917 if on_behalf_of:
918 objset.add(on_behalf_of)
919 objset.new_scoped_relationship(
920 [on_behalf_of],
921 oe.spdx30.RelationshipType.delegatedTo,
922 oe.spdx30.LifecycleScopeType.build,
923 invoked_by_spdx,
924 )
925
926 elif on_behalf_of:
927 bb.warn("SPDX_ON_BEHALF_OF has no effect if SPDX_INVOKED_BY is not set")
928
929 else:
930 if host_import_key:
931 bb.warn(
932 "SPDX_BUILD_HOST has no effect if SPDX_INCLUDE_BITBAKE_PARENT_BUILD is not set"
933 )
934
935 if invoked_by:
936 bb.warn(
937 "SPDX_INVOKED_BY has no effect if SPDX_INCLUDE_BITBAKE_PARENT_BUILD is not set"
938 )
939
940 if on_behalf_of:
941 bb.warn(
942 "SPDX_ON_BEHALF_OF has no effect if SPDX_INCLUDE_BITBAKE_PARENT_BUILD is not set"
943 )
944
945 for obj in objset.foreach_type(oe.spdx30.Element):
946 obj.extension.append(oe.sbom30.OELinkExtension(link_spdx_id=False))
947 obj.extension.append(oe.sbom30.OEIdAliasExtension())
948
949 oe.sbom30.write_jsonld_doc(d, objset, deploy_dir_spdx / "bitbake.spdx.json")
950
951
952def collect_build_package_inputs(d, objset, build, packages):
953 providers = oe.spdx_common.collect_package_providers(d)
954
955 build_deps = set()
956 missing_providers = set()
957
958 for name in sorted(packages.keys()):
959 if name not in providers:
960 missing_providers.add(name)
961 continue
962
963 pkg_name, pkg_hashfn = providers[name]
964
965 # Copy all of the package SPDX files into the Sbom elements
966 pkg_spdx, _ = oe.sbom30.find_root_obj_in_jsonld(
967 d,
968 "packages",
969 pkg_name,
970 oe.spdx30.software_Package,
971 software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.install,
972 )
973 build_deps.add(pkg_spdx._id)
974
975 if missing_providers:
976 bb.fatal(
977 f"Unable to find SPDX provider(s) for: {', '.join(sorted(missing_providers))}"
978 )
979
980 if build_deps:
981 objset.new_scoped_relationship(
982 [build],
983 oe.spdx30.RelationshipType.hasInputs,
984 oe.spdx30.LifecycleScopeType.build,
985 sorted(list(build_deps)),
986 )
987
988
989def create_rootfs_spdx(d):
990 deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
991 deploydir = Path(d.getVar("SPDXROOTFSDEPLOY"))
992 root_packages_file = Path(d.getVar("SPDX_ROOTFS_PACKAGES"))
993 image_basename = d.getVar("IMAGE_BASENAME")
994 machine = d.getVar("MACHINE")
995
996 with root_packages_file.open("r") as f:
997 packages = json.load(f)
998
999 objset = oe.sbom30.ObjectSet.new_objset(d, "%s-%s" % (image_basename, machine))
1000
1001 rootfs = objset.add_root(
1002 oe.spdx30.software_Package(
1003 _id=objset.new_spdxid("rootfs", image_basename),
1004 creationInfo=objset.doc.creationInfo,
1005 name=image_basename,
1006 software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.archive,
1007 )
1008 )
1009 set_timestamp_now(d, rootfs, "builtTime")
1010
1011 rootfs_build = objset.add_root(objset.new_task_build("rootfs", "rootfs"))
1012 set_timestamp_now(d, rootfs_build, "build_buildEndTime")
1013
1014 objset.new_scoped_relationship(
1015 [rootfs_build],
1016 oe.spdx30.RelationshipType.hasOutputs,
1017 oe.spdx30.LifecycleScopeType.build,
1018 [rootfs],
1019 )
1020
1021 collect_build_package_inputs(d, objset, rootfs_build, packages)
1022
1023 oe.sbom30.write_recipe_jsonld_doc(d, objset, "rootfs", deploydir)
1024
1025
1026def create_image_spdx(d):
1027 image_deploy_dir = Path(d.getVar("IMGDEPLOYDIR"))
1028 manifest_path = Path(d.getVar("IMAGE_OUTPUT_MANIFEST"))
1029 spdx_work_dir = Path(d.getVar("SPDXIMAGEWORK"))
1030
1031 image_basename = d.getVar("IMAGE_BASENAME")
1032 machine = d.getVar("MACHINE")
1033
1034 objset = oe.sbom30.ObjectSet.new_objset(d, "%s-%s" % (image_basename, machine))
1035
1036 with manifest_path.open("r") as f:
1037 manifest = json.load(f)
1038
1039 builds = []
1040 for task in manifest:
1041 imagetype = task["imagetype"]
1042 taskname = task["taskname"]
1043
1044 image_build = objset.add_root(
1045 objset.new_task_build(taskname, "image/%s" % imagetype)
1046 )
1047 set_timestamp_now(d, image_build, "build_buildEndTime")
1048 builds.append(image_build)
1049
1050 artifacts = []
1051
1052 for image in task["images"]:
1053 image_filename = image["filename"]
1054 image_path = image_deploy_dir / image_filename
1055 a = objset.add_root(
1056 oe.spdx30.software_File(
1057 _id=objset.new_spdxid("image", image_filename),
1058 creationInfo=objset.doc.creationInfo,
1059 name=image_filename,
1060 verifiedUsing=[
1061 oe.spdx30.Hash(
1062 algorithm=oe.spdx30.HashAlgorithm.sha256,
1063 hashValue=bb.utils.sha256_file(image_path),
1064 )
1065 ],
1066 )
1067 )
1068 set_purposes(
1069 d, a, "SPDX_IMAGE_PURPOSE:%s" % imagetype, "SPDX_IMAGE_PURPOSE"
1070 )
1071 set_timestamp_now(d, a, "builtTime")
1072
1073 artifacts.append(a)
1074
1075 if artifacts:
1076 objset.new_scoped_relationship(
1077 [image_build],
1078 oe.spdx30.RelationshipType.hasOutputs,
1079 oe.spdx30.LifecycleScopeType.build,
1080 artifacts,
1081 )
1082
1083 if builds:
1084 rootfs_image, _ = oe.sbom30.find_root_obj_in_jsonld(
1085 d,
1086 "rootfs",
1087 "%s-%s" % (image_basename, machine),
1088 oe.spdx30.software_Package,
1089 # TODO: Should use a purpose to filter here?
1090 )
1091 objset.new_scoped_relationship(
1092 builds,
1093 oe.spdx30.RelationshipType.hasInputs,
1094 oe.spdx30.LifecycleScopeType.build,
1095 [rootfs_image._id],
1096 )
1097
1098 objset.add_aliases()
1099 objset.link()
1100 oe.sbom30.write_recipe_jsonld_doc(d, objset, "image", spdx_work_dir)
1101
1102
1103def create_image_sbom_spdx(d):
1104 image_name = d.getVar("IMAGE_NAME")
1105 image_basename = d.getVar("IMAGE_BASENAME")
1106 image_link_name = d.getVar("IMAGE_LINK_NAME")
1107 imgdeploydir = Path(d.getVar("SPDXIMAGEDEPLOYDIR"))
1108 machine = d.getVar("MACHINE")
1109
1110 spdx_path = imgdeploydir / (image_name + ".spdx.json")
1111
1112 root_elements = []
1113
1114 # TODO: Do we need to add the rootfs or are the image files sufficient?
1115 rootfs_image, _ = oe.sbom30.find_root_obj_in_jsonld(
1116 d,
1117 "rootfs",
1118 "%s-%s" % (image_basename, machine),
1119 oe.spdx30.software_Package,
1120 # TODO: Should use a purpose here?
1121 )
1122 root_elements.append(rootfs_image._id)
1123
1124 image_objset, _ = oe.sbom30.find_jsonld(
1125 d, "image", "%s-%s" % (image_basename, machine), required=True
1126 )
1127 for o in image_objset.foreach_root(oe.spdx30.software_File):
1128 root_elements.append(o._id)
1129
1130 objset, sbom = oe.sbom30.create_sbom(d, image_name, root_elements)
1131
1132 oe.sbom30.write_jsonld_doc(d, objset, spdx_path)
1133
1134 def make_image_link(target_path, suffix):
1135 if image_link_name:
1136 link = imgdeploydir / (image_link_name + suffix)
1137 if link != target_path:
1138 link.symlink_to(os.path.relpath(target_path, link.parent))
1139
1140 make_image_link(spdx_path, ".spdx.json")
1141
1142
1143def sdk_create_spdx(d, sdk_type, spdx_work_dir, toolchain_outputname):
1144 sdk_name = toolchain_outputname + "-" + sdk_type
1145 sdk_packages = oe.sdk.sdk_list_installed_packages(d, sdk_type == "target")
1146
1147 objset = oe.sbom30.ObjectSet.new_objset(d, sdk_name)
1148
1149 sdk_rootfs = objset.add_root(
1150 oe.spdx30.software_Package(
1151 _id=objset.new_spdxid("sdk-rootfs", sdk_name),
1152 creationInfo=objset.doc.creationInfo,
1153 name=sdk_name,
1154 software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.archive,
1155 )
1156 )
1157 set_timestamp_now(d, sdk_rootfs, "builtTime")
1158
1159 sdk_build = objset.add_root(objset.new_task_build("sdk-rootfs", "sdk-rootfs"))
1160 set_timestamp_now(d, sdk_build, "build_buildEndTime")
1161
1162 objset.new_scoped_relationship(
1163 [sdk_build],
1164 oe.spdx30.RelationshipType.hasOutputs,
1165 oe.spdx30.LifecycleScopeType.build,
1166 [sdk_rootfs],
1167 )
1168
1169 collect_build_package_inputs(d, objset, sdk_build, sdk_packages)
1170
1171 objset.add_aliases()
1172 oe.sbom30.write_jsonld_doc(d, objset, spdx_work_dir / "sdk-rootfs.spdx.json")
1173
1174
1175def create_sdk_sbom(d, sdk_deploydir, spdx_work_dir, toolchain_outputname):
1176 # Load the document written earlier
1177 rootfs_objset = oe.sbom30.load_jsonld(
1178 d, spdx_work_dir / "sdk-rootfs.spdx.json", required=True
1179 )
1180
1181 # Create a new build for the SDK installer
1182 sdk_build = rootfs_objset.new_task_build("sdk-populate", "sdk-populate")
1183 set_timestamp_now(d, sdk_build, "build_buildEndTime")
1184
1185 rootfs = rootfs_objset.find_root(oe.spdx30.software_Package)
1186 if rootfs is None:
1187 bb.fatal("Unable to find rootfs artifact")
1188
1189 rootfs_objset.new_scoped_relationship(
1190 [sdk_build],
1191 oe.spdx30.RelationshipType.hasInputs,
1192 oe.spdx30.LifecycleScopeType.build,
1193 [rootfs],
1194 )
1195
1196 files = set()
1197 root_files = []
1198
1199 # NOTE: os.walk() doesn't return symlinks
1200 for dirpath, dirnames, filenames in os.walk(sdk_deploydir):
1201 for fn in filenames:
1202 fpath = Path(dirpath) / fn
1203 if not fpath.is_file() or fpath.is_symlink():
1204 continue
1205
1206 relpath = str(fpath.relative_to(sdk_deploydir))
1207
1208 f = rootfs_objset.new_file(
1209 rootfs_objset.new_spdxid("sdk-installer", relpath),
1210 relpath,
1211 fpath,
1212 )
1213 set_timestamp_now(d, f, "builtTime")
1214
1215 if fn.endswith(".manifest"):
1216 f.software_primaryPurpose = oe.spdx30.software_SoftwarePurpose.manifest
1217 elif fn.endswith(".testdata.json"):
1218 f.software_primaryPurpose = (
1219 oe.spdx30.software_SoftwarePurpose.configuration
1220 )
1221 else:
1222 set_purposes(d, f, "SPDX_SDK_PURPOSE")
1223 root_files.append(f)
1224
1225 files.add(f)
1226
1227 if files:
1228 rootfs_objset.new_scoped_relationship(
1229 [sdk_build],
1230 oe.spdx30.RelationshipType.hasOutputs,
1231 oe.spdx30.LifecycleScopeType.build,
1232 files,
1233 )
1234 else:
1235 bb.warn(f"No SDK output files found in {sdk_deploydir}")
1236
1237 objset, sbom = oe.sbom30.create_sbom(
1238 d, toolchain_outputname, sorted(list(files)), [rootfs_objset]
1239 )
1240
1241 oe.sbom30.write_jsonld_doc(
1242 d, objset, sdk_deploydir / (toolchain_outputname + ".spdx.json")
1243 )
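
For reference, a minimal self-contained sketch of the CVE-status-to-VEX mapping applied in the per-package loop above. Plain strings stand in for the oe.spdx30 VEX relationship and justification objects; the status and detail values are the ones handled by the code, but this is only an illustration, not the task implementation.

    # Simplified, standalone illustration of the Patched/Unpatched/Ignored handling above.
    def vex_relationship(status, detail=None):
        if status == "Patched":
            return ("vex-patched", None)
        if status == "Unpatched":
            return ("vex-unpatched", None)
        if status == "Ignored":
            if detail in ("ignored", "cpe-incorrect", "disputed", "upstream-wontfix"):
                # VEX has no justification type for these details
                return ("vex-ignored", None)
            if detail in ("not-applicable-config", "not-applicable-platform"):
                return ("vex-ignored", "vulnerableCodeNotPresent")
            raise ValueError(f"Unknown detail '{detail}'")
        raise ValueError(f"Unknown status '{status}'")

    print(vex_relationship("Ignored", "not-applicable-platform"))
    # ('vex-ignored', 'vulnerableCodeNotPresent')
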
diff --git a/meta/lib/oe/spdx_common.py b/meta/lib/oe/spdx_common.py
new file mode 100644
index 0000000000..dfe90f96cf
--- /dev/null
+++ b/meta/lib/oe/spdx_common.py
@@ -0,0 +1,227 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import bb
8import collections
9import json
10import oe.packagedata
import os
11import re
12import shutil
13
14from pathlib import Path
15
16
17LIC_REGEX = re.compile(
18 rb"^\W*SPDX-License-Identifier:\s*([ \w\d.()+-]+?)(?:\s+\W*)?$",
19 re.MULTILINE,
20)
21
22
23def extract_licenses(filename):
24 """
25 Extract SPDX License identifiers from a file
26 """
27 try:
28 with open(filename, "rb") as f:
29 size = min(15000, os.stat(filename).st_size)
30 txt = f.read(size)
31 licenses = re.findall(LIC_REGEX, txt)
32 if licenses:
33 ascii_licenses = [lic.decode("ascii") for lic in licenses]
34 return ascii_licenses
35 except Exception as e:
36 bb.warn(f"Exception reading {filename}: {e}")
37 return []
38
39
40def is_work_shared_spdx(d):
41 return bb.data.inherits_class("kernel", d) or ("work-shared" in d.getVar("WORKDIR"))
42
43
44def load_spdx_license_data(d):
45
46 with open(d.getVar("SPDX_LICENSES"), "r") as f:
47 data = json.load(f)
48 # Transform the license array to a dictionary
49 data["licenses"] = {l["licenseId"]: l for l in data["licenses"]}
50
51 return data
52
53
54def process_sources(d):
55 """
56 Returns True if the sources for this recipe should be included in the SPDX
57 or False if not
58 """
59 pn = d.getVar("PN")
60 assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
61 if pn in assume_provided:
62 for p in d.getVar("PROVIDES").split():
63 if p != pn:
64 pn = p
65 break
66
67 # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
68 # so avoid archiving source here.
69 if pn.startswith("glibc-locale"):
70 return False
71 if d.getVar("PN") == "libtool-cross":
72 return False
73 if d.getVar("PN") == "libgcc-initial":
74 return False
75 if d.getVar("PN") == "shadow-sysroot":
76 return False
77
78 # We just archive gcc-source for all the gcc related recipes
79 if d.getVar("BPN") in ["gcc", "libgcc"]:
80        bb.debug(1, "spdx: There is a bug in the scan of %s, so do nothing" % pn)
81 return False
82
83 return True
84
85
86Dep = collections.namedtuple("Dep", ["pn", "hashfn", "in_taskhash"])
87
88
89def collect_direct_deps(d, dep_task):
90 """
91 Find direct dependencies of current task
92
93 Returns the list of recipes that have a dep_task that the current task
94 depends on
95 """
96 current_task = "do_" + d.getVar("BB_CURRENTTASK")
97 pn = d.getVar("PN")
98
99 taskdepdata = d.getVar("BB_TASKDEPDATA", False)
100
101 for this_dep in taskdepdata.values():
102 if this_dep[0] == pn and this_dep[1] == current_task:
103 break
104 else:
105 bb.fatal(f"Unable to find this {pn}:{current_task} in taskdepdata")
106
107 deps = set()
108
109 for dep_name in this_dep.deps:
110 dep_data = taskdepdata[dep_name]
111 if dep_data.taskname == dep_task and dep_data.pn != pn:
112 deps.add((dep_data.pn, dep_data.hashfn, dep_name in this_dep.taskhash_deps))
113
114 return sorted(deps)
115
116
117def get_spdx_deps(d):
118 """
119 Reads the SPDX dependencies JSON file and returns the data
120 """
121 spdx_deps_file = Path(d.getVar("SPDXDEPS"))
122
123 deps = []
124 with spdx_deps_file.open("r") as f:
125 for d in json.load(f):
126 deps.append(Dep(*d))
127 return deps
128
129
130def collect_package_providers(d):
131 """
132 Returns a dictionary where each RPROVIDES is mapped to the package that
133 provides it
134 """
135 deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
136
137 providers = {}
138
139 deps = collect_direct_deps(d, "do_create_spdx")
140 deps.append((d.getVar("PN"), d.getVar("BB_HASHFILENAME"), True))
141
142 for dep_pn, dep_hashfn, _ in deps:
143 localdata = d
144 recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
145 if not recipe_data:
146 localdata = bb.data.createCopy(d)
147 localdata.setVar("PKGDATA_DIR", "${PKGDATA_DIR_SDK}")
148 recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
149
150 for pkg in recipe_data.get("PACKAGES", "").split():
151 pkg_data = oe.packagedata.read_subpkgdata_dict(pkg, localdata)
152 rprovides = set(
153 n
154 for n, _ in bb.utils.explode_dep_versions2(
155 pkg_data.get("RPROVIDES", "")
156 ).items()
157 )
158 rprovides.add(pkg)
159
160 if "PKG" in pkg_data:
161 pkg = pkg_data["PKG"]
162 rprovides.add(pkg)
163
164 for r in rprovides:
165 providers[r] = (pkg, dep_hashfn)
166
167 return providers
168
169
170def get_patched_src(d):
171 """
172 Save patched source of the recipe in SPDX_WORKDIR.
173 """
174 spdx_workdir = d.getVar("SPDXWORK")
175 spdx_sysroot_native = d.getVar("STAGING_DIR_NATIVE")
176 pn = d.getVar("PN")
177
178 workdir = d.getVar("WORKDIR")
179
180 try:
181        # The kernel class functions require it to be on work-shared, so we don't change WORKDIR
182 if not is_work_shared_spdx(d):
183 # Change the WORKDIR to make do_unpack do_patch run in another dir.
184 d.setVar("WORKDIR", spdx_workdir)
185 # Restore the original path to recipe's native sysroot (it's relative to WORKDIR).
186 d.setVar("STAGING_DIR_NATIVE", spdx_sysroot_native)
187
188            # Changing 'WORKDIR' also changes 'B', so create the 'B' directory
189            # in case the following tasks need it to exist (for example, some
190            # recipes' do_patch expects 'B' to be present).
191 bb.utils.mkdirhier(d.getVar("B"))
192
193 bb.build.exec_func("do_unpack", d)
194 # Copy source of kernel to spdx_workdir
195 if is_work_shared_spdx(d):
196 share_src = d.getVar("WORKDIR")
197 d.setVar("WORKDIR", spdx_workdir)
198 d.setVar("STAGING_DIR_NATIVE", spdx_sysroot_native)
199 src_dir = (
200 spdx_workdir
201 + "/"
202 + d.getVar("PN")
203 + "-"
204 + d.getVar("PV")
205 + "-"
206 + d.getVar("PR")
207 )
208 bb.utils.mkdirhier(src_dir)
209 if bb.data.inherits_class("kernel", d):
210 share_src = d.getVar("STAGING_KERNEL_DIR")
211 cmd_copy_share = "cp -rf " + share_src + "/* " + src_dir + "/"
212 cmd_copy_shared_res = os.popen(cmd_copy_share).read()
213 bb.note("cmd_copy_shared_result = " + cmd_copy_shared_res)
214
215 git_path = src_dir + "/.git"
216 if os.path.exists(git_path):
217                shutil.rmtree(git_path)
218
219 # Make sure gcc and kernel sources are patched only once
220 if not (d.getVar("SRC_URI") == "" or is_work_shared_spdx(d)):
221 bb.build.exec_func("do_patch", d)
222
223        # Some userland recipes have no source.
224 if not os.path.exists(spdx_workdir):
225 bb.utils.mkdirhier(spdx_workdir)
226 finally:
227 d.setVar("WORKDIR", workdir)
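
As a quick self-contained check of the SPDX-License-Identifier scan implemented by extract_licenses() above (the sample source text below is invented for the demonstration):

    import re

    # Same pattern as LIC_REGEX in spdx_common.py
    LIC_REGEX = re.compile(
        rb"^\W*SPDX-License-Identifier:\s*([ \w\d.()+-]+?)(?:\s+\W*)?$",
        re.MULTILINE,
    )

    sample = (
        b"/*\n"
        b" * SPDX-License-Identifier: GPL-2.0-only\n"
        b" */\n"
        b"int main(void) { return 0; }\n"
    )

    print([lic.decode("ascii") for lic in LIC_REGEX.findall(sample)])
    # ['GPL-2.0-only']
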
diff --git a/meta/lib/oe/sstatesig.py b/meta/lib/oe/sstatesig.py
index a46e5502ab..f883497292 100644
--- a/meta/lib/oe/sstatesig.py
+++ b/meta/lib/oe/sstatesig.py
@@ -93,6 +93,14 @@ def sstate_lockedsigs(d):
93 sigs[pn][task] = [h, siggen_lockedsigs_var] 93 sigs[pn][task] = [h, siggen_lockedsigs_var]
94 return sigs 94 return sigs
95 95
96def lockedsigs_unihashmap(d):
97 unihashmap = {}
98 data = (d.getVar("SIGGEN_UNIHASHMAP") or "").split()
99 for entry in data:
100 pn, task, taskhash, unihash = entry.split(":")
101 unihashmap[(pn, task)] = (taskhash, unihash)
102 return unihashmap
103
96class SignatureGeneratorOEBasicHashMixIn(object): 104class SignatureGeneratorOEBasicHashMixIn(object):
97 supports_multiconfig_datacaches = True 105 supports_multiconfig_datacaches = True
98 106
@@ -100,6 +108,7 @@ class SignatureGeneratorOEBasicHashMixIn(object):
100 self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split() 108 self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
101 self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split() 109 self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
102 self.lockedsigs = sstate_lockedsigs(data) 110 self.lockedsigs = sstate_lockedsigs(data)
111 self.unihashmap = lockedsigs_unihashmap(data)
103 self.lockedhashes = {} 112 self.lockedhashes = {}
104 self.lockedpnmap = {} 113 self.lockedpnmap = {}
105 self.lockedhashfn = {} 114 self.lockedhashfn = {}
@@ -209,6 +218,15 @@ class SignatureGeneratorOEBasicHashMixIn(object):
209 def get_cached_unihash(self, tid): 218 def get_cached_unihash(self, tid):
210 if tid in self.lockedhashes and self.lockedhashes[tid] and not self._internal: 219 if tid in self.lockedhashes and self.lockedhashes[tid] and not self._internal:
211 return self.lockedhashes[tid] 220 return self.lockedhashes[tid]
221
222 (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
223 recipename = self.lockedpnmap[fn]
224
225 if (recipename, task) in self.unihashmap:
226 taskhash, unihash = self.unihashmap[(recipename, task)]
227 if taskhash == self.taskhash[tid]:
228 return unihash
229
212 return super().get_cached_unihash(tid) 230 return super().get_cached_unihash(tid)
213 231
214 def dump_sigtask(self, fn, task, stampbase, runtime): 232 def dump_sigtask(self, fn, task, stampbase, runtime):
@@ -219,6 +237,7 @@ class SignatureGeneratorOEBasicHashMixIn(object):
219 237
220 def dump_lockedsigs(self, sigfile, taskfilter=None): 238 def dump_lockedsigs(self, sigfile, taskfilter=None):
221 types = {} 239 types = {}
240 unihashmap = {}
222 for tid in self.runtaskdeps: 241 for tid in self.runtaskdeps:
223 # Bitbake changed this to a tuple in newer versions 242 # Bitbake changed this to a tuple in newer versions
224 if isinstance(tid, tuple): 243 if isinstance(tid, tuple):
@@ -226,13 +245,18 @@ class SignatureGeneratorOEBasicHashMixIn(object):
226 if taskfilter: 245 if taskfilter:
227 if not tid in taskfilter: 246 if not tid in taskfilter:
228 continue 247 continue
229 fn = bb.runqueue.fn_from_tid(tid) 248 (_, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
230 t = self.lockedhashfn[fn].split(" ")[1].split(":")[5] 249 t = self.lockedhashfn[fn].split(" ")[1].split(":")[5]
231 t = 't-' + t.replace('_', '-') 250 t = 't-' + t.replace('_', '-')
232 if t not in types: 251 if t not in types:
233 types[t] = [] 252 types[t] = []
234 types[t].append(tid) 253 types[t].append(tid)
235 254
255 taskhash = self.taskhash[tid]
256 unihash = self.get_unihash(tid)
257 if taskhash != unihash:
258 unihashmap[tid] = " " + self.lockedpnmap[fn] + ":" + task + ":" + taskhash + ":" + unihash
259
236 with open(sigfile, "w") as f: 260 with open(sigfile, "w") as f:
237 l = sorted(types) 261 l = sorted(types)
238 for t in l: 262 for t in l:
@@ -245,7 +269,12 @@ class SignatureGeneratorOEBasicHashMixIn(object):
245 continue 269 continue
246 f.write(" " + self.lockedpnmap[fn] + ":" + task + ":" + self.get_unihash(tid) + " \\\n") 270 f.write(" " + self.lockedpnmap[fn] + ":" + task + ":" + self.get_unihash(tid) + " \\\n")
247 f.write(' "\n') 271 f.write(' "\n')
248 f.write('SIGGEN_LOCKEDSIGS_TYPES:%s = "%s"' % (self.machine, " ".join(l))) 272 f.write('SIGGEN_LOCKEDSIGS_TYPES:%s = "%s"\n' % (self.machine, " ".join(l)))
273 f.write('SIGGEN_UNIHASHMAP += "\\\n')
274 sortedtid = sorted(unihashmap, key=lambda tid: self.lockedpnmap[bb.runqueue.fn_from_tid(tid)])
275 for tid in sortedtid:
276 f.write(unihashmap[tid] + " \\\n")
277 f.write(' "\n')
249 278
250 def dump_siglist(self, sigfile, path_prefix_strip=None): 279 def dump_siglist(self, sigfile, path_prefix_strip=None):
251 def strip_fn(fn): 280 def strip_fn(fn):
@@ -327,7 +356,6 @@ class SignatureGeneratorOEEquivHash(SignatureGeneratorOEBasicHashMixIn, bb.sigge
327 self.method = data.getVar('SSTATE_HASHEQUIV_METHOD') 356 self.method = data.getVar('SSTATE_HASHEQUIV_METHOD')
328 if not self.method: 357 if not self.method:
329 bb.fatal("OEEquivHash requires SSTATE_HASHEQUIV_METHOD to be set") 358 bb.fatal("OEEquivHash requires SSTATE_HASHEQUIV_METHOD to be set")
330 self.max_parallel = int(data.getVar('BB_HASHSERVE_MAX_PARALLEL') or 1)
331 self.username = data.getVar("BB_HASHSERVE_USERNAME") 359 self.username = data.getVar("BB_HASHSERVE_USERNAME")
332 self.password = data.getVar("BB_HASHSERVE_PASSWORD") 360 self.password = data.getVar("BB_HASHSERVE_PASSWORD")
333 if not self.username or not self.password: 361 if not self.username or not self.password:
@@ -371,7 +399,13 @@ def find_siginfo(pn, taskname, taskhashlist, d):
371 return siginfo.rpartition('.')[2] 399 return siginfo.rpartition('.')[2]
372 400
373 def get_time(fullpath): 401 def get_time(fullpath):
374 return os.stat(fullpath).st_mtime 402 # NFS can end up in a weird state where the file exists but has no stat info.
403        # If that happens, we assume it doesn't actually exist and show a warning
404 try:
405 return os.stat(fullpath).st_mtime
406 except FileNotFoundError:
407 bb.warn("Could not obtain mtime for {}".format(fullpath))
408 return None
375 409
376 # First search in stamps dir 410 # First search in stamps dir
377 localdata = d.createCopy() 411 localdata = d.createCopy()
@@ -394,13 +428,17 @@ def find_siginfo(pn, taskname, taskhashlist, d):
394 if taskhashlist: 428 if taskhashlist:
395 for taskhash in taskhashlist: 429 for taskhash in taskhashlist:
396 if fullpath.endswith('.%s' % taskhash): 430 if fullpath.endswith('.%s' % taskhash):
397 hashfiles[taskhash] = {'path':fullpath, 'sstate':False, 'time':get_time(fullpath)} 431 mtime = get_time(fullpath)
432 if mtime:
433 hashfiles[taskhash] = {'path':fullpath, 'sstate':False, 'time':mtime}
398 if len(hashfiles) == len(taskhashlist): 434 if len(hashfiles) == len(taskhashlist):
399 foundall = True 435 foundall = True
400 break 436 break
401 else: 437 else:
402 hashval = get_hashval(fullpath) 438 hashval = get_hashval(fullpath)
403 hashfiles[hashval] = {'path':fullpath, 'sstate':False, 'time':get_time(fullpath)} 439 mtime = get_time(fullpath)
440 if mtime:
441 hashfiles[hashval] = {'path':fullpath, 'sstate':False, 'time':mtime}
404 442
405 if not taskhashlist or (len(hashfiles) < 2 and not foundall): 443 if not taskhashlist or (len(hashfiles) < 2 and not foundall):
406 # That didn't work, look in sstate-cache 444 # That didn't work, look in sstate-cache
@@ -431,7 +469,9 @@ def find_siginfo(pn, taskname, taskhashlist, d):
431 actual_hashval = get_hashval(fullpath) 469 actual_hashval = get_hashval(fullpath)
432 if actual_hashval in hashfiles: 470 if actual_hashval in hashfiles:
433 continue 471 continue
434 hashfiles[actual_hashval] = {'path':fullpath, 'sstate':True, 'time':get_time(fullpath)} 472 mtime = get_time(fullpath)
473 if mtime:
474 hashfiles[actual_hashval] = {'path':fullpath, 'sstate':True, 'time':mtime}
435 475
436 return hashfiles 476 return hashfiles
437 477
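
The SIGGEN_UNIHASHMAP entries written by dump_lockedsigs() above are plain pn:task:taskhash:unihash tokens. Below is a minimal standalone version of the parsing done by the new lockedsigs_unihashmap() helper; the hash values are invented.

    # Standalone sketch of the SIGGEN_UNIHASHMAP parsing added above (hashes are made up).
    data = "zlib:do_install:1111aaaa:2222bbbb openssl:do_compile:3333cccc:4444dddd"

    unihashmap = {}
    for entry in data.split():
        pn, task, taskhash, unihash = entry.split(":")
        unihashmap[(pn, task)] = (taskhash, unihash)

    # get_cached_unihash() only returns the stored unihash when the recorded
    # taskhash matches the current one
    assert unihashmap[("zlib", "do_install")] == ("1111aaaa", "2222bbbb")
    print(unihashmap)
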
diff --git a/meta/lib/oe/utils.py b/meta/lib/oe/utils.py
index 14a7d07ef0..83f1440887 100644
--- a/meta/lib/oe/utils.py
+++ b/meta/lib/oe/utils.py
@@ -482,7 +482,7 @@ def get_multilib_datastore(variant, d):
482 localdata.setVar("MLPREFIX", "") 482 localdata.setVar("MLPREFIX", "")
483 return localdata 483 return localdata
484 484
485class ImageQAFailed(Exception): 485class ImageQAFailed(bb.BBHandledException):
486 def __init__(self, description, name=None, logfile=None): 486 def __init__(self, description, name=None, logfile=None):
487 self.description = description 487 self.description = description
488 self.name = name 488 self.name = name
diff --git a/meta/lib/oeqa/core/target/serial.py b/meta/lib/oeqa/core/target/serial.py
new file mode 100644
index 0000000000..7c2cd8b248
--- /dev/null
+++ b/meta/lib/oeqa/core/target/serial.py
@@ -0,0 +1,315 @@
1#
2# SPDX-License-Identifier: MIT
3#
4
5import base64
6import logging
7import os
8from threading import Lock
9from . import OETarget
10
11class OESerialTarget(OETarget):
12
13 def __init__(self, logger, target_ip, server_ip, server_port=0,
14 timeout=300, serialcontrol_cmd=None, serialcontrol_extra_args=None,
15 serialcontrol_ps1=None, serialcontrol_connect_timeout=None,
16 machine=None, **kwargs):
17 if not logger:
18 logger = logging.getLogger('target')
19 logger.setLevel(logging.INFO)
20 filePath = os.path.join(os.getcwd(), 'remoteTarget.log')
21 fileHandler = logging.FileHandler(filePath, 'w', 'utf-8')
22 formatter = logging.Formatter(
23 '%(asctime)s.%(msecs)03d %(levelname)s: %(message)s',
24 '%H:%M:%S')
25 fileHandler.setFormatter(formatter)
26 logger.addHandler(fileHandler)
27
28 super(OESerialTarget, self).__init__(logger)
29
30 if serialcontrol_ps1:
31 self.target_ps1 = serialcontrol_ps1
32 elif machine:
33 # fallback to a default value which assumes root@machine
34 self.target_ps1 = f'root@{machine}:.*# '
35 else:
36 raise ValueError("Unable to determine shell command prompt (PS1) format.")
37
38 if not serialcontrol_cmd:
39 raise ValueError("Unable to determine serial control command.")
40
41 if serialcontrol_extra_args:
42 self.connection_script = f'{serialcontrol_cmd} {serialcontrol_extra_args}'
43 else:
44 self.connection_script = serialcontrol_cmd
45
46 if serialcontrol_connect_timeout:
47 self.connect_timeout = serialcontrol_connect_timeout
48 else:
49 self.connect_timeout = 10 # default to 10s connection timeout
50
51 self.default_command_timeout = timeout
52 self.ip = target_ip
53 self.server_ip = server_ip
54 self.server_port = server_port
55 self.conn = None
56 self.mutex = Lock()
57
58 def start(self, **kwargs):
59 pass
60
61 def stop(self, **kwargs):
62 pass
63
64 def get_connection(self):
65 if self.conn is None:
66 self.conn = SerialConnection(self.connection_script,
67 self.target_ps1,
68 self.connect_timeout,
69 self.default_command_timeout)
70
71 return self.conn
72
73 def run(self, cmd, timeout=None):
74 """
75 Runs command on target over the provided serial connection.
76 The first call will open the connection, and subsequent
77 calls will re-use the same connection to send new commands.
78
79 command: Command to run on target.
80 timeout: <value>: Kill command after <val> seconds.
81 None: Kill command default value seconds.
82 0: No timeout, runs until return.
83 """
84 # Lock needed to avoid multiple threads running commands concurrently
85 # A serial connection can only be used by one caller at a time
86 with self.mutex:
87 conn = self.get_connection()
88
89 self.logger.debug(f"[Running]$ {cmd}")
90 # Run the command, then echo $? to get the command's return code
91 try:
92 output = conn.run_command(cmd, timeout)
93 status = conn.run_command("echo $?")
94 self.logger.debug(f" [stdout]: {output}")
95 self.logger.debug(f" [ret code]: {status}\n\n")
96 except SerialTimeoutException as e:
97 self.logger.debug(e)
98 output = ""
99 status = 255
100
101 # Return to $HOME after each command to simulate a stateless SSH connection
102 conn.run_command('cd "$HOME"')
103
104 return (int(status), output)
105
106 def copyTo(self, localSrc, remoteDst):
107 """
108 Copies files by converting them to base 32, then transferring
109 the ASCII text to the target, and decoding it in place on the
110 target.
111
112 On a 115k baud serial connection, this method transfers at
113 roughly 30kbps.
114 """
115 with open(localSrc, 'rb') as file:
116 data = file.read()
117
118 b32 = base64.b32encode(data).decode('utf-8')
119
120 # To avoid shell line limits, send a chunk at a time
121 SPLIT_LEN = 512
122 lines = [b32[i:i+SPLIT_LEN] for i in range(0, len(b32), SPLIT_LEN)]
123
124 with self.mutex:
125 conn = self.get_connection()
126
127 filename = os.path.basename(localSrc)
128 TEMP = f'/tmp/{filename}.b32'
129
130 # Create or empty out the temp file
131 conn.run_command(f'echo -n "" > {TEMP}')
132
133 for line in lines:
134 conn.run_command(f'echo -n {line} >> {TEMP}')
135
136 # Check to see whether the remoteDst is a directory
137 is_directory = conn.run_command(f'[[ -d {remoteDst} ]]; echo $?')
138 if int(is_directory) == 0:
139 # append the localSrc filename to the end of remoteDst
140 remoteDst = os.path.join(remoteDst, filename)
141
142 conn.run_command(f'base32 -d {TEMP} > {remoteDst}')
143 conn.run_command(f'rm {TEMP}')
144
145 return 0, 'Success'
146
147 def copyFrom(self, remoteSrc, localDst):
148 """
149 Copies files by converting them to base 32 on the target, then
150 transferring the ASCII text to the host. That text is then
151 decoded here and written out to the destination.
152
153 On a 115k baud serial connection, this method transfers at
154 roughly 30kbps.
155 """
156 with self.mutex:
157 b32 = self.get_connection().run_command(f'base32 {remoteSrc}')
158
159 data = base64.b32decode(b32.replace('\r\n', ''))
160
161 # If the local path is a directory, get the filename from
162 # the remoteSrc path and append it to localDst
163 if os.path.isdir(localDst):
164 filename = os.path.basename(remoteSrc)
165 localDst = os.path.join(localDst, filename)
166
167 with open(localDst, 'wb') as file:
168 file.write(data)
169
170 return 0, 'Success'
171
172 def copyDirTo(self, localSrc, remoteDst):
173 """
174 Copy recursively localSrc directory to remoteDst in target.
175 """
176
177 for root, dirs, files in os.walk(localSrc):
178 # Create directories in the target as needed
179 for d in dirs:
180 tmpDir = os.path.join(root, d).replace(localSrc, "")
181 newDir = os.path.join(remoteDst, tmpDir.lstrip("/"))
182 cmd = "mkdir -p %s" % newDir
183 self.run(cmd)
184
185 # Copy files into the target
186 for f in files:
187 tmpFile = os.path.join(root, f).replace(localSrc, "")
188 dstFile = os.path.join(remoteDst, tmpFile.lstrip("/"))
189 srcFile = os.path.join(root, f)
190 self.copyTo(srcFile, dstFile)
191
192 def deleteFiles(self, remotePath, files):
193 """
194 Deletes files in target's remotePath.
195 """
196
197 cmd = "rm"
198 if not isinstance(files, list):
199 files = [files]
200
201 for f in files:
202 cmd = "%s %s" % (cmd, os.path.join(remotePath, f))
203
204 self.run(cmd)
205
206 def deleteDir(self, remotePath):
207 """
208 Deletes target's remotePath directory.
209 """
210
211 cmd = "rmdir %s" % remotePath
212 self.run(cmd)
213
214 def deleteDirStructure(self, localPath, remotePath):
215 """
216 Delete recursively localPath structure directory in target's remotePath.
217
218 This function is useful to delete a package that is installed in the
219 device under test (DUT) and the host running the test has such package
220 extracted in tmp directory.
221
222 Example:
223 pwd: /home/user/tmp
224 tree: .
225 └── work
226 ├── dir1
227 │   └── file1
228 └── dir2
229
230 localpath = "/home/user/tmp" and remotepath = "/home/user"
231
232 With the above variables this function will try to delete the
233 directory in the DUT in this order:
234 /home/user/work/dir1/file1
235 /home/user/work/dir1 (if dir is empty)
236 /home/user/work/dir2 (if dir is empty)
237 /home/user/work (if dir is empty)
238 """
239
240 for root, dirs, files in os.walk(localPath, topdown=False):
241 # Delete files first
242 tmpDir = os.path.join(root).replace(localPath, "")
243 remoteDir = os.path.join(remotePath, tmpDir.lstrip("/"))
244 self.deleteFiles(remoteDir, files)
245
246 # Remove dirs if empty
247 for d in dirs:
248 tmpDir = os.path.join(root, d).replace(localPath, "")
249 remoteDir = os.path.join(remotePath, tmpDir.lstrip("/"))
250 self.deleteDir(remoteDir)
251
252class SerialTimeoutException(Exception):
253 def __init__(self, msg):
254 self.msg = msg
255 def __str__(self):
256 return self.msg
257
258class SerialConnection:
259
260 def __init__(self, script, target_prompt, connect_timeout, default_command_timeout):
261 import pexpect # limiting scope to avoid build dependency
262 self.prompt = target_prompt
263 self.connect_timeout = connect_timeout
264 self.default_command_timeout = default_command_timeout
265 self.conn = pexpect.spawn('/bin/bash', ['-c', script], encoding='utf8')
266 self._seek_to_clean_shell()
267 # Disable echo to avoid the need to parse the outgoing command
268 self.run_command('stty -echo')
269
270 def _seek_to_clean_shell(self):
271 """
272 Attempts to find a clean shell, meaning it is clear and
273 ready to accept a new command. This is necessary to ensure
274 the correct output is captured from each command.
275 """
276 import pexpect # limiting scope to avoid build dependency
277 # Look for a clean shell
278 # Wait a short amount of time for the connection to finish
279 pexpect_code = self.conn.expect([self.prompt, pexpect.TIMEOUT],
280 timeout=self.connect_timeout)
281
282 # if a timeout occurred, send an empty line and wait for a clean shell
283 if pexpect_code == 1:
284 # send a newline to clear and present the shell
285 self.conn.sendline("")
286 pexpect_code = self.conn.expect(self.prompt)
287
288 def run_command(self, cmd, timeout=None):
289 """
290 Runs command on target over the provided serial connection.
291 Returns any output on the shell while the command was run.
292
293 command: Command to run on target.
294 timeout: <value>: Kill command after <val> seconds.
295                 None: Kill command after the default timeout, in seconds.
296 0: No timeout, runs until return.
297 """
298 import pexpect # limiting scope to avoid build dependency
299 # Convert from the OETarget defaults to pexpect timeout values
300 if timeout is None:
301 timeout = self.default_command_timeout
302 elif timeout == 0:
303 timeout = None # passing None to pexpect is infinite timeout
304
305 self.conn.sendline(cmd)
306 pexpect_code = self.conn.expect([self.prompt, pexpect.TIMEOUT], timeout=timeout)
307
308 # check for timeout
309 if pexpect_code == 1:
310 self.conn.send('\003') # send Ctrl+C
311 self._seek_to_clean_shell()
312 raise SerialTimeoutException(f'Timeout executing: {cmd} after {timeout}s')
313
314 return self.conn.before.removesuffix('\r\n')
315
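
A self-contained illustration of the base32 transfer scheme used by copyTo()/copyFrom() above: the payload is base32-encoded, split into fixed-size ASCII chunks (512 characters, as in copyTo), then reassembled and decoded. Both ends are simulated in-process here; no serial connection or target-side base32 tool is involved.

    import base64

    payload = b"hello over a serial line\n" * 40

    # Sender side: encode and split into shell-safe chunks
    b32 = base64.b32encode(payload).decode("utf-8")
    SPLIT_LEN = 512
    lines = [b32[i:i + SPLIT_LEN] for i in range(0, len(b32), SPLIT_LEN)]

    # Receiver side: the target appends each chunk to a temp file and runs
    # 'base32 -d'; here we simply join the chunks and decode them.
    received = base64.b32decode("".join(lines))
    assert received == payload
    print(f"{len(lines)} chunk(s), {len(b32)} encoded characters")
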
diff --git a/meta/lib/oeqa/core/target/ssh.py b/meta/lib/oeqa/core/target/ssh.py
index 09cdd14c75..d473469384 100644
--- a/meta/lib/oeqa/core/target/ssh.py
+++ b/meta/lib/oeqa/core/target/ssh.py
@@ -55,14 +55,14 @@ class OESSHTarget(OETarget):
55 def stop(self, **kwargs): 55 def stop(self, **kwargs):
56 pass 56 pass
57 57
58 def _run(self, command, timeout=None, ignore_status=True): 58 def _run(self, command, timeout=None, ignore_status=True, raw=False):
59 """ 59 """
60 Runs command in target using SSHProcess. 60 Runs command in target using SSHProcess.
61 """ 61 """
62 self.logger.debug("[Running]$ %s" % " ".join(command)) 62 self.logger.debug("[Running]$ %s" % " ".join(command))
63 63
64 starttime = time.time() 64 starttime = time.time()
65 status, output = SSHCall(command, self.logger, timeout) 65 status, output = SSHCall(command, self.logger, timeout, raw)
66 self.logger.debug("[Command returned '%d' after %.2f seconds]" 66 self.logger.debug("[Command returned '%d' after %.2f seconds]"
67 "" % (status, time.time() - starttime)) 67 "" % (status, time.time() - starttime))
68 68
@@ -72,7 +72,7 @@ class OESSHTarget(OETarget):
72 72
73 return (status, output) 73 return (status, output)
74 74
75 def run(self, command, timeout=None, ignore_status=True): 75 def run(self, command, timeout=None, ignore_status=True, raw=False):
76 """ 76 """
77 Runs command in target. 77 Runs command in target.
78 78
@@ -91,7 +91,7 @@ class OESSHTarget(OETarget):
91 else: 91 else:
92 processTimeout = self.timeout 92 processTimeout = self.timeout
93 93
94 status, output = self._run(sshCmd, processTimeout, ignore_status) 94 status, output = self._run(sshCmd, processTimeout, ignore_status, raw)
95 self.logger.debug('Command: %s\nStatus: %d Output: %s\n' % (command, status, output)) 95 self.logger.debug('Command: %s\nStatus: %d Output: %s\n' % (command, status, output))
96 96
97 return (status, output) 97 return (status, output)
@@ -206,7 +206,7 @@ class OESSHTarget(OETarget):
206 remoteDir = os.path.join(remotePath, tmpDir.lstrip("/")) 206 remoteDir = os.path.join(remotePath, tmpDir.lstrip("/"))
207 self.deleteDir(remoteDir) 207 self.deleteDir(remoteDir)
208 208
209def SSHCall(command, logger, timeout=None, **opts): 209def SSHCall(command, logger, timeout=None, raw=False, **opts):
210 210
211 def run(): 211 def run():
212 nonlocal output 212 nonlocal output
@@ -265,7 +265,7 @@ def SSHCall(command, logger, timeout=None, **opts):
265 else: 265 else:
266 output_raw = process.communicate()[0] 266 output_raw = process.communicate()[0]
267 267
268 output = output_raw.decode('utf-8', errors='ignore') 268 output = output_raw if raw else output_raw.decode('utf-8', errors='ignore')
269 logger.debug('Data from SSH call:\n%s' % output.rstrip()) 269 logger.debug('Data from SSH call:\n%s' % output.rstrip())
270 270
271 # timout or not, make sure process exits and is not hanging 271 # timout or not, make sure process exits and is not hanging
@@ -292,7 +292,7 @@ def SSHCall(command, logger, timeout=None, **opts):
292 292
293 options = { 293 options = {
294 "stdout": subprocess.PIPE, 294 "stdout": subprocess.PIPE,
295 "stderr": subprocess.STDOUT, 295 "stderr": subprocess.STDOUT if not raw else None,
296 "stdin": None, 296 "stdin": None,
297 "shell": False, 297 "shell": False,
298 "bufsize": -1, 298 "bufsize": -1,
@@ -320,4 +320,4 @@ def SSHCall(command, logger, timeout=None, **opts):
320 logger.debug('Something went wrong, killing SSH process') 320 logger.debug('Something went wrong, killing SSH process')
321 raise 321 raise
322 322
323 return (process.returncode, output.rstrip()) 323 return (process.returncode, output if raw else output.rstrip())
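
A small standalone sketch of what the new raw flag changes in SSHCall() above: with raw=True the process output is returned as undecoded bytes, otherwise it is decoded with errors ignored and stripped, matching the modified lines (the sample bytes are invented):

    output_raw = b"binary \xff payload\r\n"

    def present(output_raw, raw=False):
        # Mirrors: output = output_raw if raw else output_raw.decode('utf-8', errors='ignore')
        # and the final: output if raw else output.rstrip()
        return output_raw if raw else output_raw.decode("utf-8", errors="ignore").rstrip()

    print(present(output_raw, raw=True))   # b'binary \xff payload\r\n'
    print(present(output_raw, raw=False))  # 'binary  payload'
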
diff --git a/meta/lib/oeqa/core/tests/common.py b/meta/lib/oeqa/core/tests/common.py
index 88cc758ad3..bcc4fde632 100644
--- a/meta/lib/oeqa/core/tests/common.py
+++ b/meta/lib/oeqa/core/tests/common.py
@@ -9,7 +9,6 @@ import os
9 9
10import unittest 10import unittest
11import logging 11import logging
12import os
13 12
14logger = logging.getLogger("oeqa") 13logger = logging.getLogger("oeqa")
15logger.setLevel(logging.INFO) 14logger.setLevel(logging.INFO)
diff --git a/meta/lib/oeqa/manual/crops.json b/meta/lib/oeqa/manual/crops.json
deleted file mode 100644
index 5cfa653843..0000000000
--- a/meta/lib/oeqa/manual/crops.json
+++ /dev/null
@@ -1,294 +0,0 @@
1[
2 {
3 "test": {
4 "@alias": "crops-default.crops-default.sdkext_eSDK_devtool_build_make",
5 "author": [
6 {
7 "email": "francisco.j.pedraza.gonzalez@intel.com",
8 "name": "francisco.j.pedraza.gonzalez@intel.com"
9 }
10 ],
11 "execution": {
12 "1": {
13 "action": "IMPORTANT NOTE: The firsts 5 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n\n",
14 "expected_results": ""
15 },
16 "2": {
17 "action": " Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n",
18 "expected_results": ""
19 },
20 "3": {
21 "action": "Create the following tree of files <crops-esdk-workdir-workspace>/sdkext/files/myapp <crops-esdk-workdir-workspace>/sdkext/files/myapp_cmake \n\n\n",
22 "expected_results": ""
23 },
24 "4": {
25 "action": " Create the following files withing the myapp directory myapp.c and the Makefile. Write the following inside of each file: \n---------------------------------------- \nMakefile should contain \n\nall: myapp \n\nmyapp: myapp.o \n\t$(CC) $(LDFLAGS) $< -o $@ \n\nmyapp.o: myapp.c \n\t$(CC) $(CFLAGS) -c $< -o $@ \n\nclean: \n\trm -rf myapp.o myapp \n\n----------------------------- \nmyapp.c shold contain \n\n\n#include <stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n \n\treturn 0; \n} \n------------------------------------ \n\n",
26 "expected_results": "be sure that the indentations on the makefile are tabs not spaces. \n\n"
27 },
28 "5": {
29 "action": " Create the following files within the myapp_cmake directory CMakeLists.txt and myapp.c. Write the following inside each file: \n\n------------------------------------ \nCMakeLists.txt should contain: \n\ncmake_minimum_required (VERSION 2.6) \nproject (myapp) \n# The version number. \nset (myapp_VERSION_MAJOR 1) \nset (myapp_VERSION_MINOR 0) \n\n# add the executable \nadd_executable (myapp myapp.c) \n\ninstall(TARGETS myapp \nRUNTIME DESTINATION bin) \n\n------------------------------------------ \nmyapp.c should contain: \n\n#include <stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n\n\treturn 0; \n} \n------------------------------------------------- \n\n",
30 "expected_results": "Be sure that the indentations on CMakeLists.txt is tabs not spaces."
31 },
32 "6": {
33 "action": " source environment-setup-i586-poky-linux \n\n",
34 "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n"
35 },
36 "7": {
37 "action": " run command which devtool \n\n",
38 "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n "
39 },
40 "8": {
41 "action": "devtool add myapp <directory>(this is myapp dir) \n\n\n",
42 "expected_results": "The directory you should input is the myapp directory. This should automatically create the recipe myapp.bb under <crops-esdk-workdir-workspace>/recipes/myapp/myapp.bb"
43 },
44 "9": {
45 "action": " devtool build myapp \n\n",
46 "expected_results": "This should compile an image"
47 },
48 "10": {
49 "action": " devtool reset myapp ",
50 "expected_results": "This cleans sysroot of the myapp recipe, but it leaves the source tree intact. meaning it does not erase."
51 }
52 },
53 "summary": "sdkext_eSDK_devtool_build_make"
54 }
55 },
56 {
57 "test": {
58 "@alias": "crops-default.crops-default.sdkext_devtool_build_esdk_package",
59 "author": [
60 {
61 "email": "francisco.j.pedraza.gonzalez@intel.com",
62 "name": "francisco.j.pedraza.gonzalez@intel.com"
63 }
64 ],
65 "execution": {
66 "1": {
67 "action": "IMPORTANT NOTE: The firsts 5 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n\n",
68 "expected_results": ""
69 },
70 "2": {
71 "action": " Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n",
72 "expected_results": ""
73 },
74 "3": {
75 "action": " Create the following tree of files <crops-esdk-workdir-workspace>/sdkext/files/myapp/ \n <crops-esdk-workdir-workspace>/sdkext/files/myapp_cmake \n\n",
76 "expected_results": ""
77 },
78 "4": {
79 "action": " Create the following files withing the myapp directory myapp.c and the Makefile. Write the following inside of each file: \n---------------------------------------- \nMakefile should contain \n\nall: myapp \n\nmyapp: myapp.o \n\t$(CC) $(LDFLAGS) $< -o $@ \n\nmyapp.o: myapp.c \n\t$(CC) $(CFLAGS) -c $< -o $@ \n\nclean: \n\trm -rf myapp.o myapp \n\n----------------------------- \nmyapp.c shold contain \n\n#include <stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n \n\treturn 0; \n} \n------------------------------------ \n\n",
80 "expected_results": "be sure that the indentations on the makefile are tabs not spaces. \n\n"
81 },
82 "5": {
83 "action": " Create the following files within the myapp_cmake directory CMakeLists.txt and myapp.c. Write the following inside each file: \n\n------------------------------------ \nCMakeLists.txt should contain: \n\ncmake_minimum_required (VERSION 2.6) \nproject (myapp) \n# The version number. \nset (myapp_VERSION_MAJOR 1) \nset (myapp_VERSION_MINOR 0) \n\n# add the executable \nadd_executable (myapp myapp.c) \n\ninstall(TARGETS myapp \nRUNTIME DESTINATION bin) \n\n------------------------------------------ \nmyapp.c should contain: \n\n#include<stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n\n\treturn 0; \n} \n------------------------------------------------- \n\n",
84 "expected_results": "Be sure that the indentations on CMakeLists.txt is tabs not spaces. \n\n"
85 },
86 "6": {
87 "action": " source environment-setup-i586-poky-linux \n\n",
88 "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n"
89 },
90 "7": {
91 "action": " run command which devtool \n\n",
92 "expected_results": " this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n"
93 },
94 "8": {
95 "action": " devtool add myapp <directory> (this is myapp dir) \n\n",
96 "expected_results": " The directory you should input is the myapp directory. This should automatically create the recipe myapp.bb under <crops-esdk-workdir-workspace>/recipes/myapp/myapp.bb \n\n"
97 },
98 "9": {
99 "action": " devtool package myapp \n\n",
100 "expected_results": " you should expect a package creation of myapp and it should be under the /tmp/deploy/ \n\n"
101 },
102 "10": {
103 "action": " devtool reset myapp ",
104 "expected_results": "This cleans sysroot of the myapp recipe, but it leaves the source tree intact. meaning it does not erase.\n</package_format>"
105 }
106 },
107 "summary": "sdkext_devtool_build_esdk_package"
108 }
109 },
110 {
111 "test": {
112 "@alias": "crops-default.crops-default.sdkext_devtool_build_cmake",
113 "author": [
114 {
115 "email": "francisco.j.pedraza.gonzalez@intel.com",
116 "name": "francisco.j.pedraza.gonzalez@intel.com"
117 }
118 ],
119 "execution": {
120 "1": {
121 "action": "IMPORTANT NOTE: The firsts 5 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n\n",
122 "expected_results": ""
123 },
124 "2": {
125 "action": " Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n",
126 "expected_results": ""
127 },
128 "3": {
129 "action": " Create the following tree of files <crops-esdk-workdir-workspace>/sdkext/files/myapp \n <crops-esdk-workdir-workspace>/sdkext/files/myapp_cmake \n\n",
130 "expected_results": ""
131 },
132 "4": {
133 "action": " Create the following files withing the myapp directory myapp.c and the Makefile. Write the following inside of each file: \n---------------------------------------- \nMakefile should contain \n\nall: myapp \n\nmyapp: myapp.o \n\t$(CC) $(LDFLAGS) $< -o $@ \n\nmyapp.o: myapp.c \n\t$(CC) $(CFLAGS) -c $< -o $@ \n\nclean: \n\trm -rf myapp.o myapp \n\n----------------------------- \nmyapp.c shold contain \n\n#include <stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n \n\treturn 0; \n} \n------------------------------------ \n\n",
134 "expected_results": "be sure that the indentations on the makefile are tabs not spaces. \n\n"
135 },
136 "5": {
137 "action": " Create the following files within the myapp_cmake directory CMakeLists.txt and myapp.c. Write the following inside each file: \n\n------------------------------------ \nCMakeLists.txt should contain: \n\ncmake_minimum_required (VERSION 2.6) \nproject (myapp) \n# The version number. \nset (myapp_VERSION_MAJOR 1) \nset (myapp_VERSION_MINOR 0) \n\n# add the executable \nadd_executable (myapp myapp.c) \n\ninstall(TARGETS myapp \nRUNTIME DESTINATION bin) \n\n------------------------------------------ \nmyapp.c should contain: \n\n#include \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n\n\treturn 0; \n} \n------------------------------------------------- \n\n",
138 "expected_results": "Be sure that the indentations on CMakeLists.txt is tabs not spaces. \n\n"
139 },
140 "6": {
141 "action": " source environment-setup-i586-poky-linux \n\n",
142 "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n"
143 },
144 "7": {
145 "action": " run command which devtool \n\n",
146 "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n"
147 },
148 "8": {
149 "action": " devtool add myapp <directory> (this is myapp_cmake dir) \n\n",
150 "expected_results": "The directory you should input is the myapp_cmake directory. This should automatically create the recipe myapp.bb under <crops-esdk-workdir-workspace>/recipes/myapp/myapp.bb \n\n"
151 },
152 "9": {
153 "action": " devtool build myapp \n\n",
154 "expected_results": "This should compile an image \n\n"
155 },
156 "10": {
157 "action": " devtool reset myapp ",
158 "expected_results": "This cleans sysroot of the myapp recipe, but it leaves the source tree intact. meaning it does not erase. "
159 }
160 },
161 "summary": "sdkext_devtool_build_cmake"
162 }
163 },
164 {
165 "test": {
166 "@alias": "crops-default.crops-default.sdkext_extend_autotools_recipe_creation",
167 "author": [
168 {
169 "email": "francisco.j.pedraza.gonzalez@intel.com",
170 "name": "francisco.j.pedraza.gonzalez@intel.com"
171 }
172 ],
173 "execution": {
174 "1": {
175 "action": "IMPORTANT NOTE: The firsts 2 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n\n",
176 "expected_results": ""
177 },
178 "2": {
179 "action": "Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n",
180 "expected_results": ""
181 },
182 "3": {
183 "action": " source environment-setup-i586-poky-linux \n\n",
184 "expected_results": " This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n"
185 },
186 "4": {
187 "action": "run command which devtool \n\n",
188 "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n"
189 },
190 "5": {
191 "action": "devtool sdk-install -s libxml2 \n\n",
192 "expected_results": "this should install libxml2 \n\n"
193 },
194 "6": {
195 "action": "devtool add librdfa https://github.com/rdfa/librdfa \n\n",
196 "expected_results": "This should automatically create the recipe librdfa.bb under /recipes/librdfa/librdfa.bb \n\n"
197 },
198 "7": {
199 "action": "devtool build librdfa \n\n",
200 "expected_results": "This should compile \n\n"
201 },
202 "8": {
203 "action": "devtool reset librdfa ",
204 "expected_results": "This cleans sysroot of the librdfa recipe, but it leaves the source tree intact. meaning it does not erase."
205 }
206 },
207 "summary": "sdkext_extend_autotools_recipe_creation"
208 }
209 },
210 {
211 "test": {
212 "@alias": "crops-default.crops-default.sdkext_devtool_kernelmodule",
213 "author": [
214 {
215 "email": "francisco.j.pedraza.gonzalez@intel.com",
216 "name": "francisco.j.pedraza.gonzalez@intel.com"
217 }
218 ],
219 "execution": {
220 "1": {
221 "action": "IMPORTANT NOTE: The firsts 2 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n",
222 "expected_results": ""
223 },
224 "2": {
225 "action": " Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n",
226 "expected_results": ""
227 },
228 "3": {
229 "action": "source environment-setup-i586-poky-linux \n\n",
230 "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n \n"
231 },
232 "4": {
233 "action": "run command which devtool \n\n",
234 "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n"
235 },
236 "5": {
237 "action": "devtool add kernel-module-hello-world https://git.yoctoproject.org/git/kernel-module-hello-world \n\n",
238 "expected_results": "This should automatically create the recipe kernel-module-hello-world.bb under <crops-esdk-workdir-workspace>/recipes/kernel-module-hello-world/kernel-module-hello-world.bb "
239 },
240 "6": {
241 "action": "devtool build kernel-module-hello-world \n\n",
242 "expected_results": "This should compile an image \n\n"
243 },
244 "7": {
245 "action": "devtool reset kernel-module-hello-world ",
246 "expected_results": "This cleans sysroot of the kernel-module-hello-world recipe, but it leaves the source tree intact. meaning it does not erase."
247 }
248 },
249 "summary": "sdkext_devtool_kernelmodule"
250 }
251 },
252 {
253 "test": {
254 "@alias": "crops-default.crops-default.sdkext_recipes_for_nodejs",
255 "author": [
256 {
257 "email": "francisco.j.pedraza.gonzalez@intel.com",
258 "name": "francisco.j.pedraza.gonzalez@intel.com"
259 }
260 ],
261 "execution": {
262 "1": {
263 "action": "IMPORTANT NOTE: The firsts 2 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\nlets say variable npm = npm://registry.npmjs.org;name=winston;version=2.2.0 \n\n",
264 "expected_results": ""
265 },
266 "2": {
267 "action": "Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n",
268 "expected_results": ""
269 },
270 "3": {
271 "action": "source environment-setup-i586-poky-linux \n\n",
272 "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n"
273 },
274 "4": {
275 "action": "run command which devtool \n\n",
276 "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n"
277 },
278 "5": {
279 "action": " 4a) git clone git://git.openembedded.org/meta-openembedded in layers/build directory \n \n4b) Add meta-openembedded/meta-oe in bblayer.conf as mentioned below: ${SDKBASEMETAPATH}/layers/build/meta-openembedded/meta-oe \\ \n\n4c) devtool add \"npm://registry.npmjs.org;name=npm;version=2.2.0\" \n\n",
280 "expected_results": " This should automatically create the recipe npm.bb under /recipes/npm/npm.bb \n\n"
281 },
282 "6": {
283 "action": "devtool build npm \n\n",
284 "expected_results": "This should compile an image \n\n"
285 },
286 "7": {
287 "action": " devtool reset npm",
288 "expected_results": "This cleans sysroot of the npm recipe, but it leaves the source tree intact. meaning it does not erase."
289 }
290 },
291 "summary": "sdkext_recipes_for_nodejs"
292 }
293 }
294]
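
The crops.json cases above walk through a manual devtool add/build/reset cycle. As a rough sketch only, the same sequence can be driven from Python with subprocess; the recipe name "myapp" and the source path are placeholders taken from the manual steps, and an eSDK environment is assumed to be already sourced in the calling shell:

import subprocess

def run(cmd):
    # Run a devtool command in the already-sourced eSDK environment; raise on failure.
    return subprocess.run(cmd, shell=True, check=True, capture_output=True, text=True)

def devtool_roundtrip(recipe, source):
    # Mirror the manual steps: add a recipe from a source tree, build it, then reset it.
    run("devtool add %s %s" % (recipe, source))
    try:
        run("devtool build %s" % recipe)
    finally:
        # reset cleans the sysroot but leaves the source tree intact
        run("devtool reset %s" % recipe)

if __name__ == "__main__":
    # "myapp" and the path below are placeholders taken from the manual test steps.
    devtool_roundtrip("myapp", "/path/to/sdkext/files/myapp")

Running devtool reset in a finally block mirrors the expected result of the last step: the workspace is cleaned up even if the build fails.
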
diff --git a/meta/lib/oeqa/manual/eclipse-plugin.json b/meta/lib/oeqa/manual/eclipse-plugin.json
deleted file mode 100644
index 6c110d0656..0000000000
--- a/meta/lib/oeqa/manual/eclipse-plugin.json
+++ /dev/null
@@ -1,322 +0,0 @@
1[
2 {
3 "test": {
4 "@alias": "eclipse-plugin.eclipse-plugin.support_SSH_connection_to_Target",
5 "author": [
6 {
7 "email": "ee.peng.yeoh@intel.com",
8 "name": "ee.peng.yeoh@intel.com"
9 }
10 ],
11 "execution": {
12 "1": {
13 "action": "In Eclipse, swich to Remote System Explorer to create a connention baseed on SSH, input the remote target IP address as the Host name, make sure disable the proxy in Window->Preferences->General->Network Connection, set Direct as Active Provider field. ",
14 "expected_results": "the connection based on SSH could be set up."
15 },
16 "2": {
17 "action": "Configure connection from Eclipse: Run->Run Configurations->C/C++ Remote Application\\ ->New Connection->General->SSH Only ",
18 "expected_results": ""
19 },
20 "3": {
21 "action": "Then right click to connect, input the user ID and password. ",
22 "expected_results": ""
23 },
24 "4": {
25 "action": "expand the connection, it will show the Sftp Files etc. \nNOTE. Might need to change dropbear to openssh and add the packagegroup-core-eclipse-debug recipe",
26 "expected_results": ""
27 }
28 },
29 "summary": "support_SSH_connection_to_Target"
30 }
31 },
32 {
33 "test": {
34 "@alias": "eclipse-plugin.eclipse-plugin.Launch_QEMU_from_Eclipse",
35 "author": [
36 {
37 "email": "ee.peng.yeoh@intel.com",
38 "name": "ee.peng.yeoh@intel.com"
39 }
40 ],
41 "execution": {
42 "1": {
43 "action": "Set the Yocto ADT's toolchain root location, sysroot location and kernel, in the menu Window -> Preferences -> Yocto ADT. \n \n",
44 "expected_results": ""
45 },
46 "2": {
47 "action": "wget https://downloads.yoctoproject.org/releases/yocto/yocto-$VERSION/machines/qemu/qemux86/ (ex:core-image-sato-sdk-qemux86-date-rootfs-tar-bz2) \nsource /opt/poky/version/environment-setup-i585-poky-linux \n\nExtract qemu with runqemu-extract-sdk /home/user/file(ex.core-image-sato-sdk-qemux86.bz2) \n/home/user/qemux86-sato-sdk \n\n",
48 "expected_results": " Qemu can be lauched normally."
49 },
50 "3": {
51 "action": "(a)Point to the Toolchain: \n \nIf you are using a stand-alone pre-built toolchain, you should be pointing to the /opt/poky/{test-version} directory as Toolchain Root Location. This is the default location for toolchains installed by the ADT Installer or by hand. If ADT is installed in other location, use that location as Toolchain location.\nIf you are using a system-derived toolchain, the path you provide for the Toolchain Root Location field is the Yocto Project's build directory. \n \n E.g:/home/user/yocto/poky/build \n",
52 "expected_results": ""
53 },
54 "4": {
55 "action": "(b)Specify the Sysroot Location: \nSysroot Location is the location where the root filesystem for the target hardware is created on the development system by the ADT Installer (SYSROOT in step 2 of the case ADT installer Installation). \n \n Local : e.g: /home/user/qemux86-sato-sdk \nUsing ADT : e.g :/home/user/test-yocto/qemux86 \n\n",
56 "expected_results": ""
57 },
58 "5": {
59 "action": "(c)Select the Target Architecture: \n \nThe target architecture is the type of hardware you are going to use or emulate. Use the pull-down Target Architecture menu to make your selection. \n \n\n",
60 "expected_results": ""
61 },
62 "6": {
63 "action": "(d) QEMU: \nSelect this option if you will be using the QEMU emulator. Specify the Kernel matching the QEMU architecture you are using. \n wget https://downloads.yoctoproject.org/releases/yocto/yocto-$VERSION/machines/qemu/qemux86/bzImage-qemux86.bin \n e.g: /home/$USER/yocto/adt-installer/download_image/bzImage-qemux86.bin \n\n",
64 "expected_results": ""
65 },
66 "7": {
67 "action": "(e) select OK to save the settings. \n\n\n1: In the Eclipse toolbar, expose the Run -> External Tools menu. Your image should appear as a selectable menu item. \n2: Select your image in the navigation pane to launch the emulator in a new window. \n3: If needed, enter your host root password in the shell window at the prompt. This sets up a Tap 0 connection needed for running in user-space NFS mode. \n",
68 "expected_results": ""
69 }
70 },
71 "summary": "Launch_QEMU_from_Eclipse"
72 }
73 },
74 {
75 "test": {
76 "@alias": "eclipse-plugin.eclipse-plugin.Relocatable_SDK_-_C_-_Build_Hello_World_ANSI_C_Autotools_Project",
77 "author": [
78 {
79 "email": "ee.peng.yeoh@intel.com",
80 "name": "ee.peng.yeoh@intel.com"
81 }
82 ],
83 "execution": {
84 "1": {
85 "action": "Launch a QEMU of target environment.(Reference to case \"ADT - Launch qemu by eclipse\") ",
86 "expected_results": ""
87 },
88 "2": {
89 "action": "Select File -> New -> Project.",
90 "expected_results": ""
91 },
92 "3": {
93 "action": "Double click C/C++.",
94 "expected_results": ""
95 },
96 "4": {
97 "action": "Click C or C++ Project to create the project.",
98 "expected_results": ""
99 },
100 "5": {
101 "action": "Expand Yocto ADT Project.",
102 "expected_results": ""
103 },
104 "6": {
105 "action": "Select Hello World ANSI C Autotools Project.",
106 "expected_results": ""
107 },
108 "7": {
109 "action": "Put a name in the Project name. Do not use hyphens as part of the name. \n \n",
110 "expected_results": ""
111 },
112 "8": {
113 "action": "Click Next.",
114 "expected_results": ""
115 },
116 "9": {
117 "action": "Add information in the Author and Copyright notice fields. \n1",
118 "expected_results": ""
119 },
120 "10": {
121 "action": "Click Finish. \n1",
122 "expected_results": ""
123 },
124 "11": {
125 "action": "If the \"open perspective\" prompt appears, click \"Yes\" so that you open the C/C++ perspective. \n1",
126 "expected_results": ""
127 },
128 "12": {
129 "action": "In the Project Explorer window, right click the project -> Reconfigure project. \n1",
130 "expected_results": ""
131 },
132 "13": {
133 "action": "In the Project Explorer window, right click the project -> Build project. \n1",
134 "expected_results": "Under the Project files, a new folder appears called Binaries. This indicates that the compilation have been successful and the project binary have been created. \n"
135 },
136 "14": {
137 "action": "Right click it again and Run as -> Run Configurations. \n\t\t\tUnder Run Configurations expand \"C/C++ Remote Application\". A configuration for the current project should appear. Clicking it will display the configuration settings. \n\t\t\tin \"C/C++ Application\" field input Remote Absolute File path for C/C++ Application. e.g.: /home/root/myapplication \n\t\t\tIn \"Connection\" drop-down list make sure a TCF connection is set up for your target. If not, create a new one by clicking the New button. \n1",
138 "expected_results": "step 14 to step 16 -> Build succeed and the console outputs Hello world, you can also check the output on target."
139 },
140 "15": {
141 "action": "After all settings are done, select the Run button on the bottom right corner \n\n1",
142 "expected_results": ""
143 },
144 "16": {
145 "action": "Repeat the steps 14-15, but instead of using Run Configurations use Debug Configurations: \nRight click it again and Debug as -> Debug Configurations \nUnder Debug Configurations expand \"C/C++ Remote Application\". A configuration for the current project should appear. Clicking it will display the configuration settings. \nin \"C/C++ Application\" field input Remote Absolute File path for C/C++ Application.\ne.g.: /home/root/myapplication \nIn \"Connection\" drop-down list make sure a TCF connection is set up for your target. If not, create a new one by clicking the New button \n1",
146 "expected_results": ""
147 },
148 "17": {
149 "action": "After all settings are done, select the Debug button on the bottom right corner",
150 "expected_results": ""
151 }
152 },
153 "summary": "Relocatable_SDK_-_C_-_Build_Hello_World_ANSI_C_Autotools_Project"
154 }
155 },
156 {
157 "test": {
158 "@alias": "eclipse-plugin.eclipse-plugin.Relocatable_SDK_-_C++_-_Build_Hello_World_C++_Autotools_project",
159 "author": [
160 {
161 "email": "ee.peng.yeoh@intel.com",
162 "name": "ee.peng.yeoh@intel.com"
163 }
164 ],
165 "execution": {
166 "1": {
167 "action": "Launch a QEMU of target environment.(Reference to case \"ADT - Launch qemu by eclipse\") ",
168 "expected_results": ""
169 },
170 "2": {
171 "action": "Select File -> New -> Project. ",
172 "expected_results": ""
173 },
174 "3": {
175 "action": "Double click C/C++. ",
176 "expected_results": ""
177 },
178 "4": {
179 "action": "Click C or C++ Project to create the project. ",
180 "expected_results": ""
181 },
182 "5": {
183 "action": "Expand Yocto ADT Project. ",
184 "expected_results": ""
185 },
186 "6": {
187 "action": "Select Hello World ANSI C++ Autotools Project. ",
188 "expected_results": ""
189 },
190 "7": {
191 "action": "Put a name in the Project name. Do not use hyphens as part of the name. \n \n",
192 "expected_results": ""
193 },
194 "8": {
195 "action": "Click Next.",
196 "expected_results": ""
197 },
198 "9": {
199 "action": "Add information in the Author and Copyright notice fields.",
200 "expected_results": ""
201 },
202 "10": {
203 "action": "Click Finish. \n1",
204 "expected_results": ""
205 },
206 "11": {
207 "action": "If the \"open perspective\" prompt appears, click \"Yes\" so that you open the C/C++ perspective. \n1",
208 "expected_results": ""
209 },
210 "12": {
211 "action": "In the Project Explorer window, right click the project -> Reconfigure project. \n1",
212 "expected_results": ""
213 },
214 "13": {
215 "action": "In the Project Explorer window, right click the project -> Build project. \n\n1",
216 "expected_results": "under the Project files, a new folder appears called Binaries. This indicates that the compilation have been successful and the project binary have been created. \n"
217 },
218 "14": {
219 "action": "Right click it again and Run as -> Run Configurations. \n\t\t\tUnder Run Configurations expand \"C/C++ Remote Application\". A configuration for the current project should appear. Clicking it will display the configuration settings. \n\t\t\tin \"C/C++ Application\" field input Remote Absolute File path for C/C++ Application. e.g.: /home/root/myapplication \n\t\t\tIn \"Connection\" drop-down list make sure a TCF connection is set up for your target. If not, create a new one by clicking the New button. \n1",
220 "expected_results": "step 14 to step 16 -> Build succeed and the console outputs Hello world, you can also check the output on target."
221 },
222 "15": {
223 "action": "After all settings are done, select the Run button on the bottom right corner \n\n1",
224 "expected_results": ""
225 },
226 "16": {
227 "action": "Repeat the steps 14-15, but instead of using Run Configurations use Debug Configurations: \n\t\tRight click it again and Debug as -> Debug Configurations \n\t\tUnder Debug Configurations expand \"C/C++ Remote Application\". A configuration for the current project should appear. Clicking it will display the configuration settings. \n\t\tin \"C/C++ Application\" field input Remote Absolute File path for C/C++ Application. \n\t\te.g.: /home/root/myapplication \n\t\tIn \"Connection\" drop-down list make sure a TCF connection is set up for your target. If not, create a new one by clicking the New button \n1",
228 "expected_results": ""
229 },
230 "17": {
231 "action": "After all settings are done, select the Debug button on the bottom right corner",
232 "expected_results": ""
233 }
234 },
235 "summary": "Relocatable_SDK_-_C++_-_Build_Hello_World_C++_Autotools_project"
236 }
237 },
238 {
239 "test": {
240 "@alias": "eclipse-plugin.eclipse-plugin.Build_Eclipse_Plugin_from_source",
241 "author": [
242 {
243 "email": "laurentiu.serban@intel.com",
244 "name": "laurentiu.serban@intel.com"
245 }
246 ],
247 "execution": {
248 "1": {
249 "action": "Clone eclipse-poky source. \n \n - git clone git://git.yoctoproject.org/eclipse-poky \n\n",
250 "expected_results": "Eclipse plugin is successfully installed \n\nDocumentation is there. For example if you have release yocto-2.0.1 you will found on https://downloads.yoctoproject.org/releases/yocto/yocto-2.0.1/eclipse-plugin/mars/ archive with documentation like org.yocto.doc-development-$date.zip \n \n"
251 },
252 "2": {
253 "action": "Checkout correct tag. \n\n - git checkout <eclipse-version>/<yocto-version> \n\n",
254 "expected_results": "After plugin is build you must have 4 archive in foder scripts from eclipse-poky: \n - org.yocto.bc - mars-master-$date.zip \n - org.yocto.doc - mars-master-$date.zip --> documentation \n - org.yocto.sdk - mars-master-$date.zip \n - org.yocto.sdk - mars-master-$date.-archive.zip --> plugin "
255 },
256 "3": {
257 "action": "Move to scripts/ folder. \n\n",
258 "expected_results": ""
259 },
260 "4": {
261 "action": "Run ./setup.sh \n\n",
262 "expected_results": ""
263 },
264 "5": {
265 "action": "When the script finishes, it prompts a command to issue to build the plugin. It should look similar to the following: \n\n$ ECLIPSE_HOME=/eclipse-poky/scripts/eclipse ./build.sh /&1 | tee -a build.log \n\nHere, the three arguments to the build script are tag name, branch for documentation and release name. \n\n",
266 "expected_results": ""
267 },
268 "6": {
269 "action": "On an eclipse without the Yocto Plugin, select \"Install New Software\" from Help pull-down menu \n\n",
270 "expected_results": ""
271 },
272 "7": {
273 "action": "Select Add and from the dialog choose Archive... Look for the *archive.zip file that was built previously with the build.sh script. Click OK. \n\n",
274 "expected_results": ""
275 },
276 "8": {
277 "action": "Select all components and proceed with Installation of plugin. Restarting eclipse might be required.\n",
278 "expected_results": ""
279 }
280 },
281 "summary": "Build_Eclipse_Plugin_from_source"
282 }
283 },
284 {
285 "test": {
286 "@alias": "eclipse-plugin.eclipse-plugin.Eclipse_Poky_installation_and_setup",
287 "author": [
288 {
289 "email": "ee.peng.yeoh@intel.com",
290 "name": "ee.peng.yeoh@intel.com"
291 }
292 ],
293 "execution": {
294 "1": {
295 "action": "Install SDK \n\ta)Download https://autobuilder.yocto.io/pub/releases//toolchain/x86_64/poky-glibc-x86_64-core-\timage-sato-i586-toolchain-.sh \n\tb)Run the SDK installer and accept the default installation directory ",
296 "expected_results": ""
297 },
298 "2": {
299 "action": "Install \"Eclipse IDE for C/C++ Developers\" Oxygen release (4.7.0) \n\ta) Go to https://www.eclipse.org/downloads/packages/all, click \"Oxygen R\" \n\tb) Click to download the build for your OS \n\tc) Click \"Download\" button to download from a mirror \n\td) Run \"tar xf\" to extract the downloaded archive ",
300 "expected_result": ""
301 },
302 "3": {
303 "action": "Install \"Eclipse IDE for C/C++ Developers\" Oxygen release (4.7.0) (Continue) \n\te) Run \"eclipse/eclipse\" to start Eclipse \n\tf) Optional step for host machine within Intel network: In Eclipse workbench window, go to \"Window\" menu -> \"Preferences...\". \n\tg) In \"Preferences\" dialog, go to \"General\" -> \"Network Connections\", set \"Active Provider\" to \"Manual\". In \"Proxy \tentries\" table, select HTTP and click \"Edit\" and enter host \"proxy-chain.intel.com\" port 911, click OK. Repeat for HTTPS with port 912 \nClick OK to close \"Preferences\" dialog. \n\th) Go to \"File\" menu -> \"Restart\" to restart Eclipse for proxy settings to take effect. ",
304 "expected_result": ""
305 },
306 "4": {
307 "action": "Install Eclipse Poky plugins \n\ta) Download https://autobuilder.yocto.io/pub/releases/<yocto-version>/eclipse-plugin/<eclipse-version>/org.yocto.sdk-development-<date>-archive.zip \n\tb) In Eclipse workbench window, go to \"Help\" menu -> \"Install New Software...\" \n\tc) In \"Install\" dialog, click \"Add...\" button \n\td) In \"Add Repository\" dialog, enter \"Eclipse Poky\" for (repository) Name, click \"Archive...\" ",
308 "expected_results": ""
309 },
310 "5": {
311 "action": "Install Eclipse Poky plugins (continue) \n\te) In \"Repository archive\" browse dialog, select the downloaded Eclipse Poky repository archive \n\tf) Back in \"Add Repository\" dialog, click \"OK\" \n\tg) Back in \"Install\" dialog, make sure \"Work with:\" is set to \"Eclipse Poky\" repository, tick \"Yocto Project \tDocumentation Plug-in\" and \"Yocto Project SDK Plug-in\", click \"Next >\" and verify plugins/features name/version, \tclick \"Next >\" and accept license agreement, click \"Finish\" \n\th) If \"Security Warning\" dialog appears, click \"OK\" to install unsigned content. \n\ti) In \"Software Updates\" dialog, click \"Yes\" to restart Eclipse to complete Eclipse Poky plugins installation. ",
312 "expected_results": ""
313 },
314 "6": {
315 "action": "Setup Eclipse Poky to use SDK \n\ta) In Eclipse workbench window, go to \"Window\" menu -> \"Preferences\". \n\tb) In \"Preferences\" window, go to \"Yocto Project SDK\", in \"Cross Compiler Options\" frame, select \"Standalone pre-\tbuilt toolchain\". ",
316 "expected_results": "Eclipse Poky plugins installed and running successfully, e.g. observe that \"Yocto Project Tools\" menu is available on Eclipse workbench window."
317 }
318 },
319 "summary": "Eclipse_Poky_installation_and_setup"
320 }
321 }
322]
diff --git a/meta/lib/oeqa/runtime/cases/ltp.py b/meta/lib/oeqa/runtime/cases/ltp.py
index f588a93200..e81360670c 100644
--- a/meta/lib/oeqa/runtime/cases/ltp.py
+++ b/meta/lib/oeqa/runtime/cases/ltp.py
@@ -57,7 +57,7 @@ class LtpTestBase(OERuntimeTestCase):
57 57
58class LtpTest(LtpTestBase): 58class LtpTest(LtpTestBase):
59 59
60 ltp_groups = ["math", "syscalls", "dio", "io", "mm", "ipc", "sched", "nptl", "pty", "containers", "controllers", "filecaps", "cap_bounds", "fcntl-locktests", "commands", "net.ipv6_lib", "input","fs_perms_simple", "cve", "crypto", "ima", "net.nfs", "net_stress.ipsec_icmp", "net.ipv6", "numa", "uevent", "ltp-aiodio.part1", "ltp-aiodio.part2", "ltp-aiodio.part3", "ltp-aiodio.part4"] 60 ltp_groups = ["math", "syscalls", "dio", "mm", "ipc", "sched", "nptl", "pty", "containers", "controllers", "fcntl-locktests", "commands", "net.ipv6_lib", "input","fs_perms_simple", "cve", "crypto", "ima", "net.nfs", "net_stress.ipsec_icmp", "net.ipv6", "numa", "uevent", "ltp-aiodio.part1", "ltp-aiodio.part2", "ltp-aiodio.part3", "ltp-aiodio.part4"]
61 61
62 ltp_fs = ["fs", "fs_bind"] 62 ltp_fs = ["fs", "fs_bind"]
63 # skip kernel cpuhotplug 63 # skip kernel cpuhotplug
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt
index 2c0bd9a247..9c2677c4cf 100644
--- a/meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt
@@ -1,2 +1,8 @@
1# These should be reviewed to see if they are still needed 1# These should be reviewed to see if they are still needed
2cacheinfo: Failed to find cpu0 device node 2cacheinfo: Failed to find cpu0 device node
3
4# 6.10 restructures sysctl registration such that mips
5# registers an empty table and generates harmless warnings:
6# failed when register_sysctl_sz sched_fair_sysctls to kernel
7# failed when register_sysctl_sz sched_core_sysctls to kernel
8failed when register_sysctl_sz sched
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt
index b0c0fc9ddf..143db40d63 100644
--- a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt
@@ -13,6 +13,14 @@ FBIOPUT_VSCREENINFO failed, double buffering disabled
13# pci 0000:00:00.0: [Firmware Bug]: reg 0x20: invalid BAR (can't size) 13# pci 0000:00:00.0: [Firmware Bug]: reg 0x20: invalid BAR (can't size)
14# pci 0000:00:00.0: [Firmware Bug]: reg 0x24: invalid BAR (can't size) 14# pci 0000:00:00.0: [Firmware Bug]: reg 0x24: invalid BAR (can't size)
15invalid BAR (can't size) 15invalid BAR (can't size)
16# 6.10+ the invalid BAR warnings are of this format:
17# pci 0000:00:00.0: [Firmware Bug]: BAR 0: invalid; can't size
18# pci 0000:00:00.0: [Firmware Bug]: BAR 1: invalid; can't size
19# pci 0000:00:00.0: [Firmware Bug]: BAR 2: invalid; can't size
20# pci 0000:00:00.0: [Firmware Bug]: BAR 3: invalid; can't size
21# pci 0000:00:00.0: [Firmware Bug]: BAR 4: invalid; can't size
22# pci 0000:00:00.0: [Firmware Bug]: BAR 5: invalid; can't size
23invalid; can't size
16 24
17# These should be reviewed to see if they are still needed 25# These should be reviewed to see if they are still needed
18wrong ELF class 26wrong ELF class
diff --git a/meta/lib/oeqa/runtime/cases/parselogs.py b/meta/lib/oeqa/runtime/cases/parselogs.py
index 6966923c94..47c77fccd5 100644
--- a/meta/lib/oeqa/runtime/cases/parselogs.py
+++ b/meta/lib/oeqa/runtime/cases/parselogs.py
@@ -34,7 +34,7 @@ class ParseLogsTest(OERuntimeTestCase):
34 log_locations = ["/var/log/", "/var/log/dmesg", "/tmp/dmesg_output.log"] 34 log_locations = ["/var/log/", "/var/log/dmesg", "/tmp/dmesg_output.log"]
35 35
36 # The keywords that identify error messages in the log files 36 # The keywords that identify error messages in the log files
37 errors = ["error", "cannot", "can't", "failed"] 37 errors = ["error", "cannot", "can't", "failed", "---[ cut here ]---", "No irq handler for vector"]
38 38
39 # A list of error messages that should be ignored 39 # A list of error messages that should be ignored
40 ignore_errors = [] 40 ignore_errors = []
diff --git a/meta/lib/oeqa/runtime/cases/scp.py b/meta/lib/oeqa/runtime/cases/scp.py
index ee97b8ef66..364264369a 100644
--- a/meta/lib/oeqa/runtime/cases/scp.py
+++ b/meta/lib/oeqa/runtime/cases/scp.py
@@ -25,7 +25,7 @@ class ScpTest(OERuntimeTestCase):
25 os.remove(cls.tmp_path) 25 os.remove(cls.tmp_path)
26 26
27 @OETestDepends(['ssh.SSHTest.test_ssh']) 27 @OETestDepends(['ssh.SSHTest.test_ssh'])
28 @OEHasPackage(['openssh-scp']) 28 @OEHasPackage({'openssh-scp', 'openssh-sftp-server'})
29 def test_scp_file(self): 29 def test_scp_file(self):
30 dst = '/tmp/test_scp_file' 30 dst = '/tmp/test_scp_file'
31 31
diff --git a/meta/lib/oeqa/runtime/cases/ssh.py b/meta/lib/oeqa/runtime/cases/ssh.py
index cdbef59500..b86428002f 100644
--- a/meta/lib/oeqa/runtime/cases/ssh.py
+++ b/meta/lib/oeqa/runtime/cases/ssh.py
@@ -4,6 +4,9 @@
4# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
5# 5#
6 6
7import time
8import signal
9
7from oeqa.runtime.case import OERuntimeTestCase 10from oeqa.runtime.case import OERuntimeTestCase
8from oeqa.core.decorator.depends import OETestDepends 11from oeqa.core.decorator.depends import OETestDepends
9from oeqa.runtime.decorator.package import OEHasPackage 12from oeqa.runtime.decorator.package import OEHasPackage
@@ -13,12 +16,22 @@ class SSHTest(OERuntimeTestCase):
13 @OETestDepends(['ping.PingTest.test_ping']) 16 @OETestDepends(['ping.PingTest.test_ping'])
14 @OEHasPackage(['dropbear', 'openssh-sshd']) 17 @OEHasPackage(['dropbear', 'openssh-sshd'])
15 def test_ssh(self): 18 def test_ssh(self):
16 (status, output) = self.target.run('sleep 20', timeout=2) 19 for i in range(20):
17 msg='run() timed out but return code was zero.' 20 status, output = self.target.run("uname -a", timeout=5)
18 self.assertNotEqual(status, 0, msg=msg) 21 if status == 0:
19 (status, output) = self.target.run('uname -a') 22 break
20 self.assertEqual(status, 0, msg='SSH Test failed: %s' % output) 23 elif status == 255 or status == -signal.SIGTERM:
21 (status, output) = self.target.run('cat /etc/controllerimage') 24 # ssh returns 255 only if a ssh error occurs. This could
22 msg = "This isn't the right image - /etc/controllerimage " \ 25 # be an issue with "Connection refused" because the port
23 "shouldn't be here %s" % output 26 # isn't open yet, and this could check explicitly for that
24 self.assertEqual(status, 1, msg=msg) 27 # here. However, let's keep it simple and just retry for
28 # all errors a limited amount of times with a sleep to
29 # give it time for the port to open.
30 # We sometimes see -15 (SIGTERM) on slow emulation machines too, likely
31 # from boot/init not being 100% complete, retry for these too.
32 time.sleep(5)
33 continue
34 else:
35 self.fail("uname failed with \"%s\" (exit code %s)" % (output, status))
36 if status != 0:
37 self.fail("ssh failed with \"%s\" (exit code %s)" % (output, status))
diff --git a/meta/lib/oeqa/runtime/cases/systemd.py b/meta/lib/oeqa/runtime/cases/systemd.py
index 5481e1d840..640f28abe9 100644
--- a/meta/lib/oeqa/runtime/cases/systemd.py
+++ b/meta/lib/oeqa/runtime/cases/systemd.py
@@ -145,18 +145,29 @@ class SystemdServiceTests(SystemdTest):
145 Verify that call-stacks generated by systemd-coredump contain symbolicated call-stacks, 145 Verify that call-stacks generated by systemd-coredump contain symbolicated call-stacks,
146 extracted from the minidebuginfo metadata (.gnu_debugdata elf section). 146 extracted from the minidebuginfo metadata (.gnu_debugdata elf section).
147 """ 147 """
148 t_thread = threading.Thread(target=self.target.run, args=("ulimit -c unlimited && sleep 1000",)) 148 # use "env sleep" instead of "sleep" to avoid calling the shell builtin function
149 t_thread = threading.Thread(target=self.target.run, args=("ulimit -c unlimited && env sleep 1000",))
149 t_thread.start() 150 t_thread.start()
150 time.sleep(1) 151 time.sleep(1)
151 152
152 status, output = self.target.run('pidof sleep') 153 status, sleep_pid = self.target.run('pidof sleep')
153 # cause segfault on purpose 154 # cause segfault on purpose
154 self.target.run('kill -SEGV %s' % output) 155 self.target.run('kill -SEGV %s' % sleep_pid)
155 self.assertEqual(status, 0, msg = 'Not able to find process that runs sleep, output : %s' % output) 156 self.assertEqual(status, 0, msg = 'Not able to find process that runs sleep, output : %s' % sleep_pid)
156 157
157 (status, output) = self.target.run('coredumpctl info') 158 # Give some time to systemd-coredump@.service to process the coredump
159 for x in range(20):
160 status, output = self.target.run('coredumpctl list %s' % sleep_pid)
161 if status == 0:
162 break
163 time.sleep(1)
164 else:
165 self.fail("Timed out waiting for coredump creation")
166
167 (status, output) = self.target.run('coredumpctl info %s' % sleep_pid)
158 self.assertEqual(status, 0, msg='MiniDebugInfo Test failed: %s' % output) 168 self.assertEqual(status, 0, msg='MiniDebugInfo Test failed: %s' % output)
159 self.assertEqual('sleep_for_duration (busybox.nosuid' in output, True, msg='Call stack is missing minidebuginfo symbols (functions shown as "n/a"): %s' % output) 169 self.assertEqual('sleep_for_duration (busybox.nosuid' in output or 'xnanosleep (sleep.coreutils' in output,
170 True, msg='Call stack is missing minidebuginfo symbols (functions shown as "n/a"): %s' % output)
160 171
161class SystemdJournalTests(SystemdTest): 172class SystemdJournalTests(SystemdTest):
162 173
diff --git a/meta/lib/oeqa/runtime/context.py b/meta/lib/oeqa/runtime/context.py
index cb7227a8df..daabc44910 100644
--- a/meta/lib/oeqa/runtime/context.py
+++ b/meta/lib/oeqa/runtime/context.py
@@ -8,6 +8,7 @@ import os
8import sys 8import sys
9 9
10from oeqa.core.context import OETestContext, OETestContextExecutor 10from oeqa.core.context import OETestContext, OETestContextExecutor
11from oeqa.core.target.serial import OESerialTarget
11from oeqa.core.target.ssh import OESSHTarget 12from oeqa.core.target.ssh import OESSHTarget
12from oeqa.core.target.qemu import OEQemuTarget 13from oeqa.core.target.qemu import OEQemuTarget
13 14
@@ -60,7 +61,7 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
60 runtime_group = self.parser.add_argument_group('runtime options') 61 runtime_group = self.parser.add_argument_group('runtime options')
61 62
62 runtime_group.add_argument('--target-type', action='store', 63 runtime_group.add_argument('--target-type', action='store',
63 default=self.default_target_type, choices=['simpleremote', 'qemu'], 64 default=self.default_target_type, choices=['simpleremote', 'qemu', 'serial'],
64 help="Target type of device under test, default: %s" \ 65 help="Target type of device under test, default: %s" \
65 % self.default_target_type) 66 % self.default_target_type)
66 runtime_group.add_argument('--target-ip', action='store', 67 runtime_group.add_argument('--target-ip', action='store',
@@ -108,6 +109,8 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
108 target = OESSHTarget(logger, target_ip, server_ip, **kwargs) 109 target = OESSHTarget(logger, target_ip, server_ip, **kwargs)
109 elif target_type == 'qemu': 110 elif target_type == 'qemu':
110 target = OEQemuTarget(logger, server_ip, **kwargs) 111 target = OEQemuTarget(logger, server_ip, **kwargs)
112 elif target_type == 'serial':
113 target = OESerialTarget(logger, target_ip, server_ip, **kwargs)
111 else: 114 else:
112 # XXX: This code uses the old naming convention for controllers and 115 # XXX: This code uses the old naming convention for controllers and
113 # targets, the idea it is to leave just targets as the controller 116 # targets, the idea it is to leave just targets as the controller
@@ -203,8 +206,15 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
203 206
204 super(OERuntimeTestContextExecutor, self)._process_args(logger, args) 207 super(OERuntimeTestContextExecutor, self)._process_args(logger, args)
205 208
209 td = self.tc_kwargs['init']['td']
210
206 target_kwargs = {} 211 target_kwargs = {}
212 target_kwargs['machine'] = td.get("MACHINE") or None
207 target_kwargs['qemuboot'] = args.qemu_boot 213 target_kwargs['qemuboot'] = args.qemu_boot
214 target_kwargs['serialcontrol_cmd'] = td.get("TEST_SERIALCONTROL_CMD") or None
215 target_kwargs['serialcontrol_extra_args'] = td.get("TEST_SERIALCONTROL_EXTRA_ARGS") or ""
216 target_kwargs['serialcontrol_ps1'] = td.get("TEST_SERIALCONTROL_PS1") or None
217 target_kwargs['serialcontrol_connect_timeout'] = td.get("TEST_SERIALCONTROL_CONNECT_TIMEOUT") or None
208 218
209 self.tc_kwargs['init']['target'] = \ 219 self.tc_kwargs['init']['target'] = \
210 OERuntimeTestContextExecutor.getTarget(args.target_type, 220 OERuntimeTestContextExecutor.getTarget(args.target_type,
diff --git a/meta/lib/oeqa/sdk/case.py b/meta/lib/oeqa/sdk/case.py
index c45882689c..46a3789f57 100644
--- a/meta/lib/oeqa/sdk/case.py
+++ b/meta/lib/oeqa/sdk/case.py
@@ -6,6 +6,7 @@
6 6
7import os 7import os
8import subprocess 8import subprocess
9import shutil
9 10
10from oeqa.core.case import OETestCase 11from oeqa.core.case import OETestCase
11 12
@@ -21,12 +22,14 @@ class OESDKTestCase(OETestCase):
21 archive = os.path.basename(urlparse(url).path) 22 archive = os.path.basename(urlparse(url).path)
22 23
23 if dl_dir: 24 if dl_dir:
24 tarball = os.path.join(dl_dir, archive) 25 archive_tarball = os.path.join(dl_dir, archive)
25 if os.path.exists(tarball): 26 if os.path.exists(archive_tarball):
26 return tarball 27 return archive_tarball
27 28
28 tarball = os.path.join(workdir, archive) 29 tarball = os.path.join(workdir, archive)
29 subprocess.check_output(["wget", "-O", tarball, url], stderr=subprocess.STDOUT) 30 subprocess.check_output(["wget", "-O", tarball, url], stderr=subprocess.STDOUT)
31 if dl_dir and not os.path.exists(archive_tarball):
32 shutil.copyfile(tarball, archive_tarball)
30 return tarball 33 return tarball
31 34
32 def check_elf(self, path, target_os=None, target_arch=None): 35 def check_elf(self, path, target_os=None, target_arch=None):
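
The change to fetch() above makes a freshly downloaded tarball seed the download directory so later SDK test runs reuse it instead of hitting the network again. A simplified sketch of that cache-or-download logic, independent of the oeqa classes (the function name and arguments here are illustrative, not the oeqa API):

import os
import shutil
import subprocess

def fetch(url, workdir, dl_dir=None):
    # Return a cached tarball if one exists, otherwise download it and seed the cache.
    name = os.path.basename(url)
    cached = os.path.join(dl_dir, name) if dl_dir else None
    if cached and os.path.exists(cached):
        return cached
    tarball = os.path.join(workdir, name)
    subprocess.check_output(["wget", "-O", tarball, url], stderr=subprocess.STDOUT)
    if cached and not os.path.exists(cached):
        shutil.copyfile(tarball, cached)
    return tarball
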
diff --git a/meta/lib/oeqa/sdk/cases/buildcpio.py b/meta/lib/oeqa/sdk/cases/autotools.py
index 51003b19cd..4bac28f04d 100644
--- a/meta/lib/oeqa/sdk/cases/buildcpio.py
+++ b/meta/lib/oeqa/sdk/cases/autotools.py
@@ -13,10 +13,15 @@ from oeqa.sdk.case import OESDKTestCase
13from oeqa.utils.subprocesstweak import errors_have_output 13from oeqa.utils.subprocesstweak import errors_have_output
14errors_have_output() 14errors_have_output()
15 15
16class BuildCpioTest(OESDKTestCase): 16class AutotoolsTest(OESDKTestCase):
17 """ 17 """
18 Check that autotools will cross-compile correctly. 18 Check that autotools will cross-compile correctly.
19 """ 19 """
20 def setUp(self):
21 libc = self.td.get("TCLIBC")
22 if libc in [ 'newlib' ]:
23 raise unittest.SkipTest("AutotoolsTest class: SDK doesn't contain a supported C library")
24
20 def test_cpio(self): 25 def test_cpio(self):
21 with tempfile.TemporaryDirectory(prefix="cpio-", dir=self.tc.sdk_dir) as testdir: 26 with tempfile.TemporaryDirectory(prefix="cpio-", dir=self.tc.sdk_dir) as testdir:
22 tarball = self.fetch(testdir, self.td["DL_DIR"], "https://ftp.gnu.org/gnu/cpio/cpio-2.15.tar.gz") 27 tarball = self.fetch(testdir, self.td["DL_DIR"], "https://ftp.gnu.org/gnu/cpio/cpio-2.15.tar.gz")
diff --git a/meta/lib/oeqa/sdk/cases/assimp.py b/meta/lib/oeqa/sdk/cases/cmake.py
index e986838aea..cb0944ee99 100644
--- a/meta/lib/oeqa/sdk/cases/assimp.py
+++ b/meta/lib/oeqa/sdk/cases/cmake.py
@@ -13,22 +13,26 @@ from oeqa.sdk.case import OESDKTestCase
13from oeqa.utils.subprocesstweak import errors_have_output 13from oeqa.utils.subprocesstweak import errors_have_output
14errors_have_output() 14errors_have_output()
15 15
16class BuildAssimp(OESDKTestCase): 16class CMakeTest(OESDKTestCase):
17 """ 17 """
18 Test case to build a project using cmake. 18 Test case to build a project using cmake.
19 """ 19 """
20 20
21 def setUp(self): 21 def setUp(self):
22 libc = self.td.get("TCLIBC")
23 if libc in [ 'newlib' ]:
24 raise unittest.SkipTest("CMakeTest class: SDK doesn't contain a supported C library")
25
22 if not (self.tc.hasHostPackage("nativesdk-cmake") or 26 if not (self.tc.hasHostPackage("nativesdk-cmake") or
23 self.tc.hasHostPackage("cmake-native")): 27 self.tc.hasHostPackage("cmake-native")):
24 raise unittest.SkipTest("Needs cmake") 28 raise unittest.SkipTest("CMakeTest: needs cmake")
25 29
26 def test_assimp(self): 30 def test_assimp(self):
27 with tempfile.TemporaryDirectory(prefix="assimp", dir=self.tc.sdk_dir) as testdir: 31 with tempfile.TemporaryDirectory(prefix="assimp", dir=self.tc.sdk_dir) as testdir:
28 tarball = self.fetch(testdir, self.td["DL_DIR"], "https://github.com/assimp/assimp/archive/v5.3.1.tar.gz") 32 tarball = self.fetch(testdir, self.td["DL_DIR"], "https://github.com/assimp/assimp/archive/v5.4.1.tar.gz")
29 33
30 dirs = {} 34 dirs = {}
31 dirs["source"] = os.path.join(testdir, "assimp-5.3.1") 35 dirs["source"] = os.path.join(testdir, "assimp-5.4.1")
32 dirs["build"] = os.path.join(testdir, "build") 36 dirs["build"] = os.path.join(testdir, "build")
33 dirs["install"] = os.path.join(testdir, "install") 37 dirs["install"] = os.path.join(testdir, "install")
34 38
@@ -39,7 +43,7 @@ class BuildAssimp(OESDKTestCase):
39 self._run("sed -i '/# ifdef _FILE_OFFSET_BITS/I,+2 d' {source}/contrib/zlib/gzguts.h".format(**dirs)) 43 self._run("sed -i '/# ifdef _FILE_OFFSET_BITS/I,+2 d' {source}/contrib/zlib/gzguts.h".format(**dirs))
40 os.makedirs(dirs["build"]) 44 os.makedirs(dirs["build"])
41 45
42 self._run("cd {build} && cmake -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON -DASSIMP_BUILD_ZLIB=ON {source}".format(**dirs)) 46 self._run("cd {build} && cmake -DASSIMP_WARNINGS_AS_ERRORS=OFF -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON -DASSIMP_BUILD_ZLIB=ON {source}".format(**dirs))
43 self._run("cmake --build {build} -- -j".format(**dirs)) 47 self._run("cmake --build {build} -- -j".format(**dirs))
44 self._run("cmake --build {build} --target install -- DESTDIR={install}".format(**dirs)) 48 self._run("cmake --build {build} --target install -- DESTDIR={install}".format(**dirs))
45 self.check_elf(os.path.join(dirs["install"], "usr", "local", "lib", "libassimp.so.5.3.0")) 49 self.check_elf(os.path.join(dirs["install"], "usr", "local", "lib", "libassimp.so.5.4.1"))
diff --git a/meta/lib/oeqa/sdk/cases/gcc.py b/meta/lib/oeqa/sdk/cases/gcc.py
index fc28b9c3d4..e810d2c42b 100644
--- a/meta/lib/oeqa/sdk/cases/gcc.py
+++ b/meta/lib/oeqa/sdk/cases/gcc.py
@@ -26,6 +26,10 @@ class GccCompileTest(OESDKTestCase):
26 os.path.join(self.tc.sdk_dir, f)) 26 os.path.join(self.tc.sdk_dir, f))
27 27
28 def setUp(self): 28 def setUp(self):
29 libc = self.td.get("TCLIBC")
30 if libc in [ 'newlib' ]:
31 raise unittest.SkipTest("GccCompileTest class: SDK doesn't contain a supported C library")
32
29 machine = self.td.get("MACHINE") 33 machine = self.td.get("MACHINE")
30 if not (self.tc.hasHostPackage("packagegroup-cross-canadian-%s" % machine) or 34 if not (self.tc.hasHostPackage("packagegroup-cross-canadian-%s" % machine) or
31 self.tc.hasHostPackage("^gcc-", regex=True)): 35 self.tc.hasHostPackage("^gcc-", regex=True)):
diff --git a/meta/lib/oeqa/sdk/cases/buildgalculator.py b/meta/lib/oeqa/sdk/cases/gtk3.py
index 178f07472d..8f60d5e7da 100644
--- a/meta/lib/oeqa/sdk/cases/buildgalculator.py
+++ b/meta/lib/oeqa/sdk/cases/gtk3.py
@@ -13,11 +13,15 @@ from oeqa.sdk.case import OESDKTestCase
13from oeqa.utils.subprocesstweak import errors_have_output 13from oeqa.utils.subprocesstweak import errors_have_output
14errors_have_output() 14errors_have_output()
15 15
16class GalculatorTest(OESDKTestCase): 16class GTK3Test(OESDKTestCase):
17 """ 17 """
18 Test that autotools and GTK+ 3 compiles correctly. 18 Test that autotools and GTK+ 3 compiles correctly.
19 """ 19 """
20 def setUp(self): 20 def setUp(self):
21 libc = self.td.get("TCLIBC")
22 if libc in [ 'newlib' ]:
23 raise unittest.SkipTest("GTK3Test class: SDK doesn't contain a supported C library")
24
21 if not (self.tc.hasTargetPackage("gtk+3", multilib=True) or \ 25 if not (self.tc.hasTargetPackage("gtk+3", multilib=True) or \
22 self.tc.hasTargetPackage("libgtk-3.0", multilib=True)): 26 self.tc.hasTargetPackage("libgtk-3.0", multilib=True)):
23 raise unittest.SkipTest("GalculatorTest class: SDK don't support gtk+3") 27 raise unittest.SkipTest("GalculatorTest class: SDK don't support gtk+3")
diff --git a/meta/lib/oeqa/sdk/cases/kmod.py b/meta/lib/oeqa/sdk/cases/kmod.py
new file mode 100644
index 0000000000..9e8fdbcd40
--- /dev/null
+++ b/meta/lib/oeqa/sdk/cases/kmod.py
@@ -0,0 +1,41 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os
8import subprocess
9import tempfile
10import unittest
11
12from oeqa.sdk.case import OESDKTestCase
13from oeqa.utils.subprocesstweak import errors_have_output
14errors_have_output()
15
16class KernelModuleTest(OESDKTestCase):
17 """
18 Test that out-of-tree kernel modules build.
19 """
20
21 def setUp(self):
22 if not self.tc.hasTargetPackage("kernel-devsrc"):
23 raise unittest.SkipTest("KernelModuleTest needs kernel-devsrc")
24
25 # These targets need to be built before kernel modules can be built.
26 self._run("make -j -C $OECORE_TARGET_SYSROOT/usr/src/kernel prepare scripts")
27
28
29 def test_cryptodev(self):
30 with tempfile.TemporaryDirectory(prefix="cryptodev", dir=self.tc.sdk_dir) as testdir:
31 git_url = "https://github.com/cryptodev-linux/cryptodev-linux"
32 # This is a known-good commit post-1.13 that builds with kernel 6.7+
33 git_sha = "bb8bc7cf60d2c0b097c8b3b0e807f805b577a53f"
34
35 sourcedir = os.path.join(testdir, "cryptodev-linux")
36 subprocess.check_output(["git", "clone", git_url, sourcedir], stderr=subprocess.STDOUT)
37 self.assertTrue(os.path.isdir(sourcedir))
38 subprocess.check_output(["git", "-C", sourcedir, "checkout", git_sha], stderr=subprocess.STDOUT)
39
40 self._run("make -C %s V=1 KERNEL_DIR=$OECORE_TARGET_SYSROOT/usr/src/kernel" % sourcedir)
41 self.check_elf(os.path.join(sourcedir, "cryptodev.ko"))
diff --git a/meta/lib/oeqa/sdk/cases/buildlzip.py b/meta/lib/oeqa/sdk/cases/makefile.py
index b4b7d85b88..e1e2484820 100644
--- a/meta/lib/oeqa/sdk/cases/buildlzip.py
+++ b/meta/lib/oeqa/sdk/cases/makefile.py
@@ -4,15 +4,21 @@
4# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
5# 5#
6 6
7import os, tempfile, subprocess, unittest 7import os, tempfile, subprocess
8import unittest
8from oeqa.sdk.case import OESDKTestCase 9from oeqa.sdk.case import OESDKTestCase
9from oeqa.utils.subprocesstweak import errors_have_output 10from oeqa.utils.subprocesstweak import errors_have_output
10errors_have_output() 11errors_have_output()
11 12
12class BuildLzipTest(OESDKTestCase): 13class MakefileTest(OESDKTestCase):
13 """ 14 """
14 Test that "plain" compilation works, using just $CC $CFLAGS etc. 15 Test that "plain" compilation works, using just $CC $CFLAGS etc.
15 """ 16 """
17 def setUp(self):
18 libc = self.td.get("TCLIBC")
19 if libc in [ 'newlib' ]:
20 raise unittest.SkipTest("MakefileTest class: SDK doesn't contain a supported C library")
21
16 def test_lzip(self): 22 def test_lzip(self):
17 with tempfile.TemporaryDirectory(prefix="lzip", dir=self.tc.sdk_dir) as testdir: 23 with tempfile.TemporaryDirectory(prefix="lzip", dir=self.tc.sdk_dir) as testdir:
18 tarball = self.fetch(testdir, self.td["DL_DIR"], "http://downloads.yoctoproject.org/mirror/sources/lzip-1.19.tar.gz") 24 tarball = self.fetch(testdir, self.td["DL_DIR"], "http://downloads.yoctoproject.org/mirror/sources/lzip-1.19.tar.gz")
diff --git a/meta/lib/oeqa/sdk/cases/maturin.py b/meta/lib/oeqa/sdk/cases/maturin.py
index ea10f568b2..20f6b553d0 100644
--- a/meta/lib/oeqa/sdk/cases/maturin.py
+++ b/meta/lib/oeqa/sdk/cases/maturin.py
@@ -8,7 +8,6 @@ import os
8import shutil 8import shutil
9import unittest 9import unittest
10 10
11from oeqa.core.utils.path import remove_safe
12from oeqa.sdk.case import OESDKTestCase 11from oeqa.sdk.case import OESDKTestCase
13from oeqa.utils.subprocesstweak import errors_have_output 12from oeqa.utils.subprocesstweak import errors_have_output
14 13
diff --git a/meta/lib/oeqa/sdk/cases/buildepoxy.py b/meta/lib/oeqa/sdk/cases/meson.py
index 147ee3e0ee..1edf78720a 100644
--- a/meta/lib/oeqa/sdk/cases/buildepoxy.py
+++ b/meta/lib/oeqa/sdk/cases/meson.py
@@ -13,14 +13,18 @@ from oeqa.sdk.case import OESDKTestCase
13from oeqa.utils.subprocesstweak import errors_have_output 13from oeqa.utils.subprocesstweak import errors_have_output
14errors_have_output() 14errors_have_output()
15 15
16class EpoxyTest(OESDKTestCase): 16class MesonTest(OESDKTestCase):
17 """ 17 """
18 Test that Meson builds correctly. 18 Test that Meson builds correctly.
19 """ 19 """
20 def setUp(self): 20 def setUp(self):
21 libc = self.td.get("TCLIBC")
22 if libc in [ 'newlib' ]:
23 raise unittest.SkipTest("MesonTest class: SDK doesn't contain a supported C library")
24
21 if not (self.tc.hasHostPackage("nativesdk-meson") or 25 if not (self.tc.hasHostPackage("nativesdk-meson") or
22 self.tc.hasHostPackage("meson-native")): 26 self.tc.hasHostPackage("meson-native")):
23 raise unittest.SkipTest("EpoxyTest class: SDK doesn't contain Meson") 27 raise unittest.SkipTest("MesonTest: needs meson")
24 28
25 def test_epoxy(self): 29 def test_epoxy(self):
26 with tempfile.TemporaryDirectory(prefix="epoxy", dir=self.tc.sdk_dir) as testdir: 30 with tempfile.TemporaryDirectory(prefix="epoxy", dir=self.tc.sdk_dir) as testdir:
diff --git a/meta/lib/oeqa/sdk/cases/python.py b/meta/lib/oeqa/sdk/cases/python.py
index 5ea992b9f3..51284949f5 100644
--- a/meta/lib/oeqa/sdk/cases/python.py
+++ b/meta/lib/oeqa/sdk/cases/python.py
@@ -4,7 +4,7 @@
4# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
5# 5#
6 6
7import subprocess, unittest 7import unittest
8from oeqa.sdk.case import OESDKTestCase 8from oeqa.sdk.case import OESDKTestCase
9 9
10from oeqa.utils.subprocesstweak import errors_have_output 10from oeqa.utils.subprocesstweak import errors_have_output
diff --git a/meta/lib/oeqa/sdk/cases/rust.py b/meta/lib/oeqa/sdk/cases/rust.py
index f5d437bb19..a54245851b 100644
--- a/meta/lib/oeqa/sdk/cases/rust.py
+++ b/meta/lib/oeqa/sdk/cases/rust.py
@@ -8,7 +8,6 @@ import os
8import shutil 8import shutil
9import unittest 9import unittest
10 10
11from oeqa.core.utils.path import remove_safe
12from oeqa.sdk.case import OESDKTestCase 11from oeqa.sdk.case import OESDKTestCase
13 12
14from oeqa.utils.subprocesstweak import errors_have_output 13from oeqa.utils.subprocesstweak import errors_have_output
diff --git a/meta/lib/oeqa/sdkext/cases/devtool.py b/meta/lib/oeqa/sdkext/cases/devtool.py
index 5ffb732556..d0746e68eb 100644
--- a/meta/lib/oeqa/sdkext/cases/devtool.py
+++ b/meta/lib/oeqa/sdkext/cases/devtool.py
@@ -69,10 +69,9 @@ class DevtoolTest(OESDKExtTestCase):
69 self._test_devtool_build(self.myapp_cmake_dst) 69 self._test_devtool_build(self.myapp_cmake_dst)
70 70
71 def test_extend_autotools_recipe_creation(self): 71 def test_extend_autotools_recipe_creation(self):
72 req = 'https://github.com/rdfa/librdfa' 72 recipe = "test-dbus-wait"
73 recipe = "librdfa" 73 self._run('devtool sdk-install dbus')
74 self._run('devtool sdk-install libxml2') 74 self._run('devtool add %s https://git.yoctoproject.org/git/dbus-wait' % (recipe) )
75 self._run('devtool add %s %s' % (recipe, req) )
76 try: 75 try:
77 self._run('devtool build %s' % recipe) 76 self._run('devtool build %s' % recipe)
78 finally: 77 finally:
diff --git a/meta/lib/oeqa/selftest/cases/bbclasses.py b/meta/lib/oeqa/selftest/cases/bbclasses.py
new file mode 100644
index 0000000000..10545ebe65
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/bbclasses.py
@@ -0,0 +1,106 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import get_bb_vars, bitbake
9
10class Systemd(OESelftestTestCase):
11 """
12 Tests related to the systemd bbclass.
13 """
14
15 def getVars(self, recipe):
16 self.bb_vars = get_bb_vars(
17 [
18 'BPN',
19 'D',
20 'INIT_D_DIR',
21 'prefix',
22 'systemd_system_unitdir',
23 'sysconfdir',
24 ],
25 recipe,
26 )
27
28 def fileExists(self, filename):
29 self.assertExists(filename.format(**self.bb_vars))
30
31 def fileNotExists(self, filename):
32 self.assertNotExists(filename.format(**self.bb_vars))
33
34 def test_systemd_in_distro(self):
35 """
36 Summary: Verify that no sysvinit files are installed when the
37 systemd distro feature is enabled, but sysvinit is not.
38 Expected: Systemd service file exists, but /etc does not.
39 Product: OE-Core
40 Author: Peter Kjellerstedt <peter.kjellerstedt@axis.com>
41 """
42
43 self.write_config("""
44DISTRO_FEATURES:append = " systemd usrmerge"
45DISTRO_FEATURES:remove = "sysvinit"
46VIRTUAL-RUNTIME_init_manager = "systemd"
47""")
48 bitbake("systemd-only systemd-and-sysvinit -c install")
49
50 self.getVars("systemd-only")
51 self.fileExists("{D}{systemd_system_unitdir}/{BPN}.service")
52
53 self.getVars("systemd-and-sysvinit")
54 self.fileExists("{D}{systemd_system_unitdir}/{BPN}.service")
55 self.fileNotExists("{D}{sysconfdir}")
56
57 def test_systemd_and_sysvinit_in_distro(self):
58 """
59 Summary: Verify that both systemd and sysvinit files are installed
60 when both the systemd and sysvinit distro features are
61 enabled.
62 Expected: Systemd service file and sysvinit initscript exist.
63 Product: OE-Core
64 Author: Peter Kjellerstedt <peter.kjellerstedt@axis.com>
65 """
66
67 self.write_config("""
68DISTRO_FEATURES:append = " systemd sysvinit usrmerge"
69VIRTUAL-RUNTIME_init_manager = "systemd"
70""")
71 bitbake("systemd-only systemd-and-sysvinit -c install")
72
73 self.getVars("systemd-only")
74 self.fileExists("{D}{systemd_system_unitdir}/{BPN}.service")
75
76 self.getVars("systemd-and-sysvinit")
77 self.fileExists("{D}{systemd_system_unitdir}/{BPN}.service")
78 self.fileExists("{D}{INIT_D_DIR}/{BPN}")
79
80 def test_sysvinit_in_distro(self):
81 """
82 Summary: Verify that no systemd service files are installed when the
83 sysvinit distro feature is enabled, but systemd is not.
84 Expected: The systemd service file does not exist, nor does /usr.
85 The sysvinit initscript exists.
86 Product: OE-Core
87 Author: Peter Kjellerstedt <peter.kjellerstedt@axis.com>
88 """
89
90 self.write_config("""
91DISTRO_FEATURES:remove = "systemd"
92DISTRO_FEATURES:append = " sysvinit usrmerge"
93VIRTUAL-RUNTIME_init_manager = "sysvinit"
94""")
95 bitbake("systemd-only systemd-and-sysvinit -c install")
96
97 self.getVars("systemd-only")
98 self.fileNotExists("{D}{systemd_system_unitdir}/{BPN}.service")
99 self.fileNotExists("{D}{prefix}")
100 self.fileNotExists("{D}{sysconfdir}")
101 self.fileExists("{D}")
102
103 self.getVars("systemd-and-sysvinit")
104 self.fileNotExists("{D}{systemd_system_unitdir}/{BPN}.service")
105 self.fileNotExists("{D}{prefix}")
106 self.fileExists("{D}{INIT_D_DIR}/{BPN}")
diff --git a/meta/lib/oeqa/selftest/cases/binutils.py b/meta/lib/oeqa/selftest/cases/binutils.py
index 1688eabe4e..5ff263d342 100644
--- a/meta/lib/oeqa/selftest/cases/binutils.py
+++ b/meta/lib/oeqa/selftest/cases/binutils.py
@@ -33,7 +33,7 @@ class BinutilsCrossSelfTest(OESelftestTestCase, OEPTestResultTestCase):
33 features.append('CHECK_TARGETS = "{0}"'.format(suite)) 33 features.append('CHECK_TARGETS = "{0}"'.format(suite))
34 self.write_config("\n".join(features)) 34 self.write_config("\n".join(features))
35 35
36 recipe = "binutils-cross-testsuite" 36 recipe = "binutils-testsuite"
37 bb_vars = get_bb_vars(["B", "TARGET_SYS", "T"], recipe) 37 bb_vars = get_bb_vars(["B", "TARGET_SYS", "T"], recipe)
38 builddir, target_sys, tdir = bb_vars["B"], bb_vars["TARGET_SYS"], bb_vars["T"] 38 builddir, target_sys, tdir = bb_vars["B"], bb_vars["TARGET_SYS"], bb_vars["T"]
39 39
diff --git a/meta/lib/oeqa/selftest/cases/buildoptions.py b/meta/lib/oeqa/selftest/cases/buildoptions.py
index 31dafaa9c5..423c31e189 100644
--- a/meta/lib/oeqa/selftest/cases/buildoptions.py
+++ b/meta/lib/oeqa/selftest/cases/buildoptions.py
@@ -84,7 +84,7 @@ class SanityOptionsTest(OESelftestTestCase):
84 84
85 self.write_config("INHERIT:remove = \"report-error\"") 85 self.write_config("INHERIT:remove = \"report-error\"")
86 if "packages-list" not in get_bb_var("ERROR_QA"): 86 if "packages-list" not in get_bb_var("ERROR_QA"):
87 self.append_config("ERROR_QA:append = \" packages-list\"") 87 self.append_config("ERROR_QA:append:pn-xcursor-transparent-theme = \" packages-list\"")
88 88
89 self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"') 89 self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"')
90 self.add_command_to_tearDown('bitbake -c clean xcursor-transparent-theme') 90 self.add_command_to_tearDown('bitbake -c clean xcursor-transparent-theme')
@@ -94,8 +94,8 @@ class SanityOptionsTest(OESelftestTestCase):
94 self.assertTrue(line and line.startswith("ERROR:"), msg=res.output) 94 self.assertTrue(line and line.startswith("ERROR:"), msg=res.output)
95 self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output)) 95 self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output))
96 self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"') 96 self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"')
97 self.append_config('ERROR_QA:remove = "packages-list"') 97 self.append_config('ERROR_QA:remove:pn-xcursor-transparent-theme = "packages-list"')
98 self.append_config('WARN_QA:append = " packages-list"') 98 self.append_config('WARN_QA:append:pn-xcursor-transparent-theme = " packages-list"')
99 res = bitbake("xcursor-transparent-theme -f -c package") 99 res = bitbake("xcursor-transparent-theme -f -c package")
100 self.delete_recipeinc('xcursor-transparent-theme') 100 self.delete_recipeinc('xcursor-transparent-theme')
101 line = self.getline(res, "QA Issue: xcursor-transparent-theme-dbg is listed in PACKAGES multiple times, this leads to packaging errors.") 101 line = self.getline(res, "QA Issue: xcursor-transparent-theme-dbg is listed in PACKAGES multiple times, this leads to packaging errors.")
@@ -173,8 +173,8 @@ class BuildhistoryTests(BuildhistoryBase):
173 173
174 data = load_bh(os.path.join(history_dir, 'hicolor-icon-theme-dev', 'latest')) 174 data = load_bh(os.path.join(history_dir, 'hicolor-icon-theme-dev', 'latest'))
175 if 'FILELIST' in data: 175 if 'FILELIST' in data:
176 self.assertEqual(data['FILELIST'], '') 176 self.assertEqual(data['FILELIST'], '/usr/share/pkgconfig/default-icon-theme.pc')
177 self.assertEqual(int(data['PKGSIZE']), 0) 177 self.assertGreater(int(data['PKGSIZE']), 0)
178 178
179class ArchiverTest(OESelftestTestCase): 179class ArchiverTest(OESelftestTestCase):
180 def test_arch_work_dir_and_export_source(self): 180 def test_arch_work_dir_and_export_source(self):
diff --git a/meta/lib/oeqa/selftest/cases/cve_check.py b/meta/lib/oeqa/selftest/cases/cve_check.py
index 60cecd1328..3dd3e89d3e 100644
--- a/meta/lib/oeqa/selftest/cases/cve_check.py
+++ b/meta/lib/oeqa/selftest/cases/cve_check.py
@@ -72,6 +72,54 @@ class CVECheck(OESelftestTestCase):
72 self.assertEqual(convert_cve_version("6.2_rc8"), "6.2-rc8") 72 self.assertEqual(convert_cve_version("6.2_rc8"), "6.2-rc8")
73 self.assertEqual(convert_cve_version("6.2_rc31"), "6.2-rc31") 73 self.assertEqual(convert_cve_version("6.2_rc31"), "6.2-rc31")
74 74
75 def test_product_match(self):
76 from oe.cve_check import has_cve_product_match
77
78 status = {}
79 status["detail"] = "ignored"
80 status["vendor"] = "*"
81 status["product"] = "*"
82 status["description"] = ""
83 status["mapping"] = ""
84
85 self.assertEqual(has_cve_product_match(status, "some_vendor:some_product"), True)
86 self.assertEqual(has_cve_product_match(status, "*:*"), True)
87 self.assertEqual(has_cve_product_match(status, "some_product"), True)
88 self.assertEqual(has_cve_product_match(status, "glibc"), True)
89 self.assertEqual(has_cve_product_match(status, "glibca"), True)
90 self.assertEqual(has_cve_product_match(status, "aglibc"), True)
91 self.assertEqual(has_cve_product_match(status, "*"), True)
92 self.assertEqual(has_cve_product_match(status, "aglibc glibc test:test"), True)
93
94 status["product"] = "glibc"
95 self.assertEqual(has_cve_product_match(status, "some_vendor:some_product"), False)
96 # The CPE in the recipe must be defined, no * accepted
97 self.assertEqual(has_cve_product_match(status, "*:*"), False)
98 self.assertEqual(has_cve_product_match(status, "*"), False)
99 self.assertEqual(has_cve_product_match(status, "some_product"), False)
100 self.assertEqual(has_cve_product_match(status, "glibc"), True)
101 self.assertEqual(has_cve_product_match(status, "glibca"), False)
102 self.assertEqual(has_cve_product_match(status, "aglibc"), False)
103 self.assertEqual(has_cve_product_match(status, "some_vendor:glibc"), True)
104 self.assertEqual(has_cve_product_match(status, "some_vendor:glibc test"), True)
105 self.assertEqual(has_cve_product_match(status, "test some_vendor:glibc"), True)
106
107 status["vendor"] = "glibca"
108 status["product"] = "glibc"
109 self.assertEqual(has_cve_product_match(status, "some_vendor:some_product"), False)
110 # The CPE in the recipe must be defined, no * accepted
111 self.assertEqual(has_cve_product_match(status, "*:*"), False)
112 self.assertEqual(has_cve_product_match(status, "*"), False)
113 self.assertEqual(has_cve_product_match(status, "some_product"), False)
114 self.assertEqual(has_cve_product_match(status, "glibc"), False)
115 self.assertEqual(has_cve_product_match(status, "glibca"), False)
116 self.assertEqual(has_cve_product_match(status, "aglibc"), False)
117 self.assertEqual(has_cve_product_match(status, "some_vendor:glibc"), False)
118 self.assertEqual(has_cve_product_match(status, "glibca:glibc"), True)
119 self.assertEqual(has_cve_product_match(status, "test:test glibca:glibc"), True)
120 self.assertEqual(has_cve_product_match(status, "test glibca:glibc"), True)
121 self.assertEqual(has_cve_product_match(status, "glibca:glibc test"), True)
122
75 123
76 def test_recipe_report_json(self): 124 def test_recipe_report_json(self):
77 config = """ 125 config = """
@@ -217,9 +265,10 @@ CVE_CHECK_REPORT_PATCHED = "1"
217 # m4 CVE should not be in logrotate 265 # m4 CVE should not be in logrotate
218 self.assertNotIn("CVE-2008-1687", found_cves) 266 self.assertNotIn("CVE-2008-1687", found_cves)
219 # logrotate has both Patched and Ignored CVEs 267 # logrotate has both Patched and Ignored CVEs
268 detail = "version-not-in-range"
220 self.assertIn("CVE-2011-1098", found_cves) 269 self.assertIn("CVE-2011-1098", found_cves)
221 self.assertEqual(found_cves["CVE-2011-1098"]["status"], "Patched") 270 self.assertEqual(found_cves["CVE-2011-1098"]["status"], "Patched")
222 self.assertEqual(len(found_cves["CVE-2011-1098"]["detail"]), 0) 271 self.assertEqual(found_cves["CVE-2011-1098"]["detail"], detail)
223 self.assertEqual(len(found_cves["CVE-2011-1098"]["description"]), 0) 272 self.assertEqual(len(found_cves["CVE-2011-1098"]["description"]), 0)
224 detail = "not-applicable-platform" 273 detail = "not-applicable-platform"
225 description = "CVE is debian, gentoo or SUSE specific on the way logrotate was installed/used" 274 description = "CVE is debian, gentoo or SUSE specific on the way logrotate was installed/used"
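The test_product_match() assertions above encode the intended matching rules: each entry in the recipe-side product string is either "product" or "vendor:product", and a "*" on the recipe side only matches a status field that is itself "*". A small sketch of those semantics, purely as an illustration of what the tests assert (not the oe.cve_check implementation); the status dict mirrors the one built in the test:

    def product_match_sketch(status, products):
        # status carries "vendor" and "product"; products is the space-separated
        # CVE_PRODUCT-style string passed on the recipe side.
        for candidate in products.split():
            if ":" in candidate:
                vendor, product = candidate.split(":", 1)
            else:
                vendor, product = "*", candidate
            # A wildcard in the status matches anything; a wildcard on the
            # recipe side only matches an explicit wildcard in the status.
            vendor_ok = status["vendor"] == "*" or vendor == status["vendor"]
            product_ok = status["product"] == "*" or product == status["product"]
            if vendor_ok and product_ok:
                return True
        return False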
diff --git a/meta/lib/oeqa/selftest/cases/debuginfod.py b/meta/lib/oeqa/selftest/cases/debuginfod.py
index 505b4be837..46c0cd87bb 100644
--- a/meta/lib/oeqa/selftest/cases/debuginfod.py
+++ b/meta/lib/oeqa/selftest/cases/debuginfod.py
@@ -62,7 +62,7 @@ class Debuginfod(OESelftestTestCase):
62 62
63 raise TimeoutError("Cannot connect debuginfod, still %d scan jobs running" % latest) 63 raise TimeoutError("Cannot connect debuginfod, still %d scan jobs running" % latest)
64 64
65 def start_debuginfod(self): 65 def start_debuginfod(self, feed_dir):
66 # We assume that the caller has already bitbake'd elfutils-native:do_addto_recipe_sysroot 66 # We assume that the caller has already bitbake'd elfutils-native:do_addto_recipe_sysroot
67 67
68 # Save some useful paths for later 68 # Save some useful paths for later
@@ -82,7 +82,7 @@ class Debuginfod(OESelftestTestCase):
82 # Disable rescanning, this is a one-shot test 82 # Disable rescanning, this is a one-shot test
83 "--rescan-time=0", 83 "--rescan-time=0",
84 "--groom-time=0", 84 "--groom-time=0",
85 get_bb_var("DEPLOY_DIR"), 85 feed_dir,
86 ] 86 ]
87 87
88 format = get_bb_var("PACKAGE_CLASSES").split()[0] 88 format = get_bb_var("PACKAGE_CLASSES").split()[0]
@@ -114,11 +114,12 @@ class Debuginfod(OESelftestTestCase):
114 self.write_config(""" 114 self.write_config("""
115TMPDIR = "${TOPDIR}/tmp-debuginfod" 115TMPDIR = "${TOPDIR}/tmp-debuginfod"
116DISTRO_FEATURES:append = " debuginfod" 116DISTRO_FEATURES:append = " debuginfod"
117INHERIT += "localpkgfeed"
117""") 118""")
118 bitbake("elfutils-native:do_addto_recipe_sysroot xz xz:do_package") 119 bitbake("elfutils-native:do_addto_recipe_sysroot xz xz:do_package xz:do_localpkgfeed")
119 120
120 try: 121 try:
121 self.start_debuginfod() 122 self.start_debuginfod(get_bb_var("LOCALPKGFEED_DIR", "xz"))
122 123
123 env = os.environ.copy() 124 env = os.environ.copy()
124 env["DEBUGINFOD_URLS"] = "http://localhost:%d/" % self.port 125 env["DEBUGINFOD_URLS"] = "http://localhost:%d/" % self.port
@@ -141,12 +142,13 @@ DISTRO_FEATURES:append = " debuginfod"
141 self.write_config(""" 142 self.write_config("""
142TMPDIR = "${TOPDIR}/tmp-debuginfod" 143TMPDIR = "${TOPDIR}/tmp-debuginfod"
143DISTRO_FEATURES:append = " debuginfod" 144DISTRO_FEATURES:append = " debuginfod"
145INHERIT += "localpkgfeed"
144CORE_IMAGE_EXTRA_INSTALL += "elfutils xz" 146CORE_IMAGE_EXTRA_INSTALL += "elfutils xz"
145 """) 147 """)
146 bitbake("core-image-minimal elfutils-native:do_addto_recipe_sysroot") 148 bitbake("core-image-minimal elfutils-native:do_addto_recipe_sysroot xz:do_localpkgfeed")
147 149
148 try: 150 try:
149 self.start_debuginfod() 151 self.start_debuginfod(get_bb_var("LOCALPKGFEED_DIR", "xz"))
150 152
151 with runqemu("core-image-minimal", runqemuparams="nographic") as qemu: 153 with runqemu("core-image-minimal", runqemuparams="nographic") as qemu:
152 cmd = "DEBUGINFOD_URLS=http://%s:%d/ debuginfod-find debuginfo /usr/bin/xz" % (qemu.server_ip, self.port) 154 cmd = "DEBUGINFOD_URLS=http://%s:%d/ debuginfod-find debuginfo /usr/bin/xz" % (qemu.server_ip, self.port)
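start_debuginfod() now takes the feed directory explicitly, and the tests point it at LOCALPKGFEED_DIR from the localpkgfeed class instead of the whole DEPLOY_DIR. A hedged sketch of launching such a one-shot debuginfod over a feed directory; the -R/-U archive flags and port handling are assumptions from the debuginfod command line, not copied from the test:

    import os
    import subprocess
    from oeqa.utils.commands import get_bb_var

    def start_local_debuginfod(feed_dir, port):
        # One-shot scan of feed_dir: never rescan, never groom.
        sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "elfutils-native")
        cmd = [
            os.path.join(sysroot, "usr", "bin", "debuginfod"),
            "-p", str(port),
            "--rescan-time=0",
            "--groom-time=0",
        ]
        # Index the archive format matching the first PACKAGE_CLASSES entry.
        if get_bb_var("PACKAGE_CLASSES").split()[0] == "package_rpm":
            cmd.append("-R")        # scan RPM archives
        else:
            cmd.append("-U")        # scan deb/ipk archives
        cmd.append(feed_dir)
        return subprocess.Popen(cmd)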
diff --git a/meta/lib/oeqa/selftest/cases/devtool.py b/meta/lib/oeqa/selftest/cases/devtool.py
index 882225dde3..7d61773980 100644
--- a/meta/lib/oeqa/selftest/cases/devtool.py
+++ b/meta/lib/oeqa/selftest/cases/devtool.py
@@ -753,6 +753,25 @@ class DevtoolModifyTests(DevtoolBase):
753 result = runCmd('devtool status') 753 result = runCmd('devtool status')
754 self.assertNotIn('mdadm', result.output) 754 self.assertNotIn('mdadm', result.output)
755 755
756 def test_devtool_modify_go(self):
757 import oe.path
758 from tempfile import TemporaryDirectory
759 with TemporaryDirectory(prefix='devtoolqa') as tempdir:
760 self.track_for_cleanup(self.workspacedir)
761 self.add_command_to_tearDown('bitbake -c clean go-helloworld')
762 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
763 result = runCmd('devtool modify go-helloworld -x %s' % tempdir)
764 self.assertExists(
765 oe.path.join(tempdir, 'src', 'golang.org', 'x', 'example', 'go.mod'),
766 'Extracted source could not be found'
767 )
768 self.assertExists(
769 oe.path.join(self.workspacedir, 'conf', 'layer.conf'),
770 'Workspace directory not created'
771 )
772 matches = glob.glob(oe.path.join(self.workspacedir, 'appends', 'go-helloworld_*.bbappend'))
773 self.assertTrue(matches, 'bbappend not created %s' % result.output)
774
756 def test_devtool_buildclean(self): 775 def test_devtool_buildclean(self):
757 def assertFile(path, *paths): 776 def assertFile(path, *paths):
758 f = os.path.join(path, *paths) 777 f = os.path.join(path, *paths)
@@ -879,13 +898,8 @@ class DevtoolModifyTests(DevtoolBase):
879 self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe) 898 self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
880 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') 899 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
881 result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir)) 900 result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
882 srcfile = os.path.join(tempdir, 'oe-local-files/share/dot.bashrc') 901 srcfile = os.path.join(tempdir, 'share/dot.bashrc')
883 srclink = os.path.join(tempdir, 'share/dot.bashrc')
884 self.assertExists(srcfile, 'Extracted source could not be found') 902 self.assertExists(srcfile, 'Extracted source could not be found')
885 if os.path.islink(srclink) and os.path.exists(srclink) and os.path.samefile(srcfile, srclink):
886 correct_symlink = True
887 self.assertTrue(correct_symlink, 'Source symlink to oe-local-files is broken')
888
889 matches = glob.glob(os.path.join(self.workspacedir, 'appends', '%s_*.bbappend' % testrecipe)) 903 matches = glob.glob(os.path.join(self.workspacedir, 'appends', '%s_*.bbappend' % testrecipe))
890 self.assertTrue(matches, 'bbappend not created') 904 self.assertTrue(matches, 'bbappend not created')
891 # Test devtool status 905 # Test devtool status
@@ -956,9 +970,9 @@ class DevtoolModifyTests(DevtoolBase):
956 # others git:// in SRC_URI 970 # others git:// in SRC_URI
957        # contains a patch 971        # contains a patch
958 testrecipe = 'hello-rs' 972 testrecipe = 'hello-rs'
959 bb_vars = get_bb_vars(['SRC_URI', 'FILE', 'WORKDIR', 'CARGO_HOME'], testrecipe) 973 bb_vars = get_bb_vars(['SRC_URI', 'FILE', 'UNPACKDIR', 'CARGO_HOME'], testrecipe)
960 recipefile = bb_vars['FILE'] 974 recipefile = bb_vars['FILE']
961 workdir = bb_vars['WORKDIR'] 975 unpackdir = bb_vars['UNPACKDIR']
962 cargo_home = bb_vars['CARGO_HOME'] 976 cargo_home = bb_vars['CARGO_HOME']
963 src_uri = bb_vars['SRC_URI'].split() 977 src_uri = bb_vars['SRC_URI'].split()
964 self.assertTrue(src_uri[0].startswith('git://'), 978 self.assertTrue(src_uri[0].startswith('git://'),
@@ -1029,7 +1043,7 @@ class DevtoolModifyTests(DevtoolBase):
1029 self.assertEqual(parms['type'], 'git-dependency', 'git dependencies uri should have "type=git-dependency"') 1043 self.assertEqual(parms['type'], 'git-dependency', 'git dependencies uri should have "type=git-dependency"')
1030 raw_url = raw_url.replace("git://", '%s://' % parms['protocol']) 1044 raw_url = raw_url.replace("git://", '%s://' % parms['protocol'])
1031 patch_line = '[patch."%s"]' % raw_url 1045 patch_line = '[patch."%s"]' % raw_url
1032 path_patched = os.path.join(workdir, parms['destsuffix']) 1046 path_patched = os.path.join(unpackdir, parms['destsuffix'])
1033 path_override_line = '%s = { path = "%s" }' % (parms['name'], path_patched) 1047 path_override_line = '%s = { path = "%s" }' % (parms['name'], path_patched)
1034 # Would have been better to use tomllib to read this file :/ 1048 # Would have been better to use tomllib to read this file :/
1035 self.assertIn(patch_line, cargo_config_contents) 1049 self.assertIn(patch_line, cargo_config_contents)
@@ -1278,7 +1292,7 @@ class DevtoolUpdateTests(DevtoolBase):
1278 with open(bbappendfile, 'r') as f: 1292 with open(bbappendfile, 'r') as f:
1279 self.assertEqual(expectedlines, f.readlines()) 1293 self.assertEqual(expectedlines, f.readlines())
1280 # Drop new commit and check patch gets deleted 1294 # Drop new commit and check patch gets deleted
1281 result = runCmd('git reset HEAD^', cwd=tempsrcdir) 1295 result = runCmd('git reset HEAD^ --hard', cwd=tempsrcdir)
1282 result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir)) 1296 result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir))
1283 self.assertNotExists(patchfile, 'Patch file not deleted') 1297 self.assertNotExists(patchfile, 'Patch file not deleted')
1284 expectedlines2 = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n', 1298 expectedlines2 = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
@@ -1287,6 +1301,7 @@ class DevtoolUpdateTests(DevtoolBase):
1287 self.assertEqual(expectedlines2, f.readlines()) 1301 self.assertEqual(expectedlines2, f.readlines())
1288 # Put commit back and check we can run it if layer isn't in bblayers.conf 1302 # Put commit back and check we can run it if layer isn't in bblayers.conf
1289 os.remove(bbappendfile) 1303 os.remove(bbappendfile)
1304 result = runCmd("sed 's!\\(#define VERSION\\W*\"[^\"]*\\)\"!\\1-custom\"!' -i ReadMe.c", cwd=tempsrcdir)
1290 result = runCmd('git commit -a -m "Add our custom version"', cwd=tempsrcdir) 1305 result = runCmd('git commit -a -m "Add our custom version"', cwd=tempsrcdir)
1291 result = runCmd('bitbake-layers remove-layer %s' % templayerdir, cwd=self.builddir) 1306 result = runCmd('bitbake-layers remove-layer %s' % templayerdir, cwd=self.builddir)
1292 result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir)) 1307 result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir))
@@ -1361,7 +1376,7 @@ class DevtoolUpdateTests(DevtoolBase):
1361 with open(bbappendfile, 'r') as f: 1376 with open(bbappendfile, 'r') as f:
1362 self.assertEqual(expectedlines, set(f.readlines())) 1377 self.assertEqual(expectedlines, set(f.readlines()))
1363 # Drop new commit and check SRCREV changes 1378 # Drop new commit and check SRCREV changes
1364 result = runCmd('git reset HEAD^', cwd=tempsrcdir) 1379 result = runCmd('git reset HEAD^ --hard', cwd=tempsrcdir)
1365 result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir)) 1380 result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir))
1366 self.assertNotExists(os.path.join(appenddir, testrecipe), 'Patch directory should not be created') 1381 self.assertNotExists(os.path.join(appenddir, testrecipe), 'Patch directory should not be created')
1367 result = runCmd('git rev-parse HEAD', cwd=tempsrcdir) 1382 result = runCmd('git rev-parse HEAD', cwd=tempsrcdir)
@@ -1373,6 +1388,7 @@ class DevtoolUpdateTests(DevtoolBase):
1373 self.assertEqual(expectedlines, set(f.readlines())) 1388 self.assertEqual(expectedlines, set(f.readlines()))
1374 # Put commit back and check we can run it if layer isn't in bblayers.conf 1389 # Put commit back and check we can run it if layer isn't in bblayers.conf
1375 os.remove(bbappendfile) 1390 os.remove(bbappendfile)
1391 result = runCmd('echo "# Additional line" >> Makefile.am', cwd=tempsrcdir)
1376 result = runCmd('git commit -a -m "Change the Makefile"', cwd=tempsrcdir) 1392 result = runCmd('git commit -a -m "Change the Makefile"', cwd=tempsrcdir)
1377 result = runCmd('bitbake-layers remove-layer %s' % templayerdir, cwd=self.builddir) 1393 result = runCmd('bitbake-layers remove-layer %s' % templayerdir, cwd=self.builddir)
1378 result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir)) 1394 result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir))
@@ -1404,11 +1420,12 @@ class DevtoolUpdateTests(DevtoolBase):
1404 # Try building just to ensure we haven't broken that 1420 # Try building just to ensure we haven't broken that
1405 bitbake("%s" % testrecipe) 1421 bitbake("%s" % testrecipe)
1406 # Edit / commit local source 1422 # Edit / commit local source
1407 runCmd('echo "/* Foobar */" >> oe-local-files/makedevs.c', cwd=tempdir) 1423 runCmd('echo "/* Foobar */" >> makedevs.c', cwd=tempdir)
1408 runCmd('echo "Foo" > oe-local-files/new-local', cwd=tempdir) 1424 runCmd('echo "Foo" > new-local', cwd=tempdir)
1409 runCmd('echo "Bar" > new-file', cwd=tempdir) 1425 runCmd('echo "Bar" > new-file', cwd=tempdir)
1410 runCmd('git add new-file', cwd=tempdir) 1426 runCmd('git add new-file', cwd=tempdir)
1411 runCmd('git commit -m "Add new file"', cwd=tempdir) 1427 runCmd('git commit -m "Add new file"', cwd=tempdir)
1428 runCmd('git add new-local', cwd=tempdir)
1412 runCmd('devtool update-recipe %s' % testrecipe) 1429 runCmd('devtool update-recipe %s' % testrecipe)
1413 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)), 1430 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
1414 (' M', '.*/makedevs/makedevs.c$'), 1431 (' M', '.*/makedevs/makedevs.c$'),
@@ -1434,8 +1451,8 @@ class DevtoolUpdateTests(DevtoolBase):
1434 self.assertExists(local_file, 'File makedevs.c not created') 1451 self.assertExists(local_file, 'File makedevs.c not created')
1435 self.assertExists(patchfile, 'File new_local not created') 1452 self.assertExists(patchfile, 'File new_local not created')
1436 1453
1437 def test_devtool_update_recipe_local_files_2(self): 1454 def _test_devtool_update_recipe_local_files_2(self):
1438 """Check local source files support when oe-local-files is in Git""" 1455 """Check local source files support when editing local files in Git"""
1439 testrecipe = 'devtool-test-local' 1456 testrecipe = 'devtool-test-local'
1440 recipefile = get_bb_var('FILE', testrecipe) 1457 recipefile = get_bb_var('FILE', testrecipe)
1441 recipedir = os.path.dirname(recipefile) 1458 recipedir = os.path.dirname(recipefile)
@@ -1450,17 +1467,13 @@ class DevtoolUpdateTests(DevtoolBase):
1450 result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir)) 1467 result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
1451 # Check git repo 1468 # Check git repo
1452 self._check_src_repo(tempdir) 1469 self._check_src_repo(tempdir)
1453 # Add oe-local-files to Git
1454 runCmd('rm oe-local-files/.gitignore', cwd=tempdir)
1455 runCmd('git add oe-local-files', cwd=tempdir)
1456 runCmd('git commit -m "Add local sources"', cwd=tempdir)
1457 # Edit / commit local sources 1470 # Edit / commit local sources
1458 runCmd('echo "# Foobar" >> oe-local-files/file1', cwd=tempdir) 1471 runCmd('echo "# Foobar" >> file1', cwd=tempdir)
1459 runCmd('git commit -am "Edit existing file"', cwd=tempdir) 1472 runCmd('git commit -am "Edit existing file"', cwd=tempdir)
1460 runCmd('git rm oe-local-files/file2', cwd=tempdir) 1473 runCmd('git rm file2', cwd=tempdir)
1461 runCmd('git commit -m"Remove file"', cwd=tempdir) 1474 runCmd('git commit -m"Remove file"', cwd=tempdir)
1462 runCmd('echo "Foo" > oe-local-files/new-local', cwd=tempdir) 1475 runCmd('echo "Foo" > new-local', cwd=tempdir)
1463 runCmd('git add oe-local-files/new-local', cwd=tempdir) 1476 runCmd('git add new-local', cwd=tempdir)
1464 runCmd('git commit -m "Add new local file"', cwd=tempdir) 1477 runCmd('git commit -m "Add new local file"', cwd=tempdir)
1465 runCmd('echo "Gar" > new-file', cwd=tempdir) 1478 runCmd('echo "Gar" > new-file', cwd=tempdir)
1466 runCmd('git add new-file', cwd=tempdir) 1479 runCmd('git add new-file', cwd=tempdir)
@@ -1469,7 +1482,7 @@ class DevtoolUpdateTests(DevtoolBase):
1469 os.path.dirname(recipefile)) 1482 os.path.dirname(recipefile))
1470 # Checkout unmodified file to working copy -> devtool should still pick 1483 # Checkout unmodified file to working copy -> devtool should still pick
1471 # the modified version from HEAD 1484 # the modified version from HEAD
1472 runCmd('git checkout HEAD^ -- oe-local-files/file1', cwd=tempdir) 1485 runCmd('git checkout HEAD^ -- file1', cwd=tempdir)
1473 runCmd('devtool update-recipe %s' % testrecipe) 1486 runCmd('devtool update-recipe %s' % testrecipe)
1474 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)), 1487 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
1475 (' M', '.*/file1$'), 1488 (' M', '.*/file1$'),
@@ -1544,7 +1557,7 @@ class DevtoolUpdateTests(DevtoolBase):
1544 # (don't bother with cleaning the recipe on teardown, we won't be building it) 1557 # (don't bother with cleaning the recipe on teardown, we won't be building it)
1545 result = runCmd('devtool modify %s' % testrecipe) 1558 result = runCmd('devtool modify %s' % testrecipe)
1546 # Modify one file 1559 # Modify one file
1547 runCmd('echo "Another line" >> file2', cwd=os.path.join(self.workspacedir, 'sources', testrecipe, 'oe-local-files')) 1560 runCmd('echo "Another line" >> file2', cwd=os.path.join(self.workspacedir, 'sources', testrecipe))
1548 self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile))) 1561 self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile)))
1549 result = runCmd('devtool update-recipe %s' % testrecipe) 1562 result = runCmd('devtool update-recipe %s' % testrecipe)
1550 expected_status = [(' M', '.*/%s/file2$' % testrecipe)] 1563 expected_status = [(' M', '.*/%s/file2$' % testrecipe)]
@@ -2004,6 +2017,52 @@ class DevtoolUpgradeTests(DevtoolBase):
2004 newlines = f.readlines() 2017 newlines = f.readlines()
2005 self.assertEqual(desiredlines, newlines) 2018 self.assertEqual(desiredlines, newlines)
2006 2019
2020 def test_devtool_upgrade_recipe_upgrade_extra_tasks(self):
2021 # Check preconditions
2022 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
2023 self.track_for_cleanup(self.workspacedir)
2024 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
2025 recipe = 'python3-guessing-game'
2026 version = '0.2.0'
2027 commit = '40cf004c2772ffa20ea803fa3be1528a75be3e98'
2028 oldrecipefile = get_bb_var('FILE', recipe)
2029 oldcratesincfile = os.path.join(os.path.dirname(oldrecipefile), os.path.basename(oldrecipefile).strip('_git.bb') + '-crates.inc')
2030 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
2031 self.track_for_cleanup(tempdir)
2032 # Check that recipe is not already under devtool control
2033 result = runCmd('devtool status')
2034 self.assertNotIn(recipe, result.output)
2035 # Check upgrade
2036 result = runCmd('devtool upgrade %s %s --version %s --srcrev %s' % (recipe, tempdir, version, commit))
2037 # Check if srctree at least is populated
2038 self.assertTrue(len(os.listdir(tempdir)) > 0, 'srctree (%s) should be populated with new (%s) source code' % (tempdir, commit))
2039 # Check new recipe file and new -crates.inc files are present
2040 newrecipefile = os.path.join(self.workspacedir, 'recipes', recipe, os.path.basename(oldrecipefile))
2041 newcratesincfile = os.path.join(self.workspacedir, 'recipes', recipe, os.path.basename(oldcratesincfile))
2042 self.assertExists(newrecipefile, 'Recipe file should exist after upgrade')
2043 self.assertExists(newcratesincfile, 'Recipe crates.inc file should exist after upgrade')
2044 # Check devtool status and make sure recipe is present
2045 result = runCmd('devtool status')
2046 self.assertIn(recipe, result.output)
2047 self.assertIn(tempdir, result.output)
2048 # Check recipe got changed as expected
2049 with open(oldrecipefile + '.upgraded', 'r') as f:
2050 desiredlines = f.readlines()
2051 with open(newrecipefile, 'r') as f:
2052 newlines = f.readlines()
2053 self.assertEqual(desiredlines, newlines)
2054 # Check crates.inc got changed as expected
2055 with open(oldcratesincfile + '.upgraded', 'r') as f:
2056 desiredlines = f.readlines()
2057 with open(newcratesincfile, 'r') as f:
2058 newlines = f.readlines()
2059 self.assertEqual(desiredlines, newlines)
2060 # Check devtool reset recipe
2061 result = runCmd('devtool reset %s -n' % recipe)
2062 result = runCmd('devtool status')
2063 self.assertNotIn(recipe, result.output)
2064 self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after resetting')
2065
2007 def test_devtool_layer_plugins(self): 2066 def test_devtool_layer_plugins(self):
2008 """Test that devtool can use plugins from other layers. 2067 """Test that devtool can use plugins from other layers.
2009 2068
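Several of the hunks above switch path calculations from WORKDIR to UNPACKDIR, e.g. for the cargo [patch] overrides. A hedged sketch of resolving git-dependency destinations the same way; bb.fetch2.decodeurl and the name/destsuffix/type parameters are taken from the surrounding test, while the helper itself is hypothetical:

    import os
    import bb.fetch2

    def git_dependency_paths(src_uri, unpackdir):
        # Map each git-dependency SRC_URI entry to where it is unpacked.
        paths = {}
        for uri in src_uri:
            _, _, _, _, _, parms = bb.fetch2.decodeurl(uri)
            if parms.get('type') == 'git-dependency':
                # Sources now land under UNPACKDIR rather than WORKDIR.
                paths[parms['name']] = os.path.join(unpackdir, parms['destsuffix'])
        return paths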
diff --git a/meta/lib/oeqa/selftest/cases/distrodata.py b/meta/lib/oeqa/selftest/cases/distrodata.py
index ad952c004b..7771a42e2b 100644
--- a/meta/lib/oeqa/selftest/cases/distrodata.py
+++ b/meta/lib/oeqa/selftest/cases/distrodata.py
@@ -20,10 +20,10 @@ class Distrodata(OESelftestTestCase):
20 feature = 'LICENSE_FLAGS_ACCEPTED += " commercial"\n' 20 feature = 'LICENSE_FLAGS_ACCEPTED += " commercial"\n'
21 self.write_config(feature) 21 self.write_config(feature)
22 22
23 pkgs = oe.recipeutils.get_recipe_upgrade_status() 23 pkggroups = oe.recipeutils.get_recipe_upgrade_status()
24 24
25 regressed_failures = [pkg[0] for pkg in pkgs if pkg[1] == 'UNKNOWN_BROKEN'] 25 regressed_failures = [pkg['pn'] for pkgs in pkggroups for pkg in pkgs if pkg['status'] == 'UNKNOWN_BROKEN']
26 regressed_successes = [pkg[0] for pkg in pkgs if pkg[1] == 'KNOWN_BROKEN'] 26 regressed_successes = [pkg['pn'] for pkgs in pkggroups for pkg in pkgs if pkg['status'] == 'KNOWN_BROKEN']
27 msg = "" 27 msg = ""
28 if len(regressed_failures) > 0: 28 if len(regressed_failures) > 0:
29 msg = msg + """ 29 msg = msg + """
@@ -55,7 +55,7 @@ but their recipes claim otherwise by setting UPSTREAM_VERSION_UNKNOWN. Please re
55 return False 55 return False
56 56
57 def is_maintainer_exception(entry): 57 def is_maintainer_exception(entry):
58 exceptions = ["musl", "newlib", "linux-yocto", "linux-dummy", "mesa-gl", "libgfortran", "libx11-compose-data", 58 exceptions = ["musl", "newlib", "picolibc", "linux-yocto", "linux-dummy", "mesa-gl", "libgfortran", "libx11-compose-data",
59 "cve-update-nvd2-native",] 59 "cve-update-nvd2-native",]
60 for i in exceptions: 60 for i in exceptions:
61 if i in entry: 61 if i in entry:
@@ -115,3 +115,15 @@ The list of oe-core recipes with maintainers is empty. This may indicate that th
115 self.fail(""" 115 self.fail("""
116Unable to find recipes for the following entries in maintainers.inc: 116Unable to find recipes for the following entries in maintainers.inc:
117""" + "\n".join(['%s' % i for i in missing_recipes])) 117""" + "\n".join(['%s' % i for i in missing_recipes]))
118
119 def test_common_include_recipes(self):
120 """
121 Summary: Test that obtaining recipes that share includes between them returns a sane result
122 Expected: At least cmake and qemu entries are present in the output
123 Product: oe-core
124 Author: Alexander Kanavin <alex.kanavin@gmail.com>
125 """
126 recipes = oe.recipeutils.get_common_include_recipes()
127
128 self.assertIn({'qemu-system-native', 'qemu', 'qemu-native'}, recipes)
129 self.assertIn({'cmake-native', 'cmake'}, recipes)
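The updated comprehensions reflect that oe.recipeutils.get_recipe_upgrade_status() now returns groups of per-recipe dictionaries (keyed at least by 'pn' and 'status') instead of flat tuples. A short sketch of consuming that structure, equivalent to the two comprehensions above:

    import oe.recipeutils

    def collect_upgrade_regressions():
        regressed_failures = []
        regressed_successes = []
        for pkgs in oe.recipeutils.get_recipe_upgrade_status():
            # Each group is a batch of related recipes (grouping criteria assumed).
            for pkg in pkgs:
                if pkg['status'] == 'UNKNOWN_BROKEN':
                    regressed_failures.append(pkg['pn'])
                elif pkg['status'] == 'KNOWN_BROKEN':
                    regressed_successes.append(pkg['pn'])
        return regressed_failures, regressed_successes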
diff --git a/meta/lib/oeqa/selftest/cases/fitimage.py b/meta/lib/oeqa/selftest/cases/fitimage.py
index 347c065377..0b5f4602fb 100644
--- a/meta/lib/oeqa/selftest/cases/fitimage.py
+++ b/meta/lib/oeqa/selftest/cases/fitimage.py
@@ -11,6 +11,51 @@ import re
11 11
12class FitImageTests(OESelftestTestCase): 12class FitImageTests(OESelftestTestCase):
13 13
14 def _setup_uboot_tools_native(self):
15 """build u-boot-tools-native and return RECIPE_SYSROOT_NATIVE"""
16 bitbake("u-boot-tools-native -c addto_recipe_sysroot")
17 return get_bb_var('RECIPE_SYSROOT_NATIVE', 'u-boot-tools-native')
18
19 def _verify_fit_image_signature(self, uboot_tools_sysroot_native, fitimage_path, dtb_path, conf_name=None):
20        """Verify the signature of a FIT configuration
21
22 The fit_check_sign utility from u-boot-tools-native is called.
23 uboot-fit_check_sign -f fitImage -k $dtb_name -c conf-$dtb_name
24 """
25 fit_check_sign_path = os.path.join(uboot_tools_sysroot_native, 'usr', 'bin', 'uboot-fit_check_sign')
26 cmd = '%s -f %s -k %s' % (fit_check_sign_path, fitimage_path, dtb_path)
27 if conf_name:
28 cmd += ' -c %s' % conf_name
29 result = runCmd(cmd)
30 self.logger.debug("%s\nreturned: %s\n%s", cmd, str(result.status), result.output)
31 self.assertIn("Signature check OK", result.output)
32
33 @staticmethod
34 def _find_string_in_bin_file(file_path, search_string):
35        """find strings in a binary file
36
37 Shell equivalent: strings "$1" | grep "$2" | wc -l
38 return number of matches
39 """
40 found_positions = 0
41 with open(file_path, 'rb') as file:
42 byte = file.read(1)
43 current_position = 0
44 current_match = 0
45 while byte:
46 char = byte.decode('ascii', errors='ignore')
47 if char == search_string[current_match]:
48 current_match += 1
49 if current_match == len(search_string):
50 found_positions += 1
51 current_match = 0
52 else:
53 current_match = 0
54 current_position += 1
55 byte = file.read(1)
56 return found_positions
57
58
14 def test_fit_image(self): 59 def test_fit_image(self):
15 """ 60 """
16 Summary: Check if FIT image and Image Tree Source (its) are built 61 Summary: Check if FIT image and Image Tree Source (its) are built
@@ -53,10 +98,8 @@ FIT_DESC = "A model description"
53 fitimage_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], 98 fitimage_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],
54 "fitImage-%s-%s" % (bb_vars['INITRAMFS_IMAGE_NAME'], bb_vars['KERNEL_FIT_LINK_NAME'])) 99 "fitImage-%s-%s" % (bb_vars['INITRAMFS_IMAGE_NAME'], bb_vars['KERNEL_FIT_LINK_NAME']))
55 100
56 self.assertTrue(os.path.exists(fitimage_its_path), 101 self.assertExists(fitimage_its_path, "%s image tree source doesn't exist" % (fitimage_its_path))
57 "%s image tree source doesn't exist" % (fitimage_its_path)) 102 self.assertExists(fitimage_path, "%s FIT image doesn't exist" % (fitimage_path))
58 self.assertTrue(os.path.exists(fitimage_path),
59 "%s FIT image doesn't exist" % (fitimage_path))
60 103
61 # Check that the type, load address, entrypoint address and default 104 # Check that the type, load address, entrypoint address and default
62 # values for kernel and ramdisk in Image Tree Source are as expected. 105 # values for kernel and ramdisk in Image Tree Source are as expected.
@@ -108,19 +151,21 @@ FIT_DESC = "A model description"
108 Author: Paul Eggleton <paul.eggleton@microsoft.com> based upon 151 Author: Paul Eggleton <paul.eggleton@microsoft.com> based upon
109 work by Usama Arif <usama.arif@arm.com> 152 work by Usama Arif <usama.arif@arm.com>
110 """ 153 """
154 a_comment = "a smart comment"
111 config = """ 155 config = """
112# Enable creation of fitImage 156# Enable creation of fitImage
113MACHINE = "beaglebone-yocto" 157MACHINE = "beaglebone-yocto"
114KERNEL_IMAGETYPES += " fitImage " 158KERNEL_IMAGETYPES += " fitImage "
115KERNEL_CLASSES = " kernel-fitimage test-mkimage-wrapper " 159KERNEL_CLASSES = " kernel-fitimage "
116UBOOT_SIGN_ENABLE = "1" 160UBOOT_SIGN_ENABLE = "1"
117FIT_GENERATE_KEYS = "1" 161FIT_GENERATE_KEYS = "1"
118UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys" 162UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
119UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest" 163UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest"
120UBOOT_SIGN_KEYNAME = "cfg-oe-selftest" 164UBOOT_SIGN_KEYNAME = "cfg-oe-selftest"
121FIT_SIGN_INDIVIDUAL = "1" 165FIT_SIGN_INDIVIDUAL = "1"
122UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'" 166UBOOT_MKIMAGE_SIGN_ARGS = "-c '%s'"
123""" 167""" % a_comment
168
124 self.write_config(config) 169 self.write_config(config)
125 170
126 # fitImage is created as part of linux recipe 171 # fitImage is created as part of linux recipe
@@ -133,10 +178,8 @@ UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'"
133 fitimage_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], 178 fitimage_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],
134 "fitImage-%s.bin" % (bb_vars['KERNEL_FIT_LINK_NAME'])) 179 "fitImage-%s.bin" % (bb_vars['KERNEL_FIT_LINK_NAME']))
135 180
136 self.assertTrue(os.path.exists(fitimage_its_path), 181 self.assertExists(fitimage_its_path, "%s image tree source doesn't exist" % (fitimage_its_path))
137 "%s image tree source doesn't exist" % (fitimage_its_path)) 182 self.assertExists(fitimage_path, "%s FIT image doesn't exist" % (fitimage_path))
138 self.assertTrue(os.path.exists(fitimage_path),
139 "%s FIT image doesn't exist" % (fitimage_path))
140 183
141 req_itspaths = [ 184 req_itspaths = [
142 ['/', 'images', 'kernel-1'], 185 ['/', 'images', 'kernel-1'],
@@ -195,10 +238,8 @@ UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'"
195 self.assertEqual(value, reqvalue) 238 self.assertEqual(value, reqvalue)
196 239
197 # Dump the image to see if it really got signed 240 # Dump the image to see if it really got signed
198 bitbake("u-boot-tools-native -c addto_recipe_sysroot") 241 uboot_tools_sysroot_native = self._setup_uboot_tools_native()
199 result = runCmd('bitbake -e u-boot-tools-native | grep ^RECIPE_SYSROOT_NATIVE=') 242 dumpimage_path = os.path.join(uboot_tools_sysroot_native, 'usr', 'bin', 'dumpimage')
200 recipe_sysroot_native = result.output.split('=')[1].strip('"')
201 dumpimage_path = os.path.join(recipe_sysroot_native, 'usr', 'bin', 'dumpimage')
202 result = runCmd('%s -l %s' % (dumpimage_path, fitimage_path)) 243 result = runCmd('%s -l %s' % (dumpimage_path, fitimage_path))
203 in_signed = None 244 in_signed = None
204 signed_sections = {} 245 signed_sections = {}
@@ -224,17 +265,15 @@ UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'"
224 value = values.get('Sign value', None) 265 value = values.get('Sign value', None)
225 self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section) 266 self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section)
226 267
227        # Check for UBOOT_MKIMAGE_SIGN_ARGS 268        # Search for the string passed to mkimage: 1 kernel + 3 DTBs + 1 config per DTB = 7 sections
228        result = runCmd('bitbake -e virtual/kernel | grep ^T=') 269        # Looks like mkimage supports adding a comment but does not support reading it back.
229 tempdir = result.output.split('=', 1)[1].strip().strip('') 270 found_comments = FitImageTests._find_string_in_bin_file(fitimage_path, a_comment)
230        result = runCmd('grep "a smart comment" %s/run.do_assemble_fitimage' % tempdir, ignore_status=True) 271        self.assertEqual(found_comments, 7, "Expected 7 signed and commented sections in the fitImage.")
231 self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN_ARGS value did not get used')
232 272
233 # Check for evidence of test-mkimage-wrapper class 273 # Verify the signature for all configurations = DTBs
234 result = runCmd('grep "### uboot-mkimage wrapper message" %s/log.do_assemble_fitimage' % tempdir, ignore_status=True) 274 for dtb in ['am335x-bone.dtb', 'am335x-boneblack.dtb', 'am335x-bonegreen.dtb']:
235 self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE did not work') 275 self._verify_fit_image_signature(uboot_tools_sysroot_native, fitimage_path,
236 result = runCmd('grep "### uboot-mkimage signing wrapper message" %s/log.do_assemble_fitimage' % tempdir, ignore_status=True) 276 os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], dtb), 'conf-' + dtb)
237 self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN did not work')
238 277
239 def test_uboot_fit_image(self): 278 def test_uboot_fit_image(self):
240 """ 279 """
@@ -287,10 +326,8 @@ FIT_SIGN_INDIVIDUAL = "1"
287 fitimage_path = os.path.join(deploy_dir_image, 326 fitimage_path = os.path.join(deploy_dir_image,
288 "u-boot-fitImage-%s" % (machine,)) 327 "u-boot-fitImage-%s" % (machine,))
289 328
290 self.assertTrue(os.path.exists(fitimage_its_path), 329 self.assertExists(fitimage_its_path, "%s image tree source doesn't exist" % (fitimage_its_path))
291 "%s image tree source doesn't exist" % (fitimage_its_path)) 330 self.assertExists(fitimage_path, "%s FIT image doesn't exist" % (fitimage_path))
292 self.assertTrue(os.path.exists(fitimage_path),
293 "%s FIT image doesn't exist" % (fitimage_path))
294 331
295 # Check that the type, load address, entrypoint address and default 332 # Check that the type, load address, entrypoint address and default
296 # values for kernel and ramdisk in Image Tree Source are as expected. 333 # values for kernel and ramdisk in Image Tree Source are as expected.
@@ -351,7 +388,6 @@ UBOOT_ENTRYPOINT = "0x80080000"
351UBOOT_FIT_DESC = "A model description" 388UBOOT_FIT_DESC = "A model description"
352KERNEL_IMAGETYPES += " fitImage " 389KERNEL_IMAGETYPES += " fitImage "
353KERNEL_CLASSES = " kernel-fitimage " 390KERNEL_CLASSES = " kernel-fitimage "
354INHERIT += "test-mkimage-wrapper"
355UBOOT_SIGN_ENABLE = "1" 391UBOOT_SIGN_ENABLE = "1"
356FIT_GENERATE_KEYS = "1" 392FIT_GENERATE_KEYS = "1"
357UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys" 393UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
@@ -372,10 +408,8 @@ UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart U-Boot comment'"
372 fitimage_path = os.path.join(deploy_dir_image, 408 fitimage_path = os.path.join(deploy_dir_image,
373 "u-boot-fitImage-%s" % (machine,)) 409 "u-boot-fitImage-%s" % (machine,))
374 410
375 self.assertTrue(os.path.exists(fitimage_its_path), 411 self.assertExists(fitimage_its_path, "%s image tree source doesn't exist" % (fitimage_its_path))
376 "%s image tree source doesn't exist" % (fitimage_its_path)) 412 self.assertExists(fitimage_path, "%s FIT image doesn't exist" % (fitimage_path))
377 self.assertTrue(os.path.exists(fitimage_path),
378 "%s FIT image doesn't exist" % (fitimage_path))
379 413
380 # Check that the type, load address, entrypoint address and default 414 # Check that the type, load address, entrypoint address and default
381 # values for kernel and ramdisk in Image Tree Source are as expected. 415 # values for kernel and ramdisk in Image Tree Source are as expected.
@@ -425,6 +459,7 @@ UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart U-Boot comment'"
425 work by Paul Eggleton <paul.eggleton@microsoft.com> and 459 work by Paul Eggleton <paul.eggleton@microsoft.com> and
426 Usama Arif <usama.arif@arm.com> 460 Usama Arif <usama.arif@arm.com>
427 """ 461 """
462 a_comment = "a smart U-Boot comment"
428 config = """ 463 config = """
429 # There's no U-boot defconfig with CONFIG_FIT_SIGNATURE yet, so we need at 464 # There's no U-boot defconfig with CONFIG_FIT_SIGNATURE yet, so we need at
430# least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set 465# least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
@@ -434,7 +469,6 @@ SPL_BINARY = "MLO"
434# The kernel-fitimage class is a dependency even if we're only 469# The kernel-fitimage class is a dependency even if we're only
435# creating/signing the U-Boot fitImage 470# creating/signing the U-Boot fitImage
436KERNEL_CLASSES = " kernel-fitimage" 471KERNEL_CLASSES = " kernel-fitimage"
437INHERIT += "test-mkimage-wrapper"
438# Enable creation and signing of the U-Boot fitImage 472# Enable creation and signing of the U-Boot fitImage
439UBOOT_FITIMAGE_ENABLE = "1" 473UBOOT_FITIMAGE_ENABLE = "1"
440SPL_SIGN_ENABLE = "1" 474SPL_SIGN_ENABLE = "1"
@@ -446,17 +480,17 @@ UBOOT_LOADADDRESS = "0x80000000"
446UBOOT_DTB_LOADADDRESS = "0x82000000" 480UBOOT_DTB_LOADADDRESS = "0x82000000"
447UBOOT_ARCH = "arm" 481UBOOT_ARCH = "arm"
448SPL_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000" 482SPL_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
449SPL_MKIMAGE_SIGN_ARGS = "-c 'a smart U-Boot comment'" 483SPL_MKIMAGE_SIGN_ARGS = "-c '%s'"
450UBOOT_EXTLINUX = "0" 484UBOOT_EXTLINUX = "0"
451UBOOT_FIT_GENERATE_KEYS = "1" 485UBOOT_FIT_GENERATE_KEYS = "1"
452UBOOT_FIT_HASH_ALG = "sha256" 486UBOOT_FIT_HASH_ALG = "sha256"
453""" 487""" % a_comment
488
454 self.write_config(config) 489 self.write_config(config)
455 490
456 # The U-Boot fitImage is created as part of the U-Boot recipe 491 # The U-Boot fitImage is created as part of the U-Boot recipe
457 bitbake("virtual/bootloader") 492 bitbake("virtual/bootloader")
458 493
459 image_type = "core-image-minimal"
460 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') 494 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
461 machine = get_bb_var('MACHINE') 495 machine = get_bb_var('MACHINE')
462 fitimage_its_path = os.path.join(deploy_dir_image, 496 fitimage_its_path = os.path.join(deploy_dir_image,
@@ -464,10 +498,8 @@ UBOOT_FIT_HASH_ALG = "sha256"
464 fitimage_path = os.path.join(deploy_dir_image, 498 fitimage_path = os.path.join(deploy_dir_image,
465 "u-boot-fitImage-%s" % (machine,)) 499 "u-boot-fitImage-%s" % (machine,))
466 500
467 self.assertTrue(os.path.exists(fitimage_its_path), 501 self.assertExists(fitimage_its_path, "%s image tree source doesn't exist" % (fitimage_its_path))
468 "%s image tree source doesn't exist" % (fitimage_its_path)) 502 self.assertExists(fitimage_path, "%s FIT image doesn't exist" % (fitimage_path))
469 self.assertTrue(os.path.exists(fitimage_path),
470 "%s FIT image doesn't exist" % (fitimage_path))
471 503
472 req_itspaths = [ 504 req_itspaths = [
473 ['/', 'images', 'uboot'], 505 ['/', 'images', 'uboot'],
@@ -516,10 +548,8 @@ UBOOT_FIT_HASH_ALG = "sha256"
516 self.assertEqual(value, reqvalue) 548 self.assertEqual(value, reqvalue)
517 549
518 # Dump the image to see if it really got signed 550 # Dump the image to see if it really got signed
519 bitbake("u-boot-tools-native -c addto_recipe_sysroot") 551 uboot_tools_sysroot_native = self._setup_uboot_tools_native()
520 result = runCmd('bitbake -e u-boot-tools-native | grep ^RECIPE_SYSROOT_NATIVE=') 552 dumpimage_path = os.path.join(uboot_tools_sysroot_native, 'usr', 'bin', 'dumpimage')
521 recipe_sysroot_native = result.output.split('=')[1].strip('"')
522 dumpimage_path = os.path.join(recipe_sysroot_native, 'usr', 'bin', 'dumpimage')
523 result = runCmd('%s -l %s' % (dumpimage_path, fitimage_path)) 553 result = runCmd('%s -l %s' % (dumpimage_path, fitimage_path))
524 in_signed = None 554 in_signed = None
525 signed_sections = {} 555 signed_sections = {}
@@ -542,16 +572,14 @@ UBOOT_FIT_HASH_ALG = "sha256"
542 self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section) 572 self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section)
543 573
544 # Check for SPL_MKIMAGE_SIGN_ARGS 574 # Check for SPL_MKIMAGE_SIGN_ARGS
545        result = runCmd('bitbake -e virtual/bootloader | grep ^T=') 575        # Looks like mkimage supports adding a comment but does not support reading it back.
546 tempdir = result.output.split('=', 1)[1].strip().strip('') 576 found_comments = FitImageTests._find_string_in_bin_file(fitimage_path, a_comment)
547        result = runCmd('grep "a smart U-Boot comment" %s/run.do_uboot_assemble_fitimage' % tempdir, ignore_status=True) 577        self.assertEqual(found_comments, 2, "Expected 2 signed and commented sections in the fitImage.")
548 self.assertEqual(result.status, 0, 'SPL_MKIMAGE_SIGN_ARGS value did not get used') 578
579 # Verify the signature
580 self._verify_fit_image_signature(uboot_tools_sysroot_native, fitimage_path,
581 os.path.join(deploy_dir_image, 'u-boot-spl.dtb'))
549 582
550 # Check for evidence of test-mkimage-wrapper class
551 result = runCmd('grep "### uboot-mkimage wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
552 self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE did not work')
553 result = runCmd('grep "### uboot-mkimage signing wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
554 self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN did not work')
555 583
556 def test_sign_cascaded_uboot_fit_image(self): 584 def test_sign_cascaded_uboot_fit_image(self):
557 """ 585 """
@@ -573,6 +601,7 @@ UBOOT_FIT_HASH_ALG = "sha256"
573 work by Paul Eggleton <paul.eggleton@microsoft.com> and 601 work by Paul Eggleton <paul.eggleton@microsoft.com> and
574 Usama Arif <usama.arif@arm.com> 602 Usama Arif <usama.arif@arm.com>
575 """ 603 """
604 a_comment = "a smart cascaded U-Boot comment"
576 config = """ 605 config = """
577 # There's no U-boot defconfig with CONFIG_FIT_SIGNATURE yet, so we need at 606 # There's no U-boot defconfig with CONFIG_FIT_SIGNATURE yet, so we need at
578# least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set 607# least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
@@ -588,7 +617,7 @@ UBOOT_DTB_BINARY = "u-boot.dtb"
588UBOOT_ENTRYPOINT = "0x80000000" 617UBOOT_ENTRYPOINT = "0x80000000"
589UBOOT_LOADADDRESS = "0x80000000" 618UBOOT_LOADADDRESS = "0x80000000"
590UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000" 619UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
591UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart cascaded Kernel comment'" 620UBOOT_MKIMAGE_SIGN_ARGS = "-c '%s'"
592UBOOT_DTB_LOADADDRESS = "0x82000000" 621UBOOT_DTB_LOADADDRESS = "0x82000000"
593UBOOT_ARCH = "arm" 622UBOOT_ARCH = "arm"
594SPL_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000" 623SPL_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
@@ -598,20 +627,18 @@ UBOOT_FIT_GENERATE_KEYS = "1"
598UBOOT_FIT_HASH_ALG = "sha256" 627UBOOT_FIT_HASH_ALG = "sha256"
599KERNEL_IMAGETYPES += " fitImage " 628KERNEL_IMAGETYPES += " fitImage "
600KERNEL_CLASSES = " kernel-fitimage " 629KERNEL_CLASSES = " kernel-fitimage "
601INHERIT += "test-mkimage-wrapper"
602UBOOT_SIGN_ENABLE = "1" 630UBOOT_SIGN_ENABLE = "1"
603FIT_GENERATE_KEYS = "1" 631FIT_GENERATE_KEYS = "1"
604UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys" 632UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
605UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest" 633UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest"
606UBOOT_SIGN_KEYNAME = "cfg-oe-selftest" 634UBOOT_SIGN_KEYNAME = "cfg-oe-selftest"
607FIT_SIGN_INDIVIDUAL = "1" 635FIT_SIGN_INDIVIDUAL = "1"
608""" 636""" % a_comment
609 self.write_config(config) 637 self.write_config(config)
610 638
611 # The U-Boot fitImage is created as part of the U-Boot recipe 639 # The U-Boot fitImage is created as part of the U-Boot recipe
612 bitbake("virtual/bootloader") 640 bitbake("virtual/bootloader")
613 641
614 image_type = "core-image-minimal"
615 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') 642 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
616 machine = get_bb_var('MACHINE') 643 machine = get_bb_var('MACHINE')
617 fitimage_its_path = os.path.join(deploy_dir_image, 644 fitimage_its_path = os.path.join(deploy_dir_image,
@@ -619,10 +646,8 @@ FIT_SIGN_INDIVIDUAL = "1"
619 fitimage_path = os.path.join(deploy_dir_image, 646 fitimage_path = os.path.join(deploy_dir_image,
620 "u-boot-fitImage-%s" % (machine,)) 647 "u-boot-fitImage-%s" % (machine,))
621 648
622 self.assertTrue(os.path.exists(fitimage_its_path), 649 self.assertExists(fitimage_its_path, "%s image tree source doesn't exist" % (fitimage_its_path))
623 "%s image tree source doesn't exist" % (fitimage_its_path)) 650 self.assertExists(fitimage_path, "%s FIT image doesn't exist" % (fitimage_path))
624 self.assertTrue(os.path.exists(fitimage_path),
625 "%s FIT image doesn't exist" % (fitimage_path))
626 651
627 req_itspaths = [ 652 req_itspaths = [
628 ['/', 'images', 'uboot'], 653 ['/', 'images', 'uboot'],
@@ -671,10 +696,8 @@ FIT_SIGN_INDIVIDUAL = "1"
671 self.assertEqual(value, reqvalue) 696 self.assertEqual(value, reqvalue)
672 697
673 # Dump the image to see if it really got signed 698 # Dump the image to see if it really got signed
674 bitbake("u-boot-tools-native -c addto_recipe_sysroot") 699 uboot_tools_sysroot_native = self._setup_uboot_tools_native()
675 result = runCmd('bitbake -e u-boot-tools-native | grep ^RECIPE_SYSROOT_NATIVE=') 700 dumpimage_path = os.path.join(uboot_tools_sysroot_native, 'usr', 'bin', 'dumpimage')
676 recipe_sysroot_native = result.output.split('=')[1].strip('"')
677 dumpimage_path = os.path.join(recipe_sysroot_native, 'usr', 'bin', 'dumpimage')
678 result = runCmd('%s -l %s' % (dumpimage_path, fitimage_path)) 701 result = runCmd('%s -l %s' % (dumpimage_path, fitimage_path))
679 in_signed = None 702 in_signed = None
680 signed_sections = {} 703 signed_sections = {}
@@ -697,17 +720,13 @@ FIT_SIGN_INDIVIDUAL = "1"
697 self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section) 720 self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section)
698 721
699 # Check for SPL_MKIMAGE_SIGN_ARGS 722 # Check for SPL_MKIMAGE_SIGN_ARGS
700        result = runCmd('bitbake -e virtual/bootloader | grep ^T=') 723        # Looks like mkimage supports adding a comment but does not support reading it back.
701 tempdir = result.output.split('=', 1)[1].strip().strip('') 724 found_comments = FitImageTests._find_string_in_bin_file(fitimage_path, a_comment)
702        result = runCmd('grep "a smart cascaded U-Boot comment" %s/run.do_uboot_assemble_fitimage' % tempdir, ignore_status=True) 725        self.assertEqual(found_comments, 2, "Expected 2 signed and commented sections in the fitImage.")
703 self.assertEqual(result.status, 0, 'SPL_MKIMAGE_SIGN_ARGS value did not get used')
704
705 # Check for evidence of test-mkimage-wrapper class
706 result = runCmd('grep "### uboot-mkimage wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
707 self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE did not work')
708 result = runCmd('grep "### uboot-mkimage signing wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
709 self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN did not work')
710 726
727 # Verify the signature
728 self._verify_fit_image_signature(uboot_tools_sysroot_native, fitimage_path,
729 os.path.join(deploy_dir_image, 'u-boot-spl.dtb'))
711 730
712 731
713 def test_initramfs_bundle(self): 732 def test_initramfs_bundle(self):
@@ -755,24 +774,24 @@ FIT_HASH_ALG = "sha256"
755 # fitImage is created as part of linux recipe 774 # fitImage is created as part of linux recipe
756 bitbake("virtual/kernel") 775 bitbake("virtual/kernel")
757 776
758 image_type = get_bb_var('INITRAMFS_IMAGE') 777 bb_vars = get_bb_vars([
759 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') 778 'DEPLOY_DIR_IMAGE',
760 machine = get_bb_var('MACHINE') 779 'FIT_HASH_ALG',
761 fitimage_its_path = os.path.join(deploy_dir_image, 780 'FIT_KERNEL_COMP_ALG',
762 "fitImage-its-%s-%s-%s" % (image_type, machine, machine)) 781 'INITRAMFS_IMAGE',
763 fitimage_path = os.path.join(deploy_dir_image,"fitImage") 782 'MACHINE',
764 783 'UBOOT_ARCH',
765 self.assertTrue(os.path.exists(fitimage_its_path), 784 'UBOOT_ENTRYPOINT',
766 "%s image tree source doesn't exist" % (fitimage_its_path)) 785 'UBOOT_LOADADDRESS',
767 self.assertTrue(os.path.exists(fitimage_path), 786 'UBOOT_MKIMAGE_KERNEL_TYPE'
768 "%s FIT image doesn't exist" % (fitimage_path)) 787 ],
788 'virtual/kernel')
789 fitimage_its_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],
790 "fitImage-its-%s-%s-%s" % (bb_vars['INITRAMFS_IMAGE'], bb_vars['MACHINE'], bb_vars['MACHINE']))
791 fitimage_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],"fitImage")
769 792
770 kernel_load = str(get_bb_var('UBOOT_LOADADDRESS')) 793 self.assertExists(fitimage_its_path, "%s image tree source doesn't exist" % (fitimage_its_path))
771 kernel_entry = str(get_bb_var('UBOOT_ENTRYPOINT')) 794 self.assertExists(fitimage_path, "%s FIT image doesn't exist" % (fitimage_path))
772 kernel_type = str(get_bb_var('UBOOT_MKIMAGE_KERNEL_TYPE'))
773 kernel_compression = str(get_bb_var('FIT_KERNEL_COMP_ALG'))
774 uboot_arch = str(get_bb_var('UBOOT_ARCH'))
775 fit_hash_alg = str(get_bb_var('FIT_HASH_ALG'))
776 795
777 its_file = open(fitimage_its_path) 796 its_file = open(fitimage_its_path)
778 797
@@ -782,31 +801,31 @@ FIT_HASH_ALG = "sha256"
782 'kernel-1 {', 801 'kernel-1 {',
783 'description = "Linux kernel";', 802 'description = "Linux kernel";',
784 'data = /incbin/("linux.bin");', 803 'data = /incbin/("linux.bin");',
785 'type = "' + kernel_type + '";', 804 'type = "' + str(bb_vars['UBOOT_MKIMAGE_KERNEL_TYPE']) + '";',
786 'arch = "' + uboot_arch + '";', 805 'arch = "' + str(bb_vars['UBOOT_ARCH']) + '";',
787 'os = "linux";', 806 'os = "linux";',
788 'compression = "' + kernel_compression + '";', 807 'compression = "' + str(bb_vars['FIT_KERNEL_COMP_ALG']) + '";',
789 'load = <' + kernel_load + '>;', 808 'load = <' + str(bb_vars['UBOOT_LOADADDRESS']) + '>;',
790 'entry = <' + kernel_entry + '>;', 809 'entry = <' + str(bb_vars['UBOOT_ENTRYPOINT']) + '>;',
791 'hash-1 {', 810 'hash-1 {',
792 'algo = "' + fit_hash_alg +'";', 811 'algo = "' + str(bb_vars['FIT_HASH_ALG']) +'";',
793 '};', 812 '};',
794 '};' 813 '};'
795 ] 814 ]
796 815
797 node_str = exp_node_lines[0] 816 node_str = exp_node_lines[0]
798 817
799 test_passed = False
800
801 print ("checking kernel node\n") 818 print ("checking kernel node\n")
819 self.assertIn(node_str, its_lines)
802 820
803 if node_str in its_lines: 821 node_start_idx = its_lines.index(node_str)
804 node_start_idx = its_lines.index(node_str) 822 node = its_lines[node_start_idx:(node_start_idx + len(exp_node_lines))]
805 node = its_lines[node_start_idx:(node_start_idx + len(exp_node_lines))] 823
806 if node == exp_node_lines: 824 # Remove the absolute path. This refers to WORKDIR which is not always predictable.
807 print("kernel node verified") 825 re_data = re.compile(r'^data = /incbin/\(.*/linux\.bin"\);$')
808 else: 826 node = [re.sub(re_data, 'data = /incbin/("linux.bin");', cfg_str) for cfg_str in node]
809 self.assertTrue(test_passed == True,"kernel node does not match expectation") 827
828 self.assertEqual(node, exp_node_lines, "kernel node does not match expectation")
810 829
811 rx_configs = re.compile("^conf-.*") 830 rx_configs = re.compile("^conf-.*")
812 its_configs = list(filter(rx_configs.match, its_lines)) 831 its_configs = list(filter(rx_configs.match, its_lines))
@@ -822,25 +841,14 @@ FIT_HASH_ALG = "sha256"
822 841
823 node = its_lines[cfg_start_idx:line_idx] 842 node = its_lines[cfg_start_idx:line_idx]
824 print("checking configuration " + cfg_str.rstrip(" {")) 843 print("checking configuration " + cfg_str.rstrip(" {"))
825 rx_desc_line = re.compile("^description.*1 Linux kernel.*") 844 rx_desc_line = re.compile(r'^description = ".*Linux kernel.*')
826 if len(list(filter(rx_desc_line.match, node))) != 1: 845 self.assertEqual(len(list(filter(rx_desc_line.match, node))), 1, "kernel keyword not found in the description line")
827 self.assertTrue(test_passed == True,"kernel keyword not found in the description line")
828 break
829 else:
830 print("kernel keyword found in the description line")
831 846
832 if 'kernel = "kernel-1";' not in node: 847 self.assertIn('kernel = "kernel-1";', node)
833 self.assertTrue(test_passed == True,"kernel line not found")
834 break
835 else:
836 print("kernel line found")
837 848
838 rx_sign_line = re.compile("^sign-images.*kernel.*") 849 rx_sign_line = re.compile(r'^sign-images = .*kernel.*')
839 if len(list(filter(rx_sign_line.match, node))) != 1: 850 self.assertEqual(len(list(filter(rx_sign_line.match, node))), 1, "kernel hash not signed")
840 self.assertTrue(test_passed == True,"kernel hash not signed")
841 break
842 else:
843 print("kernel hash signed")
844 851
845 test_passed = True 852 # Verify the signature
846 self.assertTrue(test_passed == True,"Initramfs bundle test success") 853 uboot_tools_sysroot_native = self._setup_uboot_tools_native()
854 self._verify_fit_image_signature(uboot_tools_sysroot_native, fitimage_path, os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], 'am335x-bone.dtb'))
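The rewritten kernel-node check above only compares cleanly because the generated ITS embeds an absolute, WORKDIR-dependent path in the /incbin/ line, which the test normalises before matching against exp_node_lines. A minimal standalone sketch of that normalisation step (the sample path below is illustrative only, not taken from a real build):

import re

# Expected node lines, as the test builds them from bb_vars (shortened here)
exp_node_lines = ['kernel-1 {', 'data = /incbin/("linux.bin");', '};']

# Node lines as they might appear in a generated fitImage ITS; the absolute
# path points somewhere under WORKDIR and is therefore not predictable.
node = ['kernel-1 {', 'data = /incbin/("/some/workdir/path/linux.bin");', '};']

# Rewrite the WORKDIR-dependent path to the fixed relative form, then compare.
re_data = re.compile(r'^data = /incbin/\(.*/linux\.bin"\);$')
node = [re.sub(re_data, 'data = /incbin/("linux.bin");', line) for line in node]

assert node == exp_node_lines, "kernel node does not match expectation"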
diff --git a/meta/lib/oeqa/selftest/cases/gcc.py b/meta/lib/oeqa/selftest/cases/gcc.py
index 89360178fe..4cc0894d42 100644
--- a/meta/lib/oeqa/selftest/cases/gcc.py
+++ b/meta/lib/oeqa/selftest/cases/gcc.py
@@ -37,7 +37,7 @@ class GccSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
37 features = [] 37 features = []
38 features.append('MAKE_CHECK_TARGETS = "{0}"'.format(" ".join(targets))) 38 features.append('MAKE_CHECK_TARGETS = "{0}"'.format(" ".join(targets)))
39 if ssh is not None: 39 if ssh is not None:
40 features.append('TOOLCHAIN_TEST_TARGET = "ssh"') 40 features.append('TOOLCHAIN_TEST_TARGET = "linux-ssh"')
41 features.append('TOOLCHAIN_TEST_HOST = "{0}"'.format(ssh)) 41 features.append('TOOLCHAIN_TEST_HOST = "{0}"'.format(ssh))
42 features.append('TOOLCHAIN_TEST_HOST_USER = "root"') 42 features.append('TOOLCHAIN_TEST_HOST_USER = "root"')
43 features.append('TOOLCHAIN_TEST_HOST_PORT = "22"') 43 features.append('TOOLCHAIN_TEST_HOST_PORT = "22"')
diff --git a/meta/lib/oeqa/selftest/cases/imagefeatures.py b/meta/lib/oeqa/selftest/cases/imagefeatures.py
index dc88c222bd..94d01ba116 100644
--- a/meta/lib/oeqa/selftest/cases/imagefeatures.py
+++ b/meta/lib/oeqa/selftest/cases/imagefeatures.py
@@ -250,12 +250,7 @@ USERADD_GID_TABLES += "files/static-group"
250DISTRO_FEATURES:append = " pam opengl wayland" 250DISTRO_FEATURES:append = " pam opengl wayland"
251 251
252# Switch to systemd 252# Switch to systemd
253DISTRO_FEATURES:append = " systemd usrmerge" 253INIT_MANAGER = "systemd"
254VIRTUAL-RUNTIME_init_manager = "systemd"
255VIRTUAL-RUNTIME_initscripts = ""
256VIRTUAL-RUNTIME_syslog = ""
257VIRTUAL-RUNTIME_login_manager = "shadow-base"
258DISTRO_FEATURES_BACKFILL_CONSIDERED = "sysvinit"
259 254
260# Replace busybox 255# Replace busybox
261PREFERRED_PROVIDER_virtual/base-utils = "packagegroup-core-base-utils" 256PREFERRED_PROVIDER_virtual/base-utils = "packagegroup-core-base-utils"
@@ -319,7 +314,7 @@ SKIP_RECIPE[busybox] = "Don't build this"
319 """ 314 """
320 config = """ 315 config = """
321DISTRO_FEATURES:append = " api-documentation" 316DISTRO_FEATURES:append = " api-documentation"
322CORE_IMAGE_EXTRA_INSTALL = "man-pages kmod-doc" 317CORE_IMAGE_EXTRA_INSTALL = "man-pages"
323""" 318"""
324 self.write_config(config) 319 self.write_config(config)
325 bitbake("core-image-minimal") 320 bitbake("core-image-minimal")
@@ -330,7 +325,7 @@ CORE_IMAGE_EXTRA_INSTALL = "man-pages kmod-doc"
330 self.assertEqual(status, 1, 'Failed to run apropos: %s' % (output)) 325 self.assertEqual(status, 1, 'Failed to run apropos: %s' % (output))
331 self.assertIn("iso_8859_15", output) 326 self.assertIn("iso_8859_15", output)
332 327
333 # This manpage is provided by kmod 328 # This manpage is provided by man-pages
334 status, output = qemu.run_serial("man --pager=cat modprobe") 329 status, output = qemu.run_serial("man --pager=cat intro")
335 self.assertEqual(status, 1, 'Failed to run man: %s' % (output)) 330 self.assertEqual(status, 1, 'Failed to run man: %s' % (output))
336 self.assertIn("force-modversion", output) 331 self.assertIn("introduction to user commands", output)
diff --git a/meta/lib/oeqa/selftest/cases/incompatible_lic.py b/meta/lib/oeqa/selftest/cases/incompatible_lic.py
index f4af67a239..be5484bca4 100644
--- a/meta/lib/oeqa/selftest/cases/incompatible_lic.py
+++ b/meta/lib/oeqa/selftest/cases/incompatible_lic.py
@@ -114,7 +114,7 @@ INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*"
114 114
115 def test_bash_and_license(self): 115 def test_bash_and_license(self):
116 self.disable_class("create-spdx") 116 self.disable_class("create-spdx")
117 self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " & SomeLicense"') 117 self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " & SomeLicense"\nERROR_QA:remove:pn-bash = "license-exists"')
118 error_msg = "ERROR: core-image-minimal-1.0-r0 do_rootfs: Package bash cannot be installed into the image because it has incompatible license(s): GPL-3.0-or-later" 118 error_msg = "ERROR: core-image-minimal-1.0-r0 do_rootfs: Package bash cannot be installed into the image because it has incompatible license(s): GPL-3.0-or-later"
119 119
120 result = bitbake('core-image-minimal', ignore_status=True) 120 result = bitbake('core-image-minimal', ignore_status=True)
@@ -123,12 +123,12 @@ INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*"
123 123
124 def test_bash_or_license(self): 124 def test_bash_or_license(self):
125 self.disable_class("create-spdx") 125 self.disable_class("create-spdx")
126 self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " | SomeLicense"') 126 self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " | SomeLicense"\nERROR_QA:remove:pn-bash = "license-exists"\nERROR_QA:remove:pn-core-image-minimal = "license-file-missing"')
127 127
128 bitbake('core-image-minimal') 128 bitbake('core-image-minimal')
129 129
130 def test_bash_license_exceptions(self): 130 def test_bash_license_exceptions(self):
131 self.write_config(self.default_config() + '\nINCOMPATIBLE_LICENSE_EXCEPTIONS:pn-core-image-minimal = "bash:GPL-3.0-or-later"') 131 self.write_config(self.default_config() + '\nINCOMPATIBLE_LICENSE_EXCEPTIONS:pn-core-image-minimal = "bash:GPL-3.0-or-later"\nERROR_QA:remove:pn-core-image-minimal = "license-exception"')
132 132
133 bitbake('core-image-minimal') 133 bitbake('core-image-minimal')
134 134
diff --git a/meta/lib/oeqa/selftest/cases/layerappend.py b/meta/lib/oeqa/selftest/cases/layerappend.py
index 379ed589ad..64b17117cc 100644
--- a/meta/lib/oeqa/selftest/cases/layerappend.py
+++ b/meta/lib/oeqa/selftest/cases/layerappend.py
@@ -37,7 +37,7 @@ FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"
37SRC_URI:append = " file://appendtest.txt" 37SRC_URI:append = " file://appendtest.txt"
38 38
39sysroot_stage_all:append() { 39sysroot_stage_all:append() {
40 install -m 644 ${WORKDIR}/appendtest.txt ${SYSROOT_DESTDIR}/ 40 install -m 644 ${UNPACKDIR}/appendtest.txt ${SYSROOT_DESTDIR}/
41} 41}
42 42
43""" 43"""
diff --git a/meta/lib/oeqa/selftest/cases/locales.py b/meta/lib/oeqa/selftest/cases/locales.py
index 4ca8ffb7aa..ac4888ef66 100644
--- a/meta/lib/oeqa/selftest/cases/locales.py
+++ b/meta/lib/oeqa/selftest/cases/locales.py
@@ -14,7 +14,7 @@ class LocalesTest(OESelftestTestCase):
14 features = [] 14 features = []
15 features.append('EXTRA_IMAGE_FEATURES = "empty-root-password allow-empty-password allow-root-login"') 15 features.append('EXTRA_IMAGE_FEATURES = "empty-root-password allow-empty-password allow-root-login"')
16 features.append('IMAGE_INSTALL:append = " glibc-utils localedef"') 16 features.append('IMAGE_INSTALL:append = " glibc-utils localedef"')
17 features.append('GLIBC_GENERATE_LOCALES = "en_US.UTF-8 fr_FR.UTF-8"') 17 features.append('GLIBC_GENERATE_LOCALES = "en_US.UTF-8 fr_FR.UTF-8 en_US.ISO-8859-1 de_DE.UTF-8 fr_FR.ISO-8859-1 zh_HK.BIG5-HKSCS tr_TR.UTF-8"')
18 features.append('IMAGE_LINGUAS:append = " en-us fr-fr"') 18 features.append('IMAGE_LINGUAS:append = " en-us fr-fr"')
19 if binary_enabled: 19 if binary_enabled:
20 features.append('ENABLE_BINARY_LOCALE_GENERATION = "1"') 20 features.append('ENABLE_BINARY_LOCALE_GENERATION = "1"')
diff --git a/meta/lib/oeqa/selftest/cases/meta_ide.py b/meta/lib/oeqa/selftest/cases/meta_ide.py
index ffe0d2604d..5a17ca52ea 100644
--- a/meta/lib/oeqa/selftest/cases/meta_ide.py
+++ b/meta/lib/oeqa/selftest/cases/meta_ide.py
@@ -20,8 +20,8 @@ class MetaIDE(OESelftestTestCase):
20 bitbake('meta-ide-support') 20 bitbake('meta-ide-support')
21 bitbake('build-sysroots -c build_native_sysroot') 21 bitbake('build-sysroots -c build_native_sysroot')
22 bitbake('build-sysroots -c build_target_sysroot') 22 bitbake('build-sysroots -c build_target_sysroot')
23 bb_vars = get_bb_vars(['MULTIMACH_TARGET_SYS', 'DEPLOY_DIR_IMAGE', 'COREBASE']) 23 bb_vars = get_bb_vars(['MACHINE_ARCH', 'TARGET_VENDOR', 'TARGET_OS', 'DEPLOY_DIR_IMAGE', 'COREBASE'])
24 cls.environment_script = 'environment-setup-%s' % bb_vars['MULTIMACH_TARGET_SYS'] 24 cls.environment_script = 'environment-setup-%s%s-%s' % (bb_vars['MACHINE_ARCH'], bb_vars['TARGET_VENDOR'], bb_vars['TARGET_OS'])
25 cls.deploydir = bb_vars['DEPLOY_DIR_IMAGE'] 25 cls.deploydir = bb_vars['DEPLOY_DIR_IMAGE']
26 cls.environment_script_path = '%s/%s' % (cls.deploydir, cls.environment_script) 26 cls.environment_script_path = '%s/%s' % (cls.deploydir, cls.environment_script)
27 cls.corebasedir = bb_vars['COREBASE'] 27 cls.corebasedir = bb_vars['COREBASE']
diff --git a/meta/lib/oeqa/selftest/cases/minidebuginfo.py b/meta/lib/oeqa/selftest/cases/minidebuginfo.py
index 2919f07939..a8923460f9 100644
--- a/meta/lib/oeqa/selftest/cases/minidebuginfo.py
+++ b/meta/lib/oeqa/selftest/cases/minidebuginfo.py
@@ -8,6 +8,7 @@ import subprocess
8import tempfile 8import tempfile
9import shutil 9import shutil
10 10
11from oeqa.core.decorator import OETestTag
11from oeqa.selftest.case import OESelftestTestCase 12from oeqa.selftest.case import OESelftestTestCase
12from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runCmd 13from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runCmd
13 14
@@ -42,3 +43,18 @@ IMAGE_FSTYPES = "tar.bz2"
42 native_sysroot = native_sysroot, target_sys = target_sys) 43 native_sysroot = native_sysroot, target_sys = target_sys)
43 self.assertIn(".gnu_debugdata", r.output) 44 self.assertIn(".gnu_debugdata", r.output)
44 45
46 @OETestTag("runqemu")
47 def test_minidebuginfo_qemu(self):
48 """
49 Test minidebuginfo inside qemu.
50 This runs test_systemd_coredump_minidebuginfo and any other minidebuginfo runtime tests that may be added in the future.
51 """
52
53 self.write_config("""
54DISTRO_FEATURES:append = " minidebuginfo"
55INIT_MANAGER = "systemd"
56IMAGE_CLASSES += "testimage"
57TEST_SUITES = "ping ssh systemd"
58 """)
59 bitbake('core-image-minimal')
60 bitbake('-c testimage core-image-minimal')
diff --git a/meta/lib/oeqa/selftest/cases/oescripts.py b/meta/lib/oeqa/selftest/cases/oescripts.py
index f69efccfee..bfbc33b08d 100644
--- a/meta/lib/oeqa/selftest/cases/oescripts.py
+++ b/meta/lib/oeqa/selftest/cases/oescripts.py
@@ -175,7 +175,7 @@ class OEListPackageconfigTests(OESelftestTestCase):
175 def test_packageconfig_flags_option_all(self): 175 def test_packageconfig_flags_option_all(self):
176 results = runCmd('%s/contrib/list-packageconfig-flags.py -a' % self.scripts_dir) 176 results = runCmd('%s/contrib/list-packageconfig-flags.py -a' % self.scripts_dir)
177 expected_endlines = [] 177 expected_endlines = []
178 expected_endlines.append("pinentry-1.2.1") 178 expected_endlines.append("pinentry-1.3.1")
179 expected_endlines.append("PACKAGECONFIG ncurses") 179 expected_endlines.append("PACKAGECONFIG ncurses")
180 expected_endlines.append("PACKAGECONFIG[qt] --enable-pinentry-qt, --disable-pinentry-qt, qtbase-native qtbase") 180 expected_endlines.append("PACKAGECONFIG[qt] --enable-pinentry-qt, --disable-pinentry-qt, qtbase-native qtbase")
181 expected_endlines.append("PACKAGECONFIG[gtk2] --enable-pinentry-gtk2, --disable-pinentry-gtk2, gtk+ glib-2.0") 181 expected_endlines.append("PACKAGECONFIG[gtk2] --enable-pinentry-gtk2, --disable-pinentry-gtk2, gtk+ glib-2.0")
diff --git a/meta/lib/oeqa/selftest/cases/overlayfs.py b/meta/lib/oeqa/selftest/cases/overlayfs.py
index e31063567b..580fbdcb9c 100644
--- a/meta/lib/oeqa/selftest/cases/overlayfs.py
+++ b/meta/lib/oeqa/selftest/cases/overlayfs.py
@@ -5,7 +5,7 @@
5# 5#
6 6
7from oeqa.selftest.case import OESelftestTestCase 7from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import bitbake, runqemu 8from oeqa.utils.commands import bitbake, runqemu, get_bb_vars
9from oeqa.core.decorator import OETestTag 9from oeqa.core.decorator import OETestTag
10from oeqa.core.decorator.data import skipIfNotMachine 10from oeqa.core.decorator.data import skipIfNotMachine
11 11
@@ -466,6 +466,45 @@ IMAGE_INSTALL:append = " overlayfs-user"
466 line = getline_qemu(output, "Read-only file system") 466 line = getline_qemu(output, "Read-only file system")
467 self.assertTrue(line, msg=output) 467 self.assertTrue(line, msg=output)
468 468
469 @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
470 def test_postinst_on_target_for_read_only_rootfs(self):
471 """
472 Summary: The purpose of this test case is to verify that post-installation
473 scripts deferred to run on the target are executed even with a read-only rootfs
474 when read-only-rootfs-delayed-postinsts is set
475 Expected: The test files are created on first boot
476 """
477
478 import oe.path
479
480 vars = get_bb_vars(("IMAGE_ROOTFS", "sysconfdir"), "core-image-minimal")
481 sysconfdir = vars["sysconfdir"]
482 self.assertIsNotNone(sysconfdir)
483 # Need to use oe.path here as sysconfdir starts with /
484 targettestdir = os.path.join(sysconfdir, "postinst-test")
485
486 config = self.get_working_config()
487
488 args = {
489 'OVERLAYFS_INIT_OPTION': "",
490 'OVERLAYFS_ETC_USE_ORIG_INIT_NAME': 1,
491 'OVERLAYFS_ROOTFS_TYPE': "ext4",
492 'OVERLAYFS_ETC_CREATE_MOUNT_DIRS': 1
493 }
494
495 # read-only-rootfs is already set in get_working_config()
496 config += 'EXTRA_IMAGE_FEATURES += "read-only-rootfs-delayed-postinsts"\n'
497 config += 'CORE_IMAGE_EXTRA_INSTALL = "postinst-delayed-b"\n'
498
499 self.write_config(config.format(**args))
500
501 res = bitbake('core-image-minimal')
502
503 with runqemu('core-image-minimal', image_fstype='wic') as qemu:
504 for filename in ("rootfs", "delayed-a", "delayed-b"):
505 status, output = qemu.run_serial("test -f %s && echo found" % os.path.join(targettestdir, filename))
506 self.assertIn("found", output, "%s was not present on boot" % filename)
507
469 def get_working_config(self): 508 def get_working_config(self):
470 return """ 509 return """
471# Use systemd as init manager 510# Use systemd as init manager
diff --git a/meta/lib/oeqa/selftest/cases/package.py b/meta/lib/oeqa/selftest/cases/package.py
index 1aa6c03f8a..38ed7173fe 100644
--- a/meta/lib/oeqa/selftest/cases/package.py
+++ b/meta/lib/oeqa/selftest/cases/package.py
@@ -103,11 +103,37 @@ class PackageTests(OESelftestTestCase):
103 103
104 dest = get_bb_var('PKGDEST', 'selftest-hardlink') 104 dest = get_bb_var('PKGDEST', 'selftest-hardlink')
105 bindir = get_bb_var('bindir', 'selftest-hardlink') 105 bindir = get_bb_var('bindir', 'selftest-hardlink')
106 libdir = get_bb_var('libdir', 'selftest-hardlink')
107 libexecdir = get_bb_var('libexecdir', 'selftest-hardlink')
106 108
107 def checkfiles(): 109 def checkfiles():
108 # Recipe creates 4 hardlinked files, there is a copy in package/ and a copy in packages-split/ 110 # Recipe creates 4 hardlinked files, there is a copy in package/ and a copy in packages-split/
109 # so expect 8 in total. 111 # so expect 8 in total.
110 self.assertEqual(os.stat(dest + "/selftest-hardlink" + bindir + "/hello1").st_nlink, 8) 112 self.assertEqual(os.stat(dest + "/selftest-hardlink" + bindir + "/hello1").st_nlink, 8)
113 self.assertEqual(os.stat(dest + "/selftest-hardlink" + libexecdir + "/hello3").st_nlink, 8)
114
115 # Check dbg version
116 # 2 items, a copy in both package/packages-split so 4
117 self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + bindir + "/.debug/hello1").st_nlink, 4)
118 self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libexecdir + "/.debug/hello1").st_nlink, 4)
119
120 # Even though the libexecdir name is 'hello3' or 'hello4', that isn't the debug target name
121 self.assertEqual(os.path.exists(dest + "/selftest-hardlink-dbg" + libexecdir + "/.debug/hello3"), False)
122 self.assertEqual(os.path.exists(dest + "/selftest-hardlink-dbg" + libexecdir + "/.debug/hello4"), False)
123
124 # Check the staticdev libraries
125 # 101 items, a copy in both package/packages-split so 202
126 self.assertEqual(os.stat(dest + "/selftest-hardlink-staticdev" + libdir + "/libhello.a").st_nlink, 202)
127 self.assertEqual(os.stat(dest + "/selftest-hardlink-staticdev" + libdir + "/libhello-25.a").st_nlink, 202)
128 self.assertEqual(os.stat(dest + "/selftest-hardlink-staticdev" + libdir + "/libhello-50.a").st_nlink, 202)
129 self.assertEqual(os.stat(dest + "/selftest-hardlink-staticdev" + libdir + "/libhello-75.a").st_nlink, 202)
130
131 # Check static dbg
132 # 101 items, a copy in both package/packages-split so 202
133 self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libdir + "/.debug-static/libhello.a").st_nlink, 202)
134 self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libdir + "/.debug-static/libhello-25.a").st_nlink, 202)
135 self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libdir + "/.debug-static/libhello-50.a").st_nlink, 202)
136 self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libdir + "/.debug-static/libhello-75.a").st_nlink, 202)
111 137
112 # Test a sparse file remains sparse 138 # Test a sparse file remains sparse
113 sparsestat = os.stat(dest + "/selftest-hardlink" + bindir + "/sparsetest") 139 sparsestat = os.stat(dest + "/selftest-hardlink" + bindir + "/sparsetest")
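The new hardlink assertions above follow from how st_nlink counts names for an inode: the recipe installs 4 hardlinked names, and packaging keeps a hardlinked copy of the tree in both package/ and packages-split/, so every name reports a link count of 8 (the same arithmetic gives 202 for the 101-member static library cases). A small self-contained illustration of that counting, using throwaway temporary files rather than the real packaging layout:

import os
import shutil
import tempfile

tmp = tempfile.mkdtemp()
src = os.path.join(tmp, "hello1")
with open(src, "w") as f:
    f.write("data")

# Three more hardlinked names for the same inode (4 names total, like the recipe)
for name in ("hello2", "hello3", "hello4"):
    os.link(src, os.path.join(tmp, name))

# A hardlink-copy of the tree, standing in for packages-split/
split = os.path.join(tmp, "packages-split")
os.mkdir(split)
for name in ("hello1", "hello2", "hello3", "hello4"):
    os.link(src, os.path.join(split, name))

# 4 names in each of the two trees -> st_nlink == 8 for every name
print(os.stat(src).st_nlink)  # 8
shutil.rmtree(tmp)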
diff --git a/meta/lib/oeqa/selftest/cases/picolibc.py b/meta/lib/oeqa/selftest/cases/picolibc.py
new file mode 100644
index 0000000000..e40b4fc3d3
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/picolibc.py
@@ -0,0 +1,18 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import bitbake, get_bb_var
9
10class PicolibcTest(OESelftestTestCase):
11
12 def test_picolibc(self):
13 compatible_machines = ['qemuarm', 'qemuarm64', 'qemuriscv32', 'qemuriscv64']
14 machine = get_bb_var('MACHINE')
15 if machine not in compatible_machines:
16 self.skipTest('This test only works with machines : %s' % ' '.join(compatible_machines))
17 self.write_config('TCLIBC = "picolibc"')
18 bitbake("picolibc-helloworld")
diff --git a/meta/lib/oeqa/selftest/cases/recipetool.py b/meta/lib/oeqa/selftest/cases/recipetool.py
index 126906df50..f742dd4d64 100644
--- a/meta/lib/oeqa/selftest/cases/recipetool.py
+++ b/meta/lib/oeqa/selftest/cases/recipetool.py
@@ -138,7 +138,7 @@ class RecipetoolAppendTests(RecipetoolBase):
138 '\n', 138 '\n',
139 'do_install:append() {\n', 139 'do_install:append() {\n',
140 ' install -d ${D}${%s}\n' % dirname, 140 ' install -d ${D}${%s}\n' % dirname,
141 ' install -m 0755 ${WORKDIR}/%s ${D}${%s}/ls\n' % (testfile2name, dirname), 141 ' install -m 0755 ${UNPACKDIR}/%s ${D}${%s}/ls\n' % (testfile2name, dirname),
142 '}\n'] 142 '}\n']
143 self._try_recipetool_appendfile('coreutils', lspath, testfile2, '-r coreutils', expectedlines, [testfile2name]) 143 self._try_recipetool_appendfile('coreutils', lspath, testfile2, '-r coreutils', expectedlines, [testfile2name])
144 # Now try bbappending the same file again, contents should not change 144 # Now try bbappending the same file again, contents should not change
@@ -164,7 +164,7 @@ class RecipetoolAppendTests(RecipetoolBase):
164 '\n', 164 '\n',
165 'do_install:append() {\n', 165 'do_install:append() {\n',
166 ' install -d ${D}${datadir}\n', 166 ' install -d ${D}${datadir}\n',
167 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n', 167 ' install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/something\n',
168 '}\n'] 168 '}\n']
169 self._try_recipetool_appendfile('netbase', '/usr/share/something', self.testfile, '-r netbase', expectedlines, ['testfile']) 169 self._try_recipetool_appendfile('netbase', '/usr/share/something', self.testfile, '-r netbase', expectedlines, ['testfile'])
170 # Try adding another file, this time where the source file is executable 170 # Try adding another file, this time where the source file is executable
@@ -179,8 +179,8 @@ class RecipetoolAppendTests(RecipetoolBase):
179 '\n', 179 '\n',
180 'do_install:append() {\n', 180 'do_install:append() {\n',
181 ' install -d ${D}${datadir}\n', 181 ' install -d ${D}${datadir}\n',
182 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n', 182 ' install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/something\n',
183 ' install -m 0755 ${WORKDIR}/%s ${D}${datadir}/scriptname\n' % testfile2name, 183 ' install -m 0755 ${UNPACKDIR}/%s ${D}${datadir}/scriptname\n' % testfile2name,
184 '}\n'] 184 '}\n']
185 self._try_recipetool_appendfile('netbase', '/usr/share/scriptname', testfile2, '-r netbase', expectedlines, ['testfile', testfile2name]) 185 self._try_recipetool_appendfile('netbase', '/usr/share/scriptname', testfile2, '-r netbase', expectedlines, ['testfile', testfile2name])
186 186
@@ -192,7 +192,7 @@ class RecipetoolAppendTests(RecipetoolBase):
192 '\n', 192 '\n',
193 'do_install:append() {\n', 193 'do_install:append() {\n',
194 ' install -d ${D}${bindir}\n', 194 ' install -d ${D}${bindir}\n',
195 ' install -m 0755 ${WORKDIR}/testfile ${D}${bindir}/selftest-recipetool-testbin\n', 195 ' install -m 0755 ${UNPACKDIR}/testfile ${D}${bindir}/selftest-recipetool-testbin\n',
196 '}\n'] 196 '}\n']
197 _, output = self._try_recipetool_appendfile('netbase', '/usr/bin/selftest-recipetool-testbin', self.testfile, '-r netbase', expectedlines, ['testfile']) 197 _, output = self._try_recipetool_appendfile('netbase', '/usr/bin/selftest-recipetool-testbin', self.testfile, '-r netbase', expectedlines, ['testfile'])
198 self.assertNotIn('WARNING: ', output) 198 self.assertNotIn('WARNING: ', output)
@@ -207,7 +207,7 @@ class RecipetoolAppendTests(RecipetoolBase):
207 '\n', 207 '\n',
208 'do_install:append:mymachine() {\n', 208 'do_install:append:mymachine() {\n',
209 ' install -d ${D}${datadir}\n', 209 ' install -d ${D}${datadir}\n',
210 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n', 210 ' install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/something\n',
211 '}\n'] 211 '}\n']
212 _, output = self._try_recipetool_appendfile('netbase', '/usr/share/something', self.testfile, '-r netbase -m mymachine', expectedlines, ['mymachine/testfile']) 212 _, output = self._try_recipetool_appendfile('netbase', '/usr/share/something', self.testfile, '-r netbase -m mymachine', expectedlines, ['mymachine/testfile'])
213 self.assertNotIn('WARNING: ', output) 213 self.assertNotIn('WARNING: ', output)
@@ -241,7 +241,7 @@ class RecipetoolAppendTests(RecipetoolBase):
241 '\n', 241 '\n',
242 'do_install:append() {\n', 242 'do_install:append() {\n',
243 ' install -d ${D}${datadir}\n', 243 ' install -d ${D}${datadir}\n',
244 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-subdir\n', 244 ' install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/selftest-replaceme-subdir\n',
245 '}\n'] 245 '}\n']
246 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-subdir', self.testfile, '', expectedlines, ['testfile']) 246 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-subdir', self.testfile, '', expectedlines, ['testfile'])
247 self.assertNotIn('WARNING: ', output) 247 self.assertNotIn('WARNING: ', output)
@@ -268,7 +268,7 @@ class RecipetoolAppendTests(RecipetoolBase):
268 '\n', 268 '\n',
269 'do_install:append() {\n', 269 'do_install:append() {\n',
270 ' install -d ${D}${sysconfdir}\n', 270 ' install -d ${D}${sysconfdir}\n',
271 ' install -m 0644 ${WORKDIR}/testfile ${D}${sysconfdir}/selftest-replaceme-patched\n', 271 ' install -m 0644 ${UNPACKDIR}/testfile ${D}${sysconfdir}/selftest-replaceme-patched\n',
272 '}\n'] 272 '}\n']
273 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/etc/selftest-replaceme-patched', self.testfile, '', expectedlines, ['testfile']) 273 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/etc/selftest-replaceme-patched', self.testfile, '', expectedlines, ['testfile'])
274 for line in output.splitlines(): 274 for line in output.splitlines():
@@ -286,7 +286,7 @@ class RecipetoolAppendTests(RecipetoolBase):
286 '\n', 286 '\n',
287 'do_install:append() {\n', 287 'do_install:append() {\n',
288 ' install -d ${D}${datadir}\n', 288 ' install -d ${D}${datadir}\n',
289 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-scripted\n', 289 ' install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/selftest-replaceme-scripted\n',
290 '}\n'] 290 '}\n']
291 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-scripted', self.testfile, '', expectedlines, ['testfile']) 291 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-scripted', self.testfile, '', expectedlines, ['testfile'])
292 self.assertNotIn('WARNING: ', output) 292 self.assertNotIn('WARNING: ', output)
@@ -309,7 +309,7 @@ class RecipetoolAppendTests(RecipetoolBase):
309 '\n', 309 '\n',
310 'do_install:append() {\n', 310 'do_install:append() {\n',
311 ' install -d ${D}${datadir}\n', 311 ' install -d ${D}${datadir}\n',
312 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-postinst\n', 312 ' install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/selftest-replaceme-postinst\n',
313 '}\n'] 313 '}\n']
314 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-postinst', self.testfile, '-r selftest-recipetool-appendfile', expectedlines, ['testfile']) 314 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-postinst', self.testfile, '-r selftest-recipetool-appendfile', expectedlines, ['testfile'])
315 315
@@ -1068,6 +1068,7 @@ class RecipetoolTests(RecipetoolBase):
1068 1068
1069 d = DataConnectorCopy 1069 d = DataConnectorCopy
1070 d.getVar = Mock(return_value=commonlicdir) 1070 d.getVar = Mock(return_value=commonlicdir)
1071 d.expand = Mock(side_effect=lambda x: x)
1071 1072
1072 srctree = tempfile.mkdtemp(prefix='recipetoolqa') 1073 srctree = tempfile.mkdtemp(prefix='recipetoolqa')
1073 self.track_for_cleanup(srctree) 1074 self.track_for_cleanup(srctree)
diff --git a/meta/lib/oeqa/selftest/cases/recipeutils.py b/meta/lib/oeqa/selftest/cases/recipeutils.py
index 2cb4445f81..9949737172 100644
--- a/meta/lib/oeqa/selftest/cases/recipeutils.py
+++ b/meta/lib/oeqa/selftest/cases/recipeutils.py
@@ -72,7 +72,7 @@ class RecipeUtilsTests(OESelftestTestCase):
72 expected_patch = """ 72 expected_patch = """
73--- a/recipes-test/recipeutils/recipeutils-test_1.2.bb 73--- a/recipes-test/recipeutils/recipeutils-test_1.2.bb
74+++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb 74+++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb
75@@ -8,6 +8,4 @@ 75@@ -11,6 +11,4 @@
76 76
77 BBCLASSEXTEND = "native nativesdk" 77 BBCLASSEXTEND = "native nativesdk"
78 78
@@ -97,7 +97,7 @@ class RecipeUtilsTests(OESelftestTestCase):
97 expected_patch = """ 97 expected_patch = """
98--- a/recipes-test/recipeutils/recipeutils-test_1.2.bb 98--- a/recipes-test/recipeutils/recipeutils-test_1.2.bb
99+++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb 99+++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb
100@@ -8,6 +8,3 @@ 100@@ -11,6 +11,3 @@
101 101
102 BBCLASSEXTEND = "native nativesdk" 102 BBCLASSEXTEND = "native nativesdk"
103 103
diff --git a/meta/lib/oeqa/selftest/cases/reproducible.py b/meta/lib/oeqa/selftest/cases/reproducible.py
index 80e830136f..3d3f30eebc 100644
--- a/meta/lib/oeqa/selftest/cases/reproducible.py
+++ b/meta/lib/oeqa/selftest/cases/reproducible.py
@@ -133,7 +133,8 @@ class ReproducibleTests(OESelftestTestCase):
133 max_report_size = 250 * 1024 * 1024 133 max_report_size = 250 * 1024 * 1024
134 134
135 # targets are the things we want to test the reproducibility of 135 # targets are the things we want to test the reproducibility of
136 targets = ['core-image-minimal', 'core-image-sato', 'core-image-full-cmdline', 'core-image-weston', 'world'] 136 # Have to add the virtual targets manually for now because builds may or may not include them, as they're excluded from world
137 targets = ['core-image-minimal', 'core-image-sato', 'core-image-full-cmdline', 'core-image-weston', 'world', 'virtual/librpc', 'virtual/libsdl2', 'virtual/crypt']
137 138
138 # sstate targets are things to pull from sstate to potentially cut build/debugging time 139 # sstate targets are things to pull from sstate to potentially cut build/debugging time
139 sstate_targets = [] 140 sstate_targets = []
@@ -273,9 +274,13 @@ class ReproducibleTests(OESelftestTestCase):
273 os.chmod(save_dir, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH) 274 os.chmod(save_dir, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
274 self.logger.info('Non-reproducible packages will be copied to %s', save_dir) 275 self.logger.info('Non-reproducible packages will be copied to %s', save_dir)
275 276
277 # The bug below shows that some reproducibility issues depend on the build directory path length.
278 # https://bugzilla.yoctoproject.org/show_bug.cgi?id=15554
279 # So the reproducibleA and reproducibleB directories are renamed reproducibleA and reproducibleB-extended so that their path lengths differ.
280
276 vars_A = self.do_test_build('reproducibleA', self.build_from_sstate) 281 vars_A = self.do_test_build('reproducibleA', self.build_from_sstate)
277 282
278 vars_B = self.do_test_build('reproducibleB', False) 283 vars_B = self.do_test_build('reproducibleB-extended', False)
279 284
280 # NOTE: The temp directories from the reproducible build are purposely 285 # NOTE: The temp directories from the reproducible build are purposely
281 # kept after the build so it can be diffed for debugging. 286 # kept after the build so it can be diffed for debugging.
@@ -330,7 +335,7 @@ class ReproducibleTests(OESelftestTestCase):
330 # Copy jquery to improve the diffoscope output usability 335 # Copy jquery to improve the diffoscope output usability
331 self.copy_file(os.path.join(jquery_sysroot, 'usr/share/javascript/jquery/jquery.min.js'), os.path.join(package_html_dir, 'jquery.js')) 336 self.copy_file(os.path.join(jquery_sysroot, 'usr/share/javascript/jquery/jquery.min.js'), os.path.join(package_html_dir, 'jquery.js'))
332 337
333 run_diffoscope('reproducibleA', 'reproducibleB', package_html_dir, max_report_size=self.max_report_size, 338 run_diffoscope('reproducibleA', 'reproducibleB-extended', package_html_dir, max_report_size=self.max_report_size,
334 native_sysroot=diffoscope_sysroot, ignore_status=True, cwd=package_dir) 339 native_sysroot=diffoscope_sysroot, ignore_status=True, cwd=package_dir)
335 340
336 if fails: 341 if fails:
diff --git a/meta/lib/oeqa/selftest/cases/retain.py b/meta/lib/oeqa/selftest/cases/retain.py
new file mode 100644
index 0000000000..892be45857
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/retain.py
@@ -0,0 +1,241 @@
1# Tests for retain.bbclass
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: MIT
6#
7
8import os
9import glob
10import fnmatch
11import oe.path
12import shutil
13import tarfile
14from oeqa.utils.commands import bitbake, get_bb_vars
15from oeqa.selftest.case import OESelftestTestCase
16
17class Retain(OESelftestTestCase):
18
19 def test_retain_always(self):
20 """
21 Summary: Test retain class with RETAIN_DIRS_ALWAYS
22 Expected: Archive written to RETAIN_OUTDIR when build of test recipe completes
23 Product: oe-core
24 Author: Paul Eggleton <paul.eggleton@microsoft.com>
25 """
26
27 test_recipe = 'quilt-native'
28
29 features = 'INHERIT += "retain"\n'
30 features += 'RETAIN_DIRS_ALWAYS = "${T}"\n'
31 self.write_config(features)
32
33 bitbake('-c clean %s' % test_recipe)
34
35 bb_vars = get_bb_vars(['RETAIN_OUTDIR', 'TMPDIR'])
36 retain_outdir = bb_vars['RETAIN_OUTDIR'] or ''
37 tmpdir = bb_vars['TMPDIR']
38 if len(retain_outdir) < 5:
39 self.fail('RETAIN_OUTDIR value "%s" is invalid' % retain_outdir)
40 if not oe.path.is_path_parent(tmpdir, retain_outdir):
41 self.fail('RETAIN_OUTDIR (%s) is not underneath TMPDIR (%s)' % (retain_outdir, tmpdir))
42 try:
43 shutil.rmtree(retain_outdir)
44 except FileNotFoundError:
45 pass
46
47 bitbake(test_recipe)
48 if not glob.glob(os.path.join(retain_outdir, '%s_temp_*.tar.gz' % test_recipe)):
49 self.fail('No output archive for %s created' % test_recipe)
50
51
52 def test_retain_failure(self):
53 """
54 Summary: Test retain class default behaviour
55 Expected: Archive written to RETAIN_OUTDIR only when build of test
56 recipe fails, and archive contents are as expected
57 Product: oe-core
58 Author: Paul Eggleton <paul.eggleton@microsoft.com>
59 """
60
61 test_recipe_fail = 'error'
62
63 features = 'INHERIT += "retain"\n'
64 self.write_config(features)
65
66 bb_vars = get_bb_vars(['RETAIN_OUTDIR', 'TMPDIR', 'RETAIN_DIRS_ALWAYS', 'RETAIN_DIRS_GLOBAL_ALWAYS'])
67 if bb_vars['RETAIN_DIRS_ALWAYS']:
68 self.fail('RETAIN_DIRS_ALWAYS is set, this interferes with the test')
69 if bb_vars['RETAIN_DIRS_GLOBAL_ALWAYS']:
70 self.fail('RETAIN_DIRS_GLOBAL_ALWAYS is set, this interferes with the test')
71 retain_outdir = bb_vars['RETAIN_OUTDIR'] or ''
72 tmpdir = bb_vars['TMPDIR']
73 if len(retain_outdir) < 5:
74 self.fail('RETAIN_OUTDIR value "%s" is invalid' % retain_outdir)
75 if not oe.path.is_path_parent(tmpdir, retain_outdir):
76 self.fail('RETAIN_OUTDIR (%s) is not underneath TMPDIR (%s)' % (retain_outdir, tmpdir))
77
78 try:
79 shutil.rmtree(retain_outdir)
80 except FileNotFoundError:
81 pass
82
83 bitbake('-c clean %s' % test_recipe_fail)
84
85 if os.path.exists(retain_outdir):
86 retain_dirlist = os.listdir(retain_outdir)
87 if retain_dirlist:
88 self.fail('RETAIN_OUTDIR should be empty without failure, contents:\n%s' % '\n'.join(retain_dirlist))
89
90 result = bitbake('-c compile %s' % test_recipe_fail, ignore_status=True)
91 if result.status == 0:
92 self.fail('Build of %s did not fail as expected' % test_recipe_fail)
93
94 archives = glob.glob(os.path.join(retain_outdir, '%s_*.tar.gz' % test_recipe_fail))
95 if not archives:
96 self.fail('No output archive for %s created' % test_recipe_fail)
97 if len(archives) > 1:
98 self.fail('More than one archive for %s created' % test_recipe_fail)
99 for archive in archives:
100 found = False
101 archive_prefix = os.path.basename(archive).split('.tar')[0]
102 expected_prefix_start = '%s_workdir' % test_recipe_fail
103 if not archive_prefix.startswith(expected_prefix_start):
104 self.fail('Archive %s name does not start with expected prefix "%s"' % (os.path.basename(archive), expected_prefix_start))
105 with tarfile.open(archive) as tf:
106 for ti in tf:
107 if not fnmatch.fnmatch(ti.name, '%s/*' % archive_prefix):
108 self.fail('File without tarball-named subdirectory within tarball %s: %s' % (os.path.basename(archive), ti.name))
109 if ti.name.endswith('/temp/log.do_compile'):
110 found = True
111 if not found:
112 self.fail('Did not find log.do_compile in output archive %s' % os.path.basename(archive))
113
114
115 def test_retain_global(self):
116 """
117 Summary: Test retain class RETAIN_DIRS_GLOBAL_* behaviour
118 Expected: Ensure RETAIN_DIRS_GLOBAL_ALWAYS always causes an
119 archive to be created, and RETAIN_DIRS_GLOBAL_FAILURE
120 only causes an archive to be created on failure.
121 Also test archive naming (with : character) as an
122 added bonus.
123 Product: oe-core
124 Author: Paul Eggleton <paul.eggleton@microsoft.com>
125 """
126
127 test_recipe = 'quilt-native'
128 test_recipe_fail = 'error'
129
130 features = 'INHERIT += "retain"\n'
131 features += 'RETAIN_DIRS_GLOBAL_ALWAYS = "${LOG_DIR};prefix=buildlogs"\n'
132 features += 'RETAIN_DIRS_GLOBAL_FAILURE = "${STAMPS_DIR}"\n'
133 self.write_config(features)
134
135 bitbake('-c clean %s' % test_recipe)
136
137 bb_vars = get_bb_vars(['RETAIN_OUTDIR', 'TMPDIR', 'STAMPS_DIR'])
138 retain_outdir = bb_vars['RETAIN_OUTDIR'] or ''
139 tmpdir = bb_vars['TMPDIR']
140 if len(retain_outdir) < 5:
141 self.fail('RETAIN_OUTDIR value "%s" is invalid' % retain_outdir)
142 if not oe.path.is_path_parent(tmpdir, retain_outdir):
143 self.fail('RETAIN_OUTDIR (%s) is not underneath TMPDIR (%s)' % (retain_outdir, tmpdir))
144 try:
145 shutil.rmtree(retain_outdir)
146 except FileNotFoundError:
147 pass
148
149 # Test success case
150 bitbake(test_recipe)
151 if not glob.glob(os.path.join(retain_outdir, 'buildlogs_*.tar.gz')):
152 self.fail('No output archive for LOG_DIR created')
153 stamps_dir = bb_vars['STAMPS_DIR']
154 if glob.glob(os.path.join(retain_outdir, '%s_*.tar.gz' % os.path.basename(stamps_dir))):
155 self.fail('Output archive for STAMPS_DIR created when it should not have been')
156
157 # Test failure case
158 result = bitbake('-c compile %s' % test_recipe_fail, ignore_status=True)
159 if result.status == 0:
160 self.fail('Build of %s did not fail as expected' % test_recipe_fail)
161 if not glob.glob(os.path.join(retain_outdir, '%s_*.tar.gz' % os.path.basename(stamps_dir))):
162 self.fail('Output archive for STAMPS_DIR not created')
163 if len(glob.glob(os.path.join(retain_outdir, 'buildlogs_*.tar.gz'))) != 2:
164 self.fail('Should be exactly two buildlogs archives in output dir')
165
166
167 def test_retain_misc(self):
168 """
169 Summary: Test retain class with RETAIN_ENABLED and RETAIN_TARBALL_SUFFIX
170 Expected: Archive written to RETAIN_OUTDIR only when RETAIN_ENABLED is set
171 and archive contents are as expected. Also test archive naming
172 (with : character) as an added bonus.
173 Product: oe-core
174 Author: Paul Eggleton <paul.eggleton@microsoft.com>
175 """
176
177 test_recipe_fail = 'error'
178
179 features = 'INHERIT += "retain"\n'
180 features += 'RETAIN_DIRS_ALWAYS = "${T}"\n'
181 features += 'RETAIN_ENABLED = "0"\n'
182 self.write_config(features)
183
184 bb_vars = get_bb_vars(['RETAIN_OUTDIR', 'TMPDIR'])
185 retain_outdir = bb_vars['RETAIN_OUTDIR'] or ''
186 tmpdir = bb_vars['TMPDIR']
187 if len(retain_outdir) < 5:
188 self.fail('RETAIN_OUTDIR value "%s" is invalid' % retain_outdir)
189 if not oe.path.is_path_parent(tmpdir, retain_outdir):
190 self.fail('RETAIN_OUTDIR (%s) is not underneath TMPDIR (%s)' % (retain_outdir, tmpdir))
191
192 try:
193 shutil.rmtree(retain_outdir)
194 except FileNotFoundError:
195 pass
196
197 bitbake('-c clean %s' % test_recipe_fail)
198 result = bitbake('-c compile %s' % test_recipe_fail, ignore_status=True)
199 if result.status == 0:
200 self.fail('Build of %s did not fail as expected' % test_recipe_fail)
201
202 if os.path.exists(retain_outdir) and os.listdir(retain_outdir):
203 self.fail('RETAIN_OUTDIR should be empty with RETAIN_ENABLED = "0"')
204
205 features = 'INHERIT += "retain"\n'
206 features += 'RETAIN_DIRS_ALWAYS = "${T};prefix=recipelogs"\n'
207 features += 'RETAIN_TARBALL_SUFFIX = "${DATETIME}-testsuffix.tar.bz2"\n'
208 features += 'RETAIN_ENABLED = "1"\n'
209 self.write_config(features)
210
211 result = bitbake('-c compile %s' % test_recipe_fail, ignore_status=True)
212 if result.status == 0:
213 self.fail('Build of %s did not fail as expected' % test_recipe_fail)
214
215 archives = glob.glob(os.path.join(retain_outdir, '%s_*-testsuffix.tar.bz2' % test_recipe_fail))
216 if not archives:
217 self.fail('No output archive for %s created' % test_recipe_fail)
218 if len(archives) != 2:
219 self.fail('Two archives for %s expected, but %d exist' % (test_recipe_fail, len(archives)))
220 recipelogs_found = False
221 workdir_found = False
222 for archive in archives:
223 contents_found = False
224 archive_prefix = os.path.basename(archive).split('.tar')[0]
225 if archive_prefix.startswith('%s_recipelogs' % test_recipe_fail):
226 recipelogs_found = True
227 if archive_prefix.startswith('%s_workdir' % test_recipe_fail):
228 workdir_found = True
229 with tarfile.open(archive, 'r:bz2') as tf:
230 for ti in tf:
231 if not fnmatch.fnmatch(ti.name, '%s/*' % (archive_prefix)):
232 self.fail('File without tarball-named subdirectory within tarball %s: %s' % (os.path.basename(archive), ti.name))
233 if ti.name.endswith('/log.do_compile'):
234 contents_found = True
235 if not contents_found:
236 # Both archives should contain this file
237 self.fail('Did not find log.do_compile in output archive %s' % os.path.basename(archive))
238 if not recipelogs_found:
239 self.fail('No archive with expected "recipelogs" prefix found')
240 if not workdir_found:
241 self.fail('No archive with expected "workdir" prefix found')
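Between them, the new retain.bbclass tests exercise the main configuration knobs described in their docstrings: RETAIN_DIRS_ALWAYS, RETAIN_DIRS_GLOBAL_ALWAYS and RETAIN_DIRS_GLOBAL_FAILURE (with the ;prefix= archive-naming option), RETAIN_ENABLED and RETAIN_TARBALL_SUFFIX. Collected into one fragment for reference, assembled the same way the tests build their config snippet; the values are simply the ones used above, shown for illustration:

# Assembled exactly as the tests do, before self.write_config(features)
features = 'INHERIT += "retain"\n'
# Archive ${T} for a recipe whether or not it fails; ;prefix= renames the tarball
features += 'RETAIN_DIRS_ALWAYS = "${T};prefix=recipelogs"\n'
# Archive LOG_DIR on every build, STAMPS_DIR only on failure
features += 'RETAIN_DIRS_GLOBAL_ALWAYS = "${LOG_DIR};prefix=buildlogs"\n'
features += 'RETAIN_DIRS_GLOBAL_FAILURE = "${STAMPS_DIR}"\n'
# Master switch and tarball suffix override
features += 'RETAIN_ENABLED = "1"\n'
features += 'RETAIN_TARBALL_SUFFIX = "${DATETIME}-testsuffix.tar.bz2"\n'
print(features)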
diff --git a/meta/lib/oeqa/selftest/cases/runtime_test.py b/meta/lib/oeqa/selftest/cases/runtime_test.py
index 12000aac16..27090ae5cd 100644
--- a/meta/lib/oeqa/selftest/cases/runtime_test.py
+++ b/meta/lib/oeqa/selftest/cases/runtime_test.py
@@ -273,7 +273,7 @@ TEST_RUNQEMUPARAMS += " slirp"
273 import subprocess, os 273 import subprocess, os
274 274
275 distro = oe.lsb.distro_identifier() 275 distro = oe.lsb.distro_identifier()
276 if distro and (distro in ['debian-9', 'debian-10', 'centos-7', 'centos-8', 'ubuntu-16.04', 'ubuntu-18.04'] or 276 if distro and (distro in ['debian-9', 'debian-10', 'centos-7', 'centos-8', 'centos-9', 'ubuntu-16.04', 'ubuntu-18.04'] or
277 distro.startswith('almalinux') or distro.startswith('rocky')): 277 distro.startswith('almalinux') or distro.startswith('rocky')):
278 self.skipTest('virgl headless cannot be tested with %s' %(distro)) 278 self.skipTest('virgl headless cannot be tested with %s' %(distro))
279 279
@@ -310,10 +310,7 @@ class Postinst(OESelftestTestCase):
310 features += 'IMAGE_FEATURES += "package-management empty-root-password"\n' 310 features += 'IMAGE_FEATURES += "package-management empty-root-password"\n'
311 features += 'PACKAGE_CLASSES = "%s"\n' % classes 311 features += 'PACKAGE_CLASSES = "%s"\n' % classes
312 if init_manager == "systemd": 312 if init_manager == "systemd":
313 features += 'DISTRO_FEATURES:append = " systemd usrmerge"\n' 313 features += 'INIT_MANAGER = "systemd"\n'
314 features += 'VIRTUAL-RUNTIME_init_manager = "systemd"\n'
315 features += 'DISTRO_FEATURES_BACKFILL_CONSIDERED = "sysvinit"\n'
316 features += 'VIRTUAL-RUNTIME_initscripts = ""\n'
317 self.write_config(features) 314 self.write_config(features)
318 315
319 bitbake('core-image-minimal') 316 bitbake('core-image-minimal')
diff --git a/meta/lib/oeqa/selftest/cases/rust.py b/meta/lib/oeqa/selftest/cases/rust.py
index ad14189c6d..cbe6366f75 100644
--- a/meta/lib/oeqa/selftest/cases/rust.py
+++ b/meta/lib/oeqa/selftest/cases/rust.py
@@ -66,132 +66,45 @@ class RustSelfTestSystemEmulated(OESelftestTestCase, OEPTestResultTestCase):
66 # bless: First runs rustfmt to format the codebase, 66 # bless: First runs rustfmt to format the codebase,
67 # then runs tidy checks. 67 # then runs tidy checks.
68 exclude_list = [ 68 exclude_list = [
69 'compiler/rustc', 69 'src/bootstrap',
70 'compiler/rustc_interface/src/tests.rs',
71 'library/panic_abort',
72 'library/panic_unwind',
73 'library/test/src/stats/tests.rs',
74 'src/bootstrap/builder/tests.rs',
75 'src/doc/rustc', 70 'src/doc/rustc',
76 'src/doc/rustdoc', 71 'src/doc/rustdoc',
77 'src/doc/unstable-book', 72 'src/doc/unstable-book',
78 'src/librustdoc', 73 'src/librustdoc',
79 'src/rustdoc-json-types', 74 'src/rustdoc-json-types',
80 'src/tools/compiletest/src/common.rs', 75 'src/tools/compiletest/src/common.rs',
76 'src/tools/jsondoclint',
81 'src/tools/lint-docs', 77 'src/tools/lint-docs',
78 'src/tools/replace-version-placeholder',
82 'src/tools/rust-analyzer', 79 'src/tools/rust-analyzer',
83 'src/tools/rustdoc-themes', 80 'src/tools/rustdoc-themes',
84 'src/tools/tidy', 81 'src/tools/rust-installer',
82 'src/tools/suggest-tests',
83 'src/tools/tidy/src/',
85 'tests/assembly/asm/aarch64-outline-atomics.rs', 84 'tests/assembly/asm/aarch64-outline-atomics.rs',
86 'tests/codegen/abi-main-signature-32bit-c-int.rs', 85 'tests/codegen/abi-main-signature-32bit-c-int.rs',
87 'tests/codegen/abi-repr-ext.rs', 86 'tests/codegen/i128-x86-align.rs',
88 'tests/codegen/abi-x86-interrupt.rs', 87 'tests/codegen/issues/issue-122805.rs',
89 'tests/codegen/branch-protection.rs',
90 'tests/codegen/catch-unwind.rs',
91 'tests/codegen/cf-protection.rs',
92 'tests/codegen/enum-bounds-check-derived-idx.rs',
93 'tests/codegen/force-unwind-tables.rs',
94 'tests/codegen/intrinsic-no-unnamed-attr.rs',
95 'tests/codegen/issues/issue-103840.rs',
96 'tests/codegen/issues/issue-47278.rs',
97 'tests/codegen/issues/issue-73827-bounds-check-index-in-subexpr.rs',
98 'tests/codegen/lifetime_start_end.rs',
99 'tests/codegen/local-generics-in-exe-internalized.rs',
100 'tests/codegen/match-unoptimized.rs',
101 'tests/codegen/noalias-rwlockreadguard.rs',
102 'tests/codegen/non-terminate/nonempty-infinite-loop.rs',
103 'tests/codegen/noreturn-uninhabited.rs',
104 'tests/codegen/repr-transparent-aggregates-3.rs',
105 'tests/codegen/riscv-abi/call-llvm-intrinsics.rs',
106 'tests/codegen/riscv-abi/riscv64-lp64f-lp64d-abi.rs',
107 'tests/codegen/riscv-abi/riscv64-lp64d-abi.rs',
108 'tests/codegen/sse42-implies-crc32.rs',
109 'tests/codegen/thread-local.rs', 88 'tests/codegen/thread-local.rs',
110 'tests/codegen/uninit-consts.rs', 89 'tests/mir-opt/',
111 'tests/pretty/raw-str-nonexpr.rs',
112 'tests/run-make', 90 'tests/run-make',
113 'tests/run-make-fulldeps', 91 'tests/run-make-fulldeps',
114 'tests/rustdoc', 92 'tests/rustdoc',
115 'tests/rustdoc-json', 93 'tests/rustdoc-json',
116 'tests/rustdoc-js-std', 94 'tests/rustdoc-js-std',
117 'tests/rustdoc-ui/cfg-test.rs',
118 'tests/rustdoc-ui/check-cfg-test.rs',
119 'tests/rustdoc-ui/display-output.rs',
120 'tests/rustdoc-ui/doc-comment-multi-line-attr.rs',
121 'tests/rustdoc-ui/doc-comment-multi-line-cfg-attr.rs',
122 'tests/rustdoc-ui/doc-test-doctest-feature.rs',
123 'tests/rustdoc-ui/doctest-multiline-crate-attribute.rs',
124 'tests/rustdoc-ui/doctest-output.rs',
125 'tests/rustdoc-ui/doc-test-rustdoc-feature.rs',
126 'tests/rustdoc-ui/failed-doctest-compile-fail.rs',
127 'tests/rustdoc-ui/issue-80992.rs',
128 'tests/rustdoc-ui/issue-91134.rs',
129 'tests/rustdoc-ui/nocapture-fail.rs',
130 'tests/rustdoc-ui/nocapture.rs',
131 'tests/rustdoc-ui/no-run-flag.rs',
132 'tests/rustdoc-ui/run-directory.rs',
133 'tests/rustdoc-ui/test-no_std.rs',
134 'tests/rustdoc-ui/test-type.rs',
135 'tests/rustdoc/unit-return.rs',
136 'tests/ui/abi/stack-probes-lto.rs', 95 'tests/ui/abi/stack-probes-lto.rs',
137 'tests/ui/abi/stack-probes.rs', 96 'tests/ui/abi/stack-probes.rs',
138 'tests/ui/array-slice-vec/subslice-patterns-const-eval-match.rs', 97 'tests/ui/codegen/mismatched-data-layouts.rs',
139 'tests/ui/asm/x86_64/sym.rs',
140 'tests/ui/associated-type-bounds/fn-apit.rs',
141 'tests/ui/associated-type-bounds/fn-dyn-apit.rs',
142 'tests/ui/associated-type-bounds/fn-wrap-apit.rs',
143 'tests/ui/debuginfo/debuginfo-emit-llvm-ir-and-split-debuginfo.rs', 98 'tests/ui/debuginfo/debuginfo-emit-llvm-ir-and-split-debuginfo.rs',
144 'tests/ui/drop/dynamic-drop.rs', 99 'tests/ui-fulldeps/',
145 'tests/ui/empty_global_asm.rs',
146 'tests/ui/functions-closures/fn-help-with-err.rs',
147 'tests/ui/linkage-attr/issue-10755.rs',
148 'tests/ui/macros/restricted-shadowing-legacy.rs',
149 'tests/ui/process/nofile-limit.rs', 100 'tests/ui/process/nofile-limit.rs',
150 'tests/ui/process/process-panic-after-fork.rs',
151 'tests/ui/process/process-sigpipe.rs',
152 'tests/ui/simd/target-feature-mixup.rs',
153 'tests/ui/structs-enums/multiple-reprs.rs', 101 'tests/ui/structs-enums/multiple-reprs.rs',
154 'src/tools/jsondoclint', 102 'tidyselftest'
155 'src/tools/replace-version-placeholder',
156 'tests/codegen/abi-efiapi.rs',
157 'tests/codegen/abi-sysv64.rs',
158 'tests/codegen/align-byval.rs',
159 'tests/codegen/align-fn.rs',
160 'tests/codegen/asm-powerpc-clobbers.rs',
161 'tests/codegen/async-fn-debug-awaitee-field.rs',
162 'tests/codegen/binary-search-index-no-bound-check.rs',
163 'tests/codegen/call-metadata.rs',
164 'tests/codegen/debug-column.rs',
165 'tests/codegen/debug-limited.rs',
166 'tests/codegen/debuginfo-generic-closure-env-names.rs',
167 'tests/codegen/drop.rs',
168 'tests/codegen/dst-vtable-align-nonzero.rs',
169 'tests/codegen/enable-lto-unit-splitting.rs',
170 'tests/codegen/enum/enum-u128.rs',
171 'tests/codegen/fn-impl-trait-self.rs',
172 'tests/codegen/inherit_overflow.rs',
173 'tests/codegen/inline-function-args-debug-info.rs',
174 'tests/codegen/intrinsics/mask.rs',
175 'tests/codegen/intrinsics/transmute-niched.rs',
176 'tests/codegen/issues/issue-73258.rs',
177 'tests/codegen/issues/issue-75546.rs',
178 'tests/codegen/issues/issue-77812.rs',
179 'tests/codegen/issues/issue-98156-const-arg-temp-lifetime.rs',
180 'tests/codegen/llvm-ident.rs',
181 'tests/codegen/mainsubprogram.rs',
182 'tests/codegen/move-operands.rs',
183 'tests/codegen/repr/transparent-mips64.rs',
184 'tests/mir-opt/',
185 'tests/rustdoc-json',
186 'tests/rustdoc-ui/doc-test-rustdoc-feature.rs',
187 'tests/rustdoc-ui/no-run-flag.rs',
188 'tests/ui-fulldeps/',
189 'tests/ui/numbers-arithmetic/u128.rs'
190 ] 103 ]
191 104
192 exclude_fail_tests = " ".join([" --exclude " + item for item in exclude_list]) 105 exclude_fail_tests = " ".join([" --exclude " + item for item in exclude_list])
193 # Add exclude_fail_tests with other test arguments 106 # Add exclude_fail_tests with other test arguments
194 testargs = exclude_fail_tests + " --doc --no-fail-fast --bless" 107 testargs = exclude_fail_tests + " --no-fail-fast --bless"
195 108
196 # wrap the execution with a qemu instance. 109 # wrap the execution with a qemu instance.
197 # Tests are run with 512 tasks in parallel to execute all tests very quickly 110 # Tests are run with 512 tasks in parallel to execute all tests very quickly
@@ -210,9 +123,8 @@ class RustSelfTestSystemEmulated(OESelftestTestCase, OEPTestResultTestCase):
210 tmpdir = get_bb_var("TMPDIR", "rust") 123 tmpdir = get_bb_var("TMPDIR", "rust")
211 124
212 # Set path for target-poky-linux-gcc, RUST_TARGET_PATH and hosttools. 125 # Set path for target-poky-linux-gcc, RUST_TARGET_PATH and hosttools.
213 cmd = " export PATH=%s/recipe-sysroot-native/usr/bin:$PATH;" % rustlibpath 126 cmd = "export TARGET_VENDOR=\"-poky\";"
214 cmd = cmd + " export TARGET_VENDOR=\"-poky\";" 127 cmd = cmd + " export PATH=%s/recipe-sysroot-native/usr/bin/python3-native:%s/recipe-sysroot-native/usr/bin:%s/recipe-sysroot-native/usr/bin/%s:%s/hosttools:$PATH;" % (rustlibpath, rustlibpath, rustlibpath, tcpath, tmpdir)
215 cmd = cmd + " export PATH=%s/recipe-sysroot-native/usr/bin/%s:%s/hosttools:$PATH;" % (rustlibpath, tcpath, tmpdir)
216 cmd = cmd + " export RUST_TARGET_PATH=%s/rust-targets;" % rustlibpath 128 cmd = cmd + " export RUST_TARGET_PATH=%s/rust-targets;" % rustlibpath
217 # Trigger testing. 129 # Trigger testing.
218 cmd = cmd + " export TEST_DEVICE_ADDR=\"%s:12345\";" % qemu.ip 130 cmd = cmd + " export TEST_DEVICE_ADDR=\"%s:12345\";" % qemu.ip
diff --git a/meta/lib/oeqa/selftest/cases/spdx.py b/meta/lib/oeqa/selftest/cases/spdx.py
index 05fc4e390b..be595babb3 100644
--- a/meta/lib/oeqa/selftest/cases/spdx.py
+++ b/meta/lib/oeqa/selftest/cases/spdx.py
@@ -6,29 +6,37 @@
6 6
7import json 7import json
8import os 8import os
9import textwrap
10from pathlib import Path
9from oeqa.selftest.case import OESelftestTestCase 11from oeqa.selftest.case import OESelftestTestCase
10from oeqa.utils.commands import bitbake, get_bb_var, runCmd 12from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runCmd
11 13
12class SPDXCheck(OESelftestTestCase):
13 14
15class SPDX22Check(OESelftestTestCase):
14 @classmethod 16 @classmethod
15 def setUpClass(cls): 17 def setUpClass(cls):
16 super(SPDXCheck, cls).setUpClass() 18 super().setUpClass()
17 bitbake("python3-spdx-tools-native") 19 bitbake("python3-spdx-tools-native")
18 bitbake("-c addto_recipe_sysroot python3-spdx-tools-native") 20 bitbake("-c addto_recipe_sysroot python3-spdx-tools-native")
19 21
20 def check_recipe_spdx(self, high_level_dir, spdx_file, target_name): 22 def check_recipe_spdx(self, high_level_dir, spdx_file, target_name):
21 config = """ 23 config = textwrap.dedent(
22INHERIT += "create-spdx" 24 """\
23""" 25 INHERIT:remove = "create-spdx"
26 INHERIT += "create-spdx-2.2"
27 """
28 )
24 self.write_config(config) 29 self.write_config(config)
25 30
26 deploy_dir = get_bb_var("DEPLOY_DIR") 31 deploy_dir = get_bb_var("DEPLOY_DIR")
27 machine_var = get_bb_var("MACHINE") 32 machine_var = get_bb_var("MACHINE")
33 spdx_version = get_bb_var("SPDX_VERSION")
28 # qemux86-64 creates the directory qemux86_64 34 # qemux86-64 creates the directory qemux86_64
29 machine_dir = machine_var.replace("-", "_") 35 machine_dir = machine_var.replace("-", "_")
30 36
31 full_file_path = os.path.join(deploy_dir, "spdx", machine_dir, high_level_dir, spdx_file) 37 full_file_path = os.path.join(
38 deploy_dir, "spdx", spdx_version, machine_dir, high_level_dir, spdx_file
39 )
32 40
33 try: 41 try:
34 os.remove(full_file_path) 42 os.remove(full_file_path)
@@ -43,8 +51,13 @@ INHERIT += "create-spdx"
43 self.assertNotEqual(report, None) 51 self.assertNotEqual(report, None)
44 self.assertNotEqual(report["SPDXID"], None) 52 self.assertNotEqual(report["SPDXID"], None)
45 53
46 python = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-spdx-tools-native'), 'nativepython3') 54 python = os.path.join(
47 validator = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-spdx-tools-native'), 'pyspdxtools') 55 get_bb_var("STAGING_BINDIR", "python3-spdx-tools-native"),
56 "nativepython3",
57 )
58 validator = os.path.join(
59 get_bb_var("STAGING_BINDIR", "python3-spdx-tools-native"), "pyspdxtools"
60 )
48 result = runCmd("{} {} -i {}".format(python, validator, filename)) 61 result = runCmd("{} {} -i {}".format(python, validator, filename))
49 62
50 self.assertExists(full_file_path) 63 self.assertExists(full_file_path)
@@ -52,3 +65,106 @@ INHERIT += "create-spdx"
52 65
53 def test_spdx_base_files(self): 66 def test_spdx_base_files(self):
54 self.check_recipe_spdx("packages", "base-files.spdx.json", "base-files") 67 self.check_recipe_spdx("packages", "base-files.spdx.json", "base-files")
68
69
70class SPDX3CheckBase(object):
71 """
72 Base class for checking SPDX 3 based tests
73 """
74
75 def check_spdx_file(self, filename):
76 import oe.spdx30
77
78 self.assertExists(filename)
79
80 # Read the file
81 objset = oe.spdx30.SHACLObjectSet()
82 with open(filename, "r") as f:
83 d = oe.spdx30.JSONLDDeserializer()
84 d.read(f, objset)
85
86 return objset
87
88 def check_recipe_spdx(self, target_name, spdx_path, *, task=None, extraconf=""):
89 config = textwrap.dedent(
90 f"""\
91 INHERIT:remove = "create-spdx"
92 INHERIT += "{self.SPDX_CLASS}"
93 {extraconf}
94 """
95 )
96 self.write_config(config)
97
98 if task:
99 bitbake(f"-c {task} {target_name}")
100 else:
101 bitbake(target_name)
102
103 filename = spdx_path.format(
104 **get_bb_vars(
105 [
106 "DEPLOY_DIR_IMAGE",
107 "DEPLOY_DIR_SPDX",
108 "MACHINE",
109 "MACHINE_ARCH",
110 "SDKMACHINE",
111 "SDK_DEPLOY",
112 "SPDX_VERSION",
113 "TOOLCHAIN_OUTPUTNAME",
114 ],
115 target_name,
116 )
117 )
118
119 return self.check_spdx_file(filename)
120
121 def check_objset_missing_ids(self, objset):
122 if objset.missing_ids:
123 self.assertTrue(
124 False,
125 "The following SPDXIDs are unresolved:\n "
126 + "\n ".join(objset.missing_ids),
127 )
128
129
130class SPDX30Check(SPDX3CheckBase, OESelftestTestCase):
131 SPDX_CLASS = "create-spdx-3.0"
132
133 def test_base_files(self):
134 self.check_recipe_spdx(
135 "base-files",
136 "{DEPLOY_DIR_SPDX}/{MACHINE_ARCH}/packages/base-files.spdx.json",
137 )
138
139 def test_core_image_minimal(self):
140 objset = self.check_recipe_spdx(
141 "core-image-minimal",
142 "{DEPLOY_DIR_IMAGE}/core-image-minimal-{MACHINE}.rootfs.spdx.json",
143 )
144
145 # Document should be fully linked
146 self.check_objset_missing_ids(objset)
147
148 def test_core_image_minimal_sdk(self):
149 objset = self.check_recipe_spdx(
150 "core-image-minimal",
151 "{SDK_DEPLOY}/{TOOLCHAIN_OUTPUTNAME}.spdx.json",
152 task="populate_sdk",
153 )
154
155 # Document should be fully linked
156 self.check_objset_missing_ids(objset)
157
158 def test_baremetal_helloworld(self):
159 objset = self.check_recipe_spdx(
160 "baremetal-helloworld",
161 "{DEPLOY_DIR_IMAGE}/baremetal-helloworld-image-{MACHINE}.spdx.json",
162 extraconf=textwrap.dedent(
163 """\
164 TCLIBC = "baremetal"
165 """
166 ),
167 )
168
169 # Document should be fully linked
170 self.check_objset_missing_ids(objset)
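
For reference, the SPDX 3.0 loading flow exercised by SPDX3CheckBase above can be reproduced outside the test harness. A minimal sketch, assuming oe.spdx30 (i.e. meta/lib) is importable and using only the calls shown in the diff:

import sys

import oe.spdx30

def load_spdx3(filename):
    # Deserialize a JSON-LD SPDX 3.0 document into an object set
    objset = oe.spdx30.SHACLObjectSet()
    with open(filename, "r") as f:
        oe.spdx30.JSONLDDeserializer().read(f, objset)
    return objset

if __name__ == "__main__":
    objset = load_spdx3(sys.argv[1])
    # A fully linked document has no unresolved SPDXIDs
    if objset.missing_ids:
        print("Unresolved SPDXIDs:\n  " + "\n  ".join(objset.missing_ids))
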
diff --git a/meta/lib/oeqa/selftest/cases/sstatetests.py b/meta/lib/oeqa/selftest/cases/sstatetests.py
index 86d6cd7464..ae295bef5f 100644
--- a/meta/lib/oeqa/selftest/cases/sstatetests.py
+++ b/meta/lib/oeqa/selftest/cases/sstatetests.py
@@ -378,7 +378,6 @@ class SStateHashSameSigs(SStateBase):
378 self.write_config(""" 378 self.write_config("""
379MACHINE = "qemux86" 379MACHINE = "qemux86"
380TMPDIR = "${TOPDIR}/tmp-sstatesamehash" 380TMPDIR = "${TOPDIR}/tmp-sstatesamehash"
381TCLIBCAPPEND = ""
382BUILD_ARCH = "x86_64" 381BUILD_ARCH = "x86_64"
383BUILD_OS = "linux" 382BUILD_OS = "linux"
384SDKMACHINE = "x86_64" 383SDKMACHINE = "x86_64"
@@ -390,7 +389,6 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
390 self.write_config(""" 389 self.write_config("""
391MACHINE = "qemux86" 390MACHINE = "qemux86"
392TMPDIR = "${TOPDIR}/tmp-sstatesamehash2" 391TMPDIR = "${TOPDIR}/tmp-sstatesamehash2"
393TCLIBCAPPEND = ""
394BUILD_ARCH = "i686" 392BUILD_ARCH = "i686"
395BUILD_OS = "linux" 393BUILD_OS = "linux"
396SDKMACHINE = "i686" 394SDKMACHINE = "i686"
@@ -426,7 +424,6 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
426 424
427 self.write_config(""" 425 self.write_config("""
428TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" 426TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
429TCLIBCAPPEND = \"\"
430NATIVELSBSTRING = \"DistroA\" 427NATIVELSBSTRING = \"DistroA\"
431BB_SIGNATURE_HANDLER = "OEBasicHash" 428BB_SIGNATURE_HANDLER = "OEBasicHash"
432""") 429""")
@@ -434,7 +431,6 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
434 bitbake("core-image-weston -S none") 431 bitbake("core-image-weston -S none")
435 self.write_config(""" 432 self.write_config("""
436TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" 433TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
437TCLIBCAPPEND = \"\"
438NATIVELSBSTRING = \"DistroB\" 434NATIVELSBSTRING = \"DistroB\"
439BB_SIGNATURE_HANDLER = "OEBasicHash" 435BB_SIGNATURE_HANDLER = "OEBasicHash"
440""") 436""")
@@ -463,17 +459,17 @@ class SStateHashSameSigs2(SStateBase):
463 459
464 configA = """ 460 configA = """
465TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" 461TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
466TCLIBCAPPEND = \"\"
467MACHINE = \"qemux86-64\" 462MACHINE = \"qemux86-64\"
468BB_SIGNATURE_HANDLER = "OEBasicHash" 463BB_SIGNATURE_HANDLER = "OEBasicHash"
469""" 464"""
470 #OLDEST_KERNEL is arch specific so set to a different value here for testing 465 #OLDEST_KERNEL is arch specific so set to a different value here for testing
471 configB = """ 466 configB = """
472TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" 467TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
473TCLIBCAPPEND = \"\"
474MACHINE = \"qemuarm\" 468MACHINE = \"qemuarm\"
475OLDEST_KERNEL = \"3.3.0\" 469OLDEST_KERNEL = \"3.3.0\"
476BB_SIGNATURE_HANDLER = "OEBasicHash" 470BB_SIGNATURE_HANDLER = "OEBasicHash"
471ERROR_QA:append = " somenewoption"
472WARN_QA:append = " someotheroption"
477""" 473"""
478 self.sstate_common_samesigs(configA, configB, allarch=True) 474 self.sstate_common_samesigs(configA, configB, allarch=True)
479 475
@@ -484,7 +480,6 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
484 480
485 configA = """ 481 configA = """
486TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" 482TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
487TCLIBCAPPEND = \"\"
488MACHINE = \"qemux86-64\" 483MACHINE = \"qemux86-64\"
489require conf/multilib.conf 484require conf/multilib.conf
490MULTILIBS = \"multilib:lib32\" 485MULTILIBS = \"multilib:lib32\"
@@ -493,7 +488,6 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
493""" 488"""
494 configB = """ 489 configB = """
495TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" 490TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
496TCLIBCAPPEND = \"\"
497MACHINE = \"qemuarm\" 491MACHINE = \"qemuarm\"
498require conf/multilib.conf 492require conf/multilib.conf
499MULTILIBS = \"\" 493MULTILIBS = \"\"
@@ -511,7 +505,6 @@ class SStateHashSameSigs3(SStateBase):
511 505
512 self.write_config(""" 506 self.write_config("""
513TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" 507TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
514TCLIBCAPPEND = \"\"
515MACHINE = \"qemux86\" 508MACHINE = \"qemux86\"
516require conf/multilib.conf 509require conf/multilib.conf
517MULTILIBS = "multilib:lib32" 510MULTILIBS = "multilib:lib32"
@@ -522,7 +515,6 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
522 bitbake("world meta-toolchain -S none") 515 bitbake("world meta-toolchain -S none")
523 self.write_config(""" 516 self.write_config("""
524TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" 517TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
525TCLIBCAPPEND = \"\"
526MACHINE = \"qemux86copy\" 518MACHINE = \"qemux86copy\"
527require conf/multilib.conf 519require conf/multilib.conf
528MULTILIBS = "multilib:lib32" 520MULTILIBS = "multilib:lib32"
@@ -559,7 +551,6 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
559 551
560 self.write_config(""" 552 self.write_config("""
561TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" 553TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
562TCLIBCAPPEND = \"\"
563MACHINE = \"qemux86\" 554MACHINE = \"qemux86\"
564require conf/multilib.conf 555require conf/multilib.conf
565MULTILIBS = "multilib:lib32" 556MULTILIBS = "multilib:lib32"
@@ -570,7 +561,6 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
570 bitbake("binutils-native -S none") 561 bitbake("binutils-native -S none")
571 self.write_config(""" 562 self.write_config("""
572TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" 563TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
573TCLIBCAPPEND = \"\"
574MACHINE = \"qemux86copy\" 564MACHINE = \"qemux86copy\"
575BB_SIGNATURE_HANDLER = "OEBasicHash" 565BB_SIGNATURE_HANDLER = "OEBasicHash"
576""") 566""")
@@ -598,7 +588,6 @@ class SStateHashSameSigs4(SStateBase):
598 588
599 self.write_config(""" 589 self.write_config("""
600TMPDIR = "${TOPDIR}/tmp-sstatesamehash" 590TMPDIR = "${TOPDIR}/tmp-sstatesamehash"
601TCLIBCAPPEND = ""
602BB_NUMBER_THREADS = "${@oe.utils.cpu_count()}" 591BB_NUMBER_THREADS = "${@oe.utils.cpu_count()}"
603PARALLEL_MAKE = "-j 1" 592PARALLEL_MAKE = "-j 1"
604DL_DIR = "${TOPDIR}/download1" 593DL_DIR = "${TOPDIR}/download1"
@@ -613,7 +602,6 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
613 bitbake("world meta-toolchain -S none") 602 bitbake("world meta-toolchain -S none")
614 self.write_config(""" 603 self.write_config("""
615TMPDIR = "${TOPDIR}/tmp-sstatesamehash2" 604TMPDIR = "${TOPDIR}/tmp-sstatesamehash2"
616TCLIBCAPPEND = ""
617BB_NUMBER_THREADS = "${@oe.utils.cpu_count()+1}" 605BB_NUMBER_THREADS = "${@oe.utils.cpu_count()+1}"
618PARALLEL_MAKE = "-j 2" 606PARALLEL_MAKE = "-j 2"
619DL_DIR = "${TOPDIR}/download2" 607DL_DIR = "${TOPDIR}/download2"
@@ -724,7 +712,6 @@ class SStateFindSiginfo(SStateBase):
724 """ 712 """
725 self.write_config(""" 713 self.write_config("""
726TMPDIR = \"${TOPDIR}/tmp-sstates-findsiginfo\" 714TMPDIR = \"${TOPDIR}/tmp-sstates-findsiginfo\"
727TCLIBCAPPEND = \"\"
728MACHINE = \"qemux86-64\" 715MACHINE = \"qemux86-64\"
729require conf/multilib.conf 716require conf/multilib.conf
730MULTILIBS = "multilib:lib32" 717MULTILIBS = "multilib:lib32"
@@ -917,15 +904,24 @@ INHERIT += "base-do-configure-modified"
917""", 904""",
918expected_sametmp_output, expected_difftmp_output) 905expected_sametmp_output, expected_difftmp_output)
919 906
920@OETestTag("yocto-mirrors") 907class SStateCheckObjectPresence(SStateBase):
921class SStateMirrors(SStateBase): 908 def check_bb_output(self, output, targets, exceptions, check_cdn):
922 def check_bb_output(self, output, exceptions, check_cdn):
923 def is_exception(object, exceptions): 909 def is_exception(object, exceptions):
924 for e in exceptions: 910 for e in exceptions:
925 if re.search(e, object): 911 if re.search(e, object):
926 return True 912 return True
927 return False 913 return False
928 914
915 # sstate is checked for existence of these, but they never get written out to begin with
916 exceptions += ["{}.*image_qa".format(t) for t in targets.split()]
917 exceptions += ["{}.*deploy_source_date_epoch".format(t) for t in targets.split()]
918 exceptions += ["{}.*image_complete".format(t) for t in targets.split()]
919 exceptions += ["linux-yocto.*shared_workdir"]
 920 # these get influenced by IMAGE_FSTYPES tweaks in yocto-autobuilder-helper's config.json (on x86-64)
921 # additionally, they depend on noexec (thus, absent stamps) package, install, etc. image tasks,
922 # which makes tracing other changes difficult
923 exceptions += ["{}.*create_.*spdx".format(t) for t in targets.split()]
924
929 output_l = output.splitlines() 925 output_l = output.splitlines()
930 for l in output_l: 926 for l in output_l:
931 if l.startswith("Sstate summary"): 927 if l.startswith("Sstate summary"):
@@ -960,18 +956,9 @@ class SStateMirrors(SStateBase):
960 self.assertEqual(len(failed_urls), missing_objects, "Amount of reported missing objects does not match failed URLs: {}\nFailed URLs:\n{}\nFetcher diagnostics:\n{}".format(missing_objects, "\n".join(failed_urls), "\n".join(failed_urls_extrainfo))) 956 self.assertEqual(len(failed_urls), missing_objects, "Amount of reported missing objects does not match failed URLs: {}\nFailed URLs:\n{}\nFetcher diagnostics:\n{}".format(missing_objects, "\n".join(failed_urls), "\n".join(failed_urls_extrainfo)))
961 self.assertEqual(len(failed_urls), 0, "Missing objects in the cache:\n{}\nFetcher diagnostics:\n{}".format("\n".join(failed_urls), "\n".join(failed_urls_extrainfo))) 957 self.assertEqual(len(failed_urls), 0, "Missing objects in the cache:\n{}\nFetcher diagnostics:\n{}".format("\n".join(failed_urls), "\n".join(failed_urls_extrainfo)))
962 958
959@OETestTag("yocto-mirrors")
960class SStateMirrors(SStateCheckObjectPresence):
963 def run_test(self, machine, targets, exceptions, check_cdn = True, ignore_errors = False): 961 def run_test(self, machine, targets, exceptions, check_cdn = True, ignore_errors = False):
964 # sstate is checked for existence of these, but they never get written out to begin with
965 exceptions += ["{}.*image_qa".format(t) for t in targets.split()]
966 exceptions += ["{}.*deploy_source_date_epoch".format(t) for t in targets.split()]
967 exceptions += ["{}.*image_complete".format(t) for t in targets.split()]
968 exceptions += ["linux-yocto.*shared_workdir"]
969 # these get influnced by IMAGE_FSTYPES tweaks in yocto-autobuilder-helper's config.json (on x86-64)
970 # additionally, they depend on noexec (thus, absent stamps) package, install, etc. image tasks,
971 # which makes tracing other changes difficult
972 exceptions += ["{}.*create_spdx".format(t) for t in targets.split()]
973 exceptions += ["{}.*create_runtime_spdx".format(t) for t in targets.split()]
974
975 if check_cdn: 962 if check_cdn:
976 self.config_sstate(True) 963 self.config_sstate(True)
977 self.append_config(""" 964 self.append_config("""
@@ -987,7 +974,7 @@ MACHINE = "{}"
987 bitbake("-S none {}".format(targets)) 974 bitbake("-S none {}".format(targets))
988 if ignore_errors: 975 if ignore_errors:
989 return 976 return
990 self.check_bb_output(result.output, exceptions, check_cdn) 977 self.check_bb_output(result.output, targets, exceptions, check_cdn)
991 978
992 def test_cdn_mirror_qemux86_64(self): 979 def test_cdn_mirror_qemux86_64(self):
993 exceptions = [] 980 exceptions = []
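
The refactored check_bb_output() above filters expected-missing sstate objects with regular expressions built from the target list. A small sketch of that filtering, with illustrative object names (the "recipe:task" form is an assumption, not taken from the patch):

import re

def is_exception(obj, exceptions):
    return any(re.search(e, obj) for e in exceptions)

targets = "core-image-minimal"
exceptions = ["{}.*image_qa".format(t) for t in targets.split()]
exceptions += ["{}.*create_.*spdx".format(t) for t in targets.split()]

print(is_exception("core-image-minimal:do_image_qa", exceptions))  # True: skipped by the check
print(is_exception("core-image-minimal:do_rootfs", exceptions))    # False: must exist in the cache
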
diff --git a/meta/lib/oeqa/selftest/context.py b/meta/lib/oeqa/selftest/context.py
index 99186175e5..acc3b073bd 100644
--- a/meta/lib/oeqa/selftest/context.py
+++ b/meta/lib/oeqa/selftest/context.py
@@ -117,8 +117,11 @@ class OESelftestTestContext(OETestContext):
117 newbblayers += 'BBLAYERS = "%s"\n' % ' '.join(bblayers_abspath) 117 newbblayers += 'BBLAYERS = "%s"\n' % ' '.join(bblayers_abspath)
118 f.write(newbblayers) 118 f.write(newbblayers)
119 119
120 # Rewrite builddir paths seen in environment variables
120 for e in os.environ: 121 for e in os.environ:
 121 if builddir + "/" in os.environ[e]: 122 # Rewrite paths that point inside builddir once made absolute
 123 # (e.g. $builddir/conf/ would be rewritten but not $builddir/../bitbake/)
124 if builddir + "/" in os.environ[e] and builddir + "/" in os.path.abspath(os.environ[e]):
122 os.environ[e] = os.environ[e].replace(builddir + "/", newbuilddir + "/") 125 os.environ[e] = os.environ[e].replace(builddir + "/", newbuilddir + "/")
123 if os.environ[e].endswith(builddir): 126 if os.environ[e].endswith(builddir):
124 os.environ[e] = os.environ[e].replace(builddir, newbuilddir) 127 os.environ[e] = os.environ[e].replace(builddir, newbuilddir)
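
The tightened condition above only rewrites environment values whose absolute form resolves inside builddir, so relative escapes such as builddir/../bitbake are left alone. A short sketch with placeholder paths and variable names:

import os

builddir = "/home/user/build"          # placeholder
newbuilddir = "/home/user/build-st"    # placeholder

env = {
    "EXAMPLE_CONF": builddir + "/conf",          # rewritten
    "EXAMPLE_BITBAKE": builddir + "/../bitbake", # kept: resolves outside builddir
}

for e in env:
    if builddir + "/" in env[e] and builddir + "/" in os.path.abspath(env[e]):
        env[e] = env[e].replace(builddir + "/", newbuilddir + "/")

print(env)  # only EXAMPLE_CONF now points at newbuilddir
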
diff --git a/meta/lib/oeqa/utils/__init__.py b/meta/lib/oeqa/utils/__init__.py
index 53bdcbf266..e03f7e33bb 100644
--- a/meta/lib/oeqa/utils/__init__.py
+++ b/meta/lib/oeqa/utils/__init__.py
@@ -96,4 +96,10 @@ def get_json_result_dir(d):
96 custom_json_result_dir = d.getVar("OEQA_JSON_RESULT_DIR") 96 custom_json_result_dir = d.getVar("OEQA_JSON_RESULT_DIR")
97 if custom_json_result_dir: 97 if custom_json_result_dir:
98 json_result_dir = custom_json_result_dir 98 json_result_dir = custom_json_result_dir
99 return json_result_dir \ No newline at end of file 99 return json_result_dir
100
101def get_artefact_dir(d):
102 custom_json_result_dir = d.getVar("OEQA_ARTEFACT_DIR")
103 if custom_json_result_dir:
104 return custom_json_result_dir
105 return os.path.join(d.getVar("LOG_DIR"), 'oeqa-artefacts')
diff --git a/meta/lib/oeqa/utils/commands.py b/meta/lib/oeqa/utils/commands.py
index 575e380017..bf2f49d0c0 100644
--- a/meta/lib/oeqa/utils/commands.py
+++ b/meta/lib/oeqa/utils/commands.py
@@ -314,7 +314,23 @@ def create_temp_layer(templayerdir, templayername, priority=999, recipepathspec=
314@contextlib.contextmanager 314@contextlib.contextmanager
315def runqemu(pn, ssh=True, runqemuparams='', image_fstype=None, launch_cmd=None, qemuparams=None, overrides={}, discard_writes=True): 315def runqemu(pn, ssh=True, runqemuparams='', image_fstype=None, launch_cmd=None, qemuparams=None, overrides={}, discard_writes=True):
316 """ 316 """
317 launch_cmd means directly run the command, don't need set rootfs or env vars. 317 Starts a context manager for an 'oeqa.targetcontrol.QemuTarget' resource.
318 The underlying Qemu will be booted into a shell when the generator yields
319 and stopped when the 'with' block exits.
320
321 Usage:
322
323 with runqemu('core-image-minimal') as qemu:
324 qemu.run_serial('cat /proc/cpuinfo')
325
326 Args:
327 pn (str): (image) recipe to run on
328 ssh (boolean): whether or not to enable SSH (network access)
329 runqemuparams (str): space-separated list of params to pass to 'runqemu' script (like 'nographics', 'ovmf', etc.)
330 image_fstype (str): IMAGE_FSTYPE to use
331 launch_cmd (str): directly run this command and bypass automatic runqemu parameter generation
332 overrides (dict): dict of "'<bitbake-variable>': value" pairs that allows overriding bitbake variables
333 discard_writes (boolean): enables qemu -snapshot feature to prevent modifying original image
318 """ 334 """
319 335
320 import bb.tinfoil 336 import bb.tinfoil
diff --git a/meta/lib/oeqa/utils/postactions.py b/meta/lib/oeqa/utils/postactions.py
index ecdddd2d40..8f787838b9 100644
--- a/meta/lib/oeqa/utils/postactions.py
+++ b/meta/lib/oeqa/utils/postactions.py
@@ -7,23 +7,20 @@
7# Run a set of actions after tests. The runner provides internal data 7# Run a set of actions after tests. The runner provides internal data
8# dictionary as well as test context to any action to run. 8# dictionary as well as test context to any action to run.
9 9
10from oeqa.utils import get_json_result_dir 10import datetime
11 11import io
12def create_artifacts_directory(d, tc): 12import os
13 import shutil 13import stat
14 14import subprocess
15 local_artifacts_dir = os.path.join(get_json_result_dir(d), "artifacts") 15import tempfile
16 if os.path.isdir(local_artifacts_dir): 16from oeqa.utils import get_artefact_dir
17 shutil.rmtree(local_artifacts_dir)
18
19 os.makedirs(local_artifacts_dir)
20 17
21################################################################## 18##################################################################
22# Host/target statistics 19# Host/target statistics
23################################################################## 20##################################################################
24 21
25def get_target_disk_usage(d, tc): 22def get_target_disk_usage(d, tc, artifacts_list, outputdir):
26 output_file = os.path.join(get_json_result_dir(d), "artifacts", "target_disk_usage.txt") 23 output_file = os.path.join(outputdir, "target_disk_usage.txt")
27 try: 24 try:
28 (status, output) = tc.target.run('df -h') 25 (status, output) = tc.target.run('df -h')
29 with open(output_file, 'w') as f: 26 with open(output_file, 'w') as f:
@@ -32,10 +29,10 @@ def get_target_disk_usage(d, tc):
32 except Exception as e: 29 except Exception as e:
33 bb.warn(f"Can not get target disk usage: {e}") 30 bb.warn(f"Can not get target disk usage: {e}")
34 31
35def get_host_disk_usage(d, tc): 32def get_host_disk_usage(d, tc, artifacts_list, outputdir):
36 import subprocess 33 import subprocess
37 34
38 output_file = os.path.join(get_json_result_dir(d), "artifacts", "host_disk_usage.txt") 35 output_file = os.path.join(outputdir, "host_disk_usage.txt")
39 try: 36 try:
40 with open(output_file, 'w') as f: 37 with open(output_file, 'w') as f:
41 output = subprocess.run(['df', '-hl'], check=True, text=True, stdout=f, env={}) 38 output = subprocess.run(['df', '-hl'], check=True, text=True, stdout=f, env={})
@@ -61,25 +58,21 @@ def get_artifacts_list(target, raw_list):
61 58
62 return result 59 return result
63 60
64def retrieve_test_artifacts(target, artifacts_list, target_dir): 61def list_and_fetch_failed_tests_artifacts(d, tc, artifacts_list, outputdir):
65 local_artifacts_dir = os.path.join(target_dir, "artifacts") 62 artifacts_list = get_artifacts_list(tc.target, artifacts_list)
66 for artifact_path in artifacts_list:
67 if not os.path.isabs(artifact_path):
68 bb.warn(f"{artifact_path} is not an absolute path")
69 continue
70 try:
71 dest_dir = os.path.join(local_artifacts_dir, os.path.dirname(artifact_path[1:]))
72 os.makedirs(dest_dir, exist_ok=True)
73 target.copyFrom(artifact_path, dest_dir)
74 except Exception as e:
75 bb.warn(f"Can not retrieve {artifact_path} from test target: {e}")
76
77def list_and_fetch_failed_tests_artifacts(d, tc):
78 artifacts_list = get_artifacts_list(tc.target, d.getVar("TESTIMAGE_FAILED_QA_ARTIFACTS"))
79 if not artifacts_list: 63 if not artifacts_list:
80 bb.warn("Could not load artifacts list, skip artifacts retrieval") 64 bb.warn("Could not load artifacts list, skip artifacts retrieval")
81 else: 65 return
82 retrieve_test_artifacts(tc.target, artifacts_list, get_json_result_dir(d)) 66 try:
67 cmd = "tar zcf - " + " ".join(artifacts_list)
68 (status, output) = tc.target.run(cmd, raw = True)
69 if status != 0 or not output:
70 raise Exception("Error while fetching compressed artifacts")
71 archive_name = os.path.join(outputdir, "tests_artifacts.tar.gz")
72 with open(archive_name, "wb") as f:
73 f.write(output)
74 except Exception as e:
75 bb.warn(f"Can not retrieve artifacts from test target: {e}")
83 76
84 77
85################################################################## 78##################################################################
@@ -87,12 +80,22 @@ def list_and_fetch_failed_tests_artifacts(d, tc):
87################################################################## 80##################################################################
88 81
89def run_failed_tests_post_actions(d, tc): 82def run_failed_tests_post_actions(d, tc):
83 artifacts = d.getVar("TESTIMAGE_FAILED_QA_ARTIFACTS")
84 # Allow all the code to be disabled by having no artifacts set, e.g. for systems with no ssh support
85 if not artifacts:
86 return
87
88 outputdir = get_artefact_dir(d)
89 os.makedirs(outputdir, exist_ok=True)
90 datestr = datetime.datetime.now().strftime('%Y%m%d')
91 outputdir = tempfile.mkdtemp(prefix='oeqa-target-artefacts-%s-' % datestr, dir=outputdir)
92 os.chmod(outputdir, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
93
90 post_actions=[ 94 post_actions=[
91 create_artifacts_directory,
92 list_and_fetch_failed_tests_artifacts, 95 list_and_fetch_failed_tests_artifacts,
93 get_target_disk_usage, 96 get_target_disk_usage,
94 get_host_disk_usage 97 get_host_disk_usage
95 ] 98 ]
96 99
97 for action in post_actions: 100 for action in post_actions:
98 action(d, tc) 101 action(d, tc, artifacts, outputdir)
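
The reworked post-action above streams all requested artifacts off the target as a single compressed tarball instead of copying files one by one. A stand-alone sketch of that retrieval step, assuming a target object exposing run(cmd, raw=True) that returns (status, bytes) like the OEQA target used in the patch:

import os

def fetch_artifacts(target, artifacts_list, outputdir):
    # Compress everything on the target and read the archive back over the connection
    cmd = "tar zcf - " + " ".join(artifacts_list)
    status, output = target.run(cmd, raw=True)
    if status != 0 or not output:
        raise Exception("Error while fetching compressed artifacts")
    archive_name = os.path.join(outputdir, "tests_artifacts.tar.gz")
    with open(archive_name, "wb") as f:
        f.write(output)
    return archive_name
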
diff --git a/meta/lib/patchtest/patch.py b/meta/lib/patchtest/patch.py
index baf6283873..90faf3eeb4 100644
--- a/meta/lib/patchtest/patch.py
+++ b/meta/lib/patchtest/patch.py
@@ -14,22 +14,12 @@ import utils
14logger = logging.getLogger('patchtest') 14logger = logging.getLogger('patchtest')
15 15
16class PatchTestPatch(object): 16class PatchTestPatch(object):
17 MERGE_STATUS_INVALID = 'INVALID'
18 MERGE_STATUS_NOT_MERGED = 'NOTMERGED'
19 MERGE_STATUS_MERGED_SUCCESSFULL = 'PASS'
20 MERGE_STATUS_MERGED_FAIL = 'FAIL'
21 MERGE_STATUS = (MERGE_STATUS_INVALID,
22 MERGE_STATUS_NOT_MERGED,
23 MERGE_STATUS_MERGED_SUCCESSFULL,
24 MERGE_STATUS_MERGED_FAIL)
25
26 def __init__(self, path, forcereload=False): 17 def __init__(self, path, forcereload=False):
27 self._path = path 18 self._path = path
28 self._forcereload = forcereload 19 self._forcereload = forcereload
29 20
30 self._contents = None 21 self._contents = None
31 self._branch = None 22 self._branch = None
32 self._merge_status = PatchTestPatch.MERGE_STATUS_NOT_MERGED
33 23
34 @property 24 @property
35 def contents(self): 25 def contents(self):
@@ -51,12 +41,3 @@ class PatchTestPatch(object):
51 if not self._branch: 41 if not self._branch:
52 self._branch = utils.get_branch(self._path) 42 self._branch = utils.get_branch(self._path)
53 return self._branch 43 return self._branch
54
55 def setmergestatus(self, status):
56 self._merge_status = status
57
58 def getmergestatus(self):
59 return self._merge_status
60
61 merge_status = property(getmergestatus, setmergestatus)
62
diff --git a/meta/lib/patchtest/repo.py b/meta/lib/patchtest/repo.py
index d3788f466d..5f361ac500 100644
--- a/meta/lib/patchtest/repo.py
+++ b/meta/lib/patchtest/repo.py
@@ -11,6 +11,7 @@
11import os 11import os
12import utils 12import utils
13import logging 13import logging
14import git
14from patch import PatchTestPatch 15from patch import PatchTestPatch
15 16
16logger = logging.getLogger('patchtest') 17logger = logging.getLogger('patchtest')
@@ -21,15 +22,17 @@ class PatchTestRepo(object):
21 # prefixes used for temporal branches/stashes 22 # prefixes used for temporal branches/stashes
22 prefix = 'patchtest' 23 prefix = 'patchtest'
23 24
25
24 def __init__(self, patch, repodir, commit=None, branch=None): 26 def __init__(self, patch, repodir, commit=None, branch=None):
25 self._repodir = repodir 27 self._repodir = repodir
28 self._repo = git.Repo.init(repodir)
26 self._patch = PatchTestPatch(patch) 29 self._patch = PatchTestPatch(patch)
27 self._current_branch = self._get_current_branch() 30 self._current_branch = self._repo.active_branch.name
28 31
29 # targeted branch defined on the patch may be invalid, so make sure there 32 # targeted branch defined on the patch may be invalid, so make sure there
30 # is a corresponding remote branch 33 # is a corresponding remote branch
31 valid_patch_branch = None 34 valid_patch_branch = None
32 if self._patch.branch in self.upstream_branches(): 35 if self._patch.branch in self._repo.branches:
33 valid_patch_branch = self._patch.branch 36 valid_patch_branch = self._patch.branch
34 37
35 # Target Branch 38 # Target Branch
@@ -52,22 +55,19 @@ class PatchTestRepo(object):
52 55
53 self._workingbranch = "%s_%s" % (PatchTestRepo.prefix, os.getpid()) 56 self._workingbranch = "%s_%s" % (PatchTestRepo.prefix, os.getpid())
54 57
55 # create working branch 58 # create working branch. Use the '-B' flag so that we just
56 self._exec({'cmd': ['git', 'checkout', '-b', self._workingbranch, self._commit]}) 59 # check out the existing one if it's there
60 self._repo.git.execute(['git', 'checkout', '-B', self._workingbranch, self._commit])
57 61
58 self._patchmerged = False 62 self._patchmerged = False
59 63
60 # Check if patch can be merged using git-am 64 # Check if patch can be merged using git-am
61 self._patchcanbemerged = True 65 self._patchcanbemerged = True
62 try: 66 try:
63 self._exec({'cmd': ['git', 'am', '--keep-cr'], 'input': self._patch.contents}) 67 # Make sure to get the absolute path of the file
64 except utils.CmdException as ce: 68 self._repo.git.execute(['git', 'apply', '--check', os.path.abspath(self._patch.path)], with_exceptions=True)
65 self._exec({'cmd': ['git', 'am', '--abort']}) 69 except git.exc.GitCommandError as ce:
66 self._patchcanbemerged = False 70 self._patchcanbemerged = False
67 finally:
68 # if patch was applied, remove it
69 if self._patchcanbemerged:
70 self._exec({'cmd':['git', 'reset', '--hard', self._commit]})
71 71
72 # for debugging purposes, print all repo parameters 72 # for debugging purposes, print all repo parameters
73 logger.debug("Parameters") 73 logger.debug("Parameters")
@@ -97,78 +97,24 @@ class PatchTestRepo(object):
97 def canbemerged(self): 97 def canbemerged(self):
98 return self._patchcanbemerged 98 return self._patchcanbemerged
99 99
100 def _exec(self, cmds):
101 _cmds = []
102 if isinstance(cmds, dict):
103 _cmds.append(cmds)
104 elif isinstance(cmds, list):
105 _cmds = cmds
106 else:
107 raise utils.CmdException({'cmd':str(cmds)})
108
109 results = []
110 cmdfailure = False
111 try:
112 results = utils.exec_cmds(_cmds, self._repodir)
113 except utils.CmdException as ce:
114 cmdfailure = True
115 raise ce
116 finally:
117 if cmdfailure:
118 for cmd in _cmds:
119 logger.debug("CMD: %s" % ' '.join(cmd['cmd']))
120 else:
121 for result in results:
122 cmd, rc, stdout, stderr = ' '.join(result['cmd']), result['returncode'], result['stdout'], result['stderr']
123 logger.debug("CMD: %s RCODE: %s STDOUT: %s STDERR: %s" % (cmd, rc, stdout, stderr))
124
125 return results
126
127 def _get_current_branch(self, commit='HEAD'):
128 cmd = {'cmd':['git', 'rev-parse', '--abbrev-ref', commit]}
129 cb = self._exec(cmd)[0]['stdout']
130 if cb == commit:
131 logger.warning('You may be detached so patchtest will checkout to master after execution')
132 cb = 'master'
133 return cb
134
135 def _get_commitid(self, commit): 100 def _get_commitid(self, commit):
136 101
137 if not commit: 102 if not commit:
138 return None 103 return None
139 104
140 try: 105 try:
141 cmd = {'cmd':['git', 'rev-parse', '--short', commit]} 106 return self._repo.rev_parse(commit).hexsha
142 return self._exec(cmd)[0]['stdout'] 107 except Exception as e:
143 except utils.CmdException as ce: 108 print(f"Couldn't find commit {commit} in repo")
144 # try getting the commit under any remotes
145 cmd = {'cmd':['git', 'remote']}
146 remotes = self._exec(cmd)[0]['stdout']
147 for remote in remotes.splitlines():
148 cmd = {'cmd':['git', 'rev-parse', '--short', '%s/%s' % (remote, commit)]}
149 try:
150 return self._exec(cmd)[0]['stdout']
151 except utils.CmdException:
152 pass
153 109
154 return None 110 return None
155 111
156 def upstream_branches(self):
157 cmd = {'cmd':['git', 'branch', '--remotes']}
158 remote_branches = self._exec(cmd)[0]['stdout']
159
160 # just get the names, without the remote name
161 branches = set(branch.split('/')[-1] for branch in remote_branches.splitlines())
162 return branches
163
164 def merge(self): 112 def merge(self):
165 if self._patchcanbemerged: 113 if self._patchcanbemerged:
166 self._exec({'cmd': ['git', 'am', '--keep-cr'], 114 self._repo.git.execute(['git', 'am', '--keep-cr', os.path.abspath(self._patch.path)])
167 'input': self._patch.contents,
168 'updateenv': {'PTRESOURCE':self._patch.path}})
169 self._patchmerged = True 115 self._patchmerged = True
170 116
171 def clean(self): 117 def clean(self):
172 self._exec({'cmd':['git', 'checkout', '%s' % self._current_branch]}) 118 self._repo.git.execute(['git', 'checkout', self._current_branch])
173 self._exec({'cmd':['git', 'branch', '-D', self._workingbranch]}) 119 self._repo.git.execute(['git', 'branch', '-D', self._workingbranch])
174 self._patchmerged = False 120 self._patchmerged = False
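
The rewrite above drops the hand-rolled _exec() wrapper in favour of GitPython. A minimal sketch of the equivalent GitPython calls, with placeholder paths:

import os
import git

repo = git.Repo.init("/path/to/repo")     # reuses an existing checkout if present
current_branch = repo.active_branch.name  # replaces 'git rev-parse --abbrev-ref HEAD'
head_sha = repo.rev_parse("HEAD").hexsha  # replaces 'git rev-parse --short <commit>'

# Dry-run a patch without modifying the work tree
try:
    repo.git.execute(["git", "apply", "--check", os.path.abspath("some.patch")],
                     with_exceptions=True)
    mergeable = True
except git.exc.GitCommandError:
    mergeable = False
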
diff --git a/meta/lib/patchtest/requirements.txt b/meta/lib/patchtest/requirements.txt
index ba55ff905e..4247b91f09 100644
--- a/meta/lib/patchtest/requirements.txt
+++ b/meta/lib/patchtest/requirements.txt
@@ -1,5 +1,6 @@
1boto3 1boto3
2git-pw>=2.5.0 2git-pw>=2.5.0
3GitPython
3jinja2 4jinja2
4pylint 5pylint
5pyparsing>=3.0.9 6pyparsing>=3.0.9
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.fail
index 0c40cdc1b6..30c1bc4624 100644
--- a/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.fail
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.fail
@@ -1,32 +1,43 @@
1From 1fbb446d1849b1208012cbdae5d85d228cdbe4a6 Mon Sep 17 00:00:00 2001 1From 04eb94a0134ef5eb5b5fd783b303104fbfcd8437 Mon Sep 17 00:00:00 2001
2From: First Last <first.last@example.com> 2From: First Last <first.last@example.com>
3Date: Tue, 29 Aug 2023 13:32:24 -0400 3Date: Fri, 31 May 2024 11:03:47 -0400
4Subject: [PATCH] selftest-hello: add a summary 4Subject: [PATCH] selftest-hello: add selftest-hello-extra
5 5
6This patch should fail the selftests because the author address is from the 6This should fail the test_author_valid test.
7invalid "example.com".
8 7
9Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> 8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
10--- 9---
11 .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 3 ++- 10 .../selftest-hello-extra_1.0.bb | 21 +++++++++++++++++++
12 1 file changed, 2 insertions(+), 1 deletion(-) 11 1 file changed, 21 insertions(+)
12 create mode 100644 meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
13 13
14diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 14diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
15index 547587bef4..491f0a3df7 100644 15new file mode 100644
16--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 16index 00000000000..f3dec1b220c
17+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 17--- /dev/null
18@@ -1,3 +1,4 @@ 18+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
19+SUMMARY = "A cool sample" 19@@ -0,0 +1,21 @@
20 DESCRIPTION = "Simple helloworld application -- selftest variant" 20+SUMMARY = "This is an example summary"
21 SECTION = "examples" 21+DESCRIPTION = "Simple helloworld application -- selftest variant"
22 LICENSE = "MIT" 22+SECTION = "examples"
23@@ -16,4 +17,4 @@ do_install() { 23+LICENSE = "MIT"
24 install -m 0755 helloworld ${D}${bindir} 24+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
25 } 25+
26 26+SRC_URI = "file://helloworld.c"
27-BBCLASSEXTEND = "native nativesdk" 27+
28\ No newline at end of file 28+S = "${WORKDIR}/sources"
29+UNPACKDIR = "${S}"
30+
31+do_compile() {
32+ ${CC} ${CFLAGS} ${LDFLAGS} helloworld.c -o helloworld
33+}
34+
35+do_install() {
36+ install -d ${D}${bindir}
37+ install -m 0755 helloworld ${D}${bindir}
38+}
39+
29+BBCLASSEXTEND = "native nativesdk" 40+BBCLASSEXTEND = "native nativesdk"
30-- 41--
312.41.0 422.45.1
32 43
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.pass
index cbb8ef2cef..6e82b08bc6 100644
--- a/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.pass
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.pass
@@ -1,31 +1,43 @@
1From 1fbb446d1849b1208012cbdae5d85d228cdbe4a6 Mon Sep 17 00:00:00 2001 1From 04eb94a0134ef5eb5b5fd783b303104fbfcd8437 Mon Sep 17 00:00:00 2001
2From: First Last <first.last@address.com> 2From: First Last <first.last@address.com>
3Date: Tue, 29 Aug 2023 13:32:24 -0400 3Date: Fri, 31 May 2024 11:03:47 -0400
4Subject: [PATCH] selftest-hello: add a summary 4Subject: [PATCH] selftest-hello: add selftest-hello-extra
5 5
6This patch should pass the selftests because the author address is in a valid format. 6This should pass the test_author_valid test.
7 7
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> 8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9--- 9---
10 .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 3 ++- 10 .../selftest-hello-extra_1.0.bb | 21 +++++++++++++++++++
11 1 file changed, 2 insertions(+), 1 deletion(-) 11 1 file changed, 21 insertions(+)
12 create mode 100644 meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
12 13
13diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 14diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
14index 547587bef4..491f0a3df7 100644 15new file mode 100644
15--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 16index 00000000000..f3dec1b220c
16+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 17--- /dev/null
17@@ -1,3 +1,4 @@ 18+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
18+SUMMARY = "A cool sample" 19@@ -0,0 +1,21 @@
19 DESCRIPTION = "Simple helloworld application -- selftest variant" 20+SUMMARY = "This is an example summary"
20 SECTION = "examples" 21+DESCRIPTION = "Simple helloworld application -- selftest variant"
21 LICENSE = "MIT" 22+SECTION = "examples"
22@@ -16,4 +17,4 @@ do_install() { 23+LICENSE = "MIT"
23 install -m 0755 helloworld ${D}${bindir} 24+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
24 } 25+
25 26+SRC_URI = "file://helloworld.c"
26-BBCLASSEXTEND = "native nativesdk" 27+
27\ No newline at end of file 28+S = "${WORKDIR}/sources"
29+UNPACKDIR = "${S}"
30+
31+do_compile() {
32+ ${CC} ${CFLAGS} ${LDFLAGS} helloworld.c -o helloworld
33+}
34+
35+do_install() {
36+ install -d ${D}${bindir}
37+ install -m 0755 helloworld ${D}${bindir}
38+}
39+
28+BBCLASSEXTEND = "native nativesdk" 40+BBCLASSEXTEND = "native nativesdk"
29-- 41--
302.41.0 422.45.1
31 43
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.fail
index 3e2b81bca1..745a8f45d9 100644
--- a/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.fail
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.fail
@@ -1,31 +1,43 @@
1From 1fbb446d1849b1208012cbdae5d85d228cdbe4a6 Mon Sep 17 00:00:00 2001 1From 04eb94a0134ef5eb5b5fd783b303104fbfcd8437 Mon Sep 17 00:00:00 2001
2From: Upgrade Helper <auh@auh.yoctoproject.org> 2From: Upgrade Helper <auh@auh.yoctoproject.org>
3Date: Tue, 29 Aug 2023 13:32:24 -0400 3Date: Fri, 31 May 2024 11:03:47 -0400
4Subject: [PATCH] selftest-hello: add a summary 4Subject: [PATCH] selftest-hello: add selftest-hello-extra
5 5
6This patch should fail the selftests because AUH is an invalid sender. 6This should fail the test_author_valid test.
7 7
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> 8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9--- 9---
10 .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 3 ++- 10 .../selftest-hello-extra_1.0.bb | 21 +++++++++++++++++++
11 1 file changed, 2 insertions(+), 1 deletion(-) 11 1 file changed, 21 insertions(+)
12 create mode 100644 meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
12 13
13diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 14diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
14index 547587bef4..491f0a3df7 100644 15new file mode 100644
15--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 16index 00000000000..f3dec1b220c
16+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 17--- /dev/null
17@@ -1,3 +1,4 @@ 18+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
18+SUMMARY = "A cool sample" 19@@ -0,0 +1,21 @@
19 DESCRIPTION = "Simple helloworld application -- selftest variant" 20+SUMMARY = "This is an example summary"
20 SECTION = "examples" 21+DESCRIPTION = "Simple helloworld application -- selftest variant"
21 LICENSE = "MIT" 22+SECTION = "examples"
22@@ -16,4 +17,4 @@ do_install() { 23+LICENSE = "MIT"
23 install -m 0755 helloworld ${D}${bindir} 24+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
24 } 25+
25 26+SRC_URI = "file://helloworld.c"
26-BBCLASSEXTEND = "native nativesdk" 27+
27\ No newline at end of file 28+S = "${WORKDIR}/sources"
29+UNPACKDIR = "${S}"
30+
31+do_compile() {
32+ ${CC} ${CFLAGS} ${LDFLAGS} helloworld.c -o helloworld
33+}
34+
35+do_install() {
36+ install -d ${D}${bindir}
37+ install -m 0755 helloworld ${D}${bindir}
38+}
39+
28+BBCLASSEXTEND = "native nativesdk" 40+BBCLASSEXTEND = "native nativesdk"
29-- 41--
302.41.0 422.45.1
31 43
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.pass
index f84e1265a7..56cb77fa69 100644
--- a/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.pass
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.pass
@@ -1,31 +1,43 @@
1From 1fbb446d1849b1208012cbdae5d85d228cdbe4a6 Mon Sep 17 00:00:00 2001 1From 04eb94a0134ef5eb5b5fd783b303104fbfcd8437 Mon Sep 17 00:00:00 2001
2From: First Last <averylongemailaddressthatishardtoread.from@address.com> 2From: First Last <averylongemailaddressthatishardtoread.from@address.com>
3Date: Tue, 29 Aug 2023 13:32:24 -0400 3Date: Fri, 31 May 2024 11:03:47 -0400
4Subject: [PATCH] selftest-hello: add a summary 4Subject: [PATCH] selftest-hello: add selftest-hello-extra
5 5
6This patch should pass the selftests because the author address is in a valid format. 6This should pass the test_author_valid test.
7 7
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> 8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9--- 9---
10 .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 3 ++- 10 .../selftest-hello-extra_1.0.bb | 21 +++++++++++++++++++
11 1 file changed, 2 insertions(+), 1 deletion(-) 11 1 file changed, 21 insertions(+)
12 create mode 100644 meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
12 13
13diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 14diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
14index 547587bef4..491f0a3df7 100644 15new file mode 100644
15--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 16index 00000000000..f3dec1b220c
16+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 17--- /dev/null
17@@ -1,3 +1,4 @@ 18+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
18+SUMMARY = "A cool sample" 19@@ -0,0 +1,21 @@
19 DESCRIPTION = "Simple helloworld application -- selftest variant" 20+SUMMARY = "This is an example summary"
20 SECTION = "examples" 21+DESCRIPTION = "Simple helloworld application -- selftest variant"
21 LICENSE = "MIT" 22+SECTION = "examples"
22@@ -16,4 +17,4 @@ do_install() { 23+LICENSE = "MIT"
23 install -m 0755 helloworld ${D}${bindir} 24+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
24 } 25+
25 26+SRC_URI = "file://helloworld.c"
26-BBCLASSEXTEND = "native nativesdk" 27+
27\ No newline at end of file 28+S = "${WORKDIR}/sources"
29+UNPACKDIR = "${S}"
30+
31+do_compile() {
32+ ${CC} ${CFLAGS} ${LDFLAGS} helloworld.c -o helloworld
33+}
34+
35+do_install() {
36+ install -d ${D}${bindir}
37+ install -m 0755 helloworld ${D}${bindir}
38+}
39+
28+BBCLASSEXTEND = "native nativesdk" 40+BBCLASSEXTEND = "native nativesdk"
29-- 41--
302.41.0 422.45.1
31 43
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.fail
index 80f409e952..6facb8c756 100644
--- a/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.fail
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.fail
@@ -1,25 +1,67 @@
1From fdfd605e565d874502522c4b70b786c8c5aa0bad Mon Sep 17 00:00:00 2001 1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: name@somedomain.com <email@address.com> 2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 17 Feb 2017 16:29:21 -0600 3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] README: adds 'foo' to the header 4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5 5
6This test patch adds 'foo' to the header 6This should fail the test_bugzilla_entry_format test.
7 7
8[YOCTO 1234] 8[YOCTO 1234]
9CVE: CVE-1234-56789
9 10
10Signed-off-by: Daniela Plascencia <daniela.plascencia@linux.intel.com> 11Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
11--- 12---
12 README | 1 + 13 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
13 1 file changed, 1 insertion(+) 14 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
15 2 files changed, 29 insertions(+), 1 deletion(-)
16 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
14 17
15diff --git a/README b/README 18diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16index 521916cd4f..cdf29dcea3 100644 19new file mode 100644
17--- a/README 20index 00000000000..8a4f9329303
18+++ b/README 21--- /dev/null
19@@ -1,3 +1,4 @@ 22+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
20+**** FOO **** 23@@ -0,0 +1,26 @@
21 OpenEmbedded-Core 24+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
22 ================= 25+From: Trevor Gamblin <tgamblin@baylibre.com>
26+Date: Tue, 29 Aug 2023 14:08:20 -0400
27+Subject: [PATCH] Fix CVE-NOT-REAL
28+
29+CVE: CVE-1234-56789
30+Upstream-Status: Backport(http://example.com/example)
31+
32+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
33+---
34+ strlen.c | 1 +
35+ 1 file changed, 1 insertion(+)
36+
37+diff --git a/strlen.c b/strlen.c
38+index 1788f38..83d7918 100644
39+--- a/strlen.c
40++++ b/strlen.c
41+
42+int main() {
43+
44+ printf("%d\n", str_len(string1));
45+ printf("%d\n", str_len(string2));
46+ printf("CVE FIXED!!!\n");
47+
48+ return 0;
49+}
50diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
51index 2dc352d479e..d937759f157 100644
52--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
53+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
54@@ -3,7 +3,9 @@ SECTION = "examples"
55 LICENSE = "MIT"
56 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
57
58-SRC_URI = "file://helloworld.c"
59+SRC_URI = "file://helloworld.c \
60+ file://0001-Fix-CVE-1234-56789.patch \
61+ "
62
63 S = "${WORKDIR}/sources"
64 UNPACKDIR = "${S}"
65--
662.45.1
23 67
24--
252.11.0
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.pass
index 2648b03364..2f35458b4f 100644
--- a/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.pass
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.pass
@@ -1,25 +1,67 @@
1From fdfd605e565d874502522c4b70b786c8c5aa0bad Mon Sep 17 00:00:00 2001 1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: name@somedomain.com <email@address.com> 2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 17 Feb 2017 16:29:21 -0600 3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] README: adds 'foo' to the header 4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5 5
6This test patch adds 'foo' to the header 6This should pass the test_bugzilla_entry_format test.
7 7
8[YOCTO #1234] 8[YOCTO #1234]
9CVE: CVE-1234-56789
9 10
10Signed-off-by: Daniela Plascencia <daniela.plascencia@linux.intel.com> 11Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
11--- 12---
12 README | 1 + 13 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
13 1 file changed, 1 insertion(+) 14 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
15 2 files changed, 29 insertions(+), 1 deletion(-)
16 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
14 17
15diff --git a/README b/README 18diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16index 521916cd4f..cdf29dcea3 100644 19new file mode 100644
17--- a/README 20index 00000000000..8a4f9329303
18+++ b/README 21--- /dev/null
19@@ -1,3 +1,4 @@ 22+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
20+**** FOO **** 23@@ -0,0 +1,26 @@
21 OpenEmbedded-Core 24+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
22 ================= 25+From: Trevor Gamblin <tgamblin@baylibre.com>
26+Date: Tue, 29 Aug 2023 14:08:20 -0400
27+Subject: [PATCH] Fix CVE-NOT-REAL
28+
29+CVE: CVE-1234-56789
30+Upstream-Status: Backport(http://example.com/example)
31+
32+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
33+---
34+ strlen.c | 1 +
35+ 1 file changed, 1 insertion(+)
36+
37+diff --git a/strlen.c b/strlen.c
38+index 1788f38..83d7918 100644
39+--- a/strlen.c
40++++ b/strlen.c
41+
42+int main() {
43+
44+ printf("%d\n", str_len(string1));
45+ printf("%d\n", str_len(string2));
46+ printf("CVE FIXED!!!\n");
47+
48+ return 0;
49+}
50diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
51index 2dc352d479e..d937759f157 100644
52--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
53+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
54@@ -3,7 +3,9 @@ SECTION = "examples"
55 LICENSE = "MIT"
56 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
57
58-SRC_URI = "file://helloworld.c"
59+SRC_URI = "file://helloworld.c \
60+ file://0001-Fix-CVE-1234-56789.patch \
61+ "
62
63 S = "${WORKDIR}/sources"
64 UNPACKDIR = "${S}"
65--
662.45.1
23 67
24--
252.11.0
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.fail
index 93ca0f9119..6f4e61c0da 100644
--- a/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.fail
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.fail
@@ -1,22 +1,62 @@
1From 0a52a62c9430c05d22cb7f46380488f2280b69bb Mon Sep 17 00:00:00 2001 1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com> 2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 1 Sep 2023 08:56:14 -0400 3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] README.OE-Core.md: add foo 4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5 5
6Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> 6Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
7--- 7---
8 README.OE-Core.md | 1 + 8 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
9 1 file changed, 1 insertion(+) 9 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
10 2 files changed, 29 insertions(+), 1 deletion(-)
11 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
10 12
11diff --git a/README.OE-Core.md b/README.OE-Core.md 13diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
12index 2f2127fb03..48464252c8 100644 14new file mode 100644
13--- a/README.OE-Core.md 15index 00000000000..8a4f9329303
14+++ b/README.OE-Core.md 16--- /dev/null
15@@ -1,3 +1,4 @@ 17+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16+** FOO ** 18@@ -0,0 +1,26 @@
17 OpenEmbedded-Core 19+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
18 ================= 20+From: Trevor Gamblin <tgamblin@baylibre.com>
21+Date: Tue, 29 Aug 2023 14:08:20 -0400
22+Subject: [PATCH] Fix CVE-NOT-REAL
23+
24+CVE: CVE-1234-56789
25+Upstream-Status: Backport(http://example.com/example)
26+
27+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
28+---
29+ strlen.c | 1 +
30+ 1 file changed, 1 insertion(+)
31+
32+diff --git a/strlen.c b/strlen.c
33+index 1788f38..83d7918 100644
34+--- a/strlen.c
35++++ b/strlen.c
36+
37+int main() {
38+
39+ printf("%d\n", str_len(string1));
40+ printf("%d\n", str_len(string2));
41+ printf("CVE FIXED!!!\n");
42+
43+ return 0;
44+}
45diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
46index 2dc352d479e..d937759f157 100644
47--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
48+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
49@@ -3,7 +3,9 @@ SECTION = "examples"
50 LICENSE = "MIT"
51 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
19 52
53-SRC_URI = "file://helloworld.c"
54+SRC_URI = "file://helloworld.c \
55+ file://0001-Fix-CVE-1234-56789.patch \
56+ "
57
58 S = "${WORKDIR}/sources"
59 UNPACKDIR = "${S}"
20-- 60--
212.41.0 612.45.1
22 62
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.pass
index 5e3dcbd58b..3fbc23fd00 100644
--- a/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.pass
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.pass
@@ -1,24 +1,66 @@
1From 0a52a62c9430c05d22cb7f46380488f2280b69bb Mon Sep 17 00:00:00 2001 1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com> 2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 1 Sep 2023 08:56:14 -0400 3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] README.OE-Core.md: add foo 4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5 5
6This is a commit message 6This should pass the test_commit_message_presence test.
7
8CVE: CVE-1234-56789
7 9
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> 10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9--- 11---
10 README.OE-Core.md | 1 + 12 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
11 1 file changed, 1 insertion(+) 13 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
14 2 files changed, 29 insertions(+), 1 deletion(-)
15 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
12 16
13diff --git a/README.OE-Core.md b/README.OE-Core.md 17diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
14index 2f2127fb03..48464252c8 100644 18new file mode 100644
15--- a/README.OE-Core.md 19index 00000000000..8a4f9329303
16+++ b/README.OE-Core.md 20--- /dev/null
17@@ -1,3 +1,4 @@ 21+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
18+** FOO ** 22@@ -0,0 +1,26 @@
19 OpenEmbedded-Core 23+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
20 ================= 24+From: Trevor Gamblin <tgamblin@baylibre.com>
25+Date: Tue, 29 Aug 2023 14:08:20 -0400
26+Subject: [PATCH] Fix CVE-NOT-REAL
27+
28+CVE: CVE-1234-56789
29+Upstream-Status: Backport(http://example.com/example)
30+
31+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
32+---
33+ strlen.c | 1 +
34+ 1 file changed, 1 insertion(+)
35+
36+diff --git a/strlen.c b/strlen.c
37+index 1788f38..83d7918 100644
38+--- a/strlen.c
39++++ b/strlen.c
40+
41+int main() {
42+
43+ printf("%d\n", str_len(string1));
44+ printf("%d\n", str_len(string2));
45+ printf("CVE FIXED!!!\n");
46+
47+ return 0;
48+}
49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
50index 2dc352d479e..d937759f157 100644
51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
53@@ -3,7 +3,9 @@ SECTION = "examples"
54 LICENSE = "MIT"
55 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
56
57-SRC_URI = "file://helloworld.c"
58+SRC_URI = "file://helloworld.c \
59+ file://0001-Fix-CVE-1234-56789.patch \
60+ "
21 61
62 S = "${WORKDIR}/sources"
63 UNPACKDIR = "${S}"
22-- 64--
232.41.0 652.45.1
24 66
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.1.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.1.fail
deleted file mode 100644
index 9cc4aab38a..0000000000
--- a/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.1.fail
+++ /dev/null
@@ -1,36 +0,0 @@
1From d12db4cfa913b0e7a4b5bd858d3019acc53ce426 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Wed, 30 Aug 2023 12:15:00 -0400
4Subject: [PATCH] selftest-hello: upgrade 1.0 -> 1.1
5
6This test should fail the mbox formatting test and the merge on head
7test.
8
9Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
10---
11 .../{selftest-hello_1.0.bb => selftest-hello_1.1.bb} | 3 ++-
12 1 file changed, 2 insertions(+), 1 deletion(-)
13 rename meta-selftest/recipes-test/selftest-hello/{selftest-hello_1.0.bb => selftest-hello_1.1.bb} (88%)
14
15diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb
16similarity index 88%
17rename from meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
18rename to meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb
19index 547587bef4..acc388ec2c 100644
20--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
21+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb
22@@ -1,3 +1,4 @@
23%+SUMMARY = "Hello!"
24 DESCRIPTION = "Simple helloworld application -- selftest variant"
25 SECTION = "examples"
26 LICENSE = "MIT"
27@@ -16,4 +17,4 @@ do_install() {
28 install -m 0755 helloworld ${D}${bindir}
29 }
30
31-BBCLASSEXTEND = "native nativesdk"
32\ No newline at end of file
33+BBCLASSEXTEND = "native nativesdk"
34--
352.41.0
36
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.2.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.2.fail
deleted file mode 100644
index eca1c60085..0000000000
--- a/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.2.fail
+++ /dev/null
@@ -1,35 +0,0 @@
1From d12db4cfa913b0e7a4b5bd858d3019acc53ce426 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Wed, 30 Aug 2023 12:15:00 -0400
4Subject: [PATCH] selftest-hello: upgrade 1.0 -> 1.1
5
6This test should fail the merge-on-head and mbox formatting tests.
7
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9---
10 .../{selftest-hello_1.0.bb => selftest-hello_1.1.bb} | 3 ++-
11 1 file changed, 2 insertions(+), 1 deletion(-)
12 rename meta-selftest/recipes-test/selftest-hello/{selftest-hello_1.0.bb => selftest-hello_1.1.bb} (88%)
13
14diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb
15similarity index 88%
16rename from meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
17rename to meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb
18index 547587bef4..acc388ec2c 100644
19--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
20+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb
21@@ -1,3 +1,4 @@
22%+SUMMARY = "Hello!"
23 DESCRIPTION = "Simple helloworld application -- selftest variant"
24 SECTION = "examples"
25 LICENSE = "MIT"
26@@ -16,4 +17,4 @@ do_install() {
27 install -m 0755 helloworld ${D}${bindir}
28 }
29
30-BBCLASSEXTEND = "native nativesdk"
31\ No newline at end of file
32+BBCLASSEXTEND = "native nativesdk"
33--
342.41.0
35
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.fail
new file mode 100644
index 0000000000..0dda6802d1
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.fail
@@ -0,0 +1,66 @@
1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5
6This should fail the test_mbox_format test.
7
8CVE: CVE-1234-56789
9
10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
11---
12 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
13 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
14 2 files changed, 29 insertions(+), 1 deletion(-)
15 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16
17diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
18new file mode 100644
19index 00000000000..8a4f9329303
20--- /dev/null
21+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
22@@ -0,0 +1,26 @@
23+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
24+From: Trevor Gamblin <tgamblin@baylibre.com>
25+Date: Tue, 29 Aug 2023 14:08:20 -0400
26+Subject: [PATCH] Fix CVE-NOT-REAL
27+
28+CVE: CVE-1234-56789
29+Upstream-Status: Backport(http://example.com/example)
30+
31+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
32+---
33+ strlen.c | 1 +
34+ 1 file changed, 1 insertion(+)
35+
36+diff --git a/strlen.c b/strlen.c
37+index 1788f38..83d7918 100644
38+--- a/strlen.c
39++++ b/strlen.c
40+
41+int main() {
42+
43+ printf("%d\n", str_len(string1));
44+ printf("%d\n", str_len(string2));
45+ printf("CVE FIXED!!!\n");
46+
47+ return 0;
48+}
49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
50index 2dc352d479e..d937759f157 100644
51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
53@@ -3,7 +3,9 @@ SECTION = "examples"
54 LICENSE = "MIT"
55 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
56
57-SRC_URI = "file://helloworld.c"
58+SRC_URI = "file://helloworld.c \
59%+ file://0001-Fix-CVE-1234-56789.patch \
60+ "
61
62 S = "${WORKDIR}/sources"
63 UNPACKDIR = "${S}"
64--
652.45.1
66
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.pass
index 33940adffc..f06ae11d04 100644
--- a/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.pass
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.pass
@@ -1,33 +1,66 @@
1From d12db4cfa913b0e7a4b5bd858d3019acc53ce426 Mon Sep 17 00:00:00 2001 1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com> 2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Wed, 30 Aug 2023 12:15:00 -0400 3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: upgrade 1.0 -> 1.1 4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5
6This should pass the test_mbox_format test.
7
8CVE: CVE-1234-56789
5 9
6Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> 10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
7--- 11---
8 .../{selftest-hello_1.0.bb => selftest-hello_1.1.bb} | 3 ++- 12 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
9 1 file changed, 2 insertions(+), 1 deletion(-) 13 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
10 rename meta-selftest/recipes-test/selftest-hello/{selftest-hello_1.0.bb => selftest-hello_1.1.bb} (88%) 14 2 files changed, 29 insertions(+), 1 deletion(-)
15 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
11 16
12diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb 17diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
13similarity index 88% 18new file mode 100644
14rename from meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 19index 00000000000..8a4f9329303
15rename to meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb 20--- /dev/null
16index 547587bef4..acc388ec2c 100644 21+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
22@@ -0,0 +1,26 @@
23+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
24+From: Trevor Gamblin <tgamblin@baylibre.com>
25+Date: Tue, 29 Aug 2023 14:08:20 -0400
26+Subject: [PATCH] Fix CVE-NOT-REAL
27+
28+CVE: CVE-1234-56789
29+Upstream-Status: Backport(http://example.com/example)
30+
31+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
32+---
33+ strlen.c | 1 +
34+ 1 file changed, 1 insertion(+)
35+
36+diff --git a/strlen.c b/strlen.c
37+index 1788f38..83d7918 100644
38+--- a/strlen.c
39++++ b/strlen.c
40+
41+int main() {
42+
43+ printf("%d\n", str_len(string1));
44+ printf("%d\n", str_len(string2));
45+ printf("CVE FIXED!!!\n");
46+
47+ return 0;
48+}
49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
50index 2dc352d479e..d937759f157 100644
17--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
18+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb 52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
19@@ -1,3 +1,4 @@ 53@@ -3,7 +3,9 @@ SECTION = "examples"
20+SUMMARY = "Hello!"
21 DESCRIPTION = "Simple helloworld application -- selftest variant"
22 SECTION = "examples"
23 LICENSE = "MIT" 54 LICENSE = "MIT"
24@@ -16,4 +17,4 @@ do_install() { 55 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
25 install -m 0755 helloworld ${D}${bindir} 56
26 } 57-SRC_URI = "file://helloworld.c"
58+SRC_URI = "file://helloworld.c \
59+ file://0001-Fix-CVE-1234-56789.patch \
60+ "
27 61
28-BBCLASSEXTEND = "native nativesdk" 62 S = "${WORKDIR}/sources"
29\ No newline at end of file 63 UNPACKDIR = "${S}"
30+BBCLASSEXTEND = "native nativesdk"
31-- 64--
322.41.0 652.45.1
33 66
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_series_merge_on_head.1.skip b/meta/lib/patchtest/selftest/files/TestMbox.test_series_merge_on_head.1.skip
index 2a72457878..072ccc28c0 100644
--- a/meta/lib/patchtest/selftest/files/TestMbox.test_series_merge_on_head.1.skip
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_series_merge_on_head.1.skip
@@ -3,7 +3,7 @@ From: Trevor Gamblin <tgamblin@baylibre.com>
 Date: Wed, 30 Aug 2023 12:15:00 -0400
 Subject: [PATCH] selftest-hello: upgrade 1.0 -> 1.1
 
-This file should pass the test_series_merge_on_head test.
+This file should skip the test_series_merge_on_head test.
 
 Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
 ---
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.fail
index cdbbc61b61..c5e4df2549 100644
--- a/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.fail
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.fail
@@ -1,23 +1,25 @@
1From 35ccee3cee96fb29514475279248078d88907231 Mon Sep 17 00:00:00 2001 1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com> 2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Tue, 29 Aug 2023 14:12:27 -0400 3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello% fix CVE-1234-56789 4Subject: [PATCH] selftest-hello% fix CVE-1234-56789
5 5
6This should fail the test_shortlog_format test.
7
6CVE: CVE-1234-56789 8CVE: CVE-1234-56789
7 9
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> 10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9--- 11---
10 .../files/0001-Fix-CVE-1234-56789.patch | 27 +++++++++++++++++++ 12 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
11 .../selftest-hello/selftest-hello_1.0.bb | 6 +++-- 13 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
12 2 files changed, 31 insertions(+), 2 deletions(-) 14 2 files changed, 29 insertions(+), 1 deletion(-)
13 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch 15 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
14 16
15diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch 17diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16new file mode 100644 18new file mode 100644
17index 0000000000..9219b8db62 19index 00000000000..8a4f9329303
18--- /dev/null 20--- /dev/null
19+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch 21+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
20@@ -0,0 +1,27 @@ 22@@ -0,0 +1,26 @@
21+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 23+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
22+From: Trevor Gamblin <tgamblin@baylibre.com> 24+From: Trevor Gamblin <tgamblin@baylibre.com>
23+Date: Tue, 29 Aug 2023 14:08:20 -0400 25+Date: Tue, 29 Aug 2023 14:08:20 -0400
@@ -35,19 +37,17 @@ index 0000000000..9219b8db62
35+index 1788f38..83d7918 100644 37+index 1788f38..83d7918 100644
36+--- a/strlen.c 38+--- a/strlen.c
37++++ b/strlen.c 39++++ b/strlen.c
38+@@ -8,6 +8,7 @@ int main() {
39+
40+ printf("%d\n", str_len(string1));
41+ printf("%d\n", str_len(string2));
42++ printf("CVE FIXED!!!\n");
43+
44+ return 0;
45+ }
46+--
47+2.41.0
48+ 40+
41+int main() {
42+
43+ printf("%d\n", str_len(string1));
44+ printf("%d\n", str_len(string2));
45+ printf("CVE FIXED!!!\n");
46+
47+ return 0;
48+}
49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
50index 547587bef4..76975a6729 100644 50index 2dc352d479e..d937759f157 100644
51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
53@@ -3,7 +3,9 @@ SECTION = "examples" 53@@ -3,7 +3,9 @@ SECTION = "examples"
@@ -56,18 +56,11 @@ index 547587bef4..76975a6729 100644
56 56
57-SRC_URI = "file://helloworld.c" 57-SRC_URI = "file://helloworld.c"
58+SRC_URI = "file://helloworld.c \ 58+SRC_URI = "file://helloworld.c \
59+ file://CVE-1234-56789.patch \ 59+ file://0001-Fix-CVE-1234-56789.patch \
60+ " 60+ "
61
62 S = "${WORKDIR}"
63
64@@ -16,4 +18,4 @@ do_install() {
65 install -m 0755 helloworld ${D}${bindir}
66 }
67 61
68-BBCLASSEXTEND = "native nativesdk" 62 S = "${WORKDIR}/sources"
69\ No newline at end of file 63 UNPACKDIR = "${S}"
70+BBCLASSEXTEND = "native nativesdk"
71-- 64--
722.41.0 652.45.1
73 66
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.pass
index ef6017037c..4948e26afc 100644
--- a/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.pass
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.pass
@@ -1,23 +1,25 @@
1From 35ccee3cee96fb29514475279248078d88907231 Mon Sep 17 00:00:00 2001 1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com> 2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Tue, 29 Aug 2023 14:12:27 -0400 3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789 4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5 5
6This should pass the test_shortlog_format test.
7
6CVE: CVE-1234-56789 8CVE: CVE-1234-56789
7 9
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> 10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9--- 11---
10 .../files/0001-Fix-CVE-1234-56789.patch | 27 +++++++++++++++++++ 12 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
11 .../selftest-hello/selftest-hello_1.0.bb | 6 +++-- 13 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
12 2 files changed, 31 insertions(+), 2 deletions(-) 14 2 files changed, 29 insertions(+), 1 deletion(-)
13 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch 15 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
14 16
15diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch 17diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16new file mode 100644 18new file mode 100644
17index 0000000000..9219b8db62 19index 00000000000..8a4f9329303
18--- /dev/null 20--- /dev/null
19+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch 21+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
20@@ -0,0 +1,27 @@ 22@@ -0,0 +1,26 @@
21+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 23+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
22+From: Trevor Gamblin <tgamblin@baylibre.com> 24+From: Trevor Gamblin <tgamblin@baylibre.com>
23+Date: Tue, 29 Aug 2023 14:08:20 -0400 25+Date: Tue, 29 Aug 2023 14:08:20 -0400
@@ -35,19 +37,17 @@ index 0000000000..9219b8db62
35+index 1788f38..83d7918 100644 37+index 1788f38..83d7918 100644
36+--- a/strlen.c 38+--- a/strlen.c
37++++ b/strlen.c 39++++ b/strlen.c
38+@@ -8,6 +8,7 @@ int main() {
39+
40+ printf("%d\n", str_len(string1));
41+ printf("%d\n", str_len(string2));
42++ printf("CVE FIXED!!!\n");
43+
44+ return 0;
45+ }
46+--
47+2.41.0
48+ 40+
41+int main() {
42+
43+ printf("%d\n", str_len(string1));
44+ printf("%d\n", str_len(string2));
45+ printf("CVE FIXED!!!\n");
46+
47+ return 0;
48+}
49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
50index 547587bef4..76975a6729 100644 50index 2dc352d479e..d937759f157 100644
51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
53@@ -3,7 +3,9 @@ SECTION = "examples" 53@@ -3,7 +3,9 @@ SECTION = "examples"
@@ -56,18 +56,11 @@ index 547587bef4..76975a6729 100644
56 56
57-SRC_URI = "file://helloworld.c" 57-SRC_URI = "file://helloworld.c"
58+SRC_URI = "file://helloworld.c \ 58+SRC_URI = "file://helloworld.c \
59+ file://CVE-1234-56789.patch \ 59+ file://0001-Fix-CVE-1234-56789.patch \
60+ " 60+ "
61
62 S = "${WORKDIR}"
63
64@@ -16,4 +18,4 @@ do_install() {
65 install -m 0755 helloworld ${D}${bindir}
66 }
67 61
68-BBCLASSEXTEND = "native nativesdk" 62 S = "${WORKDIR}/sources"
69\ No newline at end of file 63 UNPACKDIR = "${S}"
70+BBCLASSEXTEND = "native nativesdk"
71-- 64--
722.41.0 652.45.1
73 66
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.fail
index 629e78540b..4ed1242821 100644
--- a/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.fail
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.fail
@@ -1,23 +1,25 @@
1From 35ccee3cee96fb29514475279248078d88907231 Mon Sep 17 00:00:00 2001 1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com> 2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Tue, 29 Aug 2023 14:12:27 -0400 3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: this is a very long commit shortlog with way too many words included in it to pass the test 4Subject: [PATCH] selftest-hello: fix CVE-1234-56789 this is a very long commit shortlog with way too many words included in it to pass the test
5
6This should fail the test_shortlong_length test.
5 7
6CVE: CVE-1234-56789 8CVE: CVE-1234-56789
7 9
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> 10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9--- 11---
10 .../files/0001-Fix-CVE-1234-56789.patch | 27 +++++++++++++++++++ 12 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
11 .../selftest-hello/selftest-hello_1.0.bb | 6 +++-- 13 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
12 2 files changed, 31 insertions(+), 2 deletions(-) 14 2 files changed, 29 insertions(+), 1 deletion(-)
13 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch 15 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
14 16
15diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch 17diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16new file mode 100644 18new file mode 100644
17index 0000000000..9219b8db62 19index 00000000000..8a4f9329303
18--- /dev/null 20--- /dev/null
19+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch 21+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
20@@ -0,0 +1,27 @@ 22@@ -0,0 +1,26 @@
21+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 23+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
22+From: Trevor Gamblin <tgamblin@baylibre.com> 24+From: Trevor Gamblin <tgamblin@baylibre.com>
23+Date: Tue, 29 Aug 2023 14:08:20 -0400 25+Date: Tue, 29 Aug 2023 14:08:20 -0400
@@ -35,19 +37,17 @@ index 0000000000..9219b8db62
35+index 1788f38..83d7918 100644 37+index 1788f38..83d7918 100644
36+--- a/strlen.c 38+--- a/strlen.c
37++++ b/strlen.c 39++++ b/strlen.c
38+@@ -8,6 +8,7 @@ int main() {
39+
40+ printf("%d\n", str_len(string1));
41+ printf("%d\n", str_len(string2));
42++ printf("CVE FIXED!!!\n");
43+
44+ return 0;
45+ }
46+--
47+2.41.0
48+ 40+
41+int main() {
42+
43+ printf("%d\n", str_len(string1));
44+ printf("%d\n", str_len(string2));
45+ printf("CVE FIXED!!!\n");
46+
47+ return 0;
48+}
49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
50index 547587bef4..76975a6729 100644 50index 2dc352d479e..d937759f157 100644
51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
53@@ -3,7 +3,9 @@ SECTION = "examples" 53@@ -3,7 +3,9 @@ SECTION = "examples"
@@ -57,17 +57,10 @@ index 547587bef4..76975a6729 100644
57-SRC_URI = "file://helloworld.c" 57-SRC_URI = "file://helloworld.c"
58+SRC_URI = "file://helloworld.c \ 58+SRC_URI = "file://helloworld.c \
59+ file://0001-Fix-CVE-1234-56789.patch \ 59+ file://0001-Fix-CVE-1234-56789.patch \
60+ " 60+ "
61
62 S = "${WORKDIR}"
63
64@@ -16,4 +18,4 @@ do_install() {
65 install -m 0755 helloworld ${D}${bindir}
66 }
67 61
68-BBCLASSEXTEND = "native nativesdk" 62 S = "${WORKDIR}/sources"
69\ No newline at end of file 63 UNPACKDIR = "${S}"
70+BBCLASSEXTEND = "native nativesdk"
71-- 64--
722.41.0 652.45.1
73 66
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.pass
index ef6017037c..ef5066a650 100644
--- a/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.pass
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.pass
@@ -1,23 +1,25 @@
1From 35ccee3cee96fb29514475279248078d88907231 Mon Sep 17 00:00:00 2001 1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com> 2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Tue, 29 Aug 2023 14:12:27 -0400 3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789 4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5 5
6This should pass the test_shortlog_length test.
7
6CVE: CVE-1234-56789 8CVE: CVE-1234-56789
7 9
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> 10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9--- 11---
10 .../files/0001-Fix-CVE-1234-56789.patch | 27 +++++++++++++++++++ 12 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
11 .../selftest-hello/selftest-hello_1.0.bb | 6 +++-- 13 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
12 2 files changed, 31 insertions(+), 2 deletions(-) 14 2 files changed, 29 insertions(+), 1 deletion(-)
13 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch 15 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
14 16
15diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch 17diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16new file mode 100644 18new file mode 100644
17index 0000000000..9219b8db62 19index 00000000000..8a4f9329303
18--- /dev/null 20--- /dev/null
19+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch 21+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
20@@ -0,0 +1,27 @@ 22@@ -0,0 +1,26 @@
21+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 23+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
22+From: Trevor Gamblin <tgamblin@baylibre.com> 24+From: Trevor Gamblin <tgamblin@baylibre.com>
23+Date: Tue, 29 Aug 2023 14:08:20 -0400 25+Date: Tue, 29 Aug 2023 14:08:20 -0400
@@ -35,19 +37,17 @@ index 0000000000..9219b8db62
35+index 1788f38..83d7918 100644 37+index 1788f38..83d7918 100644
36+--- a/strlen.c 38+--- a/strlen.c
37++++ b/strlen.c 39++++ b/strlen.c
38+@@ -8,6 +8,7 @@ int main() {
39+
40+ printf("%d\n", str_len(string1));
41+ printf("%d\n", str_len(string2));
42++ printf("CVE FIXED!!!\n");
43+
44+ return 0;
45+ }
46+--
47+2.41.0
48+ 40+
41+int main() {
42+
43+ printf("%d\n", str_len(string1));
44+ printf("%d\n", str_len(string2));
45+ printf("CVE FIXED!!!\n");
46+
47+ return 0;
48+}
49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
50index 547587bef4..76975a6729 100644 50index 2dc352d479e..d937759f157 100644
51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
53@@ -3,7 +3,9 @@ SECTION = "examples" 53@@ -3,7 +3,9 @@ SECTION = "examples"
@@ -56,18 +56,11 @@ index 547587bef4..76975a6729 100644
56 56
57-SRC_URI = "file://helloworld.c" 57-SRC_URI = "file://helloworld.c"
58+SRC_URI = "file://helloworld.c \ 58+SRC_URI = "file://helloworld.c \
59+ file://CVE-1234-56789.patch \ 59+ file://0001-Fix-CVE-1234-56789.patch \
60+ " 60+ "
61
62 S = "${WORKDIR}"
63
64@@ -16,4 +18,4 @@ do_install() {
65 install -m 0755 helloworld ${D}${bindir}
66 }
67 61
68-BBCLASSEXTEND = "native nativesdk" 62 S = "${WORKDIR}/sources"
69\ No newline at end of file 63 UNPACKDIR = "${S}"
70+BBCLASSEXTEND = "native nativesdk"
71-- 64--
722.41.0 652.45.1
73 66
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.1.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.1.fail
index 35d92aeed7..4ede7271ee 100644
--- a/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.1.fail
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.1.fail
@@ -1,22 +1,24 @@
1From 14d72f6973270f78455a8628143f2cff90e8f41e Mon Sep 17 00:00:00 2001 1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com> 2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Tue, 29 Aug 2023 14:12:27 -0400 3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789 4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5 5
6This should fail the test_signed_off_by_presence test.
7
6CVE: CVE-1234-56789 8CVE: CVE-1234-56789
7 9
8--- 10---
9 .../selftest-hello/files/CVE-1234-56789.patch | 27 +++++++++++++++++++ 11 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
10 .../selftest-hello/selftest-hello_1.0.bb | 6 +++-- 12 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
11 2 files changed, 31 insertions(+), 2 deletions(-) 13 2 files changed, 29 insertions(+), 1 deletion(-)
12 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch 14 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
13 15
14diff --git a/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch 16diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
15new file mode 100644 17new file mode 100644
16index 0000000000..869cfb6fe5 18index 00000000000..8a4f9329303
17--- /dev/null 19--- /dev/null
18+++ b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch 20+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
19@@ -0,0 +1,27 @@ 21@@ -0,0 +1,26 @@
20+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 22+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
21+From: Trevor Gamblin <tgamblin@baylibre.com> 23+From: Trevor Gamblin <tgamblin@baylibre.com>
22+Date: Tue, 29 Aug 2023 14:08:20 -0400 24+Date: Tue, 29 Aug 2023 14:08:20 -0400
@@ -34,18 +36,17 @@ index 0000000000..869cfb6fe5
34+index 1788f38..83d7918 100644 36+index 1788f38..83d7918 100644
35+--- a/strlen.c 37+--- a/strlen.c
36++++ b/strlen.c 38++++ b/strlen.c
37+@@ -8,6 +8,7 @@ int main() { 39+
38+ 40+int main() {
39+ printf("%d\n", str_len(string1)); 41+
40+ printf("%d\n", str_len(string2)); 42+ printf("%d\n", str_len(string1));
41++ printf("CVE FIXED!!!\n"); 43+ printf("%d\n", str_len(string2));
42+ 44+ printf("CVE FIXED!!!\n");
43+ return 0; 45+
44+ } 46+ return 0;
45+-- 47+}
46+2.41.0
47diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 48diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
48index 547587bef4..76975a6729 100644 49index 2dc352d479e..d937759f157 100644
49--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 50--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
50+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 51+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
51@@ -3,7 +3,9 @@ SECTION = "examples" 52@@ -3,7 +3,9 @@ SECTION = "examples"
@@ -54,18 +55,11 @@ index 547587bef4..76975a6729 100644
54 55
55-SRC_URI = "file://helloworld.c" 56-SRC_URI = "file://helloworld.c"
56+SRC_URI = "file://helloworld.c \ 57+SRC_URI = "file://helloworld.c \
57+ file://CVE-1234-56789.patch \ 58+ file://0001-Fix-CVE-1234-56789.patch \
58+ " 59+ "
59
60 S = "${WORKDIR}"
61
62@@ -16,4 +18,4 @@ do_install() {
63 install -m 0755 helloworld ${D}${bindir}
64 }
65 60
66-BBCLASSEXTEND = "native nativesdk" 61 S = "${WORKDIR}/sources"
67\ No newline at end of file 62 UNPACKDIR = "${S}"
68+BBCLASSEXTEND = "native nativesdk"
69-- 63--
702.41.0 642.45.1
71 65
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.2.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.2.fail
index 68f38dee06..f7c3f5145a 100644
--- a/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.2.fail
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.2.fail
@@ -1,23 +1,25 @@
1From 14d72f6973270f78455a8628143f2cff90e8f41e Mon Sep 17 00:00:00 2001 1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com> 2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Tue, 29 Aug 2023 14:12:27 -0400 3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789 4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5 5
6This should fail the test_signed_off_by_presence test.
7
6CVE: CVE-1234-56789 8CVE: CVE-1234-56789
7 9
8Approved: Trevor Gamblin <tgamblin@baylibre.com> 10Approved-of-by: Trevor Gamblin <tgamblin@baylibre.com>
9--- 11---
10 .../selftest-hello/files/CVE-1234-56789.patch | 27 +++++++++++++++++++ 12 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
11 .../selftest-hello/selftest-hello_1.0.bb | 6 +++-- 13 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
12 2 files changed, 31 insertions(+), 2 deletions(-) 14 2 files changed, 29 insertions(+), 1 deletion(-)
13 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch 15 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
14 16
15diff --git a/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch 17diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16new file mode 100644 18new file mode 100644
17index 0000000000..869cfb6fe5 19index 00000000000..8a4f9329303
18--- /dev/null 20--- /dev/null
19+++ b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch 21+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
20@@ -0,0 +1,27 @@ 22@@ -0,0 +1,26 @@
21+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 23+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
22+From: Trevor Gamblin <tgamblin@baylibre.com> 24+From: Trevor Gamblin <tgamblin@baylibre.com>
23+Date: Tue, 29 Aug 2023 14:08:20 -0400 25+Date: Tue, 29 Aug 2023 14:08:20 -0400
@@ -35,18 +37,17 @@ index 0000000000..869cfb6fe5
35+index 1788f38..83d7918 100644 37+index 1788f38..83d7918 100644
36+--- a/strlen.c 38+--- a/strlen.c
37++++ b/strlen.c 39++++ b/strlen.c
38+@@ -8,6 +8,7 @@ int main() { 40+
39+ 41+int main() {
40+ printf("%d\n", str_len(string1)); 42+
41+ printf("%d\n", str_len(string2)); 43+ printf("%d\n", str_len(string1));
42++ printf("CVE FIXED!!!\n"); 44+ printf("%d\n", str_len(string2));
43+ 45+ printf("CVE FIXED!!!\n");
44+ return 0; 46+
45+ } 47+ return 0;
46+-- 48+}
47+2.41.0
48diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
49index 547587bef4..76975a6729 100644 50index 2dc352d479e..d937759f157 100644
50--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
51+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
52@@ -3,7 +3,9 @@ SECTION = "examples" 53@@ -3,7 +3,9 @@ SECTION = "examples"
@@ -55,18 +56,11 @@ index 547587bef4..76975a6729 100644
55 56
56-SRC_URI = "file://helloworld.c" 57-SRC_URI = "file://helloworld.c"
57+SRC_URI = "file://helloworld.c \ 58+SRC_URI = "file://helloworld.c \
58+ file://CVE-1234-56789.patch \ 59+ file://0001-Fix-CVE-1234-56789.patch \
59+ " 60+ "
60
61 S = "${WORKDIR}"
62
63@@ -16,4 +18,4 @@ do_install() {
64 install -m 0755 helloworld ${D}${bindir}
65 }
66 61
67-BBCLASSEXTEND = "native nativesdk" 62 S = "${WORKDIR}/sources"
68\ No newline at end of file 63 UNPACKDIR = "${S}"
69+BBCLASSEXTEND = "native nativesdk"
70-- 64--
712.41.0 652.45.1
72 66
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.pass
index ea34c76f0d..2661c1416f 100644
--- a/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.pass
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.pass
@@ -1,23 +1,25 @@
1From 14d72f6973270f78455a8628143f2cff90e8f41e Mon Sep 17 00:00:00 2001 1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com> 2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Tue, 29 Aug 2023 14:12:27 -0400 3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789 4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5 5
6This should pass the test_signed_off_by_presence test.
7
6CVE: CVE-1234-56789 8CVE: CVE-1234-56789
7 9
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> 10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9--- 11---
10 .../selftest-hello/files/CVE-1234-56789.patch | 27 +++++++++++++++++++ 12 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
11 .../selftest-hello/selftest-hello_1.0.bb | 6 +++-- 13 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
12 2 files changed, 31 insertions(+), 2 deletions(-) 14 2 files changed, 29 insertions(+), 1 deletion(-)
13 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch 15 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
14 16
15diff --git a/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch 17diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16new file mode 100644 18new file mode 100644
17index 0000000000..869cfb6fe5 19index 00000000000..8a4f9329303
18--- /dev/null 20--- /dev/null
19+++ b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch 21+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
20@@ -0,0 +1,27 @@ 22@@ -0,0 +1,26 @@
21+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 23+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
22+From: Trevor Gamblin <tgamblin@baylibre.com> 24+From: Trevor Gamblin <tgamblin@baylibre.com>
23+Date: Tue, 29 Aug 2023 14:08:20 -0400 25+Date: Tue, 29 Aug 2023 14:08:20 -0400
@@ -35,18 +37,17 @@ index 0000000000..869cfb6fe5
35+index 1788f38..83d7918 100644 37+index 1788f38..83d7918 100644
36+--- a/strlen.c 38+--- a/strlen.c
37++++ b/strlen.c 39++++ b/strlen.c
38+@@ -8,6 +8,7 @@ int main() { 40+
39+ 41+int main() {
40+ printf("%d\n", str_len(string1)); 42+
41+ printf("%d\n", str_len(string2)); 43+ printf("%d\n", str_len(string1));
42++ printf("CVE FIXED!!!\n"); 44+ printf("%d\n", str_len(string2));
43+ 45+ printf("CVE FIXED!!!\n");
44+ return 0; 46+
45+ } 47+ return 0;
46+-- 48+}
47+2.41.0
48diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
49index 547587bef4..76975a6729 100644 50index 2dc352d479e..d937759f157 100644
50--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
51+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
52@@ -3,7 +3,9 @@ SECTION = "examples" 53@@ -3,7 +3,9 @@ SECTION = "examples"
@@ -55,18 +56,11 @@ index 547587bef4..76975a6729 100644
55 56
56-SRC_URI = "file://helloworld.c" 57-SRC_URI = "file://helloworld.c"
57+SRC_URI = "file://helloworld.c \ 58+SRC_URI = "file://helloworld.c \
58+ file://CVE-1234-56789.patch \ 59+ file://0001-Fix-CVE-1234-56789.patch \
59+ " 60+ "
60
61 S = "${WORKDIR}"
62
63@@ -16,4 +18,4 @@ do_install() {
64 install -m 0755 helloworld ${D}${bindir}
65 }
66 61
67-BBCLASSEXTEND = "native nativesdk" 62 S = "${WORKDIR}/sources"
68\ No newline at end of file 63 UNPACKDIR = "${S}"
69+BBCLASSEXTEND = "native nativesdk"
70-- 64--
712.41.0 652.45.1
72 66
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.fail b/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.fail
index 3574463ade..dccafcd9bc 100644
--- a/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.fail
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.fail
@@ -1,30 +1,25 @@
-From c4ca86b9cca3643097db0328e2f34dccffbba309 Mon Sep 17 00:00:00 2001
-From: =?UTF-8?q?Simone=20Wei=C3=9F?= <simone.p.weiss@posteo.com>
-Date: Sat, 10 Feb 2024 13:18:44 +0100
+From 60450eefbc2c438a37c5e08759d021b18f0df0a3 Mon Sep 17 00:00:00 2001
+From: Trevor Gamblin <tgamblin@baylibre.com>
+Date: Fri, 31 May 2024 09:18:17 -0400
 Subject: [PATCH] selftest-hello: add CVE_CHECK_IGNORE
-MIME-Version: 1.0
-Content-Type: text/plain; charset=UTF-8
-Content-Transfer-Encoding: 8bit
 
 This should fail the test_cve_tag_format selftest.
 
-Signed-off-by: Simone Weiß <simone.p.weiss@posteo.com>
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
 ---
- .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 3 ++-
- 1 file changed, 2 insertions(+), 1 deletion(-)
+ meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 1 +
+ 1 file changed, 1 insertion(+)
 
 diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
-index 547587bef4..3ef9b87c34 100644
+index 2dc352d479e..cc103de6e2e 100644
 --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
 +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
-@@ -16,4 +16,5 @@ do_install() {
+@@ -17,4 +17,5 @@ do_install() {
  install -m 0755 helloworld ${D}${bindir}
  }
 
--BBCLASSEXTEND = "native nativesdk"
-\ No newline at end of file
 +CVE_CHECK_IGNORE = "CVE-2024-12345"
-+BBCLASSEXTEND = "native nativesdk"
+ BBCLASSEXTEND = "native nativesdk"
 --
-2.39.2
+2.45.1
 
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.pass b/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.pass
index 10f942a6eb..93a6cc91fb 100644
--- a/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.pass
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.pass
@@ -1,31 +1,25 @@
-From 7d4d3fee0c7111830ee9b2b049ae3ce265b26030 Mon Sep 17 00:00:00 2001
-From: =?UTF-8?q?Simone=20Wei=C3=9F?= <simone.p.weiss@posteo.com>
-Date: Sat, 10 Feb 2024 13:23:56 +0100
+From f91073242268d2b2c1a1a705e7fd585679f78a59 Mon Sep 17 00:00:00 2001
+From: Trevor Gamblin <tgamblin@baylibre.com>
+Date: Fri, 31 May 2024 09:18:17 -0400
 Subject: [PATCH] selftest-hello: add CVE_STATUS
-MIME-Version: 1.0
-Content-Type: text/plain; charset=UTF-8
-Content-Transfer-Encoding: 8bit
 
 This should pass the test_cve_tag_format selftest.
 
-Signed-off-by: Simone Weiß <simone.p.weiss@posteo.com>
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
 ---
- .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 4 +++-
- 1 file changed, 3 insertions(+), 1 deletion(-)
+ meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 1 +
+ 1 file changed, 1 insertion(+)
 
 diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
-index 547587bef4..9908b3b417 100644
+index 2dc352d479e..88c5c98608f 100644
 --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
 +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
-@@ -16,4 +16,6 @@ do_install() {
+@@ -17,4 +17,5 @@ do_install() {
  install -m 0755 helloworld ${D}${bindir}
  }
 
--BBCLASSEXTEND = "native nativesdk"
-\ No newline at end of file
 +CVE_STATUS[CVE-2024-12345] = "not-applicable-platform: Issue only applies on Windows"
-+
-+BBCLASSEXTEND = "native nativesdk"
+ BBCLASSEXTEND = "native nativesdk"
 --
-2.39.2
+2.45.1
 
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.fail b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.fail
index ab6c52c374..61b3784e3c 100644
--- a/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.fail
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.fail
@@ -1,19 +1,17 @@
1From f89919ea86d38404dd621521680a0162367bb965 Mon Sep 17 00:00:00 2001 1From 974c3a143bc67faaff9abcc0a06a3d5e692fc660 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com> 2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Wed, 6 Sep 2023 09:09:27 -0400 3Date: Fri, 31 May 2024 11:51:15 -0400
4Subject: [PATCH] selftest-hello: update LIC_FILES_CHKSUM 4Subject: [PATCH] selftest-hello: update LIC_FILES_CHKSUM
5 5
6This test should fail the 6This should fail the test_lic_files_chksum_modified_not_mentioned test.
7test_metadata_lic_files_chksum.LicFilesChkSum.test_lic_files_chksum_modified_not_mentioned
8test.
9 7
10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> 8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
11--- 9---
12 .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 4 ++-- 10 meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 2 +-
13 1 file changed, 2 insertions(+), 2 deletions(-) 11 1 file changed, 1 insertion(+), 1 deletion(-)
14 12
15diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 13diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
16index 547587bef4..65dda40aba 100644 14index 2dc352d479e..356921db1dd 100644
17--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 15--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
18+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 16+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
19@@ -1,7 +1,7 @@ 17@@ -1,7 +1,7 @@
@@ -25,13 +23,6 @@ index 547587bef4..65dda40aba 100644
25 23
26 SRC_URI = "file://helloworld.c" 24 SRC_URI = "file://helloworld.c"
27 25
28@@ -16,4 +16,4 @@ do_install() {
29 install -m 0755 helloworld ${D}${bindir}
30 }
31
32-BBCLASSEXTEND = "native nativesdk"
33\ No newline at end of file
34+BBCLASSEXTEND = "native nativesdk"
35-- 26--
362.41.0 272.45.1
37 28
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.pass b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.pass
index 99d9f144da..b7be1e8e55 100644
--- a/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.pass
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.pass
@@ -1,21 +1,19 @@
1From f89919ea86d38404dd621521680a0162367bb965 Mon Sep 17 00:00:00 2001 1From 974c3a143bc67faaff9abcc0a06a3d5e692fc660 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com> 2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Wed, 6 Sep 2023 09:09:27 -0400 3Date: Fri, 31 May 2024 11:51:15 -0400
4Subject: [PATCH] selftest-hello: update LIC_FILES_CHKSUM 4Subject: [PATCH] selftest-hello: update LIC_FILES_CHKSUM
5 5
6License-Update: Fix checksum 6This should pass the test_lic_files_chksum_modified_not_mentioned test.
7 7
8This test should pass the 8License-Update: Stuff happened!
9test_metadata_lic_files_chksum.LicFilesChkSum.test_lic_files_chksum_modified_not_mentioned
10test.
11 9
12Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> 10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
13--- 11---
14 .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 4 ++-- 12 meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 2 +-
15 1 file changed, 2 insertions(+), 2 deletions(-) 13 1 file changed, 1 insertion(+), 1 deletion(-)
16 14
17diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 15diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
18index 547587bef4..65dda40aba 100644 16index 2dc352d479e..356921db1dd 100644
19--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 17--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
20+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 18+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
21@@ -1,7 +1,7 @@ 19@@ -1,7 +1,7 @@
@@ -27,13 +25,6 @@ index 547587bef4..65dda40aba 100644
27 25
28 SRC_URI = "file://helloworld.c" 26 SRC_URI = "file://helloworld.c"
29 27
30@@ -16,4 +16,4 @@ do_install() {
31 install -m 0755 helloworld ${D}${bindir}
32 }
33
34-BBCLASSEXTEND = "native nativesdk"
35\ No newline at end of file
36+BBCLASSEXTEND = "native nativesdk"
37-- 28--
382.41.0 292.45.1
39 30
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.fail b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.fail
index e14d644bb2..a7a0b0bacb 100644
--- a/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.fail
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.fail
@@ -1,53 +1,42 @@
1From 66430e7c6fbd5187b66560909a510e136fed91c0 Mon Sep 17 00:00:00 2001 1From 74bc209a4fbe4da2f57e153ccfff3d2241dada8d Mon Sep 17 00:00:00 2001
2From: Daniela Plascencia <daniela.plascencia@linux.intel.com> 2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Thu, 23 Feb 2017 10:34:27 -0600 3Date: Fri, 31 May 2024 11:03:47 -0400
4Subject: [PATCH] meta: adding hello-yocto recipe 4Subject: [PATCH] selftest-hello: add selftest-hello-extra
5 5
6This is a sample recipe 6This should fail the test_lic_files_chksum_presence test.
7 7
8Signed-off-by: Daniela Plascencia <daniela.plascencia@linux.intel.com> 8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9--- 9---
10 .../hello-world/hello-world/hello_world.c | 5 +++++ 10 .../selftest-hello-extra_1.0.bb | 20 +++++++++++++++++++
11 .../hello-world/hello-world_1.0.bb | 18 ++++++++++++++++++ 11 1 file changed, 20 insertions(+)
12 2 files changed, 23 insertions(+) 12 create mode 100644 meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
13 create mode 100644 meta/recipes-devtools/hello-world/hello-world/hello_world.c
14 create mode 100644 meta/recipes-devtools/hello-world/hello-world_1.0.bb
15 13
16diff --git a/meta/recipes-devtools/hello-world/hello-world/hello_world.c b/meta/recipes-devtools/hello-world/hello-world/hello_world.c 14diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
17new file mode 100644 15new file mode 100644
18index 0000000000..0d59f57d4c 16index 00000000000..875bcbef859
19--- /dev/null 17--- /dev/null
20+++ b/meta/recipes-devtools/hello-world/hello-world/hello_world.c 18+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
21@@ -0,0 +1,5 @@ 19@@ -0,0 +1,20 @@
22+#include <stdio.h> 20+SUMMARY = "This is an example summary"
23+ 21+DESCRIPTION = "Simple helloworld application -- selftest variant"
24+int main(){ 22+SECTION = "examples"
25+ printf("Hello World\n");
26+}
27diff --git a/meta/recipes-devtools/hello-world/hello-world_1.0.bb b/meta/recipes-devtools/hello-world/hello-world_1.0.bb
28new file mode 100644
29index 0000000000..3c990c108a
30--- /dev/null
31+++ b/meta/recipes-devtools/hello-world/hello-world_1.0.bb
32@@ -0,0 +1,18 @@
33+SUMMARY = "This is a sample summary"
34+DESCRIPTION = "This is a sample description"
35+HOMEPAGE = "https://sample.com/this-is-a-sample"
36+LICENSE = "MIT" 23+LICENSE = "MIT"
37+ 24+
38+SRC_URI += "file://hello_world.c" 25+SRC_URI = "file://helloworld.c"
39+ 26+
40+SRC_URI[md5sum] = "4ee21e9dcc9b5b6012c23038734e1632" 27+S = "${WORKDIR}/sources"
41+SRC_URI[sha256sum] = "edef2bbde0fbf0d88232782a0eded323f483a0519d6fde9a3b1809056fd35f3e" 28+UNPACKDIR = "${S}"
42+ 29+
43+do_compile(){ 30+do_compile() {
44+ ${CC} -o hello_world ../hello_world.c 31+ ${CC} ${CFLAGS} ${LDFLAGS} helloworld.c -o helloworld
45+} 32+}
46+ 33+
47+do_install(){ 34+do_install() {
48+ install -d ${D}${bindir} 35+ install -d ${D}${bindir}
49+ install -m +x hello_world ${D}${bindir}/hello_world 36+ install -m 0755 helloworld ${D}${bindir}
50+} 37+}
38+
39+BBCLASSEXTEND = "native nativesdk"
51-- 40--
522.41.0 412.45.1
53 42
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.pass b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.pass
index b8da16dfe5..8ffa97ec56 100644
--- a/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.pass
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.pass
@@ -1,54 +1,43 @@
1From 5144d2ba1aa763312c047dd5f8901368cff79da6 Mon Sep 17 00:00:00 2001 1From 04eb94a0134ef5eb5b5fd783b303104fbfcd8437 Mon Sep 17 00:00:00 2001
2From: Daniela Plascencia <daniela.plascencia@linux.intel.com> 2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Thu, 23 Feb 2017 10:34:27 -0600 3Date: Fri, 31 May 2024 11:03:47 -0400
4Subject: [PATCH] meta: adding hello-yocto recipe 4Subject: [PATCH] selftest-hello: add selftest-hello-extra
5 5
6This is a sample recipe 6This should pass the test_lic_files_chksum_presence test.
7 7
8Signed-off-by: Daniela Plascencia <daniela.plascencia@linux.intel.com> 8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9--- 9---
10 .../hello-world/hello-world/hello_world.c | 5 +++++ 10 .../selftest-hello-extra_1.0.bb | 21 +++++++++++++++++++
11 .../hello-world/hello-world_1.0.bb | 19 +++++++++++++++++++ 11 1 file changed, 21 insertions(+)
12 2 files changed, 24 insertions(+) 12 create mode 100644 meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
13 create mode 100644 meta/recipes-devtools/hello-world/hello-world/hello_world.c
14 create mode 100644 meta/recipes-devtools/hello-world/hello-world_1.0.bb
15 13
16diff --git a/meta/recipes-devtools/hello-world/hello-world/hello_world.c b/meta/recipes-devtools/hello-world/hello-world/hello_world.c 14diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
17new file mode 100644 15new file mode 100644
18index 0000000000..0d59f57d4c 16index 00000000000..f3dec1b220c
19--- /dev/null 17--- /dev/null
20+++ b/meta/recipes-devtools/hello-world/hello-world/hello_world.c 18+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
21@@ -0,0 +1,5 @@ 19@@ -0,0 +1,21 @@
22+#include <stdio.h> 20+SUMMARY = "This is an example summary"
23+ 21+DESCRIPTION = "Simple helloworld application -- selftest variant"
24+int main(){ 22+SECTION = "examples"
25+ printf("Hello World\n");
26+}
27diff --git a/meta/recipes-devtools/hello-world/hello-world_1.0.bb b/meta/recipes-devtools/hello-world/hello-world_1.0.bb
28new file mode 100644
29index 0000000000..44d888c82a
30--- /dev/null
31+++ b/meta/recipes-devtools/hello-world/hello-world_1.0.bb
32@@ -0,0 +1,19 @@
33+SUMMARY = "This is a sample summary"
34+DESCRIPTION = "This is a sample description"
35+HOMEPAGE = "https://sample.com/this-is-a-sample"
36+LICENSE = "MIT" 23+LICENSE = "MIT"
37+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302" 24+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
38+ 25+
39+SRC_URI += "file://hello_world.c" 26+SRC_URI = "file://helloworld.c"
40+ 27+
41+SRC_URI[md5sum] = "4ee21e9dcc9b5b6012c23038734e1632" 28+S = "${WORKDIR}/sources"
42+SRC_URI[sha256sum] = "edef2bbde0fbf0d88232782a0eded323f483a0519d6fde9a3b1809056fd35f3e" 29+UNPACKDIR = "${S}"
43+ 30+
44+do_compile(){ 31+do_compile() {
45+ ${CC} -o hello_world ../hello_world.c 32+ ${CC} ${CFLAGS} ${LDFLAGS} helloworld.c -o helloworld
46+} 33+}
47+ 34+
48+do_install(){ 35+do_install() {
49+ install -d ${D}${bindir} 36+ install -d ${D}${bindir}
50+ install -m +x hello_world ${D}${bindir}/hello_world 37+ install -m 0755 helloworld ${D}${bindir}
51+} 38+}
39+
40+BBCLASSEXTEND = "native nativesdk"
52-- 41--
532.41.0 422.45.1
54 43
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.fail b/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.fail
index 983b6e0c2b..0a402d0a3e 100644
--- a/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.fail
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.fail
@@ -1,17 +1,17 @@
1From 4ab06b5f81455249cd5e89d2cce9863803b5ecb5 Mon Sep 17 00:00:00 2001 1From f2f7b6bcb831289bc3ba2343ad7dc5bee6d6e0cd Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com> 2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 8 Sep 2023 14:41:00 -0400 3Date: Fri, 31 May 2024 08:45:41 -0400
4Subject: [PATCH] selftest-hello: remove helloworld.c 4Subject: [PATCH] selftest-hello: remove helloworld.c
5 5
6This should fail the test_src_uri_left_files selftest. 6This should fail the test_src_uri_left_files selftest.
7 7
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> 8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9--- 9---
10 .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 4 +--- 10 meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | 2 --
11 1 file changed, 1 insertion(+), 3 deletions(-) 11 1 file changed, 2 deletions(-)
12 12
13diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 13diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
14index 547587bef4..f6817f05bc 100644 14index 2dc352d479e..e95270adaeb 100644
15--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 15--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
16+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 16+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
17@@ -3,8 +3,6 @@ SECTION = "examples" 17@@ -3,8 +3,6 @@ SECTION = "examples"
@@ -20,16 +20,9 @@ index 547587bef4..f6817f05bc 100644
20 20
21-SRC_URI = "file://helloworld.c" 21-SRC_URI = "file://helloworld.c"
22- 22-
23 S = "${WORKDIR}" 23 S = "${WORKDIR}/sources"
24 UNPACKDIR = "${S}"
24 25
25 do_compile() {
26@@ -16,4 +14,4 @@ do_install() {
27 install -m 0755 helloworld ${D}${bindir}
28 }
29
30-BBCLASSEXTEND = "native nativesdk"
31\ No newline at end of file
32+BBCLASSEXTEND = "native nativesdk"
33-- 26--
342.41.0 272.45.1
35 28
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.pass b/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.pass
index 1f1a77e581..a675c028d0 100644
--- a/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.pass
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.pass
@@ -1,6 +1,6 @@
1From 6c7ac367a873bf827c19b81085c943eace917a99 Mon Sep 17 00:00:00 2001 1From e79933e2fc68570066eca66f0b599d259b7a1731 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com> 2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Fri, 8 Sep 2023 14:41:00 -0400 3Date: Fri, 31 May 2024 08:18:48 -0400
4Subject: [PATCH] selftest-hello: remove helloworld.c 4Subject: [PATCH] selftest-hello: remove helloworld.c
5 5
6This should pass the test_src_uri_left_files selftest. 6This should pass the test_src_uri_left_files selftest.
@@ -8,13 +8,13 @@ This should pass the test_src_uri_left_files selftest.
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> 8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9--- 9---
10 .../recipes-test/selftest-hello/files/helloworld.c | 8 -------- 10 .../recipes-test/selftest-hello/files/helloworld.c | 8 --------
11 .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 4 +--- 11 .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 2 --
12 2 files changed, 1 insertion(+), 11 deletions(-) 12 2 files changed, 10 deletions(-)
13 delete mode 100644 meta-selftest/recipes-test/selftest-hello/files/helloworld.c 13 delete mode 100644 meta-selftest/recipes-test/selftest-hello/files/helloworld.c
14 14
15diff --git a/meta-selftest/recipes-test/selftest-hello/files/helloworld.c b/meta-selftest/recipes-test/selftest-hello/files/helloworld.c 15diff --git a/meta-selftest/recipes-test/selftest-hello/files/helloworld.c b/meta-selftest/recipes-test/selftest-hello/files/helloworld.c
16deleted file mode 100644 16deleted file mode 100644
17index fc7169b7b8..0000000000 17index fc7169b7b83..00000000000
18--- a/meta-selftest/recipes-test/selftest-hello/files/helloworld.c 18--- a/meta-selftest/recipes-test/selftest-hello/files/helloworld.c
19+++ /dev/null 19+++ /dev/null
20@@ -1,8 +0,0 @@ 20@@ -1,8 +0,0 @@
@@ -27,7 +27,7 @@ index fc7169b7b8..0000000000
27- return 0; 27- return 0;
28-} 28-}
29diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 29diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
30index 547587bef4..f6817f05bc 100644 30index 2dc352d479e..e95270adaeb 100644
31--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 31--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
32+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 32+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
33@@ -3,8 +3,6 @@ SECTION = "examples" 33@@ -3,8 +3,6 @@ SECTION = "examples"
@@ -36,16 +36,9 @@ index 547587bef4..f6817f05bc 100644
36 36
37-SRC_URI = "file://helloworld.c" 37-SRC_URI = "file://helloworld.c"
38- 38-
39 S = "${WORKDIR}" 39 S = "${WORKDIR}/sources"
40 UNPACKDIR = "${S}"
40 41
41 do_compile() {
42@@ -16,4 +14,4 @@ do_install() {
43 install -m 0755 helloworld ${D}${bindir}
44 }
45
46-BBCLASSEXTEND = "native nativesdk"
47\ No newline at end of file
48+BBCLASSEXTEND = "native nativesdk"
49-- 42--
502.41.0 432.45.1
51 44
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.fail b/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.fail
index 2d2b4e683d..1087843619 100644
--- a/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.fail
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.fail
@@ -1,46 +1,42 @@
1From e29da5faa74409be394caa09d9f3b7b60f8592b9 Mon Sep 17 00:00:00 2001 1From f4b72cc24f5e2a290a8637775c4d41c16d5d83aa Mon Sep 17 00:00:00 2001
2From: Daniela Plascencia <daniela.plascencia@linux.intel.com> 2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Thu, 23 Feb 2017 10:34:27 -0600 3Date: Fri, 31 May 2024 11:03:47 -0400
4Subject: [PATCH] meta: adding hello-yocto recipe 4Subject: [PATCH] selftest-hello: add selftest-hello-extra
5 5
6This is a sample recipe 6This should fail the test_summary_presence test.
7 7
8Signed-off-by: Daniela Plascencia <daniela.plascencia@linux.intel.com> 8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9--- 9---
10 meta/recipes-devtools/hello-world/hello-world/hello_world.c | 5 +++++ 10 .../selftest-hello-extra_1.0.bb | 20 +++++++++++++++++++
11 meta/recipes-devtools/hello-world/hello-world_1.0.bb | 12 ++++++++++++ 11 1 file changed, 20 insertions(+)
12 2 files changed, 17 insertions(+) 12 create mode 100644 meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
13 create mode 100644 meta/recipes-devtools/hello-world/hello-world/hello_world.c
14 create mode 100644 meta/recipes-devtools/hello-world/hello-world_1.0.bb
15 13
16diff --git a/meta/recipes-devtools/hello-world/hello-world/hello_world.c b/meta/recipes-devtools/hello-world/hello-world/hello_world.c 14diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
17new file mode 100644 15new file mode 100644
18index 0000000000..0d59f57d4c 16index 00000000000..2dc352d479e
19--- /dev/null 17--- /dev/null
20+++ b/meta/recipes-devtools/hello-world/hello-world/hello_world.c 18+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
21@@ -0,0 +1,5 @@ 19@@ -0,0 +1,20 @@
22+#include <stdio.h> 20+DESCRIPTION = "Simple helloworld application -- selftest variant"
21+SECTION = "examples"
22+LICENSE = "MIT"
23+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
23+ 24+
24+int main(){ 25+SRC_URI = "file://helloworld.c"
25+ printf("Hello World\n");
26+}
27diff --git a/meta/recipes-devtools/hello-world/hello-world_1.0.bb b/meta/recipes-devtools/hello-world/hello-world_1.0.bb
28new file mode 100644
29index 0000000000..c4e1359217
30--- /dev/null
31+++ b/meta/recipes-devtools/hello-world/hello-world_1.0.bb
32@@ -0,0 +1,12 @@
33+LICENSE = "CLOSED"
34+ 26+
35+SRC_URI += "file://hello_world.c" 27+S = "${WORKDIR}/sources"
28+UNPACKDIR = "${S}"
36+ 29+
37+do_compile(){ 30+do_compile() {
38+ ${CC} -o hello_world ../hello_world.c 31+ ${CC} ${CFLAGS} ${LDFLAGS} helloworld.c -o helloworld
39+} 32+}
40+ 33+
41+do_install(){ 34+do_install() {
42+ install -d ${D}${bindir} 35+ install -d ${D}${bindir}
43+ install -m +x hello_world ${D}${bindir}/hello_world 36+ install -m 0755 helloworld ${D}${bindir}
44+} 37+}
45-- 38+
462.11.0 39+BBCLASSEXTEND = "native nativesdk"
40--
412.45.1
42
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.pass b/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.pass
index 55f0309b3f..3d35a8d8fb 100644
--- a/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.pass
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.pass
@@ -1,49 +1,43 @@
1From 0cd2fed12ce4b7b071edde12aec4481ad7a6f107 Mon Sep 17 00:00:00 2001 1From 04eb94a0134ef5eb5b5fd783b303104fbfcd8437 Mon Sep 17 00:00:00 2001
2From: Daniela Plascencia <daniela.plascencia@linux.intel.com> 2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Thu, 23 Feb 2017 10:34:27 -0600 3Date: Fri, 31 May 2024 11:03:47 -0400
4Subject: [PATCH] meta: adding hello-yocto recipe 4Subject: [PATCH] selftest-hello: add selftest-hello-extra
5 5
6This is a sample recipe 6This should pass the test_summary_presence test.
7 7
8Signed-off-by: Daniela Plascencia <daniela.plascencia@linux.intel.com> 8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9--- 9---
10 .../hello-world/hello-world/hello_world.c | 5 +++++ 10 .../selftest-hello-extra_1.0.bb | 21 +++++++++++++++++++
11 meta/recipes-devtools/hello-world/hello-world_1.0.bb | 15 +++++++++++++++ 11 1 file changed, 21 insertions(+)
12 2 files changed, 20 insertions(+) 12 create mode 100644 meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
13 create mode 100644 meta/recipes-devtools/hello-world/hello-world/hello_world.c
14 create mode 100644 meta/recipes-devtools/hello-world/hello-world_1.0.bb
15 13
16diff --git a/meta/recipes-devtools/hello-world/hello-world/hello_world.c b/meta/recipes-devtools/hello-world/hello-world/hello_world.c 14diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
17new file mode 100644 15new file mode 100644
18index 0000000000..0d59f57d4c 16index 00000000000..f3dec1b220c
19--- /dev/null 17--- /dev/null
20+++ b/meta/recipes-devtools/hello-world/hello-world/hello_world.c 18+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello-extra_1.0.bb
21@@ -0,0 +1,5 @@ 19@@ -0,0 +1,21 @@
22+#include <stdio.h> 20+SUMMARY = "This is an example summary"
21+DESCRIPTION = "Simple helloworld application -- selftest variant"
22+SECTION = "examples"
23+LICENSE = "MIT"
24+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
23+ 25+
24+int main(){ 26+SRC_URI = "file://helloworld.c"
25+ printf("Hello World\n");
26+}
27diff --git a/meta/recipes-devtools/hello-world/hello-world_1.0.bb b/meta/recipes-devtools/hello-world/hello-world_1.0.bb
28new file mode 100644
29index 0000000000..c54283eece
30--- /dev/null
31+++ b/meta/recipes-devtools/hello-world/hello-world_1.0.bb
32@@ -0,0 +1,15 @@
33+SUMMARY = "This is a sample summary"
34+DESCRIPTION = "This is a sample description"
35+HOMEPAGE = "https://sample.com/this-is-a-sample"
36+LICENSE = "CLOSED"
37+ 27+
38+SRC_URI += "file://hello_world.c" 28+S = "${WORKDIR}/sources"
29+UNPACKDIR = "${S}"
39+ 30+
40+do_compile(){ 31+do_compile() {
41+ ${CC} -o hello_world ../hello_world.c 32+ ${CC} ${CFLAGS} ${LDFLAGS} helloworld.c -o helloworld
42+} 33+}
43+ 34+
44+do_install(){ 35+do_install() {
45+ install -d ${D}${bindir} 36+ install -d ${D}${bindir}
46+ install -m +x hello_world ${D}${bindir}/hello_world 37+ install -m 0755 helloworld ${D}${bindir}
47+} 38+}
48-- 39+
492.11.0 40+BBCLASSEXTEND = "native nativesdk"
41--
422.45.1
43
diff --git a/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.fail b/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.fail
index c763a7506e..f64f2a40b0 100644
--- a/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.fail
+++ b/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.fail
@@ -1,29 +1,31 @@
1From 35ccee3cee96fb29514475279248078d88907231 Mon Sep 17 00:00:00 2001 1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com> 2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Tue, 29 Aug 2023 14:12:27 -0400 3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789 4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5 5
6CVE: CVE-BAD-FORMAT 6This should fail the test_cve_tag_format test.
7
8CVE: CVE-1234-56789
7 9
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> 10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9--- 11---
10 .../files/0001-Fix-CVE-1234-56789.patch | 27 +++++++++++++++++++ 12 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
11 .../selftest-hello/selftest-hello_1.0.bb | 6 +++-- 13 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
12 2 files changed, 31 insertions(+), 2 deletions(-) 14 2 files changed, 29 insertions(+), 1 deletion(-)
13 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch 15 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
14 16
15diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch 17diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16new file mode 100644 18new file mode 100644
17index 0000000000..9219b8db62 19index 00000000000..8a4f9329303
18--- /dev/null 20--- /dev/null
19+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch 21+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
20@@ -0,0 +1,27 @@ 22@@ -0,0 +1,26 @@
21+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 23+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
22+From: Trevor Gamblin <tgamblin@baylibre.com> 24+From: Trevor Gamblin <tgamblin@baylibre.com>
23+Date: Tue, 29 Aug 2023 14:08:20 -0400 25+Date: Tue, 29 Aug 2023 14:08:20 -0400
24+Subject: [PATCH] Fix CVE-NOT-REAL 26+Subject: [PATCH] Fix CVE-NOT-REAL
25+ 27+
26+CVE: CVE-BAD-FORMAT 28+CVE: CVE-BAD_FORMAT
27+Upstream-Status: Backport(http://example.com/example) 29+Upstream-Status: Backport(http://example.com/example)
28+ 30+
29+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> 31+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
@@ -35,19 +37,17 @@ index 0000000000..9219b8db62
35+index 1788f38..83d7918 100644 37+index 1788f38..83d7918 100644
36+--- a/strlen.c 38+--- a/strlen.c
37++++ b/strlen.c 39++++ b/strlen.c
38+@@ -8,6 +8,7 @@ int main() {
39+
40+ printf("%d\n", str_len(string1));
41+ printf("%d\n", str_len(string2));
42++ printf("CVE FIXED!!!\n");
43+
44+ return 0;
45+ }
46+--
47+2.41.0
48+ 40+
41+int main() {
42+
43+ printf("%d\n", str_len(string1));
44+ printf("%d\n", str_len(string2));
45+ printf("CVE FIXED!!!\n");
46+
47+ return 0;
48+}
49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
50index 547587bef4..76975a6729 100644 50index 2dc352d479e..d937759f157 100644
51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
53@@ -3,7 +3,9 @@ SECTION = "examples" 53@@ -3,7 +3,9 @@ SECTION = "examples"
@@ -56,18 +56,11 @@ index 547587bef4..76975a6729 100644
56 56
57-SRC_URI = "file://helloworld.c" 57-SRC_URI = "file://helloworld.c"
58+SRC_URI = "file://helloworld.c \ 58+SRC_URI = "file://helloworld.c \
59+ file://CVE-1234-56789.patch \ 59+ file://0001-Fix-CVE-1234-56789.patch \
60+ " 60+ "
61
62 S = "${WORKDIR}"
63
64@@ -16,4 +18,4 @@ do_install() {
65 install -m 0755 helloworld ${D}${bindir}
66 }
67 61
68-BBCLASSEXTEND = "native nativesdk" 62 S = "${WORKDIR}/sources"
69\ No newline at end of file 63 UNPACKDIR = "${S}"
70+BBCLASSEXTEND = "native nativesdk"
71-- 64--
722.41.0 652.45.1
73 66
diff --git a/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.pass b/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.pass
index ef6017037c..3819487041 100644
--- a/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.pass
+++ b/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.pass
@@ -1,23 +1,25 @@
1From 35ccee3cee96fb29514475279248078d88907231 Mon Sep 17 00:00:00 2001 1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com> 2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Tue, 29 Aug 2023 14:12:27 -0400 3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789 4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5 5
6This should pass the test_cve_tag format test.
7
6CVE: CVE-1234-56789 8CVE: CVE-1234-56789
7 9
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> 10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9--- 11---
10 .../files/0001-Fix-CVE-1234-56789.patch | 27 +++++++++++++++++++ 12 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
11 .../selftest-hello/selftest-hello_1.0.bb | 6 +++-- 13 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
12 2 files changed, 31 insertions(+), 2 deletions(-) 14 2 files changed, 29 insertions(+), 1 deletion(-)
13 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch 15 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
14 16
15diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch 17diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16new file mode 100644 18new file mode 100644
17index 0000000000..9219b8db62 19index 00000000000..8a4f9329303
18--- /dev/null 20--- /dev/null
19+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch 21+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
20@@ -0,0 +1,27 @@ 22@@ -0,0 +1,26 @@
21+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 23+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
22+From: Trevor Gamblin <tgamblin@baylibre.com> 24+From: Trevor Gamblin <tgamblin@baylibre.com>
23+Date: Tue, 29 Aug 2023 14:08:20 -0400 25+Date: Tue, 29 Aug 2023 14:08:20 -0400
@@ -35,19 +37,17 @@ index 0000000000..9219b8db62
35+index 1788f38..83d7918 100644 37+index 1788f38..83d7918 100644
36+--- a/strlen.c 38+--- a/strlen.c
37++++ b/strlen.c 39++++ b/strlen.c
38+@@ -8,6 +8,7 @@ int main() {
39+
40+ printf("%d\n", str_len(string1));
41+ printf("%d\n", str_len(string2));
42++ printf("CVE FIXED!!!\n");
43+
44+ return 0;
45+ }
46+--
47+2.41.0
48+ 40+
41+int main() {
42+
43+ printf("%d\n", str_len(string1));
44+ printf("%d\n", str_len(string2));
45+ printf("CVE FIXED!!!\n");
46+
47+ return 0;
48+}
49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
50index 547587bef4..76975a6729 100644 50index 2dc352d479e..d937759f157 100644
51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
53@@ -3,7 +3,9 @@ SECTION = "examples" 53@@ -3,7 +3,9 @@ SECTION = "examples"
@@ -56,18 +56,11 @@ index 547587bef4..76975a6729 100644
56 56
57-SRC_URI = "file://helloworld.c" 57-SRC_URI = "file://helloworld.c"
58+SRC_URI = "file://helloworld.c \ 58+SRC_URI = "file://helloworld.c \
59+ file://CVE-1234-56789.patch \ 59+ file://0001-Fix-CVE-1234-56789.patch \
60+ " 60+ "
61
62 S = "${WORKDIR}"
63
64@@ -16,4 +18,4 @@ do_install() {
65 install -m 0755 helloworld ${D}${bindir}
66 }
67 61
68-BBCLASSEXTEND = "native nativesdk" 62 S = "${WORKDIR}/sources"
69\ No newline at end of file 63 UNPACKDIR = "${S}"
70+BBCLASSEXTEND = "native nativesdk"
71-- 64--
722.41.0 652.45.1
73 66
diff --git a/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.fail b/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.fail
index ce8bf7b7d1..b2d0fab9e3 100644
--- a/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.fail
+++ b/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.fail
@@ -1,23 +1,25 @@
1From 5a2d0ac780a0f4c046fb1a3c3463d3e726f191cb Mon Sep 17 00:00:00 2001 1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com> 2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Tue, 29 Aug 2023 14:12:27 -0400 3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789 4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5 5
6This should fail the test_signed_off_by_presence test.
7
6CVE: CVE-1234-56789 8CVE: CVE-1234-56789
7 9
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> 10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9--- 11---
10 .../selftest-hello/files/CVE-1234-56789.patch | 26 +++++++++++++++++++ 12 .../files/0001-Fix-CVE-1234-56789.patch | 25 +++++++++++++++++++
11 .../selftest-hello/selftest-hello_1.0.bb | 6 +++-- 13 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
12 2 files changed, 30 insertions(+), 2 deletions(-) 14 2 files changed, 28 insertions(+), 1 deletion(-)
13 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch 15 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
14 16
15diff --git a/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch 17diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16new file mode 100644 18new file mode 100644
17index 0000000000..92a5b65a53 19index 00000000000..8a4f9329303
18--- /dev/null 20--- /dev/null
19+++ b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch 21+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
20@@ -0,0 +1,26 @@ 22@@ -0,0 +1,25 @@
21+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 23+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
22+From: Trevor Gamblin <tgamblin@baylibre.com> 24+From: Trevor Gamblin <tgamblin@baylibre.com>
23+Date: Tue, 29 Aug 2023 14:08:20 -0400 25+Date: Tue, 29 Aug 2023 14:08:20 -0400
@@ -34,18 +36,17 @@ index 0000000000..92a5b65a53
34+index 1788f38..83d7918 100644 36+index 1788f38..83d7918 100644
35+--- a/strlen.c 37+--- a/strlen.c
36++++ b/strlen.c 38++++ b/strlen.c
37+@@ -8,6 +8,7 @@ int main() { 39+
38+ 40+int main() {
39+ printf("%d\n", str_len(string1)); 41+
40+ printf("%d\n", str_len(string2)); 42+ printf("%d\n", str_len(string1));
41++ printf("CVE FIXED!!!\n"); 43+ printf("%d\n", str_len(string2));
42+ 44+ printf("CVE FIXED!!!\n");
43+ return 0; 45+
44+ } 46+ return 0;
45+-- 47+}
46+2.41.0
47diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 48diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
48index 547587bef4..76975a6729 100644 49index 2dc352d479e..d937759f157 100644
49--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 50--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
50+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 51+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
51@@ -3,7 +3,9 @@ SECTION = "examples" 52@@ -3,7 +3,9 @@ SECTION = "examples"
@@ -54,18 +55,11 @@ index 547587bef4..76975a6729 100644
54 55
55-SRC_URI = "file://helloworld.c" 56-SRC_URI = "file://helloworld.c"
56+SRC_URI = "file://helloworld.c \ 57+SRC_URI = "file://helloworld.c \
57+ file://CVE-1234-56789.patch \ 58+ file://0001-Fix-CVE-1234-56789.patch \
58+ " 59+ "
59
60 S = "${WORKDIR}"
61
62@@ -16,4 +18,4 @@ do_install() {
63 install -m 0755 helloworld ${D}${bindir}
64 }
65 60
66-BBCLASSEXTEND = "native nativesdk" 61 S = "${WORKDIR}/sources"
67\ No newline at end of file 62 UNPACKDIR = "${S}"
68+BBCLASSEXTEND = "native nativesdk"
69-- 63--
702.41.0 642.45.1
71 65
diff --git a/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.pass b/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.pass
index ea34c76f0d..2661c1416f 100644
--- a/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.pass
+++ b/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.pass
@@ -1,23 +1,25 @@
1From 14d72f6973270f78455a8628143f2cff90e8f41e Mon Sep 17 00:00:00 2001 1From c9519f11502d5bb5c143ed43b4c981b6a211bdf9 Mon Sep 17 00:00:00 2001
2From: Trevor Gamblin <tgamblin@baylibre.com> 2From: Trevor Gamblin <tgamblin@baylibre.com>
3Date: Tue, 29 Aug 2023 14:12:27 -0400 3Date: Fri, 31 May 2024 09:54:50 -0400
4Subject: [PATCH] selftest-hello: fix CVE-1234-56789 4Subject: [PATCH] selftest-hello: fix CVE-1234-56789
5 5
6This should pass the test_signed_off_by_presence test.
7
6CVE: CVE-1234-56789 8CVE: CVE-1234-56789
7 9
8Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> 10Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
9--- 11---
10 .../selftest-hello/files/CVE-1234-56789.patch | 27 +++++++++++++++++++ 12 .../files/0001-Fix-CVE-1234-56789.patch | 26 +++++++++++++++++++
11 .../selftest-hello/selftest-hello_1.0.bb | 6 +++-- 13 .../selftest-hello/selftest-hello_1.0.bb | 4 ++-
12 2 files changed, 31 insertions(+), 2 deletions(-) 14 2 files changed, 29 insertions(+), 1 deletion(-)
13 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch 15 create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
14 16
15diff --git a/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch 17diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
16new file mode 100644 18new file mode 100644
17index 0000000000..869cfb6fe5 19index 00000000000..8a4f9329303
18--- /dev/null 20--- /dev/null
19+++ b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch 21+++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
20@@ -0,0 +1,27 @@ 22@@ -0,0 +1,26 @@
21+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001 23+From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
22+From: Trevor Gamblin <tgamblin@baylibre.com> 24+From: Trevor Gamblin <tgamblin@baylibre.com>
23+Date: Tue, 29 Aug 2023 14:08:20 -0400 25+Date: Tue, 29 Aug 2023 14:08:20 -0400
@@ -35,18 +37,17 @@ index 0000000000..869cfb6fe5
35+index 1788f38..83d7918 100644 37+index 1788f38..83d7918 100644
36+--- a/strlen.c 38+--- a/strlen.c
37++++ b/strlen.c 39++++ b/strlen.c
38+@@ -8,6 +8,7 @@ int main() { 40+
39+ 41+int main() {
40+ printf("%d\n", str_len(string1)); 42+
41+ printf("%d\n", str_len(string2)); 43+ printf("%d\n", str_len(string1));
42++ printf("CVE FIXED!!!\n"); 44+ printf("%d\n", str_len(string2));
43+ 45+ printf("CVE FIXED!!!\n");
44+ return 0; 46+
45+ } 47+ return 0;
46+-- 48+}
47+2.41.0
48diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 49diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
49index 547587bef4..76975a6729 100644 50index 2dc352d479e..d937759f157 100644
50--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 51--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
51+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb 52+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
52@@ -3,7 +3,9 @@ SECTION = "examples" 53@@ -3,7 +3,9 @@ SECTION = "examples"
@@ -55,18 +56,11 @@ index 547587bef4..76975a6729 100644
55 56
56-SRC_URI = "file://helloworld.c" 57-SRC_URI = "file://helloworld.c"
57+SRC_URI = "file://helloworld.c \ 58+SRC_URI = "file://helloworld.c \
58+ file://CVE-1234-56789.patch \ 59+ file://0001-Fix-CVE-1234-56789.patch \
59+ " 60+ "
60
61 S = "${WORKDIR}"
62
63@@ -16,4 +18,4 @@ do_install() {
64 install -m 0755 helloworld ${D}${bindir}
65 }
66 61
67-BBCLASSEXTEND = "native nativesdk" 62 S = "${WORKDIR}/sources"
68\ No newline at end of file 63 UNPACKDIR = "${S}"
69+BBCLASSEXTEND = "native nativesdk"
70-- 64--
712.41.0 652.45.1
72 66
diff --git a/meta/lib/patchtest/tests/test_mbox.py b/meta/lib/patchtest/tests/test_mbox.py
index 0b623b7d17..cd76e58a71 100644
--- a/meta/lib/patchtest/tests/test_mbox.py
+++ b/meta/lib/patchtest/tests/test_mbox.py
@@ -9,6 +9,7 @@ import collections
9import parse_shortlog 9import parse_shortlog
10import parse_signed_off_by 10import parse_signed_off_by
11import pyparsing 11import pyparsing
12import re
12import subprocess 13import subprocess
13from data import PatchTestInput 14from data import PatchTestInput
14 15
@@ -22,7 +23,7 @@ def headlog():
22 23
23class TestMbox(base.Base): 24class TestMbox(base.Base):
24 25
25 auh_email = 'auh@auh.yoctoproject.org' 26 auh_email = 'auh@yoctoproject.org'
26 27
27 invalids = [pyparsing.Regex("^Upgrade Helper.+"), 28 invalids = [pyparsing.Regex("^Upgrade Helper.+"),
28 pyparsing.Regex(auh_email), 29 pyparsing.Regex(auh_email),
@@ -31,7 +32,6 @@ class TestMbox(base.Base):
31 32
32 rexp_detect = pyparsing.Regex('\[\s?YOCTO.*\]') 33 rexp_detect = pyparsing.Regex('\[\s?YOCTO.*\]')
33 rexp_validation = pyparsing.Regex('\[(\s?YOCTO\s?#\s?(\d+)\s?,?)+\]') 34 rexp_validation = pyparsing.Regex('\[(\s?YOCTO\s?#\s?(\d+)\s?,?)+\]')
34 revert_shortlog_regex = pyparsing.Regex('Revert\s+".*"')
35 signoff_prog = parse_signed_off_by.signed_off_by 35 signoff_prog = parse_signed_off_by.signed_off_by
36 revert_shortlog_regex = pyparsing.Regex('Revert\s+".*"') 36 revert_shortlog_regex = pyparsing.Regex('Revert\s+".*"')
37 maxlength = 90 37 maxlength = 90
@@ -83,7 +83,7 @@ class TestMbox(base.Base):
83 def test_shortlog_length(self): 83 def test_shortlog_length(self):
84 for commit in TestMbox.commits: 84 for commit in TestMbox.commits:
85 # no reason to re-check on revert shortlogs 85 # no reason to re-check on revert shortlogs
86 shortlog = commit.shortlog 86 shortlog = re.sub('^(\[.*?\])+ ', '', commit.shortlog)
87 if shortlog.startswith('Revert "'): 87 if shortlog.startswith('Revert "'):
88 continue 88 continue
89 l = len(shortlog) 89 l = len(shortlog)
@@ -155,5 +155,5 @@ class TestMbox(base.Base):
155 155
156 def test_non_auh_upgrade(self): 156 def test_non_auh_upgrade(self):
157 for commit in self.commits: 157 for commit in self.commits:
158 if self.auh_email in commit.payload: 158 if self.auh_email in commit.commit_message:
159 self.fail('Invalid author %s. Resend the series with a valid patch author' % self.auh_email, commit=commit) 159 self.fail('Invalid author %s. Resend the series with a valid patch author' % self.auh_email, commit=commit)
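
The re.sub() call added to test_shortlog_length above strips any leading bracketed series prefixes before the 90-character limit is applied. A minimal, self-contained sketch of that behaviour follows; the sample subject lines are hypothetical and not taken from the patch:

    import re

    def stripped_shortlog(shortlog):
        # Drop leading "[...]" prefixes (e.g. "[OE-core][PATCH v2] ") so only
        # the real shortlog counts toward the length check.
        return re.sub(r'^(\[.*?\])+ ', '', shortlog)

    print(stripped_shortlog('[OE-core][PATCH v2] selftest-hello: fix CVE-1234-56789'))
    # -> selftest-hello: fix CVE-1234-56789
    print(stripped_shortlog('busybox: upgrade 1.36.0 -> 1.36.1'))
    # -> unchanged; no prefix to strip
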
diff --git a/meta/lib/patchtest/utils.py b/meta/lib/patchtest/utils.py
index dd0abc22d9..8eddf3e85f 100644
--- a/meta/lib/patchtest/utils.py
+++ b/meta/lib/patchtest/utils.py
@@ -14,109 +14,6 @@ import logging
14import re 14import re
15import mailbox 15import mailbox
16 16
17class CmdException(Exception):
18 """ Simple exception class where its attributes are the ones passed when instantiated """
19 def __init__(self, cmd):
20 self._cmd = cmd
21 def __getattr__(self, name):
22 value = None
23 if self._cmd.has_key(name):
24 value = self._cmd[name]
25 return value
26
27def exec_cmd(cmd, cwd, ignore_error=False, input=None, strip=True, updateenv={}):
28 """
29 Input:
30
31 cmd: dict containing the following keys:
32
33 cmd : the command itself as an array of strings
34 ignore_error: if False, no exception is raised
35 strip: indicates if strip is done on the output (stdout and stderr)
36 input: input data to the command (stdin)
37 updateenv: environment variables to be appended to the current
38 process environment variables
39
40 NOTE: keys 'ignore_error' and 'input' are optional; if not included,
41 the defaults are the ones specify in the arguments
42 cwd: directory where commands are executed
43 ignore_error: raise CmdException if command fails to execute and
44 this value is False
45 input: input data (stdin) for the command
46
47 Output: dict containing the following keys:
48
49 cmd: the same as input
50 ignore_error: the same as input
51 strip: the same as input
52 input: the same as input
53 stdout: Standard output after command's execution
54 stderr: Standard error after command's execution
55 returncode: Return code after command's execution
56
57 """
58 cmddefaults = {
59 'cmd':'',
60 'ignore_error':ignore_error,
61 'strip':strip,
62 'input':input,
63 'updateenv':updateenv,
64 }
65
66 # update input values if necessary
67 cmddefaults.update(cmd)
68
69 _cmd = cmddefaults
70
71 if not _cmd['cmd']:
72 raise CmdException({'cmd':None, 'stderr':'no command given'})
73
74 # update the environment
75 env = os.environ
76 env.update(_cmd['updateenv'])
77
78 _command = [e for e in _cmd['cmd']]
79 p = subprocess.Popen(_command,
80 stdin=subprocess.PIPE,
81 stdout=subprocess.PIPE,
82 stderr=subprocess.PIPE,
83 universal_newlines=True,
84 cwd=cwd,
85 env=env)
86
87 # execute the command and strip output
88 (_stdout, _stderr) = p.communicate(_cmd['input'])
89 if _cmd['strip']:
90 _stdout, _stderr = map(str.strip, [_stdout, _stderr])
91
92 # generate the result
93 result = _cmd
94 result.update({'cmd':_command,'stdout':_stdout,'stderr':_stderr,'returncode':p.returncode})
95
96 # launch exception if necessary
97 if not _cmd['ignore_error'] and p.returncode:
98 raise CmdException(result)
99
100 return result
101
102def exec_cmds(cmds, cwd):
103 """ Executes commands
104
105 Input:
106 cmds: Array of commands
107 cwd: directory where commands are executed
108
109 Output: Array of output commands
110 """
111 results = []
112 _cmds = cmds
113
114 for cmd in _cmds:
115 result = exec_cmd(cmd, cwd)
116 results.append(result)
117
118 return results
119
120def logger_create(name): 17def logger_create(name):
121 logger = logging.getLogger(name) 18 logger = logging.getLogger(name)
122 loggerhandler = logging.StreamHandler() 19 loggerhandler = logging.StreamHandler()
@@ -125,20 +22,6 @@ def logger_create(name):
125 logger.setLevel(logging.INFO) 22 logger.setLevel(logging.INFO)
126 return logger 23 return logger
127 24
128def get_subject_prefix(path):
129 prefix = ""
130 mbox = mailbox.mbox(path)
131
132 if len(mbox):
133 subject = mbox[0]['subject']
134 if subject:
135 pattern = re.compile(r"(\[.*\])", re.DOTALL)
136 match = pattern.search(subject)
137 if match:
138 prefix = match.group(1)
139
140 return prefix
141
142def valid_branch(branch): 25def valid_branch(branch):
143 """ Check if branch is valid name """ 26 """ Check if branch is valid name """
144 lbranch = branch.lower() 27 lbranch = branch.lower()
@@ -153,7 +36,17 @@ def valid_branch(branch):
153 36
154def get_branch(path): 37def get_branch(path):
155 """ Get the branch name from mbox """ 38 """ Get the branch name from mbox """
156 fullprefix = get_subject_prefix(path) 39 fullprefix = ""
40 mbox = mailbox.mbox(path)
41
42 if len(mbox):
43 subject = mbox[0]['subject']
44 if subject:
45 pattern = re.compile(r"(\[.*\])", re.DOTALL)
46 match = pattern.search(subject)
47 if match:
48 fullprefix = match.group(1)
49
157 branch, branches, valid_branches = None, [], [] 50 branch, branches, valid_branches = None, [], []
158 51
159 if fullprefix: 52 if fullprefix: