Diffstat (limited to 'meta/lib/oe')
38 files changed, 10660 insertions, 554 deletions
diff --git a/meta/lib/oe/__init__.py b/meta/lib/oe/__init__.py
index 6eb536ad28..73de774266 100644
--- a/meta/lib/oe/__init__.py
+++ b/meta/lib/oe/__init__.py
@@ -7,6 +7,9 @@
7 | from pkgutil import extend_path | 7 | from pkgutil import extend_path |
8 | __path__ = extend_path(__path__, __name__) | 8 | __path__ = extend_path(__path__, __name__) |
9 | 9 | ||
10 | BBIMPORTS = ["data", "path", "utils", "types", "package", "packagedata", \ | 10 | # Modules with visitorcode need to go first, else anything depending on them won't be |
11 | "packagegroup", "sstatesig", "lsb", "cachedpath", "license", \ | 11 | # processed correctly (e.g. qa) |
12 | "qa", "reproducible", "rust", "buildcfg", "go"] | 12 | BBIMPORTS = ["qa", "data", "path", "utils", "types", "package", "packagedata", \ |
13 | "packagegroup", "sstatesig", "lsb", "cachedpath", "license", "qemu", \ | ||
14 | "reproducible", "rust", "buildcfg", "go", "spdx30_tasks", "spdx_common", \ | ||
15 | "cve_check", "tune"] | ||
diff --git a/meta/lib/oe/bootfiles.py b/meta/lib/oe/bootfiles.py
new file mode 100644
index 0000000000..7ee148c4e2
--- /dev/null
+++ b/meta/lib/oe/bootfiles.py
@@ -0,0 +1,57 @@
1 | # | ||
2 | # SPDX-License-Identifier: MIT | ||
3 | # | ||
4 | # Copyright (C) 2024 Marcus Folkesson | ||
5 | # Author: Marcus Folkesson <marcus.folkesson@gmail.com> | ||
6 | # | ||
7 | # Utility functions handling boot files | ||
8 | # | ||
9 | # Look into deploy_dir and search for boot_files. | ||
10 | # Returns a list of tuples with (original filepath relative to | ||
11 | # deploy_dir, desired destination filepath) | ||
12 | # | ||
13 | # Heavily inspired by bootimg_partition.py | ||
14 | # | ||
15 | def get_boot_files(deploy_dir, boot_files): | ||
16 | import re | ||
17 | import os | ||
18 | from glob import glob | ||
19 | |||
20 | if boot_files is None: | ||
21 | return None | ||
22 | |||
23 | # list of tuples (src_name, dst_name) | ||
24 | deploy_files = [] | ||
25 | for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files): | ||
26 | if ';' in src_entry: | ||
27 | dst_entry = tuple(src_entry.split(';')) | ||
28 | if not dst_entry[0] or not dst_entry[1]: | ||
29 | raise ValueError('Malformed boot file entry: %s' % src_entry) | ||
30 | else: | ||
31 | dst_entry = (src_entry, src_entry) | ||
32 | |||
33 | deploy_files.append(dst_entry) | ||
34 | |||
35 | install_files = [] | ||
36 | for deploy_entry in deploy_files: | ||
37 | src, dst = deploy_entry | ||
38 | if '*' in src: | ||
39 | # by default install files under their basename | ||
40 | entry_name_fn = os.path.basename | ||
41 | if dst != src: | ||
42 | # unless a target name was given, then treat name | ||
43 | # as a directory and append a basename | ||
44 | entry_name_fn = lambda name: \ | ||
45 | os.path.join(dst, | ||
46 | os.path.basename(name)) | ||
47 | |||
48 | srcs = glob(os.path.join(deploy_dir, src)) | ||
49 | |||
50 | for entry in srcs: | ||
51 | src = os.path.relpath(entry, deploy_dir) | ||
52 | entry_dst_name = entry_name_fn(entry) | ||
53 | install_files.append((src, entry_dst_name)) | ||
54 | else: | ||
55 | install_files.append((src, dst)) | ||
56 | |||
57 | return install_files | ||
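For orientation, a minimal usage sketch of get_boot_files(); the deploy directory path and the IMAGE_BOOT_FILES-style string below are purely illustrative:

# Hypothetical usage of oe.bootfiles.get_boot_files(); paths and the
# boot_files string are invented for illustration.
import oe.bootfiles

boot_files = "u-boot.bin zImage;kernel.img *.dtb;dtbs/"
installs = oe.bootfiles.get_boot_files("/path/to/deploy", boot_files)
# Expected shape, assuming foo.dtb exists in the deploy directory:
# [('u-boot.bin', 'u-boot.bin'), ('zImage', 'kernel.img'), ('foo.dtb', 'dtbs/foo.dtb')]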
diff --git a/meta/lib/oe/buildcfg.py b/meta/lib/oe/buildcfg.py
index 27b059b834..85b903fab0 100644
--- a/meta/lib/oe/buildcfg.py
+++ b/meta/lib/oe/buildcfg.py
@@ -17,21 +17,21 @@ def get_scmbasepath(d):
17 | def get_metadata_git_branch(path): | 17 | def get_metadata_git_branch(path): |
18 | try: | 18 | try: |
19 | rev, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', cwd=path) | 19 | rev, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', cwd=path) |
20 | except bb.process.ExecutionError: | 20 | except (bb.process.ExecutionError, bb.process.NotFoundError): |
21 | rev = '<unknown>' | 21 | rev = '<unknown>' |
22 | return rev.strip() | 22 | return rev.strip() |
23 | 23 | ||
24 | def get_metadata_git_revision(path): | 24 | def get_metadata_git_revision(path): |
25 | try: | 25 | try: |
26 | rev, _ = bb.process.run('git rev-parse HEAD', cwd=path) | 26 | rev, _ = bb.process.run('git rev-parse HEAD', cwd=path) |
27 | except bb.process.ExecutionError: | 27 | except (bb.process.ExecutionError, bb.process.NotFoundError): |
28 | rev = '<unknown>' | 28 | rev = '<unknown>' |
29 | return rev.strip() | 29 | return rev.strip() |
30 | 30 | ||
31 | def get_metadata_git_toplevel(path): | 31 | def get_metadata_git_toplevel(path): |
32 | try: | 32 | try: |
33 | toplevel, _ = bb.process.run('git rev-parse --show-toplevel', cwd=path) | 33 | toplevel, _ = bb.process.run('git rev-parse --show-toplevel', cwd=path) |
34 | except bb.process.ExecutionError: | 34 | except (bb.process.ExecutionError, bb.process.NotFoundError): |
35 | return "" | 35 | return "" |
36 | return toplevel.strip() | 36 | return toplevel.strip() |
37 | 37 | ||
@@ -39,21 +39,21 @@ def get_metadata_git_remotes(path):
39 | try: | 39 | try: |
40 | remotes_list, _ = bb.process.run('git remote', cwd=path) | 40 | remotes_list, _ = bb.process.run('git remote', cwd=path) |
41 | remotes = remotes_list.split() | 41 | remotes = remotes_list.split() |
42 | except bb.process.ExecutionError: | 42 | except (bb.process.ExecutionError, bb.process.NotFoundError): |
43 | remotes = [] | 43 | remotes = [] |
44 | return remotes | 44 | return remotes |
45 | 45 | ||
46 | def get_metadata_git_remote_url(path, remote): | 46 | def get_metadata_git_remote_url(path, remote): |
47 | try: | 47 | try: |
48 | uri, _ = bb.process.run('git remote get-url {remote}'.format(remote=remote), cwd=path) | 48 | uri, _ = bb.process.run('git remote get-url {remote}'.format(remote=remote), cwd=path) |
49 | except bb.process.ExecutionError: | 49 | except (bb.process.ExecutionError, bb.process.NotFoundError): |
50 | return "" | 50 | return "" |
51 | return uri.strip() | 51 | return uri.strip() |
52 | 52 | ||
53 | def get_metadata_git_describe(path): | 53 | def get_metadata_git_describe(path): |
54 | try: | 54 | try: |
55 | describe, _ = bb.process.run('git describe --tags', cwd=path) | 55 | describe, _ = bb.process.run('git describe --tags --dirty', cwd=path) |
56 | except bb.process.ExecutionError: | 56 | except (bb.process.ExecutionError, bb.process.NotFoundError): |
57 | return "" | 57 | return "" |
58 | return describe.strip() | 58 | return describe.strip() |
59 | 59 | ||
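The net effect is that these helpers now degrade gracefully when git itself is absent from the host, rather than only when a git command fails. A hedged sketch of the intended behaviour (layer path illustrative):

# Illustrative only: on a host without git in PATH, bb.process.run()
# raises bb.process.NotFoundError, which these helpers now catch.
import oe.buildcfg

oe.buildcfg.get_metadata_git_branch("/path/to/layer")    # '<unknown>' without git
oe.buildcfg.get_metadata_git_describe("/path/to/layer")  # '' without git
# With git available, describe now also flags local modifications thanks to
# the added --dirty option, e.g. 'yocto-5.1-42-gabcdef0-dirty'.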
diff --git a/meta/lib/oe/buildstats.py b/meta/lib/oe/buildstats.py
new file mode 100644
index 0000000000..2700245ec6
--- /dev/null
+++ b/meta/lib/oe/buildstats.py
@@ -0,0 +1,254 @@
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | # Implements system state sampling. Called by buildstats.bbclass. | ||
7 | # Because it is a real Python module, it can hold persistent state, | ||
8 | # like open log files and the time of the last sampling. | ||
9 | |||
10 | import time | ||
11 | import os, re  # 'os' added: os.path and os.write are used throughout below | ||
12 | import bb.event | ||
13 | from collections import deque | ||
14 | |||
15 | class SystemStats: | ||
16 | def __init__(self, d): | ||
17 | bn = d.getVar('BUILDNAME') | ||
18 | bsdir = os.path.join(d.getVar('BUILDSTATS_BASE'), bn) | ||
19 | bb.utils.mkdirhier(bsdir) | ||
20 | file_handlers = [('diskstats', self._reduce_diskstats), | ||
21 | ('meminfo', self._reduce_meminfo), | ||
22 | ('stat', self._reduce_stat), | ||
23 | ('net/dev', self._reduce_net)] | ||
24 | |||
25 | # Some hosts like openSUSE have readable /proc/pressure files | ||
26 | # but throw errors when these files are opened. Catch these errors | ||
27 | # and ensure that the reduced_proc_pressure directory is not created. | ||
28 | if os.path.exists("/proc/pressure"): | ||
29 | try: | ||
30 | with open('/proc/pressure/cpu', 'rb') as source: | ||
31 | source.read() | ||
32 | pressuredir = os.path.join(bsdir, 'reduced_proc_pressure') | ||
33 | bb.utils.mkdirhier(pressuredir) | ||
34 | file_handlers.extend([('pressure/cpu', self._reduce_pressure), | ||
35 | ('pressure/io', self._reduce_pressure), | ||
36 | ('pressure/memory', self._reduce_pressure)]) | ||
37 | except Exception: | ||
38 | pass | ||
39 | |||
40 | self.proc_files = [] | ||
41 | for filename, handler in (file_handlers): | ||
42 | # The corresponding /proc files might not exist on the host. | ||
43 | # For example, /proc/diskstats is not available in virtualized | ||
44 | # environments like Linux-VServer. Silently skip collecting | ||
45 | # the data. | ||
46 | if os.path.exists(os.path.join('/proc', filename)): | ||
47 | # In practice, this class gets instantiated only once in | ||
48 | # the bitbake cooker process. Therefore 'append' mode is | ||
49 | # not strictly necessary, but using it makes the class | ||
50 | # more robust should two processes ever write | ||
51 | # concurrently. | ||
52 | if filename == 'net/dev': | ||
53 | destfile = os.path.join(bsdir, 'reduced_proc_net.log') | ||
54 | else: | ||
55 | destfile = os.path.join(bsdir, '%sproc_%s.log' % ('reduced_' if handler else '', filename)) | ||
56 | self.proc_files.append((filename, open(destfile, 'ab'), handler)) | ||
57 | self.monitor_disk = open(os.path.join(bsdir, 'monitor_disk.log'), 'ab') | ||
58 | # Last time we sampled /proc data and recorded disk monitoring data, respectively. | ||
59 | self.last_proc = 0 | ||
60 | self.last_disk_monitor = 0 | ||
61 | # Minimum number of seconds between recording a sample. This becomes relevant when we get | ||
62 | # called very often while many short tasks get started. Sampling during quiet periods | ||
63 | # depends on the heartbeat event, which fires less often. | ||
64 | # By default, the Heartbeat events occur roughly once every second but the actual time | ||
65 | # between these events deviates by a few milliseconds, in most cases. Hence | ||
66 | # pick a somewhat arbitrary tolerance such that we sample a large majority | ||
67 | # of the Heartbeat events. This ignores rare events that fall outside the minimum | ||
68 | # and may lead to an extra sample in a given second every so often. However, it allows for fairly | ||
69 | # consistent intervals between samples without missing many events. | ||
70 | self.tolerance = 0.01 | ||
71 | self.min_seconds = 1.0 - self.tolerance | ||
72 | |||
73 | self.meminfo_regex = re.compile(rb'^(MemTotal|MemFree|Buffers|Cached|SwapTotal|SwapFree):\s*(\d+)') | ||
74 | self.diskstats_regex = re.compile(rb'^([hsv]d.|mtdblock\d|mmcblk\d|cciss/c\d+d\d+|nvme\d+n\d+.*)$') | ||
75 | self.diskstats_ltime = None | ||
76 | self.diskstats_data = None | ||
77 | self.stat_ltimes = None | ||
78 | # Last time we sampled /proc/pressure. All resources stored in a single dict with the key as filename | ||
79 | self.last_pressure = {"pressure/cpu": None, "pressure/io": None, "pressure/memory": None} | ||
80 | self.net_stats = {} | ||
81 | |||
82 | def close(self): | ||
83 | self.monitor_disk.close() | ||
84 | for _, output, _ in self.proc_files: | ||
85 | output.close() | ||
86 | |||
87 | def _reduce_meminfo(self, time, data, filename): | ||
88 | """ | ||
89 | Extracts 'MemTotal', 'MemFree', 'Buffers', 'Cached', 'SwapTotal', 'SwapFree' | ||
90 | and writes their values into a single line, in that order. | ||
91 | """ | ||
92 | values = {} | ||
93 | for line in data.split(b'\n'): | ||
94 | m = self.meminfo_regex.match(line) | ||
95 | if m: | ||
96 | values[m.group(1)] = m.group(2) | ||
97 | if len(values) == 6: | ||
98 | return (time, | ||
99 | b' '.join([values[x] for x in | ||
100 | (b'MemTotal', b'MemFree', b'Buffers', b'Cached', b'SwapTotal', b'SwapFree')]) + b'\n') | ||
101 | |||
102 | def _reduce_net(self, time, data, filename): | ||
103 | data = data.split(b'\n') | ||
104 | for line in data[2:]: | ||
105 | if b":" not in line: | ||
106 | continue | ||
107 | try: | ||
108 | parts = line.split() | ||
109 | iface = (parts[0].strip(b':')).decode('ascii') | ||
110 | receive_bytes = int(parts[1]) | ||
111 | transmit_bytes = int(parts[9]) | ||
112 | except Exception: | ||
113 | continue | ||
114 | |||
115 | if iface not in self.net_stats: | ||
116 | self.net_stats[iface] = deque(maxlen=2) | ||
117 | self.net_stats[iface].append((receive_bytes, transmit_bytes, 0, 0)) | ||
118 | prev = self.net_stats[iface][-1] if self.net_stats[iface] else (0, 0, 0, 0) | ||
119 | receive_diff = receive_bytes - prev[0] | ||
120 | transmit_diff = transmit_bytes - prev[1] | ||
121 | self.net_stats[iface].append(( | ||
122 | receive_bytes, | ||
123 | transmit_bytes, | ||
124 | receive_diff, | ||
125 | transmit_diff | ||
126 | )) | ||
127 | |||
128 | result_str = "\n".join( | ||
129 | f"{iface}: {net_data[-1][0]} {net_data[-1][1]} {net_data[-1][2]} {net_data[-1][3]}" | ||
130 | for iface, net_data in self.net_stats.items() | ||
131 | ) + "\n" | ||
132 | |||
133 | return time, result_str.encode('ascii') | ||
134 | |||
135 | def _diskstats_is_relevant_line(self, linetokens): | ||
136 | if len(linetokens) < 14: | ||
137 | return False | ||
138 | disk = linetokens[2] | ||
139 | return self.diskstats_regex.match(disk) | ||
140 | |||
141 | def _reduce_diskstats(self, time, data, filename): | ||
142 | relevant_tokens = filter(self._diskstats_is_relevant_line, map(lambda x: x.split(), data.split(b'\n'))) | ||
143 | diskdata = [0] * 3 | ||
144 | reduced = None | ||
145 | for tokens in relevant_tokens: | ||
146 | # rsect | ||
147 | diskdata[0] += int(tokens[5]) | ||
148 | # wsect | ||
149 | diskdata[1] += int(tokens[9]) | ||
150 | # use | ||
151 | diskdata[2] += int(tokens[12]) | ||
152 | if self.diskstats_ltime: | ||
153 | # We need to compute information about the time interval | ||
154 | # since the last sampling and record the result as sample | ||
155 | # for that point in the past. | ||
156 | interval = time - self.diskstats_ltime | ||
157 | if interval > 0: | ||
158 | sums = [ a - b for a, b in zip(diskdata, self.diskstats_data) ] | ||
159 | readTput = sums[0] / 2.0 * 100.0 / interval | ||
160 | writeTput = sums[1] / 2.0 * 100.0 / interval | ||
161 | util = float( sums[2] ) / 10 / interval | ||
162 | util = max(0.0, min(1.0, util)) | ||
163 | reduced = (self.diskstats_ltime, (readTput, writeTput, util)) | ||
164 | |||
165 | self.diskstats_ltime = time | ||
166 | self.diskstats_data = diskdata | ||
167 | return reduced | ||
168 | |||
169 | |||
170 | def _reduce_nop(self, time, data, filename): | ||
171 | return (time, data) | ||
172 | |||
173 | def _reduce_stat(self, time, data, filename): | ||
174 | if not data: | ||
175 | return None | ||
176 | # CPU times {user, nice, system, idle, io_wait, irq, softirq} from first line | ||
177 | tokens = data.split(b'\n', 1)[0].split() | ||
178 | times = [ int(token) for token in tokens[1:] ] | ||
179 | reduced = None | ||
180 | if self.stat_ltimes: | ||
181 | user = float((times[0] + times[1]) - (self.stat_ltimes[0] + self.stat_ltimes[1])) | ||
182 | system = float((times[2] + times[5] + times[6]) - (self.stat_ltimes[2] + self.stat_ltimes[5] + self.stat_ltimes[6])) | ||
183 | idle = float(times[3] - self.stat_ltimes[3]) | ||
184 | iowait = float(times[4] - self.stat_ltimes[4]) | ||
185 | |||
186 | aSum = max(user + system + idle + iowait, 1) | ||
187 | reduced = (time, (user/aSum, system/aSum, iowait/aSum)) | ||
188 | |||
189 | self.stat_ltimes = times | ||
190 | return reduced | ||
191 | |||
192 | def _reduce_pressure(self, time, data, filename): | ||
193 | """ | ||
194 | Return reduced pressure: {avg10, avg60, avg300} and delta total compared to the previous sample | ||
195 | for the cpu, io and memory resources. A common function is used for all 3 resources since the | ||
196 | format of the /proc/pressure file is the same in each case. | ||
197 | """ | ||
198 | if not data: | ||
199 | return None | ||
200 | tokens = data.split(b'\n', 1)[0].split() | ||
201 | avg10 = float(tokens[1].split(b'=')[1]) | ||
202 | avg60 = float(tokens[2].split(b'=')[1]) | ||
203 | avg300 = float(tokens[3].split(b'=')[1]) | ||
204 | total = int(tokens[4].split(b'=')[1]) | ||
205 | |||
206 | reduced = None | ||
207 | if self.last_pressure[filename]: | ||
208 | delta = total - self.last_pressure[filename] | ||
209 | reduced = (time, (avg10, avg60, avg300, delta)) | ||
210 | self.last_pressure[filename] = total | ||
211 | return reduced | ||
212 | |||
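For reference, an illustrative /proc/pressure line and how the tokenising above picks it apart (values invented):

# Illustrative first line of /proc/pressure/cpu, as consumed by _reduce_pressure():
sample = b"some avg10=0.00 avg60=0.12 avg300=0.05 total=123456"
tokens = sample.split(b'\n', 1)[0].split()
avg10 = float(tokens[1].split(b'=')[1])  # 0.0
total = int(tokens[4].split(b'=')[1])    # 123456, used for the delta between samples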
213 | def sample(self, event, force): | ||
214 | """ | ||
215 | Collect and log proc or disk_monitor stats periodically. | ||
216 | Return True if a new sample is collected and hence the value last_proc or last_disk_monitor | ||
217 | is changed. | ||
218 | """ | ||
219 | retval = False | ||
220 | now = time.time() | ||
221 | if (now - self.last_proc > self.min_seconds) or force: | ||
222 | for filename, output, handler in self.proc_files: | ||
223 | with open(os.path.join('/proc', filename), 'rb') as input: | ||
224 | data = input.read() | ||
225 | if handler: | ||
226 | reduced = handler(now, data, filename) | ||
227 | else: | ||
228 | reduced = (now, data) | ||
229 | if reduced: | ||
230 | if isinstance(reduced[1], bytes): | ||
231 | # Use as it is. | ||
232 | data = reduced[1] | ||
233 | else: | ||
234 | # Convert to a single line. | ||
235 | data = (' '.join([str(x) for x in reduced[1]]) + '\n').encode('ascii') | ||
236 | # Unbuffered raw write, less overhead and useful | ||
237 | # in case we end up with concurrent writes. | ||
238 | os.write(output.fileno(), | ||
239 | ('%.0f\n' % reduced[0]).encode('ascii') + | ||
240 | data + | ||
241 | b'\n') | ||
242 | self.last_proc = now | ||
243 | retval = True | ||
244 | |||
245 | if isinstance(event, bb.event.MonitorDiskEvent) and \ | ||
246 | ((now - self.last_disk_monitor > self.min_seconds) or force): | ||
247 | os.write(self.monitor_disk.fileno(), | ||
248 | ('%.0f\n' % now).encode('ascii') + | ||
249 | ''.join(['%s: %d\n' % (dev, sample.total_bytes - sample.free_bytes) | ||
250 | for dev, sample in event.disk_usage.items()]).encode('ascii') + | ||
251 | b'\n') | ||
252 | self.last_disk_monitor = now | ||
253 | retval = True | ||
254 | return retval | ||
\ No newline at end of file
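A hedged sketch of how buildstats.bbclass is expected to drive this class; the datastore and event source below are placeholders:

# Hypothetical driver: construct SystemStats once, feed it events, close at exit.
from oe.buildstats import SystemStats

stats = SystemStats(d)            # 'd' is the usual BitBake datastore (placeholder)
for event in incoming_events:     # e.g. heartbeat or MonitorDiskEvent objects (placeholder)
    stats.sample(event, force=False)  # rate-limited to roughly one sample per second
stats.close()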
diff --git a/meta/lib/oe/cachedpath.py b/meta/lib/oe/cachedpath.py
index 0138b791d4..68c85807d9 100644
--- a/meta/lib/oe/cachedpath.py
+++ b/meta/lib/oe/cachedpath.py
@@ -111,9 +111,13 @@ class CachedPath(object):
111 | return True | 111 | return True |
112 | return False | 112 | return False |
113 | 113 | ||
114 | # WARNING - this is not currently a drop-in replacement since it returns False | ||
115 | # rather than raising exceptions. | ||
114 | def stat(self, path): | 116 | def stat(self, path): |
115 | return self.callstat(path) | 117 | return self.callstat(path) |
116 | 118 | ||
119 | # WARNING - this is not currently a drop-in replacement since it returns False | ||
120 | # rather than raising exceptions. | ||
117 | def lstat(self, path): | 121 | def lstat(self, path): |
118 | return self.calllstat(path) | 122 | return self.calllstat(path) |
119 | 123 | ||
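To make the warning concrete, a small sketch contrasting the two behaviours:

# Illustrative: os.stat() raises on a missing path, CachedPath.stat() returns False.
import os
from oe.cachedpath import CachedPath

cp = CachedPath()
print(cp.stat("/nonexistent"))   # False, no exception raised
try:
    os.stat("/nonexistent")
except FileNotFoundError:
    print("os.stat raised")      # the standard-library behaviour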
diff --git a/meta/lib/oe/classextend.py b/meta/lib/oe/classextend.py
index 5161d33d2d..8ae5d3b715 100644
--- a/meta/lib/oe/classextend.py
+++ b/meta/lib/oe/classextend.py
@@ -33,7 +33,7 @@ class ClassExtender(object):
33 | name = name.replace("-" + self.extname, "") | 33 | name = name.replace("-" + self.extname, "") |
34 | if name.startswith("virtual/"): | 34 | if name.startswith("virtual/"): |
35 | # Assume a large number of dashes means a triplet is present and we don't need to convert | 35 | # Assume a large number of dashes means a triplet is present and we don't need to convert |
36 | if name.count("-") >= 3 and name.endswith(("-go", "-binutils", "-gcc", "-g++")): | 36 | if name.count("-") >= 3 and name.endswith(("-go",)): |
37 | return name | 37 | return name |
38 | subs = name.split("/", 1)[1] | 38 | subs = name.split("/", 1)[1] |
39 | if not subs.startswith(self.extname): | 39 | if not subs.startswith(self.extname): |
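A sketch of what the narrowed check means in practice (provider names illustrative):

# Illustrative: only virtual/ names ending in '-go' are now left untouched.
name = "virtual/aarch64-poky-linux-go"
if name.count("-") >= 3 and name.endswith(("-go",)):
    print("kept as-is:", name)   # treated as an already-qualified triplet
# 'virtual/aarch64-poky-linux-gcc' no longer matches and is converted normally.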
diff --git a/meta/lib/oe/copy_buildsystem.py b/meta/lib/oe/copy_buildsystem.py
index 81abfbf9e2..ced751b835 100644
--- a/meta/lib/oe/copy_buildsystem.py
+++ b/meta/lib/oe/copy_buildsystem.py
@@ -193,13 +193,17 @@ def prune_lockedsigs(excluded_tasks, excluded_targets, lockedsigs, onlynative, p
193 | else: | 193 | else: |
194 | f.write(line) | 194 | f.write(line) |
195 | invalue = False | 195 | invalue = False |
196 | elif line.startswith('SIGGEN_LOCKEDSIGS'): | 196 | elif line.startswith('SIGGEN_LOCKEDSIGS_t'): |
197 | invalue = True | 197 | invalue = True |
198 | f.write(line) | 198 | f.write(line) |
199 | else: | ||
200 | invalue = False | ||
201 | f.write(line) | ||
199 | 202 | ||
200 | def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_output, copy_output=None): | 203 | def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_output, copy_output=None): |
201 | merged = {} | 204 | merged = {} |
202 | arch_order = [] | 205 | arch_order = [] |
206 | otherdata = [] | ||
203 | with open(lockedsigs_main, 'r') as f: | 207 | with open(lockedsigs_main, 'r') as f: |
204 | invalue = None | 208 | invalue = None |
205 | for line in f: | 209 | for line in f: |
@@ -212,6 +216,9 @@ def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_outpu
212 | invalue = line[18:].split('=', 1)[0].rstrip() | 216 | invalue = line[18:].split('=', 1)[0].rstrip() |
213 | merged[invalue] = [] | 217 | merged[invalue] = [] |
214 | arch_order.append(invalue) | 218 | arch_order.append(invalue) |
219 | else: | ||
220 | invalue = None | ||
221 | otherdata.append(line) | ||
215 | 222 | ||
216 | with open(lockedsigs_extra, 'r') as f: | 223 | with open(lockedsigs_extra, 'r') as f: |
217 | invalue = None | 224 | invalue = None |
@@ -246,6 +253,7 @@ def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_outpu
246 | f.write(' "\n') | 253 | f.write(' "\n') |
247 | fulltypes.append(typename) | 254 | fulltypes.append(typename) |
248 | f.write('SIGGEN_LOCKEDSIGS_TYPES = "%s"\n' % ' '.join(fulltypes)) | 255 | f.write('SIGGEN_LOCKEDSIGS_TYPES = "%s"\n' % ' '.join(fulltypes)) |
256 | f.write('\n' + ''.join(otherdata)) | ||
249 | 257 | ||
250 | if copy_output: | 258 | if copy_output: |
251 | write_sigs_file(copy_output, list(tocopy.keys()), tocopy) | 259 | write_sigs_file(copy_output, list(tocopy.keys()), tocopy) |
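For context, an illustrative locked-sigs.inc: the SIGGEN_LOCKEDSIGS_t-* blocks are what gets pruned or merged, while any other assignments (such as the final line here) are now carried through as 'otherdata' instead of being dropped:

# Illustrative locked-sigs.inc content (hashes invented):
SIGGEN_LOCKEDSIGS_t-core2-64 = "\
    gzip:do_fetch:0123456789abcdef \
    "
SIGGEN_LOCKEDSIGS_TYPES = "t-core2-64"
SIGGEN_LOCKEDSIGS_TASKSIG_CHECK = "warn"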
diff --git a/meta/lib/oe/cve_check.py b/meta/lib/oe/cve_check.py
index ed5c714cb8..ae194f27cf 100644
--- a/meta/lib/oe/cve_check.py
+++ b/meta/lib/oe/cve_check.py
@@ -5,9 +5,11 @@
5 | # | 5 | # |
6 | 6 | ||
7 | import collections | 7 | import collections |
8 | import re | ||
9 | import itertools | ||
10 | import functools | 8 | import functools |
9 | import itertools | ||
10 | import os.path | ||
11 | import re | ||
12 | import oe.patch | ||
11 | 13 | ||
12 | _Version = collections.namedtuple( | 14 | _Version = collections.namedtuple( |
13 | "_Version", ["release", "patch_l", "pre_l", "pre_v"] | 15 | "_Version", ["release", "patch_l", "pre_l", "pre_v"] |
@@ -71,71 +73,134 @@ def _cmpkey(release, patch_l, pre_l, pre_v):
71 | return _release, _patch, _pre | 73 | return _release, _patch, _pre |
72 | 74 | ||
73 | 75 | ||
74 | def get_patched_cves(d): | 76 | def parse_cve_from_filename(patch_filename): |
75 | """ | 77 | """ |
76 | Get patches that solve CVEs using the "CVE: " tag. | 78 | Parses CVE ID from the filename |
79 | |||
80 | Matches the last "CVE-YYYY-ID" in the file name, also if written | ||
81 | in lowercase. Possible to have multiple CVE IDs in a single | ||
82 | file name, but only the last one will be detected from the file name. | ||
83 | |||
84 | Returns the last CVE ID found in the filename. If no CVE ID is found | ||
85 | an empty string is returned. | ||
77 | """ | 86 | """ |
87 | cve_file_name_match = re.compile(r".*(CVE-\d{4}-\d{4,})", re.IGNORECASE) | ||
78 | 88 | ||
79 | import re | 89 | # Check patch file name for CVE ID |
80 | import oe.patch | 90 | fname_match = cve_file_name_match.search(patch_filename) |
91 | return fname_match.group(1).upper() if fname_match else "" | ||
81 | 92 | ||
82 | cve_match = re.compile(r"CVE:( CVE-\d{4}-\d+)+") | ||
83 | 93 | ||
84 | # Matches the last "CVE-YYYY-ID" in the file name, also if written | 94 | def parse_cves_from_patch_contents(patch_contents): |
85 | # in lowercase. Possible to have multiple CVE IDs in a single | 95 | """ |
86 | # file name, but only the last one will be detected from the file name. | 96 | Parses CVE IDs from patch contents |
87 | # However, patch files contents addressing multiple CVE IDs are supported | ||
88 | # (cve_match regular expression) | ||
89 | cve_file_name_match = re.compile(r".*(CVE-\d{4}-\d+)", re.IGNORECASE) | ||
90 | 97 | ||
91 | patched_cves = set() | 98 | Matches all CVE IDs contained on a line that starts with "CVE: ". Any |
92 | patches = oe.patch.src_patches(d) | 99 | delimiter (',', '&', "and", etc.) can be used without any issues. Multiple |
93 | bb.debug(2, "Scanning %d patches for CVEs" % len(patches)) | 100 | "CVE:" lines can also exist. |
94 | for url in patches: | ||
95 | patch_file = bb.fetch.decodeurl(url)[2] | ||
96 | 101 | ||
97 | # Check patch file name for CVE ID | 102 | Returns a set of all CVE IDs found in the patch contents. |
98 | fname_match = cve_file_name_match.search(patch_file) | 103 | """ |
99 | if fname_match: | 104 | cve_ids = set() |
100 | cve = fname_match.group(1).upper() | 105 | cve_match = re.compile(r"CVE-\d{4}-\d{4,}") |
101 | patched_cves.add(cve) | 106 | # Search for one or more "CVE: " lines |
102 | bb.debug(2, "Found %s from patch file name %s" % (cve, patch_file)) | 107 | for line in patch_contents.split("\n"): |
103 | 108 | if not line.startswith("CVE:"): | |
104 | # Remote patches won't be present and compressed patches won't be | ||
105 | # unpacked, so say we're not scanning them | ||
106 | if not os.path.isfile(patch_file): | ||
107 | bb.note("%s is remote or compressed, not scanning content" % patch_file) | ||
108 | continue | 109 | continue |
110 | cve_ids.update(cve_match.findall(line)) | ||
111 | return cve_ids | ||
112 | |||
109 | 113 | ||
110 | with open(patch_file, "r", encoding="utf-8") as f: | 114 | def parse_cves_from_patch_file(patch_file): |
111 | try: | 115 | """ |
116 | Parses CVE IDs associated with a particular patch file, using both the filename | ||
117 | and patch contents. | ||
118 | |||
119 | Returns a set of all CVE IDs found in the patch filename and contents. | ||
120 | """ | ||
121 | cve_ids = set() | ||
122 | filename_cve = parse_cve_from_filename(patch_file) | ||
123 | if filename_cve: | ||
124 | bb.debug(2, "Found %s from patch file name %s" % (filename_cve, patch_file)) | ||
125 | cve_ids.add(filename_cve)  # reuse the already-parsed result | ||
126 | |||
127 | # Remote patches won't be present and compressed patches won't be | ||
128 | # unpacked, so say we're not scanning them | ||
129 | if not os.path.isfile(patch_file): | ||
130 | bb.note("%s is remote or compressed, not scanning content" % patch_file) | ||
131 | return cve_ids | ||
132 | |||
133 | with open(patch_file, "r", encoding="utf-8") as f: | ||
134 | try: | ||
135 | patch_text = f.read() | ||
136 | except UnicodeDecodeError: | ||
137 | bb.debug( | ||
138 | 1, | ||
139 | "Failed to read patch %s using UTF-8 encoding" | ||
140 | " trying with iso8859-1" % patch_file, | ||
141 | ) | ||
142 | f.close() | ||
143 | with open(patch_file, "r", encoding="iso8859-1") as f: | ||
112 | patch_text = f.read() | 144 | patch_text = f.read() |
113 | except UnicodeDecodeError: | 145 | |
114 | bb.debug(1, "Failed to read patch %s using UTF-8 encoding" | 146 | cve_ids.update(parse_cves_from_patch_contents(patch_text)) |
115 | " trying with iso8859-1" % patch_file) | 147 | |
116 | f.close() | 148 | if not cve_ids: |
117 | with open(patch_file, "r", encoding="iso8859-1") as f: | 149 | bb.debug(2, "Patch %s doesn't solve CVEs" % patch_file) |
118 | patch_text = f.read() | 150 | else: |
119 | 151 | bb.debug(2, "Patch %s solves %s" % (patch_file, ", ".join(sorted(cve_ids)))) | |
120 | # Search for one or more "CVE: " lines | 152 | |
121 | text_match = False | 153 | return cve_ids |
122 | for match in cve_match.finditer(patch_text): | 154 | |
123 | # Get only the CVEs without the "CVE: " tag | 155 | |
124 | cves = patch_text[match.start()+5:match.end()] | 156 | @bb.parse.vardeps("CVE_STATUS") |
125 | for cve in cves.split(): | 157 | def get_patched_cves(d): |
126 | bb.debug(2, "Patch %s solves %s" % (patch_file, cve)) | 158 | """ |
127 | patched_cves.add(cve) | 159 | Determines the CVE IDs that have been solved by either patches included within |
128 | text_match = True | 160 | SRC_URI or by setting CVE_STATUS. |
129 | 161 | ||
130 | if not fname_match and not text_match: | 162 | Returns a dictionary with the CVE IDs as keys and an associated dictionary of |
131 | bb.debug(2, "Patch %s doesn't solve CVEs" % patch_file) | 163 | relevant metadata as the value. |
164 | """ | ||
165 | patched_cves = {} | ||
166 | patches = oe.patch.src_patches(d) | ||
167 | bb.debug(2, "Scanning %d patches for CVEs" % len(patches)) | ||
168 | |||
169 | # Check each patch file | ||
170 | for url in patches: | ||
171 | patch_file = bb.fetch.decodeurl(url)[2] | ||
172 | for cve_id in parse_cves_from_patch_file(patch_file): | ||
173 | if cve_id not in patched_cves: | ||
174 | patched_cves[cve_id] = { | ||
175 | "abbrev-status": "Patched", | ||
176 | "status": "fix-file-included", | ||
177 | "resource": [patch_file], | ||
178 | } | ||
179 | else: | ||
180 | patched_cves[cve_id]["resource"].append(patch_file) | ||
132 | 181 | ||
133 | # Search for additional patched CVEs | 182 | # Search for additional patched CVEs |
134 | for cve in (d.getVarFlags("CVE_STATUS") or {}): | 183 | for cve_id in d.getVarFlags("CVE_STATUS") or {}: |
135 | decoded_status, _, _ = decode_cve_status(d, cve) | 184 | decoded_status = decode_cve_status(d, cve_id) |
136 | if decoded_status == "Patched": | 185 | products = d.getVar("CVE_PRODUCT") |
137 | bb.debug(2, "CVE %s is additionally patched" % cve) | 186 | if has_cve_product_match(decoded_status, products): |
138 | patched_cves.add(cve) | 187 | if cve_id in patched_cves: |
188 | bb.warn( | ||
189 | 'CVE_STATUS[%s] = "%s" is overwriting previous status of "%s: %s"' | ||
190 | % ( | ||
191 | cve_id, | ||
192 | d.getVarFlag("CVE_STATUS", cve_id), | ||
193 | patched_cves[cve_id]["abbrev-status"], | ||
194 | patched_cves[cve_id]["status"], | ||
195 | ) | ||
196 | ) | ||
197 | patched_cves[cve_id] = { | ||
198 | "abbrev-status": decoded_status["mapping"], | ||
199 | "status": decoded_status["detail"], | ||
200 | "justification": decoded_status["description"], | ||
201 | "affected-vendor": decoded_status["vendor"], | ||
202 | "affected-product": decoded_status["product"], | ||
203 | } | ||
139 | 204 | ||
140 | return patched_cves | 205 | return patched_cves |
141 | 206 | ||
@@ -225,21 +290,89 @@ def convert_cve_version(version):
225 | 290 | ||
226 | return version + update | 291 | return version + update |
227 | 292 | ||
293 | @bb.parse.vardeps("CVE_STATUS", "CVE_CHECK_STATUSMAP") | ||
228 | def decode_cve_status(d, cve): | 294 | def decode_cve_status(d, cve): |
229 | """ | 295 | """ |
230 | Convert CVE_STATUS into status, detail and description. | 296 | Convert CVE_STATUS into status, vendor, product, detail and description. |
231 | """ | 297 | """ |
232 | status = d.getVarFlag("CVE_STATUS", cve) | 298 | status = d.getVarFlag("CVE_STATUS", cve) |
233 | if not status: | 299 | if not status: |
234 | return ("", "", "") | 300 | return {} |
301 | |||
302 | status_split = status.split(':', 4) | ||
303 | status_out = {} | ||
304 | status_out["detail"] = status_split[0] | ||
305 | product = "*" | ||
306 | vendor = "*" | ||
307 | description = "" | ||
308 | if len(status_split) == 5 and status_split[1].strip() == "cpe": | ||
309 | # Both vendor and product are mandatory if cpe: present, the syntax is then: | ||
310 | # detail: cpe:vendor:product:description | ||
311 | vendor = status_split[2].strip() | ||
312 | product = status_split[3].strip() | ||
313 | description = status_split[4].strip() | ||
314 | elif len(status_split) >= 2 and status_split[1].strip() == "cpe": | ||
315 | # Malformed CPE | ||
316 | bb.warn( | ||
317 | 'Invalid CPE information for CVE_STATUS[%s] = "%s", not setting CPE' | ||
318 | % (cve, status) | ||
319 | ) | ||
320 | else: | ||
321 | # Other case: no CPE, the syntax is then: | ||
322 | # detail: description | ||
323 | description = status.split(':', 1)[1].strip() if (len(status_split) > 1) else "" | ||
235 | 324 | ||
236 | status_split = status.split(':', 1) | 325 | status_out["vendor"] = vendor |
237 | detail = status_split[0] | 326 | status_out["product"] = product |
238 | description = status_split[1].strip() if (len(status_split) > 1) else "" | 327 | status_out["description"] = description |
239 | 328 | ||
329 | detail = status_out["detail"] | ||
240 | status_mapping = d.getVarFlag("CVE_CHECK_STATUSMAP", detail) | 330 | status_mapping = d.getVarFlag("CVE_CHECK_STATUSMAP", detail) |
241 | if status_mapping is None: | 331 | if status_mapping is None: |
242 | bb.warn('Invalid detail "%s" for CVE_STATUS[%s] = "%s", fallback to Unpatched' % (detail, cve, status)) | 332 | bb.warn( |
333 | 'Invalid detail "%s" for CVE_STATUS[%s] = "%s", fallback to Unpatched' | ||
334 | % (detail, cve, status) | ||
335 | ) | ||
243 | status_mapping = "Unpatched" | 336 | status_mapping = "Unpatched" |
337 | status_out["mapping"] = status_mapping | ||
338 | |||
339 | return status_out | ||
244 | 340 | ||
245 | return (status_mapping, detail, description) | 341 | def has_cve_product_match(detailed_status, products): |
342 | """ | ||
343 | Check product/vendor match between detailed_status from decode_cve_status and a string of | ||
344 | products (like from CVE_PRODUCT) | ||
345 | """ | ||
346 | for product in products.split(): | ||
347 | vendor = "*" | ||
348 | if ":" in product: | ||
349 | vendor, product = product.split(":", 1) | ||
350 | |||
351 | if (vendor == detailed_status["vendor"] or detailed_status["vendor"] == "*") and \ | ||
352 | (product == detailed_status["product"] or detailed_status["product"] == "*"): | ||
353 | return True | ||
354 | |||
355 | # If no match, return False | ||
356 | return False | ||
357 | |||
358 | def extend_cve_status(d): | ||
359 | # do this only once in case multiple classes use this | ||
360 | if d.getVar("CVE_STATUS_EXTENDED"): | ||
361 | return | ||
362 | d.setVar("CVE_STATUS_EXTENDED", "1") | ||
363 | |||
364 | # Fallback all CVEs from CVE_CHECK_IGNORE to CVE_STATUS | ||
365 | cve_check_ignore = d.getVar("CVE_CHECK_IGNORE") | ||
366 | if cve_check_ignore: | ||
367 | bb.warn("CVE_CHECK_IGNORE is deprecated in favor of CVE_STATUS") | ||
368 | for cve in (d.getVar("CVE_CHECK_IGNORE") or "").split(): | ||
369 | d.setVarFlag("CVE_STATUS", cve, "ignored") | ||
370 | |||
371 | # Process CVE_STATUS_GROUPS to set multiple statuses and optional detail or description at once | ||
372 | for cve_status_group in (d.getVar("CVE_STATUS_GROUPS") or "").split(): | ||
373 | cve_group = d.getVar(cve_status_group) | ||
374 | if cve_group is not None: | ||
375 | for cve in cve_group.split(): | ||
376 | d.setVarFlag("CVE_STATUS", cve, d.getVarFlag(cve_status_group, "status")) | ||
377 | else: | ||
378 | bb.warn("CVE_STATUS_GROUPS contains undefined variable %s" % cve_status_group) | ||
diff --git a/meta/lib/oe/elf.py b/meta/lib/oe/elf.py
index eab2349a4f..9794453092 100644
--- a/meta/lib/oe/elf.py
+++ b/meta/lib/oe/elf.py
@@ -5,141 +5,144 @@
5 | # | 5 | # |
6 | 6 | ||
7 | def machine_dict(d): | 7 | def machine_dict(d): |
8 | # TARGET_OS TARGET_ARCH MACHINE, OSABI, ABIVERSION, Little Endian, 32bit? | 8 | # Generating this data is slow, so cache it |
9 | machdata = { | 9 | if not hasattr(machine_dict, "machdata"): |
10 | "darwin9" : { | 10 | machine_dict.machdata = { |
11 | "arm" : (40, 0, 0, True, 32), | 11 | # TARGET_OS TARGET_ARCH MACHINE, OSABI, ABIVERSION, Little Endian, 32bit? |
12 | }, | 12 | "darwin9" : { |
13 | "eabi" : { | 13 | "arm" : (40, 0, 0, True, 32), |
14 | "arm" : (40, 0, 0, True, 32), | 14 | }, |
15 | }, | 15 | "eabi" : { |
16 | "elf" : { | 16 | "arm" : (40, 0, 0, True, 32), |
17 | "aarch64" : (183, 0, 0, True, 64), | 17 | }, |
18 | "aarch64_be" :(183, 0, 0, False, 64), | 18 | "elf" : { |
19 | "i586" : (3, 0, 0, True, 32), | 19 | "aarch64" : (183, 0, 0, True, 64), |
20 | "i686" : (3, 0, 0, True, 32), | 20 | "aarch64_be" :(183, 0, 0, False, 64), |
21 | "x86_64": (62, 0, 0, True, 64), | 21 | "i586" : (3, 0, 0, True, 32), |
22 | "epiphany": (4643, 0, 0, True, 32), | 22 | "i686" : (3, 0, 0, True, 32), |
23 | "lm32": (138, 0, 0, False, 32), | 23 | "x86_64": (62, 0, 0, True, 64), |
24 | "loongarch64":(258, 0, 0, True, 64), | 24 | "epiphany": (4643, 0, 0, True, 32), |
25 | "mips": ( 8, 0, 0, False, 32), | 25 | "lm32": (138, 0, 0, False, 32), |
26 | "mipsel": ( 8, 0, 0, True, 32), | 26 | "loongarch64":(258, 0, 0, True, 64), |
27 | "microblaze": (189, 0, 0, False, 32), | 27 | "mips": ( 8, 0, 0, False, 32), |
28 | "microblazeel":(189, 0, 0, True, 32), | 28 | "mipsel": ( 8, 0, 0, True, 32), |
29 | "powerpc": (20, 0, 0, False, 32), | 29 | "microblaze": (189, 0, 0, False, 32), |
30 | "riscv32": (243, 0, 0, True, 32), | 30 | "microblazeel":(189, 0, 0, True, 32), |
31 | "riscv64": (243, 0, 0, True, 64), | 31 | "powerpc": (20, 0, 0, False, 32), |
32 | }, | 32 | "riscv32": (243, 0, 0, True, 32), |
33 | "linux" : { | 33 | "riscv64": (243, 0, 0, True, 64), |
34 | "aarch64" : (183, 0, 0, True, 64), | 34 | }, |
35 | "aarch64_be" :(183, 0, 0, False, 64), | 35 | "linux" : { |
36 | "arm" : (40, 97, 0, True, 32), | 36 | "aarch64" : (183, 0, 0, True, 64), |
37 | "armeb": (40, 97, 0, False, 32), | 37 | "aarch64_be" :(183, 0, 0, False, 64), |
38 | "powerpc": (20, 0, 0, False, 32), | 38 | "arm" : (40, 97, 0, True, 32), |
39 | "powerpc64": (21, 0, 0, False, 64), | 39 | "armeb": (40, 97, 0, False, 32), |
40 | "powerpc64le": (21, 0, 0, True, 64), | 40 | "powerpc": (20, 0, 0, False, 32), |
41 | "i386": ( 3, 0, 0, True, 32), | 41 | "powerpc64": (21, 0, 0, False, 64), |
42 | "i486": ( 3, 0, 0, True, 32), | 42 | "powerpc64le": (21, 0, 0, True, 64), |
43 | "i586": ( 3, 0, 0, True, 32), | 43 | "i386": ( 3, 0, 0, True, 32), |
44 | "i686": ( 3, 0, 0, True, 32), | 44 | "i486": ( 3, 0, 0, True, 32), |
45 | "x86_64": (62, 0, 0, True, 64), | 45 | "i586": ( 3, 0, 0, True, 32), |
46 | "ia64": (50, 0, 0, True, 64), | 46 | "i686": ( 3, 0, 0, True, 32), |
47 | "alpha": (36902, 0, 0, True, 64), | 47 | "x86_64": (62, 0, 0, True, 64), |
48 | "hppa": (15, 3, 0, False, 32), | 48 | "ia64": (50, 0, 0, True, 64), |
49 | "loongarch64":(258, 0, 0, True, 64), | 49 | "alpha": (36902, 0, 0, True, 64), |
50 | "m68k": ( 4, 0, 0, False, 32), | 50 | "hppa": (15, 3, 0, False, 32), |
51 | "mips": ( 8, 0, 0, False, 32), | 51 | "loongarch64":(258, 0, 0, True, 64), |
52 | "mipsel": ( 8, 0, 0, True, 32), | 52 | "m68k": ( 4, 0, 0, False, 32), |
53 | "mips64": ( 8, 0, 0, False, 64), | 53 | "mips": ( 8, 0, 0, False, 32), |
54 | "mips64el": ( 8, 0, 0, True, 64), | 54 | "mipsel": ( 8, 0, 0, True, 32), |
55 | "mipsisa32r6": ( 8, 0, 0, False, 32), | 55 | "mips64": ( 8, 0, 0, False, 64), |
56 | "mipsisa32r6el": ( 8, 0, 0, True, 32), | 56 | "mips64el": ( 8, 0, 0, True, 64), |
57 | "mipsisa64r6": ( 8, 0, 0, False, 64), | 57 | "mipsisa32r6": ( 8, 0, 0, False, 32), |
58 | "mipsisa64r6el": ( 8, 0, 0, True, 64), | 58 | "mipsisa32r6el": ( 8, 0, 0, True, 32), |
59 | "nios2": (113, 0, 0, True, 32), | 59 | "mipsisa64r6": ( 8, 0, 0, False, 64), |
60 | "riscv32": (243, 0, 0, True, 32), | 60 | "mipsisa64r6el": ( 8, 0, 0, True, 64), |
61 | "riscv64": (243, 0, 0, True, 64), | 61 | "nios2": (113, 0, 0, True, 32), |
62 | "s390": (22, 0, 0, False, 32), | 62 | "riscv32": (243, 0, 0, True, 32), |
63 | "sh4": (42, 0, 0, True, 32), | 63 | "riscv64": (243, 0, 0, True, 64), |
64 | "sparc": ( 2, 0, 0, False, 32), | 64 | "s390": (22, 0, 0, False, 32), |
65 | "microblaze": (189, 0, 0, False, 32), | 65 | "sh4": (42, 0, 0, True, 32), |
66 | "microblazeel":(189, 0, 0, True, 32), | 66 | "sparc": ( 2, 0, 0, False, 32), |
67 | }, | 67 | "microblaze": (189, 0, 0, False, 32), |
68 | "linux-android" : { | 68 | "microblazeel":(189, 0, 0, True, 32), |
69 | "aarch64" : (183, 0, 0, True, 64), | 69 | }, |
70 | "i686": ( 3, 0, 0, True, 32), | 70 | "linux-android" : { |
71 | "x86_64": (62, 0, 0, True, 64), | 71 | "aarch64" : (183, 0, 0, True, 64), |
72 | }, | 72 | "i686": ( 3, 0, 0, True, 32), |
73 | "linux-androideabi" : { | 73 | "x86_64": (62, 0, 0, True, 64), |
74 | "arm" : (40, 97, 0, True, 32), | 74 | }, |
75 | }, | 75 | "linux-androideabi" : { |
76 | "linux-musl" : { | 76 | "arm" : (40, 97, 0, True, 32), |
77 | "aarch64" : (183, 0, 0, True, 64), | 77 | }, |
78 | "aarch64_be" :(183, 0, 0, False, 64), | 78 | "linux-musl" : { |
79 | "arm" : ( 40, 97, 0, True, 32), | 79 | "aarch64" : (183, 0, 0, True, 64), |
80 | "armeb": ( 40, 97, 0, False, 32), | 80 | "aarch64_be" :(183, 0, 0, False, 64), |
81 | "powerpc": ( 20, 0, 0, False, 32), | 81 | "arm" : ( 40, 97, 0, True, 32), |
82 | "powerpc64": ( 21, 0, 0, False, 64), | 82 | "armeb": ( 40, 97, 0, False, 32), |
83 | "powerpc64le": (21, 0, 0, True, 64), | 83 | "powerpc": ( 20, 0, 0, False, 32), |
84 | "i386": ( 3, 0, 0, True, 32), | 84 | "powerpc64": ( 21, 0, 0, False, 64), |
85 | "i486": ( 3, 0, 0, True, 32), | 85 | "powerpc64le": (21, 0, 0, True, 64), |
86 | "i586": ( 3, 0, 0, True, 32), | 86 | "i386": ( 3, 0, 0, True, 32), |
87 | "i686": ( 3, 0, 0, True, 32), | 87 | "i486": ( 3, 0, 0, True, 32), |
88 | "x86_64": ( 62, 0, 0, True, 64), | 88 | "i586": ( 3, 0, 0, True, 32), |
89 | "mips": ( 8, 0, 0, False, 32), | 89 | "i686": ( 3, 0, 0, True, 32), |
90 | "mipsel": ( 8, 0, 0, True, 32), | 90 | "x86_64": ( 62, 0, 0, True, 64), |
91 | "mips64": ( 8, 0, 0, False, 64), | 91 | "loongarch64":( 258, 0, 0, True, 64), |
92 | "mips64el": ( 8, 0, 0, True, 64), | 92 | "mips": ( 8, 0, 0, False, 32), |
93 | "microblaze": (189, 0, 0, False, 32), | 93 | "mipsel": ( 8, 0, 0, True, 32), |
94 | "microblazeel":(189, 0, 0, True, 32), | 94 | "mips64": ( 8, 0, 0, False, 64), |
95 | "riscv32": (243, 0, 0, True, 32), | 95 | "mips64el": ( 8, 0, 0, True, 64), |
96 | "riscv64": (243, 0, 0, True, 64), | 96 | "microblaze": (189, 0, 0, False, 32), |
97 | "sh4": ( 42, 0, 0, True, 32), | 97 | "microblazeel":(189, 0, 0, True, 32), |
98 | }, | 98 | "riscv32": (243, 0, 0, True, 32), |
99 | "uclinux-uclibc" : { | 99 | "riscv64": (243, 0, 0, True, 64), |
100 | "bfin": ( 106, 0, 0, True, 32), | 100 | "sh4": ( 42, 0, 0, True, 32), |
101 | }, | 101 | }, |
102 | "linux-gnueabi" : { | 102 | "uclinux-uclibc" : { |
103 | "arm" : (40, 0, 0, True, 32), | 103 | "bfin": ( 106, 0, 0, True, 32), |
104 | "armeb" : (40, 0, 0, False, 32), | 104 | }, |
105 | }, | 105 | "linux-gnueabi" : { |
106 | "linux-musleabi" : { | 106 | "arm" : (40, 0, 0, True, 32), |
107 | "arm" : (40, 0, 0, True, 32), | 107 | "armeb" : (40, 0, 0, False, 32), |
108 | "armeb" : (40, 0, 0, False, 32), | 108 | }, |
109 | }, | 109 | "linux-musleabi" : { |
110 | "linux-gnuspe" : { | 110 | "arm" : (40, 0, 0, True, 32), |
111 | "powerpc": (20, 0, 0, False, 32), | 111 | "armeb" : (40, 0, 0, False, 32), |
112 | }, | 112 | }, |
113 | "linux-muslspe" : { | 113 | "linux-gnuspe" : { |
114 | "powerpc": (20, 0, 0, False, 32), | 114 | "powerpc": (20, 0, 0, False, 32), |
115 | }, | 115 | }, |
116 | "linux-gnu" : { | 116 | "linux-muslspe" : { |
117 | "powerpc": (20, 0, 0, False, 32), | 117 | "powerpc": (20, 0, 0, False, 32), |
118 | "sh4": (42, 0, 0, True, 32), | 118 | }, |
119 | }, | 119 | "linux-gnu" : { |
120 | "linux-gnu_ilp32" : { | 120 | "powerpc": (20, 0, 0, False, 32), |
121 | "aarch64" : (183, 0, 0, True, 32), | 121 | "sh4": (42, 0, 0, True, 32), |
122 | }, | 122 | }, |
123 | "linux-gnux32" : { | 123 | "linux-gnu_ilp32" : { |
124 | "x86_64": (62, 0, 0, True, 32), | 124 | "aarch64" : (183, 0, 0, True, 32), |
125 | }, | 125 | }, |
126 | "linux-muslx32" : { | 126 | "linux-gnux32" : { |
127 | "x86_64": (62, 0, 0, True, 32), | 127 | "x86_64": (62, 0, 0, True, 32), |
128 | }, | 128 | }, |
129 | "linux-gnun32" : { | 129 | "linux-muslx32" : { |
130 | "mips64": ( 8, 0, 0, False, 32), | 130 | "x86_64": (62, 0, 0, True, 32), |
131 | "mips64el": ( 8, 0, 0, True, 32), | 131 | }, |
132 | "mipsisa64r6": ( 8, 0, 0, False, 32), | 132 | "linux-gnun32" : { |
133 | "mipsisa64r6el":( 8, 0, 0, True, 32), | 133 | "mips64": ( 8, 0, 0, False, 32), |
134 | }, | 134 | "mips64el": ( 8, 0, 0, True, 32), |
135 | } | 135 | "mipsisa64r6": ( 8, 0, 0, False, 32), |
136 | "mipsisa64r6el":( 8, 0, 0, True, 32), | ||
137 | }, | ||
138 | } | ||
136 | 139 | ||
137 | # Add in any extra user supplied data which may come from a BSP layer, removing the | 140 | # Add in any extra user supplied data which may come from a BSP layer, removing the |
138 | # need to always change this class directly | 141 | # need to always change this class directly |
139 | extra_machdata = (d and d.getVar("PACKAGEQA_EXTRA_MACHDEFFUNCS") or "").split() | 142 | extra_machdata = (d and d.getVar("PACKAGEQA_EXTRA_MACHDEFFUNCS") or "").split() |
140 | for m in extra_machdata: | 143 | for m in extra_machdata: |
141 | call = m + "(machdata, d)" | 144 | call = m + "(machdata, d)" |
142 | locs = { "machdata" : machdata, "d" : d} | 145 | locs = { "machdata" : machine_dict.machdata, "d" : d} |
143 | machdata = bb.utils.better_eval(call, locs) | 146 | machine_dict.machdata = bb.utils.better_eval(call, locs) |
144 | 147 | ||
145 | return machdata | 148 | return machine_dict.machdata |
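The memoisation above stores the table as an attribute on the function object itself, so it is built once per cooker process. The same pattern in isolation, as a minimal sketch:

# Minimal sketch of the function-attribute caching idiom used by machine_dict().
def machine_table():
    if not hasattr(machine_table, "cache"):
        # expensive construction happens only on the first call
        machine_table.cache = {"linux": {"x86_64": (62, 0, 0, True, 64)}}
    return machine_table.cache

machine_table()  # builds the dict
machine_table()  # returns the cached dict unchanged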
diff --git a/meta/lib/oe/fitimage.py b/meta/lib/oe/fitimage.py
new file mode 100644
index 0000000000..f303799155
--- /dev/null
+++ b/meta/lib/oe/fitimage.py
@@ -0,0 +1,547 @@
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | # This file contains common functions for the fitimage generation | ||
7 | |||
8 | import os | ||
9 | import shlex | ||
10 | import subprocess | ||
11 | import bb | ||
12 | |||
13 | from oeqa.utils.commands import runCmd | ||
14 | |||
15 | class ItsNode: | ||
16 | INDENT_SIZE = 8 | ||
17 | |||
18 | def __init__(self, name, parent_node, sub_nodes=None, properties=None): | ||
19 | self.name = name | ||
20 | self.parent_node = parent_node | ||
21 | |||
22 | self.sub_nodes = [] | ||
23 | if sub_nodes: | ||
24 | self.sub_nodes = sub_nodes | ||
25 | |||
26 | self.properties = {} | ||
27 | if properties: | ||
28 | self.properties = properties | ||
29 | |||
30 | if parent_node: | ||
31 | parent_node.add_sub_node(self) | ||
32 | |||
33 | def add_sub_node(self, sub_node): | ||
34 | self.sub_nodes.append(sub_node) | ||
35 | |||
36 | def add_property(self, key, value): | ||
37 | self.properties[key] = value | ||
38 | |||
39 | def emit(self, f, indent): | ||
40 | indent_str_name = " " * indent | ||
41 | indent_str_props = " " * (indent + self.INDENT_SIZE) | ||
42 | f.write("%s%s {\n" % (indent_str_name, self.name)) | ||
43 | for key, value in self.properties.items(): | ||
44 | bb.debug(1, "key: %s, value: %s" % (key, str(value))) | ||
45 | # Single integer: <0x12ab> | ||
46 | if isinstance(value, int): | ||
47 | f.write(indent_str_props + key + ' = <0x%x>;\n' % value) | ||
48 | # list of strings: "string1", "string2" or integers: <0x12ab 0x34cd> | ||
49 | elif isinstance(value, list): | ||
50 | if len(value) == 0: | ||
51 | f.write(indent_str_props + key + ' = "";\n') | ||
52 | elif isinstance(value[0], int): | ||
53 | list_entries = ' '.join('0x%x' % entry for entry in value) | ||
54 | f.write(indent_str_props + key + ' = <%s>;\n' % list_entries) | ||
55 | else: | ||
56 | list_entries = ', '.join('"%s"' % entry for entry in value) | ||
57 | f.write(indent_str_props + key + ' = %s;\n' % list_entries) | ||
58 | elif isinstance(value, str): | ||
59 | # path: /incbin/("path/to/file") | ||
60 | if key in ["data"] and value.startswith('/incbin/('): | ||
61 | f.write(indent_str_props + key + ' = %s;\n' % value) | ||
62 | # Integers which are already string formatted | ||
63 | elif value.startswith("<") and value.endswith(">"): | ||
64 | f.write(indent_str_props + key + ' = %s;\n' % value) | ||
65 | else: | ||
66 | f.write(indent_str_props + key + ' = "%s";\n' % value) | ||
67 | else: | ||
68 | bb.fatal("%s has unexpected data type." % str(value)) | ||
69 | for sub_node in self.sub_nodes: | ||
70 | sub_node.emit(f, indent + self.INDENT_SIZE) | ||
71 | f.write(indent_str_name + '};\n') | ||
72 | |||
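A hedged sketch of how these node objects are meant to compose and emit ITS source, based on the emit() rules above (file names and addresses invented):

# Illustrative: build a tiny ITS tree by hand and emit it as device-tree source.
root = ItsNode("/", None, properties={"description": "demo FIT", "#address-cells": "<1>"})
images = ItsNode("images", root)
ItsNode("kernel-1", images, properties={
    "description": "Linux kernel",
    "type": "kernel",
    "data": '/incbin/("linux.bin")',   # emitted verbatim as an /incbin/ reference
    "load": 0x80000000,                # integers are emitted as <0x80000000>
})
with open("demo.its", "w") as f:
    f.write("/dts-v1/;\n\n")
    root.emit(f, 0)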
73 | class ItsNodeImages(ItsNode): | ||
74 | def __init__(self, parent_node): | ||
75 | super().__init__("images", parent_node) | ||
76 | |||
77 | class ItsNodeConfigurations(ItsNode): | ||
78 | def __init__(self, parent_node): | ||
79 | super().__init__("configurations", parent_node) | ||
80 | |||
81 | class ItsNodeHash(ItsNode): | ||
82 | def __init__(self, name, parent_node, algo, opt_props=None): | ||
83 | properties = { | ||
84 | "algo": algo | ||
85 | } | ||
86 | if opt_props: | ||
87 | properties.update(opt_props) | ||
88 | super().__init__(name, parent_node, None, properties) | ||
89 | |||
90 | class ItsImageSignature(ItsNode): | ||
91 | def __init__(self, name, parent_node, algo, keyname, opt_props=None): | ||
92 | properties = { | ||
93 | "algo": algo, | ||
94 | "key-name-hint": keyname | ||
95 | } | ||
96 | if opt_props: | ||
97 | properties.update(opt_props) | ||
98 | super().__init__(name, parent_node, None, properties) | ||
99 | |||
100 | class ItsNodeImage(ItsNode): | ||
101 | def __init__(self, name, parent_node, description, type, compression, sub_nodes=None, opt_props=None): | ||
102 | properties = { | ||
103 | "description": description, | ||
104 | "type": type, | ||
105 | "compression": compression, | ||
106 | } | ||
107 | if opt_props: | ||
108 | properties.update(opt_props) | ||
109 | super().__init__(name, parent_node, sub_nodes, properties) | ||
110 | |||
111 | class ItsNodeDtb(ItsNodeImage): | ||
112 | def __init__(self, name, parent_node, description, type, compression, | ||
113 | sub_nodes=None, opt_props=None, compatible=None): | ||
114 | super().__init__(name, parent_node, description, type, compression, sub_nodes, opt_props) | ||
115 | self.compatible = compatible | ||
116 | |||
117 | class ItsNodeDtbAlias(ItsNode): | ||
118 | """Additional Configuration Node for a DTB | ||
119 | |||
120 | Symlinks pointing to a DTB file are handled by an additional | ||
121 | configuration node referring to another DTB image node. | ||
122 | """ | ||
123 | def __init__(self, name, alias_name, compatible=None): | ||
124 | super().__init__(name, parent_node=None, sub_nodes=None, properties=None) | ||
125 | self.alias_name = alias_name | ||
126 | self.compatible = compatible | ||
127 | |||
128 | class ItsNodeConfigurationSignature(ItsNode): | ||
129 | def __init__(self, name, parent_node, algo, keyname, opt_props=None): | ||
130 | properties = { | ||
131 | "algo": algo, | ||
132 | "key-name-hint": keyname | ||
133 | } | ||
134 | if opt_props: | ||
135 | properties.update(opt_props) | ||
136 | super().__init__(name, parent_node, None, properties) | ||
137 | |||
138 | class ItsNodeConfiguration(ItsNode): | ||
139 | def __init__(self, name, parent_node, description, sub_nodes=None, opt_props=None): | ||
140 | properties = { | ||
141 | "description": description, | ||
142 | } | ||
143 | if opt_props: | ||
144 | properties.update(opt_props) | ||
145 | super().__init__(name, parent_node, sub_nodes, properties) | ||
146 | |||
147 | class ItsNodeRootKernel(ItsNode): | ||
148 | """Create FIT images for the kernel | ||
149 | |||
150 | Currently exactly one kernel (no more, no fewer) can be added to the FIT | ||
151 | image, along with 0 or more device trees and 0 or 1 ramdisk. | ||
152 | |||
153 | If a device tree is included in the FIT image, the default configuration is | ||
154 | the first DTB. If no DTB is present, the default configuration is the kernel. | ||
155 | """ | ||
156 | def __init__(self, description, address_cells, host_prefix, arch, conf_prefix, | ||
157 | sign_enable=False, sign_keydir=None, | ||
158 | mkimage=None, mkimage_dtcopts=None, | ||
159 | mkimage_sign=None, mkimage_sign_args=None, | ||
160 | hash_algo=None, sign_algo=None, pad_algo=None, | ||
161 | sign_keyname_conf=None, | ||
162 | sign_individual=False, sign_keyname_img=None): | ||
163 | props = { | ||
164 | "description": description, | ||
165 | "#address-cells": f"<{address_cells}>" | ||
166 | } | ||
167 | super().__init__("/", None, None, props) | ||
168 | self.images = ItsNodeImages(self) | ||
169 | self.configurations = ItsNodeConfigurations(self) | ||
170 | |||
171 | self._host_prefix = host_prefix | ||
172 | self._arch = arch | ||
173 | self._conf_prefix = conf_prefix | ||
174 | |||
175 | # Signature related properties | ||
176 | self._sign_enable = sign_enable | ||
177 | self._sign_keydir = sign_keydir | ||
178 | self._mkimage = mkimage | ||
179 | self._mkimage_dtcopts = mkimage_dtcopts | ||
180 | self._mkimage_sign = mkimage_sign | ||
181 | self._mkimage_sign_args = mkimage_sign_args | ||
182 | self._hash_algo = hash_algo | ||
183 | self._sign_algo = sign_algo | ||
184 | self._pad_algo = pad_algo | ||
185 | self._sign_keyname_conf = sign_keyname_conf | ||
186 | self._sign_individual = sign_individual | ||
187 | self._sign_keyname_img = sign_keyname_img | ||
188 | self._sanitize_sign_config() | ||
189 | |||
190 | self._dtbs = [] | ||
191 | self._dtb_alias = [] | ||
192 | self._kernel = None | ||
193 | self._ramdisk = None | ||
194 | self._bootscr = None | ||
195 | self._setup = None | ||
196 | |||
197 | def _sanitize_sign_config(self): | ||
198 | if self._sign_enable: | ||
199 | if not self._hash_algo: | ||
200 | bb.fatal("FIT image signing is enabled but no hash algorithm is provided.") | ||
201 | if not self._sign_algo: | ||
202 | bb.fatal("FIT image signing is enabled but no signature algorithm is provided.") | ||
203 | if not self._pad_algo: | ||
204 | bb.fatal("FIT image signing is enabled but no padding algorithm is provided.") | ||
205 | if not self._sign_keyname_conf: | ||
206 | bb.fatal("FIT image signing is enabled but no configuration key name is provided.") | ||
207 | if self._sign_individual and not self._sign_keyname_img: | ||
208 | bb.fatal("FIT image signing is enabled for individual images but no image key name is provided.") | ||
209 | |||
210 | def write_its_file(self, itsfile): | ||
211 | with open(itsfile, 'w') as f: | ||
212 | f.write("/dts-v1/;\n\n") | ||
213 | self.emit(f, 0) | ||
214 | |||
215 | def its_add_node_image(self, image_id, description, image_type, compression, opt_props): | ||
216 | image_node = ItsNodeImage( | ||
217 | image_id, | ||
218 | self.images, | ||
219 | description, | ||
220 | image_type, | ||
221 | compression, | ||
222 | opt_props=opt_props | ||
223 | ) | ||
224 | if self._hash_algo: | ||
225 | ItsNodeHash( | ||
226 | "hash-1", | ||
227 | image_node, | ||
228 | self._hash_algo | ||
229 | ) | ||
230 | if self._sign_individual: | ||
231 | ItsImageSignature( | ||
232 | "signature-1", | ||
233 | image_node, | ||
234 | f"{self._hash_algo},{self._sign_algo}", | ||
235 | self._sign_keyname_img | ||
236 | ) | ||
237 | return image_node | ||
238 | |||
239 | def its_add_node_dtb(self, image_id, description, image_type, compression, opt_props, compatible): | ||
240 | dtb_node = ItsNodeDtb( | ||
241 | image_id, | ||
242 | self.images, | ||
243 | description, | ||
244 | image_type, | ||
245 | compression, | ||
246 | opt_props=opt_props, | ||
247 | compatible=compatible | ||
248 | ) | ||
249 | if self._hash_algo: | ||
250 | ItsNodeHash( | ||
251 | "hash-1", | ||
252 | dtb_node, | ||
253 | self._hash_algo | ||
254 | ) | ||
255 | if self._sign_individual: | ||
256 | ItsImageSignature( | ||
257 | "signature-1", | ||
258 | dtb_node, | ||
259 | f"{self._hash_algo},{self._sign_algo}", | ||
260 | self._sign_keyname_img | ||
261 | ) | ||
262 | return dtb_node | ||
263 | |||
264 | def fitimage_emit_section_kernel(self, kernel_id, kernel_path, compression, | ||
265 | load, entrypoint, mkimage_kernel_type, entrysymbol=None): | ||
266 | """Emit the fitImage ITS kernel section""" | ||
267 | if self._kernel: | ||
268 | bb.fatal("Kernel section already exists in the ITS file.") | ||
269 | if entrysymbol: | ||
270 | result = subprocess.run([self._host_prefix + "nm", "vmlinux"], capture_output=True, text=True) | ||
271 | for line in result.stdout.splitlines(): | ||
272 | parts = line.split() | ||
273 | if len(parts) == 3 and parts[2] == entrysymbol: | ||
274 | entrypoint = "<0x%s>" % parts[0] | ||
275 | break | ||
276 | kernel_node = self.its_add_node_image( | ||
277 | kernel_id, | ||
278 | "Linux kernel", | ||
279 | mkimage_kernel_type, | ||
280 | compression, | ||
281 | { | ||
282 | "data": '/incbin/("' + kernel_path + '")', | ||
283 | "arch": self._arch, | ||
284 | "os": "linux", | ||
285 | "load": f"<{load}>", | ||
286 | "entry": f"<{entrypoint}>" | ||
287 | } | ||
288 | ) | ||
289 | self._kernel = kernel_node | ||
290 | |||
291 | def fitimage_emit_section_dtb(self, dtb_id, dtb_path, dtb_loadaddress=None, | ||
292 | dtbo_loadaddress=None, add_compatible=False): | ||
293 | """Emit the fitImage ITS DTB section""" | ||
294 | load = None | ||
295 | dtb_ext = os.path.splitext(dtb_path)[1] | ||
296 | if dtb_ext == ".dtbo": | ||
297 | if dtbo_loadaddress: | ||
298 | load = dtbo_loadaddress | ||
299 | elif dtb_loadaddress: | ||
300 | load = dtb_loadaddress | ||
301 | |||
302 | opt_props = { | ||
303 | "data": '/incbin/("' + dtb_path + '")', | ||
304 | "arch": self._arch | ||
305 | } | ||
306 | if load: | ||
307 | opt_props["load"] = f"<{load}>" | ||
308 | |||
309 | # Preserve the DTB's compatible string to be added to the configuration node | ||
310 | compatible = None | ||
311 | if add_compatible: | ||
312 | compatible = get_compatible_from_dtb(dtb_path) | ||
313 | |||
314 | dtb_node = self.its_add_node_dtb( | ||
315 | "fdt-" + dtb_id, | ||
316 | "Flattened Device Tree blob", | ||
317 | "flat_dt", | ||
318 | "none", | ||
319 | opt_props, | ||
320 | compatible | ||
321 | ) | ||
322 | self._dtbs.append(dtb_node) | ||
323 | |||
324 | def fitimage_emit_section_dtb_alias(self, dtb_alias_id, dtb_path, add_compatible=False): | ||
325 | """Add a configuration node referring to another DTB""" | ||
326 | # Preserve the DTB's compatible string to be added to the configuration node | ||
327 | compatible = None | ||
328 | if add_compatible: | ||
329 | compatible = get_compatible_from_dtb(dtb_path) | ||
330 | |||
331 | dtb_id = os.path.basename(dtb_path) | ||
332 | dtb_alias_node = ItsNodeDtbAlias("fdt-" + dtb_id, dtb_alias_id, compatible) | ||
333 | self._dtb_alias.append(dtb_alias_node) | ||
334 | bb.warn(f"compatible: {compatible}, dtb_alias_id: {dtb_alias_id}, dtb_id: {dtb_id}, dtb_path: {dtb_path}") | ||
335 | |||
336 | def fitimage_emit_section_boot_script(self, bootscr_id, bootscr_path): | ||
337 | """Emit the fitImage ITS u-boot script section""" | ||
338 | if self._bootscr: | ||
339 | bb.fatal("U-boot script section already exists in the ITS file.") | ||
340 | bootscr_node = self.its_add_node_image( | ||
341 | bootscr_id, | ||
342 | "U-boot script", | ||
343 | "script", | ||
344 | "none", | ||
345 | { | ||
346 | "data": '/incbin/("' + bootscr_path + '")', | ||
347 | "arch": self._arch, | ||
348 | "type": "script" | ||
349 | } | ||
350 | ) | ||
351 | self._bootscr = bootscr_node | ||
352 | |||
353 | def fitimage_emit_section_setup(self, setup_id, setup_path): | ||
354 | """Emit the fitImage ITS setup section""" | ||
355 | if self._setup: | ||
356 | bb.fatal("Setup section already exists in the ITS file.") | ||
357 | load = "<0x00090000>" | ||
358 | entry = "<0x00090000>" | ||
359 | setup_node = self.its_add_node_image( | ||
360 | setup_id, | ||
361 | "Linux setup.bin", | ||
362 | "x86_setup", | ||
363 | "none", | ||
364 | { | ||
365 | "data": '/incbin/("' + setup_path + '")', | ||
366 | "arch": self._arch, | ||
367 | "os": "linux", | ||
368 | "load": load, | ||
369 | "entry": entry | ||
370 | } | ||
371 | ) | ||
372 | self._setup = setup_node | ||
373 | |||
374 | def fitimage_emit_section_ramdisk(self, ramdisk_id, ramdisk_path, description="ramdisk", load=None, entry=None): | ||
375 | """Emit the fitImage ITS ramdisk section""" | ||
376 | if self._ramdisk: | ||
377 | bb.fatal("Ramdisk section already exists in the ITS file.") | ||
378 | opt_props = { | ||
379 | "data": '/incbin/("' + ramdisk_path + '")', | ||
380 | "type": "ramdisk", | ||
381 | "arch": self._arch, | ||
382 | "os": "linux" | ||
383 | } | ||
384 | if load: | ||
385 | opt_props["load"] = f"<{load}>" | ||
386 | if entry: | ||
387 | opt_props["entry"] = f"<{entry}>" | ||
388 | |||
389 | ramdisk_node = self.its_add_node_image( | ||
390 | ramdisk_id, | ||
391 | description, | ||
392 | "ramdisk", | ||
393 | "none", | ||
394 | opt_props | ||
395 | ) | ||
396 | self._ramdisk = ramdisk_node | ||
397 | |||
398 | def _fitimage_emit_one_section_config(self, conf_node_name, dtb=None): | ||
399 | """Emit the fitImage ITS configuration section""" | ||
400 | opt_props = {} | ||
401 | conf_desc = [] | ||
402 | sign_entries = [] | ||
403 | |||
404 | if self._kernel: | ||
405 | conf_desc.append("Linux kernel") | ||
406 | opt_props["kernel"] = self._kernel.name | ||
407 | if self._sign_enable: | ||
408 | sign_entries.append("kernel") | ||
409 | |||
410 | if dtb: | ||
411 | conf_desc.append("FDT blob") | ||
412 | opt_props["fdt"] = dtb.name | ||
413 | if dtb.compatible: | ||
414 | opt_props["compatible"] = dtb.compatible | ||
415 | if self._sign_enable: | ||
416 | sign_entries.append("fdt") | ||
417 | |||
418 | if self._ramdisk: | ||
419 | conf_desc.append("ramdisk") | ||
420 | opt_props["ramdisk"] = self._ramdisk.name | ||
421 | if self._sign_enable: | ||
422 | sign_entries.append("ramdisk") | ||
423 | |||
424 | if self._bootscr: | ||
425 | conf_desc.append("u-boot script") | ||
426 | opt_props["bootscr"] = self._bootscr.name | ||
427 | if self._sign_enable: | ||
428 | sign_entries.append("bootscr") | ||
429 | |||
430 | if self._setup: | ||
431 | conf_desc.append("setup") | ||
432 | opt_props["setup"] = self._setup.name | ||
433 | if self._sign_enable: | ||
434 | sign_entries.append("setup") | ||
435 | |||
436 | # First added configuration is the default configuration | ||
437 | default_flag = "0" | ||
438 | if len(self.configurations.sub_nodes) == 0: | ||
439 | default_flag = "1" | ||
440 | |||
441 | conf_node = ItsNodeConfiguration( | ||
442 | conf_node_name, | ||
443 | self.configurations, | ||
444 | f"{default_flag} {', '.join(conf_desc)}", | ||
445 | opt_props=opt_props | ||
446 | ) | ||
447 | if self._hash_algo: | ||
448 | ItsNodeHash( | ||
449 | "hash-1", | ||
450 | conf_node, | ||
451 | self._hash_algo | ||
452 | ) | ||
453 | if self._sign_enable: | ||
454 | ItsNodeConfigurationSignature( | ||
455 | "signature-1", | ||
456 | conf_node, | ||
457 | f"{self._hash_algo},{self._sign_algo}", | ||
458 | self._sign_keyname_conf, | ||
459 | opt_props={ | ||
460 | "padding": self._pad_algo, | ||
461 | "sign-images": sign_entries | ||
462 | } | ||
463 | ) | ||
464 | |||
465 | def fitimage_emit_section_config(self, default_dtb_image=None): | ||
466 | if self._dtbs: | ||
467 | for dtb in self._dtbs: | ||
468 | dtb_name = dtb.name | ||
469 | if dtb.name.startswith("fdt-"): | ||
470 | dtb_name = dtb.name[len("fdt-"):] | ||
471 | self._fitimage_emit_one_section_config(self._conf_prefix + dtb_name, dtb) | ||
472 | for dtb in self._dtb_alias: | ||
473 | self._fitimage_emit_one_section_config(self._conf_prefix + dtb.alias_name, dtb) | ||
474 | else: | ||
475 | # Currently exactly one kernel is supported. | ||
476 | self._fitimage_emit_one_section_config(self._conf_prefix + "1") | ||
477 | |||
478 | default_conf = self.configurations.sub_nodes[0].name | ||
479 | if default_dtb_image and self._dtbs: | ||
480 | default_conf = self._conf_prefix + default_dtb_image | ||
481 | self.configurations.add_property('default', default_conf) | ||
482 | |||
483 | def run_mkimage_assemble(self, itsfile, fitfile): | ||
484 | cmd = [ | ||
485 | self._mkimage, | ||
486 | '-f', itsfile, | ||
487 | fitfile | ||
488 | ] | ||
489 | if self._mkimage_dtcopts: | ||
490 | cmd.insert(1, '-D') | ||
491 | cmd.insert(2, self._mkimage_dtcopts) | ||
492 | try: | ||
493 | subprocess.run(cmd, check=True, capture_output=True) | ||
494 | except subprocess.CalledProcessError as e: | ||
495 | bb.fatal(f"Command '{' '.join(cmd)}' failed with return code {e.returncode}\nstdout: {e.stdout.decode()}\nstderr: {e.stderr.decode()}\nitsflile: {os.path.abspath(itsfile)}") | ||
496 | |||
497 | def run_mkimage_sign(self, fitfile): | ||
498 | if not self._sign_enable: | ||
499 | bb.debug(1, "FIT image signing is disabled. Skipping signing.") | ||
500 | return | ||
501 | |||
502 | # Sanity checks, because mkimage also exits with 0 when the needed keys are missing | ||
503 | sign_key_path = os.path.join(self._sign_keydir, self._sign_keyname_conf) | ||
504 | if not os.path.exists(sign_key_path + '.key') or not os.path.exists(sign_key_path + '.crt'): | ||
505 | bb.fatal("%s.key or .crt does not exist" % sign_key_path) | ||
506 | if self._sign_individual: | ||
507 | sign_key_img_path = os.path.join(self._sign_keydir, self._sign_keyname_img) | ||
508 | if not os.path.exists(sign_key_img_path + '.key') or not os.path.exists(sign_key_img_path + '.crt'): | ||
509 | bb.fatal("%s.key or .crt does not exist" % sign_key_img_path) | ||
510 | |||
511 | cmd = [ | ||
512 | self._mkimage_sign, | ||
513 | '-F', | ||
514 | '-k', self._sign_keydir, | ||
515 | '-r', fitfile | ||
516 | ] | ||
517 | if self._mkimage_dtcopts: | ||
518 | cmd.extend(['-D', self._mkimage_dtcopts]) | ||
519 | if self._mkimage_sign_args: | ||
520 | cmd.extend(shlex.split(self._mkimage_sign_args)) | ||
521 | try: | ||
522 | subprocess.run(cmd, check=True, capture_output=True) | ||
523 | except subprocess.CalledProcessError as e: | ||
524 | bb.fatal(f"Command '{' '.join(cmd)}' failed with return code {e.returncode}\nstdout: {e.stdout.decode()}\nstderr: {e.stderr.decode()}") | ||
525 | |||
526 | |||
527 | def symlink_points_below(file_or_symlink, expected_parent_dir): | ||
528 | """returns symlink destination if it points below directory""" | ||
529 | file_path = os.path.join(expected_parent_dir, file_or_symlink) | ||
530 | if not os.path.islink(file_path): | ||
531 | return None | ||
532 | |||
533 | realpath = os.path.relpath(os.path.realpath(file_path), expected_parent_dir) | ||
534 | if realpath.startswith(".."): | ||
535 | return None | ||
536 | |||
537 | return realpath | ||
538 | |||
539 | def get_compatible_from_dtb(dtb_path, fdtget_path="fdtget"): | ||
540 | compatible = None | ||
541 | cmd = [fdtget_path, "-t", "s", dtb_path, "/", "compatible"] | ||
542 | try: | ||
543 | ret = subprocess.run(cmd, check=True, capture_output=True, text=True) | ||
544 | compatible = ret.stdout.strip().split() | ||
545 | except subprocess.CalledProcessError: | ||
546 | compatible = None | ||
547 | return compatible | ||
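As an aside, the default-configuration rule in fitimage_emit_section_config() above is easy to misread; below is a minimal standalone sketch of just that selection logic. The conf prefix value and the configuration names are illustrative assumptions, not values taken from this patch.

# Sketch of the default-configuration rule in fitimage_emit_section_config():
# the first configuration added becomes the default unless a default DTB
# image is explicitly requested (and DTBs are present).
conf_prefix = "conf-"  # assumed value of self._conf_prefix

def pick_default(conf_names, default_dtb_image=None, have_dtbs=True):
    if default_dtb_image and have_dtbs:
        return conf_prefix + default_dtb_image
    return conf_names[0]  # first added configuration

print(pick_default(["conf-board-a.dtb", "conf-board-b.dtb"]))
# conf-board-a.dtb
print(pick_default(["conf-board-a.dtb", "conf-board-b.dtb"], "board-b.dtb"))
# conf-board-b.dtb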
diff --git a/meta/lib/oe/go.py b/meta/lib/oe/go.py index dfd957d157..4559dc63b2 100644 --- a/meta/lib/oe/go.py +++ b/meta/lib/oe/go.py | |||
@@ -7,6 +7,10 @@ | |||
7 | import re | 7 | import re |
8 | 8 | ||
9 | def map_arch(a): | 9 | def map_arch(a): |
10 | """ | ||
11 | Map our architecture names to Go's GOARCH names. | ||
12 | See https://github.com/golang/go/blob/master/src/internal/syslist/syslist.go for the complete list. | ||
13 | """ | ||
10 | if re.match('i.86', a): | 14 | if re.match('i.86', a): |
11 | return '386' | 15 | return '386' |
12 | elif a == 'x86_64': | 16 | elif a == 'x86_64': |
@@ -31,4 +35,4 @@ def map_arch(a): | |||
31 | return 'riscv64' | 35 | return 'riscv64' |
32 | elif a == 'loongarch64': | 36 | elif a == 'loongarch64': |
33 | return 'loong64' | 37 | return 'loong64' |
34 | return '' | 38 | raise KeyError(f"Cannot map architecture {a}") |
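Note that map_arch() now raises KeyError for unmapped architectures instead of silently returning an empty string, so callers must handle the failure explicitly. A small sketch, assuming meta/lib is on sys.path as it is inside BitBake:

import oe.go

for arch in ("riscv64", "loongarch64", "myarch"):
    try:
        # riscv64 -> riscv64, loongarch64 -> loong64 per the mapping above
        print(arch, "->", oe.go.map_arch(arch))
    except KeyError as exc:
        print(exc)  # 'Cannot map architecture myarch'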
diff --git a/meta/lib/oe/license.py b/meta/lib/oe/license.py index d9c8d94da4..6e55fa1e7f 100644 --- a/meta/lib/oe/license.py +++ b/meta/lib/oe/license.py | |||
@@ -7,6 +7,7 @@ | |||
7 | 7 | ||
8 | import ast | 8 | import ast |
9 | import re | 9 | import re |
10 | import oe.qa | ||
10 | from fnmatch import fnmatchcase as fnmatch | 11 | from fnmatch import fnmatchcase as fnmatch |
11 | 12 | ||
12 | def license_ok(license, dont_want_licenses): | 13 | def license_ok(license, dont_want_licenses): |
@@ -259,3 +260,220 @@ def apply_pkg_license_exception(pkg, bad_licenses, exceptions): | |||
259 | """Return remaining bad licenses after removing any package exceptions""" | 260 | """Return remaining bad licenses after removing any package exceptions""" |
260 | 261 | ||
261 | return [lic for lic in bad_licenses if pkg + ':' + lic not in exceptions] | 262 | return [lic for lic in bad_licenses if pkg + ':' + lic not in exceptions] |
263 | |||
264 | def return_spdx(d, license): | ||
265 | """ | ||
266 | Return the SPDX mapping of the license if one exists. | ||
267 | """ | ||
268 | return d.getVarFlag('SPDXLICENSEMAP', license) | ||
269 | |||
270 | def canonical_license(d, license): | ||
271 | """ | ||
272 | Return the canonical (SPDX) form of the license if available (so GPLv3 | ||
273 | becomes GPL-3.0-only) or the passed license if there is no canonical form. | ||
274 | """ | ||
275 | return d.getVarFlag('SPDXLICENSEMAP', license) or license | ||
276 | |||
277 | def expand_wildcard_licenses(d, wildcard_licenses): | ||
278 | """ | ||
279 | There are some common wildcard values users may want to use. Support them | ||
280 | here. | ||
281 | """ | ||
282 | licenses = set(wildcard_licenses) | ||
283 | mapping = { | ||
284 | "AGPL-3.0*" : ["AGPL-3.0-only", "AGPL-3.0-or-later"], | ||
285 | "GPL-3.0*" : ["GPL-3.0-only", "GPL-3.0-or-later"], | ||
286 | "LGPL-3.0*" : ["LGPL-3.0-only", "LGPL-3.0-or-later"], | ||
287 | } | ||
288 | for k in mapping: | ||
289 | if k in wildcard_licenses: | ||
290 | licenses.remove(k) | ||
291 | for item in mapping[k]: | ||
292 | licenses.add(item) | ||
293 | |||
294 | for l in licenses: | ||
295 | if l in obsolete_license_list(): | ||
296 | bb.fatal("Error, %s is an obsolete license, please use an SPDX reference in INCOMPATIBLE_LICENSE" % l) | ||
297 | if "*" in l: | ||
298 | bb.fatal("Error, %s is an invalid license wildcard entry" % l) | ||
299 | |||
300 | return list(licenses) | ||
301 | |||
302 | def incompatible_license_contains(license, truevalue, falsevalue, d): | ||
303 | license = canonical_license(d, license) | ||
304 | bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split() | ||
305 | bad_licenses = expand_wildcard_licenses(d, bad_licenses) | ||
306 | return truevalue if license in bad_licenses else falsevalue | ||
307 | |||
308 | def incompatible_pkg_license(d, dont_want_licenses, license): | ||
309 | # Handles an "or" or two license sets provided by | ||
310 | # flattened_licenses(), pick one that works if possible. | ||
311 | def choose_lic_set(a, b): | ||
312 | return a if all(license_ok(canonical_license(d, lic), | ||
313 | dont_want_licenses) for lic in a) else b | ||
314 | |||
315 | try: | ||
316 | licenses = flattened_licenses(license, choose_lic_set) | ||
317 | except LicenseError as exc: | ||
318 | bb.fatal('%s: %s' % (d.getVar('P'), exc)) | ||
319 | |||
320 | incompatible_lic = [] | ||
321 | for l in licenses: | ||
322 | license = canonical_license(d, l) | ||
323 | if not license_ok(license, dont_want_licenses): | ||
324 | incompatible_lic.append(license) | ||
325 | |||
326 | return sorted(incompatible_lic) | ||
327 | |||
328 | def incompatible_license(d, dont_want_licenses, package=None): | ||
329 | """ | ||
330 | This function checks if a recipe has only incompatible licenses. It also | ||
331 | takes the 'or' operand into consideration. dont_want_licenses should be passed | ||
332 | as canonical (SPDX) names. | ||
333 | """ | ||
334 | license = d.getVar("LICENSE:%s" % package) if package else None | ||
335 | if not license: | ||
336 | license = d.getVar('LICENSE') | ||
337 | |||
338 | return incompatible_pkg_license(d, dont_want_licenses, license) | ||
339 | |||
340 | def check_license_flags(d): | ||
341 | """ | ||
342 | This function checks if a recipe has any LICENSE_FLAGS that | ||
343 | aren't acceptable. | ||
344 | |||
345 | If it does, it returns all the LICENSE_FLAGS missing from the list | ||
346 | of acceptable license flags, or all of the LICENSE_FLAGS if there | ||
347 | is no list of acceptable flags. | ||
348 | |||
349 | If everything is acceptable, it returns None. | ||
350 | """ | ||
351 | |||
352 | def license_flag_matches(flag, acceptlist, pn): | ||
353 | """ | ||
354 | Return True if flag matches something in acceptlist, False if not. | ||
355 | |||
356 | Before we test a flag against the acceptlist, we append _${PN} | ||
357 | to it. We then try to match that string against the | ||
358 | acceptlist. This covers the normal case, where we expect | ||
359 | LICENSE_FLAGS to be a simple string like 'commercial', which | ||
360 | the user typically matches exactly in the acceptlist by | ||
361 | explicitly appending the package name e.g. 'commercial_foo'. | ||
362 | If we fail the match however, we then split the flag across | ||
363 | '_' and append each fragment and test until we either match or | ||
364 | run out of fragments. | ||
365 | """ | ||
366 | flag_pn = ("%s_%s" % (flag, pn)) | ||
367 | for candidate in acceptlist: | ||
368 | if flag_pn == candidate: | ||
369 | return True | ||
370 | |||
371 | flag_cur = "" | ||
372 | flagments = flag_pn.split("_") | ||
373 | flagments.pop() # we've already tested the full string | ||
374 | for flagment in flagments: | ||
375 | if flag_cur: | ||
376 | flag_cur += "_" | ||
377 | flag_cur += flagment | ||
378 | for candidate in acceptlist: | ||
379 | if flag_cur == candidate: | ||
380 | return True | ||
381 | return False | ||
382 | |||
383 | def all_license_flags_match(license_flags, acceptlist): | ||
384 | """ Return all unmatched flags, None if all flags match """ | ||
385 | pn = d.getVar('PN') | ||
386 | split_acceptlist = acceptlist.split() | ||
387 | flags = [] | ||
388 | for flag in license_flags.split(): | ||
389 | if not license_flag_matches(flag, split_acceptlist, pn): | ||
390 | flags.append(flag) | ||
391 | return flags if flags else None | ||
392 | |||
393 | license_flags = d.getVar('LICENSE_FLAGS') | ||
394 | if license_flags: | ||
395 | acceptlist = d.getVar('LICENSE_FLAGS_ACCEPTED') | ||
396 | if not acceptlist: | ||
397 | return license_flags.split() | ||
398 | unmatched_flags = all_license_flags_match(license_flags, acceptlist) | ||
399 | if unmatched_flags: | ||
400 | return unmatched_flags | ||
401 | return None | ||
402 | |||
403 | def check_license_format(d): | ||
404 | """ | ||
405 | This function checks if LICENSE is well defined, | ||
406 | Validate operators in LICENSES. | ||
407 | No spaces are allowed between LICENSES. | ||
408 | """ | ||
409 | pn = d.getVar('PN') | ||
410 | licenses = d.getVar('LICENSE') | ||
411 | |||
412 | elements = list(filter(lambda x: x.strip(), license_operator.split(licenses))) | ||
413 | for pos, element in enumerate(elements): | ||
414 | if license_pattern.match(element): | ||
415 | if pos > 0 and license_pattern.match(elements[pos - 1]): | ||
416 | oe.qa.handle_error('license-format', | ||
417 | '%s: LICENSE value "%s" has an invalid format - license names ' \ | ||
418 | 'must be separated by the following characters to indicate ' \ | ||
419 | 'the license selection: %s' % | ||
420 | (pn, licenses, license_operator_chars), d) | ||
421 | elif not license_operator.match(element): | ||
422 | oe.qa.handle_error('license-format', | ||
423 | '%s: LICENSE value "%s" has an invalid separator "%s" that is not ' \ | ||
424 | 'in the valid list of separators (%s)' % | ||
425 | (pn, licenses, element, license_operator_chars), d) | ||
426 | |||
427 | def skip_incompatible_package_licenses(d, pkgs): | ||
428 | if not pkgs: | ||
429 | return {} | ||
430 | |||
431 | pn = d.getVar("PN") | ||
432 | |||
433 | check_license = not pn.startswith("nativesdk-") | ||
434 | for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}", | ||
435 | "-crosssdk-${SDK_SYS}", "-crosssdk-initial-${SDK_SYS}", | ||
436 | "-cross-canadian-${TRANSLATED_TARGET_ARCH}"]: | ||
437 | if pn.endswith(d.expand(t)): | ||
438 | check_license = False | ||
439 | if pn.startswith("gcc-source-"): | ||
440 | check_license = False | ||
441 | |||
442 | bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split() | ||
443 | if not check_license or not bad_licenses: | ||
444 | return {} | ||
445 | |||
446 | bad_licenses = expand_wildcard_licenses(d, bad_licenses) | ||
447 | |||
448 | exceptions = (d.getVar("INCOMPATIBLE_LICENSE_EXCEPTIONS") or "").split() | ||
449 | |||
450 | for lic_exception in exceptions: | ||
451 | if ":" in lic_exception: | ||
452 | lic_exception = lic_exception.split(":")[1] | ||
453 | if lic_exception in obsolete_license_list(): | ||
454 | bb.fatal("Obsolete license %s used in INCOMPATIBLE_LICENSE_EXCEPTIONS" % lic_exception) | ||
455 | |||
456 | skipped_pkgs = {} | ||
457 | for pkg in pkgs: | ||
458 | remaining_bad_licenses = apply_pkg_license_exception(pkg, bad_licenses, exceptions) | ||
459 | |||
460 | incompatible_lic = incompatible_license(d, remaining_bad_licenses, pkg) | ||
461 | if incompatible_lic: | ||
462 | skipped_pkgs[pkg] = incompatible_lic | ||
463 | |||
464 | return skipped_pkgs | ||
465 | |||
466 | def tidy_licenses(value): | ||
467 | """ | ||
468 | Flatten, split and sort licenses. | ||
469 | """ | ||
470 | from oe.license import flattened_licenses | ||
471 | |||
472 | def _choose(a, b): | ||
473 | str_a, str_b = sorted((" & ".join(a), " & ".join(b)), key=str.casefold) | ||
474 | return ["(%s | %s)" % (str_a, str_b)] | ||
475 | |||
476 | if not isinstance(value, str): | ||
477 | value = " & ".join(value) | ||
478 | |||
479 | return sorted(list(set(flattened_licenses(value, _choose))), key=str.casefold) | ||
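For illustration, tidy_licenses() can be exercised without a datastore since it only needs flattened_licenses(); the LICENSE string below is an example and the setup assumes meta/lib is importable (i.e. a BitBake environment):

from oe.license import tidy_licenses

# Each '|' alternative collapses into one "(a | b)" entry, case-insensitively
# sorted; '&' operands stay as separate list items.
print(tidy_licenses("GPL-2.0-only & (MIT | BSD-3-Clause)"))
# e.g. ['(BSD-3-Clause | MIT)', 'GPL-2.0-only']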
diff --git a/meta/lib/oe/license_finder.py b/meta/lib/oe/license_finder.py new file mode 100644 index 0000000000..16f5d7c94c --- /dev/null +++ b/meta/lib/oe/license_finder.py | |||
@@ -0,0 +1,179 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | import fnmatch | ||
8 | import hashlib | ||
9 | import logging | ||
10 | import os | ||
11 | import re | ||
12 | |||
13 | import bb | ||
14 | import bb.utils | ||
15 | |||
16 | logger = logging.getLogger("BitBake.OE.LicenseFinder") | ||
17 | |||
18 | def _load_hash_csv(d): | ||
19 | """ | ||
20 | Load a mapping of (checksum: license name) from all files/license-hashes.csv | ||
21 | files that can be found in the available layers. | ||
22 | """ | ||
23 | import csv | ||
24 | md5sums = {} | ||
25 | |||
26 | # Read license md5sums from csv file | ||
27 | for path in d.getVar('BBPATH').split(':'): | ||
28 | csv_path = os.path.join(path, 'files', 'license-hashes.csv') | ||
29 | if os.path.isfile(csv_path): | ||
30 | with open(csv_path, newline='') as csv_file: | ||
31 | reader = csv.DictReader(csv_file, delimiter=',', fieldnames=['md5sum', 'license']) | ||
32 | for row in reader: | ||
33 | md5sums[row['md5sum']] = row['license'] | ||
34 | |||
35 | return md5sums | ||
36 | |||
37 | |||
38 | def _crunch_known_licenses(d): | ||
39 | """ | ||
40 | Calculate the MD5 checksums for the original and "crunched" versions of all | ||
41 | known licenses. | ||
42 | """ | ||
43 | md5sums = {} | ||
44 | |||
45 | lic_dirs = [d.getVar('COMMON_LICENSE_DIR')] + (d.getVar('LICENSE_PATH') or "").split() | ||
46 | for lic_dir in lic_dirs: | ||
47 | for fn in os.listdir(lic_dir): | ||
48 | path = os.path.join(lic_dir, fn) | ||
49 | # Hash the exact contents | ||
50 | md5value = bb.utils.md5_file(path) | ||
51 | md5sums[md5value] = fn | ||
52 | # Also hash a "crunched" version | ||
53 | md5value = _crunch_license(path) | ||
54 | md5sums[md5value] = fn | ||
55 | |||
56 | return md5sums | ||
57 | |||
58 | |||
59 | def _crunch_license(licfile): | ||
60 | ''' | ||
61 | Remove non-material text from a license file and then calculate its | ||
62 | md5sum. This works well for licenses that contain a copyright statement, | ||
63 | but is also a useful way to handle people's insistence upon reformatting | ||
64 | the license text slightly (with no material difference to the text of the | ||
65 | license). | ||
66 | ''' | ||
67 | |||
68 | import oe.utils | ||
69 | |||
70 | # Note: these are carefully constructed! | ||
71 | license_title_re = re.compile(r'^#*\(? *(This is )?([Tt]he )?.{0,15} ?[Ll]icen[sc]e( \(.{1,10}\))?\)?[:\.]? ?#*$') | ||
72 | license_statement_re = re.compile(r'^((This (project|software)|.{1,10}) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$') | ||
73 | copyright_re = re.compile(r'^ *[#\*]* *(Modified work |MIT LICENSED )?Copyright ?(\([cC]\))? .*$') | ||
74 | disclaimer_re = re.compile(r'^ *\*? ?All [Rr]ights [Rr]eserved\.$') | ||
75 | email_re = re.compile(r'^.*<[\w\.-]*@[\w\.\-]*>$') | ||
76 | header_re = re.compile(r'^(\/\**!?)? ?[\-=\*]* ?(\*\/)?$') | ||
77 | tag_re = re.compile(r'^ *@?\(?([Ll]icense|MIT)\)?$') | ||
78 | url_re = re.compile(r'^ *[#\*]* *https?:\/\/[\w\.\/\-]+$') | ||
79 | |||
80 | lictext = [] | ||
81 | with open(licfile, 'r', errors='surrogateescape') as f: | ||
82 | for line in f: | ||
83 | # Drop opening statements | ||
84 | if copyright_re.match(line): | ||
85 | continue | ||
86 | elif disclaimer_re.match(line): | ||
87 | continue | ||
88 | elif email_re.match(line): | ||
89 | continue | ||
90 | elif header_re.match(line): | ||
91 | continue | ||
92 | elif tag_re.match(line): | ||
93 | continue | ||
94 | elif url_re.match(line): | ||
95 | continue | ||
96 | elif license_title_re.match(line): | ||
97 | continue | ||
98 | elif license_statement_re.match(line): | ||
99 | continue | ||
100 | # Strip comment symbols | ||
101 | line = line.replace('*', '') \ | ||
102 | .replace('#', '') | ||
103 | # Unify spelling | ||
104 | line = line.replace('sub-license', 'sublicense') | ||
105 | # Squash spaces | ||
106 | line = oe.utils.squashspaces(line.strip()) | ||
107 | # Replace smart quotes, double quotes and backticks with single quotes | ||
108 | line = line.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u201c","'").replace(u"\u201d", "'").replace('"', '\'').replace('`', '\'') | ||
109 | # Unify brackets | ||
110 | line = line.replace("{", "[").replace("}", "]") | ||
111 | if line: | ||
112 | lictext.append(line) | ||
113 | |||
114 | m = hashlib.md5() | ||
115 | try: | ||
116 | m.update(' '.join(lictext).encode('utf-8')) | ||
117 | md5val = m.hexdigest() | ||
118 | except UnicodeEncodeError: | ||
119 | md5val = None | ||
120 | return md5val | ||
121 | |||
122 | |||
123 | def find_license_files(srctree, first_only=False): | ||
124 | """ | ||
125 | Search srctree for files that look like they could be licenses. | ||
126 | If first_only is True, only return the first file found. | ||
127 | """ | ||
128 | licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10'] | ||
129 | skip_extensions = (".html", ".js", ".json", ".svg", ".ts", ".go", ".sh") | ||
130 | licfiles = [] | ||
131 | for root, dirs, files in os.walk(srctree): | ||
132 | # Sort files so that LICENSE is before LICENSE.subcomponent, which is | ||
133 | # meaningful if first_only is set. | ||
134 | for fn in sorted(files): | ||
135 | if fn.endswith(skip_extensions): | ||
136 | continue | ||
137 | for spec in licspecs: | ||
138 | if fnmatch.fnmatch(fn, spec): | ||
139 | fullpath = os.path.join(root, fn) | ||
140 | if fullpath not in licfiles: | ||
141 | licfiles.append(fullpath) | ||
142 | if first_only: | ||
143 | return licfiles | ||
144 | |||
145 | return licfiles | ||
146 | |||
147 | |||
148 | def match_licenses(licfiles, srctree, d, extra_hashes={}): | ||
149 | md5sums = {} | ||
150 | md5sums.update(_load_hash_csv(d)) | ||
151 | md5sums.update(_crunch_known_licenses(d)) | ||
152 | md5sums.update(extra_hashes) | ||
153 | |||
154 | licenses = [] | ||
155 | for licfile in sorted(licfiles): | ||
156 | resolved_licfile = d.expand(licfile) | ||
157 | md5value = bb.utils.md5_file(resolved_licfile) | ||
158 | license = md5sums.get(md5value, None) | ||
159 | if not license: | ||
160 | crunched_md5 = _crunch_license(resolved_licfile) | ||
161 | license = md5sums.get(crunched_md5, None) | ||
162 | if not license: | ||
163 | license = 'Unknown' | ||
164 | logger.info("Please add the following line for '%s' to a 'license-hashes.csv' " \ | ||
165 | "and replace `Unknown` with the license:\n" \ | ||
166 | "%s,Unknown" % (os.path.relpath(licfile, srctree + "/.."), md5value)) | ||
167 | |||
168 | licenses.append((license, os.path.relpath(licfile, srctree), md5value)) | ||
169 | |||
170 | return licenses | ||
171 | |||
172 | |||
173 | def find_licenses(srctree, d, first_only=False, extra_hashes={}): | ||
174 | licfiles = find_license_files(srctree, first_only) | ||
175 | licenses = match_licenses(licfiles, srctree, d, extra_hashes) | ||
176 | |||
177 | # FIXME should we grab at least one source file with a license header and add that too? | ||
178 | |||
179 | return licenses | ||
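As a usage note, find_license_files() is the only helper here that works without a datastore, since the hash lookups need COMMON_LICENSE_DIR from d. A hedged sketch with an example source tree path:

from oe.license_finder import find_license_files

# Walks the tree, skipping extensions such as .html/.js/.go, and returns
# candidate license files; with first_only=True it stops at the first hit.
for path in find_license_files("/tmp/example-src"):
    print(path)  # e.g. /tmp/example-src/LICENSE, /tmp/example-src/COPYING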
diff --git a/meta/lib/oe/manifest.py b/meta/lib/oe/manifest.py index 61f18adc4a..cf7a13c247 100644 --- a/meta/lib/oe/manifest.py +++ b/meta/lib/oe/manifest.py | |||
@@ -200,7 +200,3 @@ def create_manifest(d, final_manifest=False, manifest_dir=None, | |||
200 | manifest.create_final() | 200 | manifest.create_final() |
201 | else: | 201 | else: |
202 | manifest.create_initial() | 202 | manifest.create_initial() |
203 | |||
204 | |||
205 | if __name__ == "__main__": | ||
206 | pass | ||
diff --git a/meta/lib/oe/package.py b/meta/lib/oe/package.py index 1511ba47c4..ce69151e5d 100644 --- a/meta/lib/oe/package.py +++ b/meta/lib/oe/package.py | |||
@@ -14,10 +14,12 @@ import glob | |||
14 | import stat | 14 | import stat |
15 | import mmap | 15 | import mmap |
16 | import subprocess | 16 | import subprocess |
17 | import shutil | ||
17 | 18 | ||
19 | import bb.parse | ||
18 | import oe.cachedpath | 20 | import oe.cachedpath |
19 | 21 | ||
20 | def runstrip(arg): | 22 | def runstrip(file, elftype, strip, extra_strip_sections=''): |
21 | # Function to strip a single file, called from split_and_strip_files below | 23 | # Function to strip a single file, called from split_and_strip_files below |
22 | # A working 'file' (one which works on the target architecture) | 24 | # A working 'file' (one which works on the target architecture) |
23 | # | 25 | # |
@@ -27,12 +29,6 @@ def runstrip(arg): | |||
27 | # 8 - shared library | 29 | # 8 - shared library |
28 | # 16 - kernel module | 30 | # 16 - kernel module |
29 | 31 | ||
30 | if len(arg) == 3: | ||
31 | (file, elftype, strip) = arg | ||
32 | extra_strip_sections = '' | ||
33 | else: | ||
34 | (file, elftype, strip, extra_strip_sections) = arg | ||
35 | |||
36 | newmode = None | 32 | newmode = None |
37 | if not os.access(file, os.W_OK) or os.access(file, os.R_OK): | 33 | if not os.access(file, os.W_OK) or os.access(file, os.R_OK): |
38 | origmode = os.stat(file)[stat.ST_MODE] | 34 | origmode = os.stat(file)[stat.ST_MODE] |
@@ -195,20 +191,33 @@ def strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, max_process, qa_alre | |||
195 | 191 | ||
196 | oe.utils.multiprocess_launch_mp(runstrip, sfiles, max_process) | 192 | oe.utils.multiprocess_launch_mp(runstrip, sfiles, max_process) |
197 | 193 | ||
194 | TRANSLATE = ( | ||
195 | ("@", "@at@"), | ||
196 | (" ", "@space@"), | ||
197 | ("\t", "@tab@"), | ||
198 | ("[", "@openbrace@"), | ||
199 | ("]", "@closebrace@"), | ||
200 | ("_", "@underscore@"), | ||
201 | (":", "@colon@"), | ||
202 | ) | ||
198 | 203 | ||
199 | def file_translate(file): | 204 | def file_translate(file): |
200 | ft = file.replace("@", "@at@") | 205 | ft = file |
201 | ft = ft.replace(" ", "@space@") | 206 | for s, replace in TRANSLATE: |
202 | ft = ft.replace("\t", "@tab@") | 207 | ft = ft.replace(s, replace) |
203 | ft = ft.replace("[", "@openbrace@") | 208 | |
204 | ft = ft.replace("]", "@closebrace@") | 209 | return ft |
205 | ft = ft.replace("_", "@underscore@") | 210 | |
211 | def file_reverse_translate(file): | ||
212 | ft = file | ||
213 | for s, replace in reversed(TRANSLATE): | ||
214 | ft = ft.replace(replace, s) | ||
215 | |||
206 | return ft | 216 | return ft |
207 | 217 | ||
208 | def filedeprunner(arg): | 218 | def filedeprunner(pkg, pkgfiles, rpmdeps, pkgdest): |
209 | import re, subprocess, shlex | 219 | import re, subprocess, shlex |
210 | 220 | ||
211 | (pkg, pkgfiles, rpmdeps, pkgdest) = arg | ||
212 | provides = {} | 221 | provides = {} |
213 | requires = {} | 222 | requires = {} |
214 | 223 | ||
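The table-driven rewrite of file_translate() makes the escaping reversible: file_reverse_translate() applies the substitutions in reverse order so they undo cleanly. A round-trip sketch (assumes a BitBake environment so that oe.package imports cleanly):

from oe.package import file_translate, file_reverse_translate

name = "/usr/lib/pkg config [v1]:alt"
escaped = file_translate(name)
print(escaped)
# /usr/lib/pkg@space@config@space@@openbrace@v1@closebrace@@colon@alt
assert file_reverse_translate(escaped) == name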
@@ -648,6 +657,8 @@ def split_locales(d): | |||
648 | except ValueError: | 657 | except ValueError: |
649 | locale_index = len(packages) | 658 | locale_index = len(packages) |
650 | 659 | ||
660 | lic = d.getVar("LICENSE:" + pn + "-locale") | ||
661 | |||
651 | localepaths = [] | 662 | localepaths = [] |
652 | locales = set() | 663 | locales = set() |
653 | for localepath in (d.getVar('LOCALE_PATHS') or "").split(): | 664 | for localepath in (d.getVar('LOCALE_PATHS') or "").split(): |
@@ -683,6 +694,8 @@ def split_locales(d): | |||
683 | d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln)) | 694 | d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln)) |
684 | d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l)) | 695 | d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l)) |
685 | d.setVar('DESCRIPTION:' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l)) | 696 | d.setVar('DESCRIPTION:' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l)) |
697 | if lic: | ||
698 | d.setVar('LICENSE:' + pkg, lic) | ||
686 | if locale_section: | 699 | if locale_section: |
687 | d.setVar('SECTION:' + pkg, locale_section) | 700 | d.setVar('SECTION:' + pkg, locale_section) |
688 | 701 | ||
@@ -979,7 +992,7 @@ def copydebugsources(debugsrcdir, sources, d): | |||
979 | 992 | ||
980 | prefixmap = {} | 993 | prefixmap = {} |
981 | for flag in cflags.split(): | 994 | for flag in cflags.split(): |
982 | if not flag.startswith("-fdebug-prefix-map"): | 995 | if not flag.startswith("-ffile-prefix-map"): |
983 | continue | 996 | continue |
984 | if "recipe-sysroot" in flag: | 997 | if "recipe-sysroot" in flag: |
985 | continue | 998 | continue |
@@ -1037,6 +1050,51 @@ def copydebugsources(debugsrcdir, sources, d): | |||
1037 | if os.path.exists(p) and not os.listdir(p): | 1050 | if os.path.exists(p) and not os.listdir(p): |
1038 | os.rmdir(p) | 1051 | os.rmdir(p) |
1039 | 1052 | ||
1053 | @bb.parse.vardepsexclude("BB_NUMBER_THREADS") | ||
1054 | def save_debugsources_info(debugsrcdir, sources_raw, d): | ||
1055 | import json | ||
1056 | import bb.compress.zstd | ||
1057 | if debugsrcdir and sources_raw: | ||
1058 | debugsources_file = d.expand("${PKGDESTWORK}/debugsources/${PN}-debugsources.json.zstd") | ||
1059 | debugsources_dir = os.path.dirname(debugsources_file) | ||
1060 | if not os.path.isdir(debugsources_dir): | ||
1061 | bb.utils.mkdirhier(debugsources_dir) | ||
1062 | bb.utils.remove(debugsources_file) | ||
1063 | |||
1064 | workdir = d.getVar("WORKDIR") | ||
1065 | pn = d.getVar('PN') | ||
1066 | |||
1067 | # Kernel sources live in a different directory and are a special case: | ||
1068 | # format the sources as SPDX expects by replacing /usr/src/kernel/ | ||
1069 | # with BP/ | ||
1070 | kernel_src = d.getVar('KERNEL_SRC_PATH') | ||
1071 | bp = d.getVar('BP') | ||
1072 | sources_dict = {} | ||
1073 | for file, src_files in sources_raw: | ||
1074 | file_clean = file.replace(f"{workdir}/package/", "") | ||
1075 | sources_clean = [ | ||
1076 | src.replace(f"{debugsrcdir}/{pn}/", "") | ||
1077 | if not kernel_src else src.replace(f"{kernel_src}/", f"{bp}/") | ||
1078 | for src in src_files | ||
1079 | if not any(keyword in src for keyword in ("<internal>", "<built-in>")) and not src.endswith("/") | ||
1080 | ] | ||
1081 | sources_dict[file_clean] = sorted(sources_clean) | ||
1082 | num_threads = int(d.getVar("BB_NUMBER_THREADS")) | ||
1083 | with bb.compress.zstd.open(debugsources_file, "wt", encoding="utf-8", num_threads=num_threads) as f: | ||
1084 | json.dump(sources_dict, f, sort_keys=True) | ||
1085 | |||
1086 | @bb.parse.vardepsexclude("BB_NUMBER_THREADS") | ||
1087 | def read_debugsources_info(d): | ||
1088 | import json | ||
1089 | import bb.compress.zstd | ||
1090 | try: | ||
1091 | fn = d.expand("${PKGDESTWORK}/debugsources/${PN}-debugsources.json.zstd") | ||
1092 | num_threads = int(d.getVar("BB_NUMBER_THREADS")) | ||
1093 | with bb.compress.zstd.open(fn, "rt", encoding="utf-8", num_threads=num_threads) as f: | ||
1094 | return json.load(f) | ||
1095 | except FileNotFoundError: | ||
1096 | bb.debug(1, f"File not found: {fn}") | ||
1097 | return None | ||
1040 | 1098 | ||
1041 | def process_split_and_strip_files(d): | 1099 | def process_split_and_strip_files(d): |
1042 | cpath = oe.cachedpath.CachedPath() | 1100 | cpath = oe.cachedpath.CachedPath() |
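For inspection outside the packaging task, the manifest written by save_debugsources_info() is plain zstd-compressed JSON mapping packaged files to their source files. A sketch with an example path (bb.compress.zstd is BitBake's zstd file wrapper):

import json
import bb.compress.zstd  # BitBake's zstd-compressed file helper

fn = "/tmp/example-debugsources.json.zstd"  # example path
with bb.compress.zstd.open(fn, "rt", encoding="utf-8") as f:
    sources = json.load(f)  # {packaged file -> sorted source file list}
for binary, srcs in sources.items():
    print(binary, "uses", len(srcs), "source files")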
@@ -1064,6 +1122,7 @@ def process_split_and_strip_files(d): | |||
1064 | d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'): | 1122 | d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'): |
1065 | checkelf = {} | 1123 | checkelf = {} |
1066 | checkelflinks = {} | 1124 | checkelflinks = {} |
1125 | checkstatic = {} | ||
1067 | for root, dirs, files in cpath.walk(dvar): | 1126 | for root, dirs, files in cpath.walk(dvar): |
1068 | for f in files: | 1127 | for f in files: |
1069 | file = os.path.join(root, f) | 1128 | file = os.path.join(root, f) |
@@ -1077,10 +1136,6 @@ def process_split_and_strip_files(d): | |||
1077 | if file in skipfiles: | 1136 | if file in skipfiles: |
1078 | continue | 1137 | continue |
1079 | 1138 | ||
1080 | if oe.package.is_static_lib(file): | ||
1081 | staticlibs.append(file) | ||
1082 | continue | ||
1083 | |||
1084 | try: | 1139 | try: |
1085 | ltarget = cpath.realpath(file, dvar, False) | 1140 | ltarget = cpath.realpath(file, dvar, False) |
1086 | s = cpath.lstat(ltarget) | 1141 | s = cpath.lstat(ltarget) |
@@ -1092,6 +1147,13 @@ def process_split_and_strip_files(d): | |||
1092 | continue | 1147 | continue |
1093 | if not s: | 1148 | if not s: |
1094 | continue | 1149 | continue |
1150 | |||
1151 | if oe.package.is_static_lib(file): | ||
1152 | # Use a reference of device ID and inode number to identify files | ||
1153 | file_reference = "%d_%d" % (s.st_dev, s.st_ino) | ||
1154 | checkstatic[file] = (file, file_reference) | ||
1155 | continue | ||
1156 | |||
1095 | # Check its an executable | 1157 | # Check its an executable |
1096 | if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \ | 1158 | if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \ |
1097 | or (s[stat.ST_MODE] & stat.S_IXOTH) \ | 1159 | or (s[stat.ST_MODE] & stat.S_IXOTH) \ |
@@ -1156,6 +1218,27 @@ def process_split_and_strip_files(d): | |||
1156 | # Modified the file so clear the cache | 1218 | # Modified the file so clear the cache |
1157 | cpath.updatecache(file) | 1219 | cpath.updatecache(file) |
1158 | 1220 | ||
1221 | # Do the same hardlink processing as above, but for static libraries | ||
1222 | results = list(checkstatic.keys()) | ||
1223 | |||
1224 | # As above, sort the results. | ||
1225 | results.sort() | ||
1226 | |||
1227 | for file in results: | ||
1228 | # Use a reference of device ID and inode number to identify files | ||
1229 | file_reference = checkstatic[file][1] | ||
1230 | if file_reference in inodes: | ||
1231 | os.unlink(file) | ||
1232 | os.link(inodes[file_reference][0], file) | ||
1233 | inodes[file_reference].append(file) | ||
1234 | else: | ||
1235 | inodes[file_reference] = [file] | ||
1236 | # break hardlink | ||
1237 | bb.utils.break_hardlinks(file) | ||
1238 | staticlibs.append(file) | ||
1239 | # Modified the file so clear the cache | ||
1240 | cpath.updatecache(file) | ||
1241 | |||
1159 | def strip_pkgd_prefix(f): | 1242 | def strip_pkgd_prefix(f): |
1160 | nonlocal dvar | 1243 | nonlocal dvar |
1161 | 1244 | ||
@@ -1194,11 +1277,24 @@ def process_split_and_strip_files(d): | |||
1194 | dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"] | 1277 | dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"] |
1195 | fpath = dvar + dest | 1278 | fpath = dvar + dest |
1196 | ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"] | 1279 | ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"] |
1197 | bb.utils.mkdirhier(os.path.dirname(fpath)) | 1280 | if os.access(ftarget, os.R_OK): |
1198 | # Only one hardlink of separated debug info file in each directory | 1281 | bb.utils.mkdirhier(os.path.dirname(fpath)) |
1199 | if not os.access(fpath, os.R_OK): | 1282 | # Only one hardlink of separated debug info file in each directory |
1200 | #bb.note("Link %s -> %s" % (fpath, ftarget)) | 1283 | if not os.access(fpath, os.R_OK): |
1201 | os.link(ftarget, fpath) | 1284 | #bb.note("Link %s -> %s" % (fpath, ftarget)) |
1285 | os.link(ftarget, fpath) | ||
1286 | elif (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'): | ||
1287 | deststatic = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(file) + dv["staticappend"] | ||
1288 | fpath = dvar + deststatic | ||
1289 | ftarget = dvar + dv["staticlibdir"] + os.path.dirname(target) + dv["staticdir"] + "/" + os.path.basename(target) + dv["staticappend"] | ||
1290 | if os.access(ftarget, os.R_OK): | ||
1291 | bb.utils.mkdirhier(os.path.dirname(fpath)) | ||
1292 | # Only one hardlink of separated debug info file in each directory | ||
1293 | if not os.access(fpath, os.R_OK): | ||
1294 | #bb.note("Link %s -> %s" % (fpath, ftarget)) | ||
1295 | os.link(ftarget, fpath) | ||
1296 | else: | ||
1297 | bb.note("Unable to find inode link target %s" % (target)) | ||
1202 | 1298 | ||
1203 | # Create symlinks for all cases we were able to split symbols | 1299 | # Create symlinks for all cases we were able to split symbols |
1204 | for file in symlinks: | 1300 | for file in symlinks: |
@@ -1230,6 +1326,9 @@ def process_split_and_strip_files(d): | |||
1230 | # Process the dv["srcdir"] if requested... | 1326 | # Process the dv["srcdir"] if requested... |
1231 | # This copies and places the referenced sources for later debugging... | 1327 | # This copies and places the referenced sources for later debugging... |
1232 | copydebugsources(dv["srcdir"], sources, d) | 1328 | copydebugsources(dv["srcdir"], sources, d) |
1329 | |||
1330 | # Save source info to be accessible to other tasks | ||
1331 | save_debugsources_info(dv["srcdir"], results, d) | ||
1233 | # | 1332 | # |
1234 | # End of debug splitting | 1333 | # End of debug splitting |
1235 | # | 1334 | # |
@@ -1394,10 +1493,10 @@ def populate_packages(d): | |||
1394 | 1493 | ||
1395 | # Handle excluding packages with incompatible licenses | 1494 | # Handle excluding packages with incompatible licenses |
1396 | package_list = [] | 1495 | package_list = [] |
1496 | skipped_pkgs = oe.license.skip_incompatible_package_licenses(d, packages) | ||
1397 | for pkg in packages: | 1497 | for pkg in packages: |
1398 | licenses = d.getVar('_exclude_incompatible-' + pkg) | 1498 | if pkg in skipped_pkgs: |
1399 | if licenses: | 1499 | msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, skipped_pkgs[pkg]) |
1400 | msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, licenses) | ||
1401 | oe.qa.handle_error("incompatible-license", msg, d) | 1500 | oe.qa.handle_error("incompatible-license", msg, d) |
1402 | else: | 1501 | else: |
1403 | package_list.append(pkg) | 1502 | package_list.append(pkg) |
@@ -1566,7 +1665,6 @@ def process_shlibs(pkgfiles, d): | |||
1566 | needs_ldconfig = False | 1665 | needs_ldconfig = False |
1567 | needed = set() | 1666 | needed = set() |
1568 | sonames = set() | 1667 | sonames = set() |
1569 | renames = [] | ||
1570 | ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '') | 1668 | ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '') |
1571 | cmd = d.getVar('OBJDUMP') + " -p " + shlex.quote(file) + " 2>/dev/null" | 1669 | cmd = d.getVar('OBJDUMP') + " -p " + shlex.quote(file) + " 2>/dev/null" |
1572 | fd = os.popen(cmd) | 1670 | fd = os.popen(cmd) |
@@ -1594,11 +1692,9 @@ def process_shlibs(pkgfiles, d): | |||
1594 | sonames.add(prov) | 1692 | sonames.add(prov) |
1595 | if libdir_re.match(os.path.dirname(file)): | 1693 | if libdir_re.match(os.path.dirname(file)): |
1596 | needs_ldconfig = True | 1694 | needs_ldconfig = True |
1597 | if needs_ldconfig and snap_symlinks and (os.path.basename(file) != this_soname): | 1695 | return (needs_ldconfig, needed, sonames) |
1598 | renames.append((file, os.path.join(os.path.dirname(file), this_soname))) | ||
1599 | return (needs_ldconfig, needed, sonames, renames) | ||
1600 | 1696 | ||
1601 | def darwin_so(file, needed, sonames, renames, pkgver): | 1697 | def darwin_so(file, needed, sonames, pkgver): |
1602 | if not os.path.exists(file): | 1698 | if not os.path.exists(file): |
1603 | return | 1699 | return |
1604 | ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '') | 1700 | ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '') |
@@ -1650,7 +1746,7 @@ def process_shlibs(pkgfiles, d): | |||
1650 | if name and name not in needed[pkg]: | 1746 | if name and name not in needed[pkg]: |
1651 | needed[pkg].add((name, file, tuple())) | 1747 | needed[pkg].add((name, file, tuple())) |
1652 | 1748 | ||
1653 | def mingw_dll(file, needed, sonames, renames, pkgver): | 1749 | def mingw_dll(file, needed, sonames, pkgver): |
1654 | if not os.path.exists(file): | 1750 | if not os.path.exists(file): |
1655 | return | 1751 | return |
1656 | 1752 | ||
@@ -1669,11 +1765,6 @@ def process_shlibs(pkgfiles, d): | |||
1669 | if dllname: | 1765 | if dllname: |
1670 | needed[pkg].add((dllname, file, tuple())) | 1766 | needed[pkg].add((dllname, file, tuple())) |
1671 | 1767 | ||
1672 | if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS') == "1": | ||
1673 | snap_symlinks = True | ||
1674 | else: | ||
1675 | snap_symlinks = False | ||
1676 | |||
1677 | needed = {} | 1768 | needed = {} |
1678 | 1769 | ||
1679 | shlib_provider = oe.package.read_shlib_providers(d) | 1770 | shlib_provider = oe.package.read_shlib_providers(d) |
@@ -1692,16 +1783,15 @@ def process_shlibs(pkgfiles, d): | |||
1692 | 1783 | ||
1693 | needed[pkg] = set() | 1784 | needed[pkg] = set() |
1694 | sonames = set() | 1785 | sonames = set() |
1695 | renames = [] | ||
1696 | linuxlist = [] | 1786 | linuxlist = [] |
1697 | for file in pkgfiles[pkg]: | 1787 | for file in pkgfiles[pkg]: |
1698 | soname = None | 1788 | soname = None |
1699 | if cpath.islink(file): | 1789 | if cpath.islink(file): |
1700 | continue | 1790 | continue |
1701 | if hostos.startswith("darwin"): | 1791 | if hostos.startswith("darwin"): |
1702 | darwin_so(file, needed, sonames, renames, pkgver) | 1792 | darwin_so(file, needed, sonames, pkgver) |
1703 | elif hostos.startswith("mingw"): | 1793 | elif hostos.startswith("mingw"): |
1704 | mingw_dll(file, needed, sonames, renames, pkgver) | 1794 | mingw_dll(file, needed, sonames, pkgver) |
1705 | elif os.access(file, os.X_OK) or lib_re.match(file): | 1795 | elif os.access(file, os.X_OK) or lib_re.match(file): |
1706 | linuxlist.append(file) | 1796 | linuxlist.append(file) |
1707 | 1797 | ||
@@ -1711,14 +1801,8 @@ def process_shlibs(pkgfiles, d): | |||
1711 | ldconfig = r[0] | 1801 | ldconfig = r[0] |
1712 | needed[pkg] |= r[1] | 1802 | needed[pkg] |= r[1] |
1713 | sonames |= r[2] | 1803 | sonames |= r[2] |
1714 | renames.extend(r[3]) | ||
1715 | needs_ldconfig = needs_ldconfig or ldconfig | 1804 | needs_ldconfig = needs_ldconfig or ldconfig |
1716 | 1805 | ||
1717 | for (old, new) in renames: | ||
1718 | bb.note("Renaming %s to %s" % (old, new)) | ||
1719 | bb.utils.rename(old, new) | ||
1720 | pkgfiles[pkg].remove(old) | ||
1721 | |||
1722 | shlibs_file = os.path.join(shlibswork_dir, pkg + ".list") | 1806 | shlibs_file = os.path.join(shlibswork_dir, pkg + ".list") |
1723 | if len(sonames): | 1807 | if len(sonames): |
1724 | with open(shlibs_file, 'w') as fd: | 1808 | with open(shlibs_file, 'w') as fd: |
@@ -1839,7 +1923,7 @@ def process_pkgconfig(pkgfiles, d): | |||
1839 | if m: | 1923 | if m: |
1840 | hdr = m.group(1) | 1924 | hdr = m.group(1) |
1841 | exp = pd.expand(m.group(2)) | 1925 | exp = pd.expand(m.group(2)) |
1842 | if hdr == 'Requires': | 1926 | if hdr == 'Requires' or hdr == 'Requires.private': |
1843 | pkgconfig_needed[pkg] += exp.replace(',', ' ').split() | 1927 | pkgconfig_needed[pkg] += exp.replace(',', ' ').split() |
1844 | continue | 1928 | continue |
1845 | m = var_re.match(l) | 1929 | m = var_re.match(l) |
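To see what the widened header match now gathers, here is a minimal sketch of the Requires/Requires.private handling against an example .pc fragment; the field regex below is an assumption standing in for the real field parsing above:

import re

field_re = re.compile(r'(^.+?):\s+(.*)')  # assumed "Header: value" matcher
pkgconfig_needed = []
for l in ("Requires: glib-2.0", "Requires.private: zlib, libpng"):
    m = field_re.match(l)
    hdr, exp = m.group(1), m.group(2)
    if hdr == 'Requires' or hdr == 'Requires.private':
        pkgconfig_needed += exp.replace(',', ' ').split()
print(pkgconfig_needed)  # ['glib-2.0', 'zlib', 'libpng']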
diff --git a/meta/lib/oe/package_manager/__init__.py b/meta/lib/oe/package_manager/__init__.py index 6774cdb794..2100a97c12 100644 --- a/meta/lib/oe/package_manager/__init__.py +++ b/meta/lib/oe/package_manager/__init__.py | |||
@@ -365,45 +365,43 @@ class PackageManager(object, metaclass=ABCMeta): | |||
365 | for complementary_linguas in (self.d.getVar('IMAGE_LINGUAS_COMPLEMENTARY') or "").split(): | 365 | for complementary_linguas in (self.d.getVar('IMAGE_LINGUAS_COMPLEMENTARY') or "").split(): |
366 | globs += (" " + complementary_linguas) % lang | 366 | globs += (" " + complementary_linguas) % lang |
367 | 367 | ||
368 | if globs is None: | 368 | if globs: |
369 | return | 369 | # we need to write the list of installed packages to a file because the |
370 | 370 | # oe-pkgdata-util reads it from a file | |
371 | # we need to write the list of installed packages to a file because the | 371 | with tempfile.NamedTemporaryFile(mode="w+", prefix="installed-pkgs") as installed_pkgs: |
372 | # oe-pkgdata-util reads it from a file | 372 | pkgs = self.list_installed() |
373 | with tempfile.NamedTemporaryFile(mode="w+", prefix="installed-pkgs") as installed_pkgs: | 373 | |
374 | pkgs = self.list_installed() | 374 | provided_pkgs = set() |
375 | 375 | for pkg in pkgs.values(): | |
376 | provided_pkgs = set() | 376 | provided_pkgs |= set(pkg.get('provs', [])) |
377 | for pkg in pkgs.values(): | 377 | |
378 | provided_pkgs |= set(pkg.get('provs', [])) | 378 | output = oe.utils.format_pkg_list(pkgs, "arch") |
379 | 379 | installed_pkgs.write(output) | |
380 | output = oe.utils.format_pkg_list(pkgs, "arch") | 380 | installed_pkgs.flush() |
381 | installed_pkgs.write(output) | 381 | |
382 | installed_pkgs.flush() | 382 | cmd = ["oe-pkgdata-util", |
383 | 383 | "-p", self.d.getVar('PKGDATA_DIR'), "glob", installed_pkgs.name, | |
384 | cmd = ["oe-pkgdata-util", | 384 | globs] |
385 | "-p", self.d.getVar('PKGDATA_DIR'), "glob", installed_pkgs.name, | 385 | exclude = self.d.getVar('PACKAGE_EXCLUDE_COMPLEMENTARY') |
386 | globs] | 386 | if exclude: |
387 | exclude = self.d.getVar('PACKAGE_EXCLUDE_COMPLEMENTARY') | 387 | cmd.extend(['--exclude=' + '|'.join(exclude.split())]) |
388 | if exclude: | 388 | try: |
389 | cmd.extend(['--exclude=' + '|'.join(exclude.split())]) | 389 | bb.note('Running %s' % cmd) |
390 | try: | 390 | proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) |
391 | bb.note('Running %s' % cmd) | 391 | stdout, stderr = proc.communicate() |
392 | proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) | 392 | if stderr: bb.note(stderr.decode("utf-8")) |
393 | stdout, stderr = proc.communicate() | 393 | complementary_pkgs = stdout.decode("utf-8") |
394 | if stderr: bb.note(stderr.decode("utf-8")) | 394 | complementary_pkgs = set(complementary_pkgs.split()) |
395 | complementary_pkgs = stdout.decode("utf-8") | 395 | skip_pkgs = sorted(complementary_pkgs & provided_pkgs) |
396 | complementary_pkgs = set(complementary_pkgs.split()) | 396 | install_pkgs = sorted(complementary_pkgs - provided_pkgs) |
397 | skip_pkgs = sorted(complementary_pkgs & provided_pkgs) | 397 | bb.note("Installing complementary packages ... %s (skipped already provided packages %s)" % ( |
398 | install_pkgs = sorted(complementary_pkgs - provided_pkgs) | 398 | ' '.join(install_pkgs), |
399 | bb.note("Installing complementary packages ... %s (skipped already provided packages %s)" % ( | 399 | ' '.join(skip_pkgs))) |
400 | ' '.join(install_pkgs), | 400 | self.install(install_pkgs, hard_depends_only=True) |
401 | ' '.join(skip_pkgs))) | 401 | except subprocess.CalledProcessError as e: |
402 | self.install(install_pkgs, hard_depends_only=True) | 402 | bb.fatal("Could not compute complementary packages list. Command " |
403 | except subprocess.CalledProcessError as e: | 403 | "'%s' returned %d:\n%s" % |
404 | bb.fatal("Could not compute complementary packages list. Command " | 404 | (' '.join(cmd), e.returncode, e.output.decode("utf-8"))) |
405 | "'%s' returned %d:\n%s" % | ||
406 | (' '.join(cmd), e.returncode, e.output.decode("utf-8"))) | ||
407 | 405 | ||
408 | if self.d.getVar('IMAGE_LOCALES_ARCHIVE') == '1': | 406 | if self.d.getVar('IMAGE_LOCALES_ARCHIVE') == '1': |
409 | target_arch = self.d.getVar('TARGET_ARCH') | 407 | target_arch = self.d.getVar('TARGET_ARCH') |
@@ -449,7 +447,7 @@ class PackageManager(object, metaclass=ABCMeta): | |||
449 | return res | 447 | return res |
450 | return _append(uris, base_paths) | 448 | return _append(uris, base_paths) |
451 | 449 | ||
452 | def create_packages_dir(d, subrepo_dir, deploydir, taskname, filterbydependencies): | 450 | def create_packages_dir(d, subrepo_dir, deploydir, taskname, filterbydependencies, include_self=False): |
453 | """ | 451 | """ |
454 | Go through our do_package_write_X dependencies and hardlink the packages we depend | 452 | Go through our do_package_write_X dependencies and hardlink the packages we depend |
455 | upon into the repo directory. This prevents us seeing other packages that may | 453 | upon into the repo directory. This prevents us seeing other packages that may |
@@ -486,14 +484,17 @@ def create_packages_dir(d, subrepo_dir, deploydir, taskname, filterbydependencie | |||
486 | bb.fatal("Couldn't find ourself in BB_TASKDEPDATA?") | 484 | bb.fatal("Couldn't find ourself in BB_TASKDEPDATA?") |
487 | pkgdeps = set() | 485 | pkgdeps = set() |
488 | start = [start] | 486 | start = [start] |
489 | seen = set(start) | 487 | if include_self: |
488 | seen = set() | ||
489 | else: | ||
490 | seen = set(start) | ||
490 | # Support direct dependencies (do_rootfs -> do_package_write_X) | 491 | # Support direct dependencies (do_rootfs -> do_package_write_X) |
491 | # or indirect dependencies within PN (do_populate_sdk_ext -> do_rootfs -> do_package_write_X) | 492 | # or indirect dependencies within PN (do_populate_sdk_ext -> do_rootfs -> do_package_write_X) |
492 | while start: | 493 | while start: |
493 | next = [] | 494 | next = [] |
494 | for dep2 in start: | 495 | for dep2 in start: |
495 | for dep in taskdepdata[dep2][3]: | 496 | for dep in taskdepdata[dep2][3]: |
496 | if taskdepdata[dep][0] != pn: | 497 | if include_self or taskdepdata[dep][0] != pn: |
497 | if "do_" + taskname in dep: | 498 | if "do_" + taskname in dep: |
498 | pkgdeps.add(dep) | 499 | pkgdeps.add(dep) |
499 | elif dep not in seen: | 500 | elif dep not in seen: |
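The include_self flag changes which nodes the walk may revisit. A simplified standalone sketch of the BFS over BB_TASKDEPDATA; the two-entry taskdepdata and its tuple layout are illustrative simplifications, not the real structure:

# taskdepdata: {tid: (pn, taskname, _, deps)} -- simplified layout
taskdepdata = {
    "img:do_rootfs": ("img", "do_rootfs", None, ["pkg:do_package_write_rpm"]),
    "pkg:do_package_write_rpm": ("pkg", "do_package_write_rpm", None, []),
}

def collect(start, pn, taskname, include_self=False):
    pkgdeps = set()
    seen = set() if include_self else {start}
    frontier = [start]
    while frontier:
        nxt = []
        for tid in frontier:
            for dep in taskdepdata[tid][3]:
                # with include_self, tasks from the same recipe (PN) qualify too
                if include_self or taskdepdata[dep][0] != pn:
                    if "do_" + taskname in dep:
                        pkgdeps.add(dep)
                    elif dep not in seen:
                        nxt.append(dep)
                seen.add(dep)
        frontier = nxt
    return pkgdeps

print(collect("img:do_rootfs", "img", "package_write_rpm"))
# {'pkg:do_package_write_rpm'}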
diff --git a/meta/lib/oe/package_manager/common_deb_ipk.py b/meta/lib/oe/package_manager/common_deb_ipk.py new file mode 100644 index 0000000000..6a1e28ee6f --- /dev/null +++ b/meta/lib/oe/package_manager/common_deb_ipk.py | |||
@@ -0,0 +1,97 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | import glob | ||
8 | import os | ||
9 | import subprocess | ||
10 | import tempfile | ||
11 | |||
12 | import bb | ||
13 | |||
14 | from oe.package_manager import opkg_query, PackageManager | ||
15 | |||
16 | class OpkgDpkgPM(PackageManager): | ||
17 | def __init__(self, d, target_rootfs): | ||
18 | """ | ||
19 | This is an abstract class. Do not instantiate this directly. | ||
20 | """ | ||
21 | super(OpkgDpkgPM, self).__init__(d, target_rootfs) | ||
22 | |||
23 | def package_info(self, pkg): | ||
24 | """ | ||
25 | Returns a dictionary with the package info. | ||
26 | """ | ||
27 | raise NotImplementedError | ||
28 | |||
29 | def _common_package_info(self, cmd): | ||
30 | """ | ||
31 | "Returns a dictionary with the package info. | ||
32 | |||
33 | This method extracts the common parts for Opkg and Dpkg | ||
34 | """ | ||
35 | |||
36 | proc = subprocess.run(cmd, capture_output=True, encoding="utf-8", shell=True) | ||
37 | if proc.returncode: | ||
38 | bb.fatal("Unable to list available packages. Command '%s' " | ||
39 | "returned %d:\n%s" % (cmd, proc.returncode, proc.stderr)) | ||
40 | elif proc.stderr: | ||
41 | bb.note("Command '%s' returned stderr: %s" % (cmd, proc.stderr)) | ||
42 | |||
43 | return opkg_query(proc.stdout) | ||
44 | |||
45 | def extract(self, pkg): | ||
46 | """ | ||
47 | Returns the path to a tmpdir containing the extracted contents of a package. | ||
48 | |||
49 | Deleting the tmpdir is the responsibility of the caller. | ||
50 | """ | ||
51 | pkg_info = self.package_info(pkg) | ||
52 | if not pkg_info: | ||
53 | bb.fatal("Unable to get information for package '%s' while " | ||
54 | "trying to extract the package." % pkg) | ||
55 | |||
56 | ar_cmd = bb.utils.which(os.getenv("PATH"), "ar") | ||
57 | tar_cmd = bb.utils.which(os.getenv("PATH"), "tar") | ||
58 | pkg_path = pkg_info[pkg]["filepath"] | ||
59 | |||
60 | if not os.path.isfile(pkg_path): | ||
61 | bb.fatal("Unable to extract package for '%s'." | ||
62 | "File %s doesn't exists" % (pkg, pkg_path)) | ||
63 | |||
64 | tmp_dir = tempfile.mkdtemp() | ||
65 | current_dir = os.getcwd() | ||
66 | os.chdir(tmp_dir) | ||
67 | |||
68 | try: | ||
69 | cmd = [ar_cmd, 'x', pkg_path] | ||
70 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) | ||
71 | data_tar = glob.glob("data.tar.*") | ||
72 | if len(data_tar) != 1: | ||
73 | bb.fatal("Unable to extract %s package. Failed to identify " | ||
74 | "data tarball (found tarballs '%s')." | ||
75 | % (pkg_path, data_tar)) | ||
76 | data_tar = data_tar[0] | ||
77 | cmd = [tar_cmd, 'xf', data_tar] | ||
78 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) | ||
79 | except subprocess.CalledProcessError as e: | ||
80 | bb.utils.remove(tmp_dir, recurse=True) | ||
81 | bb.fatal("Unable to extract %s package. Command '%s' " | ||
82 | "returned %d:\n%s" % (pkg_path, ' '.join(cmd), e.returncode, e.output.decode("utf-8"))) | ||
83 | except OSError as e: | ||
84 | bb.utils.remove(tmp_dir, recurse=True) | ||
85 | bb.fatal("Unable to extract %s package. Command '%s' " | ||
86 | "returned %d:\n%s at %s" % (pkg_path, ' '.join(cmd), e.errno, e.strerror, e.filename)) | ||
87 | |||
88 | bb.note("Extracted %s to %s" % (pkg_path, tmp_dir)) | ||
89 | bb.utils.remove(os.path.join(tmp_dir, "debian-binary")) | ||
90 | bb.utils.remove(os.path.join(tmp_dir, "control.tar.gz")) | ||
91 | bb.utils.remove(os.path.join(tmp_dir, data_tar)) | ||
92 | os.chdir(current_dir) | ||
93 | |||
94 | return tmp_dir | ||
95 | |||
96 | def _handle_intercept_failure(self, registered_pkgs): | ||
97 | self.mark_packages("unpacked", registered_pkgs.split()) | ||
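The extract() method above returns a freshly created tmpdir and deliberately leaves cleanup to the caller. A minimal usage sketch of that contract (hypothetical helper name, assuming pm is a concrete DpkgPM or OpkgPM instance):

    import os
    import bb.utils

    def list_package_contents(pm, pkg):
        # extract() unpacks the package payload into a new temporary directory
        tmp_dir = pm.extract(pkg)
        try:
            for root, dirs, files in os.walk(tmp_dir):
                for f in files:
                    print(os.path.relpath(os.path.join(root, f), tmp_dir))
        finally:
            # deleting the tmpdir is the caller's responsibility
            bb.utils.remove(tmp_dir, recurse=True)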
diff --git a/meta/lib/oe/package_manager/deb/__init__.py b/meta/lib/oe/package_manager/deb/__init__.py index 0c23c884c1..e09e81e490 100644 --- a/meta/lib/oe/package_manager/deb/__init__.py +++ b/meta/lib/oe/package_manager/deb/__init__.py | |||
@@ -7,6 +7,7 @@ | |||
7 | import re | 7 | import re |
8 | import subprocess | 8 | import subprocess |
9 | from oe.package_manager import * | 9 | from oe.package_manager import * |
10 | from oe.package_manager.common_deb_ipk import OpkgDpkgPM | ||
10 | 11 | ||
11 | class DpkgIndexer(Indexer): | 12 | class DpkgIndexer(Indexer): |
12 | def _create_configs(self): | 13 | def _create_configs(self): |
@@ -111,72 +112,6 @@ class PMPkgsList(PkgsList): | |||
111 | 112 | ||
112 | return opkg_query(cmd_output) | 113 | return opkg_query(cmd_output) |
113 | 114 | ||
114 | class OpkgDpkgPM(PackageManager): | ||
115 | def __init__(self, d, target_rootfs): | ||
116 | """ | ||
117 | This is an abstract class. Do not instantiate this directly. | ||
118 | """ | ||
119 | super(OpkgDpkgPM, self).__init__(d, target_rootfs) | ||
120 | |||
121 | def package_info(self, pkg, cmd): | ||
122 | """ | ||
123 | Returns a dictionary with the package info. | ||
124 | |||
125 | This method extracts the common parts for Opkg and Dpkg | ||
126 | """ | ||
127 | |||
128 | try: | ||
129 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8") | ||
130 | except subprocess.CalledProcessError as e: | ||
131 | bb.fatal("Unable to list available packages. Command '%s' " | ||
132 | "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8"))) | ||
133 | return opkg_query(output) | ||
134 | |||
135 | def extract(self, pkg, pkg_info): | ||
136 | """ | ||
137 | Returns the path to a tmpdir where resides the contents of a package. | ||
138 | |||
139 | Deleting the tmpdir is responsability of the caller. | ||
140 | |||
141 | This method extracts the common parts for Opkg and Dpkg | ||
142 | """ | ||
143 | |||
144 | ar_cmd = bb.utils.which(os.getenv("PATH"), "ar") | ||
145 | tar_cmd = bb.utils.which(os.getenv("PATH"), "tar") | ||
146 | pkg_path = pkg_info[pkg]["filepath"] | ||
147 | |||
148 | if not os.path.isfile(pkg_path): | ||
149 | bb.fatal("Unable to extract package for '%s'." | ||
150 | "File %s doesn't exists" % (pkg, pkg_path)) | ||
151 | |||
152 | tmp_dir = tempfile.mkdtemp() | ||
153 | current_dir = os.getcwd() | ||
154 | os.chdir(tmp_dir) | ||
155 | data_tar = 'data.tar.xz' | ||
156 | |||
157 | try: | ||
158 | cmd = [ar_cmd, 'x', pkg_path] | ||
159 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) | ||
160 | cmd = [tar_cmd, 'xf', data_tar] | ||
161 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) | ||
162 | except subprocess.CalledProcessError as e: | ||
163 | bb.utils.remove(tmp_dir, recurse=True) | ||
164 | bb.fatal("Unable to extract %s package. Command '%s' " | ||
165 | "returned %d:\n%s" % (pkg_path, ' '.join(cmd), e.returncode, e.output.decode("utf-8"))) | ||
166 | except OSError as e: | ||
167 | bb.utils.remove(tmp_dir, recurse=True) | ||
168 | bb.fatal("Unable to extract %s package. Command '%s' " | ||
169 | "returned %d:\n%s at %s" % (pkg_path, ' '.join(cmd), e.errno, e.strerror, e.filename)) | ||
170 | |||
171 | bb.note("Extracted %s to %s" % (pkg_path, tmp_dir)) | ||
172 | bb.utils.remove(os.path.join(tmp_dir, "debian-binary")) | ||
173 | bb.utils.remove(os.path.join(tmp_dir, "control.tar.gz")) | ||
174 | os.chdir(current_dir) | ||
175 | |||
176 | return tmp_dir | ||
177 | |||
178 | def _handle_intercept_failure(self, registered_pkgs): | ||
179 | self.mark_packages("unpacked", registered_pkgs.split()) | ||
180 | 115 | ||
181 | class DpkgPM(OpkgDpkgPM): | 116 | class DpkgPM(OpkgDpkgPM): |
182 | def __init__(self, d, target_rootfs, archs, base_archs, apt_conf_dir=None, deb_repo_workdir="oe-rootfs-repo", filterbydependencies=True): | 117 | def __init__(self, d, target_rootfs, archs, base_archs, apt_conf_dir=None, deb_repo_workdir="oe-rootfs-repo", filterbydependencies=True): |
@@ -496,7 +431,7 @@ class DpkgPM(OpkgDpkgPM): | |||
496 | Returns a dictionary with the package info. | 431 | Returns a dictionary with the package info. |
497 | """ | 432 | """ |
498 | cmd = "%s show %s" % (self.apt_cache_cmd, pkg) | 433 | cmd = "%s show %s" % (self.apt_cache_cmd, pkg) |
499 | pkg_info = super(DpkgPM, self).package_info(pkg, cmd) | 434 | pkg_info = self._common_package_info(cmd) |
500 | 435 | ||
501 | pkg_arch = pkg_info[pkg]["pkgarch"] | 436 | pkg_arch = pkg_info[pkg]["pkgarch"] |
502 | pkg_filename = pkg_info[pkg]["filename"] | 437 | pkg_filename = pkg_info[pkg]["filename"] |
@@ -504,19 +439,3 @@ class DpkgPM(OpkgDpkgPM): | |||
504 | os.path.join(self.deploy_dir, pkg_arch, pkg_filename) | 439 | os.path.join(self.deploy_dir, pkg_arch, pkg_filename) |
505 | 440 | ||
506 | return pkg_info | 441 | return pkg_info |
507 | |||
508 | def extract(self, pkg): | ||
509 | """ | ||
510 | Returns the path to a tmpdir where resides the contents of a package. | ||
511 | |||
512 | Deleting the tmpdir is responsability of the caller. | ||
513 | """ | ||
514 | pkg_info = self.package_info(pkg) | ||
515 | if not pkg_info: | ||
516 | bb.fatal("Unable to get information for package '%s' while " | ||
517 | "trying to extract the package." % pkg) | ||
518 | |||
519 | tmp_dir = super(DpkgPM, self).extract(pkg, pkg_info) | ||
520 | bb.utils.remove(os.path.join(tmp_dir, "data.tar.xz")) | ||
521 | |||
522 | return tmp_dir | ||
diff --git a/meta/lib/oe/package_manager/ipk/__init__.py b/meta/lib/oe/package_manager/ipk/__init__.py index 8cc9953a02..3d998e52ff 100644 --- a/meta/lib/oe/package_manager/ipk/__init__.py +++ b/meta/lib/oe/package_manager/ipk/__init__.py | |||
@@ -8,6 +8,7 @@ import re | |||
8 | import shutil | 8 | import shutil |
9 | import subprocess | 9 | import subprocess |
10 | from oe.package_manager import * | 10 | from oe.package_manager import * |
11 | from oe.package_manager.common_deb_ipk import OpkgDpkgPM | ||
11 | 12 | ||
12 | class OpkgIndexer(Indexer): | 13 | class OpkgIndexer(Indexer): |
13 | def write_index(self): | 14 | def write_index(self): |
@@ -90,76 +91,6 @@ class PMPkgsList(PkgsList): | |||
90 | return opkg_query(cmd_output) | 91 | return opkg_query(cmd_output) |
91 | 92 | ||
92 | 93 | ||
93 | |||
94 | class OpkgDpkgPM(PackageManager): | ||
95 | def __init__(self, d, target_rootfs): | ||
96 | """ | ||
97 | This is an abstract class. Do not instantiate this directly. | ||
98 | """ | ||
99 | super(OpkgDpkgPM, self).__init__(d, target_rootfs) | ||
100 | |||
101 | def package_info(self, pkg, cmd): | ||
102 | """ | ||
103 | Returns a dictionary with the package info. | ||
104 | |||
105 | This method extracts the common parts for Opkg and Dpkg | ||
106 | """ | ||
107 | |||
108 | proc = subprocess.run(cmd, capture_output=True, encoding="utf-8", shell=True) | ||
109 | if proc.returncode: | ||
110 | bb.fatal("Unable to list available packages. Command '%s' " | ||
111 | "returned %d:\n%s" % (cmd, proc.returncode, proc.stderr)) | ||
112 | elif proc.stderr: | ||
113 | bb.note("Command '%s' returned stderr: %s" % (cmd, proc.stderr)) | ||
114 | |||
115 | return opkg_query(proc.stdout) | ||
116 | |||
117 | def extract(self, pkg, pkg_info): | ||
118 | """ | ||
119 | Returns the path to a tmpdir where resides the contents of a package. | ||
120 | |||
121 | Deleting the tmpdir is responsability of the caller. | ||
122 | |||
123 | This method extracts the common parts for Opkg and Dpkg | ||
124 | """ | ||
125 | |||
126 | ar_cmd = bb.utils.which(os.getenv("PATH"), "ar") | ||
127 | tar_cmd = bb.utils.which(os.getenv("PATH"), "tar") | ||
128 | pkg_path = pkg_info[pkg]["filepath"] | ||
129 | |||
130 | if not os.path.isfile(pkg_path): | ||
131 | bb.fatal("Unable to extract package for '%s'." | ||
132 | "File %s doesn't exists" % (pkg, pkg_path)) | ||
133 | |||
134 | tmp_dir = tempfile.mkdtemp() | ||
135 | current_dir = os.getcwd() | ||
136 | os.chdir(tmp_dir) | ||
137 | data_tar = 'data.tar.zst' | ||
138 | |||
139 | try: | ||
140 | cmd = [ar_cmd, 'x', pkg_path] | ||
141 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) | ||
142 | cmd = [tar_cmd, 'xf', data_tar] | ||
143 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) | ||
144 | except subprocess.CalledProcessError as e: | ||
145 | bb.utils.remove(tmp_dir, recurse=True) | ||
146 | bb.fatal("Unable to extract %s package. Command '%s' " | ||
147 | "returned %d:\n%s" % (pkg_path, ' '.join(cmd), e.returncode, e.output.decode("utf-8"))) | ||
148 | except OSError as e: | ||
149 | bb.utils.remove(tmp_dir, recurse=True) | ||
150 | bb.fatal("Unable to extract %s package. Command '%s' " | ||
151 | "returned %d:\n%s at %s" % (pkg_path, ' '.join(cmd), e.errno, e.strerror, e.filename)) | ||
152 | |||
153 | bb.note("Extracted %s to %s" % (pkg_path, tmp_dir)) | ||
154 | bb.utils.remove(os.path.join(tmp_dir, "debian-binary")) | ||
155 | bb.utils.remove(os.path.join(tmp_dir, "control.tar.gz")) | ||
156 | os.chdir(current_dir) | ||
157 | |||
158 | return tmp_dir | ||
159 | |||
160 | def _handle_intercept_failure(self, registered_pkgs): | ||
161 | self.mark_packages("unpacked", registered_pkgs.split()) | ||
162 | |||
163 | class OpkgPM(OpkgDpkgPM): | 94 | class OpkgPM(OpkgDpkgPM): |
164 | def __init__(self, d, target_rootfs, config_file, archs, task_name='target', ipk_repo_workdir="oe-rootfs-repo", filterbydependencies=True, prepare_index=True): | 95 | def __init__(self, d, target_rootfs, config_file, archs, task_name='target', ipk_repo_workdir="oe-rootfs-repo", filterbydependencies=True, prepare_index=True): |
165 | super(OpkgPM, self).__init__(d, target_rootfs) | 96 | super(OpkgPM, self).__init__(d, target_rootfs) |
@@ -485,7 +416,7 @@ class OpkgPM(OpkgDpkgPM): | |||
485 | Returns a dictionary with the package info. | 416 | Returns a dictionary with the package info. |
486 | """ | 417 | """ |
487 | cmd = "%s %s info %s" % (self.opkg_cmd, self.opkg_args, pkg) | 418 | cmd = "%s %s info %s" % (self.opkg_cmd, self.opkg_args, pkg) |
488 | pkg_info = super(OpkgPM, self).package_info(pkg, cmd) | 419 | pkg_info = self._common_package_info(cmd) |
489 | 420 | ||
490 | pkg_arch = pkg_info[pkg]["arch"] | 421 | pkg_arch = pkg_info[pkg]["arch"] |
491 | pkg_filename = pkg_info[pkg]["filename"] | 422 | pkg_filename = pkg_info[pkg]["filename"] |
@@ -493,19 +424,3 @@ class OpkgPM(OpkgDpkgPM): | |||
493 | os.path.join(self.deploy_dir, pkg_arch, pkg_filename) | 424 | os.path.join(self.deploy_dir, pkg_arch, pkg_filename) |
494 | 425 | ||
495 | return pkg_info | 426 | return pkg_info |
496 | |||
497 | def extract(self, pkg): | ||
498 | """ | ||
499 | Returns the path to a tmpdir where resides the contents of a package. | ||
500 | |||
501 | Deleting the tmpdir is responsability of the caller. | ||
502 | """ | ||
503 | pkg_info = self.package_info(pkg) | ||
504 | if not pkg_info: | ||
505 | bb.fatal("Unable to get information for package '%s' while " | ||
506 | "trying to extract the package." % pkg) | ||
507 | |||
508 | tmp_dir = super(OpkgPM, self).extract(pkg, pkg_info) | ||
509 | bb.utils.remove(os.path.join(tmp_dir, "data.tar.zst")) | ||
510 | |||
511 | return tmp_dir | ||
diff --git a/meta/lib/oe/package_manager/rpm/__init__.py b/meta/lib/oe/package_manager/rpm/__init__.py index f40c880af4..323ec5008f 100644 --- a/meta/lib/oe/package_manager/rpm/__init__.py +++ b/meta/lib/oe/package_manager/rpm/__init__.py | |||
@@ -393,8 +393,8 @@ class RpmPM(PackageManager): | |||
393 | # Strip file: prefix | 393 | # Strip file: prefix |
394 | pkg_path = pkg_name[5:] | 394 | pkg_path = pkg_name[5:] |
395 | 395 | ||
396 | cpio_cmd = bb.utils.which(os.getenv("PATH"), "cpio") | 396 | tar_cmd = bb.utils.which(os.getenv("PATH"), "tar") |
397 | rpm2cpio_cmd = bb.utils.which(os.getenv("PATH"), "rpm2cpio") | 397 | rpm2archive_cmd = bb.utils.which(os.getenv("PATH"), "rpm2archive") |
398 | 398 | ||
399 | if not os.path.isfile(pkg_path): | 399 | if not os.path.isfile(pkg_path): |
400 | bb.fatal("Unable to extract package for '%s'." | 400 | bb.fatal("Unable to extract package for '%s'." |
@@ -405,7 +405,7 @@ class RpmPM(PackageManager): | |||
405 | os.chdir(tmp_dir) | 405 | os.chdir(tmp_dir) |
406 | 406 | ||
407 | try: | 407 | try: |
408 | cmd = "%s %s | %s -idmv" % (rpm2cpio_cmd, pkg_path, cpio_cmd) | 408 | cmd = "%s -n %s | %s xv" % (rpm2archive_cmd, pkg_path, tar_cmd) |
409 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) | 409 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) |
410 | except subprocess.CalledProcessError as e: | 410 | except subprocess.CalledProcessError as e: |
411 | bb.utils.remove(tmp_dir, recurse=True) | 411 | bb.utils.remove(tmp_dir, recurse=True) |
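The hunk above replaces the old rpm2cpio | cpio pipeline with rpm2archive | tar, converting the RPM payload to a tar stream instead of a cpio archive. A standalone sketch of the equivalent pipeline, assuming rpm2archive and tar are on PATH and that rpm2archive -n emits an uncompressed tar stream on stdout when piped:

    import subprocess

    def extract_rpm(pkg_path, dest_dir):
        # rpm2archive -n converts the RPM payload to an uncompressed tar
        # stream; tar unpacks it into dest_dir
        cmd = "rpm2archive -n %s | tar x" % pkg_path
        subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True,
                                cwd=dest_dir)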
diff --git a/meta/lib/oe/packagedata.py b/meta/lib/oe/packagedata.py index 2d1d6ddeb7..b6a10a930a 100644 --- a/meta/lib/oe/packagedata.py +++ b/meta/lib/oe/packagedata.py | |||
@@ -7,6 +7,7 @@ | |||
7 | import codecs | 7 | import codecs |
8 | import os | 8 | import os |
9 | import json | 9 | import json |
10 | import bb.parse | ||
10 | import bb.compress.zstd | 11 | import bb.compress.zstd |
11 | import oe.path | 12 | import oe.path |
12 | 13 | ||
@@ -64,6 +65,7 @@ def read_subpkgdata_dict(pkg, d): | |||
64 | ret[newvar] = subd[var] | 65 | ret[newvar] = subd[var] |
65 | return ret | 66 | return ret |
66 | 67 | ||
68 | @bb.parse.vardepsexclude("BB_NUMBER_THREADS") | ||
67 | def read_subpkgdata_extended(pkg, d): | 69 | def read_subpkgdata_extended(pkg, d): |
68 | import json | 70 | import json |
69 | import bb.compress.zstd | 71 | import bb.compress.zstd |
@@ -182,6 +184,7 @@ def runtime_mapping_rename(varname, pkg, d): | |||
182 | 184 | ||
183 | #bb.note("%s after: %s" % (varname, d.getVar(varname))) | 185 | #bb.note("%s after: %s" % (varname, d.getVar(varname))) |
184 | 186 | ||
187 | @bb.parse.vardepsexclude("BB_NUMBER_THREADS") | ||
185 | def emit_pkgdata(pkgfiles, d): | 188 | def emit_pkgdata(pkgfiles, d): |
186 | def process_postinst_on_target(pkg, mlprefix): | 189 | def process_postinst_on_target(pkg, mlprefix): |
187 | pkgval = d.getVar('PKG:%s' % pkg) | 190 | pkgval = d.getVar('PKG:%s' % pkg) |
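The @bb.parse.vardepsexclude decorator used above keeps the named variable out of the function's computed variable dependencies, so a change to BB_NUMBER_THREADS does not perturb the task signatures of callers. A minimal sketch of the same pattern (hypothetical helper):

    import bb.parse

    @bb.parse.vardepsexclude("BB_NUMBER_THREADS")
    def count_workers(d):
        # BB_NUMBER_THREADS only tunes parallelism here; excluding it keeps
        # it out of the function's signature and hence out of task hashes
        return int(d.getVar("BB_NUMBER_THREADS") or 1)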
diff --git a/meta/lib/oe/patch.py b/meta/lib/oe/patch.py index 60a0cc8291..edd77196ee 100644 --- a/meta/lib/oe/patch.py +++ b/meta/lib/oe/patch.py | |||
@@ -462,21 +462,23 @@ class GitApplyTree(PatchTree): | |||
462 | return (tmpfile, cmd) | 462 | return (tmpfile, cmd) |
463 | 463 | ||
464 | @staticmethod | 464 | @staticmethod |
465 | def addNote(repo, ref, key, value=None): | 465 | def addNote(repo, ref, key, value=None, commituser=None, commitemail=None): |
466 | note = key + (": %s" % value if value else "") | 466 | note = key + (": %s" % value if value else "") |
467 | notes_ref = GitApplyTree.notes_ref | 467 | notes_ref = GitApplyTree.notes_ref |
468 | runcmd(["git", "config", "notes.rewriteMode", "ignore"], repo) | 468 | runcmd(["git", "config", "notes.rewriteMode", "ignore"], repo) |
469 | runcmd(["git", "config", "notes.displayRef", notes_ref, notes_ref], repo) | 469 | runcmd(["git", "config", "notes.displayRef", notes_ref, notes_ref], repo) |
470 | runcmd(["git", "config", "notes.rewriteRef", notes_ref, notes_ref], repo) | 470 | runcmd(["git", "config", "notes.rewriteRef", notes_ref, notes_ref], repo) |
471 | runcmd(["git", "notes", "--ref", notes_ref, "append", "-m", note, ref], repo) | 471 | cmd = ["git"] |
472 | GitApplyTree.gitCommandUserOptions(cmd, commituser, commitemail) | ||
473 | runcmd(cmd + ["notes", "--ref", notes_ref, "append", "-m", note, ref], repo) | ||
472 | 474 | ||
473 | @staticmethod | 475 | @staticmethod |
474 | def removeNote(repo, ref, key): | 476 | def removeNote(repo, ref, key, commituser=None, commitemail=None): |
475 | notes = GitApplyTree.getNotes(repo, ref) | 477 | notes = GitApplyTree.getNotes(repo, ref) |
476 | notes = {k: v for k, v in notes.items() if k != key and not k.startswith(key + ":")} | 478 | notes = {k: v for k, v in notes.items() if k != key and not k.startswith(key + ":")} |
477 | runcmd(["git", "notes", "--ref", GitApplyTree.notes_ref, "remove", "--ignore-missing", ref], repo) | 479 | runcmd(["git", "notes", "--ref", GitApplyTree.notes_ref, "remove", "--ignore-missing", ref], repo) |
478 | for note, value in notes.items(): | 480 | for note, value in notes.items(): |
479 | GitApplyTree.addNote(repo, ref, note, value) | 481 | GitApplyTree.addNote(repo, ref, note, value, commituser, commitemail) |
480 | 482 | ||
481 | @staticmethod | 483 | @staticmethod |
482 | def getNotes(repo, ref): | 484 | def getNotes(repo, ref): |
@@ -507,7 +509,7 @@ class GitApplyTree(PatchTree): | |||
507 | GitApplyTree.gitCommandUserOptions(cmd, d=d) | 509 | GitApplyTree.gitCommandUserOptions(cmd, d=d) |
508 | cmd += ["commit", "-m", subject, "--no-verify"] | 510 | cmd += ["commit", "-m", subject, "--no-verify"] |
509 | runcmd(cmd, dir) | 511 | runcmd(cmd, dir) |
510 | GitApplyTree.addNote(dir, "HEAD", GitApplyTree.ignore_commit) | 512 | GitApplyTree.addNote(dir, "HEAD", GitApplyTree.ignore_commit, d.getVar('PATCH_GIT_USER_NAME'), d.getVar('PATCH_GIT_USER_EMAIL')) |
511 | 513 | ||
512 | @staticmethod | 514 | @staticmethod |
513 | def extractPatches(tree, startcommits, outdir, paths=None): | 515 | def extractPatches(tree, startcommits, outdir, paths=None): |
@@ -654,7 +656,7 @@ class GitApplyTree(PatchTree): | |||
654 | raise | 656 | raise |
655 | finally: | 657 | finally: |
656 | if patch_applied: | 658 | if patch_applied: |
657 | GitApplyTree.addNote(self.dir, "HEAD", GitApplyTree.original_patch, os.path.basename(patch['file'])) | 659 | GitApplyTree.addNote(self.dir, "HEAD", GitApplyTree.original_patch, os.path.basename(patch['file']), self.commituser, self.commitemail) |
658 | 660 | ||
659 | 661 | ||
660 | class QuiltTree(PatchSet): | 662 | class QuiltTree(PatchSet): |
@@ -882,7 +884,7 @@ class UserResolver(Resolver): | |||
882 | os.chdir(olddir) | 884 | os.chdir(olddir) |
883 | 885 | ||
884 | 886 | ||
885 | def patch_path(url, fetch, workdir, expand=True): | 887 | def patch_path(url, fetch, unpackdir, expand=True): |
886 | """Return the local path of a patch, or return nothing if this isn't a patch""" | 888 | """Return the local path of a patch, or return nothing if this isn't a patch""" |
887 | 889 | ||
888 | local = fetch.localpath(url) | 890 | local = fetch.localpath(url) |
@@ -891,7 +893,7 @@ def patch_path(url, fetch, workdir, expand=True): | |||
891 | base, ext = os.path.splitext(os.path.basename(local)) | 893 | base, ext = os.path.splitext(os.path.basename(local)) |
892 | if ext in ('.gz', '.bz2', '.xz', '.Z'): | 894 | if ext in ('.gz', '.bz2', '.xz', '.Z'): |
893 | if expand: | 895 | if expand: |
894 | local = os.path.join(workdir, base) | 896 | local = os.path.join(unpackdir, base) |
895 | ext = os.path.splitext(base)[1] | 897 | ext = os.path.splitext(base)[1] |
896 | 898 | ||
897 | urldata = fetch.ud[url] | 899 | urldata = fetch.ud[url] |
@@ -905,12 +907,12 @@ def patch_path(url, fetch, workdir, expand=True): | |||
905 | return local | 907 | return local |
906 | 908 | ||
907 | def src_patches(d, all=False, expand=True): | 909 | def src_patches(d, all=False, expand=True): |
908 | workdir = d.getVar('WORKDIR') | 910 | unpackdir = d.getVar('UNPACKDIR') |
909 | fetch = bb.fetch2.Fetch([], d) | 911 | fetch = bb.fetch2.Fetch([], d) |
910 | patches = [] | 912 | patches = [] |
911 | sources = [] | 913 | sources = [] |
912 | for url in fetch.urls: | 914 | for url in fetch.urls: |
913 | local = patch_path(url, fetch, workdir, expand) | 915 | local = patch_path(url, fetch, unpackdir, expand) |
914 | if not local: | 916 | if not local: |
915 | if all: | 917 | if all: |
916 | local = fetch.localpath(url) | 918 | local = fetch.localpath(url) |
diff --git a/meta/lib/oe/path.py b/meta/lib/oe/path.py index 5d21cdcbdf..a1efe97d88 100644 --- a/meta/lib/oe/path.py +++ b/meta/lib/oe/path.py | |||
@@ -10,6 +10,8 @@ import shutil | |||
10 | import subprocess | 10 | import subprocess |
11 | import os.path | 11 | import os.path |
12 | 12 | ||
13 | import bb.parse | ||
14 | |||
13 | def join(*paths): | 15 | def join(*paths): |
14 | """Like os.path.join but doesn't treat absolute RHS specially""" | 16 | """Like os.path.join but doesn't treat absolute RHS specially""" |
15 | return os.path.normpath("/".join(paths)) | 17 | return os.path.normpath("/".join(paths)) |
@@ -77,6 +79,7 @@ def replace_absolute_symlinks(basedir, d): | |||
77 | os.remove(path) | 79 | os.remove(path) |
78 | os.symlink(base, path) | 80 | os.symlink(base, path) |
79 | 81 | ||
82 | @bb.parse.vardepsexclude("TOPDIR") | ||
80 | def format_display(path, metadata): | 83 | def format_display(path, metadata): |
81 | """ Prepare a path for display to the user. """ | 84 | """ Prepare a path for display to the user. """ |
82 | rel = relative(metadata.getVar("TOPDIR"), path) | 85 | rel = relative(metadata.getVar("TOPDIR"), path) |
diff --git a/meta/lib/oe/qa.py b/meta/lib/oe/qa.py index f8ae3c743f..cd36cb5070 100644 --- a/meta/lib/oe/qa.py +++ b/meta/lib/oe/qa.py | |||
@@ -4,6 +4,7 @@ | |||
4 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
5 | # | 5 | # |
6 | 6 | ||
7 | import ast | ||
7 | import os, struct, mmap | 8 | import os, struct, mmap |
8 | 9 | ||
9 | class NotELFFileError(Exception): | 10 | class NotELFFileError(Exception): |
@@ -186,6 +187,20 @@ def write_error(type, error, d): | |||
186 | with open(logfile, "a+") as f: | 187 | with open(logfile, "a+") as f: |
187 | f.write("%s: %s [%s]\n" % (p, error, type)) | 188 | f.write("%s: %s [%s]\n" % (p, error, type)) |
188 | 189 | ||
190 | def handle_error_visitorcode(name, args): | ||
191 | execs = set() | ||
192 | contains = {} | ||
193 | warn = None | ||
194 | if isinstance(args[0], ast.Constant) and isinstance(args[0].value, str): | ||
195 | for i in ["ERROR_QA", "WARN_QA"]: | ||
196 | if i not in contains: | ||
197 | contains[i] = set() | ||
198 | contains[i].add(args[0].value) | ||
199 | else: | ||
200 | warn = args[0] | ||
201 | execs.add(name) | ||
202 | return contains, execs, warn | ||
203 | |||
189 | def handle_error(error_class, error_msg, d): | 204 | def handle_error(error_class, error_msg, d): |
190 | if error_class in (d.getVar("ERROR_QA") or "").split(): | 205 | if error_class in (d.getVar("ERROR_QA") or "").split(): |
191 | write_error(error_class, error_msg, d) | 206 | write_error(error_class, error_msg, d) |
@@ -198,12 +213,7 @@ def handle_error(error_class, error_msg, d): | |||
198 | else: | 213 | else: |
199 | bb.note("QA Issue: %s [%s]" % (error_msg, error_class)) | 214 | bb.note("QA Issue: %s [%s]" % (error_msg, error_class)) |
200 | return True | 215 | return True |
201 | 216 | handle_error.visitorcode = handle_error_visitorcode | |
202 | def add_message(messages, section, new_msg): | ||
203 | if section not in messages: | ||
204 | messages[section] = new_msg | ||
205 | else: | ||
206 | messages[section] = messages[section] + "\n" + new_msg | ||
207 | 217 | ||
208 | def exit_with_message_if_errors(message, d): | 218 | def exit_with_message_if_errors(message, d): |
209 | qa_fatal_errors = bb.utils.to_boolean(d.getVar("QA_ERRORS_FOUND"), False) | 219 | qa_fatal_errors = bb.utils.to_boolean(d.getVar("QA_ERRORS_FOUND"), False) |
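Assigning handle_error.visitorcode lets BitBake's code parser derive variable dependencies for calls to oe.qa.handle_error at parse time: a literal first argument is recorded as a contains-style dependency on ERROR_QA and WARN_QA, while a non-literal argument is flagged for a warning. A rough illustration of what the hook receives, assuming the parser hands it the call name and the argument AST nodes:

    import ast

    # parse a call as it might appear in recipe python code
    call = ast.parse("oe.qa.handle_error('ldflags', msg, d)").body[0].value
    contains, execs, warn = handle_error_visitorcode("oe.qa.handle_error",
                                                     call.args)
    # contains -> {'ERROR_QA': {'ldflags'}, 'WARN_QA': {'ldflags'}}
    # execs    -> {'oe.qa.handle_error'}
    # warn     -> None (the first argument was a string literal)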
diff --git a/meta/lib/oe/qemu.py b/meta/lib/oe/qemu.py new file mode 100644 index 0000000000..769865036c --- /dev/null +++ b/meta/lib/oe/qemu.py | |||
@@ -0,0 +1,54 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | def qemu_target_binary(d): | ||
8 | package_arch = d.getVar("PACKAGE_ARCH") | ||
9 | qemu_target_binary = (d.getVar("QEMU_TARGET_BINARY_%s" % package_arch) or "") | ||
10 | if qemu_target_binary: | ||
11 | return qemu_target_binary | ||
12 | |||
13 | target_arch = d.getVar("TARGET_ARCH") | ||
14 | if target_arch in ("i486", "i586", "i686"): | ||
15 | target_arch = "i386" | ||
16 | elif target_arch == "powerpc": | ||
17 | target_arch = "ppc" | ||
18 | elif target_arch == "powerpc64": | ||
19 | target_arch = "ppc64" | ||
20 | elif target_arch == "powerpc64le": | ||
21 | target_arch = "ppc64le" | ||
22 | |||
23 | return "qemu-" + target_arch | ||
24 | |||
25 | def qemu_wrapper_cmdline(d, rootfs_path, library_paths, qemu_options=None): | ||
26 | import string | ||
27 | |||
28 | package_arch = d.getVar("PACKAGE_ARCH") | ||
29 | if package_arch == "all": | ||
30 | return "false" | ||
31 | |||
32 | qemu_binary = qemu_target_binary(d) | ||
33 | if qemu_binary == "qemu-allarch": | ||
34 | qemu_binary = "qemuwrapper" | ||
35 | |||
36 | if qemu_options is None: | ||
37 | qemu_options = d.getVar("QEMU_OPTIONS") or "" | ||
38 | |||
39 | return "PSEUDO_UNLOAD=1 " + qemu_binary + " " + qemu_options + " -L " + rootfs_path\ | ||
40 | + " -E LD_LIBRARY_PATH=" + ":".join(library_paths) + " " | ||
41 | |||
42 | # The following function returns a string containing the command that is | ||
43 | # needed to run a certain binary through qemu. For example, if a postinstall | ||
44 | # scriptlet must run at do_rootfs time and running the postinstall is | ||
45 | # architecture dependent, we can run it through qemu. In the | ||
46 | # postinstall scriptlet, we could use the following: | ||
47 | # | ||
48 | # ${@qemu_run_binary(d, '$D', '/usr/bin/test_app')} [test_app arguments] | ||
49 | # | ||
50 | def qemu_run_binary(d, rootfs_path, binary): | ||
51 | libdir = rootfs_path + d.getVar("libdir", False) | ||
52 | base_libdir = rootfs_path + d.getVar("base_libdir", False) | ||
53 | |||
54 | return qemu_wrapper_cmdline(d, rootfs_path, [libdir, base_libdir]) + rootfs_path + binary | ||
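For orientation, a sketch of the command these helpers compose for a hypothetical aarch64 target (the exact options come from QEMU_OPTIONS, and the library paths from the rootfs in question):

    # ${@qemu_run_binary(d, '$D', '/usr/bin/test_app')} expands to roughly:
    #
    #   PSEUDO_UNLOAD=1 qemu-aarch64 <QEMU_OPTIONS> -L $D \
    #       -E LD_LIBRARY_PATH=$D/usr/lib:$D/lib $D/usr/bin/test_app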
diff --git a/meta/lib/oe/recipeutils.py b/meta/lib/oe/recipeutils.py index de1fbdd3a8..044f1bfa61 100644 --- a/meta/lib/oe/recipeutils.py +++ b/meta/lib/oe/recipeutils.py | |||
@@ -818,7 +818,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, | |||
818 | instdirline = 'install -d ${D}%s' % os.path.dirname(instdestpath) | 818 | instdirline = 'install -d ${D}%s' % os.path.dirname(instdestpath) |
819 | if not instdirline in instfunclines: | 819 | if not instdirline in instfunclines: |
820 | instfunclines.append(instdirline) | 820 | instfunclines.append(instdirline) |
821 | instfunclines.append('install -m %s ${WORKDIR}/%s ${D}%s' % (perms, os.path.basename(srcfile), instdestpath)) | 821 | instfunclines.append('install -m %s ${UNPACKDIR}/%s ${D}%s' % (perms, os.path.basename(srcfile), instdestpath)) |
822 | if instfunclines: | 822 | if instfunclines: |
823 | bbappendlines.append(('do_install:append%s()' % appendoverride, '', instfunclines)) | 823 | bbappendlines.append(('do_install:append%s()' % appendoverride, '', instfunclines)) |
824 | 824 | ||
@@ -1070,10 +1070,15 @@ def get_recipe_upstream_version(rd): | |||
1070 | ud = bb.fetch2.FetchData(src_uri, rd) | 1070 | ud = bb.fetch2.FetchData(src_uri, rd) |
1071 | if rd.getVar("UPSTREAM_CHECK_COMMITS") == "1": | 1071 | if rd.getVar("UPSTREAM_CHECK_COMMITS") == "1": |
1072 | bb.fetch2.get_srcrev(rd) | 1072 | bb.fetch2.get_srcrev(rd) |
1073 | revision = ud.method.latest_revision(ud, rd, 'default') | 1073 | upversion = None |
1074 | upversion = pv | 1074 | revision = None |
1075 | if revision != rd.getVar("SRCREV"): | 1075 | try: |
1076 | upversion = upversion + "-new-commits-available" | 1076 | revision = ud.method.latest_revision(ud, rd, 'default') |
1077 | upversion = pv | ||
1078 | if revision != rd.getVar("SRCREV"): | ||
1079 | upversion = upversion + "-new-commits-available" | ||
1080 | except bb.fetch2.FetchError as e: | ||
1081 | bb.warn("Unable to obtain latest revision: {}".format(e)) | ||
1077 | else: | 1082 | else: |
1078 | pupver = ud.method.latest_versionstring(ud, rd) | 1083 | pupver = ud.method.latest_versionstring(ud, rd) |
1079 | (upversion, revision) = pupver | 1084 | (upversion, revision) = pupver |
@@ -1112,7 +1117,7 @@ def _get_recipe_upgrade_status(data): | |||
1112 | maintainer = data.getVar('RECIPE_MAINTAINER') | 1117 | maintainer = data.getVar('RECIPE_MAINTAINER') |
1113 | no_upgrade_reason = data.getVar('RECIPE_NO_UPDATE_REASON') | 1118 | no_upgrade_reason = data.getVar('RECIPE_NO_UPDATE_REASON') |
1114 | 1119 | ||
1115 | return (pn, status, cur_ver, next_ver, maintainer, revision, no_upgrade_reason) | 1120 | return {'pn':pn, 'status':status, 'cur_ver':cur_ver, 'next_ver':next_ver, 'maintainer':maintainer, 'revision':revision, 'no_upgrade_reason':no_upgrade_reason} |
1116 | 1121 | ||
1117 | def get_recipe_upgrade_status(recipes=None): | 1122 | def get_recipe_upgrade_status(recipes=None): |
1118 | pkgs_list = [] | 1123 | pkgs_list = [] |
@@ -1154,6 +1159,7 @@ def get_recipe_upgrade_status(recipes=None): | |||
1154 | if not recipes: | 1159 | if not recipes: |
1155 | recipes = tinfoil.all_recipe_files(variants=False) | 1160 | recipes = tinfoil.all_recipe_files(variants=False) |
1156 | 1161 | ||
1162 | recipeincludes = {} | ||
1157 | for fn in recipes: | 1163 | for fn in recipes: |
1158 | try: | 1164 | try: |
1159 | if fn.startswith("/"): | 1165 | if fn.startswith("/"): |
@@ -1178,8 +1184,65 @@ def get_recipe_upgrade_status(recipes=None): | |||
1178 | 1184 | ||
1179 | data_copy_list.append(data_copy) | 1185 | data_copy_list.append(data_copy) |
1180 | 1186 | ||
1187 | recipeincludes[data.getVar('FILE')] = {'bbincluded':data.getVar('BBINCLUDED').split(),'pn':data.getVar('PN')} | ||
1188 | |||
1181 | from concurrent.futures import ProcessPoolExecutor | 1189 | from concurrent.futures import ProcessPoolExecutor |
1182 | with ProcessPoolExecutor(max_workers=utils.cpu_count()) as executor: | 1190 | with ProcessPoolExecutor(max_workers=utils.cpu_count()) as executor: |
1183 | pkgs_list = executor.map(_get_recipe_upgrade_status, data_copy_list) | 1191 | pkgs_list = executor.map(_get_recipe_upgrade_status, data_copy_list) |
1184 | 1192 | ||
1185 | return pkgs_list | 1193 | return _group_recipes(pkgs_list, _get_common_include_recipes(recipeincludes)) |
1194 | |||
1195 | def get_common_include_recipes(): | ||
1196 | with bb.tinfoil.Tinfoil() as tinfoil: | ||
1197 | tinfoil.prepare(config_only=False) | ||
1198 | |||
1199 | recipes = tinfoil.all_recipe_files(variants=False) | ||
1200 | |||
1201 | recipeincludes = {} | ||
1202 | for fn in recipes: | ||
1203 | data = tinfoil.parse_recipe_file(fn) | ||
1204 | recipeincludes[fn] = {'bbincluded':data.getVar('BBINCLUDED').split(),'pn':data.getVar('PN')} | ||
1205 | return _get_common_include_recipes(recipeincludes) | ||
1206 | |||
1207 | def _get_common_include_recipes(recipeincludes_all): | ||
1208 | recipeincludes = {} | ||
1209 | for fn,data in recipeincludes_all.items(): | ||
1210 | bbincluded_filtered = [i for i in data['bbincluded'] if os.path.dirname(i) == os.path.dirname(fn) and i != fn] | ||
1211 | if bbincluded_filtered: | ||
1212 | recipeincludes[data['pn']] = bbincluded_filtered | ||
1213 | |||
1214 | recipeincludes_inverted = {} | ||
1215 | for k,v in recipeincludes.items(): | ||
1216 | for i in v: | ||
1217 | recipeincludes_inverted.setdefault(i,set()).add(k) | ||
1218 | |||
1219 | recipeincludes_inverted_filtered = {k:v for k,v in recipeincludes_inverted.items() if len(v) > 1} | ||
1220 | |||
1221 | recipes_with_shared_includes = list() | ||
1222 | for v in recipeincludes_inverted_filtered.values(): | ||
1223 | recipeset = v | ||
1224 | for v1 in recipeincludes_inverted_filtered.values(): | ||
1225 | if recipeset.intersection(v1): | ||
1226 | recipeset.update(v1) | ||
1227 | if recipeset not in recipes_with_shared_includes: | ||
1228 | recipes_with_shared_includes.append(recipeset) | ||
1229 | |||
1230 | return recipes_with_shared_includes | ||
1231 | |||
1232 | def _group_recipes(recipes, groups): | ||
1233 | recipedict = {} | ||
1234 | for r in recipes: | ||
1235 | recipedict[r['pn']] = r | ||
1236 | |||
1237 | recipegroups = [] | ||
1238 | for g in groups: | ||
1239 | recipeset = [] | ||
1240 | for r in g: | ||
1241 | if r in recipedict.keys(): | ||
1242 | recipeset.append(recipedict[r]) | ||
1243 | del recipedict[r] | ||
1244 | recipegroups.append(recipeset) | ||
1245 | |||
1246 | for r in recipedict.values(): | ||
1247 | recipegroups.append([r]) | ||
1248 | return recipegroups | ||
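A small worked example of the include grouping above (hypothetical recipe paths): recipes in the same directory that pull in the same sibling .inc file end up in one group, while everything else falls through to _group_recipes as a singleton.

    recipeincludes_all = {
        '/meta/recipes-foo/bar/bar_1.0.bb':
            {'bbincluded': ['/meta/recipes-foo/bar/bar.inc'], 'pn': 'bar'},
        '/meta/recipes-foo/bar/bar-native_1.0.bb':
            {'bbincluded': ['/meta/recipes-foo/bar/bar.inc'],
             'pn': 'bar-native'},
        '/meta/recipes-foo/baz/baz_2.0.bb':
            {'bbincluded': ['/meta/classes/cmake.bbclass'], 'pn': 'baz'},
    }
    groups = _get_common_include_recipes(recipeincludes_all)
    # -> [{'bar', 'bar-native'}]; baz shares no sibling include, so
    #    _group_recipes() will later emit it as a group of one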
diff --git a/meta/lib/oe/reproducible.py b/meta/lib/oe/reproducible.py index 448befce33..0270024a83 100644 --- a/meta/lib/oe/reproducible.py +++ b/meta/lib/oe/reproducible.py | |||
@@ -47,7 +47,7 @@ import bb | |||
47 | # 2. If there is a git checkout, use the last git commit timestamp. | 47 | # 2. If there is a git checkout, use the last git commit timestamp. |
48 | # Git does not preserve file timestamps on checkout. | 48 | # Git does not preserve file timestamps on checkout. |
49 | # | 49 | # |
50 | # 3. Use the mtime of "known" files such as NEWS, CHANGLELOG, ... | 50 | # 3. Use the mtime of "known" files such as NEWS, CHANGELOG, ... |
51 | # This works for well-kept repositories distributed via tarball. | 51 | # This works for well-kept repositories distributed via tarball. |
52 | # | 52 | # |
53 | # 4. Use the modification time of the youngest file in the source tree, if | 53 | # 4. Use the modification time of the youngest file in the source tree, if |
@@ -75,10 +75,11 @@ def get_source_date_epoch_from_known_files(d, sourcedir): | |||
75 | return source_date_epoch | 75 | return source_date_epoch |
76 | 76 | ||
77 | def find_git_folder(d, sourcedir): | 77 | def find_git_folder(d, sourcedir): |
78 | # First guess: WORKDIR/git | 78 | # First guess: UNPACKDIR/BB_GIT_DEFAULT_DESTSUFFIX |
79 | # This is the default git fetcher unpack path | 79 | # This is the default git fetcher unpack path |
80 | workdir = d.getVar('WORKDIR') | 80 | unpackdir = d.getVar('UNPACKDIR') |
81 | gitpath = os.path.join(workdir, "git/.git") | 81 | default_destsuffix = d.getVar('BB_GIT_DEFAULT_DESTSUFFIX') |
82 | gitpath = os.path.join(unpackdir, default_destsuffix, ".git") | ||
82 | if os.path.isdir(gitpath): | 83 | if os.path.isdir(gitpath): |
83 | return gitpath | 84 | return gitpath |
84 | 85 | ||
@@ -88,15 +89,16 @@ def find_git_folder(d, sourcedir): | |||
88 | return gitpath | 89 | return gitpath |
89 | 90 | ||
90 | # Perhaps there was a subpath or destsuffix specified. | 91 | # Perhaps there was a subpath or destsuffix specified. |
91 | # Go looking in the WORKDIR | 92 | # Go looking in the UNPACKDIR |
92 | exclude = set(["build", "image", "license-destdir", "patches", "pseudo", | 93 | for root, dirs, files in os.walk(unpackdir, topdown=True): |
93 | "recipe-sysroot", "recipe-sysroot-native", "sysroot-destdir", "temp"]) | ||
94 | for root, dirs, files in os.walk(workdir, topdown=True): | ||
95 | dirs[:] = [d for d in dirs if d not in exclude] | ||
96 | if '.git' in dirs: | 94 | if '.git' in dirs: |
97 | return os.path.join(root, ".git") | 95 | return os.path.join(root, ".git") |
98 | 96 | ||
99 | bb.warn("Failed to find a git repository in WORKDIR: %s" % workdir) | 97 | for root, dirs, files in os.walk(sourcedir, topdown=True): |
98 | if '.git' in dirs: | ||
99 | return os.path.join(root, ".git") | ||
100 | |||
101 | bb.warn("Failed to find a git repository in UNPACKDIR: %s" % unpackdir) | ||
100 | return None | 102 | return None |
101 | 103 | ||
102 | def get_source_date_epoch_from_git(d, sourcedir): | 104 | def get_source_date_epoch_from_git(d, sourcedir): |
@@ -120,7 +122,7 @@ def get_source_date_epoch_from_git(d, sourcedir): | |||
120 | return int(p.stdout.decode('utf-8')) | 122 | return int(p.stdout.decode('utf-8')) |
121 | 123 | ||
122 | def get_source_date_epoch_from_youngest_file(d, sourcedir): | 124 | def get_source_date_epoch_from_youngest_file(d, sourcedir): |
123 | if sourcedir == d.getVar('WORKDIR'): | 125 | if sourcedir == d.getVar('UNPACKDIR'): |
124 | # These sources are almost certainly not from a tarball | 126 | # These sources are almost certainly not from a tarball |
125 | return None | 127 | return None |
126 | 128 | ||
diff --git a/meta/lib/oe/rootfs.py b/meta/lib/oe/rootfs.py index 8cd48f9450..14befac8fa 100644 --- a/meta/lib/oe/rootfs.py +++ b/meta/lib/oe/rootfs.py | |||
@@ -199,12 +199,12 @@ class Rootfs(object, metaclass=ABCMeta): | |||
199 | if command in commands: | 199 | if command in commands: |
200 | commands.remove(command) | 200 | commands.remove(command) |
201 | commands.append(command) | 201 | commands.append(command) |
202 | return "".join(commands) | 202 | return " ".join(commands) |
203 | 203 | ||
204 | # We want this to run as late as possible, in particular after | 204 | # We want this to run as late as possible, in particular after |
205 | # systemd_sysusers_create and set_user_group. Using :append is not enough | 205 | # systemd_sysusers_create and set_user_group. Using :append is not enough |
206 | make_last("tidy_shadowutils_files", post_process_cmds) | 206 | post_process_cmds = make_last("tidy_shadowutils_files", post_process_cmds) |
207 | make_last("rootfs_reproducible", post_process_cmds) | 207 | post_process_cmds = make_last("rootfs_reproducible", post_process_cmds) |
208 | 208 | ||
209 | execute_pre_post_process(self.d, pre_process_cmds) | 209 | execute_pre_post_process(self.d, pre_process_cmds) |
210 | 210 | ||
@@ -269,7 +269,11 @@ class Rootfs(object, metaclass=ABCMeta): | |||
269 | self.pm.remove(["run-postinsts"]) | 269 | self.pm.remove(["run-postinsts"]) |
270 | 270 | ||
271 | image_rorfs = bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs", | 271 | image_rorfs = bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs", |
272 | True, False, self.d) and \ | ||
273 | not bb.utils.contains("IMAGE_FEATURES", | ||
274 | "read-only-rootfs-delayed-postinsts", | ||
272 | True, False, self.d) | 275 | True, False, self.d) |
276 | |||
273 | image_rorfs_force = self.d.getVar('FORCE_RO_REMOVE') | 277 | image_rorfs_force = self.d.getVar('FORCE_RO_REMOVE') |
274 | 278 | ||
275 | if image_rorfs or image_rorfs_force == "1": | 279 | if image_rorfs or image_rorfs_force == "1": |
@@ -358,11 +362,12 @@ class Rootfs(object, metaclass=ABCMeta): | |||
358 | 362 | ||
359 | versioned_modules_dir = os.path.join(self.image_rootfs, modules_dir, kernel_ver) | 363 | versioned_modules_dir = os.path.join(self.image_rootfs, modules_dir, kernel_ver) |
360 | 364 | ||
361 | bb.utils.mkdirhier(versioned_modules_dir) | 365 | if os.path.exists(versioned_modules_dir): |
362 | 366 | bb.note("Running depmodwrapper for %s ..." % versioned_modules_dir) | |
363 | bb.note("Running depmodwrapper for %s ..." % versioned_modules_dir) | 367 | if self._exec_shell_cmd(['depmodwrapper', '-a', '-b', self.image_rootfs, kernel_ver, kernel_package_name]): |
364 | if self._exec_shell_cmd(['depmodwrapper', '-a', '-b', self.image_rootfs, kernel_ver, kernel_package_name]): | 368 | bb.fatal("Kernel modules dependency generation failed") |
365 | bb.fatal("Kernel modules dependency generation failed") | 369 | else: |
370 | bb.note("Not running depmodwrapper for %s since directory does not exist" % versioned_modules_dir) | ||
366 | 371 | ||
367 | """ | 372 | """ |
368 | Create devfs: | 373 | Create devfs: |
@@ -423,12 +428,3 @@ def image_list_installed_packages(d, rootfs_dir=None): | |||
423 | import importlib | 428 | import importlib |
424 | cls = importlib.import_module('oe.package_manager.' + img_type) | 429 | cls = importlib.import_module('oe.package_manager.' + img_type) |
425 | return cls.PMPkgsList(d, rootfs_dir).list_pkgs() | 430 | return cls.PMPkgsList(d, rootfs_dir).list_pkgs() |
426 | |||
427 | if __name__ == "__main__": | ||
428 | """ | ||
429 | We should be able to run this as a standalone script, from outside bitbake | ||
430 | environment. | ||
431 | """ | ||
432 | """ | ||
433 | TBD | ||
434 | """ | ||
diff --git a/meta/lib/oe/rootfspostcommands.py b/meta/lib/oe/rootfspostcommands.py new file mode 100644 index 0000000000..5386eea409 --- /dev/null +++ b/meta/lib/oe/rootfspostcommands.py | |||
@@ -0,0 +1,90 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | import os | ||
8 | |||
9 | def sort_shadowutils_file(filename, mapping): | ||
10 | """ | ||
11 | Sorts a passwd or group file based on the numeric ID in the third column. | ||
12 | If a mapping is given, the name from the first column is mapped via that | ||
13 | dictionary instead (necessary for /etc/shadow and /etc/gshadow). If not, | ||
14 | a new mapping is created on the fly and returned. | ||
15 | """ | ||
16 | |||
17 | new_mapping = {} | ||
18 | with open(filename, 'rb+') as f: | ||
19 | lines = f.readlines() | ||
20 | # No explicit error checking for the sake of simplicity. /etc | ||
21 | # files are assumed to be well-formed, causing exceptions if | ||
22 | # not. | ||
23 | for line in lines: | ||
24 | entries = line.split(b':') | ||
25 | name = entries[0] | ||
26 | if mapping is None: | ||
27 | id = int(entries[2]) | ||
28 | else: | ||
29 | id = mapping[name] | ||
30 | new_mapping[name] = id | ||
31 | # Sort by numeric id first, with entire line as secondary key | ||
32 | # (just in case that there is more than one entry for the same id). | ||
33 | lines.sort(key=lambda line: (new_mapping[line.split(b':')[0]], line)) | ||
34 | # We overwrite the entire file, i.e. no truncate() necessary. | ||
35 | f.seek(0) | ||
36 | f.write(b''.join(lines)) | ||
37 | |||
38 | return new_mapping | ||
39 | |||
40 | def sort_shadowutils_files(sysconfdir): | ||
41 | """ | ||
42 | Sorts shadow-utils 'passwd' and 'group' files in a rootfs' /etc directory | ||
43 | by ID. | ||
44 | """ | ||
45 | |||
46 | for main, shadow in (('passwd', 'shadow'), | ||
47 | ('group', 'gshadow')): | ||
48 | filename = os.path.join(sysconfdir, main) | ||
49 | if os.path.exists(filename): | ||
50 | mapping = sort_shadowutils_file(filename, None) | ||
51 | filename = os.path.join(sysconfdir, shadow) | ||
52 | if os.path.exists(filename): | ||
53 | sort_shadowutils_file(filename, mapping) | ||
54 | |||
55 | def remove_shadowutils_backup_file(filename): | ||
56 | """ | ||
57 | Remove shadow-utils backup file for files like /etc/passwd. | ||
58 | """ | ||
59 | |||
60 | backup_filename = filename + '-' | ||
61 | if os.path.exists(backup_filename): | ||
62 | os.unlink(backup_filename) | ||
63 | |||
64 | def remove_shadowutils_backup_files(sysconfdir): | ||
65 | """ | ||
66 | Remove shadow-utils backup files in a rootfs /etc directory. They are not | ||
67 | needed in the initial root filesystem and sorting them can be inconsistent | ||
68 | (YOCTO #11043). | ||
69 | """ | ||
70 | |||
71 | for filename in ( | ||
72 | 'group', | ||
73 | 'gshadow', | ||
74 | 'passwd', | ||
75 | 'shadow', | ||
76 | 'subgid', | ||
77 | 'subuid', | ||
78 | ): | ||
79 | filepath = os.path.join(sysconfdir, filename) | ||
80 | remove_shadowutils_backup_file(filepath) | ||
81 | |||
82 | def tidy_shadowutils_files(sysconfdir): | ||
83 | """ | ||
84 | Tidy up shadow-utils files. | ||
85 | """ | ||
86 | |||
87 | remove_shadowutils_backup_files(sysconfdir) | ||
88 | sort_shadowutils_files(sysconfdir) | ||
89 | |||
90 | return True | ||
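A short sketch of the two-step sorting contract in sort_shadowutils_file(), assuming it is importable from oe.rootfspostcommands: the first pass sorts passwd by UID and returns a name-to-id mapping, which the second pass reuses because shadow has no numeric ID column.

    import os
    import tempfile
    from oe.rootfspostcommands import sort_shadowutils_file

    etc = tempfile.mkdtemp()
    with open(os.path.join(etc, 'passwd'), 'wb') as f:
        f.write(b'daemon:x:1:1::/:/bin/sh\nroot:x:0:0::/root:/bin/sh\n')
    with open(os.path.join(etc, 'shadow'), 'wb') as f:
        f.write(b'daemon:*:19000:0:99999:7:::\nroot:*:19000:0:99999:7:::\n')

    mapping = sort_shadowutils_file(os.path.join(etc, 'passwd'), None)
    sort_shadowutils_file(os.path.join(etc, 'shadow'), mapping)
    # both files now list root (uid 0) before daemon (uid 1)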
diff --git a/meta/lib/oe/rust.py b/meta/lib/oe/rust.py index 185553eeeb..1dc9cf150d 100644 --- a/meta/lib/oe/rust.py +++ b/meta/lib/oe/rust.py | |||
@@ -8,6 +8,4 @@ | |||
8 | def arch_to_rust_arch(arch): | 8 | def arch_to_rust_arch(arch): |
9 | if arch == "ppc64le": | 9 | if arch == "ppc64le": |
10 | return "powerpc64le" | 10 | return "powerpc64le" |
11 | if arch in ('riscv32', 'riscv64'): | ||
12 | return arch + 'gc' | ||
13 | return arch | 11 | return arch |
diff --git a/meta/lib/oe/sbom30.py b/meta/lib/oe/sbom30.py new file mode 100644 index 0000000000..227ac51877 --- /dev/null +++ b/meta/lib/oe/sbom30.py | |||
@@ -0,0 +1,1096 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | from pathlib import Path | ||
8 | |||
9 | import oe.spdx30 | ||
10 | import bb | ||
11 | import re | ||
12 | import hashlib | ||
13 | import uuid | ||
14 | import os | ||
15 | import oe.spdx_common | ||
16 | from datetime import datetime, timezone | ||
17 | |||
18 | OE_SPDX_BASE = "https://rdf.openembedded.org/spdx/3.0/" | ||
19 | |||
20 | VEX_VERSION = "1.0.0" | ||
21 | |||
22 | SPDX_BUILD_TYPE = "http://openembedded.org/bitbake" | ||
23 | |||
24 | OE_ALIAS_PREFIX = "http://spdxdocs.org/openembedded-alias/by-doc-hash/" | ||
25 | OE_DOC_ALIAS_PREFIX = "http://spdxdocs.org/openembedded-alias/doc/" | ||
26 | |||
27 | |||
28 | @oe.spdx30.register(OE_SPDX_BASE + "id-alias") | ||
29 | class OEIdAliasExtension(oe.spdx30.extension_Extension): | ||
30 | """ | ||
31 | This extension allows an Element to provide an internal alias for the SPDX | ||
32 | ID. Since SPDX requires unique URIs for each SPDX ID, most of the objects | ||
33 | created have a unique UUID namespace and the unihash of the task encoded in | ||
34 | their SPDX ID. However, this causes a problem for referencing documents | ||
35 | across recipes, since the taskhash of a dependency may not factor into the | ||
36 | taskhash of the current task and thus the current task won't rebuild and | ||
37 | see the new SPDX ID when the dependency changes (e.g. ABI safe recipes and | ||
38 | tasks). | ||
39 | |||
40 | To help work around this, this extension provides a non-unique alias for an | ||
41 | Element by which it can be referenced from other tasks/recipes. When a | ||
42 | final SBoM is created, references to these aliases will be replaced with | ||
43 | the actual unique SPDX ID. | ||
44 | |||
45 | Most Elements will automatically get an alias created when they are written | ||
46 | out if they do not already have one. To suppress the creation of an alias, | ||
47 | add an extension with a blank `alias` property. | ||
48 | |||
49 | |||
50 | It is an internal extension that should be removed when writing out a final | ||
51 | SBoM. | ||
52 | """ | ||
53 | |||
54 | CLOSED = True | ||
55 | INTERNAL = True | ||
56 | |||
57 | @classmethod | ||
58 | def _register_props(cls): | ||
59 | super()._register_props() | ||
60 | cls._add_property( | ||
61 | "alias", | ||
62 | oe.spdx30.StringProp(), | ||
63 | OE_SPDX_BASE + "alias", | ||
64 | max_count=1, | ||
65 | ) | ||
66 | |||
67 | cls._add_property( | ||
68 | "link_name", | ||
69 | oe.spdx30.StringProp(), | ||
70 | OE_SPDX_BASE + "link-name", | ||
71 | max_count=1, | ||
72 | ) | ||
73 | |||
74 | |||
75 | @oe.spdx30.register(OE_SPDX_BASE + "file-name-alias") | ||
76 | class OEFileNameAliasExtension(oe.spdx30.extension_Extension): | ||
77 | CLOSED = True | ||
78 | INTERNAL = True | ||
79 | |||
80 | @classmethod | ||
81 | def _register_props(cls): | ||
82 | super()._register_props() | ||
83 | cls._add_property( | ||
84 | "aliases", | ||
85 | oe.spdx30.ListProp(oe.spdx30.StringProp()), | ||
86 | OE_SPDX_BASE + "filename-alias", | ||
87 | ) | ||
88 | |||
89 | |||
90 | @oe.spdx30.register(OE_SPDX_BASE + "license-scanned") | ||
91 | class OELicenseScannedExtension(oe.spdx30.extension_Extension): | ||
92 | """ | ||
93 | The presence of this extension means the file has already been scanned for | ||
94 | license information | ||
95 | """ | ||
96 | |||
97 | CLOSED = True | ||
98 | INTERNAL = True | ||
99 | |||
100 | |||
101 | @oe.spdx30.register(OE_SPDX_BASE + "document-extension") | ||
102 | class OEDocumentExtension(oe.spdx30.extension_Extension): | ||
103 | """ | ||
104 | This extension is added to a SpdxDocument to indicate various useful bits | ||
105 | of information about its contents | ||
106 | """ | ||
107 | |||
108 | CLOSED = True | ||
109 | |||
110 | @classmethod | ||
111 | def _register_props(cls): | ||
112 | super()._register_props() | ||
113 | cls._add_property( | ||
114 | "is_native", | ||
115 | oe.spdx30.BooleanProp(), | ||
116 | OE_SPDX_BASE + "is-native", | ||
117 | max_count=1, | ||
118 | ) | ||
119 | |||
120 | |||
121 | def spdxid_hash(*items): | ||
122 | h = hashlib.md5() | ||
123 | for i in items: | ||
124 | if isinstance(i, oe.spdx30.Element): | ||
125 | h.update(i._id.encode("utf-8")) | ||
126 | else: | ||
127 | h.update(i.encode("utf-8")) | ||
128 | return h.hexdigest() | ||
129 | |||
130 | |||
131 | def spdx_sde(d): | ||
132 | sde = d.getVar("SOURCE_DATE_EPOCH") | ||
133 | if not sde: | ||
134 | return datetime.now(timezone.utc) | ||
135 | |||
136 | return datetime.fromtimestamp(int(sde), timezone.utc) | ||
137 | |||
138 | |||
139 | def get_element_link_id(e): | ||
140 | """ | ||
141 | Get the string ID which should be used to link to an Element. If the | ||
142 | element has an alias, that will be preferred, otherwise its SPDX ID will be | ||
143 | used. | ||
144 | """ | ||
145 | ext = get_alias(e) | ||
146 | if ext is not None and ext.alias: | ||
147 | return ext.alias | ||
148 | return e._id | ||
149 | |||
150 | |||
151 | def get_alias(obj): | ||
152 | for ext in obj.extension: | ||
153 | if not isinstance(ext, OEIdAliasExtension): | ||
154 | continue | ||
155 | return ext | ||
156 | |||
157 | return None | ||
158 | |||
159 | |||
160 | def hash_id(_id): | ||
161 | return hashlib.sha256(_id.encode("utf-8")).hexdigest() | ||
162 | |||
163 | |||
164 | def to_list(l): | ||
165 | if isinstance(l, set): | ||
166 | l = sorted(list(l)) | ||
167 | |||
168 | if not isinstance(l, (list, tuple)): | ||
169 | raise TypeError("Must be a list or tuple. Got %s" % type(l)) | ||
170 | |||
171 | return l | ||
172 | |||
173 | |||
174 | class ObjectSet(oe.spdx30.SHACLObjectSet): | ||
175 | def __init__(self, d): | ||
176 | super().__init__() | ||
177 | self.d = d | ||
178 | self.alias_prefix = None | ||
179 | |||
180 | def create_index(self): | ||
181 | self.by_sha256_hash = {} | ||
182 | super().create_index() | ||
183 | |||
184 | def add_index(self, obj): | ||
185 | # Check that all elements are given an ID before being inserted | ||
186 | if isinstance(obj, oe.spdx30.Element): | ||
187 | if not obj._id: | ||
188 | raise ValueError("Element missing ID") | ||
189 | |||
190 | alias_ext = get_alias(obj) | ||
191 | if alias_ext is not None and alias_ext.alias: | ||
192 | self.obj_by_id[alias_ext.alias] = obj | ||
193 | |||
194 | for v in obj.verifiedUsing: | ||
195 | if not isinstance(v, oe.spdx30.Hash): | ||
196 | continue | ||
197 | |||
198 | if v.algorithm != oe.spdx30.HashAlgorithm.sha256: | ||
199 | continue | ||
200 | |||
201 | self.by_sha256_hash.setdefault(v.hashValue, set()).add(obj) | ||
202 | |||
203 | super().add_index(obj) | ||
204 | if isinstance(obj, oe.spdx30.SpdxDocument): | ||
205 | self.doc = obj | ||
206 | alias_ext = get_alias(obj) | ||
207 | if alias_ext is not None and alias_ext.alias: | ||
208 | self.alias_prefix = OE_ALIAS_PREFIX + hash_id(alias_ext.alias) + "/" | ||
209 | |||
210 | def __filter_obj(self, obj, attr_filter): | ||
211 | return all(getattr(obj, k) == v for k, v in attr_filter.items()) | ||
212 | |||
213 | def foreach_filter(self, typ, *, match_subclass=True, **attr_filter): | ||
214 | for obj in self.foreach_type(typ, match_subclass=match_subclass): | ||
215 | if self.__filter_obj(obj, attr_filter): | ||
216 | yield obj | ||
217 | |||
218 | def find_filter(self, typ, *, match_subclass=True, **attr_filter): | ||
219 | for obj in self.foreach_filter( | ||
220 | typ, match_subclass=match_subclass, **attr_filter | ||
221 | ): | ||
222 | return obj | ||
223 | return None | ||
224 | |||
225 | def foreach_root(self, typ, **attr_filter): | ||
226 | for obj in self.doc.rootElement: | ||
227 | if not isinstance(obj, typ): | ||
228 | continue | ||
229 | |||
230 | if self.__filter_obj(obj, attr_filter): | ||
231 | yield obj | ||
232 | |||
233 | def find_root(self, typ, **attr_filter): | ||
234 | for obj in self.foreach_root(typ, **attr_filter): | ||
235 | return obj | ||
236 | return None | ||
237 | |||
238 | def add_root(self, obj): | ||
239 | self.add(obj) | ||
240 | self.doc.rootElement.append(obj) | ||
241 | return obj | ||
242 | |||
243 | def is_native(self): | ||
244 | for e in self.doc.extension: | ||
245 | if not isinstance(e, oe.sbom30.OEDocumentExtension): | ||
246 | continue | ||
247 | |||
248 | if e.is_native is not None: | ||
249 | return e.is_native | ||
250 | |||
251 | return False | ||
252 | |||
253 | def set_is_native(self, is_native): | ||
254 | for e in self.doc.extension: | ||
255 | if not isinstance(e, oe.sbom30.OEDocumentExtension): | ||
256 | continue | ||
257 | |||
258 | e.is_native = is_native | ||
259 | return | ||
260 | |||
261 | if is_native: | ||
262 | self.doc.extension.append(oe.sbom30.OEDocumentExtension(is_native=True)) | ||
263 | |||
264 | def add_aliases(self): | ||
265 | for o in self.foreach_type(oe.spdx30.Element): | ||
266 | self.set_element_alias(o) | ||
267 | |||
268 | def new_alias_id(self, obj, replace): | ||
269 | unihash = self.d.getVar("BB_UNIHASH") | ||
270 | namespace = self.get_namespace() | ||
271 | if unihash not in obj._id: | ||
272 | bb.warn(f"Unihash {unihash} not found in {obj._id}") | ||
273 | return None | ||
274 | |||
275 | if namespace not in obj._id: | ||
276 | bb.warn(f"Namespace {namespace} not found in {obj._id}") | ||
277 | return None | ||
278 | |||
279 | return obj._id.replace(unihash, "UNIHASH").replace( | ||
280 | namespace, replace + self.d.getVar("PN") | ||
281 | ) | ||
282 | |||
283 | def remove_internal_extensions(self): | ||
284 | def remove(o): | ||
285 | o.extension = [e for e in o.extension if not getattr(e, "INTERNAL", False)] | ||
286 | |||
287 | for o in self.foreach_type(oe.spdx30.Element): | ||
288 | remove(o) | ||
289 | |||
290 | if self.doc: | ||
291 | remove(self.doc) | ||
292 | |||
293 | def get_namespace(self): | ||
294 | namespace_uuid = uuid.uuid5( | ||
295 | uuid.NAMESPACE_DNS, self.d.getVar("SPDX_UUID_NAMESPACE") | ||
296 | ) | ||
297 | pn = self.d.getVar("PN") | ||
298 | return "%s/%s-%s" % ( | ||
299 | self.d.getVar("SPDX_NAMESPACE_PREFIX"), | ||
300 | pn, | ||
301 | str(uuid.uuid5(namespace_uuid, pn)), | ||
302 | ) | ||
303 | |||
304 | def set_element_alias(self, e): | ||
305 | if not e._id or e._id.startswith("_:"): | ||
306 | return | ||
307 | |||
308 | alias_ext = get_alias(e) | ||
309 | if alias_ext is None: | ||
310 | alias_id = self.new_alias_id(e, self.alias_prefix) | ||
311 | if alias_id is not None: | ||
312 | e.extension.append(OEIdAliasExtension(alias=alias_id)) | ||
313 | elif ( | ||
314 | alias_ext.alias | ||
315 | and not isinstance(e, oe.spdx30.SpdxDocument) | ||
316 | and not alias_ext.alias.startswith(self.alias_prefix) | ||
317 | ): | ||
318 | bb.warn( | ||
319 | f"Element {e._id} has alias {alias_ext.alias}, but it should have prefix {self.alias_prefix}" | ||
320 | ) | ||
321 | |||
322 | def new_spdxid(self, *suffix, include_unihash=True): | ||
323 | items = [self.get_namespace()] | ||
324 | if include_unihash: | ||
325 | unihash = self.d.getVar("BB_UNIHASH") | ||
326 | items.append(unihash) | ||
327 | items.extend(re.sub(r"[^a-zA-Z0-9_-]", "_", s) for s in suffix) | ||
328 | return "/".join(items) | ||
329 | |||
330 | def new_import(self, key): | ||
331 | base = f"SPDX_IMPORTS_{key}" | ||
332 | spdxid = self.d.getVar(f"{base}_spdxid") | ||
333 | if not spdxid: | ||
334 | bb.fatal(f"{key} is not a valid SPDX_IMPORTS key") | ||
335 | |||
336 | for i in self.doc.import_: | ||
337 | if i.externalSpdxId == spdxid: | ||
338 | # Already imported | ||
339 | return spdxid | ||
340 | |||
341 | m = oe.spdx30.ExternalMap(externalSpdxId=spdxid) | ||
342 | |||
343 | uri = self.d.getVar(f"{base}_uri") | ||
344 | if uri: | ||
345 | m.locationHint = uri | ||
346 | |||
347 | for pyname, algorithm in oe.spdx30.HashAlgorithm.NAMED_INDIVIDUALS.items(): | ||
348 | value = self.d.getVar(f"{base}_hash_{pyname}") | ||
349 | if value: | ||
350 | m.verifiedUsing.append( | ||
351 | oe.spdx30.Hash( | ||
352 | algorithm=algorithm, | ||
353 | hashValue=value, | ||
354 | ) | ||
355 | ) | ||
356 | |||
357 | self.doc.import_.append(m) | ||
358 | return spdxid | ||
359 | |||
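The SPDX_IMPORTS_<key>_* variables consumed above, shown for a hypothetical key "myproject" (the hash suffixes follow the HashAlgorithm python names):

# SPDX_IMPORTS_myproject_spdxid = "https://example.com/spdx/myproject"    # required
# SPDX_IMPORTS_myproject_uri = "https://example.com/myproject.spdx.json"  # optional locationHint
# SPDX_IMPORTS_myproject_hash_sha256 = "<hex digest>"                     # optional verifiedUsing Hash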
360 | def new_agent(self, varname, *, creation_info=None, add=True): | ||
361 | ref_varname = self.d.getVar(f"{varname}_ref") | ||
362 | if ref_varname: | ||
363 | if ref_varname == varname: | ||
364 | bb.fatal(f"{varname} cannot reference itself") | ||
365 | return self.new_agent(ref_varname, creation_info=creation_info) | ||
366 | |||
367 | import_key = self.d.getVar(f"{varname}_import") | ||
368 | if import_key: | ||
369 | return self.new_import(import_key) | ||
370 | |||
371 | name = self.d.getVar(f"{varname}_name") | ||
372 | if not name: | ||
373 | return None | ||
374 | |||
375 | spdxid = self.new_spdxid("agent", name) | ||
376 | agent = self.find_by_id(spdxid) | ||
377 | if agent is not None: | ||
378 | return agent | ||
379 | |||
380 | agent_type = self.d.getVar("%s_type" % varname) | ||
381 | if agent_type == "person": | ||
382 | agent = oe.spdx30.Person() | ||
383 | elif agent_type == "software": | ||
384 | agent = oe.spdx30.SoftwareAgent() | ||
385 | elif agent_type == "organization": | ||
386 | agent = oe.spdx30.Organization() | ||
387 | elif not agent_type or agent_type == "agent": | ||
388 | agent = oe.spdx30.Agent() | ||
389 | else: | ||
390 | bb.fatal("Unknown agent type '%s' in %s_type" % (agent_type, varname)) | ||
391 | |||
392 | agent._id = spdxid | ||
393 | agent.creationInfo = creation_info or self.doc.creationInfo | ||
394 | agent.name = name | ||
395 | |||
396 | comment = self.d.getVar("%s_comment" % varname) | ||
397 | if comment: | ||
398 | agent.comment = comment | ||
399 | |||
400 | for ( | ||
401 | pyname, | ||
402 | idtype, | ||
403 | ) in oe.spdx30.ExternalIdentifierType.NAMED_INDIVIDUALS.items(): | ||
404 | value = self.d.getVar("%s_id_%s" % (varname, pyname)) | ||
405 | if value: | ||
406 | agent.externalIdentifier.append( | ||
407 | oe.spdx30.ExternalIdentifier( | ||
408 | externalIdentifierType=idtype, | ||
409 | identifier=value, | ||
410 | ) | ||
411 | ) | ||
412 | |||
413 | if add: | ||
414 | self.add(agent) | ||
415 | |||
416 | return agent | ||
417 | |||
418 | def new_creation_info(self): | ||
419 | creation_info = oe.spdx30.CreationInfo() | ||
420 | |||
421 | name = "%s %s" % ( | ||
422 | self.d.getVar("SPDX_TOOL_NAME"), | ||
423 | self.d.getVar("SPDX_TOOL_VERSION"), | ||
424 | ) | ||
425 | tool = self.add( | ||
426 | oe.spdx30.Tool( | ||
427 | _id=self.new_spdxid("tool", name), | ||
428 | creationInfo=creation_info, | ||
429 | name=name, | ||
430 | ) | ||
431 | ) | ||
432 | |||
433 | authors = [] | ||
434 | for a in self.d.getVar("SPDX_AUTHORS").split(): | ||
435 | varname = "SPDX_AUTHORS_%s" % a | ||
436 | author = self.new_agent(varname, creation_info=creation_info) | ||
437 | |||
438 | if not author: | ||
439 | bb.fatal("Unable to find or create author %s" % a) | ||
440 | |||
441 | authors.append(author) | ||
442 | |||
443 | creation_info.created = spdx_sde(self.d) | ||
444 | creation_info.specVersion = self.d.getVar("SPDX_VERSION") | ||
445 | creation_info.createdBy = authors | ||
446 | creation_info.createdUsing = [tool] | ||
447 | |||
448 | return creation_info | ||
449 | |||
450 | def copy_creation_info(self, copy): | ||
451 | c = oe.spdx30.CreationInfo( | ||
452 | created=spdx_sde(self.d), | ||
453 | specVersion=self.d.getVar("SPDX_VERSION"), | ||
454 | ) | ||
455 | |||
456 | for author in copy.createdBy: | ||
457 | if isinstance(author, str): | ||
458 | c.createdBy.append(author) | ||
459 | else: | ||
460 | c.createdBy.append(author._id) | ||
461 | |||
462 | for tool in copy.createdUsing: | ||
463 | if isinstance(tool, str): | ||
464 | c.createdUsing.append(tool) | ||
465 | else: | ||
466 | c.createdUsing.append(tool._id) | ||
467 | |||
468 | return c | ||
469 | |||
470 | def new_annotation(self, subject, comment, typ): | ||
471 | return self.add( | ||
472 | oe.spdx30.Annotation( | ||
473 | _id=self.new_spdxid("annotation", spdxid_hash(comment, typ)), | ||
474 | creationInfo=self.doc.creationInfo, | ||
475 | annotationType=typ, | ||
476 | subject=subject, | ||
477 | statement=comment, | ||
478 | ) | ||
479 | ) | ||
480 | |||
481 | def _new_relationship( | ||
482 | self, | ||
483 | cls, | ||
484 | from_, | ||
485 | typ, | ||
486 | to, | ||
487 | *, | ||
488 | spdxid_name="relationship", | ||
489 | **props, | ||
490 | ): | ||
491 | from_ = to_list(from_) | ||
492 | to = to_list(to) | ||
493 | |||
494 | if not from_: | ||
495 | return [] | ||
496 | |||
497 | if not to: | ||
498 | to = [oe.spdx30.IndividualElement.NoneElement] | ||
499 | |||
500 | ret = [] | ||
501 | |||
502 | for f in from_: | ||
503 | hash_args = [typ, f] | ||
504 | for k in sorted(props.keys()): | ||
505 | hash_args.append(props[k]) | ||
506 | hash_args.extend(to) | ||
507 | |||
508 | relationship = self.add( | ||
509 | cls( | ||
510 | _id=self.new_spdxid(spdxid_name, spdxid_hash(*hash_args)), | ||
511 | creationInfo=self.doc.creationInfo, | ||
512 | from_=f, | ||
513 | relationshipType=typ, | ||
514 | to=to, | ||
515 | **props, | ||
516 | ) | ||
517 | ) | ||
518 | ret.append(relationship) | ||
519 | |||
520 | return ret | ||
521 | |||
522 | def new_relationship(self, from_, typ, to): | ||
523 | return self._new_relationship(oe.spdx30.Relationship, from_, typ, to) | ||
524 | |||
525 | def new_scoped_relationship(self, from_, typ, scope, to): | ||
526 | return self._new_relationship( | ||
527 | oe.spdx30.LifecycleScopedRelationship, | ||
528 | from_, | ||
529 | typ, | ||
530 | to, | ||
531 | scope=scope, | ||
532 | ) | ||
533 | |||
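Hypothetical call sites for the two helpers above; each element in from_ yields its own relationship object, whose SPDX ID is derived from hashing the type, source, extra properties, and targets:

# objset.new_relationship([pkg], oe.spdx30.RelationshipType.contains, files)
# objset.new_scoped_relationship(
#     [pkg],
#     oe.spdx30.RelationshipType.dependsOn,
#     oe.spdx30.LifecycleScopeType.build,
#     deps,
# )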
534 | def new_license_expression( | ||
535 | self, license_expression, license_data, license_text_map={} | ||
536 | ): | ||
537 | license_list_version = license_data["licenseListVersion"] | ||
538 | # SPDX 3 requires that the license list version be a semver | ||
539 | # MAJOR.MINOR.MICRO, but the actual license list version might be | ||
540 | # MAJOR.MINOR on some older versions. As such, manually append a .0 | ||
541 | # micro version if it's missing to keep SPDX happy | ||
542 | if license_list_version.count(".") < 2: | ||
543 | license_list_version += ".0" | ||
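# e.g. a hypothetical "3.21" becomes "3.21.0", while "3.21.0" is left unchanged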
544 | |||
545 | spdxid = [ | ||
546 | "license", | ||
547 | license_list_version, | ||
548 | re.sub(r"[^a-zA-Z0-9_-]", "_", license_expression), | ||
549 | ] | ||
550 | |||
551 | license_text = [ | ||
552 | (k, license_text_map[k]) for k in sorted(license_text_map.keys()) | ||
553 | ] | ||
554 | |||
555 | if not license_text: | ||
556 | lic = self.find_filter( | ||
557 | oe.spdx30.simplelicensing_LicenseExpression, | ||
558 | simplelicensing_licenseExpression=license_expression, | ||
559 | simplelicensing_licenseListVersion=license_list_version, | ||
560 | ) | ||
561 | if lic is not None: | ||
562 | return lic | ||
563 | else: | ||
564 | spdxid.append(spdxid_hash(*(v for _, v in license_text))) | ||
565 | lic = self.find_by_id(self.new_spdxid(*spdxid)) | ||
566 | if lic is not None: | ||
567 | return lic | ||
568 | |||
569 | lic = self.add( | ||
570 | oe.spdx30.simplelicensing_LicenseExpression( | ||
571 | _id=self.new_spdxid(*spdxid), | ||
572 | creationInfo=self.doc.creationInfo, | ||
573 | simplelicensing_licenseExpression=license_expression, | ||
574 | simplelicensing_licenseListVersion=license_list_version, | ||
575 | ) | ||
576 | ) | ||
577 | |||
578 | for key, value in license_text: | ||
579 | lic.simplelicensing_customIdToUri.append( | ||
580 | oe.spdx30.DictionaryEntry(key=key, value=value) | ||
581 | ) | ||
582 | |||
583 | return lic | ||
584 | |||
585 | def scan_declared_licenses(self, spdx_file, filepath, license_data): | ||
586 | for e in spdx_file.extension: | ||
587 | if isinstance(e, OELicenseScannedExtension): | ||
588 | return | ||
589 | |||
590 | file_licenses = set() | ||
591 | for extracted_lic in oe.spdx_common.extract_licenses(filepath): | ||
592 | lic = self.new_license_expression(extracted_lic, license_data) | ||
593 | self.set_element_alias(lic) | ||
594 | file_licenses.add(lic) | ||
595 | |||
596 | self.new_relationship( | ||
597 | [spdx_file], | ||
598 | oe.spdx30.RelationshipType.hasDeclaredLicense, | ||
599 | [oe.sbom30.get_element_link_id(lic_alias) for lic_alias in file_licenses], | ||
600 | ) | ||
601 | spdx_file.extension.append(OELicenseScannedExtension()) | ||
602 | |||
603 | def new_file(self, _id, name, path, *, purposes=[]): | ||
604 | sha256_hash = bb.utils.sha256_file(path) | ||
605 | |||
606 | for f in self.by_sha256_hash.get(sha256_hash, []): | ||
607 | if not isinstance(f, oe.spdx30.software_File): | ||
608 | continue | ||
609 | |||
610 | if purposes: | ||
611 | new_primary = purposes[0] | ||
612 | new_additional = [] | ||
613 | |||
614 | if f.software_primaryPurpose: | ||
615 | new_additional.append(f.software_primaryPurpose) | ||
616 | new_additional.extend(f.software_additionalPurpose) | ||
617 | |||
618 | new_additional = sorted( | ||
619 | list(set(p for p in new_additional if p != new_primary)) | ||
620 | ) | ||
621 | |||
622 | f.software_primaryPurpose = new_primary | ||
623 | f.software_additionalPurpose = new_additional | ||
624 | |||
625 | if f.name != name: | ||
626 | for e in f.extension: | ||
627 | if isinstance(e, OEFileNameAliasExtension): | ||
628 | e.aliases.append(name) | ||
629 | break | ||
630 | else: | ||
631 | f.extension.append(OEFileNameAliasExtension(aliases=[name])) | ||
632 | |||
633 | return f | ||
634 | |||
635 | spdx_file = oe.spdx30.software_File( | ||
636 | _id=_id, | ||
637 | creationInfo=self.doc.creationInfo, | ||
638 | name=name, | ||
639 | ) | ||
640 | if purposes: | ||
641 | spdx_file.software_primaryPurpose = purposes[0] | ||
642 | spdx_file.software_additionalPurpose = purposes[1:] | ||
643 | |||
644 | spdx_file.verifiedUsing.append( | ||
645 | oe.spdx30.Hash( | ||
646 | algorithm=oe.spdx30.HashAlgorithm.sha256, | ||
647 | hashValue=sha256_hash, | ||
648 | ) | ||
649 | ) | ||
650 | |||
651 | return self.add(spdx_file) | ||
652 | |||
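A minimal sketch of the purpose-merge rule implemented above, using plain strings in place of software_SoftwarePurpose members:

def merge_purposes(existing_primary, existing_additional, new_purposes):
    # The first new purpose wins as primary; the old primary and old
    # additional purposes are demoted, de-duplicated, and sorted
    primary = new_purposes[0]
    additional = set(existing_additional)
    if existing_primary:
        additional.add(existing_primary)
    additional.discard(primary)
    return primary, sorted(additional)

# e.g. re-recording a "library" file as an "application":
print(merge_purposes("library", [], ["application"]))  # ('application', ['library'])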
653 | def new_cve_vuln(self, cve): | ||
654 | v = oe.spdx30.security_Vulnerability() | ||
655 | v._id = self.new_spdxid("vulnerability", cve) | ||
656 | v.creationInfo = self.doc.creationInfo | ||
657 | |||
658 | v.externalIdentifier.append( | ||
659 | oe.spdx30.ExternalIdentifier( | ||
660 | externalIdentifierType=oe.spdx30.ExternalIdentifierType.cve, | ||
661 | identifier=cve, | ||
662 | identifierLocator=[ | ||
663 | f"https://cveawg.mitre.org/api/cve/{cve}", | ||
664 | f"https://www.cve.org/CVERecord?id={cve}", | ||
665 | ], | ||
666 | ) | ||
667 | ) | ||
668 | return self.add(v) | ||
669 | |||
670 | def new_vex_patched_relationship(self, from_, to): | ||
671 | return self._new_relationship( | ||
672 | oe.spdx30.security_VexFixedVulnAssessmentRelationship, | ||
673 | from_, | ||
674 | oe.spdx30.RelationshipType.fixedIn, | ||
675 | to, | ||
676 | spdxid_name="vex-fixed", | ||
677 | security_vexVersion=VEX_VERSION, | ||
678 | ) | ||
679 | |||
680 | def new_vex_unpatched_relationship(self, from_, to): | ||
681 | return self._new_relationship( | ||
682 | oe.spdx30.security_VexAffectedVulnAssessmentRelationship, | ||
683 | from_, | ||
684 | oe.spdx30.RelationshipType.affects, | ||
685 | to, | ||
686 | spdxid_name="vex-affected", | ||
687 | security_vexVersion=VEX_VERSION, | ||
688 | security_actionStatement="Mitigation action unknown", | ||
689 | ) | ||
690 | |||
691 | def new_vex_ignored_relationship(self, from_, to, *, impact_statement): | ||
692 | return self._new_relationship( | ||
693 | oe.spdx30.security_VexNotAffectedVulnAssessmentRelationship, | ||
694 | from_, | ||
695 | oe.spdx30.RelationshipType.doesNotAffect, | ||
696 | to, | ||
697 | spdxid_name="vex-not-affected", | ||
698 | security_vexVersion=VEX_VERSION, | ||
699 | security_impactStatement=impact_statement, | ||
700 | ) | ||
701 | |||
702 | def import_bitbake_build_objset(self): | ||
703 | deploy_dir_spdx = Path(self.d.getVar("DEPLOY_DIR_SPDX")) | ||
704 | bb_objset = load_jsonld( | ||
705 | self.d, deploy_dir_spdx / "bitbake.spdx.json", required=True | ||
706 | ) | ||
707 | self.doc.import_.extend(bb_objset.doc.import_) | ||
708 | self.update(bb_objset.objects) | ||
709 | |||
710 | return bb_objset | ||
711 | |||
712 | def import_bitbake_build(self): | ||
713 | def find_bitbake_build(objset): | ||
714 | return objset.find_filter( | ||
715 | oe.spdx30.build_Build, | ||
716 | build_buildType=SPDX_BUILD_TYPE, | ||
717 | ) | ||
718 | |||
719 | build = find_bitbake_build(self) | ||
720 | if build: | ||
721 | return build | ||
722 | |||
723 | bb_objset = self.import_bitbake_build_objset() | ||
724 | build = find_bitbake_build(bb_objset) | ||
725 | if build is None: | ||
726 | bb.fatal(f"No build found in {deploy_dir_spdx}") | ||
727 | |||
728 | return build | ||
729 | |||
730 | def new_task_build(self, name, typ): | ||
731 | current_task = self.d.getVar("BB_CURRENTTASK") | ||
732 | pn = self.d.getVar("PN") | ||
733 | |||
734 | build = self.add( | ||
735 | oe.spdx30.build_Build( | ||
736 | _id=self.new_spdxid("build", name), | ||
737 | creationInfo=self.doc.creationInfo, | ||
738 | name=f"{pn}:do_{current_task}:{name}", | ||
739 | build_buildType=f"{SPDX_BUILD_TYPE}/do_{current_task}/{typ}", | ||
740 | ) | ||
741 | ) | ||
742 | |||
743 | if self.d.getVar("SPDX_INCLUDE_BITBAKE_PARENT_BUILD") == "1": | ||
744 | bitbake_build = self.import_bitbake_build() | ||
745 | |||
746 | self.new_relationship( | ||
747 | [bitbake_build], | ||
748 | oe.spdx30.RelationshipType.ancestorOf, | ||
749 | [build], | ||
750 | ) | ||
751 | |||
752 | if self.d.getVar("SPDX_INCLUDE_BUILD_VARIABLES") == "1": | ||
753 | for varname in sorted(self.d.keys()): | ||
754 | if varname.startswith("__"): | ||
755 | continue | ||
756 | |||
757 | value = self.d.getVar(varname, expand=False) | ||
758 | |||
759 | # TODO: Deal with non-string values | ||
760 | if not isinstance(value, str): | ||
761 | continue | ||
762 | |||
763 | build.build_parameter.append( | ||
764 | oe.spdx30.DictionaryEntry(key=varname, value=value) | ||
765 | ) | ||
766 | |||
767 | return build | ||
768 | |||
769 | def new_archive(self, archive_name): | ||
770 | return self.add( | ||
771 | oe.spdx30.software_File( | ||
772 | _id=self.new_spdxid("archive", str(archive_name)), | ||
773 | creationInfo=self.doc.creationInfo, | ||
774 | name=str(archive_name), | ||
775 | software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.archive, | ||
776 | ) | ||
777 | ) | ||
778 | |||
779 | @classmethod | ||
780 | def new_objset(cls, d, name, copy_from_bitbake_doc=True): | ||
781 | objset = cls(d) | ||
782 | |||
783 | document = oe.spdx30.SpdxDocument( | ||
784 | _id=objset.new_spdxid("document", name), | ||
785 | name=name, | ||
786 | ) | ||
787 | |||
788 | document.extension.append( | ||
789 | OEIdAliasExtension( | ||
790 | alias=objset.new_alias_id( | ||
791 | document, | ||
792 | OE_DOC_ALIAS_PREFIX + d.getVar("PN") + "/" + name + "/", | ||
793 | ), | ||
794 | ) | ||
795 | ) | ||
796 | objset.doc = document | ||
797 | objset.add_index(document) | ||
798 | |||
799 | if copy_from_bitbake_doc: | ||
800 | bb_objset = objset.import_bitbake_build_objset() | ||
801 | document.creationInfo = objset.copy_creation_info( | ||
802 | bb_objset.doc.creationInfo | ||
803 | ) | ||
804 | else: | ||
805 | document.creationInfo = objset.new_creation_info() | ||
806 | |||
807 | return objset | ||
808 | |||
809 | def expand_collection(self, *, add_objectsets=[]): | ||
810 | """ | ||
811 | Expands a collection to pull in all missing elements | ||
812 | |||
813 | Returns the set of SPDX IDs that could not be found and linked into the document | ||
814 | """ | ||
815 | missing_spdxids = set() | ||
816 | imports = {e.externalSpdxId: e for e in self.doc.import_} | ||
817 | |||
818 | def merge_doc(other): | ||
819 | nonlocal imports | ||
820 | |||
821 | for e in other.doc.import_: | ||
822 | if e.externalSpdxId not in imports: | ||
823 | imports[e.externalSpdxId] = e | ||
824 | |||
825 | self.objects |= other.objects | ||
826 | |||
827 | for o in add_objectsets: | ||
828 | merge_doc(o) | ||
829 | |||
830 | needed_spdxids = self.link() | ||
831 | provided_spdxids = set(self.obj_by_id.keys()) | ||
832 | |||
833 | while True: | ||
834 | import_spdxids = set(imports.keys()) | ||
835 | searching_spdxids = ( | ||
836 | needed_spdxids - provided_spdxids - missing_spdxids - import_spdxids | ||
837 | ) | ||
838 | if not searching_spdxids: | ||
839 | break | ||
840 | |||
841 | spdxid = searching_spdxids.pop() | ||
842 | bb.debug( | ||
843 | 1, | ||
844 | f"Searching for {spdxid}. Remaining: {len(searching_spdxids)}, Total: {len(provided_spdxids)}, Missing: {len(missing_spdxids)}, Imports: {len(import_spdxids)}", | ||
845 | ) | ||
846 | dep_objset, dep_path = find_by_spdxid(self.d, spdxid) | ||
847 | |||
848 | if dep_objset: | ||
849 | dep_provided = set(dep_objset.obj_by_id.keys()) | ||
850 | if spdxid not in dep_provided: | ||
851 | bb.fatal(f"{spdxid} not found in {dep_path}") | ||
852 | provided_spdxids |= dep_provided | ||
853 | needed_spdxids |= dep_objset.missing_ids | ||
854 | merge_doc(dep_objset) | ||
855 | else: | ||
856 | missing_spdxids.add(spdxid) | ||
857 | |||
858 | self.doc.import_ = sorted(imports.values(), key=lambda e: e.externalSpdxId) | ||
859 | bb.debug(1, "Linking...") | ||
860 | self.link() | ||
861 | |||
862 | # Manually go through all of the simplelicensing_customIdToUri DictionaryEntry | ||
863 | # items and resolve any aliases to actual objects. | ||
864 | for lic in self.foreach_type(oe.spdx30.simplelicensing_LicenseExpression): | ||
865 | for d in lic.simplelicensing_customIdToUri: | ||
866 | if d.value.startswith(OE_ALIAS_PREFIX): | ||
867 | obj = self.find_by_id(d.value) | ||
868 | if obj is not None: | ||
869 | d.value = obj._id | ||
870 | else: | ||
871 | self.missing_ids.add(d.value) | ||
872 | |||
873 | self.missing_ids -= set(imports.keys()) | ||
874 | return self.missing_ids | ||
875 | |||
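The loop above computes a closure over SPDX ID dependencies; a minimal sketch of its shape, with a caller-supplied resolve() standing in for find_by_spdxid():

def close_over(needed, provided, imports, resolve):
    # resolve(spdxid) returns (provided_ids, needed_ids) for the document
    # defining spdxid, or None if no such document can be found
    missing = set()
    while True:
        searching = needed - provided - missing - imports
        if not searching:
            return missing
        spdxid = searching.pop()
        dep = resolve(spdxid)
        if dep is not None:
            dep_provided, dep_needed = dep
            provided |= dep_provided
            needed |= dep_needed
        else:
            missing.add(spdxid)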
876 | |||
877 | def load_jsonld(d, path, required=False): | ||
878 | deserializer = oe.spdx30.JSONLDDeserializer() | ||
879 | objset = ObjectSet(d) | ||
880 | try: | ||
881 | with path.open("rb") as f: | ||
882 | deserializer.read(f, objset) | ||
883 | except FileNotFoundError: | ||
884 | if required: | ||
885 | bb.fatal("No SPDX document named %s found" % path) | ||
886 | return None | ||
887 | |||
888 | if not objset.doc: | ||
889 | bb.fatal("SPDX Document %s has no SPDXDocument element" % path) | ||
890 | return None | ||
891 | |||
892 | objset.objects.remove(objset.doc) | ||
893 | return objset | ||
894 | |||
895 | |||
896 | def jsonld_arch_path(d, arch, subdir, name, deploydir=None): | ||
897 | if deploydir is None: | ||
898 | deploydir = Path(d.getVar("DEPLOY_DIR_SPDX")) | ||
899 | return deploydir / arch / subdir / (name + ".spdx.json") | ||
900 | |||
901 | |||
902 | def jsonld_hash_path(h): | ||
903 | return Path("by-spdxid-hash") / h[:2], h | ||
904 | |||
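# e.g. for a hypothetical hash "abc123", jsonld_hash_path() returns
# (Path("by-spdxid-hash/ab"), "abc123"), which jsonld_arch_path() maps to
# <deploydir>/<arch>/by-spdxid-hash/ab/abc123.spdx.json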
905 | |||
906 | def load_jsonld_by_arch(d, arch, subdir, name, *, required=False): | ||
907 | path = jsonld_arch_path(d, arch, subdir, name) | ||
908 | objset = load_jsonld(d, path, required=required) | ||
909 | if objset is not None: | ||
910 | return (objset, path) | ||
911 | return (None, None) | ||
912 | |||
913 | |||
914 | def find_jsonld(d, subdir, name, *, required=False): | ||
915 | package_archs = d.getVar("SPDX_MULTILIB_SSTATE_ARCHS").split() | ||
916 | package_archs.reverse() | ||
917 | |||
918 | for arch in package_archs: | ||
919 | objset, path = load_jsonld_by_arch(d, arch, subdir, name) | ||
920 | if objset is not None: | ||
921 | return (objset, path) | ||
922 | |||
923 | if required: | ||
924 | bb.fatal("Could not find a %s SPDX document named %s" % (subdir, name)) | ||
925 | |||
926 | return (None, None) | ||
927 | |||
928 | |||
929 | def write_jsonld_doc(d, objset, dest): | ||
930 | if not isinstance(objset, ObjectSet): | ||
931 | bb.fatal("Only an ObjsetSet can be serialized") | ||
932 | return | ||
933 | |||
934 | if not objset.doc: | ||
935 | bb.fatal("ObjectSet is missing a SpdxDocument") | ||
936 | return | ||
937 | |||
938 | objset.doc.rootElement = sorted(list(set(objset.doc.rootElement))) | ||
939 | objset.doc.profileConformance = sorted( | ||
940 | list( | ||
941 | getattr(oe.spdx30.ProfileIdentifierType, p) | ||
942 | for p in d.getVar("SPDX_PROFILES").split() | ||
943 | ) | ||
944 | ) | ||
945 | |||
946 | dest.parent.mkdir(exist_ok=True, parents=True) | ||
947 | |||
948 | if d.getVar("SPDX_PRETTY") == "1": | ||
949 | serializer = oe.spdx30.JSONLDSerializer( | ||
950 | indent=2, | ||
951 | ) | ||
952 | else: | ||
953 | serializer = oe.spdx30.JSONLDInlineSerializer() | ||
954 | |||
955 | objset.objects.add(objset.doc) | ||
956 | with dest.open("wb") as f: | ||
957 | serializer.write(objset, f, force_at_graph=True) | ||
958 | objset.objects.remove(objset.doc) | ||
959 | |||
960 | |||
961 | def write_recipe_jsonld_doc( | ||
962 | d, | ||
963 | objset, | ||
964 | subdir, | ||
965 | deploydir, | ||
966 | *, | ||
967 | create_spdx_id_links=True, | ||
968 | ): | ||
969 | pkg_arch = d.getVar("SSTATE_PKGARCH") | ||
970 | |||
971 | dest = jsonld_arch_path(d, pkg_arch, subdir, objset.doc.name, deploydir=deploydir) | ||
972 | |||
973 | def link_id(_id): | ||
974 | hash_path = jsonld_hash_path(hash_id(_id)) | ||
975 | |||
976 | link_name = jsonld_arch_path( | ||
977 | d, | ||
978 | pkg_arch, | ||
979 | *hash_path, | ||
980 | deploydir=deploydir, | ||
981 | ) | ||
982 | try: | ||
983 | link_name.parent.mkdir(exist_ok=True, parents=True) | ||
984 | link_name.symlink_to(os.path.relpath(dest, link_name.parent)) | ||
985 | except FileExistsError: | ||
986 | target = link_name.readlink() | ||
987 | bb.warn( | ||
988 | f"Unable to link {_id} in {dest} as {link_name}. Already points to {target}" | ||
989 | ) | ||
990 | raise | ||
991 | |||
992 | return hash_path[-1] | ||
993 | |||
994 | objset.add_aliases() | ||
995 | |||
996 | try: | ||
997 | if create_spdx_id_links: | ||
998 | alias_ext = get_alias(objset.doc) | ||
999 | if alias_ext is not None and alias_ext.alias: | ||
1000 | alias_ext.link_name = link_id(alias_ext.alias) | ||
1001 | |||
1002 | finally: | ||
1003 | # It is really helpful for debugging if the JSON document is written | ||
1004 | # out, so always do that even if there is an error making the links | ||
1005 | write_jsonld_doc(d, objset, dest) | ||
1006 | |||
1007 | |||
1008 | def find_root_obj_in_jsonld(d, subdir, fn_name, obj_type, **attr_filter): | ||
1009 | objset, fn = find_jsonld(d, subdir, fn_name, required=True) | ||
1010 | |||
1011 | spdx_obj = objset.find_root(obj_type, **attr_filter) | ||
1012 | if not spdx_obj: | ||
1013 | bb.fatal("No root %s found in %s" % (obj_type.__name__, fn)) | ||
1014 | |||
1015 | return spdx_obj, objset | ||
1016 | |||
1017 | |||
1018 | def load_obj_in_jsonld(d, arch, subdir, fn_name, obj_type, **attr_filter): | ||
1019 | objset, fn = load_jsonld_by_arch(d, arch, subdir, fn_name, required=True) | ||
1020 | |||
1021 | spdx_obj = objset.find_filter(obj_type, **attr_filter) | ||
1022 | if not spdx_obj: | ||
1023 | bb.fatal("No %s found in %s" % (obj_type.__name__, fn)) | ||
1024 | |||
1025 | return spdx_obj, objset | ||
1026 | |||
1027 | |||
1028 | def find_by_spdxid(d, spdxid, *, required=False): | ||
1029 | if spdxid.startswith(OE_ALIAS_PREFIX): | ||
1030 | h = spdxid[len(OE_ALIAS_PREFIX) :].split("/", 1)[0] | ||
1031 | return find_jsonld(d, *jsonld_hash_path(h), required=required) | ||
1032 | return find_jsonld(d, *jsonld_hash_path(hash_id(spdxid)), required=required) | ||
1033 | |||
1034 | |||
1035 | def create_sbom(d, name, root_elements, add_objectsets=[]): | ||
1036 | objset = ObjectSet.new_objset(d, name) | ||
1037 | |||
1038 | sbom = objset.add( | ||
1039 | oe.spdx30.software_Sbom( | ||
1040 | _id=objset.new_spdxid("sbom", name), | ||
1041 | name=name, | ||
1042 | creationInfo=objset.doc.creationInfo, | ||
1043 | software_sbomType=[oe.spdx30.software_SbomType.build], | ||
1044 | rootElement=root_elements, | ||
1045 | ) | ||
1046 | ) | ||
1047 | |||
1048 | missing_spdxids = objset.expand_collection(add_objectsets=add_objectsets) | ||
1049 | if missing_spdxids: | ||
1050 | bb.warn( | ||
1051 | "The following SPDX IDs were unable to be resolved:\n " | ||
1052 | + "\n ".join(sorted(list(missing_spdxids))) | ||
1053 | ) | ||
1054 | |||
1055 | # Filter out internal extensions from final SBoMs | ||
1056 | objset.remove_internal_extensions() | ||
1057 | |||
1058 | # SBoM should be the only root element of the document | ||
1059 | objset.doc.rootElement = [sbom] | ||
1060 | |||
1061 | # De-duplicate licenses | ||
1062 | unique = set() | ||
1063 | dedup = {} | ||
1064 | for lic in objset.foreach_type(oe.spdx30.simplelicensing_LicenseExpression): | ||
1065 | for u in unique: | ||
1066 | if ( | ||
1067 | u.simplelicensing_licenseExpression | ||
1068 | == lic.simplelicensing_licenseExpression | ||
1069 | and u.simplelicensing_licenseListVersion | ||
1070 | == lic.simplelicensing_licenseListVersion | ||
1071 | ): | ||
1072 | dedup[lic] = u | ||
1073 | break | ||
1074 | else: | ||
1075 | unique.add(lic) | ||
1076 | |||
1077 | if dedup: | ||
1078 | for rel in objset.foreach_filter( | ||
1079 | oe.spdx30.Relationship, | ||
1080 | relationshipType=oe.spdx30.RelationshipType.hasDeclaredLicense, | ||
1081 | ): | ||
1082 | rel.to = [dedup.get(to, to) for to in rel.to] | ||
1083 | |||
1084 | for rel in objset.foreach_filter( | ||
1085 | oe.spdx30.Relationship, | ||
1086 | relationshipType=oe.spdx30.RelationshipType.hasConcludedLicense, | ||
1087 | ): | ||
1088 | rel.to = [dedup.get(to, to) for to in rel.to] | ||
1089 | |||
1090 | for k, v in dedup.items(): | ||
1091 | bb.debug(1, f"Removing duplicate License {k._id} -> {v._id}") | ||
1092 | objset.objects.remove(k) | ||
1093 | |||
1094 | objset.create_index() | ||
1095 | |||
1096 | return objset, sbom | ||
diff --git a/meta/lib/oe/sdk.py b/meta/lib/oe/sdk.py index 3dc3672210..9fe0fbb752 100644 --- a/meta/lib/oe/sdk.py +++ b/meta/lib/oe/sdk.py | |||
@@ -148,13 +148,11 @@ def get_extra_sdkinfo(sstate_dir): | |||
148 | extra_info['filesizes'] = {} | 148 | extra_info['filesizes'] = {} |
149 | for root, _, files in os.walk(sstate_dir): | 149 | for root, _, files in os.walk(sstate_dir): |
150 | for fn in files: | 150 | for fn in files: |
151 | if fn.endswith('.tgz'): | 151 | # Note that this makes an assumption about the sstate filenames |
152 | if '.tar.' in fn and not fn.endswith('.siginfo'): | ||
152 | fsize = int(math.ceil(float(os.path.getsize(os.path.join(root, fn))) / 1024)) | 153 | fsize = int(math.ceil(float(os.path.getsize(os.path.join(root, fn))) / 1024)) |
153 | task = fn.rsplit(':',1)[1].split('_',1)[1].split(',')[0] | 154 | task = fn.rsplit(':',1)[1].split('_',1)[1].split(',')[0] |
154 | origtotal = extra_info['tasksizes'].get(task, 0) | 155 | origtotal = extra_info['tasksizes'].get(task, 0) |
155 | extra_info['tasksizes'][task] = origtotal + fsize | 156 | extra_info['tasksizes'][task] = origtotal + fsize |
156 | extra_info['filesizes'][fn] = fsize | 157 | extra_info['filesizes'][fn] = fsize |
157 | return extra_info | 158 | return extra_info |
158 | |||
159 | if __name__ == "__main__": | ||
160 | pass | ||
diff --git a/meta/lib/oe/spdx30.py b/meta/lib/oe/spdx30.py new file mode 100644 index 0000000000..cd97eebd18 --- /dev/null +++ b/meta/lib/oe/spdx30.py | |||
@@ -0,0 +1,5593 @@ | |||
1 | #! /usr/bin/env python3 | ||
2 | # | ||
3 | # Generated Python bindings from a SHACL model | ||
4 | # | ||
5 | # This file was automatically generated by shacl2code. DO NOT MANUALLY MODIFY IT | ||
6 | # | ||
7 | # SPDX-License-Identifier: MIT | ||
8 | |||
9 | import functools | ||
10 | import hashlib | ||
11 | import json | ||
12 | import re | ||
13 | import sys | ||
14 | import threading | ||
15 | import time | ||
16 | from contextlib import contextmanager | ||
17 | from datetime import datetime, timezone, timedelta | ||
18 | from enum import Enum | ||
19 | from abc import ABC, abstractmethod | ||
20 | |||
21 | |||
22 | def check_type(obj, types): | ||
23 | if not isinstance(obj, types): | ||
24 | if isinstance(types, (list, tuple)): | ||
25 | raise TypeError( | ||
26 | f"Value must be one of type: {', '.join(t.__name__ for t in types)}. Got {type(obj)}" | ||
27 | ) | ||
28 | raise TypeError(f"Value must be of type {types.__name__}. Got {type(obj)}") | ||
29 | |||
30 | |||
31 | class Property(ABC): | ||
32 | """ | ||
33 | A generic SHACL object property. The different types will derive from this | ||
34 | class | ||
35 | """ | ||
36 | |||
37 | def __init__(self, *, pattern=None): | ||
38 | self.pattern = pattern | ||
39 | |||
40 | def init(self): | ||
41 | return None | ||
42 | |||
43 | def validate(self, value): | ||
44 | check_type(value, self.VALID_TYPES) | ||
45 | if self.pattern is not None and not re.search( | ||
46 | self.pattern, self.to_string(value) | ||
47 | ): | ||
48 | raise ValueError( | ||
49 | f"Value is not correctly formatted. Got '{self.to_string(value)}'" | ||
50 | ) | ||
51 | |||
52 | def set(self, value): | ||
53 | return value | ||
54 | |||
55 | def check_min_count(self, value, min_count): | ||
56 | return min_count == 1 | ||
57 | |||
58 | def check_max_count(self, value, max_count): | ||
59 | return max_count == 1 | ||
60 | |||
61 | def elide(self, value): | ||
62 | return value is None | ||
63 | |||
64 | def walk(self, value, callback, path): | ||
65 | callback(value, path) | ||
66 | |||
67 | def iter_objects(self, value, recursive, visited): | ||
68 | return [] | ||
69 | |||
70 | def link_prop(self, value, objectset, missing, visited): | ||
71 | return value | ||
72 | |||
73 | def to_string(self, value): | ||
74 | return str(value) | ||
75 | |||
76 | @abstractmethod | ||
77 | def encode(self, encoder, value, state): | ||
78 | pass | ||
79 | |||
80 | @abstractmethod | ||
81 | def decode(self, decoder, *, objectset=None): | ||
82 | pass | ||
83 | |||
84 | |||
85 | class StringProp(Property): | ||
86 | """ | ||
87 | A scalar string property for a SHACL object | ||
88 | """ | ||
89 | |||
90 | VALID_TYPES = str | ||
91 | |||
92 | def set(self, value): | ||
93 | return str(value) | ||
94 | |||
95 | def encode(self, encoder, value, state): | ||
96 | encoder.write_string(value) | ||
97 | |||
98 | def decode(self, decoder, *, objectset=None): | ||
99 | return decoder.read_string() | ||
100 | |||
101 | |||
102 | class AnyURIProp(StringProp): | ||
103 | def encode(self, encoder, value, state): | ||
104 | encoder.write_iri(value) | ||
105 | |||
106 | def decode(self, decoder, *, objectset=None): | ||
107 | return decoder.read_iri() | ||
108 | |||
109 | |||
110 | class DateTimeProp(Property): | ||
111 | """ | ||
112 | A Date/Time Object with optional timezone | ||
113 | """ | ||
114 | |||
115 | VALID_TYPES = datetime | ||
116 | UTC_FORMAT_STR = "%Y-%m-%dT%H:%M:%SZ" | ||
117 | REGEX = r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(Z|[+-]\d{2}:\d{2})?$" | ||
118 | |||
119 | def set(self, value): | ||
120 | return self._normalize(value) | ||
121 | |||
122 | def encode(self, encoder, value, state): | ||
123 | encoder.write_datetime(self.to_string(value)) | ||
124 | |||
125 | def decode(self, decoder, *, objectset=None): | ||
126 | s = decoder.read_datetime() | ||
127 | if s is None: | ||
128 | return None | ||
129 | v = self.from_string(s) | ||
130 | return self._normalize(v) | ||
131 | |||
132 | def _normalize(self, value): | ||
133 | if value.utcoffset() is None: | ||
134 | value = value.astimezone() | ||
135 | offset = value.utcoffset() | ||
136 | seconds = offset % timedelta(minutes=-1 if offset.total_seconds() < 0 else 1) | ||
137 | if seconds: | ||
138 | offset = offset - seconds | ||
139 | value = value.replace(tzinfo=timezone(offset)) | ||
140 | value = value.replace(microsecond=0) | ||
141 | return value | ||
142 | |||
143 | def to_string(self, value): | ||
144 | value = self._normalize(value) | ||
145 | if value.tzinfo == timezone.utc: | ||
146 | return value.strftime(self.UTC_FORMAT_STR) | ||
147 | return value.isoformat() | ||
148 | |||
149 | def from_string(self, value): | ||
150 | if not re.match(self.REGEX, value): | ||
151 | raise ValueError(f"'{value}' is not a correctly formatted datetime") | ||
152 | if "Z" in value: | ||
153 | d = datetime( | ||
154 | *(time.strptime(value, self.UTC_FORMAT_STR)[0:6]), | ||
155 | tzinfo=timezone.utc, | ||
156 | ) | ||
157 | else: | ||
158 | d = datetime.fromisoformat(value) | ||
159 | |||
160 | return self._normalize(d) | ||
161 | |||
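A small usage sketch of the normalization above (arbitrary example values): sub-minute UTC offsets are rounded to whole minutes and microseconds are dropped:

from datetime import datetime, timezone, timedelta

d = datetime(2024, 1, 2, 3, 4, 5, 123456,
             tzinfo=timezone(timedelta(hours=5, minutes=30, seconds=45)))
# The +05:30:45 offset is truncated to +05:30; microseconds are removed
print(DateTimeProp().to_string(d))  # -> "2024-01-02T03:04:05+05:30"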
162 | |||
163 | class DateTimeStampProp(DateTimeProp): | ||
164 | """ | ||
165 | A Date/Time Object with required timestamp | ||
166 | """ | ||
167 | |||
168 | REGEX = r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(Z|[+-]\d{2}:\d{2})$" | ||
169 | |||
170 | |||
171 | class IntegerProp(Property): | ||
172 | VALID_TYPES = int | ||
173 | |||
174 | def set(self, value): | ||
175 | return int(value) | ||
176 | |||
177 | def encode(self, encoder, value, state): | ||
178 | encoder.write_integer(value) | ||
179 | |||
180 | def decode(self, decoder, *, objectset=None): | ||
181 | return decoder.read_integer() | ||
182 | |||
183 | |||
184 | class PositiveIntegerProp(IntegerProp): | ||
185 | def validate(self, value): | ||
186 | super().validate(value) | ||
187 | if value < 1: | ||
188 | raise ValueError(f"Value must be >=1. Got {value}") | ||
189 | |||
190 | |||
191 | class NonNegativeIntegerProp(IntegerProp): | ||
192 | def validate(self, value): | ||
193 | super().validate(value) | ||
194 | if value < 0: | ||
195 | raise ValueError(f"Value must be >= 0. Got {value}") | ||
196 | |||
197 | |||
198 | class BooleanProp(Property): | ||
199 | VALID_TYPES = bool | ||
200 | |||
201 | def set(self, value): | ||
202 | return bool(value) | ||
203 | |||
204 | def encode(self, encoder, value, state): | ||
205 | encoder.write_bool(value) | ||
206 | |||
207 | def decode(self, decoder, *, objectset=None): | ||
208 | return decoder.read_bool() | ||
209 | |||
210 | |||
211 | class FloatProp(Property): | ||
212 | VALID_TYPES = (float, int) | ||
213 | |||
214 | def set(self, value): | ||
215 | return float(value) | ||
216 | |||
217 | def encode(self, encoder, value, state): | ||
218 | encoder.write_float(value) | ||
219 | |||
220 | def decode(self, decoder, *, objectset=None): | ||
221 | return decoder.read_float() | ||
222 | |||
223 | |||
224 | class IRIProp(Property): | ||
225 | def __init__(self, context=[], *, pattern=None): | ||
226 | super().__init__(pattern=pattern) | ||
227 | self.context = context | ||
228 | |||
229 | def compact(self, value): | ||
230 | for iri, compact in self.context: | ||
231 | if value == iri: | ||
232 | return compact | ||
233 | return None | ||
234 | |||
235 | def expand(self, value): | ||
236 | for iri, compact in self.context: | ||
237 | if value == compact: | ||
238 | return iri | ||
239 | return None | ||
240 | |||
241 | def iri_values(self): | ||
242 | return (iri for iri, _ in self.context) | ||
243 | |||
244 | |||
245 | class ObjectProp(IRIProp): | ||
246 | """ | ||
247 | A scalar SHACL object property of a SHACL object | ||
248 | """ | ||
249 | |||
250 | def __init__(self, cls, required, context=[]): | ||
251 | super().__init__(context) | ||
252 | self.cls = cls | ||
253 | self.required = required | ||
254 | |||
255 | def init(self): | ||
256 | if self.required and not self.cls.IS_ABSTRACT: | ||
257 | return self.cls() | ||
258 | return None | ||
259 | |||
260 | def validate(self, value): | ||
261 | check_type(value, (self.cls, str)) | ||
262 | |||
263 | def walk(self, value, callback, path): | ||
264 | if value is None: | ||
265 | return | ||
266 | |||
267 | if not isinstance(value, str): | ||
268 | value.walk(callback, path) | ||
269 | else: | ||
270 | callback(value, path) | ||
271 | |||
272 | def iter_objects(self, value, recursive, visited): | ||
273 | if value is None or isinstance(value, str): | ||
274 | return | ||
275 | |||
276 | if value not in visited: | ||
277 | visited.add(value) | ||
278 | yield value | ||
279 | |||
280 | if recursive: | ||
281 | for c in value.iter_objects(recursive=True, visited=visited): | ||
282 | yield c | ||
283 | |||
284 | def encode(self, encoder, value, state): | ||
285 | if value is None: | ||
286 | raise ValueError("Object cannot be None") | ||
287 | |||
288 | if isinstance(value, str): | ||
289 | encoder.write_iri(value, self.compact(value)) | ||
290 | return | ||
291 | |||
292 | return value.encode(encoder, state) | ||
293 | |||
294 | def decode(self, decoder, *, objectset=None): | ||
295 | iri = decoder.read_iri() | ||
296 | if iri is None: | ||
297 | return self.cls.decode(decoder, objectset=objectset) | ||
298 | |||
299 | iri = self.expand(iri) or iri | ||
300 | |||
301 | if objectset is None: | ||
302 | return iri | ||
303 | |||
304 | obj = objectset.find_by_id(iri) | ||
305 | if obj is None: | ||
306 | return iri | ||
307 | |||
308 | self.validate(obj) | ||
309 | return obj | ||
310 | |||
311 | def link_prop(self, value, objectset, missing, visited): | ||
312 | if value is None: | ||
313 | return value | ||
314 | |||
315 | if isinstance(value, str): | ||
316 | o = objectset.find_by_id(value) | ||
317 | if o is not None: | ||
318 | self.validate(o) | ||
319 | return o | ||
320 | |||
321 | if missing is not None: | ||
322 | missing.add(value) | ||
323 | |||
324 | return value | ||
325 | |||
326 | # De-duplicate IDs | ||
327 | if value._id: | ||
328 | value = objectset.find_by_id(value._id, value) | ||
329 | self.validate(value) | ||
330 | |||
331 | value.link_helper(objectset, missing, visited) | ||
332 | return value | ||
333 | |||
334 | |||
335 | class ListProxy(object): | ||
336 | def __init__(self, prop, data=None): | ||
337 | if data is None: | ||
338 | self.__data = [] | ||
339 | else: | ||
340 | self.__data = data | ||
341 | self.__prop = prop | ||
342 | |||
343 | def append(self, value): | ||
344 | self.__prop.validate(value) | ||
345 | self.__data.append(self.__prop.set(value)) | ||
346 | |||
347 | def insert(self, idx, value): | ||
348 | self.__prop.validate(value) | ||
349 | self.__data.insert(idx, self.__prop.set(value)) | ||
350 | |||
351 | def extend(self, items): | ||
352 | for i in items: | ||
353 | self.append(i) | ||
354 | |||
355 | def sort(self, *args, **kwargs): | ||
356 | self.__data.sort(*args, **kwargs) | ||
357 | |||
358 | def __getitem__(self, key): | ||
359 | return self.__data[key] | ||
360 | |||
361 | def __setitem__(self, key, value): | ||
362 | if isinstance(key, slice): | ||
363 | for v in value: | ||
364 | self.__prop.validate(v) | ||
365 | self.__data[key] = [self.__prop.set(v) for v in value] | ||
366 | else: | ||
367 | self.__prop.validate(value) | ||
368 | self.__data[key] = self.__prop.set(value) | ||
369 | |||
370 | def __delitem__(self, key): | ||
371 | del self.__data[key] | ||
372 | |||
373 | def __contains__(self, item): | ||
374 | return item in self.__data | ||
375 | |||
376 | def __iter__(self): | ||
377 | return iter(self.__data) | ||
378 | |||
379 | def __len__(self): | ||
380 | return len(self.__data) | ||
381 | |||
382 | def __str__(self): | ||
383 | return str(self.__data) | ||
384 | |||
385 | def __repr__(self): | ||
386 | return repr(self.__data) | ||
387 | |||
388 | def __eq__(self, other): | ||
389 | if isinstance(other, ListProxy): | ||
390 | return self.__data == other.__data | ||
391 | |||
392 | return self.__data == other | ||
393 | |||
394 | |||
395 | class ListProp(Property): | ||
396 | """ | ||
397 | A list of SHACL properties | ||
398 | """ | ||
399 | |||
400 | VALID_TYPES = (list, ListProxy) | ||
401 | |||
402 | def __init__(self, prop): | ||
403 | super().__init__() | ||
404 | self.prop = prop | ||
405 | |||
406 | def init(self): | ||
407 | return ListProxy(self.prop) | ||
408 | |||
409 | def validate(self, value): | ||
410 | super().validate(value) | ||
411 | |||
412 | for i in value: | ||
413 | self.prop.validate(i) | ||
414 | |||
415 | def set(self, value): | ||
416 | if isinstance(value, ListProxy): | ||
417 | return value | ||
418 | |||
419 | return ListProxy(self.prop, [self.prop.set(d) for d in value]) | ||
420 | |||
421 | def check_min_count(self, value, min_count): | ||
422 | check_type(value, ListProxy) | ||
423 | return len(value) >= min_count | ||
424 | |||
425 | def check_max_count(self, value, max_count): | ||
426 | check_type(value, ListProxy) | ||
427 | return len(value) <= max_count | ||
428 | |||
429 | def elide(self, value): | ||
430 | check_type(value, ListProxy) | ||
431 | return len(value) == 0 | ||
432 | |||
433 | def walk(self, value, callback, path): | ||
434 | callback(value, path) | ||
435 | for idx, v in enumerate(value): | ||
436 | self.prop.walk(v, callback, path + [f"[{idx}]"]) | ||
437 | |||
438 | def iter_objects(self, value, recursive, visited): | ||
439 | for v in value: | ||
440 | for c in self.prop.iter_objects(v, recursive, visited): | ||
441 | yield c | ||
442 | |||
443 | def link_prop(self, value, objectset, missing, visited): | ||
444 | data = [self.prop.link_prop(v, objectset, missing, visited) for v in value] | ||
445 | |||
446 | return ListProxy(self.prop, data=data) | ||
450 | |||
451 | def encode(self, encoder, value, state): | ||
452 | check_type(value, ListProxy) | ||
453 | |||
454 | with encoder.write_list() as list_s: | ||
455 | for v in value: | ||
456 | with list_s.write_list_item() as item_s: | ||
457 | self.prop.encode(item_s, v, state) | ||
458 | |||
459 | def decode(self, decoder, *, objectset=None): | ||
460 | data = [] | ||
461 | for val_d in decoder.read_list(): | ||
462 | v = self.prop.decode(val_d, objectset=objectset) | ||
463 | self.prop.validate(v) | ||
464 | data.append(v) | ||
465 | |||
466 | return ListProxy(self.prop, data=data) | ||
467 | |||
468 | |||
469 | class EnumProp(IRIProp): | ||
470 | VALID_TYPES = str | ||
471 | |||
472 | def __init__(self, values, *, pattern=None): | ||
473 | super().__init__(values, pattern=pattern) | ||
474 | |||
475 | def validate(self, value): | ||
476 | super().validate(value) | ||
477 | |||
478 | valid_values = list(self.iri_values()) | ||
479 | if value not in valid_values: | ||
480 | raise ValueError( | ||
481 | f"'{value}' is not a valid value. Choose one of {' '.join(valid_values)}" | ||
482 | ) | ||
483 | |||
484 | def encode(self, encoder, value, state): | ||
485 | encoder.write_enum(value, self, self.compact(value)) | ||
486 | |||
487 | def decode(self, decoder, *, objectset=None): | ||
488 | v = decoder.read_enum(self) | ||
489 | return self.expand(v) or v | ||
490 | |||
491 | |||
492 | class NodeKind(Enum): | ||
493 | BlankNode = 1 | ||
494 | IRI = 2 | ||
495 | BlankNodeOrIRI = 3 | ||
496 | |||
497 | |||
498 | def is_IRI(s): | ||
499 | if not isinstance(s, str): | ||
500 | return False | ||
501 | if s.startswith("_:"): | ||
502 | return False | ||
503 | if ":" not in s: | ||
504 | return False | ||
505 | return True | ||
506 | |||
507 | |||
508 | def is_blank_node(s): | ||
509 | if not isinstance(s, str): | ||
510 | return False | ||
511 | if not s.startswith("_:"): | ||
512 | return False | ||
513 | return True | ||
514 | |||
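For reference, the two predicates above behave as follows:

# is_IRI("http://example.com/x") -> True;  is_IRI("_:b0") -> False
# is_blank_node("_:b0")          -> True;  is_blank_node("x")  -> False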
515 | |||
516 | def register(type_iri, *, compact_type=None, abstract=False): | ||
517 | def add_class(key, c): | ||
518 | assert ( | ||
519 | key not in SHACLObject.CLASSES | ||
520 | ), f"{key} already registered to {SHACLObject.CLASSES[key].__name__}" | ||
521 | SHACLObject.CLASSES[key] = c | ||
522 | |||
523 | def decorator(c): | ||
524 | global NAMED_INDIVIDUALS | ||
525 | |||
526 | assert issubclass( | ||
527 | c, SHACLObject | ||
528 | ), f"{c.__name__} is not derived from SHACLObject" | ||
529 | |||
530 | c._OBJ_TYPE = type_iri | ||
531 | c.IS_ABSTRACT = abstract | ||
532 | add_class(type_iri, c) | ||
533 | |||
534 | c._OBJ_COMPACT_TYPE = compact_type | ||
535 | if compact_type: | ||
536 | add_class(compact_type, c) | ||
537 | |||
538 | NAMED_INDIVIDUALS |= set(c.NAMED_INDIVIDUALS.values()) | ||
539 | |||
540 | # Registration is deferred until the first instance of the class is | ||
541 | # created so that it has access to all other defined classes | ||
542 | c._NEEDS_REG = True | ||
543 | return c | ||
544 | |||
545 | return decorator | ||
546 | |||
547 | |||
548 | register_lock = threading.Lock() | ||
549 | NAMED_INDIVIDUALS = set() | ||
550 | |||
551 | |||
552 | @functools.total_ordering | ||
553 | class SHACLObject(object): | ||
554 | CLASSES = {} | ||
555 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
556 | ID_ALIAS = None | ||
557 | IS_ABSTRACT = True | ||
558 | |||
559 | def __init__(self, **kwargs): | ||
560 | if self._is_abstract(): | ||
561 | raise NotImplementedError( | ||
562 | f"{self.__class__.__name__} is abstract and cannot be implemented" | ||
563 | ) | ||
564 | |||
565 | with register_lock: | ||
566 | cls = self.__class__ | ||
567 | if cls._NEEDS_REG: | ||
568 | cls._OBJ_PROPERTIES = {} | ||
569 | cls._OBJ_IRIS = {} | ||
570 | cls._register_props() | ||
571 | cls._NEEDS_REG = False | ||
572 | |||
573 | self.__dict__["_obj_data"] = {} | ||
574 | self.__dict__["_obj_metadata"] = {} | ||
575 | |||
576 | for iri, prop, _, _, _, _ in self.__iter_props(): | ||
577 | self.__dict__["_obj_data"][iri] = prop.init() | ||
578 | |||
579 | for k, v in kwargs.items(): | ||
580 | setattr(self, k, v) | ||
581 | |||
582 | def _is_abstract(self): | ||
583 | return self.__class__.IS_ABSTRACT | ||
584 | |||
585 | @classmethod | ||
586 | def _register_props(cls): | ||
587 | cls._add_property("_id", StringProp(), iri="@id") | ||
588 | |||
589 | @classmethod | ||
590 | def _add_property( | ||
591 | cls, | ||
592 | pyname, | ||
593 | prop, | ||
594 | iri, | ||
595 | min_count=None, | ||
596 | max_count=None, | ||
597 | compact=None, | ||
598 | ): | ||
599 | if pyname in cls._OBJ_IRIS: | ||
600 | raise KeyError(f"'{pyname}' is already defined for '{cls.__name__}'") | ||
601 | if iri in cls._OBJ_PROPERTIES: | ||
602 | raise KeyError(f"'{iri}' is already defined for '{cls.__name__}'") | ||
603 | |||
604 | while hasattr(cls, pyname): | ||
605 | pyname = pyname + "_" | ||
606 | |||
607 | pyname = sys.intern(pyname) | ||
608 | iri = sys.intern(iri) | ||
609 | |||
610 | cls._OBJ_IRIS[pyname] = iri | ||
611 | cls._OBJ_PROPERTIES[iri] = (prop, min_count, max_count, pyname, compact) | ||
612 | |||
613 | def __setattr__(self, name, value): | ||
614 | if name == self.ID_ALIAS: | ||
615 | self["@id"] = value | ||
616 | return | ||
617 | |||
618 | try: | ||
619 | iri = self._OBJ_IRIS[name] | ||
620 | self[iri] = value | ||
621 | except KeyError: | ||
622 | raise AttributeError( | ||
623 | f"'{name}' is not a valid property of {self.__class__.__name__}" | ||
624 | ) | ||
625 | |||
626 | def __getattr__(self, name): | ||
627 | if name in self._OBJ_IRIS: | ||
628 | return self.__dict__["_obj_data"][self._OBJ_IRIS[name]] | ||
629 | |||
630 | if name == self.ID_ALIAS: | ||
631 | return self.__dict__["_obj_data"]["@id"] | ||
632 | |||
633 | if name == "_metadata": | ||
634 | return self.__dict__["_obj_metadata"] | ||
635 | |||
636 | if name == "_IRI": | ||
637 | return self._OBJ_IRIS | ||
638 | |||
639 | if name == "TYPE": | ||
640 | return self.__class__._OBJ_TYPE | ||
641 | |||
642 | if name == "COMPACT_TYPE": | ||
643 | return self.__class__._OBJ_COMPACT_TYPE | ||
644 | |||
645 | raise AttributeError( | ||
646 | f"'{name}' is not a valid property of {self.__class__.__name__}" | ||
647 | ) | ||
648 | |||
649 | def __delattr__(self, name): | ||
650 | if name == self.ID_ALIAS: | ||
651 | del self["@id"] | ||
652 | return | ||
653 | |||
654 | try: | ||
655 | iri = self._OBJ_IRIS[name] | ||
656 | del self[iri] | ||
657 | except KeyError: | ||
658 | raise AttributeError( | ||
659 | f"'{name}' is not a valid property of {self.__class__.__name__}" | ||
660 | ) | ||
661 | |||
662 | def __get_prop(self, iri): | ||
663 | if iri not in self._OBJ_PROPERTIES: | ||
664 | raise KeyError( | ||
665 | f"'{iri}' is not a valid property of {self.__class__.__name__}" | ||
666 | ) | ||
667 | |||
668 | return self._OBJ_PROPERTIES[iri] | ||
669 | |||
670 | def __iter_props(self): | ||
671 | for iri, v in self._OBJ_PROPERTIES.items(): | ||
672 | yield iri, *v | ||
673 | |||
674 | def __getitem__(self, iri): | ||
675 | return self.__dict__["_obj_data"][iri] | ||
676 | |||
677 | def __setitem__(self, iri, value): | ||
678 | if iri == "@id": | ||
679 | if self.NODE_KIND == NodeKind.BlankNode: | ||
680 | if not is_blank_node(value): | ||
681 | raise ValueError( | ||
682 | f"{self.__class__.__name__} ({id(self)}) can only have local reference. Property '{iri}' cannot be set to '{value}' and must start with '_:'" | ||
683 | ) | ||
684 | elif self.NODE_KIND == NodeKind.IRI: | ||
685 | if not is_IRI(value): | ||
686 | raise ValueError( | ||
687 | f"{self.__class__.__name__} ({id(self)}) can only have an IRI value. Property '{iri}' cannot be set to '{value}'" | ||
688 | ) | ||
689 | else: | ||
690 | if not is_blank_node(value) and not is_IRI(value): | ||
691 | raise ValueError( | ||
692 | f"{self.__class__.__name__} ({id(self)}) Has invalid Property '{iri}' '{value}'. Must be a blank node or IRI" | ||
693 | ) | ||
694 | |||
695 | prop, _, _, _, _ = self.__get_prop(iri) | ||
696 | prop.validate(value) | ||
697 | self.__dict__["_obj_data"][iri] = prop.set(value) | ||
698 | |||
699 | def __delitem__(self, iri): | ||
700 | prop, _, _, _, _ = self.__get_prop(iri) | ||
701 | self.__dict__["_obj_data"][iri] = prop.init() | ||
702 | |||
703 | def __iter__(self): | ||
704 | return iter(self._OBJ_PROPERTIES) | ||
705 | |||
706 | def walk(self, callback, path=None): | ||
707 | """ | ||
708 | Walk object tree, invoking the callback for each item | ||
709 | |||
710 | Callback has the form: | ||
711 | |||
712 | def callback(object, path): | ||
713 | """ | ||
714 | if path is None: | ||
715 | path = ["."] | ||
716 | |||
717 | if callback(self, path): | ||
718 | for iri, prop, _, _, _, _ in self.__iter_props(): | ||
719 | prop.walk(self.__dict__["_obj_data"][iri], callback, path + [f".{iri}"]) | ||
720 | |||
721 | def property_keys(self): | ||
722 | for iri, _, _, _, pyname, compact in self.__iter_props(): | ||
723 | if iri == "@id": | ||
724 | compact = self.ID_ALIAS | ||
725 | yield pyname, iri, compact | ||
726 | |||
727 | def iter_objects(self, *, recursive=False, visited=None): | ||
728 | """ | ||
729 | Iterate over all objects that are children of this one | ||
730 | """ | ||
731 | if visited is None: | ||
732 | visited = set() | ||
733 | |||
734 | for iri, prop, _, _, _, _ in self.__iter_props(): | ||
735 | for c in prop.iter_objects( | ||
736 | self.__dict__["_obj_data"][iri], recursive=recursive, visited=visited | ||
737 | ): | ||
738 | yield c | ||
739 | |||
740 | def encode(self, encoder, state): | ||
741 | idname = self.ID_ALIAS or self._OBJ_IRIS["_id"] | ||
742 | if not self._id and self.NODE_KIND == NodeKind.IRI: | ||
743 | raise ValueError( | ||
744 | f"{self.__class__.__name__} ({id(self)}) must have a IRI for property '{idname}'" | ||
745 | ) | ||
746 | |||
747 | if state.is_written(self): | ||
748 | encoder.write_iri(state.get_object_id(self)) | ||
749 | return | ||
750 | |||
751 | state.add_written(self) | ||
752 | |||
753 | with encoder.write_object( | ||
754 | self, | ||
755 | state.get_object_id(self), | ||
756 | bool(self._id) or state.is_refed(self), | ||
757 | ) as obj_s: | ||
758 | self._encode_properties(obj_s, state) | ||
759 | |||
760 | def _encode_properties(self, encoder, state): | ||
761 | for iri, prop, min_count, max_count, pyname, compact in self.__iter_props(): | ||
762 | value = self.__dict__["_obj_data"][iri] | ||
763 | if prop.elide(value): | ||
764 | if min_count: | ||
765 | raise ValueError( | ||
766 | f"Property '{pyname}' in {self.__class__.__name__} ({id(self)}) is required (currently {value!r})" | ||
767 | ) | ||
768 | continue | ||
769 | |||
770 | if min_count is not None: | ||
771 | if not prop.check_min_count(value, min_count): | ||
772 | raise ValueError( | ||
773 | f"Property '{pyname}' in {self.__class__.__name__} ({id(self)}) requires a minimum of {min_count} elements" | ||
774 | ) | ||
775 | |||
776 | if max_count is not None: | ||
777 | if not prop.check_max_count(value, max_count): | ||
778 | raise ValueError( | ||
779 | f"Property '{pyname}' in {self.__class__.__name__} ({id(self)}) requires a maximum of {max_count} elements" | ||
780 | ) | ||
781 | |||
782 | if iri == self._OBJ_IRIS["_id"]: | ||
783 | continue | ||
784 | |||
785 | with encoder.write_property(iri, compact) as prop_s: | ||
786 | prop.encode(prop_s, value, state) | ||
787 | |||
788 | @classmethod | ||
789 | def _make_object(cls, typ): | ||
790 | if typ not in cls.CLASSES: | ||
791 | raise TypeError(f"Unknown type {typ}") | ||
792 | |||
793 | return cls.CLASSES[typ]() | ||
794 | |||
795 | @classmethod | ||
796 | def decode(cls, decoder, *, objectset=None): | ||
797 | typ, obj_d = decoder.read_object() | ||
798 | if typ is None: | ||
799 | raise TypeError("Unable to determine type for object") | ||
800 | |||
801 | obj = cls._make_object(typ) | ||
802 | for key in (obj.ID_ALIAS, obj._OBJ_IRIS["_id"]): | ||
803 | with obj_d.read_property(key) as prop_d: | ||
804 | if prop_d is None: | ||
805 | continue | ||
806 | |||
807 | _id = prop_d.read_iri() | ||
808 | if _id is None: | ||
809 | raise TypeError(f"Object key '{key}' is the wrong type") | ||
810 | |||
811 | obj._id = _id | ||
812 | break | ||
813 | |||
814 | if obj.NODE_KIND == NodeKind.IRI and not obj._id: | ||
815 | raise ValueError("Object is missing required IRI") | ||
816 | |||
817 | if objectset is not None: | ||
818 | if obj._id: | ||
819 | v = objectset.find_by_id(_id) | ||
820 | if v is not None: | ||
821 | return v | ||
822 | |||
823 | obj._decode_properties(obj_d, objectset=objectset) | ||
824 | |||
825 | if objectset is not None: | ||
826 | objectset.add_index(obj) | ||
827 | return obj | ||
828 | |||
829 | def _decode_properties(self, decoder, objectset=None): | ||
830 | for key in decoder.object_keys(): | ||
831 | if not self._decode_prop(decoder, key, objectset=objectset): | ||
832 | raise KeyError(f"Unknown property '{key}'") | ||
833 | |||
834 | def _decode_prop(self, decoder, key, objectset=None): | ||
835 | if key in (self._OBJ_IRIS["_id"], self.ID_ALIAS): | ||
836 | return True | ||
837 | |||
838 | for iri, prop, _, _, _, compact in self.__iter_props(): | ||
839 | if compact == key: | ||
840 | read_key = compact | ||
841 | elif iri == key: | ||
842 | read_key = iri | ||
843 | else: | ||
844 | continue | ||
845 | |||
846 | with decoder.read_property(read_key) as prop_d: | ||
847 | v = prop.decode(prop_d, objectset=objectset) | ||
848 | prop.validate(v) | ||
849 | self.__dict__["_obj_data"][iri] = v | ||
850 | return True | ||
851 | |||
852 | return False | ||
853 | |||
854 | def link_helper(self, objectset, missing, visited): | ||
855 | if self in visited: | ||
856 | return | ||
857 | |||
858 | visited.add(self) | ||
859 | |||
860 | for iri, prop, _, _, _, _ in self.__iter_props(): | ||
861 | self.__dict__["_obj_data"][iri] = prop.link_prop( | ||
862 | self.__dict__["_obj_data"][iri], | ||
863 | objectset, | ||
864 | missing, | ||
865 | visited, | ||
866 | ) | ||
867 | |||
868 | def __str__(self): | ||
869 | parts = [ | ||
870 | f"{self.__class__.__name__}(", | ||
871 | ] | ||
872 | if self._id: | ||
873 | parts.append(f"@id='{self._id}'") | ||
874 | parts.append(")") | ||
875 | return "".join(parts) | ||
876 | |||
877 | def __hash__(self): | ||
878 | return super().__hash__() | ||
879 | |||
880 | def __eq__(self, other): | ||
881 | return super().__eq__(other) | ||
882 | |||
883 | def __lt__(self, other): | ||
884 | def sort_key(obj): | ||
885 | if isinstance(obj, str): | ||
886 | return (obj, "", "", "") | ||
887 | return ( | ||
888 | obj._id or "", | ||
889 | obj.TYPE, | ||
890 | getattr(obj, "name", None) or "", | ||
891 | id(obj), | ||
892 | ) | ||
893 | |||
894 | return sort_key(self) < sort_key(other) | ||
895 | |||
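A hypothetical registration, sketching how the decorator, _register_props(), and lazy property registration interact (the IRI and class are illustrative; the real registrations are generated later in this file):

@register("https://example.com/model/Thing", compact_type="Thing")
class Thing(SHACLObject):
    NODE_KIND = NodeKind.BlankNodeOrIRI
    NAMED_INDIVIDUALS = {}

    @classmethod
    def _register_props(cls):
        super()._register_props()
        cls._add_property("name", StringProp(), iri="https://example.com/model/name")

t = Thing(name="example")  # property registration happens lazily here
t.walk(lambda obj, path: print(path) or True)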
896 | |||
897 | class SHACLExtensibleObject(object): | ||
898 | CLOSED = False | ||
899 | |||
900 | def __init__(self, typ=None, **kwargs): | ||
901 | if typ: | ||
902 | self.__dict__["_obj_TYPE"] = (typ, None) | ||
903 | else: | ||
904 | self.__dict__["_obj_TYPE"] = (self._OBJ_TYPE, self._OBJ_COMPACT_TYPE) | ||
905 | super().__init__(**kwargs) | ||
906 | |||
907 | def _is_abstract(self): | ||
908 | # Unknown classes are assumed to not be abstract so that they can be | ||
909 | # deserialized | ||
910 | typ = self.__dict__["_obj_TYPE"][0] | ||
911 | if typ in self.__class__.CLASSES: | ||
912 | return self.__class__.CLASSES[typ].IS_ABSTRACT | ||
913 | |||
914 | return False | ||
915 | |||
916 | @classmethod | ||
917 | def _make_object(cls, typ): | ||
918 | # Check for a known type, and if so, deserialize as that instead | ||
919 | if typ in cls.CLASSES: | ||
920 | return cls.CLASSES[typ]() | ||
921 | |||
922 | obj = cls(typ) | ||
923 | return obj | ||
924 | |||
925 | def _decode_properties(self, decoder, objectset=None): | ||
926 | def decode_value(d): | ||
927 | if not d.is_list(): | ||
928 | return d.read_value() | ||
929 | |||
930 | return [decode_value(val_d) for val_d in d.read_list()] | ||
931 | |||
932 | if self.CLOSED: | ||
933 | super()._decode_properties(decoder, objectset=objectset) | ||
934 | return | ||
935 | |||
936 | for key in decoder.object_keys(): | ||
937 | if self._decode_prop(decoder, key, objectset=objectset): | ||
938 | continue | ||
939 | |||
940 | if not is_IRI(key): | ||
941 | raise KeyError( | ||
942 | f"Extensible object properties must be IRIs. Got '{key}'" | ||
943 | ) | ||
944 | |||
945 | with decoder.read_property(key) as prop_d: | ||
946 | self.__dict__["_obj_data"][key] = decode_value(prop_d) | ||
947 | |||
948 | def _encode_properties(self, encoder, state): | ||
949 | def encode_value(encoder, v): | ||
950 | if isinstance(v, bool): | ||
951 | encoder.write_bool(v) | ||
952 | elif isinstance(v, str): | ||
953 | encoder.write_string(v) | ||
954 | elif isinstance(v, int): | ||
955 | encoder.write_integer(v) | ||
956 | elif isinstance(v, float): | ||
957 | encoder.write_float(v) | ||
958 | elif isinstance(v, list): | ||
959 | with encoder.write_list() as list_s: | ||
960 | for i in v: | ||
961 | with list_s.write_list_item() as item_s: | ||
962 | encode_value(item_s, i) | ||
963 | else: | ||
964 | raise TypeError( | ||
965 | f"Unsupported serialized type {type(v)} with value '{v}'" | ||
966 | ) | ||
967 | |||
968 | super()._encode_properties(encoder, state) | ||
969 | if self.CLOSED: | ||
970 | return | ||
971 | |||
972 | for iri, value in self.__dict__["_obj_data"].items(): | ||
973 | if iri in self._OBJ_PROPERTIES: | ||
974 | continue | ||
975 | |||
976 | with encoder.write_property(iri) as prop_s: | ||
977 | encode_value(prop_s, value) | ||
978 | |||
979 | def __setitem__(self, iri, value): | ||
980 | try: | ||
981 | super().__setitem__(iri, value) | ||
982 | except KeyError: | ||
983 | if self.CLOSED: | ||
984 | raise | ||
985 | |||
986 | if not is_IRI(iri): | ||
987 | raise KeyError(f"Key '{iri}' must be an IRI") | ||
988 | self.__dict__["_obj_data"][iri] = value | ||
989 | |||
990 | def __delitem__(self, iri): | ||
991 | try: | ||
992 | super().__delitem__(iri) | ||
993 | except KeyError: | ||
994 | if self.CLOSED: | ||
995 | raise | ||
996 | |||
997 | if not is_IRI(iri): | ||
998 | raise KeyError(f"Key '{iri}' must be an IRI") | ||
999 | del self.__dict__["_obj_data"][iri] | ||
1000 | |||
1001 | def __getattr__(self, name): | ||
1002 | if name == "TYPE": | ||
1003 | return self.__dict__["_obj_TYPE"][0] | ||
1004 | if name == "COMPACT_TYPE": | ||
1005 | return self.__dict__["_obj_TYPE"][1] | ||
1006 | return super().__getattr__(name) | ||
1007 | |||
1008 | def property_keys(self): | ||
1009 | iris = set() | ||
1010 | for pyname, iri, compact in super().property_keys(): | ||
1011 | iris.add(iri) | ||
1012 | yield pyname, iri, compact | ||
1013 | |||
1014 | if self.CLOSED: | ||
1015 | return | ||
1016 | |||
1017 | for iri in self.__dict__["_obj_data"].keys(): | ||
1018 | if iri not in iris: | ||
1019 | yield None, iri, None | ||
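
# A minimal sketch of the extensible behaviour: on a non-CLOSED object,
# any fully qualified IRI is accepted as a property key (the example IRI
# is hypothetical):
#
#   obj["https://example.com/terms/customProp"] = "value"  # stored as-is
#   obj["customProp"] = "value"                            # KeyError: not an IRI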
1020 | |||
1021 | |||
1022 | class SHACLObjectSet(object): | ||
1023 | def __init__(self, objects=[], *, link=False): | ||
1024 | self.objects = set() | ||
1025 | self.missing_ids = set() | ||
1026 | for o in objects: | ||
1027 | self.objects.add(o) | ||
1028 | self.create_index() | ||
1029 | if link: | ||
1030 | self._link() | ||
1031 | |||
1032 | def create_index(self): | ||
1033 | """ | ||
1034 | (re)Create object index | ||
1035 | |||
1036 | Creates or recreates the indices for the object set to enable fast | ||
1037 | lookup. All objects and their children are walked and indexed | ||
1038 | """ | ||
1039 | self.obj_by_id = {} | ||
1040 | self.obj_by_type = {} | ||
1041 | for o in self.foreach(): | ||
1042 | self.add_index(o) | ||
1043 | |||
1044 | def add_index(self, obj): | ||
1045 | """ | ||
1046 | Add object to index | ||
1047 | |||
1048 | Adds the object to all appropriate indices | ||
1049 | """ | ||
1050 | |||
1051 | def reg_type(typ, compact, o, exact): | ||
1052 | self.obj_by_type.setdefault(typ, set()).add((exact, o)) | ||
1053 | if compact: | ||
1054 | self.obj_by_type.setdefault(compact, set()).add((exact, o)) | ||
1055 | |||
1056 | if not isinstance(obj, SHACLObject): | ||
1057 | raise TypeError("Object is not of type SHACLObject") | ||
1058 | |||
1059 | for typ in SHACLObject.CLASSES.values(): | ||
1060 | if isinstance(obj, typ): | ||
1061 | reg_type( | ||
1062 | typ._OBJ_TYPE, typ._OBJ_COMPACT_TYPE, obj, obj.__class__ is typ | ||
1063 | ) | ||
1064 | |||
1065 | # This covers custom extensions | ||
1066 | reg_type(obj.TYPE, obj.COMPACT_TYPE, obj, True) | ||
1067 | |||
1068 | if not obj._id: | ||
1069 | return | ||
1070 | |||
1071 | self.missing_ids.discard(obj._id) | ||
1072 | |||
1073 | if obj._id in self.obj_by_id: | ||
1074 | return | ||
1075 | |||
1076 | self.obj_by_id[obj._id] = obj | ||
1077 | |||
1078 | def add(self, obj): | ||
1079 | """ | ||
1080 | Add object to object set | ||
1081 | |||
1082 | Adds a SHACLObject to the object set and indexes it. | ||
1083 | |||
1084 | NOTE: Child objects of the attached object are not indexed | ||
1085 | """ | ||
1086 | if not isinstance(obj, SHACLObject): | ||
1087 | raise TypeError("Object is not of type SHACLObject") | ||
1088 | |||
1089 | if obj not in self.objects: | ||
1090 | self.objects.add(obj) | ||
1091 | self.add_index(obj) | ||
1092 | return obj | ||
1093 | |||
1094 | def update(self, *others): | ||
1095 | """ | ||
1096 | Update the object set, adding all objects from each of the other iterables | ||
1097 | """ | ||
1098 | for o in others: | ||
1099 | for obj in o: | ||
1100 | self.add(obj) | ||
1101 | |||
1102 | def __contains__(self, item): | ||
1103 | """ | ||
1104 | Returns True if the item is in the object set | ||
1105 | """ | ||
1106 | return item in self.objects | ||
1107 | |||
1108 | def link(self): | ||
1109 | """ | ||
1110 | Link object set | ||
1111 | |||
1112 | Links the objects in the object set by replacing string object | ||
1113 | references with references to the objects themselves. e.g. | ||
1114 | a property that references object "https://foo/bar" by a string | ||
1115 | reference will be replaced with an actual reference to the object in | ||
1116 | the object set with the same ID if it exists in the object set | ||
1117 | |||
1118 | If multiple objects with the same ID are found, the duplicates are | ||
1119 | eliminated | ||
1120 | """ | ||
1121 | self.create_index() | ||
1122 | return self._link() | ||
1123 | |||
1124 | def _link(self): | ||
1125 | global NAMED_INDIVIDUALS | ||
1126 | |||
1127 | self.missing_ids = set() | ||
1128 | visited = set() | ||
1129 | |||
1130 | new_objects = set() | ||
1131 | |||
1132 | for o in self.objects: | ||
1133 | if o._id: | ||
1134 | o = self.find_by_id(o._id, o) | ||
1135 | o.link_helper(self, self.missing_ids, visited) | ||
1136 | new_objects.add(o) | ||
1137 | |||
1138 | self.objects = new_objects | ||
1139 | |||
1140 | # Remove blank nodes | ||
1141 | obj_by_id = {} | ||
1142 | for _id, obj in self.obj_by_id.items(): | ||
1143 | if _id.startswith("_:"): | ||
1144 | del obj._id | ||
1145 | else: | ||
1146 | obj_by_id[_id] = obj | ||
1147 | self.obj_by_id = obj_by_id | ||
1148 | |||
1149 | # Named individuals aren't considered missing | ||
1150 | self.missing_ids -= NAMED_INDIVIDUALS | ||
1151 | |||
1152 | return self.missing_ids | ||
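
# A minimal linking sketch, assuming `a` holds a string reference to the
# IRI by which `b` is identified (the names are hypothetical):
#
#   objset = SHACLObjectSet([a, b])
#   missing = objset.link()
#   # string references to b._id in a's properties now point at b itself;
#   # `missing` is the set of referenced IRIs with no matching object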
1153 | |||
1154 | def find_by_id(self, _id, default=None): | ||
1155 | """ | ||
1156 | Find object by ID | ||
1157 | |||
1158 | Returns the object that matches the specified ID, or default if there | ||
1159 | is no object with the specified ID | ||
1160 | """ | ||
1161 | if _id not in self.obj_by_id: | ||
1162 | return default | ||
1163 | return self.obj_by_id[_id] | ||
1164 | |||
1165 | def foreach(self): | ||
1166 | """ | ||
1167 | Iterate over every object in the object set, and all child objects | ||
1168 | """ | ||
1169 | visited = set() | ||
1170 | for o in self.objects: | ||
1171 | if o not in visited: | ||
1172 | yield o | ||
1173 | visited.add(o) | ||
1174 | |||
1175 | for child in o.iter_objects(recursive=True, visited=visited): | ||
1176 | yield child | ||
1177 | |||
1178 | def foreach_type(self, typ, *, match_subclass=True): | ||
1179 | """ | ||
1180 | Iterate over each object of a specified type (or subclass thereof) | ||
1181 | |||
1182 | If match_subclass is True, any class derived from typ will also match | ||
1183 | (similar to isinstance()). If False, only exact matches will be | ||
1184 | returned | ||
1185 | """ | ||
1186 | if not isinstance(typ, str): | ||
1187 | if not issubclass(typ, SHACLObject): | ||
1188 | raise TypeError(f"Type must be derived from SHACLObject, got {typ}") | ||
1189 | typ = typ._OBJ_TYPE | ||
1190 | |||
1191 | if typ not in self.obj_by_type: | ||
1192 | return | ||
1193 | |||
1194 | for exact, o in self.obj_by_type[typ]: | ||
1195 | if match_subclass or exact: | ||
1196 | yield o | ||
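
# For example, iterating by class or by compact type string (sketch;
# assumes the set has already been indexed):
#
#   for e in objset.foreach_type(DictionaryEntry):
#       ...
#   for e in objset.foreach_type("Element", match_subclass=True):
#       ...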
1197 | |||
1198 | def merge(self, *objectsets): | ||
1199 | """ | ||
1200 | Merge object sets | ||
1201 | |||
1202 | Returns a new object set that is the combination of this object set and | ||
1203 | all provided arguments | ||
1204 | """ | ||
1205 | new_objects = set() | ||
1206 | new_objects |= self.objects | ||
1207 | for d in objectsets: | ||
1208 | new_objects |= d.objects | ||
1209 | |||
1210 | return SHACLObjectSet(new_objects, link=True) | ||
1211 | |||
1212 | def encode(self, encoder, force_list=False, *, key=None): | ||
1213 | """ | ||
1214 | Serialize a list of objects to a serialization encoder | ||
1215 | |||
1216 | If force_list is True, a list will always be written using the encoder. | ||
1217 | """ | ||
1218 | ref_counts = {} | ||
1219 | state = EncodeState() | ||
1220 | |||
1221 | def walk_callback(value, path): | ||
1222 | nonlocal state | ||
1223 | nonlocal ref_counts | ||
1224 | |||
1225 | if not isinstance(value, SHACLObject): | ||
1226 | return True | ||
1227 | |||
1228 | # Remove blank node ID for re-assignment | ||
1229 | if value._id and value._id.startswith("_:"): | ||
1230 | del value._id | ||
1231 | |||
1232 | if value._id: | ||
1233 | state.add_refed(value) | ||
1234 | |||
1235 | # If the object is referenced more than once, add it to the set of | ||
1236 | # referenced objects | ||
1237 | ref_counts.setdefault(value, 0) | ||
1238 | ref_counts[value] += 1 | ||
1239 | if ref_counts[value] > 1: | ||
1240 | state.add_refed(value) | ||
1241 | return False | ||
1242 | |||
1243 | return True | ||
1244 | |||
1245 | for o in self.objects: | ||
1246 | if o._id: | ||
1247 | state.add_refed(o) | ||
1248 | o.walk(walk_callback) | ||
1249 | |||
1250 | use_list = force_list or len(self.objects) > 1 | ||
1251 | |||
1252 | if use_list: | ||
1253 | # If we are making a list, add all the objects referred to by reference | ||
1254 | # to the list | ||
1255 | objects = list(self.objects | state.ref_objects) | ||
1256 | else: | ||
1257 | objects = list(self.objects) | ||
1258 | |||
1259 | objects.sort(key=key) | ||
1260 | |||
1261 | if use_list: | ||
1262 | # Ensure top level objects are only written in the top level graph | ||
1263 | # node, and referenced by ID everywhere else. This is done by setting | ||
1264 | # the flag that indicates this object has been written for all the top | ||
1265 | # level objects, then clearing it right before serializing the object. | ||
1266 | # | ||
1267 | # In this way, if an object is referenced before it is supposed to be | ||
1268 | # serialized into the @graph, it will serialize as a string instead of | ||
1269 | # the actual object | ||
1270 | for o in objects: | ||
1271 | state.written_objects.add(o) | ||
1272 | |||
1273 | with encoder.write_list() as list_s: | ||
1274 | for o in objects: | ||
1275 | # Allow this specific object to be written now | ||
1276 | state.written_objects.remove(o) | ||
1277 | with list_s.write_list_item() as item_s: | ||
1278 | o.encode(item_s, state) | ||
1279 | |||
1280 | elif objects: | ||
1281 | objects[0].encode(encoder, state) | ||
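
# An in-memory encoding sketch using the JSON-LD encoder defined below:
#
#   enc = JSONLDEncoder()
#   objset.encode(enc, force_list=False)
#   data = enc.data   # a dict for a single root object, a list otherwise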
1282 | |||
1283 | def decode(self, decoder): | ||
1284 | self.create_index() | ||
1285 | |||
1286 | for obj_d in decoder.read_list(): | ||
1287 | o = SHACLObject.decode(obj_d, objectset=self) | ||
1288 | self.objects.add(o) | ||
1289 | |||
1290 | self._link() | ||
1291 | |||
1292 | |||
1293 | class EncodeState(object): | ||
1294 | def __init__(self): | ||
1295 | self.ref_objects = set() | ||
1296 | self.written_objects = set() | ||
1297 | self.blank_objects = {} | ||
1298 | |||
1299 | def get_object_id(self, o): | ||
1300 | if o._id: | ||
1301 | return o._id | ||
1302 | |||
1303 | if o not in self.blank_objects: | ||
1304 | _id = f"_:{o.__class__.__name__}{len(self.blank_objects)}" | ||
1305 | self.blank_objects[o] = _id | ||
1306 | |||
1307 | return self.blank_objects[o] | ||
1308 | |||
1309 | def is_refed(self, o): | ||
1310 | return o in self.ref_objects | ||
1311 | |||
1312 | def add_refed(self, o): | ||
1313 | self.ref_objects.add(o) | ||
1314 | |||
1315 | def is_written(self, o): | ||
1316 | return o in self.written_objects | ||
1317 | |||
1318 | def add_written(self, o): | ||
1319 | self.written_objects.add(o) | ||
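
# Blank node IDs are assigned lazily and deterministically within one
# encode pass, e.g. (sketch):
#
#   state = EncodeState()
#   state.get_object_id(obj)   # "_:DictionaryEntry0" for an unnamed object
#   state.get_object_id(obj)   # same ID on repeated calls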
1320 | |||
1321 | |||
1322 | class Decoder(ABC): | ||
1323 | @abstractmethod | ||
1324 | def read_value(self): | ||
1325 | """ | ||
1326 | Consume next item | ||
1327 | |||
1328 | Consumes the next item of any type | ||
1329 | """ | ||
1330 | pass | ||
1331 | |||
1332 | @abstractmethod | ||
1333 | def read_string(self): | ||
1334 | """ | ||
1335 | Consume the next item as a string. | ||
1336 | |||
1337 | Returns the string value of the next item, or `None` if the next item | ||
1338 | is not a string | ||
1339 | """ | ||
1340 | pass | ||
1341 | |||
1342 | @abstractmethod | ||
1343 | def read_datetime(self): | ||
1344 | """ | ||
1345 | Consumes the next item as a date & time string | ||
1346 | |||
1347 | Returns the string value of the next item, if it is an ISO datetime, or | ||
1348 | `None` if the next item is not an ISO datetime string. | ||
1349 | |||
1350 | Note that validation of the string is done by the caller, so a minimal | ||
1351 | implementation can just check if the next item is a string without | ||
1352 | worrying about the format | ||
1353 | """ | ||
1354 | pass | ||
1355 | |||
1356 | @abstractmethod | ||
1357 | def read_integer(self): | ||
1358 | """ | ||
1359 | Consumes the next item as an integer | ||
1360 | |||
1361 | Returns the integer value of the next item, or `None` if the next item | ||
1362 | is not an integer | ||
1363 | """ | ||
1364 | pass | ||
1365 | |||
1366 | @abstractmethod | ||
1367 | def read_iri(self): | ||
1368 | """ | ||
1369 | Consumes the next item as an IRI string | ||
1370 | |||
1371 | Returns the string value of the next item as an IRI, or `None` if the next | ||
1372 | item is not an IRI. | ||
1373 | |||
1374 | The returned string should be either a fully-qualified IRI, or a blank | ||
1375 | node ID | ||
1376 | """ | ||
1377 | pass | ||
1378 | |||
1379 | @abstractmethod | ||
1380 | def read_enum(self, e): | ||
1381 | """ | ||
1382 | Consumes the next item as an Enum value string | ||
1383 | |||
1384 | Returns the fully qualified IRI of the next enum item, or `None` if the | ||
1385 | next item is not an enum value. | ||
1386 | |||
1387 | The caller is responsible for validating that the returned IRI is | ||
1388 | actually a member of the specified Enum, so the `Decoder` does not need | ||
1389 | to check that, but can if it wishes | ||
1390 | """ | ||
1391 | pass | ||
1392 | |||
1393 | @abstractmethod | ||
1394 | def read_bool(self): | ||
1395 | """ | ||
1396 | Consume the next item as a boolean value | ||
1397 | |||
1398 | Returns the boolean value of the next item, or `None` if the next item | ||
1399 | is not a boolean | ||
1400 | """ | ||
1401 | pass | ||
1402 | |||
1403 | @abstractmethod | ||
1404 | def read_float(self): | ||
1405 | """ | ||
1406 | Consume the next item as a float value | ||
1407 | |||
1408 | Returns the float value of the next item, or `None` if the next item is | ||
1409 | not a float | ||
1410 | """ | ||
1411 | pass | ||
1412 | |||
1413 | @abstractmethod | ||
1414 | def read_list(self): | ||
1415 | """ | ||
1416 | Consume the next item as a list generator | ||
1417 | |||
1418 | This should generate a `Decoder` object for each item in the list. The | ||
1419 | generated `Decoder` can be used to read the corresponding item from the | ||
1420 | list | ||
1421 | """ | ||
1422 | pass | ||
1423 | |||
1424 | @abstractmethod | ||
1425 | def is_list(self): | ||
1426 | """ | ||
1427 | Checks if the next item is a list | ||
1428 | |||
1429 | Returns True if the next item is a list, or False if it is a scalar | ||
1430 | """ | ||
1431 | pass | ||
1432 | |||
1433 | @abstractmethod | ||
1434 | def read_object(self): | ||
1435 | """ | ||
1436 | Consume next item as an object | ||
1437 | |||
1438 | Returns a tuple of the serialized type of the next item (or `None` if | ||
1439 | the type cannot be determined) and a `Decoder` that can be used to read | ||
1440 | properties from the object | ||
1441 | |||
1442 | Properties will be read out of the object using `read_property` and | ||
1443 | `read_object_id` | ||
1444 | """ | ||
1445 | pass | ||
1446 | |||
1447 | @abstractmethod | ||
1448 | @contextmanager | ||
1449 | def read_property(self, key): | ||
1450 | """ | ||
1451 | Read property from object | ||
1452 | |||
1453 | A context manager that yields a `Decoder` that can be used to read the | ||
1454 | value of the property with the given key in current object, or `None` | ||
1455 | if the property does not exist in the current object. | ||
1456 | """ | ||
1457 | pass | ||
1458 | |||
1459 | @abstractmethod | ||
1460 | def object_keys(self): | ||
1461 | """ | ||
1462 | Read property keys from an object | ||
1463 | |||
1464 | Iterates over all the serialized keys for the current object | ||
1465 | """ | ||
1466 | pass | ||
1467 | |||
1468 | @abstractmethod | ||
1469 | def read_object_id(self, alias=None): | ||
1470 | """ | ||
1471 | Read current object ID property | ||
1472 | |||
1473 | Returns the ID of the current object if one is defined, or `None` if | ||
1474 | the current object has no ID. | ||
1475 | |||
1476 | The ID must be a fully qualified IRI or a blank node | ||
1477 | |||
1478 | If `alias` is provided, it is a hint as to another name by which the ID | ||
1479 | might be found, if the `Decoder` supports aliases for an ID | ||
1480 | """ | ||
1481 | pass | ||
1482 | |||
1483 | |||
1484 | class JSONLDDecoder(Decoder): | ||
1485 | def __init__(self, data, root=False): | ||
1486 | self.data = data | ||
1487 | self.root = root | ||
1488 | |||
1489 | def read_value(self): | ||
1490 | if isinstance(self.data, str): | ||
1491 | try: | ||
1492 | return float(self.data) | ||
1493 | except ValueError: | ||
1494 | pass | ||
1495 | return self.data | ||
1496 | |||
1497 | def read_string(self): | ||
1498 | if isinstance(self.data, str): | ||
1499 | return self.data | ||
1500 | return None | ||
1501 | |||
1502 | def read_datetime(self): | ||
1503 | return self.read_string() | ||
1504 | |||
1505 | def read_integer(self): | ||
1506 | if isinstance(self.data, int): | ||
1507 | return self.data | ||
1508 | return None | ||
1509 | |||
1510 | def read_bool(self): | ||
1511 | if isinstance(self.data, bool): | ||
1512 | return self.data | ||
1513 | return None | ||
1514 | |||
1515 | def read_float(self): | ||
1516 | if isinstance(self.data, (int, float, str)): | ||
1517 | return float(self.data) | ||
1518 | return None | ||
1519 | |||
1520 | def read_iri(self): | ||
1521 | if isinstance(self.data, str): | ||
1522 | return self.data | ||
1523 | return None | ||
1524 | |||
1525 | def read_enum(self, e): | ||
1526 | if isinstance(self.data, str): | ||
1527 | return self.data | ||
1528 | return None | ||
1529 | |||
1530 | def read_list(self): | ||
1531 | if self.is_list(): | ||
1532 | for v in self.data: | ||
1533 | yield self.__class__(v) | ||
1534 | else: | ||
1535 | yield self | ||
1536 | |||
1537 | def is_list(self): | ||
1538 | return isinstance(self.data, (list, tuple, set)) | ||
1539 | |||
1540 | def __get_value(self, *keys): | ||
1541 | for k in keys: | ||
1542 | if k and k in self.data: | ||
1543 | return self.data[k] | ||
1544 | return None | ||
1545 | |||
1546 | @contextmanager | ||
1547 | def read_property(self, key): | ||
1548 | v = self.__get_value(key) | ||
1549 | if v is not None: | ||
1550 | yield self.__class__(v) | ||
1551 | else: | ||
1552 | yield None | ||
1553 | |||
1554 | def object_keys(self): | ||
1555 | for key in self.data.keys(): | ||
1556 | if key in ("@type", "type"): | ||
1557 | continue | ||
1558 | if self.root and key == "@context": | ||
1559 | continue | ||
1560 | yield key | ||
1561 | |||
1562 | def read_object(self): | ||
1563 | typ = self.__get_value("@type", "type") | ||
1564 | if typ is not None: | ||
1565 | return typ, self | ||
1566 | |||
1567 | return None, self | ||
1568 | |||
1569 | def read_object_id(self, alias=None): | ||
1570 | return self.__get_value(alias, "@id") | ||
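
# A minimal decoding sketch over already-parsed JSON data:
#
#   d = JSONLDDecoder({"type": "DictionaryEntry", "key": "k"}, root=True)
#   typ, obj_d = d.read_object()       # ("DictionaryEntry", d)
#   with obj_d.read_property("key") as p:
#       p.read_string()                # "k"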
1571 | |||
1572 | |||
1573 | class JSONLDDeserializer(object): | ||
1574 | def deserialize_data(self, data, objectset: SHACLObjectSet): | ||
1575 | if "@graph" in data: | ||
1576 | h = JSONLDDecoder(data["@graph"], True) | ||
1577 | else: | ||
1578 | h = JSONLDDecoder(data, True) | ||
1579 | |||
1580 | objectset.decode(h) | ||
1581 | |||
1582 | def read(self, f, objectset: SHACLObjectSet): | ||
1583 | data = json.load(f) | ||
1584 | self.deserialize_data(data, objectset) | ||
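
# Typical usage (sketch; the file name is hypothetical):
#
#   objset = SHACLObjectSet()
#   with open("doc.spdx.json") as f:
#       JSONLDDeserializer().read(f, objset)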
1585 | |||
1586 | |||
1587 | class Encoder(ABC): | ||
1588 | @abstractmethod | ||
1589 | def write_string(self, v): | ||
1590 | """ | ||
1591 | Write a string value | ||
1592 | |||
1593 | Encodes the value as a string in the output | ||
1594 | """ | ||
1595 | pass | ||
1596 | |||
1597 | @abstractmethod | ||
1598 | def write_datetime(self, v): | ||
1599 | """ | ||
1600 | Write a date & time string | ||
1601 | |||
1602 | Encodes the value as an ISO datetime string | ||
1603 | |||
1604 | Note: The provided string is already correctly encoded as an ISO datetime | ||
1605 | """ | ||
1606 | pass | ||
1607 | |||
1608 | @abstractmethod | ||
1609 | def write_integer(self, v): | ||
1610 | """ | ||
1611 | Write an integer value | ||
1612 | |||
1613 | Encodes the value as an integer in the output | ||
1614 | """ | ||
1615 | pass | ||
1616 | |||
1617 | @abstractmethod | ||
1618 | def write_iri(self, v, compact=None): | ||
1619 | """ | ||
1620 | Write IRI | ||
1621 | |||
1622 | Encodes the string as an IRI. Note that the string will be either a | ||
1623 | fully qualified IRI or a blank node ID. If `compact` is provided and | ||
1624 | the serialization supports compacted IRIs, it should be preferred to | ||
1625 | the full IRI | ||
1626 | """ | ||
1627 | pass | ||
1628 | |||
1629 | @abstractmethod | ||
1630 | def write_enum(self, v, e, compact=None): | ||
1631 | """ | ||
1632 | Write enum value IRI | ||
1633 | |||
1634 | Encodes the string enum value IRI. Note that the string will be a fully | ||
1635 | qualified IRI. If `compact` is provided and the serialization supports | ||
1636 | compacted IRIs, it should be preferred to the full IRI. | ||
1637 | """ | ||
1638 | pass | ||
1639 | |||
1640 | @abstractmethod | ||
1641 | def write_bool(self, v): | ||
1642 | """ | ||
1643 | Write boolean | ||
1644 | |||
1645 | Encodes the value as a boolean in the output | ||
1646 | """ | ||
1647 | pass | ||
1648 | |||
1649 | @abstractmethod | ||
1650 | def write_float(self, v): | ||
1651 | """ | ||
1652 | Write float | ||
1653 | |||
1654 | Encodes the value as a floating point number in the output | ||
1655 | """ | ||
1656 | pass | ||
1657 | |||
1658 | @abstractmethod | ||
1659 | @contextmanager | ||
1660 | def write_object(self, o, _id, needs_id): | ||
1661 | """ | ||
1662 | Write object | ||
1663 | |||
1664 | A context manager that yields an `Encoder` that can be used to encode | ||
1665 | the given object's properties. | ||
1666 | |||
1667 | The provided ID will always be a valid ID (even if o._id is `None`), in | ||
1668 | case the `Encoder` _must_ have an ID. `needs_id` is a hint to indicate | ||
1669 | to the `Encoder` if an ID must be written or not (if that is even an | ||
1670 | option). If it is `True`, the `Encoder` must encode an ID for the | ||
1671 | object. If `False`, the encoder is not required to encode an ID and may | ||
1672 | omit it. | ||
1673 | |||
1674 | The ID will be either a fully qualified IRI, or a blank node IRI. | ||
1675 | |||
1676 | Properties will be written to the object using `write_property` | ||
1677 | """ | ||
1678 | pass | ||
1679 | |||
1680 | @abstractmethod | ||
1681 | @contextmanager | ||
1682 | def write_property(self, iri, compact=None): | ||
1683 | """ | ||
1684 | Write object property | ||
1685 | |||
1686 | A context manager that yields an `Encoder` that can be used to encode | ||
1687 | the value for the property with the given IRI in the current object | ||
1688 | |||
1689 | Note that the IRI will be fully qualified. If `compact` is provided and | ||
1690 | the serialization supports compacted IRIs, it should be preferred to | ||
1691 | the full IRI. | ||
1692 | """ | ||
1693 | pass | ||
1694 | |||
1695 | @abstractmethod | ||
1696 | @contextmanager | ||
1697 | def write_list(self): | ||
1698 | """ | ||
1699 | Write list | ||
1700 | |||
1701 | A context manager that yields an `Encoder` that can be used to encode a | ||
1702 | list. | ||
1703 | |||
1704 | Each item of the list will be added using `write_list_item` | ||
1705 | """ | ||
1706 | pass | ||
1707 | |||
1708 | @abstractmethod | ||
1709 | @contextmanager | ||
1710 | def write_list_item(self): | ||
1711 | """ | ||
1712 | Write list item | ||
1713 | |||
1714 | A context manager that yields an `Encoder` that can be used to encode | ||
1715 | the value for a list item | ||
1716 | """ | ||
1717 | pass | ||
1718 | |||
1719 | |||
1720 | class JSONLDEncoder(Encoder): | ||
1721 | def __init__(self, data=None): | ||
1722 | self.data = data | ||
1723 | |||
1724 | def write_string(self, v): | ||
1725 | self.data = v | ||
1726 | |||
1727 | def write_datetime(self, v): | ||
1728 | self.data = v | ||
1729 | |||
1730 | def write_integer(self, v): | ||
1731 | self.data = v | ||
1732 | |||
1733 | def write_iri(self, v, compact=None): | ||
1734 | self.write_string(compact or v) | ||
1735 | |||
1736 | def write_enum(self, v, e, compact=None): | ||
1737 | self.write_string(compact or v) | ||
1738 | |||
1739 | def write_bool(self, v): | ||
1740 | self.data = v | ||
1741 | |||
1742 | def write_float(self, v): | ||
1743 | self.data = str(v) | ||
1744 | |||
1745 | @contextmanager | ||
1746 | def write_property(self, iri, compact=None): | ||
1747 | s = self.__class__(None) | ||
1748 | yield s | ||
1749 | if s.data is not None: | ||
1750 | self.data[compact or iri] = s.data | ||
1751 | |||
1752 | @contextmanager | ||
1753 | def write_object(self, o, _id, needs_id): | ||
1754 | self.data = { | ||
1755 | "type": o.COMPACT_TYPE or o.TYPE, | ||
1756 | } | ||
1757 | if needs_id: | ||
1758 | self.data[o.ID_ALIAS or "@id"] = _id | ||
1759 | yield self | ||
1760 | |||
1761 | @contextmanager | ||
1762 | def write_list(self): | ||
1763 | self.data = [] | ||
1764 | yield self | ||
1765 | if not self.data: | ||
1766 | self.data = None | ||
1767 | |||
1768 | @contextmanager | ||
1769 | def write_list_item(self): | ||
1770 | s = self.__class__(None) | ||
1771 | yield s | ||
1772 | if s.data is not None: | ||
1773 | self.data.append(s.data) | ||
1774 | |||
1775 | |||
1776 | class JSONLDSerializer(object): | ||
1777 | def __init__(self, **args): | ||
1778 | self.args = args | ||
1779 | |||
1780 | def serialize_data( | ||
1781 | self, | ||
1782 | objectset: SHACLObjectSet, | ||
1783 | force_at_graph=False, | ||
1784 | ): | ||
1785 | h = JSONLDEncoder() | ||
1786 | objectset.encode(h, force_at_graph) | ||
1787 | data = {} | ||
1788 | if len(CONTEXT_URLS) == 1: | ||
1789 | data["@context"] = CONTEXT_URLS[0] | ||
1790 | elif CONTEXT_URLS: | ||
1791 | data["@context"] = CONTEXT_URLS | ||
1792 | |||
1793 | if isinstance(h.data, list): | ||
1794 | data["@graph"] = h.data | ||
1795 | else: | ||
1796 | for k, v in h.data.items(): | ||
1797 | data[k] = v | ||
1798 | |||
1799 | return data | ||
1800 | |||
1801 | def write( | ||
1802 | self, | ||
1803 | objectset: SHACLObjectSet, | ||
1804 | f, | ||
1805 | force_at_graph=False, | ||
1806 | **kwargs, | ||
1807 | ): | ||
1808 | """ | ||
1809 | Write a SHACLObjectSet to a JSON LD file | ||
1810 | |||
1811 | If force_at_graph is True, a @graph node will always be written | ||
1812 | """ | ||
1813 | data = self.serialize_data(objectset, force_at_graph) | ||
1814 | |||
1815 | args = {**self.args, **kwargs} | ||
1816 | |||
1817 | sha1 = hashlib.sha1() | ||
1818 | for chunk in json.JSONEncoder(**args).iterencode(data): | ||
1819 | chunk = chunk.encode("utf-8") | ||
1820 | f.write(chunk) | ||
1821 | sha1.update(chunk) | ||
1822 | |||
1823 | return sha1.hexdigest() | ||
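
# The stream must be opened in binary mode since encoded UTF-8 chunks are
# written; extra keyword arguments are passed on to json.JSONEncoder.
# Sketch (the file name is hypothetical):
#
#   with open("doc.spdx.json", "wb") as f:
#       digest = JSONLDSerializer().write(objset, f, indent=2)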
1824 | |||
1825 | |||
1826 | class JSONLDInlineEncoder(Encoder): | ||
1827 | def __init__(self, f, sha1): | ||
1828 | self.f = f | ||
1829 | self.comma = False | ||
1830 | self.sha1 = sha1 | ||
1831 | |||
1832 | def write(self, s): | ||
1833 | s = s.encode("utf-8") | ||
1834 | self.f.write(s) | ||
1835 | self.sha1.update(s) | ||
1836 | |||
1837 | def _write_comma(self): | ||
1838 | if self.comma: | ||
1839 | self.write(",") | ||
1840 | self.comma = False | ||
1841 | |||
1842 | def write_string(self, v): | ||
1843 | self.write(json.dumps(v)) | ||
1844 | |||
1845 | def write_datetime(self, v): | ||
1846 | self.write_string(v) | ||
1847 | |||
1848 | def write_integer(self, v): | ||
1849 | self.write(f"{v}") | ||
1850 | |||
1851 | def write_iri(self, v, compact=None): | ||
1852 | self.write_string(compact or v) | ||
1853 | |||
1854 | def write_enum(self, v, e, compact=None): | ||
1855 | self.write_iri(v, compact) | ||
1856 | |||
1857 | def write_bool(self, v): | ||
1858 | if v: | ||
1859 | self.write("true") | ||
1860 | else: | ||
1861 | self.write("false") | ||
1862 | |||
1863 | def write_float(self, v): | ||
1864 | self.write(json.dumps(str(v))) | ||
1865 | |||
1866 | @contextmanager | ||
1867 | def write_property(self, iri, compact=None): | ||
1868 | self._write_comma() | ||
1869 | self.write_string(compact or iri) | ||
1870 | self.write(":") | ||
1871 | yield self | ||
1872 | self.comma = True | ||
1873 | |||
1874 | @contextmanager | ||
1875 | def write_object(self, o, _id, needs_id): | ||
1876 | self._write_comma() | ||
1877 | |||
1878 | self.write("{") | ||
1879 | self.write_string("type") | ||
1880 | self.write(":") | ||
1881 | self.write_string(o.COMPACT_TYPE or o.TYPE) | ||
1882 | self.comma = True | ||
1883 | |||
1884 | if needs_id: | ||
1885 | self._write_comma() | ||
1886 | self.write_string(o.ID_ALIAS or "@id") | ||
1887 | self.write(":") | ||
1888 | self.write_string(_id) | ||
1889 | self.comma = True | ||
1890 | |||
1891 | self.comma = True | ||
1892 | yield self | ||
1893 | |||
1894 | self.write("}") | ||
1895 | self.comma = True | ||
1896 | |||
1897 | @contextmanager | ||
1898 | def write_list(self): | ||
1899 | self._write_comma() | ||
1900 | self.write("[") | ||
1901 | yield self.__class__(self.f, self.sha1) | ||
1902 | self.write("]") | ||
1903 | self.comma = True | ||
1904 | |||
1905 | @contextmanager | ||
1906 | def write_list_item(self): | ||
1907 | self._write_comma() | ||
1908 | yield self.__class__(self.f, self.sha1) | ||
1909 | self.comma = True | ||
1910 | |||
1911 | |||
1912 | class JSONLDInlineSerializer(object): | ||
1913 | def write( | ||
1914 | self, | ||
1915 | objectset: SHACLObjectSet, | ||
1916 | f, | ||
1917 | force_at_graph=False, | ||
1918 | ): | ||
1919 | """ | ||
1920 | Write a SHACLObjectSet to a JSON LD file | ||
1921 | |||
1922 | Note: force_at_graph is included for compatibility, but ignored. This | ||
1923 | serializer always writes out a graph | ||
1924 | """ | ||
1925 | sha1 = hashlib.sha1() | ||
1926 | h = JSONLDInlineEncoder(f, sha1) | ||
1927 | h.write('{"@context":') | ||
1928 | if len(CONTEXT_URLS) == 1: | ||
1929 | h.write(f'"{CONTEXT_URLS[0]}"') | ||
1930 | elif CONTEXT_URLS: | ||
1931 | h.write('["') | ||
1932 | h.write('","'.join(CONTEXT_URLS)) | ||
1933 | h.write('"]') | ||
1934 | h.write(",") | ||
1935 | |||
1936 | h.write('"@graph":') | ||
1937 | |||
1938 | objectset.encode(h, True) | ||
1939 | h.write("}") | ||
1940 | return sha1.hexdigest() | ||
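
# Same interface as JSONLDSerializer.write, but streams the output in a
# single pass instead of building the whole document in memory (sketch):
#
#   with open("doc.spdx.json", "wb") as f:
#       digest = JSONLDInlineSerializer().write(objset, f)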
1941 | |||
1942 | |||
1943 | def print_tree(objects, all_fields=False): | ||
1944 | """ | ||
1945 | Print object tree | ||
1946 | """ | ||
1947 | seen = set() | ||
1948 | |||
1949 | def callback(value, path): | ||
1950 | nonlocal seen | ||
1951 | |||
1952 | s = (" " * (len(path) - 1)) + f"{path[-1]}" | ||
1953 | if isinstance(value, SHACLObject): | ||
1954 | s += f" {value} ({id(value)})" | ||
1955 | is_empty = False | ||
1956 | elif isinstance(value, ListProxy): | ||
1957 | is_empty = len(value) == 0 | ||
1958 | if is_empty: | ||
1959 | s += " []" | ||
1960 | else: | ||
1961 | s += f" {value!r}" | ||
1962 | is_empty = value is None | ||
1963 | |||
1964 | if all_fields or not is_empty: | ||
1965 | print(s) | ||
1966 | |||
1967 | if isinstance(value, SHACLObject): | ||
1968 | if value in seen: | ||
1969 | return False | ||
1970 | seen.add(value) | ||
1971 | return True | ||
1972 | |||
1973 | return True | ||
1974 | |||
1975 | for o in objects: | ||
1976 | o.walk(callback) | ||
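
# e.g. to dump a whole object set for debugging (sketch):
#
#   print_tree(objset.objects, all_fields=True)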
1977 | |||
1978 | |||
1979 | # fmt: off | ||
1980 | """Format Guard""" | ||
1981 | |||
1982 | |||
1983 | CONTEXT_URLS = [ | ||
1984 | "https://spdx.org/rdf/3.0.1/spdx-context.jsonld", | ||
1985 | ] | ||
1986 | |||
1987 | |||
1988 | # CLASSES | ||
1989 | # A class for describing the energy consumption incurred by an AI model in | ||
1990 | # different stages of its lifecycle. | ||
1991 | @register("https://spdx.org/rdf/3.0.1/terms/AI/EnergyConsumption", compact_type="ai_EnergyConsumption", abstract=False) | ||
1992 | class ai_EnergyConsumption(SHACLObject): | ||
1993 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
1994 | NAMED_INDIVIDUALS = { | ||
1995 | } | ||
1996 | |||
1997 | @classmethod | ||
1998 | def _register_props(cls): | ||
1999 | super()._register_props() | ||
2000 | # Specifies the amount of energy consumed when finetuning the AI model that is | ||
2001 | # being used in the AI system. | ||
2002 | cls._add_property( | ||
2003 | "ai_finetuningEnergyConsumption", | ||
2004 | ListProp(ObjectProp(ai_EnergyConsumptionDescription, False)), | ||
2005 | iri="https://spdx.org/rdf/3.0.1/terms/AI/finetuningEnergyConsumption", | ||
2006 | compact="ai_finetuningEnergyConsumption", | ||
2007 | ) | ||
2008 | # Specifies the amount of energy consumed during inference time by an AI model | ||
2009 | # that is being used in the AI system. | ||
2010 | cls._add_property( | ||
2011 | "ai_inferenceEnergyConsumption", | ||
2012 | ListProp(ObjectProp(ai_EnergyConsumptionDescription, False)), | ||
2013 | iri="https://spdx.org/rdf/3.0.1/terms/AI/inferenceEnergyConsumption", | ||
2014 | compact="ai_inferenceEnergyConsumption", | ||
2015 | ) | ||
2016 | # Specifies the amount of energy consumed when training the AI model that is | ||
2017 | # being used in the AI system. | ||
2018 | cls._add_property( | ||
2019 | "ai_trainingEnergyConsumption", | ||
2020 | ListProp(ObjectProp(ai_EnergyConsumptionDescription, False)), | ||
2021 | iri="https://spdx.org/rdf/3.0.1/terms/AI/trainingEnergyConsumption", | ||
2022 | compact="ai_trainingEnergyConsumption", | ||
2023 | ) | ||
2024 | |||
2025 | |||
2026 | # The class that helps note down the quantity of energy consumption and the unit | ||
2027 | # used for measurement. | ||
2028 | @register("https://spdx.org/rdf/3.0.1/terms/AI/EnergyConsumptionDescription", compact_type="ai_EnergyConsumptionDescription", abstract=False) | ||
2029 | class ai_EnergyConsumptionDescription(SHACLObject): | ||
2030 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2031 | NAMED_INDIVIDUALS = { | ||
2032 | } | ||
2033 | |||
2034 | @classmethod | ||
2035 | def _register_props(cls): | ||
2036 | super()._register_props() | ||
2037 | # Represents the energy quantity. | ||
2038 | cls._add_property( | ||
2039 | "ai_energyQuantity", | ||
2040 | FloatProp(), | ||
2041 | iri="https://spdx.org/rdf/3.0.1/terms/AI/energyQuantity", | ||
2042 | min_count=1, | ||
2043 | compact="ai_energyQuantity", | ||
2044 | ) | ||
2045 | # Specifies the unit in which energy is measured. | ||
2046 | cls._add_property( | ||
2047 | "ai_energyUnit", | ||
2048 | EnumProp([ | ||
2049 | ("https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/kilowattHour", "kilowattHour"), | ||
2050 | ("https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/megajoule", "megajoule"), | ||
2051 | ("https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/other", "other"), | ||
2052 | ]), | ||
2053 | iri="https://spdx.org/rdf/3.0.1/terms/AI/energyUnit", | ||
2054 | min_count=1, | ||
2055 | compact="ai_energyUnit", | ||
2056 | ) | ||
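
# A construction sketch, assuming property values can be passed as
# keyword arguments to the generated constructor:
#
#   desc = ai_EnergyConsumptionDescription(
#       ai_energyQuantity=42.5,
#       ai_energyUnit=ai_EnergyUnitType.kilowattHour,
#   )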
2057 | |||
2058 | |||
2059 | # Specifies the unit of energy consumption. | ||
2060 | @register("https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType", compact_type="ai_EnergyUnitType", abstract=False) | ||
2061 | class ai_EnergyUnitType(SHACLObject): | ||
2062 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2063 | NAMED_INDIVIDUALS = { | ||
2064 | "kilowattHour": "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/kilowattHour", | ||
2065 | "megajoule": "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/megajoule", | ||
2066 | "other": "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/other", | ||
2067 | } | ||
2068 | # Kilowatt-hour. | ||
2069 | kilowattHour = "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/kilowattHour" | ||
2070 | # Megajoule. | ||
2071 | megajoule = "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/megajoule" | ||
2072 | # Any other units of energy measurement. | ||
2073 | other = "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/other" | ||
2074 | |||
2075 | |||
2076 | # Specifies the safety risk level. | ||
2077 | @register("https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType", compact_type="ai_SafetyRiskAssessmentType", abstract=False) | ||
2078 | class ai_SafetyRiskAssessmentType(SHACLObject): | ||
2079 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2080 | NAMED_INDIVIDUALS = { | ||
2081 | "high": "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/high", | ||
2082 | "low": "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/low", | ||
2083 | "medium": "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/medium", | ||
2084 | "serious": "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/serious", | ||
2085 | } | ||
2086 | # The second-highest level of risk posed by an AI system. | ||
2087 | high = "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/high" | ||
2088 | # Low/no risk is posed by an AI system. | ||
2089 | low = "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/low" | ||
2090 | # The third-highest level of risk posed by an AI system. | ||
2091 | medium = "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/medium" | ||
2092 | # The highest level of risk posed by an AI system. | ||
2093 | serious = "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/serious" | ||
2094 | |||
2095 | |||
2096 | # Specifies the type of an annotation. | ||
2097 | @register("https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType", compact_type="AnnotationType", abstract=False) | ||
2098 | class AnnotationType(SHACLObject): | ||
2099 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2100 | NAMED_INDIVIDUALS = { | ||
2101 | "other": "https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/other", | ||
2102 | "review": "https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/review", | ||
2103 | } | ||
2104 | # Used to store extra information about an Element which is not part of a review (e.g. extra information provided during the creation of the Element). | ||
2105 | other = "https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/other" | ||
2106 | # Used when someone reviews the Element. | ||
2107 | review = "https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/review" | ||
2108 | |||
2109 | |||
2110 | # Provides information about the creation of the Element. | ||
2111 | @register("https://spdx.org/rdf/3.0.1/terms/Core/CreationInfo", compact_type="CreationInfo", abstract=False) | ||
2112 | class CreationInfo(SHACLObject): | ||
2113 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2114 | NAMED_INDIVIDUALS = { | ||
2115 | } | ||
2116 | |||
2117 | @classmethod | ||
2118 | def _register_props(cls): | ||
2119 | super()._register_props() | ||
2120 | # Provide consumers with comments by the creator of the Element about the | ||
2121 | # Element. | ||
2122 | cls._add_property( | ||
2123 | "comment", | ||
2124 | StringProp(), | ||
2125 | iri="https://spdx.org/rdf/3.0.1/terms/Core/comment", | ||
2126 | compact="comment", | ||
2127 | ) | ||
2128 | # Identifies when the Element was originally created. | ||
2129 | cls._add_property( | ||
2130 | "created", | ||
2131 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
2132 | iri="https://spdx.org/rdf/3.0.1/terms/Core/created", | ||
2133 | min_count=1, | ||
2134 | compact="created", | ||
2135 | ) | ||
2136 | # Identifies who or what created the Element. | ||
2137 | cls._add_property( | ||
2138 | "createdBy", | ||
2139 | ListProp(ObjectProp(Agent, False, context=[ | ||
2140 | ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"), | ||
2141 | ],)), | ||
2142 | iri="https://spdx.org/rdf/3.0.1/terms/Core/createdBy", | ||
2143 | min_count=1, | ||
2144 | compact="createdBy", | ||
2145 | ) | ||
2146 | # Identifies the tooling that was used during the creation of the Element. | ||
2147 | cls._add_property( | ||
2148 | "createdUsing", | ||
2149 | ListProp(ObjectProp(Tool, False)), | ||
2150 | iri="https://spdx.org/rdf/3.0.1/terms/Core/createdUsing", | ||
2151 | compact="createdUsing", | ||
2152 | ) | ||
2153 | # Provides a reference number that can be used to understand how to parse and | ||
2154 | # interpret an Element. | ||
2155 | cls._add_property( | ||
2156 | "specVersion", | ||
2157 | StringProp(pattern=r"^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$",), | ||
2158 | iri="https://spdx.org/rdf/3.0.1/terms/Core/specVersion", | ||
2159 | min_count=1, | ||
2160 | compact="specVersion", | ||
2161 | ) | ||
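
# A construction sketch (keyword-argument construction is an assumption;
# `agent` stands in for a previously created Agent):
#
#   ci = CreationInfo(
#       created="2024-01-01T00:00:00Z",
#       createdBy=[agent],
#       specVersion="3.0.1",
#   )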
2162 | |||
2163 | |||
2164 | # A key with an associated value. | ||
2165 | @register("https://spdx.org/rdf/3.0.1/terms/Core/DictionaryEntry", compact_type="DictionaryEntry", abstract=False) | ||
2166 | class DictionaryEntry(SHACLObject): | ||
2167 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2168 | NAMED_INDIVIDUALS = { | ||
2169 | } | ||
2170 | |||
2171 | @classmethod | ||
2172 | def _register_props(cls): | ||
2173 | super()._register_props() | ||
2174 | # A key used in a generic key-value pair. | ||
2175 | cls._add_property( | ||
2176 | "key", | ||
2177 | StringProp(), | ||
2178 | iri="https://spdx.org/rdf/3.0.1/terms/Core/key", | ||
2179 | min_count=1, | ||
2180 | compact="key", | ||
2181 | ) | ||
2182 | # A value used in a generic key-value pair. | ||
2183 | cls._add_property( | ||
2184 | "value", | ||
2185 | StringProp(), | ||
2186 | iri="https://spdx.org/rdf/3.0.1/terms/Core/value", | ||
2187 | compact="value", | ||
2188 | ) | ||
2189 | |||
2190 | |||
2191 | # Base domain class from which all other SPDX-3.0 domain classes derive. | ||
2192 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Element", compact_type="Element", abstract=True) | ||
2193 | class Element(SHACLObject): | ||
2194 | NODE_KIND = NodeKind.IRI | ||
2195 | ID_ALIAS = "spdxId" | ||
2196 | NAMED_INDIVIDUALS = { | ||
2197 | } | ||
2198 | |||
2199 | @classmethod | ||
2200 | def _register_props(cls): | ||
2201 | super()._register_props() | ||
2202 | # Provide consumers with comments by the creator of the Element about the | ||
2203 | # Element. | ||
2204 | cls._add_property( | ||
2205 | "comment", | ||
2206 | StringProp(), | ||
2207 | iri="https://spdx.org/rdf/3.0.1/terms/Core/comment", | ||
2208 | compact="comment", | ||
2209 | ) | ||
2210 | # Provides information about the creation of the Element. | ||
2211 | cls._add_property( | ||
2212 | "creationInfo", | ||
2213 | ObjectProp(CreationInfo, True), | ||
2214 | iri="https://spdx.org/rdf/3.0.1/terms/Core/creationInfo", | ||
2215 | min_count=1, | ||
2216 | compact="creationInfo", | ||
2217 | ) | ||
2218 | # Provides a detailed description of the Element. | ||
2219 | cls._add_property( | ||
2220 | "description", | ||
2221 | StringProp(), | ||
2222 | iri="https://spdx.org/rdf/3.0.1/terms/Core/description", | ||
2223 | compact="description", | ||
2224 | ) | ||
2225 | # Specifies an Extension characterization of some aspect of an Element. | ||
2226 | cls._add_property( | ||
2227 | "extension", | ||
2228 | ListProp(ObjectProp(extension_Extension, False)), | ||
2229 | iri="https://spdx.org/rdf/3.0.1/terms/Core/extension", | ||
2230 | compact="extension", | ||
2231 | ) | ||
2232 | # Provides a reference to a resource outside the scope of SPDX-3.0 content | ||
2233 | # that uniquely identifies an Element. | ||
2234 | cls._add_property( | ||
2235 | "externalIdentifier", | ||
2236 | ListProp(ObjectProp(ExternalIdentifier, False)), | ||
2237 | iri="https://spdx.org/rdf/3.0.1/terms/Core/externalIdentifier", | ||
2238 | compact="externalIdentifier", | ||
2239 | ) | ||
2240 | # Points to a resource outside the scope of the SPDX-3.0 content | ||
2241 | # that provides additional characteristics of an Element. | ||
2242 | cls._add_property( | ||
2243 | "externalRef", | ||
2244 | ListProp(ObjectProp(ExternalRef, False)), | ||
2245 | iri="https://spdx.org/rdf/3.0.1/terms/Core/externalRef", | ||
2246 | compact="externalRef", | ||
2247 | ) | ||
2248 | # Identifies the name of an Element as designated by the creator. | ||
2249 | cls._add_property( | ||
2250 | "name", | ||
2251 | StringProp(), | ||
2252 | iri="https://spdx.org/rdf/3.0.1/terms/Core/name", | ||
2253 | compact="name", | ||
2254 | ) | ||
2255 | # A short description of an Element. | ||
2256 | cls._add_property( | ||
2257 | "summary", | ||
2258 | StringProp(), | ||
2259 | iri="https://spdx.org/rdf/3.0.1/terms/Core/summary", | ||
2260 | compact="summary", | ||
2261 | ) | ||
2262 | # Provides an IntegrityMethod with which the integrity of an Element can be | ||
2263 | # asserted. | ||
2264 | cls._add_property( | ||
2265 | "verifiedUsing", | ||
2266 | ListProp(ObjectProp(IntegrityMethod, False)), | ||
2267 | iri="https://spdx.org/rdf/3.0.1/terms/Core/verifiedUsing", | ||
2268 | compact="verifiedUsing", | ||
2269 | ) | ||
2270 | |||
2271 | |||
2272 | # A collection of Elements, not necessarily with unifying context. | ||
2273 | @register("https://spdx.org/rdf/3.0.1/terms/Core/ElementCollection", compact_type="ElementCollection", abstract=True) | ||
2274 | class ElementCollection(Element): | ||
2275 | NODE_KIND = NodeKind.IRI | ||
2276 | ID_ALIAS = "spdxId" | ||
2277 | NAMED_INDIVIDUALS = { | ||
2278 | } | ||
2279 | |||
2280 | @classmethod | ||
2281 | def _register_props(cls): | ||
2282 | super()._register_props() | ||
2283 | # Refers to one or more Elements that are part of an ElementCollection. | ||
2284 | cls._add_property( | ||
2285 | "element", | ||
2286 | ListProp(ObjectProp(Element, False, context=[ | ||
2287 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", "NoneElement"), | ||
2288 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"), | ||
2289 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"), | ||
2290 | ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"), | ||
2291 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", "NoAssertionElement"), | ||
2292 | ],)), | ||
2293 | iri="https://spdx.org/rdf/3.0.1/terms/Core/element", | ||
2294 | compact="element", | ||
2295 | ) | ||
2296 | # Describes a profile which the creator of this ElementCollection intends to | ||
2297 | # conform to. | ||
2298 | cls._add_property( | ||
2299 | "profileConformance", | ||
2300 | ListProp(EnumProp([ | ||
2301 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/ai", "ai"), | ||
2302 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/build", "build"), | ||
2303 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/core", "core"), | ||
2304 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/dataset", "dataset"), | ||
2305 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/expandedLicensing", "expandedLicensing"), | ||
2306 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/extension", "extension"), | ||
2307 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/lite", "lite"), | ||
2308 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/security", "security"), | ||
2309 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/simpleLicensing", "simpleLicensing"), | ||
2310 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/software", "software"), | ||
2311 | ])), | ||
2312 | iri="https://spdx.org/rdf/3.0.1/terms/Core/profileConformance", | ||
2313 | compact="profileConformance", | ||
2314 | ) | ||
2315 | # This property is used to denote the root Element(s) of a tree of elements contained in a BOM. | ||
2316 | cls._add_property( | ||
2317 | "rootElement", | ||
2318 | ListProp(ObjectProp(Element, False, context=[ | ||
2319 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", "NoneElement"), | ||
2320 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"), | ||
2321 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"), | ||
2322 | ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"), | ||
2323 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", "NoAssertionElement"), | ||
2324 | ],)), | ||
2325 | iri="https://spdx.org/rdf/3.0.1/terms/Core/rootElement", | ||
2326 | compact="rootElement", | ||
2327 | ) | ||
2328 | |||
2329 | |||
2330 | # A reference to a resource identifier defined outside the scope of SPDX-3.0 content that uniquely identifies an Element. | ||
2331 | @register("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifier", compact_type="ExternalIdentifier", abstract=False) | ||
2332 | class ExternalIdentifier(SHACLObject): | ||
2333 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2334 | NAMED_INDIVIDUALS = { | ||
2335 | } | ||
2336 | |||
2337 | @classmethod | ||
2338 | def _register_props(cls): | ||
2339 | super()._register_props() | ||
2340 | # Provide consumers with comments by the creator of the Element about the | ||
2341 | # Element. | ||
2342 | cls._add_property( | ||
2343 | "comment", | ||
2344 | StringProp(), | ||
2345 | iri="https://spdx.org/rdf/3.0.1/terms/Core/comment", | ||
2346 | compact="comment", | ||
2347 | ) | ||
2348 | # Specifies the type of the external identifier. | ||
2349 | cls._add_property( | ||
2350 | "externalIdentifierType", | ||
2351 | EnumProp([ | ||
2352 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe22", "cpe22"), | ||
2353 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe23", "cpe23"), | ||
2354 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cve", "cve"), | ||
2355 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/email", "email"), | ||
2356 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/gitoid", "gitoid"), | ||
2357 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/other", "other"), | ||
2358 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/packageUrl", "packageUrl"), | ||
2359 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/securityOther", "securityOther"), | ||
2360 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swhid", "swhid"), | ||
2361 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swid", "swid"), | ||
2362 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/urlScheme", "urlScheme"), | ||
2363 | ]), | ||
2364 | iri="https://spdx.org/rdf/3.0.1/terms/Core/externalIdentifierType", | ||
2365 | min_count=1, | ||
2366 | compact="externalIdentifierType", | ||
2367 | ) | ||
2368 | # Uniquely identifies an external element. | ||
2369 | cls._add_property( | ||
2370 | "identifier", | ||
2371 | StringProp(), | ||
2372 | iri="https://spdx.org/rdf/3.0.1/terms/Core/identifier", | ||
2373 | min_count=1, | ||
2374 | compact="identifier", | ||
2375 | ) | ||
2376 | # Provides the location for more information regarding an external identifier. | ||
2377 | cls._add_property( | ||
2378 | "identifierLocator", | ||
2379 | ListProp(AnyURIProp()), | ||
2380 | iri="https://spdx.org/rdf/3.0.1/terms/Core/identifierLocator", | ||
2381 | compact="identifierLocator", | ||
2382 | ) | ||
2383 | # An entity that is authorized to issue identification credentials. | ||
2384 | cls._add_property( | ||
2385 | "issuingAuthority", | ||
2386 | StringProp(), | ||
2387 | iri="https://spdx.org/rdf/3.0.1/terms/Core/issuingAuthority", | ||
2388 | compact="issuingAuthority", | ||
2389 | ) | ||
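
# A construction sketch (keyword-argument construction is an assumption):
#
#   ext_id = ExternalIdentifier(
#       externalIdentifierType=ExternalIdentifierType.packageUrl,
#       identifier="pkg:generic/example@1.0",
#   )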
2390 | |||
2391 | |||
2392 | # Specifies the type of an external identifier. | ||
2393 | @register("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType", compact_type="ExternalIdentifierType", abstract=False) | ||
2394 | class ExternalIdentifierType(SHACLObject): | ||
2395 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2396 | NAMED_INDIVIDUALS = { | ||
2397 | "cpe22": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe22", | ||
2398 | "cpe23": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe23", | ||
2399 | "cve": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cve", | ||
2400 | "email": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/email", | ||
2401 | "gitoid": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/gitoid", | ||
2402 | "other": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/other", | ||
2403 | "packageUrl": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/packageUrl", | ||
2404 | "securityOther": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/securityOther", | ||
2405 | "swhid": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swhid", | ||
2406 | "swid": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swid", | ||
2407 | "urlScheme": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/urlScheme", | ||
2408 | } | ||
2409 | # [Common Platform Enumeration Specification 2.2](https://cpe.mitre.org/files/cpe-specification_2.2.pdf) | ||
2410 | cpe22 = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe22" | ||
2411 | # [Common Platform Enumeration: Naming Specification Version 2.3](https://csrc.nist.gov/publications/detail/nistir/7695/final) | ||
2412 | cpe23 = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe23" | ||
2413 | # Common Vulnerabilities and Exposures identifiers, an identifier for a specific software flaw defined within the official CVE Dictionary and that conforms to the [CVE specification](https://csrc.nist.gov/glossary/term/cve_id). | ||
2414 | cve = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cve" | ||
2415 | # Email address, as defined in [RFC 3696](https://datatracker.ietf.org/doc/rfc3696/) Section 3. | ||
2416 | email = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/email" | ||
2417 | # [Gitoid](https://www.iana.org/assignments/uri-schemes/prov/gitoid), stands for [Git Object ID](https://git-scm.com/book/en/v2/Git-Internals-Git-Objects). A gitoid of type blob is a unique hash of a binary artifact. A gitoid may represent either an [Artifact Identifier](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-identifier-types) for the software artifact or an [Input Manifest Identifier](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#input-manifest-identifier) for the software artifact's associated [Artifact Input Manifest](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-input-manifest); this ambiguity exists because the Artifact Input Manifest is itself an artifact, and the gitoid of that artifact is its valid identifier. Gitoids calculated on software artifacts (Snippet, File, or Package Elements) should be recorded in the SPDX 3.0 SoftwareArtifact's contentIdentifier property. Gitoids calculated on the Artifact Input Manifest (Input Manifest Identifier) should be recorded in the SPDX 3.0 Element's externalIdentifier property. See [OmniBOR Specification](https://github.com/omnibor/spec/), a minimalistic specification for describing software [Artifact Dependency Graphs](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-dependency-graph-adg). | ||
2418 | gitoid = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/gitoid" | ||
2419 | # Used when the type does not match any of the other options. | ||
2420 | other = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/other" | ||
2421 | # Package URL, as defined in the corresponding [Annex](../../../annexes/pkg-url-specification.md) of this specification. | ||
2422 | packageUrl = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/packageUrl" | ||
2423 | # Used when there is a security related identifier of unspecified type. | ||
2424 | securityOther = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/securityOther" | ||
2425 | # SoftWare Hash IDentifier, a persistent intrinsic identifier for digital artifacts, such as files, trees (also known as directories or folders), commits, and other objects typically found in version control systems. The format of the identifiers is defined in the [SWHID specification](https://www.swhid.org/specification/v1.1/4.Syntax) (ISO/IEC DIS 18670). They typically look like `swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2`. | ||
2426 | swhid = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swhid" | ||
2427 | # Concise Software Identification (CoSWID) tag, as defined in [RFC 9393](https://datatracker.ietf.org/doc/rfc9393/) Section 2.3. | ||
2428 | swid = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swid" | ||
2429 | # [Uniform Resource Identifier (URI) Schemes](https://www.iana.org/assignments/uri-schemes/uri-schemes.xhtml). The scheme used in order to locate a resource. | ||
2430 | urlScheme = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/urlScheme" | ||
2431 | |||
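Although this file is generated, the pattern used by the enum-style classes such as ExternalIdentifierType is worth a short illustration: each compact name is a plain class attribute holding the full IRI, and NAMED_INDIVIDUALS carries the same mapping as a dict, which makes reverse lookups easy. A minimal sketch, assuming the generated module is importable as oe.spdx30 (the import path implied elsewhere in this series):

```python
# Sketch only: resolving compact names to IRIs and back for the
# enum-style classes in this generated module.
from oe.spdx30 import ExternalIdentifierType

iri = ExternalIdentifierType.cpe23
assert iri == ExternalIdentifierType.NAMED_INDIVIDUALS["cpe23"]

# Reverse lookup: full IRI back to its compact name.
by_iri = {v: k for k, v in ExternalIdentifierType.NAMED_INDIVIDUALS.items()}
print(by_iri[iri])  # -> "cpe23"
```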
2432 | |||
2433 | # A map of Element identifiers that are used within an SpdxDocument but defined | ||
2434 | # external to that SpdxDocument. | ||
2435 | @register("https://spdx.org/rdf/3.0.1/terms/Core/ExternalMap", compact_type="ExternalMap", abstract=False) | ||
2436 | class ExternalMap(SHACLObject): | ||
2437 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2438 | NAMED_INDIVIDUALS = { | ||
2439 | } | ||
2440 | |||
2441 | @classmethod | ||
2442 | def _register_props(cls): | ||
2443 | super()._register_props() | ||
2444 | # Artifact representing a serialization instance of SPDX data containing the | ||
2445 | # definition of a particular Element. | ||
2446 | cls._add_property( | ||
2447 | "definingArtifact", | ||
2448 | ObjectProp(Artifact, False), | ||
2449 | iri="https://spdx.org/rdf/3.0.1/terms/Core/definingArtifact", | ||
2450 | compact="definingArtifact", | ||
2451 | ) | ||
2452 | # Identifies an external Element used within an SpdxDocument but defined | ||
2453 | # external to that SpdxDocument. | ||
2454 | cls._add_property( | ||
2455 | "externalSpdxId", | ||
2456 | AnyURIProp(), | ||
2457 | iri="https://spdx.org/rdf/3.0.1/terms/Core/externalSpdxId", | ||
2458 | min_count=1, | ||
2459 | compact="externalSpdxId", | ||
2460 | ) | ||
2461 | # Provides an indication of where to retrieve an external Element. | ||
2462 | cls._add_property( | ||
2463 | "locationHint", | ||
2464 | AnyURIProp(), | ||
2465 | iri="https://spdx.org/rdf/3.0.1/terms/Core/locationHint", | ||
2466 | compact="locationHint", | ||
2467 | ) | ||
2468 | # Provides an IntegrityMethod with which the integrity of an Element can be | ||
2469 | # asserted. | ||
2470 | cls._add_property( | ||
2471 | "verifiedUsing", | ||
2472 | ListProp(ObjectProp(IntegrityMethod, False)), | ||
2473 | iri="https://spdx.org/rdf/3.0.1/terms/Core/verifiedUsing", | ||
2474 | compact="verifiedUsing", | ||
2475 | ) | ||
2476 | |||
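A hedged usage sketch for ExternalMap: externalSpdxId is the only mandatory property (min_count=1 above); the others are optional retrieval and verification hints. This assumes, as the rest of these examples do, that the SHACLObject runtime accepts plain attribute assignment for registered properties, and the IRIs are purely hypothetical:

```python
from oe.spdx30 import ExternalMap

# An ExternalMap points at an Element used in this SpdxDocument but
# defined in another serialization.
emap = ExternalMap()
emap.externalSpdxId = "https://example.com/spdx/doc-b/element-1"  # mandatory
emap.locationHint = "https://example.com/spdx/doc-b.spdx.json"    # optional hint
```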
2477 | |||
2478 | # A reference to a resource outside the scope of SPDX-3.0 content related to an Element. | ||
2479 | @register("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRef", compact_type="ExternalRef", abstract=False) | ||
2480 | class ExternalRef(SHACLObject): | ||
2481 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2482 | NAMED_INDIVIDUALS = { | ||
2483 | } | ||
2484 | |||
2485 | @classmethod | ||
2486 | def _register_props(cls): | ||
2487 | super()._register_props() | ||
2488 | # Provide consumers with comments by the creator of the Element about the | ||
2489 | # Element. | ||
2490 | cls._add_property( | ||
2491 | "comment", | ||
2492 | StringProp(), | ||
2493 | iri="https://spdx.org/rdf/3.0.1/terms/Core/comment", | ||
2494 | compact="comment", | ||
2495 | ) | ||
2496 | # Provides information about the content type of an Element or a Property. | ||
2497 | cls._add_property( | ||
2498 | "contentType", | ||
2499 | StringProp(pattern=r"^[^\/]+\/[^\/]+$",), | ||
2500 | iri="https://spdx.org/rdf/3.0.1/terms/Core/contentType", | ||
2501 | compact="contentType", | ||
2502 | ) | ||
2503 | # Specifies the type of the external reference. | ||
2504 | cls._add_property( | ||
2505 | "externalRefType", | ||
2506 | EnumProp([ | ||
2507 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altDownloadLocation", "altDownloadLocation"), | ||
2508 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altWebPage", "altWebPage"), | ||
2509 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/binaryArtifact", "binaryArtifact"), | ||
2510 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/bower", "bower"), | ||
2511 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildMeta", "buildMeta"), | ||
2512 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildSystem", "buildSystem"), | ||
2513 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/certificationReport", "certificationReport"), | ||
2514 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/chat", "chat"), | ||
2515 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/componentAnalysisReport", "componentAnalysisReport"), | ||
2516 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/cwe", "cwe"), | ||
2517 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/documentation", "documentation"), | ||
2518 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/dynamicAnalysisReport", "dynamicAnalysisReport"), | ||
2519 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/eolNotice", "eolNotice"), | ||
2520 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/exportControlAssessment", "exportControlAssessment"), | ||
2521 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/funding", "funding"), | ||
2522 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/issueTracker", "issueTracker"), | ||
2523 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/license", "license"), | ||
2524 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mailingList", "mailingList"), | ||
2525 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mavenCentral", "mavenCentral"), | ||
2526 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/metrics", "metrics"), | ||
2527 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/npm", "npm"), | ||
2528 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/nuget", "nuget"), | ||
2529 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/other", "other"), | ||
2530 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/privacyAssessment", "privacyAssessment"), | ||
2531 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/productMetadata", "productMetadata"), | ||
2532 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/purchaseOrder", "purchaseOrder"), | ||
2533 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/qualityAssessmentReport", "qualityAssessmentReport"), | ||
2534 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseHistory", "releaseHistory"), | ||
2535 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseNotes", "releaseNotes"), | ||
2536 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/riskAssessment", "riskAssessment"), | ||
2537 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/runtimeAnalysisReport", "runtimeAnalysisReport"), | ||
2538 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/secureSoftwareAttestation", "secureSoftwareAttestation"), | ||
2539 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdversaryModel", "securityAdversaryModel"), | ||
2540 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdvisory", "securityAdvisory"), | ||
2541 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityFix", "securityFix"), | ||
2542 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityOther", "securityOther"), | ||
2543 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPenTestReport", "securityPenTestReport"), | ||
2544 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPolicy", "securityPolicy"), | ||
2545 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityThreatModel", "securityThreatModel"), | ||
2546 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/socialMedia", "socialMedia"), | ||
2547 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/sourceArtifact", "sourceArtifact"), | ||
2548 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/staticAnalysisReport", "staticAnalysisReport"), | ||
2549 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/support", "support"), | ||
2550 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vcs", "vcs"), | ||
2551 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityDisclosureReport", "vulnerabilityDisclosureReport"), | ||
2552 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityExploitabilityAssessment", "vulnerabilityExploitabilityAssessment"), | ||
2553 | ]), | ||
2554 | iri="https://spdx.org/rdf/3.0.1/terms/Core/externalRefType", | ||
2555 | compact="externalRefType", | ||
2556 | ) | ||
2557 | # Provides the location of an external reference. | ||
2558 | cls._add_property( | ||
2559 | "locator", | ||
2560 | ListProp(StringProp()), | ||
2561 | iri="https://spdx.org/rdf/3.0.1/terms/Core/locator", | ||
2562 | compact="locator", | ||
2563 | ) | ||
2564 | |||
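For ExternalRef, note that locator is a list property and that contentType is constrained by the `^[^\/]+\/[^\/]+$` pattern above, i.e. a type/subtype shape. A small sketch under the same assumptions as the previous example:

```python
from oe.spdx30 import ExternalRef, ExternalRefType

ref = ExternalRef()
ref.externalRefType = ExternalRefType.vcs            # class attribute holding the IRI
ref.locator = ["https://git.yoctoproject.org/poky"]  # list property
ref.contentType = "text/html"                        # must match the type/subtype pattern
ref.comment = "Upstream version control"
```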
2565 | |||
2566 | # Specifies the type of an external reference. | ||
2567 | @register("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType", compact_type="ExternalRefType", abstract=False) | ||
2568 | class ExternalRefType(SHACLObject): | ||
2569 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2570 | NAMED_INDIVIDUALS = { | ||
2571 | "altDownloadLocation": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altDownloadLocation", | ||
2572 | "altWebPage": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altWebPage", | ||
2573 | "binaryArtifact": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/binaryArtifact", | ||
2574 | "bower": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/bower", | ||
2575 | "buildMeta": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildMeta", | ||
2576 | "buildSystem": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildSystem", | ||
2577 | "certificationReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/certificationReport", | ||
2578 | "chat": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/chat", | ||
2579 | "componentAnalysisReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/componentAnalysisReport", | ||
2580 | "cwe": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/cwe", | ||
2581 | "documentation": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/documentation", | ||
2582 | "dynamicAnalysisReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/dynamicAnalysisReport", | ||
2583 | "eolNotice": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/eolNotice", | ||
2584 | "exportControlAssessment": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/exportControlAssessment", | ||
2585 | "funding": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/funding", | ||
2586 | "issueTracker": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/issueTracker", | ||
2587 | "license": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/license", | ||
2588 | "mailingList": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mailingList", | ||
2589 | "mavenCentral": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mavenCentral", | ||
2590 | "metrics": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/metrics", | ||
2591 | "npm": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/npm", | ||
2592 | "nuget": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/nuget", | ||
2593 | "other": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/other", | ||
2594 | "privacyAssessment": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/privacyAssessment", | ||
2595 | "productMetadata": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/productMetadata", | ||
2596 | "purchaseOrder": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/purchaseOrder", | ||
2597 | "qualityAssessmentReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/qualityAssessmentReport", | ||
2598 | "releaseHistory": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseHistory", | ||
2599 | "releaseNotes": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseNotes", | ||
2600 | "riskAssessment": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/riskAssessment", | ||
2601 | "runtimeAnalysisReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/runtimeAnalysisReport", | ||
2602 | "secureSoftwareAttestation": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/secureSoftwareAttestation", | ||
2603 | "securityAdversaryModel": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdversaryModel", | ||
2604 | "securityAdvisory": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdvisory", | ||
2605 | "securityFix": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityFix", | ||
2606 | "securityOther": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityOther", | ||
2607 | "securityPenTestReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPenTestReport", | ||
2608 | "securityPolicy": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPolicy", | ||
2609 | "securityThreatModel": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityThreatModel", | ||
2610 | "socialMedia": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/socialMedia", | ||
2611 | "sourceArtifact": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/sourceArtifact", | ||
2612 | "staticAnalysisReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/staticAnalysisReport", | ||
2613 | "support": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/support", | ||
2614 | "vcs": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vcs", | ||
2615 | "vulnerabilityDisclosureReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityDisclosureReport", | ||
2616 | "vulnerabilityExploitabilityAssessment": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityExploitabilityAssessment", | ||
2617 | } | ||
2618 | # A reference to an alternative download location. | ||
2619 | altDownloadLocation = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altDownloadLocation" | ||
2620 | # A reference to an alternative web page. | ||
2621 | altWebPage = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altWebPage" | ||
2622 | # A reference to binary artifacts related to a package. | ||
2623 | binaryArtifact = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/binaryArtifact" | ||
2624 |     # A reference to a Bower package. The package locator format, which looks like `package#version`, is defined in the "install" section of [Bower API documentation](https://bower.io/docs/api/#install). | ||
2625 | bower = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/bower" | ||
2626 |     # A reference to build metadata related to a published package. | ||
2627 | buildMeta = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildMeta" | ||
2628 |     # A reference to the build system used to create or publish the package. | ||
2629 | buildSystem = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildSystem" | ||
2630 | # A reference to a certification report for a package from an accredited/independent body. | ||
2631 | certificationReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/certificationReport" | ||
2632 | # A reference to the instant messaging system used by the maintainer for a package. | ||
2633 | chat = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/chat" | ||
2634 | # A reference to a Software Composition Analysis (SCA) report. | ||
2635 | componentAnalysisReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/componentAnalysisReport" | ||
2636 | # [Common Weakness Enumeration](https://csrc.nist.gov/glossary/term/common_weakness_enumeration). A reference to a source of software flaw defined within the official [CWE List](https://cwe.mitre.org/data/) that conforms to the [CWE specification](https://cwe.mitre.org/). | ||
2637 | cwe = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/cwe" | ||
2638 | # A reference to the documentation for a package. | ||
2639 | documentation = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/documentation" | ||
2640 | # A reference to a dynamic analysis report for a package. | ||
2641 | dynamicAnalysisReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/dynamicAnalysisReport" | ||
2642 | # A reference to the End Of Sale (EOS) and/or End Of Life (EOL) information related to a package. | ||
2643 | eolNotice = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/eolNotice" | ||
2644 |     # A reference to an export control assessment for a package. | ||
2645 | exportControlAssessment = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/exportControlAssessment" | ||
2646 | # A reference to funding information related to a package. | ||
2647 | funding = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/funding" | ||
2648 | # A reference to the issue tracker for a package. | ||
2649 | issueTracker = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/issueTracker" | ||
2650 | # A reference to additional license information related to an artifact. | ||
2651 | license = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/license" | ||
2652 | # A reference to the mailing list used by the maintainer for a package. | ||
2653 | mailingList = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mailingList" | ||
2654 | # A reference to a Maven repository artifact. The artifact locator format is defined in the [Maven documentation](https://maven.apache.org/guides/mini/guide-naming-conventions.html) and looks like `groupId:artifactId[:version]`. | ||
2655 | mavenCentral = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mavenCentral" | ||
2656 |     # A reference to metrics related to a package, such as OpenSSF scorecards. | ||
2657 | metrics = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/metrics" | ||
2658 | # A reference to an npm package. The package locator format is defined in the [npm documentation](https://docs.npmjs.com/cli/v10/configuring-npm/package-json) and looks like `package@version`. | ||
2659 | npm = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/npm" | ||
2660 | # A reference to a NuGet package. The package locator format is defined in the [NuGet documentation](https://docs.nuget.org) and looks like `package/version`. | ||
2661 | nuget = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/nuget" | ||
2662 | # Used when the type does not match any of the other options. | ||
2663 | other = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/other" | ||
2664 | # A reference to a privacy assessment for a package. | ||
2665 | privacyAssessment = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/privacyAssessment" | ||
2666 |     # A reference to additional product metadata, such as a reference within an organization's product catalog. | ||
2667 | productMetadata = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/productMetadata" | ||
2668 | # A reference to a purchase order for a package. | ||
2669 | purchaseOrder = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/purchaseOrder" | ||
2670 | # A reference to a quality assessment for a package. | ||
2671 | qualityAssessmentReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/qualityAssessmentReport" | ||
2672 | # A reference to a published list of releases for a package. | ||
2673 | releaseHistory = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseHistory" | ||
2674 | # A reference to the release notes for a package. | ||
2675 | releaseNotes = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseNotes" | ||
2676 | # A reference to a risk assessment for a package. | ||
2677 | riskAssessment = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/riskAssessment" | ||
2678 | # A reference to a runtime analysis report for a package. | ||
2679 | runtimeAnalysisReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/runtimeAnalysisReport" | ||
2680 | # A reference to information assuring that the software is developed using security practices as defined by [NIST SP 800-218 Secure Software Development Framework (SSDF) Version 1.1](https://csrc.nist.gov/pubs/sp/800/218/final) or [CISA Secure Software Development Attestation Form](https://www.cisa.gov/resources-tools/resources/secure-software-development-attestation-form). | ||
2681 | secureSoftwareAttestation = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/secureSoftwareAttestation" | ||
2682 | # A reference to the security adversary model for a package. | ||
2683 | securityAdversaryModel = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdversaryModel" | ||
2684 |     # A reference to a published security advisory (where an advisory is defined per [ISO 29147:2018](https://www.iso.org/standard/72311.html)) that may affect one or more elements, e.g., vendor advisories or specific NVD entries. | ||
2685 | securityAdvisory = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdvisory" | ||
2686 | # A reference to the patch or source code that fixes a vulnerability. | ||
2687 | securityFix = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityFix" | ||
2688 | # A reference to related security information of unspecified type. | ||
2689 | securityOther = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityOther" | ||
2690 | # A reference to a [penetration test](https://en.wikipedia.org/wiki/Penetration_test) report for a package. | ||
2691 | securityPenTestReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPenTestReport" | ||
2692 | # A reference to instructions for reporting newly discovered security vulnerabilities for a package. | ||
2693 | securityPolicy = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPolicy" | ||
2694 |     # A reference to the [security threat model](https://en.wikipedia.org/wiki/Threat_model) for a package. | ||
2695 | securityThreatModel = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityThreatModel" | ||
2696 | # A reference to a social media channel for a package. | ||
2697 | socialMedia = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/socialMedia" | ||
2698 | # A reference to an artifact containing the sources for a package. | ||
2699 | sourceArtifact = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/sourceArtifact" | ||
2700 | # A reference to a static analysis report for a package. | ||
2701 | staticAnalysisReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/staticAnalysisReport" | ||
2702 | # A reference to the software support channel or other support information for a package. | ||
2703 | support = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/support" | ||
2704 | # A reference to a version control system related to a software artifact. | ||
2705 | vcs = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vcs" | ||
2706 | # A reference to a Vulnerability Disclosure Report (VDR) which provides the software supplier's analysis and findings describing the impact (or lack of impact) that reported vulnerabilities have on packages or products in the supplier's SBOM as defined in [NIST SP 800-161 Cybersecurity Supply Chain Risk Management Practices for Systems and Organizations](https://csrc.nist.gov/pubs/sp/800/161/r1/final). | ||
2707 | vulnerabilityDisclosureReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityDisclosureReport" | ||
2708 | # A reference to a Vulnerability Exploitability eXchange (VEX) statement which provides information on whether a product is impacted by a specific vulnerability in an included package and, if affected, whether there are actions recommended to remediate. See also [NTIA VEX one-page summary](https://ntia.gov/files/ntia/publications/vex_one-page_summary.pdf). | ||
2709 | vulnerabilityExploitabilityAssessment = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityExploitabilityAssessment" | ||
2710 | |||
2711 | |||
2712 | # A mathematical algorithm that maps data of arbitrary size to a bit string. | ||
2713 | @register("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm", compact_type="HashAlgorithm", abstract=False) | ||
2714 | class HashAlgorithm(SHACLObject): | ||
2715 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2716 | NAMED_INDIVIDUALS = { | ||
2717 | "adler32": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/adler32", | ||
2718 | "blake2b256": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b256", | ||
2719 | "blake2b384": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b384", | ||
2720 | "blake2b512": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b512", | ||
2721 | "blake3": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake3", | ||
2722 | "crystalsDilithium": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsDilithium", | ||
2723 | "crystalsKyber": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsKyber", | ||
2724 | "falcon": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/falcon", | ||
2725 | "md2": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md2", | ||
2726 | "md4": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md4", | ||
2727 | "md5": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md5", | ||
2728 | "md6": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md6", | ||
2729 | "other": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/other", | ||
2730 | "sha1": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha1", | ||
2731 | "sha224": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha224", | ||
2732 | "sha256": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha256", | ||
2733 | "sha384": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha384", | ||
2734 | "sha3_224": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_224", | ||
2735 | "sha3_256": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_256", | ||
2736 | "sha3_384": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_384", | ||
2737 | "sha3_512": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_512", | ||
2738 | "sha512": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha512", | ||
2739 | } | ||
2740 | # Adler-32 checksum is part of the widely used zlib compression library as defined in [RFC 1950](https://datatracker.ietf.org/doc/rfc1950/) Section 2.3. | ||
2741 | adler32 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/adler32" | ||
2742 | # BLAKE2b algorithm with a digest size of 256, as defined in [RFC 7693](https://datatracker.ietf.org/doc/rfc7693/) Section 4. | ||
2743 | blake2b256 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b256" | ||
2744 | # BLAKE2b algorithm with a digest size of 384, as defined in [RFC 7693](https://datatracker.ietf.org/doc/rfc7693/) Section 4. | ||
2745 | blake2b384 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b384" | ||
2746 | # BLAKE2b algorithm with a digest size of 512, as defined in [RFC 7693](https://datatracker.ietf.org/doc/rfc7693/) Section 4. | ||
2747 | blake2b512 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b512" | ||
2748 | # [BLAKE3](https://github.com/BLAKE3-team/BLAKE3-specs/blob/master/blake3.pdf) | ||
2749 | blake3 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake3" | ||
2750 | # [Dilithium](https://pq-crystals.org/dilithium/) | ||
2751 | crystalsDilithium = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsDilithium" | ||
2752 | # [Kyber](https://pq-crystals.org/kyber/) | ||
2753 | crystalsKyber = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsKyber" | ||
2754 | # [FALCON](https://falcon-sign.info/falcon.pdf) | ||
2755 | falcon = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/falcon" | ||
2756 | # MD2 message-digest algorithm, as defined in [RFC 1319](https://datatracker.ietf.org/doc/rfc1319/). | ||
2757 | md2 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md2" | ||
2758 | # MD4 message-digest algorithm, as defined in [RFC 1186](https://datatracker.ietf.org/doc/rfc1186/). | ||
2759 | md4 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md4" | ||
2760 | # MD5 message-digest algorithm, as defined in [RFC 1321](https://datatracker.ietf.org/doc/rfc1321/). | ||
2761 | md5 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md5" | ||
2762 | # [MD6 hash function](https://people.csail.mit.edu/rivest/pubs/RABCx08.pdf) | ||
2763 | md6 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md6" | ||
2764 | # any hashing algorithm that does not exist in this list of entries | ||
2765 | other = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/other" | ||
2766 | # SHA-1, a secure hashing algorithm, as defined in [RFC 3174](https://datatracker.ietf.org/doc/rfc3174/). | ||
2767 | sha1 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha1" | ||
2768 | # SHA-2 with a digest length of 224, as defined in [RFC 3874](https://datatracker.ietf.org/doc/rfc3874/). | ||
2769 | sha224 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha224" | ||
2770 | # SHA-2 with a digest length of 256, as defined in [RFC 6234](https://datatracker.ietf.org/doc/rfc6234/). | ||
2771 | sha256 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha256" | ||
2772 | # SHA-2 with a digest length of 384, as defined in [RFC 6234](https://datatracker.ietf.org/doc/rfc6234/). | ||
2773 | sha384 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha384" | ||
2774 | # SHA-3 with a digest length of 224, as defined in [FIPS 202](https://csrc.nist.gov/pubs/fips/202/final). | ||
2775 | sha3_224 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_224" | ||
2776 | # SHA-3 with a digest length of 256, as defined in [FIPS 202](https://csrc.nist.gov/pubs/fips/202/final). | ||
2777 | sha3_256 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_256" | ||
2778 | # SHA-3 with a digest length of 384, as defined in [FIPS 202](https://csrc.nist.gov/pubs/fips/202/final). | ||
2779 | sha3_384 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_384" | ||
2780 | # SHA-3 with a digest length of 512, as defined in [FIPS 202](https://csrc.nist.gov/pubs/fips/202/final). | ||
2781 | sha3_512 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_512" | ||
2782 | # SHA-2 with a digest length of 512, as defined in [RFC 6234](https://datatracker.ietf.org/doc/rfc6234/). | ||
2783 | sha512 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha512" | ||
2784 | |||
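Several of the compact names above (sha1, sha256, sha512, and friends) happen to coincide with Python hashlib constructor names, which makes wiring a computed digest to its HashAlgorithm IRI straightforward. A sketch, not taken from this file:

```python
import hashlib

from oe.spdx30 import HashAlgorithm

data = b"example artifact contents"
hash_value = hashlib.sha256(data).hexdigest()
# The IRI to record alongside the digest in serialized SPDX data:
algorithm = HashAlgorithm.sha256
```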
2785 | |||
2786 | # A concrete subclass of Element used by Individuals in the | ||
2787 | # Core profile. | ||
2788 | @register("https://spdx.org/rdf/3.0.1/terms/Core/IndividualElement", compact_type="IndividualElement", abstract=False) | ||
2789 | class IndividualElement(Element): | ||
2790 | NODE_KIND = NodeKind.IRI | ||
2791 | ID_ALIAS = "spdxId" | ||
2792 | NAMED_INDIVIDUALS = { | ||
2793 | "NoAssertionElement": "https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", | ||
2794 | "NoneElement": "https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", | ||
2795 | } | ||
2796 | # An Individual Value for Element representing a set of Elements of unknown | ||
2797 |     # identity or cardinality (number). | ||
2798 | NoAssertionElement = "https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement" | ||
2799 | # An Individual Value for Element representing a set of Elements with | ||
2800 | # cardinality (number/count) of zero. | ||
2801 | NoneElement = "https://spdx.org/rdf/3.0.1/terms/Core/NoneElement" | ||
2802 | |||
2803 | |||
2804 | # Provides an independently reproducible mechanism that permits verification of a specific Element. | ||
2805 | @register("https://spdx.org/rdf/3.0.1/terms/Core/IntegrityMethod", compact_type="IntegrityMethod", abstract=True) | ||
2806 | class IntegrityMethod(SHACLObject): | ||
2807 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2808 | NAMED_INDIVIDUALS = { | ||
2809 | } | ||
2810 | |||
2811 | @classmethod | ||
2812 | def _register_props(cls): | ||
2813 | super()._register_props() | ||
2814 | # Provide consumers with comments by the creator of the Element about the | ||
2815 | # Element. | ||
2816 | cls._add_property( | ||
2817 | "comment", | ||
2818 | StringProp(), | ||
2819 | iri="https://spdx.org/rdf/3.0.1/terms/Core/comment", | ||
2820 | compact="comment", | ||
2821 | ) | ||
2822 | |||
2823 | |||
2824 | # Provide an enumerated set of lifecycle phases that can provide context to relationships. | ||
2825 | @register("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType", compact_type="LifecycleScopeType", abstract=False) | ||
2826 | class LifecycleScopeType(SHACLObject): | ||
2827 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2828 | NAMED_INDIVIDUALS = { | ||
2829 | "build": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/build", | ||
2830 | "design": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/design", | ||
2831 | "development": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/development", | ||
2832 | "other": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/other", | ||
2833 | "runtime": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/runtime", | ||
2834 | "test": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/test", | ||
2835 | } | ||
2836 | # A relationship has specific context implications during an element's build phase, during development. | ||
2837 | build = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/build" | ||
2838 | # A relationship has specific context implications during an element's design. | ||
2839 | design = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/design" | ||
2840 | # A relationship has specific context implications during development phase of an element. | ||
2841 | development = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/development" | ||
2842 | # A relationship has other specific context information necessary to capture that the above set of enumerations does not handle. | ||
2843 | other = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/other" | ||
2844 | # A relationship has specific context implications during the execution phase of an element. | ||
2845 | runtime = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/runtime" | ||
2846 | # A relationship has specific context implications during an element's testing phase, during development. | ||
2847 | test = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/test" | ||
2848 | |||
2849 | |||
2850 | # A mapping between prefixes and namespace partial URIs. | ||
2851 | @register("https://spdx.org/rdf/3.0.1/terms/Core/NamespaceMap", compact_type="NamespaceMap", abstract=False) | ||
2852 | class NamespaceMap(SHACLObject): | ||
2853 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2854 | NAMED_INDIVIDUALS = { | ||
2855 | } | ||
2856 | |||
2857 | @classmethod | ||
2858 | def _register_props(cls): | ||
2859 | super()._register_props() | ||
2860 | # Provides an unambiguous mechanism for conveying a URI fragment portion of an | ||
2861 | # Element ID. | ||
2862 | cls._add_property( | ||
2863 | "namespace", | ||
2864 | AnyURIProp(), | ||
2865 | iri="https://spdx.org/rdf/3.0.1/terms/Core/namespace", | ||
2866 | min_count=1, | ||
2867 | compact="namespace", | ||
2868 | ) | ||
2869 | # A substitute for a URI. | ||
2870 | cls._add_property( | ||
2871 | "prefix", | ||
2872 | StringProp(), | ||
2873 | iri="https://spdx.org/rdf/3.0.1/terms/Core/prefix", | ||
2874 | min_count=1, | ||
2875 | compact="prefix", | ||
2876 | ) | ||
2877 | |||
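NamespaceMap requires both of its properties (min_count=1 on each); the sketch below shows the intended prefix expansion, with example values that are purely hypothetical:

```python
from oe.spdx30 import NamespaceMap

nsmap = NamespaceMap()
nsmap.prefix = "build"                                   # the short form
nsmap.namespace = "https://example.com/spdx/build-123/"  # what it expands to
# With this map, "build:element-1" abbreviates
# "https://example.com/spdx/build-123/element-1" in a serialized document.
```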
2878 | |||
2879 | # An SPDX version 2.X compatible verification method for software packages. | ||
2880 | @register("https://spdx.org/rdf/3.0.1/terms/Core/PackageVerificationCode", compact_type="PackageVerificationCode", abstract=False) | ||
2881 | class PackageVerificationCode(IntegrityMethod): | ||
2882 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2883 | NAMED_INDIVIDUALS = { | ||
2884 | } | ||
2885 | |||
2886 | @classmethod | ||
2887 | def _register_props(cls): | ||
2888 | super()._register_props() | ||
2889 | # Specifies the algorithm used for calculating the hash value. | ||
2890 | cls._add_property( | ||
2891 | "algorithm", | ||
2892 | EnumProp([ | ||
2893 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/adler32", "adler32"), | ||
2894 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b256", "blake2b256"), | ||
2895 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b384", "blake2b384"), | ||
2896 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b512", "blake2b512"), | ||
2897 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake3", "blake3"), | ||
2898 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsDilithium", "crystalsDilithium"), | ||
2899 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsKyber", "crystalsKyber"), | ||
2900 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/falcon", "falcon"), | ||
2901 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md2", "md2"), | ||
2902 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md4", "md4"), | ||
2903 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md5", "md5"), | ||
2904 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md6", "md6"), | ||
2905 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/other", "other"), | ||
2906 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha1", "sha1"), | ||
2907 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha224", "sha224"), | ||
2908 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha256", "sha256"), | ||
2909 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha384", "sha384"), | ||
2910 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_224", "sha3_224"), | ||
2911 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_256", "sha3_256"), | ||
2912 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_384", "sha3_384"), | ||
2913 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_512", "sha3_512"), | ||
2914 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha512", "sha512"), | ||
2915 | ]), | ||
2916 | iri="https://spdx.org/rdf/3.0.1/terms/Core/algorithm", | ||
2917 | min_count=1, | ||
2918 | compact="algorithm", | ||
2919 | ) | ||
2920 | # The result of applying a hash algorithm to an Element. | ||
2921 | cls._add_property( | ||
2922 | "hashValue", | ||
2923 | StringProp(), | ||
2924 | iri="https://spdx.org/rdf/3.0.1/terms/Core/hashValue", | ||
2925 | min_count=1, | ||
2926 | compact="hashValue", | ||
2927 | ) | ||
2928 | # The relative file name of a file to be excluded from the | ||
2929 | # `PackageVerificationCode`. | ||
2930 | cls._add_property( | ||
2931 | "packageVerificationCodeExcludedFile", | ||
2932 | ListProp(StringProp()), | ||
2933 | iri="https://spdx.org/rdf/3.0.1/terms/Core/packageVerificationCodeExcludedFile", | ||
2934 | compact="packageVerificationCodeExcludedFile", | ||
2935 | ) | ||
2936 | |||
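IntegrityMethod above is registered abstract, so concrete subclasses such as PackageVerificationCode are what actually get instantiated. In SPDX 2.x the package verification code is a SHA-1 over the package's file checksums, so sha1 is the natural algorithm here; algorithm and hashValue are both mandatory. A sketch with an example digest:

```python
from oe.spdx30 import HashAlgorithm, PackageVerificationCode

pvc = PackageVerificationCode()
pvc.algorithm = HashAlgorithm.sha1                            # mandatory
pvc.hashValue = "d6a770ba38583ed4bb4525bd96e50461655d2758"    # example digest
pvc.packageVerificationCodeExcludedFile = ["./package.spdx"]  # optional list
```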
2937 | |||
2938 | # A tuple of two positive integers that define a range. | ||
2939 | @register("https://spdx.org/rdf/3.0.1/terms/Core/PositiveIntegerRange", compact_type="PositiveIntegerRange", abstract=False) | ||
2940 | class PositiveIntegerRange(SHACLObject): | ||
2941 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2942 | NAMED_INDIVIDUALS = { | ||
2943 | } | ||
2944 | |||
2945 | @classmethod | ||
2946 | def _register_props(cls): | ||
2947 | super()._register_props() | ||
2948 | # Defines the beginning of a range. | ||
2949 | cls._add_property( | ||
2950 | "beginIntegerRange", | ||
2951 | PositiveIntegerProp(), | ||
2952 | iri="https://spdx.org/rdf/3.0.1/terms/Core/beginIntegerRange", | ||
2953 | min_count=1, | ||
2954 | compact="beginIntegerRange", | ||
2955 | ) | ||
2956 | # Defines the end of a range. | ||
2957 | cls._add_property( | ||
2958 | "endIntegerRange", | ||
2959 | PositiveIntegerProp(), | ||
2960 | iri="https://spdx.org/rdf/3.0.1/terms/Core/endIntegerRange", | ||
2961 | min_count=1, | ||
2962 | compact="endIntegerRange", | ||
2963 | ) | ||
2964 | |||
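PositiveIntegerRange is how byte or line ranges (e.g. for snippets) are expressed; both ends are mandatory. Trivial, but for completeness:

```python
from oe.spdx30 import PositiveIntegerRange

rng = PositiveIntegerRange()
rng.beginIntegerRange = 1   # mandatory, positive integer
rng.endIntegerRange = 420   # mandatory, positive integer
```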
2965 | |||
2966 | # Categories of presence or absence. | ||
2967 | @register("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType", compact_type="PresenceType", abstract=False) | ||
2968 | class PresenceType(SHACLObject): | ||
2969 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2970 | NAMED_INDIVIDUALS = { | ||
2971 | "no": "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/no", | ||
2972 | "noAssertion": "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/noAssertion", | ||
2973 | "yes": "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/yes", | ||
2974 | } | ||
2975 | # Indicates absence of the field. | ||
2976 | no = "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/no" | ||
2977 | # Makes no assertion about the field. | ||
2978 | noAssertion = "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/noAssertion" | ||
2979 | # Indicates presence of the field. | ||
2980 | yes = "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/yes" | ||
2981 | |||
2982 | |||
2983 | # Enumeration of the valid profiles. | ||
2984 | @register("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType", compact_type="ProfileIdentifierType", abstract=False) | ||
2985 | class ProfileIdentifierType(SHACLObject): | ||
2986 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
2987 | NAMED_INDIVIDUALS = { | ||
2988 | "ai": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/ai", | ||
2989 | "build": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/build", | ||
2990 | "core": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/core", | ||
2991 | "dataset": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/dataset", | ||
2992 | "expandedLicensing": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/expandedLicensing", | ||
2993 | "extension": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/extension", | ||
2994 | "lite": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/lite", | ||
2995 | "security": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/security", | ||
2996 | "simpleLicensing": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/simpleLicensing", | ||
2997 | "software": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/software", | ||
2998 | } | ||
2999 | # the element follows the AI profile specification | ||
3000 | ai = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/ai" | ||
3001 | # the element follows the Build profile specification | ||
3002 | build = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/build" | ||
3003 | # the element follows the Core profile specification | ||
3004 | core = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/core" | ||
3005 | # the element follows the Dataset profile specification | ||
3006 | dataset = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/dataset" | ||
3007 | # the element follows the ExpandedLicensing profile specification | ||
3008 | expandedLicensing = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/expandedLicensing" | ||
3009 | # the element follows the Extension profile specification | ||
3010 | extension = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/extension" | ||
3011 | # the element follows the Lite profile specification | ||
3012 | lite = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/lite" | ||
3013 | # the element follows the Security profile specification | ||
3014 | security = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/security" | ||
3015 | # the element follows the SimpleLicensing profile specification | ||
3016 | simpleLicensing = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/simpleLicensing" | ||
3017 | # the element follows the Software profile specification | ||
3018 | software = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/software" | ||
3019 | |||
3020 | |||
3021 | # Describes a relationship between one or more elements. | ||
3022 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Relationship", compact_type="Relationship", abstract=False) | ||
3023 | class Relationship(Element): | ||
3024 | NODE_KIND = NodeKind.IRI | ||
3025 | ID_ALIAS = "spdxId" | ||
3026 | NAMED_INDIVIDUALS = { | ||
3027 | } | ||
3028 | |||
3029 | @classmethod | ||
3030 | def _register_props(cls): | ||
3031 | super()._register_props() | ||
3032 | # Provides information about the completeness of relationships. | ||
3033 | cls._add_property( | ||
3034 | "completeness", | ||
3035 | EnumProp([ | ||
3036 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/complete", "complete"), | ||
3037 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/incomplete", "incomplete"), | ||
3038 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/noAssertion", "noAssertion"), | ||
3039 | ]), | ||
3040 | iri="https://spdx.org/rdf/3.0.1/terms/Core/completeness", | ||
3041 | compact="completeness", | ||
3042 | ) | ||
3043 | # Specifies the time from which an element is no longer applicable / valid. | ||
3044 | cls._add_property( | ||
3045 | "endTime", | ||
3046 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
3047 | iri="https://spdx.org/rdf/3.0.1/terms/Core/endTime", | ||
3048 | compact="endTime", | ||
3049 | ) | ||
3050 | # References the Element on the left-hand side of a relationship. | ||
3051 | cls._add_property( | ||
3052 | "from_", | ||
3053 | ObjectProp(Element, True, context=[ | ||
3054 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", "NoneElement"), | ||
3055 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"), | ||
3056 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"), | ||
3057 | ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"), | ||
3058 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", "NoAssertionElement"), | ||
3059 | ],), | ||
3060 | iri="https://spdx.org/rdf/3.0.1/terms/Core/from", | ||
3061 | min_count=1, | ||
3062 | compact="from", | ||
3063 | ) | ||
3064 | # Information about the relationship between two Elements. | ||
3065 | cls._add_property( | ||
3066 | "relationshipType", | ||
3067 | EnumProp([ | ||
3068 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/affects", "affects"), | ||
3069 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/amendedBy", "amendedBy"), | ||
3070 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/ancestorOf", "ancestorOf"), | ||
3071 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/availableFrom", "availableFrom"), | ||
3072 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/configures", "configures"), | ||
3073 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/contains", "contains"), | ||
3074 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/coordinatedBy", "coordinatedBy"), | ||
3075 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/copiedTo", "copiedTo"), | ||
3076 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/delegatedTo", "delegatedTo"), | ||
3077 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/dependsOn", "dependsOn"), | ||
3078 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/descendantOf", "descendantOf"), | ||
3079 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/describes", "describes"), | ||
3080 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/doesNotAffect", "doesNotAffect"), | ||
3081 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/expandsTo", "expandsTo"), | ||
3082 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/exploitCreatedBy", "exploitCreatedBy"), | ||
3083 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedBy", "fixedBy"), | ||
3084 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedIn", "fixedIn"), | ||
3085 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/foundBy", "foundBy"), | ||
3086 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/generates", "generates"), | ||
3087 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAddedFile", "hasAddedFile"), | ||
3088 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssessmentFor", "hasAssessmentFor"), | ||
3089 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssociatedVulnerability", "hasAssociatedVulnerability"), | ||
3090 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasConcludedLicense", "hasConcludedLicense"), | ||
3091 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDataFile", "hasDataFile"), | ||
3092 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeclaredLicense", "hasDeclaredLicense"), | ||
3093 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeletedFile", "hasDeletedFile"), | ||
3094 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDependencyManifest", "hasDependencyManifest"), | ||
3095 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDistributionArtifact", "hasDistributionArtifact"), | ||
3096 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDocumentation", "hasDocumentation"), | ||
3097 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDynamicLink", "hasDynamicLink"), | ||
3098 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasEvidence", "hasEvidence"), | ||
3099 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasExample", "hasExample"), | ||
3100 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasHost", "hasHost"), | ||
3101 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasInput", "hasInput"), | ||
3102 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasMetadata", "hasMetadata"), | ||
3103 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalComponent", "hasOptionalComponent"), | ||
3104 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalDependency", "hasOptionalDependency"), | ||
3105 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOutput", "hasOutput"), | ||
3106 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasPrerequisite", "hasPrerequisite"), | ||
3107 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasProvidedDependency", "hasProvidedDependency"), | ||
3108 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasRequirement", "hasRequirement"), | ||
3109 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasSpecification", "hasSpecification"), | ||
3110 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasStaticLink", "hasStaticLink"), | ||
3111 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTest", "hasTest"), | ||
3112 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTestCase", "hasTestCase"), | ||
3113 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasVariant", "hasVariant"), | ||
3114 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/invokedBy", "invokedBy"), | ||
3115 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/modifiedBy", "modifiedBy"), | ||
3116 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/other", "other"), | ||
3117 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/packagedBy", "packagedBy"), | ||
3118 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/patchedBy", "patchedBy"), | ||
3119 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/publishedBy", "publishedBy"), | ||
3120 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/reportedBy", "reportedBy"), | ||
3121 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/republishedBy", "republishedBy"), | ||
3122 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/serializedInArtifact", "serializedInArtifact"), | ||
3123 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/testedOn", "testedOn"), | ||
3124 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/trainedOn", "trainedOn"), | ||
3125 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/underInvestigationFor", "underInvestigationFor"), | ||
3126 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/usesTool", "usesTool"), | ||
3127 | ]), | ||
3128 | iri="https://spdx.org/rdf/3.0.1/terms/Core/relationshipType", | ||
3129 | min_count=1, | ||
3130 | compact="relationshipType", | ||
3131 | ) | ||
3132 | # Specifies the time from which an element is applicable / valid. | ||
3133 | cls._add_property( | ||
3134 | "startTime", | ||
3135 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
3136 | iri="https://spdx.org/rdf/3.0.1/terms/Core/startTime", | ||
3137 | compact="startTime", | ||
3138 | ) | ||
3139 | # References an Element on the right-hand side of a relationship. | ||
3140 | cls._add_property( | ||
3141 | "to", | ||
3142 | ListProp(ObjectProp(Element, False, context=[ | ||
3143 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", "NoneElement"), | ||
3144 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"), | ||
3145 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"), | ||
3146 | ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"), | ||
3147 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", "NoAssertionElement"), | ||
3148 | ],)), | ||
3149 | iri="https://spdx.org/rdf/3.0.1/terms/Core/to", | ||
3150 | min_count=1, | ||
3151 | compact="to", | ||
3152 | ) | ||
3153 | |||
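Relationship is the workhorse of the model, and two details above deserve attention: the `from` IRI is exposed as the Python attribute from_ (compact="from") because `from` is a reserved word, and `to` is a mandatory list. The sketch below uses bare IRI strings for the endpoints, on the assumption, common for runtimes like this, that object properties accept either a SHACLObject instance or an IRI reference; the IRIs themselves are hypothetical, and the relationship type IRI is looked up via NAMED_INDIVIDUALS:

```python
from oe.spdx30 import Relationship, RelationshipType

# Hypothetical Element IRIs defined elsewhere in the same document.
pkg = "https://example.com/spdx/package-1"
files = ["https://example.com/spdx/file-1", "https://example.com/spdx/file-2"]

rel = Relationship()
rel._id = "https://example.com/spdx/rel-1"  # spdxId, via ID_ALIAS above
rel.from_ = pkg                             # serialized under the compact name "from"
rel.to = files                              # mandatory list (min_count=1)
rel.relationshipType = RelationshipType.NAMED_INDIVIDUALS["contains"]
```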
3154 | |||
3155 | # Indicates whether a relationship is known to be complete, incomplete, or if no assertion is made with respect to relationship completeness. | ||
3156 | @register("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness", compact_type="RelationshipCompleteness", abstract=False) | ||
3157 | class RelationshipCompleteness(SHACLObject): | ||
3158 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3159 | NAMED_INDIVIDUALS = { | ||
3160 | "complete": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/complete", | ||
3161 | "incomplete": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/incomplete", | ||
3162 | "noAssertion": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/noAssertion", | ||
3163 | } | ||
3164 | # The relationship is known to be exhaustive. | ||
3165 | complete = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/complete" | ||
3166 | # The relationship is known not to be exhaustive. | ||
3167 | incomplete = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/incomplete" | ||
3168 | # No assertion can be made about the completeness of the relationship. | ||
3169 | noAssertion = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/noAssertion" | ||
3170 | |||
3171 | |||
3172 | # Information about the relationship between two Elements. | ||
3173 | @register("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType", compact_type="RelationshipType", abstract=False) | ||
3174 | class RelationshipType(SHACLObject): | ||
3175 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3176 | NAMED_INDIVIDUALS = { | ||
3177 | "affects": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/affects", | ||
3178 | "amendedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/amendedBy", | ||
3179 | "ancestorOf": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/ancestorOf", | ||
3180 | "availableFrom": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/availableFrom", | ||
3181 | "configures": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/configures", | ||
3182 | "contains": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/contains", | ||
3183 | "coordinatedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/coordinatedBy", | ||
3184 | "copiedTo": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/copiedTo", | ||
3185 | "delegatedTo": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/delegatedTo", | ||
3186 | "dependsOn": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/dependsOn", | ||
3187 | "descendantOf": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/descendantOf", | ||
3188 | "describes": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/describes", | ||
3189 | "doesNotAffect": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/doesNotAffect", | ||
3190 | "expandsTo": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/expandsTo", | ||
3191 | "exploitCreatedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/exploitCreatedBy", | ||
3192 | "fixedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedBy", | ||
3193 | "fixedIn": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedIn", | ||
3194 | "foundBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/foundBy", | ||
3195 | "generates": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/generates", | ||
3196 | "hasAddedFile": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAddedFile", | ||
3197 | "hasAssessmentFor": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssessmentFor", | ||
3198 | "hasAssociatedVulnerability": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssociatedVulnerability", | ||
3199 | "hasConcludedLicense": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasConcludedLicense", | ||
3200 | "hasDataFile": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDataFile", | ||
3201 | "hasDeclaredLicense": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeclaredLicense", | ||
3202 | "hasDeletedFile": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeletedFile", | ||
3203 | "hasDependencyManifest": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDependencyManifest", | ||
3204 | "hasDistributionArtifact": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDistributionArtifact", | ||
3205 | "hasDocumentation": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDocumentation", | ||
3206 | "hasDynamicLink": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDynamicLink", | ||
3207 | "hasEvidence": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasEvidence", | ||
3208 | "hasExample": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasExample", | ||
3209 | "hasHost": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasHost", | ||
3210 | "hasInput": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasInput", | ||
3211 | "hasMetadata": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasMetadata", | ||
3212 | "hasOptionalComponent": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalComponent", | ||
3213 | "hasOptionalDependency": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalDependency", | ||
3214 | "hasOutput": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOutput", | ||
3215 | "hasPrerequisite": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasPrerequisite", | ||
3216 | "hasProvidedDependency": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasProvidedDependency", | ||
3217 | "hasRequirement": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasRequirement", | ||
3218 | "hasSpecification": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasSpecification", | ||
3219 | "hasStaticLink": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasStaticLink", | ||
3220 | "hasTest": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTest", | ||
3221 | "hasTestCase": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTestCase", | ||
3222 | "hasVariant": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasVariant", | ||
3223 | "invokedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/invokedBy", | ||
3224 | "modifiedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/modifiedBy", | ||
3225 | "other": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/other", | ||
3226 | "packagedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/packagedBy", | ||
3227 | "patchedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/patchedBy", | ||
3228 | "publishedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/publishedBy", | ||
3229 | "reportedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/reportedBy", | ||
3230 | "republishedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/republishedBy", | ||
3231 | "serializedInArtifact": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/serializedInArtifact", | ||
3232 | "testedOn": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/testedOn", | ||
3233 | "trainedOn": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/trainedOn", | ||
3234 | "underInvestigationFor": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/underInvestigationFor", | ||
3235 | "usesTool": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/usesTool", | ||
3236 | } | ||
3237 | # The `from` Vulnerability affects each `to` Element. The use of the `affects` type is constrained to `VexAffectedVulnAssessmentRelationship` classed relationships. | ||
3238 | affects = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/affects" | ||
3239 | # The `from` Element is amended by each `to` Element. | ||
3240 | amendedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/amendedBy" | ||
3241 | # The `from` Element is an ancestor of each `to` Element. | ||
3242 | ancestorOf = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/ancestorOf" | ||
3243 | # The `from` Element is available from the additional supplier described by each `to` Element. | ||
3244 | availableFrom = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/availableFrom" | ||
3245 | # The `from` Element is a configuration applied to each `to` Element, during a LifecycleScopeType period. | ||
3246 | configures = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/configures" | ||
3247 | # The `from` Element contains each `to` Element. | ||
3248 | contains = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/contains" | ||
3249 | # The `from` Vulnerability is coordinatedBy the `to` Agent(s) (vendor, researcher, or consumer agent). | ||
3250 | coordinatedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/coordinatedBy" | ||
3251 | # The `from` Element has been copied to each `to` Element. | ||
3252 | copiedTo = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/copiedTo" | ||
3253 | # The `from` Agent is delegating an action to the Agent of the `to` Relationship (which must be of type invokedBy), during a LifecycleScopeType (e.g. the `to` invokedBy Relationship is being done on behalf of `from`). | ||
3254 | delegatedTo = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/delegatedTo" | ||
3255 | # The `from` Element depends on each `to` Element, during a LifecycleScopeType period. | ||
3256 | dependsOn = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/dependsOn" | ||
3257 | # The `from` Element is a descendant of each `to` Element. | ||
3258 | descendantOf = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/descendantOf" | ||
3259 | # The `from` Element describes each `to` Element. To denote the root(s) of a tree of elements in a collection, the rootElement property should be used. | ||
3260 | describes = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/describes" | ||
3261 | # The `from` Vulnerability has no impact on each `to` Element. The use of the `doesNotAffect` type is constrained to `VexNotAffectedVulnAssessmentRelationship` classed relationships. | ||
3262 | doesNotAffect = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/doesNotAffect" | ||
3263 | # The `from` archive expands out as an artifact described by each `to` Element. | ||
3264 | expandsTo = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/expandsTo" | ||
3265 | # The `from` Vulnerability has had an exploit created against it by each `to` Agent. | ||
3266 | exploitCreatedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/exploitCreatedBy" | ||
3267 | # Designates a `from` Vulnerability has been fixed by the `to` Agent(s). | ||
3268 | fixedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedBy" | ||
3269 | # A `from` Vulnerability has been fixed in each `to` Element. The use of the `fixedIn` type is constrained to `VexFixedVulnAssessmentRelationship` classed relationships. | ||
3270 | fixedIn = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedIn" | ||
3271 | # Designates a `from` Vulnerability was originally discovered by the `to` Agent(s). | ||
3272 | foundBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/foundBy" | ||
3273 | # The `from` Element generates each `to` Element. | ||
3274 | generates = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/generates" | ||
3275 | # Every `to` Element is a file added to the `from` Element (`from` hasAddedFile `to`). | ||
3276 | hasAddedFile = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAddedFile" | ||
3277 | # Relates a `from` Vulnerability and each `to` Element with a security assessment. To be used with `VulnAssessmentRelationship` types. | ||
3278 | hasAssessmentFor = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssessmentFor" | ||
3279 | # Used to associate a `from` Artifact with each `to` Vulnerability. | ||
3280 | hasAssociatedVulnerability = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssociatedVulnerability" | ||
3281 | # The `from` SoftwareArtifact is concluded by the SPDX data creator to be governed by each `to` license. | ||
3282 | hasConcludedLicense = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasConcludedLicense" | ||
3283 | # The `from` Element treats each `to` Element as a data file. A data file is an artifact that stores data required or optional for the `from` Element's functionality. A data file can be a database file, an index file, a log file, an AI model file, a calibration data file, a temporary file, a backup file, and more. For AI training dataset, test dataset, test artifact, configuration data, build input data, and build output data, please consider using the more specific relationship types: `trainedOn`, `testedOn`, `hasTest`, `configures`, `hasInput`, and `hasOutput`, respectively. This relationship does not imply dependency. | ||
3284 | hasDataFile = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDataFile" | ||
3285 | # The `from` SoftwareArtifact was discovered to actually contain each `to` license, for example as detected by use of automated tooling. | ||
3286 | hasDeclaredLicense = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeclaredLicense" | ||
3287 | # Every `to` Element is a file deleted from the `from` Element (`from` hasDeletedFile `to`). | ||
3288 | hasDeletedFile = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeletedFile" | ||
3289 | # The `from` Element has manifest files that contain dependency information in each `to` Element. | ||
3290 | hasDependencyManifest = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDependencyManifest" | ||
3291 | # The `from` Element is distributed as an artifact in each `to` Element (e.g. an RPM or archive file). | ||
3292 | hasDistributionArtifact = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDistributionArtifact" | ||
3293 | # The `from` Element is documented by each `to` Element. | ||
3294 | hasDocumentation = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDocumentation" | ||
3295 | # The `from` Element dynamically links in each `to` Element, during a LifecycleScopeType period. | ||
3296 | hasDynamicLink = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDynamicLink" | ||
3297 | # Every `to` Element is considered as evidence for the `from` Element (`from` hasEvidence `to`). | ||
3298 | hasEvidence = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasEvidence" | ||
3299 | # Every `to` Element is an example for the `from` Element (`from` hasExample `to`). | ||
3300 | hasExample = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasExample" | ||
3301 | # The `from` Build was run on the `to` Element during a LifecycleScopeType period (e.g. the host that the build runs on). | ||
3302 | hasHost = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasHost" | ||
3303 | # The `from` Build has each `to` Element as an input, during a LifecycleScopeType period. | ||
3304 | hasInput = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasInput" | ||
3305 | # Every `to` Element is metadata about the `from` Element (`from` hasMetadata `to`). | ||
3306 | hasMetadata = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasMetadata" | ||
3307 | # Every `to` Element is an optional component of the `from` Element (`from` hasOptionalComponent `to`). | ||
3308 | hasOptionalComponent = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalComponent" | ||
3309 | # The `from` Element optionally depends on each `to` Element, during a LifecycleScopeType period. | ||
3310 | hasOptionalDependency = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalDependency" | ||
3311 | # The `from` Build element generates each `to` Element as an output, during a LifecycleScopeType period. | ||
3312 | hasOutput = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOutput" | ||
3313 | # The `from` Element has a prerequisite on each `to` Element, during a LifecycleScopeType period. | ||
3314 | hasPrerequisite = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasPrerequisite" | ||
3315 | # The `from` Element has a dependency on each `to` Element; the dependency is not in the distributed artifact, but is assumed to be provided, during a LifecycleScopeType period. | ||
3316 | hasProvidedDependency = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasProvidedDependency" | ||
3317 | # The `from` Element has a requirement on each `to` Element, during a LifecycleScopeType period. | ||
3318 | hasRequirement = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasRequirement" | ||
3319 | # Every `to` Element is a specification for the `from` Element (`from` hasSpecification `to`), during a LifecycleScopeType period. | ||
3320 | hasSpecification = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasSpecification" | ||
3321 | # The `from` Element statically links in each `to` Element, during a LifecycleScopeType period. | ||
3322 | hasStaticLink = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasStaticLink" | ||
3323 | # Every `to` Element is a test artifact for the `from` Element (`from` hasTest `to`), during a LifecycleScopeType period. | ||
3324 | hasTest = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTest" | ||
3325 | # Every `to` Element is a test case for the `from` Element (`from` hasTestCase `to`). | ||
3326 | hasTestCase = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTestCase" | ||
3327 | # Every `to` Element is a variant of the `from` Element (`from` hasVariant `to`). | ||
3328 | hasVariant = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasVariant" | ||
3329 | # The `from` Element was invoked by the `to` Agent, during a LifecycleScopeType period (for example, a Build element that describes a build step). | ||
3330 | invokedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/invokedBy" | ||
3331 | # The `from` Element is modified by each `to` Element. | ||
3332 | modifiedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/modifiedBy" | ||
3333 | # Every `to` Element is related to the `from` Element where the relationship type is not described by any of the SPDX relationship types (this relationship is directionless). | ||
3334 | other = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/other" | ||
3335 | # Every `to` Element is a packaged instance of the `from` Element (`from` packagedBy `to`). | ||
3336 | packagedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/packagedBy" | ||
3337 | # Every `to` Element is a patch for the `from` Element (`from` patchedBy `to`). | ||
3338 | patchedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/patchedBy" | ||
3339 | # Designates a `from` Vulnerability was made available for public use or reference by each `to` Agent. | ||
3340 | publishedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/publishedBy" | ||
3341 | # Designates a `from` Vulnerability was first reported to a project, vendor, or tracking database for formal identification by each `to` Agent. | ||
3342 | reportedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/reportedBy" | ||
3343 | # Designates a `from` Vulnerability's details were tracked, aggregated, and/or enriched to improve context (i.e. NVD) by each `to` Agent. | ||
3344 | republishedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/republishedBy" | ||
3345 | # The `from` SpdxDocument can be found in a serialized form in each `to` Artifact. | ||
3346 | serializedInArtifact = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/serializedInArtifact" | ||
3347 | # The `from` Element has been tested on the `to` Element(s). | ||
3348 | testedOn = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/testedOn" | ||
3349 | # The `from` Element has been trained on the `to` Element(s). | ||
3350 | trainedOn = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/trainedOn" | ||
3351 | # The `from` Vulnerability impact is being investigated for each `to` Element. The use of the `underInvestigationFor` type is constrained to `VexUnderInvestigationVulnAssessmentRelationship` classed relationships. | ||
3352 | underInvestigationFor = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/underInvestigationFor" | ||
3353 | # The `from` Element uses each `to` Element as a tool, during a LifecycleScopeType period. | ||
3354 | usesTool = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/usesTool" | ||
3355 | |||
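As a quick illustration (not in the patch): these generated vocabulary classes act as string enums, exposing each named individual both as a class attribute holding its full IRI and as a key in NAMED_INDIVIDUALS.

    # Sketch: vocabulary values are plain IRI strings.
    assert RelationshipType.dependsOn == \
        "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/dependsOn"
    assert "dependsOn" in RelationshipType.NAMED_INDIVIDUALS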
3356 | |||
3357 | # A collection of SPDX Elements that could potentially be serialized. | ||
3358 | @register("https://spdx.org/rdf/3.0.1/terms/Core/SpdxDocument", compact_type="SpdxDocument", abstract=False) | ||
3359 | class SpdxDocument(ElementCollection): | ||
3360 | NODE_KIND = NodeKind.IRI | ||
3361 | ID_ALIAS = "spdxId" | ||
3362 | NAMED_INDIVIDUALS = { | ||
3363 | } | ||
3364 | |||
3365 | @classmethod | ||
3366 | def _register_props(cls): | ||
3367 | super()._register_props() | ||
3368 | # Provides the license under which the SPDX documentation of the Element can be | ||
3369 | # used. | ||
3370 | cls._add_property( | ||
3371 | "dataLicense", | ||
3372 | ObjectProp(simplelicensing_AnyLicenseInfo, False, context=[ | ||
3373 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"), | ||
3374 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"), | ||
3375 | ],), | ||
3376 | iri="https://spdx.org/rdf/3.0.1/terms/Core/dataLicense", | ||
3377 | compact="dataLicense", | ||
3378 | ) | ||
3379 | # Provides an ExternalMap of Element identifiers. | ||
3380 | cls._add_property( | ||
3381 | "import_", | ||
3382 | ListProp(ObjectProp(ExternalMap, False)), | ||
3383 | iri="https://spdx.org/rdf/3.0.1/terms/Core/import", | ||
3384 | compact="import", | ||
3385 | ) | ||
3386 | # Provides a NamespaceMap of prefixes and associated namespace partial URIs applicable to an SpdxDocument and independent of any specific serialization format or instance. | ||
3387 | cls._add_property( | ||
3388 | "namespaceMap", | ||
3389 | ListProp(ObjectProp(NamespaceMap, False)), | ||
3390 | iri="https://spdx.org/rdf/3.0.1/terms/Core/namespaceMap", | ||
3391 | compact="namespaceMap", | ||
3392 | ) | ||
3393 | |||
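One detail worth noting above: because `import` is a Python keyword, the property is registered as `import_` while its compact (serialized) name remains `import`. A minimal sketch under the same keyword-argument-constructor assumption; the NamespaceMap field names are taken to be `prefix` and `namespace`, and all values are placeholders:

    doc = SpdxDocument(
        _id="https://example.org/spdx/doc",
        namespaceMap=[NamespaceMap(prefix="ex", namespace="https://example.org/")],
    )
    doc.import_ = []   # serialized under the compact name "import"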
3394 | |||
3395 | # Indicates the type of support that is associated with an artifact. | ||
3396 | @register("https://spdx.org/rdf/3.0.1/terms/Core/SupportType", compact_type="SupportType", abstract=False) | ||
3397 | class SupportType(SHACLObject): | ||
3398 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3399 | NAMED_INDIVIDUALS = { | ||
3400 | "deployed": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/deployed", | ||
3401 | "development": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/development", | ||
3402 | "endOfSupport": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/endOfSupport", | ||
3403 | "limitedSupport": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/limitedSupport", | ||
3404 | "noAssertion": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noAssertion", | ||
3405 | "noSupport": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noSupport", | ||
3406 | "support": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/support", | ||
3407 | } | ||
3408 | # in addition to being supported by the supplier, the software is known to have been deployed and is in use. For a software as a service provider, this implies the software is now available as a service. | ||
3409 | deployed = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/deployed" | ||
3410 | # the artifact is in active development and is not considered ready for formal support from the supplier. | ||
3411 | development = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/development" | ||
3412 | # there is a defined end of support for the artifact from the supplier. This may also be referred to as end of life. There is a validUntilDate that can be used to signal when support ends for the artifact. | ||
3413 | endOfSupport = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/endOfSupport" | ||
3414 | # the artifact has been released, and there is limited support available from the supplier. There is a validUntilDate that can provide additional information about the duration of support. | ||
3415 | limitedSupport = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/limitedSupport" | ||
3416 | # no assertion about the type of support is made. This is considered the default if no other support type is used. | ||
3417 | noAssertion = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noAssertion" | ||
3418 | # there is no support for the artifact from the supplier; the consumer assumes any support obligations. | ||
3419 | noSupport = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noSupport" | ||
3420 | # the artifact has been released, and is supported from the supplier. There is a validUntilDate that can provide additional information about the duration of support. | ||
3421 | support = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/support" | ||
3422 | |||
3423 | |||
3424 | # An element of hardware and/or software utilized to carry out a particular function. | ||
3425 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Tool", compact_type="Tool", abstract=False) | ||
3426 | class Tool(Element): | ||
3427 | NODE_KIND = NodeKind.IRI | ||
3428 | ID_ALIAS = "spdxId" | ||
3429 | NAMED_INDIVIDUALS = { | ||
3430 | } | ||
3431 | |||
3432 | |||
3433 | # Categories of confidentiality level. | ||
3434 | @register("https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType", compact_type="dataset_ConfidentialityLevelType", abstract=False) | ||
3435 | class dataset_ConfidentialityLevelType(SHACLObject): | ||
3436 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3437 | NAMED_INDIVIDUALS = { | ||
3438 | "amber": "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/amber", | ||
3439 | "clear": "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/clear", | ||
3440 | "green": "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/green", | ||
3441 | "red": "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/red", | ||
3442 | } | ||
3443 | # Data points in the dataset can be shared only with specific organizations and their clients on a need to know basis. | ||
3444 | amber = "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/amber" | ||
3445 | # Dataset may be distributed freely, without restriction. | ||
3446 | clear = "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/clear" | ||
3447 | # Dataset can be shared within a community of peers and partners. | ||
3448 | green = "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/green" | ||
3449 | # Data points in the dataset are highly confidential and can only be shared with named recipients. | ||
3450 | red = "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/red" | ||
3451 | |||
3452 | |||
3453 | # Availability of dataset. | ||
3454 | @register("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType", compact_type="dataset_DatasetAvailabilityType", abstract=False) | ||
3455 | class dataset_DatasetAvailabilityType(SHACLObject): | ||
3456 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3457 | NAMED_INDIVIDUALS = { | ||
3458 | "clickthrough": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/clickthrough", | ||
3459 | "directDownload": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/directDownload", | ||
3460 | "query": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/query", | ||
3461 | "registration": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/registration", | ||
3462 | "scrapingScript": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/scrapingScript", | ||
3463 | } | ||
3464 | # the dataset is not publicly available and can only be accessed after affirmatively accepting terms on a clickthrough webpage. | ||
3465 | clickthrough = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/clickthrough" | ||
3466 | # the dataset is publicly available and can be downloaded directly. | ||
3467 | directDownload = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/directDownload" | ||
3468 | # the dataset is publicly available, but not all at once, and can only be accessed through queries which return parts of the dataset. | ||
3469 | query = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/query" | ||
3470 | # the dataset is not publicly available and an email registration is required before accessing the dataset, although without an affirmative acceptance of terms. | ||
3471 | registration = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/registration" | ||
3472 | # the dataset provider is not making the underlying data available, and the dataset must be reassembled, typically using the provided script for scraping the data. | ||
3473 | scrapingScript = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/scrapingScript" | ||
3474 | |||
3475 | |||
3476 | # Enumeration of dataset types. | ||
3477 | @register("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType", compact_type="dataset_DatasetType", abstract=False) | ||
3478 | class dataset_DatasetType(SHACLObject): | ||
3479 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3480 | NAMED_INDIVIDUALS = { | ||
3481 | "audio": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/audio", | ||
3482 | "categorical": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/categorical", | ||
3483 | "graph": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/graph", | ||
3484 | "image": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/image", | ||
3485 | "noAssertion": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/noAssertion", | ||
3486 | "numeric": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/numeric", | ||
3487 | "other": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/other", | ||
3488 | "sensor": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/sensor", | ||
3489 | "structured": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/structured", | ||
3490 | "syntactic": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/syntactic", | ||
3491 | "text": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/text", | ||
3492 | "timeseries": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timeseries", | ||
3493 | "timestamp": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timestamp", | ||
3494 | "video": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/video", | ||
3495 | } | ||
3496 | # data is audio based, such as a collection of music from the 80s. | ||
3497 | audio = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/audio" | ||
3498 | # data that is classified into a discrete number of categories, such as the eye color of a population of people. | ||
3499 | categorical = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/categorical" | ||
3500 | # data is in the form of a graph where entries are somehow related to each other through edges, such as a social network of friends. | ||
3501 | graph = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/graph" | ||
3502 | # data is a collection of images such as pictures of animals. | ||
3503 | image = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/image" | ||
3504 | # data type is not known. | ||
3505 | noAssertion = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/noAssertion" | ||
3506 | # data consists only of numeric entries. | ||
3507 | numeric = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/numeric" | ||
3508 | # data is of a type not included in this list. | ||
3509 | other = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/other" | ||
3510 | # data is recorded from a physical sensor, such as a thermometer reading or biometric device. | ||
3511 | sensor = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/sensor" | ||
3512 | # data is stored in tabular format or retrieved from a relational database. | ||
3513 | structured = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/structured" | ||
3514 | # data describes the syntax or semantics of a language or text, such as a parse tree used for natural language processing. | ||
3515 | syntactic = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/syntactic" | ||
3516 | # data consists of unstructured text, such as a book, Wikipedia article (without images), or transcript. | ||
3517 | text = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/text" | ||
3518 | # data is recorded in an ordered sequence of timestamped entries, such as the price of a stock over the course of a day. | ||
3519 | timeseries = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timeseries" | ||
3520 | # data is recorded with a timestamp for each entry, but not necessarily ordered or at specific intervals, such as when a taxi ride starts and ends. | ||
3521 | timestamp = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timestamp" | ||
3522 | # data is video based, such as a collection of movie clips featuring Tom Hanks. | ||
3523 | video = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/video" | ||
3524 | |||
3525 | |||
3526 | # Abstract class for additional text intended to be added to a License, but | ||
3527 | # which is not itself a standalone License. | ||
3528 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/LicenseAddition", compact_type="expandedlicensing_LicenseAddition", abstract=True) | ||
3529 | class expandedlicensing_LicenseAddition(Element): | ||
3530 | NODE_KIND = NodeKind.IRI | ||
3531 | ID_ALIAS = "spdxId" | ||
3532 | NAMED_INDIVIDUALS = { | ||
3533 | } | ||
3534 | |||
3535 | @classmethod | ||
3536 | def _register_props(cls): | ||
3537 | super()._register_props() | ||
3538 | # Identifies the full text of a LicenseAddition. | ||
3539 | cls._add_property( | ||
3540 | "expandedlicensing_additionText", | ||
3541 | StringProp(), | ||
3542 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/additionText", | ||
3543 | min_count=1, | ||
3544 | compact="expandedlicensing_additionText", | ||
3545 | ) | ||
3546 | # Specifies whether an additional text identifier has been marked as deprecated. | ||
3547 | cls._add_property( | ||
3548 | "expandedlicensing_isDeprecatedAdditionId", | ||
3549 | BooleanProp(), | ||
3550 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/isDeprecatedAdditionId", | ||
3551 | compact="expandedlicensing_isDeprecatedAdditionId", | ||
3552 | ) | ||
3553 | # Identifies all the text and metadata associated with a license in the license | ||
3554 | # XML format. | ||
3555 | cls._add_property( | ||
3556 | "expandedlicensing_licenseXml", | ||
3557 | StringProp(), | ||
3558 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/licenseXml", | ||
3559 | compact="expandedlicensing_licenseXml", | ||
3560 | ) | ||
3561 | # Specifies the licenseId that is preferred to be used in place of a deprecated | ||
3562 | # License or LicenseAddition. | ||
3563 | cls._add_property( | ||
3564 | "expandedlicensing_obsoletedBy", | ||
3565 | StringProp(), | ||
3566 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/obsoletedBy", | ||
3567 | compact="expandedlicensing_obsoletedBy", | ||
3568 | ) | ||
3569 | # Contains a URL where the License or LicenseAddition can be found in use. | ||
3570 | cls._add_property( | ||
3571 | "expandedlicensing_seeAlso", | ||
3572 | ListProp(AnyURIProp()), | ||
3573 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/seeAlso", | ||
3574 | compact="expandedlicensing_seeAlso", | ||
3575 | ) | ||
3576 | # Identifies the full text of a LicenseAddition, in SPDX templating format. | ||
3577 | cls._add_property( | ||
3578 | "expandedlicensing_standardAdditionTemplate", | ||
3579 | StringProp(), | ||
3580 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/standardAdditionTemplate", | ||
3581 | compact="expandedlicensing_standardAdditionTemplate", | ||
3582 | ) | ||
3583 | |||
3584 | |||
3585 | # A license exception that is listed on the SPDX Exceptions list. | ||
3586 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/ListedLicenseException", compact_type="expandedlicensing_ListedLicenseException", abstract=False) | ||
3587 | class expandedlicensing_ListedLicenseException(expandedlicensing_LicenseAddition): | ||
3588 | NODE_KIND = NodeKind.IRI | ||
3589 | ID_ALIAS = "spdxId" | ||
3590 | NAMED_INDIVIDUALS = { | ||
3591 | } | ||
3592 | |||
3593 | @classmethod | ||
3594 | def _register_props(cls): | ||
3595 | super()._register_props() | ||
3596 | # Specifies the SPDX License List version in which this license or exception | ||
3597 | # identifier was deprecated. | ||
3598 | cls._add_property( | ||
3599 | "expandedlicensing_deprecatedVersion", | ||
3600 | StringProp(), | ||
3601 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/deprecatedVersion", | ||
3602 | compact="expandedlicensing_deprecatedVersion", | ||
3603 | ) | ||
3604 | # Specifies the SPDX License List version in which this ListedLicense or | ||
3605 | # ListedLicenseException identifier was first added. | ||
3606 | cls._add_property( | ||
3607 | "expandedlicensing_listVersionAdded", | ||
3608 | StringProp(), | ||
3609 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/listVersionAdded", | ||
3610 | compact="expandedlicensing_listVersionAdded", | ||
3611 | ) | ||
3612 | |||
3613 | |||
3614 | # A property name with an associated value. | ||
3615 | @register("https://spdx.org/rdf/3.0.1/terms/Extension/CdxPropertyEntry", compact_type="extension_CdxPropertyEntry", abstract=False) | ||
3616 | class extension_CdxPropertyEntry(SHACLObject): | ||
3617 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3618 | NAMED_INDIVIDUALS = { | ||
3619 | } | ||
3620 | |||
3621 | @classmethod | ||
3622 | def _register_props(cls): | ||
3623 | super()._register_props() | ||
3624 | # A name used in a CdxPropertyEntry name-value pair. | ||
3625 | cls._add_property( | ||
3626 | "extension_cdxPropName", | ||
3627 | StringProp(), | ||
3628 | iri="https://spdx.org/rdf/3.0.1/terms/Extension/cdxPropName", | ||
3629 | min_count=1, | ||
3630 | compact="extension_cdxPropName", | ||
3631 | ) | ||
3632 | # A value used in a CdxPropertyEntry name-value pair. | ||
3633 | cls._add_property( | ||
3634 | "extension_cdxPropValue", | ||
3635 | StringProp(), | ||
3636 | iri="https://spdx.org/rdf/3.0.1/terms/Extension/cdxPropValue", | ||
3637 | compact="extension_cdxPropValue", | ||
3638 | ) | ||
3639 | |||
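For example (sketch, not from the patch), a CycloneDX-style name/value pair could be constructed as below; note that only `extension_cdxPropName` carries min_count=1 and is therefore required:

    entry = extension_CdxPropertyEntry(
        extension_cdxPropName="build:reproducible",   # hypothetical property name
        extension_cdxPropValue="true",
    )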
3640 | |||
3641 | # A characterization of some aspect of an Element that is associated with the Element in a generalized fashion. | ||
3642 | @register("https://spdx.org/rdf/3.0.1/terms/Extension/Extension", compact_type="extension_Extension", abstract=True) | ||
3643 | class extension_Extension(SHACLExtensibleObject, SHACLObject): | ||
3644 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3645 | NAMED_INDIVIDUALS = { | ||
3646 | } | ||
3647 | |||
3648 | |||
3649 | # Specifies the CVSS base, temporal, threat, or environmental severity type. | ||
3650 | @register("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType", compact_type="security_CvssSeverityType", abstract=False) | ||
3651 | class security_CvssSeverityType(SHACLObject): | ||
3652 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3653 | NAMED_INDIVIDUALS = { | ||
3654 | "critical": "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/critical", | ||
3655 | "high": "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/high", | ||
3656 | "low": "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/low", | ||
3657 | "medium": "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/medium", | ||
3658 | "none": "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/none", | ||
3659 | } | ||
3660 | # When a CVSS score is between 9.0 and 10.0 | ||
3661 | critical = "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/critical" | ||
3662 | # When a CVSS score is between 7.0 and 8.9 | ||
3663 | high = "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/high" | ||
3664 | # When a CVSS score is between 0.1 and 3.9 | ||
3665 | low = "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/low" | ||
3666 | # When a CVSS score is between 4.0 and 6.9 | ||
3667 | medium = "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/medium" | ||
3668 | # When a CVSS score is 0.0 | ||
3669 | none = "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/none" | ||
3670 | |||
3671 | |||
3672 | # Specifies the exploit catalog type. | ||
3673 | @register("https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType", compact_type="security_ExploitCatalogType", abstract=False) | ||
3674 | class security_ExploitCatalogType(SHACLObject): | ||
3675 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3676 | NAMED_INDIVIDUALS = { | ||
3677 | "kev": "https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/kev", | ||
3678 | "other": "https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/other", | ||
3679 | } | ||
3680 | # CISA's Known Exploited Vulnerability (KEV) Catalog | ||
3681 | kev = "https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/kev" | ||
3682 | # Other exploit catalogs | ||
3683 | other = "https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/other" | ||
3684 | |||
3685 | |||
3686 | # Specifies the SSVC decision type. | ||
3687 | @register("https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType", compact_type="security_SsvcDecisionType", abstract=False) | ||
3688 | class security_SsvcDecisionType(SHACLObject): | ||
3689 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3690 | NAMED_INDIVIDUALS = { | ||
3691 | "act": "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/act", | ||
3692 | "attend": "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/attend", | ||
3693 | "track": "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/track", | ||
3694 | "trackStar": "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/trackStar", | ||
3695 | } | ||
3696 | # The vulnerability requires attention from the organization's internal, supervisory-level and leadership-level individuals. Necessary actions include requesting assistance or information about the vulnerability, as well as publishing a notification internally and/or externally. Typically, internal groups would meet to determine the overall response and then execute agreed-upon actions. CISA recommends remediating Act vulnerabilities as soon as possible. | ||
3697 | act = "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/act" | ||
3698 | # The vulnerability requires attention from the organization's internal, supervisory-level individuals. Necessary actions include requesting assistance or information about the vulnerability, and may involve publishing a notification internally and/or externally. CISA recommends remediating Attend vulnerabilities sooner than standard update timelines. | ||
3699 | attend = "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/attend" | ||
3700 | # The vulnerability does not require action at this time. The organization would continue to track the vulnerability and reassess it if new information becomes available. CISA recommends remediating Track vulnerabilities within standard update timelines. | ||
3701 | track = "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/track" | ||
3702 | # ("Track\*" in the SSVC spec) The vulnerability contains specific characteristics that may require closer monitoring for changes. CISA recommends remediating Track\* vulnerabilities within standard update timelines. | ||
3703 | trackStar = "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/trackStar" | ||
3704 | |||
3705 | |||
3706 | # Specifies the VEX justification type. | ||
3707 | @register("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType", compact_type="security_VexJustificationType", abstract=False) | ||
3708 | class security_VexJustificationType(SHACLObject): | ||
3709 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3710 | NAMED_INDIVIDUALS = { | ||
3711 | "componentNotPresent": "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/componentNotPresent", | ||
3712 | "inlineMitigationsAlreadyExist": "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/inlineMitigationsAlreadyExist", | ||
3713 | "vulnerableCodeCannotBeControlledByAdversary": "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeCannotBeControlledByAdversary", | ||
3714 | "vulnerableCodeNotInExecutePath": "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotInExecutePath", | ||
3715 | "vulnerableCodeNotPresent": "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotPresent", | ||
3716 | } | ||
3717 | # The software is not affected because the vulnerable component is not in the product. | ||
3718 | componentNotPresent = "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/componentNotPresent" | ||
3719 | # Built-in inline controls or mitigations prevent an adversary from leveraging the vulnerability. | ||
3720 | inlineMitigationsAlreadyExist = "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/inlineMitigationsAlreadyExist" | ||
3721 | # The vulnerable component is present, and the component contains the vulnerable code. However, vulnerable code is used in such a way that an attacker cannot mount any anticipated attack. | ||
3722 | vulnerableCodeCannotBeControlledByAdversary = "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeCannotBeControlledByAdversary" | ||
3723 | # The affected code is not reachable through the execution of the code, including non-anticipated states of the product. | ||
3724 | vulnerableCodeNotInExecutePath = "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotInExecutePath" | ||
3725 | # The product is not affected because the code underlying the vulnerability is not present in the product. | ||
3726 | vulnerableCodeNotPresent = "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotPresent" | ||
3727 | |||
3728 | |||
3729 | # Abstract ancestor class for all vulnerability assessments | ||
3730 | @register("https://spdx.org/rdf/3.0.1/terms/Security/VulnAssessmentRelationship", compact_type="security_VulnAssessmentRelationship", abstract=True) | ||
3731 | class security_VulnAssessmentRelationship(Relationship): | ||
3732 | NODE_KIND = NodeKind.IRI | ||
3733 | ID_ALIAS = "spdxId" | ||
3734 | NAMED_INDIVIDUALS = { | ||
3735 | } | ||
3736 | |||
3737 | @classmethod | ||
3738 | def _register_props(cls): | ||
3739 | super()._register_props() | ||
3740 | # Identifies who or what supplied the artifact or VulnAssessmentRelationship | ||
3741 | # referenced by the Element. | ||
3742 | cls._add_property( | ||
3743 | "suppliedBy", | ||
3744 | ObjectProp(Agent, False, context=[ | ||
3745 | ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"), | ||
3746 | ],), | ||
3747 | iri="https://spdx.org/rdf/3.0.1/terms/Core/suppliedBy", | ||
3748 | compact="suppliedBy", | ||
3749 | ) | ||
3750 | # Specifies an Element contained in a piece of software where a vulnerability was | ||
3751 | # found. | ||
3752 | cls._add_property( | ||
3753 | "security_assessedElement", | ||
3754 | ObjectProp(software_SoftwareArtifact, False), | ||
3755 | iri="https://spdx.org/rdf/3.0.1/terms/Security/assessedElement", | ||
3756 | compact="security_assessedElement", | ||
3757 | ) | ||
3758 | # Specifies a time when a vulnerability assessment was modified. | ||
3759 | cls._add_property( | ||
3760 | "security_modifiedTime", | ||
3761 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
3762 | iri="https://spdx.org/rdf/3.0.1/terms/Security/modifiedTime", | ||
3763 | compact="security_modifiedTime", | ||
3764 | ) | ||
3765 | # Specifies the time when a vulnerability was published. | ||
3766 | cls._add_property( | ||
3767 | "security_publishedTime", | ||
3768 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
3769 | iri="https://spdx.org/rdf/3.0.1/terms/Security/publishedTime", | ||
3770 | compact="security_publishedTime", | ||
3771 | ) | ||
3772 | # Specifies the time and date when a vulnerability was withdrawn. | ||
3773 | cls._add_property( | ||
3774 | "security_withdrawnTime", | ||
3775 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
3776 | iri="https://spdx.org/rdf/3.0.1/terms/Security/withdrawnTime", | ||
3777 | compact="security_withdrawnTime", | ||
3778 | ) | ||
3779 | |||
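All four timestamp properties above share the same anchored pattern: second-resolution UTC times ending in a literal `Z`. A small standard-library sketch for producing a conforming value:

    from datetime import datetime, timezone

    # Matches ^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$ (no fractional seconds, UTC only).
    ts = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")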
3780 | |||
3781 | # Abstract class representing a license combination consisting of one or more licenses. | ||
3782 | @register("https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/AnyLicenseInfo", compact_type="simplelicensing_AnyLicenseInfo", abstract=True) | ||
3783 | class simplelicensing_AnyLicenseInfo(Element): | ||
3784 | NODE_KIND = NodeKind.IRI | ||
3785 | ID_ALIAS = "spdxId" | ||
3786 | NAMED_INDIVIDUALS = { | ||
3787 | } | ||
3788 | |||
3789 | |||
3790 | # An SPDX Element containing an SPDX license expression string. | ||
3791 | @register("https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/LicenseExpression", compact_type="simplelicensing_LicenseExpression", abstract=False) | ||
3792 | class simplelicensing_LicenseExpression(simplelicensing_AnyLicenseInfo): | ||
3793 | NODE_KIND = NodeKind.IRI | ||
3794 | ID_ALIAS = "spdxId" | ||
3795 | NAMED_INDIVIDUALS = { | ||
3796 | } | ||
3797 | |||
3798 | @classmethod | ||
3799 | def _register_props(cls): | ||
3800 | super()._register_props() | ||
3801 | # Maps a LicenseRef or AdditionRef string for a Custom License or a Custom | ||
3802 | # License Addition to its URI ID. | ||
3803 | cls._add_property( | ||
3804 | "simplelicensing_customIdToUri", | ||
3805 | ListProp(ObjectProp(DictionaryEntry, False)), | ||
3806 | iri="https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/customIdToUri", | ||
3807 | compact="simplelicensing_customIdToUri", | ||
3808 | ) | ||
3809 | # A string in the license expression format. | ||
3810 | cls._add_property( | ||
3811 | "simplelicensing_licenseExpression", | ||
3812 | StringProp(), | ||
3813 | iri="https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/licenseExpression", | ||
3814 | min_count=1, | ||
3815 | compact="simplelicensing_licenseExpression", | ||
3816 | ) | ||
3817 | # The version of the SPDX License List used in the license expression. | ||
3818 | cls._add_property( | ||
3819 | "simplelicensing_licenseListVersion", | ||
3820 | StringProp(pattern=r"^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$",), | ||
3821 | iri="https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/licenseListVersion", | ||
3822 | compact="simplelicensing_licenseListVersion", | ||
3823 | ) | ||
3824 | |||
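A sketch of the common case (same constructor assumption as earlier; the identifier is a placeholder): only `simplelicensing_licenseExpression` is required, and the list version must be a semver-shaped string to satisfy the pattern above.

    lic = simplelicensing_LicenseExpression(
        _id="https://example.org/spdx/license-1",
        simplelicensing_licenseExpression="MIT AND Zlib",
        simplelicensing_licenseListVersion="3.24.0",   # semver, per the pattern
    )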
3825 | |||
3826 | # A license or addition that is not listed on the SPDX License List. | ||
3827 | @register("https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/SimpleLicensingText", compact_type="simplelicensing_SimpleLicensingText", abstract=False) | ||
3828 | class simplelicensing_SimpleLicensingText(Element): | ||
3829 | NODE_KIND = NodeKind.IRI | ||
3830 | ID_ALIAS = "spdxId" | ||
3831 | NAMED_INDIVIDUALS = { | ||
3832 | } | ||
3833 | |||
3834 | @classmethod | ||
3835 | def _register_props(cls): | ||
3836 | super()._register_props() | ||
3837 | # Identifies the full text of a License or Addition. | ||
3838 | cls._add_property( | ||
3839 | "simplelicensing_licenseText", | ||
3840 | StringProp(), | ||
3841 | iri="https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/licenseText", | ||
3842 | min_count=1, | ||
3843 | compact="simplelicensing_licenseText", | ||
3844 | ) | ||
3845 | |||
3846 | |||
3847 | # A canonical, unique, immutable identifier | ||
3848 | @register("https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifier", compact_type="software_ContentIdentifier", abstract=False) | ||
3849 | class software_ContentIdentifier(IntegrityMethod): | ||
3850 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3851 | NAMED_INDIVIDUALS = { | ||
3852 | } | ||
3853 | |||
3854 | @classmethod | ||
3855 | def _register_props(cls): | ||
3856 | super()._register_props() | ||
3857 | # Specifies the type of the content identifier. | ||
3858 | cls._add_property( | ||
3859 | "software_contentIdentifierType", | ||
3860 | EnumProp([ | ||
3861 | ("https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/gitoid", "gitoid"), | ||
3862 | ("https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/swhid", "swhid"), | ||
3863 | ]), | ||
3864 | iri="https://spdx.org/rdf/3.0.1/terms/Software/contentIdentifierType", | ||
3865 | min_count=1, | ||
3866 | compact="software_contentIdentifierType", | ||
3867 | ) | ||
3868 | # Specifies the value of the content identifier. | ||
3869 | cls._add_property( | ||
3870 | "software_contentIdentifierValue", | ||
3871 | AnyURIProp(), | ||
3872 | iri="https://spdx.org/rdf/3.0.1/terms/Software/contentIdentifierValue", | ||
3873 | min_count=1, | ||
3874 | compact="software_contentIdentifierValue", | ||
3875 | ) | ||
3876 | |||
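Putting the two required fields together (sketch; the SWHID value is the example quoted in the vocabulary description below):

    cid = software_ContentIdentifier(
        software_contentIdentifierType=software_ContentIdentifierType.swhid,
        software_contentIdentifierValue="swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2",
    )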
3877 | |||
3878 | # Specifies the type of a content identifier. | ||
3879 | @register("https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType", compact_type="software_ContentIdentifierType", abstract=False) | ||
3880 | class software_ContentIdentifierType(SHACLObject): | ||
3881 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3882 | NAMED_INDIVIDUALS = { | ||
3883 | "gitoid": "https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/gitoid", | ||
3884 | "swhid": "https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/swhid", | ||
3885 | } | ||
3886 | # [Gitoid](https://www.iana.org/assignments/uri-schemes/prov/gitoid), stands for [Git Object ID](https://git-scm.com/book/en/v2/Git-Internals-Git-Objects). A gitoid of type blob is a unique hash of a binary artifact. A gitoid may represent either an [Artifact Identifier](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-identifier-types) for the software artifact or an [Input Manifest Identifier](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#input-manifest-identifier) for the software artifact's associated [Artifact Input Manifest](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-input-manifest); this ambiguity exists because the Artifact Input Manifest is itself an artifact, and the gitoid of that artifact is its valid identifier. Gitoids calculated on software artifacts (Snippet, File, or Package Elements) should be recorded in the SPDX 3.0 SoftwareArtifact's contentIdentifier property. Gitoids calculated on the Artifact Input Manifest (Input Manifest Identifier) should be recorded in the SPDX 3.0 Element's externalIdentifier property. See [OmniBOR Specification](https://github.com/omnibor/spec/), a minimalistic specification for describing software [Artifact Dependency Graphs](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-dependency-graph-adg). | ||
3887 | gitoid = "https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/gitoid" | ||
3888 | # SoftWare Hash IDentifier, a persistent intrinsic identifier for digital artifacts, such as files, trees (also known as directories or folders), commits, and other objects typically found in version control systems. The format of the identifiers is defined in the [SWHID specification](https://www.swhid.org/specification/v1.1/4.Syntax) (ISO/IEC DIS 18670). They typically look like `swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2`. | ||
3889 | swhid = "https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/swhid" | ||
3890 | |||
3891 | |||
3892 | # Enumeration of the different kinds of SPDX file. | ||
3893 | @register("https://spdx.org/rdf/3.0.1/terms/Software/FileKindType", compact_type="software_FileKindType", abstract=False) | ||
3894 | class software_FileKindType(SHACLObject): | ||
3895 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3896 | NAMED_INDIVIDUALS = { | ||
3897 | "directory": "https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/directory", | ||
3898 | "file": "https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/file", | ||
3899 | } | ||
3900 | # The file represents a directory and all content stored in that directory. | ||
3901 | directory = "https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/directory" | ||
3902 | # The file represents a single file (default). | ||
3903 | file = "https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/file" | ||
3904 | |||
3905 | |||
3906 | # Provides a set of values to be used to describe the common types of SBOMs that | ||
3907 | # tools may create. | ||
3908 | @register("https://spdx.org/rdf/3.0.1/terms/Software/SbomType", compact_type="software_SbomType", abstract=False) | ||
3909 | class software_SbomType(SHACLObject): | ||
3910 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3911 | NAMED_INDIVIDUALS = { | ||
3912 | "analyzed": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/analyzed", | ||
3913 | "build": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/build", | ||
3914 | "deployed": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/deployed", | ||
3915 | "design": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/design", | ||
3916 | "runtime": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/runtime", | ||
3917 | "source": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/source", | ||
3918 | } | ||
3919 | # SBOM generated through analysis of artifacts (e.g., executables, packages, containers, and virtual machine images) after their build. Such analysis generally requires a variety of heuristics. In some contexts, this may also be referred to as a "3rd party" SBOM. | ||
3920 | analyzed = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/analyzed" | ||
3921 | # SBOM generated as part of the process of building the software to create a releasable artifact (e.g., executable or package) from data such as source files, dependencies, built components, build process ephemeral data, and other SBOMs. | ||
3922 | build = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/build" | ||
3923 | # SBOM provides an inventory of software that is present on a system. This may be an assembly of other SBOMs that combines analysis of configuration options, and examination of execution behavior in a (potentially simulated) deployment environment. | ||
3924 | deployed = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/deployed" | ||
3925 | # SBOM of an intended, planned software project or product with included components (some of which may not yet exist) for a new software artifact. | ||
3926 | design = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/design" | ||
3927 | # SBOM generated through instrumenting the system running the software, to capture only components present in the system, as well as external call-outs or dynamically loaded components. In some contexts, this may also be referred to as an "Instrumented" or "Dynamic" SBOM. | ||
3928 | runtime = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/runtime" | ||
3929 | # SBOM created directly from the development environment, source files, and included dependencies used to build a product artifact. | ||
3930 | source = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/source" | ||
3931 | |||
3932 | |||
3933 | # Provides information about the primary purpose of an Element. | ||
3934 | @register("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose", compact_type="software_SoftwarePurpose", abstract=False) | ||
3935 | class software_SoftwarePurpose(SHACLObject): | ||
3936 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
3937 | NAMED_INDIVIDUALS = { | ||
3938 | "application": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/application", | ||
3939 | "archive": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/archive", | ||
3940 | "bom": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/bom", | ||
3941 | "configuration": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/configuration", | ||
3942 | "container": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/container", | ||
3943 | "data": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/data", | ||
3944 | "device": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/device", | ||
3945 | "deviceDriver": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/deviceDriver", | ||
3946 | "diskImage": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/diskImage", | ||
3947 | "documentation": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/documentation", | ||
3948 | "evidence": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/evidence", | ||
3949 | "executable": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/executable", | ||
3950 | "file": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/file", | ||
3951 | "filesystemImage": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/filesystemImage", | ||
3952 | "firmware": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/firmware", | ||
3953 | "framework": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/framework", | ||
3954 | "install": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/install", | ||
3955 | "library": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/library", | ||
3956 | "manifest": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/manifest", | ||
3957 | "model": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/model", | ||
3958 | "module": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/module", | ||
3959 | "operatingSystem": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/operatingSystem", | ||
3960 | "other": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/other", | ||
3961 | "patch": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/patch", | ||
3962 | "platform": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/platform", | ||
3963 | "requirement": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/requirement", | ||
3964 | "source": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/source", | ||
3965 | "specification": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/specification", | ||
3966 | "test": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/test", | ||
3967 | } | ||
3968 | # The Element is a software application. | ||
3969 | application = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/application" | ||
3970 | # The Element is an archived collection of one or more files (.tar, .zip, etc.). | ||
3971 | archive = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/archive" | ||
3972 | # The Element is a bill of materials. | ||
3973 | bom = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/bom" | ||
3974 | # The Element is configuration data. | ||
3975 | configuration = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/configuration" | ||
3976 | # The Element is a container image which can be used by a container runtime application. | ||
3977 | container = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/container" | ||
3978 | # The Element is data. | ||
3979 | data = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/data" | ||
3980 | # The Element refers to a chipset, processor, or electronic board. | ||
3981 | device = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/device" | ||
3982 | # The Element represents software that controls hardware devices. | ||
3983 | deviceDriver = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/deviceDriver" | ||
3984 | # The Element refers to a disk image that can be written to a disk, booted in a VM, etc. A disk image typically contains most or all of the components necessary to boot, such as bootloaders, kernels, firmware, userspace, etc. | ||
3985 | diskImage = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/diskImage" | ||
3986 | # The Element is documentation. | ||
3987 | documentation = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/documentation" | ||
3988 | # The Element is the evidence that a specification or requirement has been fulfilled. | ||
3989 | evidence = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/evidence" | ||
3990 | # The Element is an Artifact that can be run on a computer. | ||
3991 | executable = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/executable" | ||
3992 | # The Element is a single file which can be independently distributed (configuration file, statically linked binary, Kubernetes deployment, etc.). | ||
3993 | file = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/file" | ||
3994 | # The Element is a file system image that can be written to a disk (or virtual) partition. | ||
3995 | filesystemImage = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/filesystemImage" | ||
3996 | # The Element provides low level control over a device's hardware. | ||
3997 | firmware = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/firmware" | ||
3998 | # The Element is a software framework. | ||
3999 | framework = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/framework" | ||
4000 | # The Element is used to install software on disk. | ||
4001 | install = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/install" | ||
4002 | # The Element is a software library. | ||
4003 | library = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/library" | ||
4004 | # The Element is a software manifest. | ||
4005 | manifest = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/manifest" | ||
4006 | # The Element is a machine learning or artificial intelligence model. | ||
4007 | model = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/model" | ||
4008 | # The Element is a module of a piece of software. | ||
4009 | module = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/module" | ||
4010 | # The Element is an operating system. | ||
4011 | operatingSystem = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/operatingSystem" | ||
4012 | # The Element doesn't fit into any of the other categories. | ||
4013 | other = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/other" | ||
4014 | # The Element contains a set of changes to update, fix, or improve another Element. | ||
4015 | patch = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/patch" | ||
4016 | # The Element represents a runtime environment. | ||
4017 | platform = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/platform" | ||
4018 | # The Element provides a requirement needed as input for another Element. | ||
4019 | requirement = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/requirement" | ||
4020 | # The Element is a single or a collection of source files. | ||
4021 | source = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/source" | ||
4022 | # The Element is a plan, guideline, or strategy for how to create, perform, or analyze an application. | ||
4023 | specification = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/specification" | ||
4024 | # The Element is a test used to verify functionality of a software element. | ||
4025 | test = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/test" | ||
4026 | |||
4027 | |||
4028 | # Class that describes a build instance of software/artifacts. | ||
4029 | @register("https://spdx.org/rdf/3.0.1/terms/Build/Build", compact_type="build_Build", abstract=False) | ||
4030 | class build_Build(Element): | ||
4031 | NODE_KIND = NodeKind.IRI | ||
4032 | ID_ALIAS = "spdxId" | ||
4033 | NAMED_INDIVIDUALS = { | ||
4034 | } | ||
4035 | |||
4036 | @classmethod | ||
4037 | def _register_props(cls): | ||
4038 | super()._register_props() | ||
4039 | # Property that describes the time at which a build stops. | ||
4040 | cls._add_property( | ||
4041 | "build_buildEndTime", | ||
4042 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
4043 | iri="https://spdx.org/rdf/3.0.1/terms/Build/buildEndTime", | ||
4044 | compact="build_buildEndTime", | ||
4045 | ) | ||
4046 | # A buildId is a locally unique identifier used by a builder to identify a unique | ||
4047 | # instance of a build produced by it. | ||
4048 | cls._add_property( | ||
4049 | "build_buildId", | ||
4050 | StringProp(), | ||
4051 | iri="https://spdx.org/rdf/3.0.1/terms/Build/buildId", | ||
4052 | compact="build_buildId", | ||
4053 | ) | ||
4054 | # Property describing the start time of a build. | ||
4055 | cls._add_property( | ||
4056 | "build_buildStartTime", | ||
4057 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
4058 | iri="https://spdx.org/rdf/3.0.1/terms/Build/buildStartTime", | ||
4059 | compact="build_buildStartTime", | ||
4060 | ) | ||
4061 | # A buildType is a hint that is used to indicate the toolchain, platform, or | ||
4062 | # infrastructure that the build was invoked on. | ||
4063 | cls._add_property( | ||
4064 | "build_buildType", | ||
4065 | AnyURIProp(), | ||
4066 | iri="https://spdx.org/rdf/3.0.1/terms/Build/buildType", | ||
4067 | min_count=1, | ||
4068 | compact="build_buildType", | ||
4069 | ) | ||
4070 | # Property that describes the digest of the build configuration file used to | ||
4071 | # invoke a build. | ||
4072 | cls._add_property( | ||
4073 | "build_configSourceDigest", | ||
4074 | ListProp(ObjectProp(Hash, False)), | ||
4075 | iri="https://spdx.org/rdf/3.0.1/terms/Build/configSourceDigest", | ||
4076 | compact="build_configSourceDigest", | ||
4077 | ) | ||
4078 | # Property that describes the invocation entrypoint of a build. | ||
4079 | cls._add_property( | ||
4080 | "build_configSourceEntrypoint", | ||
4081 | ListProp(StringProp()), | ||
4082 | iri="https://spdx.org/rdf/3.0.1/terms/Build/configSourceEntrypoint", | ||
4083 | compact="build_configSourceEntrypoint", | ||
4084 | ) | ||
4085 | # Property that describes the URI of the build configuration source file. | ||
4086 | cls._add_property( | ||
4087 | "build_configSourceUri", | ||
4088 | ListProp(AnyURIProp()), | ||
4089 | iri="https://spdx.org/rdf/3.0.1/terms/Build/configSourceUri", | ||
4090 | compact="build_configSourceUri", | ||
4091 | ) | ||
4092 | # Property describing the session in which a build is invoked. | ||
4093 | cls._add_property( | ||
4094 | "build_environment", | ||
4095 | ListProp(ObjectProp(DictionaryEntry, False)), | ||
4096 | iri="https://spdx.org/rdf/3.0.1/terms/Build/environment", | ||
4097 | compact="build_environment", | ||
4098 | ) | ||
4099 | # Property describing a parameter used in an instance of a build. | ||
4100 | cls._add_property( | ||
4101 | "build_parameter", | ||
4102 | ListProp(ObjectProp(DictionaryEntry, False)), | ||
4103 | iri="https://spdx.org/rdf/3.0.1/terms/Build/parameter", | ||
4104 | compact="build_parameter", | ||
4105 | ) | ||
4106 | |||
4107 | |||
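A minimal sketch of how the registered properties surface on an instance, assuming the keyword-argument constructor these generated SHACLObject classes provide; all IRIs and values are illustrative, and inherited Element properties such as creationInfo are omitted for brevity:

build = build_Build(
    _id="http://example.org/builds/4711",                  # hypothetical spdxId
    build_buildType="http://example.org/buildtypes/make",  # required (min_count=1)
    build_buildId="ci-run-4711",
    build_buildStartTime="2024-01-01T00:00:00Z",           # must match the timestamp pattern
    build_buildEndTime="2024-01-01T00:05:00Z",
)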
4108 | # Agent represents anything with the potential to act on a system. | ||
4109 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Agent", compact_type="Agent", abstract=False) | ||
4110 | class Agent(Element): | ||
4111 | NODE_KIND = NodeKind.IRI | ||
4112 | ID_ALIAS = "spdxId" | ||
4113 | NAMED_INDIVIDUALS = { | ||
4114 | } | ||
4115 | |||
4116 | |||
4117 | # An assertion made in relation to one or more elements. | ||
4118 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Annotation", compact_type="Annotation", abstract=False) | ||
4119 | class Annotation(Element): | ||
4120 | NODE_KIND = NodeKind.IRI | ||
4121 | ID_ALIAS = "spdxId" | ||
4122 | NAMED_INDIVIDUALS = { | ||
4123 | } | ||
4124 | |||
4125 | @classmethod | ||
4126 | def _register_props(cls): | ||
4127 | super()._register_props() | ||
4128 | # Describes the type of annotation. | ||
4129 | cls._add_property( | ||
4130 | "annotationType", | ||
4131 | EnumProp([ | ||
4132 | ("https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/other", "other"), | ||
4133 | ("https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/review", "review"), | ||
4134 | ]), | ||
4135 | iri="https://spdx.org/rdf/3.0.1/terms/Core/annotationType", | ||
4136 | min_count=1, | ||
4137 | compact="annotationType", | ||
4138 | ) | ||
4139 | # Provides information about the content type of an Element or a Property. | ||
4140 | cls._add_property( | ||
4141 | "contentType", | ||
4142 | StringProp(pattern=r"^[^\/]+\/[^\/]+$",), | ||
4143 | iri="https://spdx.org/rdf/3.0.1/terms/Core/contentType", | ||
4144 | compact="contentType", | ||
4145 | ) | ||
4146 | # Commentary on an assertion that an annotator has made. | ||
4147 | cls._add_property( | ||
4148 | "statement", | ||
4149 | StringProp(), | ||
4150 | iri="https://spdx.org/rdf/3.0.1/terms/Core/statement", | ||
4151 | compact="statement", | ||
4152 | ) | ||
4153 | # An Element an annotator has made an assertion about. | ||
4154 | cls._add_property( | ||
4155 | "subject", | ||
4156 | ObjectProp(Element, True, context=[ | ||
4157 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", "NoneElement"), | ||
4158 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"), | ||
4159 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"), | ||
4160 | ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"), | ||
4161 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", "NoAssertionElement"), | ||
4162 | ],), | ||
4163 | iri="https://spdx.org/rdf/3.0.1/terms/Core/subject", | ||
4164 | min_count=1, | ||
4165 | compact="subject", | ||
4166 | ) | ||
4167 | |||
4168 | |||
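A minimal usage sketch, again assuming the keyword-argument constructor of the generated classes. contentType must match the <type>/<subtype> pattern enforced above, and annotationType takes one of the enum IRIs listed in the EnumProp; all identifiers and values here are illustrative:

ann = Annotation(
    _id="http://example.org/annotations/1",  # hypothetical spdxId
    annotationType="https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/review",
    contentType="text/plain",                # matches ^[^\/]+\/[^\/]+$
    statement="Reviewed and approved for release.",
    subject=some_element,                    # the Element the assertion is about (required)
)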
4169 | # A distinct article or unit within the digital domain. | ||
4170 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Artifact", compact_type="Artifact", abstract=True) | ||
4171 | class Artifact(Element): | ||
4172 | NODE_KIND = NodeKind.IRI | ||
4173 | ID_ALIAS = "spdxId" | ||
4174 | NAMED_INDIVIDUALS = { | ||
4175 | } | ||
4176 | |||
4177 | @classmethod | ||
4178 | def _register_props(cls): | ||
4179 | super()._register_props() | ||
4180 | # Specifies the time an artifact was built. | ||
4181 | cls._add_property( | ||
4182 | "builtTime", | ||
4183 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
4184 | iri="https://spdx.org/rdf/3.0.1/terms/Core/builtTime", | ||
4185 | compact="builtTime", | ||
4186 | ) | ||
4187 | # Identifies from where or whom the Element originally came. | ||
4188 | cls._add_property( | ||
4189 | "originatedBy", | ||
4190 | ListProp(ObjectProp(Agent, False, context=[ | ||
4191 | ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"), | ||
4192 | ],)), | ||
4193 | iri="https://spdx.org/rdf/3.0.1/terms/Core/originatedBy", | ||
4194 | compact="originatedBy", | ||
4195 | ) | ||
4196 | # Specifies the time an artifact was released. | ||
4197 | cls._add_property( | ||
4198 | "releaseTime", | ||
4199 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
4200 | iri="https://spdx.org/rdf/3.0.1/terms/Core/releaseTime", | ||
4201 | compact="releaseTime", | ||
4202 | ) | ||
4203 | # The name of a relevant standard that may apply to an artifact. | ||
4204 | cls._add_property( | ||
4205 | "standardName", | ||
4206 | ListProp(StringProp()), | ||
4207 | iri="https://spdx.org/rdf/3.0.1/terms/Core/standardName", | ||
4208 | compact="standardName", | ||
4209 | ) | ||
4210 | # Identifies who or what supplied the artifact or VulnAssessmentRelationship | ||
4211 | # referenced by the Element. | ||
4212 | cls._add_property( | ||
4213 | "suppliedBy", | ||
4214 | ObjectProp(Agent, False, context=[ | ||
4215 | ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"), | ||
4216 | ],), | ||
4217 | iri="https://spdx.org/rdf/3.0.1/terms/Core/suppliedBy", | ||
4218 | compact="suppliedBy", | ||
4219 | ) | ||
4220 | # Specifies the level of support associated with an artifact. | ||
4221 | cls._add_property( | ||
4222 | "supportLevel", | ||
4223 | ListProp(EnumProp([ | ||
4224 | ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/deployed", "deployed"), | ||
4225 | ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/development", "development"), | ||
4226 | ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/endOfSupport", "endOfSupport"), | ||
4227 | ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/limitedSupport", "limitedSupport"), | ||
4228 | ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noAssertion", "noAssertion"), | ||
4229 | ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noSupport", "noSupport"), | ||
4230 | ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/support", "support"), | ||
4231 | ])), | ||
4232 | iri="https://spdx.org/rdf/3.0.1/terms/Core/supportLevel", | ||
4233 | compact="supportLevel", | ||
4234 | ) | ||
4235 | # Specifies until when the artifact can be used before its usage needs to be | ||
4236 | # reassessed. | ||
4237 | cls._add_property( | ||
4238 | "validUntilTime", | ||
4239 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
4240 | iri="https://spdx.org/rdf/3.0.1/terms/Core/validUntilTime", | ||
4241 | compact="validUntilTime", | ||
4242 | ) | ||
4243 | |||
4244 | |||
4245 | # A collection of Elements that have a shared context. | ||
4246 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Bundle", compact_type="Bundle", abstract=False) | ||
4247 | class Bundle(ElementCollection): | ||
4248 | NODE_KIND = NodeKind.IRI | ||
4249 | ID_ALIAS = "spdxId" | ||
4250 | NAMED_INDIVIDUALS = { | ||
4251 | } | ||
4252 | |||
4253 | @classmethod | ||
4254 | def _register_props(cls): | ||
4255 | super()._register_props() | ||
4256 | # Gives information about the circumstances or unifying properties | ||
4257 | # that Elements of the bundle have been assembled under. | ||
4258 | cls._add_property( | ||
4259 | "context", | ||
4260 | StringProp(), | ||
4261 | iri="https://spdx.org/rdf/3.0.1/terms/Core/context", | ||
4262 | compact="context", | ||
4263 | ) | ||
4264 | |||
4265 | |||
4266 | # A mathematically calculated representation of a grouping of data. | ||
4267 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Hash", compact_type="Hash", abstract=False) | ||
4268 | class Hash(IntegrityMethod): | ||
4269 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
4270 | NAMED_INDIVIDUALS = { | ||
4271 | } | ||
4272 | |||
4273 | @classmethod | ||
4274 | def _register_props(cls): | ||
4275 | super()._register_props() | ||
4276 | # Specifies the algorithm used for calculating the hash value. | ||
4277 | cls._add_property( | ||
4278 | "algorithm", | ||
4279 | EnumProp([ | ||
4280 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/adler32", "adler32"), | ||
4281 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b256", "blake2b256"), | ||
4282 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b384", "blake2b384"), | ||
4283 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b512", "blake2b512"), | ||
4284 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake3", "blake3"), | ||
4285 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsDilithium", "crystalsDilithium"), | ||
4286 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsKyber", "crystalsKyber"), | ||
4287 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/falcon", "falcon"), | ||
4288 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md2", "md2"), | ||
4289 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md4", "md4"), | ||
4290 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md5", "md5"), | ||
4291 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md6", "md6"), | ||
4292 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/other", "other"), | ||
4293 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha1", "sha1"), | ||
4294 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha224", "sha224"), | ||
4295 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha256", "sha256"), | ||
4296 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha384", "sha384"), | ||
4297 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_224", "sha3_224"), | ||
4298 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_256", "sha3_256"), | ||
4299 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_384", "sha3_384"), | ||
4300 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_512", "sha3_512"), | ||
4301 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha512", "sha512"), | ||
4302 | ]), | ||
4303 | iri="https://spdx.org/rdf/3.0.1/terms/Core/algorithm", | ||
4304 | min_count=1, | ||
4305 | compact="algorithm", | ||
4306 | ) | ||
4307 | # The result of applying a hash algorithm to an Element. | ||
4308 | cls._add_property( | ||
4309 | "hashValue", | ||
4310 | StringProp(), | ||
4311 | iri="https://spdx.org/rdf/3.0.1/terms/Core/hashValue", | ||
4312 | min_count=1, | ||
4313 | compact="hashValue", | ||
4314 | ) | ||
4315 | |||
4316 | |||
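Both algorithm and hashValue carry min_count=1, so a usable Hash always pairs the two. A minimal sketch wrapping a file digest (the helper name is hypothetical; the algorithm value is the enum IRI from the list above):

import hashlib

def sha256_integrity(path):
    # Digest the file and record it as a Hash integrity method.
    with open(path, "rb") as f:
        digest = hashlib.sha256(f.read()).hexdigest()
    return Hash(
        algorithm="https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha256",
        hashValue=digest,
    )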
4317 | # Provide context for a relationship that occurs in the lifecycle. | ||
4318 | @register("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopedRelationship", compact_type="LifecycleScopedRelationship", abstract=False) | ||
4319 | class LifecycleScopedRelationship(Relationship): | ||
4320 | NODE_KIND = NodeKind.IRI | ||
4321 | ID_ALIAS = "spdxId" | ||
4322 | NAMED_INDIVIDUALS = { | ||
4323 | } | ||
4324 | |||
4325 | @classmethod | ||
4326 | def _register_props(cls): | ||
4327 | super()._register_props() | ||
4328 | # Captures the scope of information about a specific relationship between elements. | ||
4329 | cls._add_property( | ||
4330 | "scope", | ||
4331 | EnumProp([ | ||
4332 | ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/build", "build"), | ||
4333 | ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/design", "design"), | ||
4334 | ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/development", "development"), | ||
4335 | ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/other", "other"), | ||
4336 | ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/runtime", "runtime"), | ||
4337 | ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/test", "test"), | ||
4338 | ]), | ||
4339 | iri="https://spdx.org/rdf/3.0.1/terms/Core/scope", | ||
4340 | compact="scope", | ||
4341 | ) | ||
4342 | |||
4343 | |||
4344 | # A group of people who work together in an organized way for a shared purpose. | ||
4345 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Organization", compact_type="Organization", abstract=False) | ||
4346 | class Organization(Agent): | ||
4347 | NODE_KIND = NodeKind.IRI | ||
4348 | ID_ALIAS = "spdxId" | ||
4349 | NAMED_INDIVIDUALS = { | ||
4350 | "SpdxOrganization": "https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", | ||
4351 | } | ||
4352 | # An Organization representing the SPDX Project. | ||
4353 | SpdxOrganization = "https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization" | ||
4354 | |||
4355 | |||
4356 | # An individual human being. | ||
4357 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Person", compact_type="Person", abstract=False) | ||
4358 | class Person(Agent): | ||
4359 | NODE_KIND = NodeKind.IRI | ||
4360 | ID_ALIAS = "spdxId" | ||
4361 | NAMED_INDIVIDUALS = { | ||
4362 | } | ||
4363 | |||
4364 | |||
4365 | # A software agent. | ||
4366 | @register("https://spdx.org/rdf/3.0.1/terms/Core/SoftwareAgent", compact_type="SoftwareAgent", abstract=False) | ||
4367 | class SoftwareAgent(Agent): | ||
4368 | NODE_KIND = NodeKind.IRI | ||
4369 | ID_ALIAS = "spdxId" | ||
4370 | NAMED_INDIVIDUALS = { | ||
4371 | } | ||
4372 | |||
4373 | |||
4374 | # Portion of an AnyLicenseInfo representing a set of licensing information | ||
4375 | # where all elements apply. | ||
4376 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/ConjunctiveLicenseSet", compact_type="expandedlicensing_ConjunctiveLicenseSet", abstract=False) | ||
4377 | class expandedlicensing_ConjunctiveLicenseSet(simplelicensing_AnyLicenseInfo): | ||
4378 | NODE_KIND = NodeKind.IRI | ||
4379 | ID_ALIAS = "spdxId" | ||
4380 | NAMED_INDIVIDUALS = { | ||
4381 | } | ||
4382 | |||
4383 | @classmethod | ||
4384 | def _register_props(cls): | ||
4385 | super()._register_props() | ||
4386 | # A license expression participating in a license set. | ||
4387 | cls._add_property( | ||
4388 | "expandedlicensing_member", | ||
4389 | ListProp(ObjectProp(simplelicensing_AnyLicenseInfo, False, context=[ | ||
4390 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"), | ||
4391 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"), | ||
4392 | ],)), | ||
4393 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/member", | ||
4394 | min_count=2, | ||
4395 | compact="expandedlicensing_member", | ||
4396 | ) | ||
4397 | |||
4398 | |||
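Note the min_count=2 on expandedlicensing_member: a conjunctive set is only meaningful with at least two participating licenses. A minimal sketch, assuming lic_a and lic_b are previously created License objects (identifiers are illustrative):

license_set = expandedlicensing_ConjunctiveLicenseSet(
    _id="http://example.org/licenses/set-1",  # hypothetical spdxId
    expandedlicensing_member=[lic_a, lic_b],  # at least two members required
)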
4399 | # A license addition that is not listed on the SPDX Exceptions List. | ||
4400 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/CustomLicenseAddition", compact_type="expandedlicensing_CustomLicenseAddition", abstract=False) | ||
4401 | class expandedlicensing_CustomLicenseAddition(expandedlicensing_LicenseAddition): | ||
4402 | NODE_KIND = NodeKind.IRI | ||
4403 | ID_ALIAS = "spdxId" | ||
4404 | NAMED_INDIVIDUALS = { | ||
4405 | } | ||
4406 | |||
4407 | |||
4408 | # Portion of an AnyLicenseInfo representing a set of licensing information where | ||
4409 | # only one of the elements applies. | ||
4410 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/DisjunctiveLicenseSet", compact_type="expandedlicensing_DisjunctiveLicenseSet", abstract=False) | ||
4411 | class expandedlicensing_DisjunctiveLicenseSet(simplelicensing_AnyLicenseInfo): | ||
4412 | NODE_KIND = NodeKind.IRI | ||
4413 | ID_ALIAS = "spdxId" | ||
4414 | NAMED_INDIVIDUALS = { | ||
4415 | } | ||
4416 | |||
4417 | @classmethod | ||
4418 | def _register_props(cls): | ||
4419 | super()._register_props() | ||
4420 | # A license expression participating in a license set. | ||
4421 | cls._add_property( | ||
4422 | "expandedlicensing_member", | ||
4423 | ListProp(ObjectProp(simplelicensing_AnyLicenseInfo, False, context=[ | ||
4424 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"), | ||
4425 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"), | ||
4426 | ],)), | ||
4427 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/member", | ||
4428 | min_count=2, | ||
4429 | compact="expandedlicensing_member", | ||
4430 | ) | ||
4431 | |||
4432 | |||
4433 | # Abstract class representing a License or an OrLaterOperator. | ||
4434 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/ExtendableLicense", compact_type="expandedlicensing_ExtendableLicense", abstract=True) | ||
4435 | class expandedlicensing_ExtendableLicense(simplelicensing_AnyLicenseInfo): | ||
4436 | NODE_KIND = NodeKind.IRI | ||
4437 | ID_ALIAS = "spdxId" | ||
4438 | NAMED_INDIVIDUALS = { | ||
4439 | } | ||
4440 | |||
4441 | |||
4442 | # A concrete subclass of AnyLicenseInfo used by Individuals in the | ||
4443 | # ExpandedLicensing profile. | ||
4444 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/IndividualLicensingInfo", compact_type="expandedlicensing_IndividualLicensingInfo", abstract=False) | ||
4445 | class expandedlicensing_IndividualLicensingInfo(simplelicensing_AnyLicenseInfo): | ||
4446 | NODE_KIND = NodeKind.IRI | ||
4447 | ID_ALIAS = "spdxId" | ||
4448 | NAMED_INDIVIDUALS = { | ||
4449 | "NoAssertionLicense": "https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", | ||
4450 | "NoneLicense": "https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", | ||
4451 | } | ||
4452 | # An Individual Value for License when no assertion can be made about its actual | ||
4453 | # value. | ||
4454 | NoAssertionLicense = "https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense" | ||
4455 | # An Individual Value for License where the SPDX data creator determines that no | ||
4456 | # license is present. | ||
4457 | NoneLicense = "https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense" | ||
4458 | |||
4459 | |||
4460 | # Abstract class for the portion of an AnyLicenseInfo representing a license. | ||
4461 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/License", compact_type="expandedlicensing_License", abstract=True) | ||
4462 | class expandedlicensing_License(expandedlicensing_ExtendableLicense): | ||
4463 | NODE_KIND = NodeKind.IRI | ||
4464 | ID_ALIAS = "spdxId" | ||
4465 | NAMED_INDIVIDUALS = { | ||
4466 | } | ||
4467 | |||
4468 | @classmethod | ||
4469 | def _register_props(cls): | ||
4470 | super()._register_props() | ||
4471 | # Specifies whether a license or additional text identifier has been marked as | ||
4472 | # deprecated. | ||
4473 | cls._add_property( | ||
4474 | "expandedlicensing_isDeprecatedLicenseId", | ||
4475 | BooleanProp(), | ||
4476 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/isDeprecatedLicenseId", | ||
4477 | compact="expandedlicensing_isDeprecatedLicenseId", | ||
4478 | ) | ||
4479 | # Specifies whether the License is listed as free by the | ||
4480 | # Free Software Foundation (FSF). | ||
4481 | cls._add_property( | ||
4482 | "expandedlicensing_isFsfLibre", | ||
4483 | BooleanProp(), | ||
4484 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/isFsfLibre", | ||
4485 | compact="expandedlicensing_isFsfLibre", | ||
4486 | ) | ||
4487 | # Specifies whether the License is listed as approved by the | ||
4488 | # Open Source Initiative (OSI). | ||
4489 | cls._add_property( | ||
4490 | "expandedlicensing_isOsiApproved", | ||
4491 | BooleanProp(), | ||
4492 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/isOsiApproved", | ||
4493 | compact="expandedlicensing_isOsiApproved", | ||
4494 | ) | ||
4495 | # Identifies all the text and metadata associated with a license in the license | ||
4496 | # XML format. | ||
4497 | cls._add_property( | ||
4498 | "expandedlicensing_licenseXml", | ||
4499 | StringProp(), | ||
4500 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/licenseXml", | ||
4501 | compact="expandedlicensing_licenseXml", | ||
4502 | ) | ||
4503 | # Specifies the licenseId preferred for use in place of a deprecated License or | ||
4504 | # LicenseAddition. | ||
4505 | cls._add_property( | ||
4506 | "expandedlicensing_obsoletedBy", | ||
4507 | StringProp(), | ||
4508 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/obsoletedBy", | ||
4509 | compact="expandedlicensing_obsoletedBy", | ||
4510 | ) | ||
4511 | # Contains a URL where the License or LicenseAddition can be found in use. | ||
4512 | cls._add_property( | ||
4513 | "expandedlicensing_seeAlso", | ||
4514 | ListProp(AnyURIProp()), | ||
4515 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/seeAlso", | ||
4516 | compact="expandedlicensing_seeAlso", | ||
4517 | ) | ||
4518 | # Provides a License author's preferred text to indicate that a file is covered | ||
4519 | # by the License. | ||
4520 | cls._add_property( | ||
4521 | "expandedlicensing_standardLicenseHeader", | ||
4522 | StringProp(), | ||
4523 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/standardLicenseHeader", | ||
4524 | compact="expandedlicensing_standardLicenseHeader", | ||
4525 | ) | ||
4526 | # Identifies the full text of a License, in SPDX templating format. | ||
4527 | cls._add_property( | ||
4528 | "expandedlicensing_standardLicenseTemplate", | ||
4529 | StringProp(), | ||
4530 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/standardLicenseTemplate", | ||
4531 | compact="expandedlicensing_standardLicenseTemplate", | ||
4532 | ) | ||
4533 | # Identifies the full text of a License or Addition. | ||
4534 | cls._add_property( | ||
4535 | "simplelicensing_licenseText", | ||
4536 | StringProp(), | ||
4537 | iri="https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/licenseText", | ||
4538 | min_count=1, | ||
4539 | compact="simplelicensing_licenseText", | ||
4540 | ) | ||
4541 | |||
4542 | |||
4543 | # A license that is listed on the SPDX License List. | ||
4544 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/ListedLicense", compact_type="expandedlicensing_ListedLicense", abstract=False) | ||
4545 | class expandedlicensing_ListedLicense(expandedlicensing_License): | ||
4546 | NODE_KIND = NodeKind.IRI | ||
4547 | ID_ALIAS = "spdxId" | ||
4548 | NAMED_INDIVIDUALS = { | ||
4549 | } | ||
4550 | |||
4551 | @classmethod | ||
4552 | def _register_props(cls): | ||
4553 | super()._register_props() | ||
4554 | # Specifies the SPDX License List version in which this license or exception | ||
4555 | # identifier was deprecated. | ||
4556 | cls._add_property( | ||
4557 | "expandedlicensing_deprecatedVersion", | ||
4558 | StringProp(), | ||
4559 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/deprecatedVersion", | ||
4560 | compact="expandedlicensing_deprecatedVersion", | ||
4561 | ) | ||
4562 | # Specifies the SPDX License List version in which this ListedLicense or | ||
4563 | # ListedLicenseException identifier was first added. | ||
4564 | cls._add_property( | ||
4565 | "expandedlicensing_listVersionAdded", | ||
4566 | StringProp(), | ||
4567 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/listVersionAdded", | ||
4568 | compact="expandedlicensing_listVersionAdded", | ||
4569 | ) | ||
4570 | |||
4571 | |||
4572 | # Portion of an AnyLicenseInfo representing this version, or any later version, | ||
4573 | # of the indicated License. | ||
4574 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/OrLaterOperator", compact_type="expandedlicensing_OrLaterOperator", abstract=False) | ||
4575 | class expandedlicensing_OrLaterOperator(expandedlicensing_ExtendableLicense): | ||
4576 | NODE_KIND = NodeKind.IRI | ||
4577 | ID_ALIAS = "spdxId" | ||
4578 | NAMED_INDIVIDUALS = { | ||
4579 | } | ||
4580 | |||
4581 | @classmethod | ||
4582 | def _register_props(cls): | ||
4583 | super()._register_props() | ||
4584 | # A License participating in an 'or later' model. | ||
4585 | cls._add_property( | ||
4586 | "expandedlicensing_subjectLicense", | ||
4587 | ObjectProp(expandedlicensing_License, True), | ||
4588 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/subjectLicense", | ||
4589 | min_count=1, | ||
4590 | compact="expandedlicensing_subjectLicense", | ||
4591 | ) | ||
4592 | |||
4593 | |||
4594 | # Portion of an AnyLicenseInfo representing a License which has additional | ||
4595 | # text applied to it. | ||
4596 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/WithAdditionOperator", compact_type="expandedlicensing_WithAdditionOperator", abstract=False) | ||
4597 | class expandedlicensing_WithAdditionOperator(simplelicensing_AnyLicenseInfo): | ||
4598 | NODE_KIND = NodeKind.IRI | ||
4599 | ID_ALIAS = "spdxId" | ||
4600 | NAMED_INDIVIDUALS = { | ||
4601 | } | ||
4602 | |||
4603 | @classmethod | ||
4604 | def _register_props(cls): | ||
4605 | super()._register_props() | ||
4606 | # A LicenseAddition participating in a 'with addition' model. | ||
4607 | cls._add_property( | ||
4608 | "expandedlicensing_subjectAddition", | ||
4609 | ObjectProp(expandedlicensing_LicenseAddition, True), | ||
4610 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/subjectAddition", | ||
4611 | min_count=1, | ||
4612 | compact="expandedlicensing_subjectAddition", | ||
4613 | ) | ||
4614 | # A License participating in a 'with addition' model. | ||
4615 | cls._add_property( | ||
4616 | "expandedlicensing_subjectExtendableLicense", | ||
4617 | ObjectProp(expandedlicensing_ExtendableLicense, True), | ||
4618 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/subjectExtendableLicense", | ||
4619 | min_count=1, | ||
4620 | compact="expandedlicensing_subjectExtendableLicense", | ||
4621 | ) | ||
4622 | |||
4623 | |||
4624 | # A type of extension consisting of a list of name value pairs. | ||
4625 | @register("https://spdx.org/rdf/3.0.1/terms/Extension/CdxPropertiesExtension", compact_type="extension_CdxPropertiesExtension", abstract=False) | ||
4626 | class extension_CdxPropertiesExtension(extension_Extension): | ||
4627 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
4628 | NAMED_INDIVIDUALS = { | ||
4629 | } | ||
4630 | |||
4631 | @classmethod | ||
4632 | def _register_props(cls): | ||
4633 | super()._register_props() | ||
4634 | # Provides a map of property names to values. | ||
4635 | cls._add_property( | ||
4636 | "extension_cdxProperty", | ||
4637 | ListProp(ObjectProp(extension_CdxPropertyEntry, False)), | ||
4638 | iri="https://spdx.org/rdf/3.0.1/terms/Extension/cdxProperty", | ||
4639 | min_count=1, | ||
4640 | compact="extension_cdxProperty", | ||
4641 | ) | ||
4642 | |||
4643 | |||
4644 | # Provides a CVSS version 2.0 assessment for a vulnerability. | ||
4645 | @register("https://spdx.org/rdf/3.0.1/terms/Security/CvssV2VulnAssessmentRelationship", compact_type="security_CvssV2VulnAssessmentRelationship", abstract=False) | ||
4646 | class security_CvssV2VulnAssessmentRelationship(security_VulnAssessmentRelationship): | ||
4647 | NODE_KIND = NodeKind.IRI | ||
4648 | ID_ALIAS = "spdxId" | ||
4649 | NAMED_INDIVIDUALS = { | ||
4650 | } | ||
4651 | |||
4652 | @classmethod | ||
4653 | def _register_props(cls): | ||
4654 | super()._register_props() | ||
4655 | # Provides a numerical (0-10) representation of the severity of a vulnerability. | ||
4656 | cls._add_property( | ||
4657 | "security_score", | ||
4658 | FloatProp(), | ||
4659 | iri="https://spdx.org/rdf/3.0.1/terms/Security/score", | ||
4660 | min_count=1, | ||
4661 | compact="security_score", | ||
4662 | ) | ||
4663 | # Specifies the CVSS vector string for a vulnerability. | ||
4664 | cls._add_property( | ||
4665 | "security_vectorString", | ||
4666 | StringProp(), | ||
4667 | iri="https://spdx.org/rdf/3.0.1/terms/Security/vectorString", | ||
4668 | min_count=1, | ||
4669 | compact="security_vectorString", | ||
4670 | ) | ||
4671 | |||
4672 | |||
4673 | # Provides a CVSS version 3 assessment for a vulnerability. | ||
4674 | @register("https://spdx.org/rdf/3.0.1/terms/Security/CvssV3VulnAssessmentRelationship", compact_type="security_CvssV3VulnAssessmentRelationship", abstract=False) | ||
4675 | class security_CvssV3VulnAssessmentRelationship(security_VulnAssessmentRelationship): | ||
4676 | NODE_KIND = NodeKind.IRI | ||
4677 | ID_ALIAS = "spdxId" | ||
4678 | NAMED_INDIVIDUALS = { | ||
4679 | } | ||
4680 | |||
4681 | @classmethod | ||
4682 | def _register_props(cls): | ||
4683 | super()._register_props() | ||
4684 | # Provides a numerical (0-10) representation of the severity of a vulnerability. | ||
4685 | cls._add_property( | ||
4686 | "security_score", | ||
4687 | FloatProp(), | ||
4688 | iri="https://spdx.org/rdf/3.0.1/terms/Security/score", | ||
4689 | min_count=1, | ||
4690 | compact="security_score", | ||
4691 | ) | ||
4692 | # Specifies the CVSS qualitative severity rating of a vulnerability in relation to a piece of software. | ||
4693 | cls._add_property( | ||
4694 | "security_severity", | ||
4695 | EnumProp([ | ||
4696 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/critical", "critical"), | ||
4697 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/high", "high"), | ||
4698 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/low", "low"), | ||
4699 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/medium", "medium"), | ||
4700 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/none", "none"), | ||
4701 | ]), | ||
4702 | iri="https://spdx.org/rdf/3.0.1/terms/Security/severity", | ||
4703 | min_count=1, | ||
4704 | compact="security_severity", | ||
4705 | ) | ||
4706 | # Specifies the CVSS vector string for a vulnerability. | ||
4707 | cls._add_property( | ||
4708 | "security_vectorString", | ||
4709 | StringProp(), | ||
4710 | iri="https://spdx.org/rdf/3.0.1/terms/Security/vectorString", | ||
4711 | min_count=1, | ||
4712 | compact="security_vectorString", | ||
4713 | ) | ||
4714 | |||
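A minimal sketch of recording a CVSS v3 assessment, assuming the keyword-argument constructor. security_severity takes one of the enum IRIs above, and the inherited Relationship fields that tie the assessment to a Vulnerability and the assessed element are omitted here for brevity; all values are illustrative:

assessment = security_CvssV3VulnAssessmentRelationship(
    _id="http://example.org/assessments/cve-2024-0001",  # hypothetical spdxId
    security_score=7.5,                                  # required, 0-10
    security_severity="https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/high",
    security_vectorString="CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:N/A:N",
)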
4715 | |||
4716 | # Provides a CVSS version 4 assessment for a vulnerability. | ||
4717 | @register("https://spdx.org/rdf/3.0.1/terms/Security/CvssV4VulnAssessmentRelationship", compact_type="security_CvssV4VulnAssessmentRelationship", abstract=False) | ||
4718 | class security_CvssV4VulnAssessmentRelationship(security_VulnAssessmentRelationship): | ||
4719 | NODE_KIND = NodeKind.IRI | ||
4720 | ID_ALIAS = "spdxId" | ||
4721 | NAMED_INDIVIDUALS = { | ||
4722 | } | ||
4723 | |||
4724 | @classmethod | ||
4725 | def _register_props(cls): | ||
4726 | super()._register_props() | ||
4727 | # Provides a numerical (0-10) representation of the severity of a vulnerability. | ||
4728 | cls._add_property( | ||
4729 | "security_score", | ||
4730 | FloatProp(), | ||
4731 | iri="https://spdx.org/rdf/3.0.1/terms/Security/score", | ||
4732 | min_count=1, | ||
4733 | compact="security_score", | ||
4734 | ) | ||
4735 | # Specifies the CVSS qualitative severity rating of a vulnerability in relation to a piece of software. | ||
4736 | cls._add_property( | ||
4737 | "security_severity", | ||
4738 | EnumProp([ | ||
4739 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/critical", "critical"), | ||
4740 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/high", "high"), | ||
4741 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/low", "low"), | ||
4742 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/medium", "medium"), | ||
4743 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/none", "none"), | ||
4744 | ]), | ||
4745 | iri="https://spdx.org/rdf/3.0.1/terms/Security/severity", | ||
4746 | min_count=1, | ||
4747 | compact="security_severity", | ||
4748 | ) | ||
4749 | # Specifies the CVSS vector string for a vulnerability. | ||
4750 | cls._add_property( | ||
4751 | "security_vectorString", | ||
4752 | StringProp(), | ||
4753 | iri="https://spdx.org/rdf/3.0.1/terms/Security/vectorString", | ||
4754 | min_count=1, | ||
4755 | compact="security_vectorString", | ||
4756 | ) | ||
4757 | |||
4758 | |||
4759 | # Provides an EPSS assessment for a vulnerability. | ||
4760 | @register("https://spdx.org/rdf/3.0.1/terms/Security/EpssVulnAssessmentRelationship", compact_type="security_EpssVulnAssessmentRelationship", abstract=False) | ||
4761 | class security_EpssVulnAssessmentRelationship(security_VulnAssessmentRelationship): | ||
4762 | NODE_KIND = NodeKind.IRI | ||
4763 | ID_ALIAS = "spdxId" | ||
4764 | NAMED_INDIVIDUALS = { | ||
4765 | } | ||
4766 | |||
4767 | @classmethod | ||
4768 | def _register_props(cls): | ||
4769 | super()._register_props() | ||
4770 | # The percentile of the current probability score. | ||
4771 | cls._add_property( | ||
4772 | "security_percentile", | ||
4773 | FloatProp(), | ||
4774 | iri="https://spdx.org/rdf/3.0.1/terms/Security/percentile", | ||
4775 | min_count=1, | ||
4776 | compact="security_percentile", | ||
4777 | ) | ||
4778 | # A probability score between 0 and 1 of a vulnerability being exploited. | ||
4779 | cls._add_property( | ||
4780 | "security_probability", | ||
4781 | FloatProp(), | ||
4782 | iri="https://spdx.org/rdf/3.0.1/terms/Security/probability", | ||
4783 | min_count=1, | ||
4784 | compact="security_probability", | ||
4785 | ) | ||
4786 | |||
4787 | |||
4788 | # Provides an exploit assessment of a vulnerability. | ||
4789 | @register("https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogVulnAssessmentRelationship", compact_type="security_ExploitCatalogVulnAssessmentRelationship", abstract=False) | ||
4790 | class security_ExploitCatalogVulnAssessmentRelationship(security_VulnAssessmentRelationship): | ||
4791 | NODE_KIND = NodeKind.IRI | ||
4792 | ID_ALIAS = "spdxId" | ||
4793 | NAMED_INDIVIDUALS = { | ||
4794 | } | ||
4795 | |||
4796 | @classmethod | ||
4797 | def _register_props(cls): | ||
4798 | super()._register_props() | ||
4799 | # Specifies the exploit catalog type. | ||
4800 | cls._add_property( | ||
4801 | "security_catalogType", | ||
4802 | EnumProp([ | ||
4803 | ("https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/kev", "kev"), | ||
4804 | ("https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/other", "other"), | ||
4805 | ]), | ||
4806 | iri="https://spdx.org/rdf/3.0.1/terms/Security/catalogType", | ||
4807 | min_count=1, | ||
4808 | compact="security_catalogType", | ||
4809 | ) | ||
4810 | # Describes that a CVE is known to have an exploit because it has been listed in an exploit catalog. | ||
4811 | cls._add_property( | ||
4812 | "security_exploited", | ||
4813 | BooleanProp(), | ||
4814 | iri="https://spdx.org/rdf/3.0.1/terms/Security/exploited", | ||
4815 | min_count=1, | ||
4816 | compact="security_exploited", | ||
4817 | ) | ||
4818 | # Provides the location of an exploit catalog. | ||
4819 | cls._add_property( | ||
4820 | "security_locator", | ||
4821 | AnyURIProp(), | ||
4822 | iri="https://spdx.org/rdf/3.0.1/terms/Security/locator", | ||
4823 | min_count=1, | ||
4824 | compact="security_locator", | ||
4825 | ) | ||
4826 | |||
4827 | |||
4828 | # Provides an SSVC assessment for a vulnerability. | ||
4829 | @register("https://spdx.org/rdf/3.0.1/terms/Security/SsvcVulnAssessmentRelationship", compact_type="security_SsvcVulnAssessmentRelationship", abstract=False) | ||
4830 | class security_SsvcVulnAssessmentRelationship(security_VulnAssessmentRelationship): | ||
4831 | NODE_KIND = NodeKind.IRI | ||
4832 | ID_ALIAS = "spdxId" | ||
4833 | NAMED_INDIVIDUALS = { | ||
4834 | } | ||
4835 | |||
4836 | @classmethod | ||
4837 | def _register_props(cls): | ||
4838 | super()._register_props() | ||
4839 | # Provides the enumeration of possible decisions in the | ||
4840 | # [Stakeholder-Specific Vulnerability Categorization (SSVC) decision tree](https://www.cisa.gov/stakeholder-specific-vulnerability-categorization-ssvc). | ||
4841 | cls._add_property( | ||
4842 | "security_decisionType", | ||
4843 | EnumProp([ | ||
4844 | ("https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/act", "act"), | ||
4845 | ("https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/attend", "attend"), | ||
4846 | ("https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/track", "track"), | ||
4847 | ("https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/trackStar", "trackStar"), | ||
4848 | ]), | ||
4849 | iri="https://spdx.org/rdf/3.0.1/terms/Security/decisionType", | ||
4850 | min_count=1, | ||
4851 | compact="security_decisionType", | ||
4852 | ) | ||
4853 | |||
4854 | |||
4855 | # Abstract ancestor class for all VEX relationships. | ||
4856 | @register("https://spdx.org/rdf/3.0.1/terms/Security/VexVulnAssessmentRelationship", compact_type="security_VexVulnAssessmentRelationship", abstract=True) | ||
4857 | class security_VexVulnAssessmentRelationship(security_VulnAssessmentRelationship): | ||
4858 | NODE_KIND = NodeKind.IRI | ||
4859 | ID_ALIAS = "spdxId" | ||
4860 | NAMED_INDIVIDUALS = { | ||
4861 | } | ||
4862 | |||
4863 | @classmethod | ||
4864 | def _register_props(cls): | ||
4865 | super()._register_props() | ||
4866 | # Conveys information about how VEX status was determined. | ||
4867 | cls._add_property( | ||
4868 | "security_statusNotes", | ||
4869 | StringProp(), | ||
4870 | iri="https://spdx.org/rdf/3.0.1/terms/Security/statusNotes", | ||
4871 | compact="security_statusNotes", | ||
4872 | ) | ||
4873 | # Specifies the version of a VEX statement. | ||
4874 | cls._add_property( | ||
4875 | "security_vexVersion", | ||
4876 | StringProp(), | ||
4877 | iri="https://spdx.org/rdf/3.0.1/terms/Security/vexVersion", | ||
4878 | compact="security_vexVersion", | ||
4879 | ) | ||
4880 | |||
4881 | |||
4882 | # Specifies a vulnerability and its associated information. | ||
4883 | @register("https://spdx.org/rdf/3.0.1/terms/Security/Vulnerability", compact_type="security_Vulnerability", abstract=False) | ||
4884 | class security_Vulnerability(Artifact): | ||
4885 | NODE_KIND = NodeKind.IRI | ||
4886 | ID_ALIAS = "spdxId" | ||
4887 | NAMED_INDIVIDUALS = { | ||
4888 | } | ||
4889 | |||
4890 | @classmethod | ||
4891 | def _register_props(cls): | ||
4892 | super()._register_props() | ||
4893 | # Specifies the time when a vulnerability assessment was modified. | ||
4894 | cls._add_property( | ||
4895 | "security_modifiedTime", | ||
4896 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
4897 | iri="https://spdx.org/rdf/3.0.1/terms/Security/modifiedTime", | ||
4898 | compact="security_modifiedTime", | ||
4899 | ) | ||
4900 | # Specifies the time when a vulnerability was published. | ||
4901 | cls._add_property( | ||
4902 | "security_publishedTime", | ||
4903 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
4904 | iri="https://spdx.org/rdf/3.0.1/terms/Security/publishedTime", | ||
4905 | compact="security_publishedTime", | ||
4906 | ) | ||
4907 | # Specifies the time and date when a vulnerability was withdrawn. | ||
4908 | cls._add_property( | ||
4909 | "security_withdrawnTime", | ||
4910 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
4911 | iri="https://spdx.org/rdf/3.0.1/terms/Security/withdrawnTime", | ||
4912 | compact="security_withdrawnTime", | ||
4913 | ) | ||
4914 | |||
4915 | |||
4916 | # A distinct article or unit related to Software. | ||
4917 | @register("https://spdx.org/rdf/3.0.1/terms/Software/SoftwareArtifact", compact_type="software_SoftwareArtifact", abstract=True) | ||
4918 | class software_SoftwareArtifact(Artifact): | ||
4919 | NODE_KIND = NodeKind.IRI | ||
4920 | ID_ALIAS = "spdxId" | ||
4921 | NAMED_INDIVIDUALS = { | ||
4922 | } | ||
4923 | |||
4924 | @classmethod | ||
4925 | def _register_props(cls): | ||
4926 | super()._register_props() | ||
4927 | # Provides additional purpose information for the software artifact. | ||
4928 | cls._add_property( | ||
4929 | "software_additionalPurpose", | ||
4930 | ListProp(EnumProp([ | ||
4931 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/application", "application"), | ||
4932 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/archive", "archive"), | ||
4933 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/bom", "bom"), | ||
4934 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/configuration", "configuration"), | ||
4935 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/container", "container"), | ||
4936 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/data", "data"), | ||
4937 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/device", "device"), | ||
4938 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/deviceDriver", "deviceDriver"), | ||
4939 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/diskImage", "diskImage"), | ||
4940 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/documentation", "documentation"), | ||
4941 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/evidence", "evidence"), | ||
4942 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/executable", "executable"), | ||
4943 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/file", "file"), | ||
4944 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/filesystemImage", "filesystemImage"), | ||
4945 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/firmware", "firmware"), | ||
4946 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/framework", "framework"), | ||
4947 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/install", "install"), | ||
4948 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/library", "library"), | ||
4949 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/manifest", "manifest"), | ||
4950 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/model", "model"), | ||
4951 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/module", "module"), | ||
4952 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/operatingSystem", "operatingSystem"), | ||
4953 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/other", "other"), | ||
4954 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/patch", "patch"), | ||
4955 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/platform", "platform"), | ||
4956 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/requirement", "requirement"), | ||
4957 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/source", "source"), | ||
4958 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/specification", "specification"), | ||
4959 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/test", "test"), | ||
4960 | ])), | ||
4961 | iri="https://spdx.org/rdf/3.0.1/terms/Software/additionalPurpose", | ||
4962 | compact="software_additionalPurpose", | ||
4963 | ) | ||
4964 | # Provides a place for the SPDX data creator to record acknowledgement text for | ||
4965 | # a software Package, File or Snippet. | ||
4966 | cls._add_property( | ||
4967 | "software_attributionText", | ||
4968 | ListProp(StringProp()), | ||
4969 | iri="https://spdx.org/rdf/3.0.1/terms/Software/attributionText", | ||
4970 | compact="software_attributionText", | ||
4971 | ) | ||
4972 | # A canonical, unique, immutable identifier of the artifact content that may be | ||
4973 | # used for verifying its identity and/or integrity. | ||
4974 | cls._add_property( | ||
4975 | "software_contentIdentifier", | ||
4976 | ListProp(ObjectProp(software_ContentIdentifier, False)), | ||
4977 | iri="https://spdx.org/rdf/3.0.1/terms/Software/contentIdentifier", | ||
4978 | compact="software_contentIdentifier", | ||
4979 | ) | ||
4980 | # Identifies the text of one or more copyright notices for a software Package, | ||
4981 | # File or Snippet, if any. | ||
4982 | cls._add_property( | ||
4983 | "software_copyrightText", | ||
4984 | StringProp(), | ||
4985 | iri="https://spdx.org/rdf/3.0.1/terms/Software/copyrightText", | ||
4986 | compact="software_copyrightText", | ||
4987 | ) | ||
4988 | # Provides information about the primary purpose of the software artifact. | ||
4989 | cls._add_property( | ||
4990 | "software_primaryPurpose", | ||
4991 | EnumProp([ | ||
4992 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/application", "application"), | ||
4993 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/archive", "archive"), | ||
4994 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/bom", "bom"), | ||
4995 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/configuration", "configuration"), | ||
4996 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/container", "container"), | ||
4997 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/data", "data"), | ||
4998 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/device", "device"), | ||
4999 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/deviceDriver", "deviceDriver"), | ||
5000 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/diskImage", "diskImage"), | ||
5001 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/documentation", "documentation"), | ||
5002 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/evidence", "evidence"), | ||
5003 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/executable", "executable"), | ||
5004 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/file", "file"), | ||
5005 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/filesystemImage", "filesystemImage"), | ||
5006 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/firmware", "firmware"), | ||
5007 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/framework", "framework"), | ||
5008 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/install", "install"), | ||
5009 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/library", "library"), | ||
5010 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/manifest", "manifest"), | ||
5011 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/model", "model"), | ||
5012 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/module", "module"), | ||
5013 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/operatingSystem", "operatingSystem"), | ||
5014 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/other", "other"), | ||
5015 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/patch", "patch"), | ||
5016 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/platform", "platform"), | ||
5017 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/requirement", "requirement"), | ||
5018 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/source", "source"), | ||
5019 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/specification", "specification"), | ||
5020 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/test", "test"), | ||
5021 | ]), | ||
5022 | iri="https://spdx.org/rdf/3.0.1/terms/Software/primaryPurpose", | ||
5023 | compact="software_primaryPurpose", | ||
5024 | ) | ||
5025 | |||
5026 | |||
5027 | # A container for a grouping of SPDX-3.0 content characterizing details | ||
5028 | # (provenance, composition, licensing, etc.) about a product. | ||
5029 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Bom", compact_type="Bom", abstract=False) | ||
5030 | class Bom(Bundle): | ||
5031 | NODE_KIND = NodeKind.IRI | ||
5032 | ID_ALIAS = "spdxId" | ||
5033 | NAMED_INDIVIDUALS = { | ||
5034 | } | ||
5035 | |||
5036 | |||
5037 | # A license that is not listed on the SPDX License List. | ||
5038 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/CustomLicense", compact_type="expandedlicensing_CustomLicense", abstract=False) | ||
5039 | class expandedlicensing_CustomLicense(expandedlicensing_License): | ||
5040 | NODE_KIND = NodeKind.IRI | ||
5041 | ID_ALIAS = "spdxId" | ||
5042 | NAMED_INDIVIDUALS = { | ||
5043 | } | ||
5044 | |||
5045 | |||
5046 | # Connects a vulnerability and an element designating the element as a product | ||
5047 | # affected by the vulnerability. | ||
5048 | @register("https://spdx.org/rdf/3.0.1/terms/Security/VexAffectedVulnAssessmentRelationship", compact_type="security_VexAffectedVulnAssessmentRelationship", abstract=False) | ||
5049 | class security_VexAffectedVulnAssessmentRelationship(security_VexVulnAssessmentRelationship): | ||
5050 | NODE_KIND = NodeKind.IRI | ||
5051 | ID_ALIAS = "spdxId" | ||
5052 | NAMED_INDIVIDUALS = { | ||
5053 | } | ||
5054 | |||
5055 | @classmethod | ||
5056 | def _register_props(cls): | ||
5057 | super()._register_props() | ||
5058 | # Provides advice on how to mitigate or remediate a vulnerability when a VEX product | ||
5059 | # is affected by it. | ||
5060 | cls._add_property( | ||
5061 | "security_actionStatement", | ||
5062 | StringProp(), | ||
5063 | iri="https://spdx.org/rdf/3.0.1/terms/Security/actionStatement", | ||
5064 | min_count=1, | ||
5065 | compact="security_actionStatement", | ||
5066 | ) | ||
5067 | # Records the time when a recommended action was communicated in a VEX statement | ||
5068 | # to mitigate a vulnerability. | ||
5069 | cls._add_property( | ||
5070 | "security_actionStatementTime", | ||
5071 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
5072 | iri="https://spdx.org/rdf/3.0.1/terms/Security/actionStatementTime", | ||
5073 | compact="security_actionStatementTime", | ||
5074 | ) | ||
5075 | |||
5076 | |||
5077 | # Links a vulnerability and elements representing products (in the VEX sense) where | ||
5078 | # a fix has been applied and that are no longer affected. | ||
5079 | @register("https://spdx.org/rdf/3.0.1/terms/Security/VexFixedVulnAssessmentRelationship", compact_type="security_VexFixedVulnAssessmentRelationship", abstract=False) | ||
5080 | class security_VexFixedVulnAssessmentRelationship(security_VexVulnAssessmentRelationship): | ||
5081 | NODE_KIND = NodeKind.IRI | ||
5082 | ID_ALIAS = "spdxId" | ||
5083 | NAMED_INDIVIDUALS = { | ||
5084 | } | ||
5085 | |||
5086 | |||
5087 | # Links a vulnerability and one or more elements designating the latter as products | ||
5088 | # not affected by the vulnerability. | ||
5089 | @register("https://spdx.org/rdf/3.0.1/terms/Security/VexNotAffectedVulnAssessmentRelationship", compact_type="security_VexNotAffectedVulnAssessmentRelationship", abstract=False) | ||
5090 | class security_VexNotAffectedVulnAssessmentRelationship(security_VexVulnAssessmentRelationship): | ||
5091 | NODE_KIND = NodeKind.IRI | ||
5092 | ID_ALIAS = "spdxId" | ||
5093 | NAMED_INDIVIDUALS = { | ||
5094 | } | ||
5095 | |||
5096 | @classmethod | ||
5097 | def _register_props(cls): | ||
5098 | super()._register_props() | ||
5099 | # Explains why a VEX product is not affected by a vulnerability. It is an | ||
5100 | # alternative in VexNotAffectedVulnAssessmentRelationship to the machine-readable | ||
5101 | # justification label. | ||
5102 | cls._add_property( | ||
5103 | "security_impactStatement", | ||
5104 | StringProp(), | ||
5105 | iri="https://spdx.org/rdf/3.0.1/terms/Security/impactStatement", | ||
5106 | compact="security_impactStatement", | ||
5107 | ) | ||
5108 | # Timestamp of impact statement. | ||
5109 | cls._add_property( | ||
5110 | "security_impactStatementTime", | ||
5111 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
5112 | iri="https://spdx.org/rdf/3.0.1/terms/Security/impactStatementTime", | ||
5113 | compact="security_impactStatementTime", | ||
5114 | ) | ||
5115 | # Impact justification label to be used when linking a vulnerability to an element | ||
5116 | # representing a VEX product with a VexNotAffectedVulnAssessmentRelationship | ||
5117 | # relationship. | ||
5118 | cls._add_property( | ||
5119 | "security_justificationType", | ||
5120 | EnumProp([ | ||
5121 | ("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/componentNotPresent", "componentNotPresent"), | ||
5122 | ("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/inlineMitigationsAlreadyExist", "inlineMitigationsAlreadyExist"), | ||
5123 | ("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeCannotBeControlledByAdversary", "vulnerableCodeCannotBeControlledByAdversary"), | ||
5124 | ("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotInExecutePath", "vulnerableCodeNotInExecutePath"), | ||
5125 | ("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotPresent", "vulnerableCodeNotPresent"), | ||
5126 | ]), | ||
5127 | iri="https://spdx.org/rdf/3.0.1/terms/Security/justificationType", | ||
5128 | compact="security_justificationType", | ||
5129 | ) | ||
5130 | |||
5131 | |||
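# Illustrative sketch (not part of the generated model): assembling a "not
# affected" VEX statement from these classes. The element IDs and
# "creation_info" are hypothetical placeholders; the OE tasks build these
# through the oe.sbom30 ObjectSet helpers instead.
#
#     rel = security_VexNotAffectedVulnAssessmentRelationship(
#         _id="http://example.com/vex/not-affected-1",
#         creationInfo=creation_info,
#         from_=spdx_cve_vulnerability,
#         to=[spdx_package],
#         security_justificationType=(
#             security_VexJustificationType.vulnerableCodeNotPresent
#         ),
#         security_impactStatement="Vulnerable code is not compiled in.",
#     )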
5132 | # Designates elements as products where the impact of a vulnerability is being | ||
5133 | # investigated. | ||
5134 | @register("https://spdx.org/rdf/3.0.1/terms/Security/VexUnderInvestigationVulnAssessmentRelationship", compact_type="security_VexUnderInvestigationVulnAssessmentRelationship", abstract=False) | ||
5135 | class security_VexUnderInvestigationVulnAssessmentRelationship(security_VexVulnAssessmentRelationship): | ||
5136 | NODE_KIND = NodeKind.IRI | ||
5137 | ID_ALIAS = "spdxId" | ||
5138 | NAMED_INDIVIDUALS = { | ||
5139 | } | ||
5140 | |||
5141 | |||
5142 | # Refers to any object that stores content on a computer. | ||
5143 | @register("https://spdx.org/rdf/3.0.1/terms/Software/File", compact_type="software_File", abstract=False) | ||
5144 | class software_File(software_SoftwareArtifact): | ||
5145 | NODE_KIND = NodeKind.IRI | ||
5146 | ID_ALIAS = "spdxId" | ||
5147 | NAMED_INDIVIDUALS = { | ||
5148 | } | ||
5149 | |||
5150 | @classmethod | ||
5151 | def _register_props(cls): | ||
5152 | super()._register_props() | ||
5153 | # Provides information about the content type of an Element or a Property. | ||
5154 | cls._add_property( | ||
5155 | "contentType", | ||
5156 | StringProp(pattern=r"^[^\/]+\/[^\/]+$",), | ||
5157 | iri="https://spdx.org/rdf/3.0.1/terms/Core/contentType", | ||
5158 | compact="contentType", | ||
5159 | ) | ||
5160 | # Describes if a given file is a directory or non-directory kind of file. | ||
5161 | cls._add_property( | ||
5162 | "software_fileKind", | ||
5163 | EnumProp([ | ||
5164 | ("https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/directory", "directory"), | ||
5165 | ("https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/file", "file"), | ||
5166 | ]), | ||
5167 | iri="https://spdx.org/rdf/3.0.1/terms/Software/fileKind", | ||
5168 | compact="software_fileKind", | ||
5169 | ) | ||
5170 | |||
5171 | |||
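# Illustrative sketch of a software_File entry as the OE tasks create them
# (normally via the oe.sbom30 new_file helper). The ID and "creation_info"
# are hypothetical placeholders, and software_FileKindType follows the
# generator's enum naming convention (an assumption here).
#
#     f = software_File(
#         _id="http://example.com/files/usr-bin-foo",
#         creationInfo=creation_info,
#         name="/usr/bin/foo",
#         software_fileKind=software_FileKindType.file,
#         software_primaryPurpose=software_SoftwarePurpose.executable,
#     )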
5172 | # Refers to any unit of content that can be associated with a distribution of | ||
5173 | # software. | ||
5174 | @register("https://spdx.org/rdf/3.0.1/terms/Software/Package", compact_type="software_Package", abstract=False) | ||
5175 | class software_Package(software_SoftwareArtifact): | ||
5176 | NODE_KIND = NodeKind.IRI | ||
5177 | ID_ALIAS = "spdxId" | ||
5178 | NAMED_INDIVIDUALS = { | ||
5179 | } | ||
5180 | |||
5181 | @classmethod | ||
5182 | def _register_props(cls): | ||
5183 | super()._register_props() | ||
5184 | # Identifies the download Uniform Resource Identifier for the package at the time | ||
5185 | # that the document was created. | ||
5186 | cls._add_property( | ||
5187 | "software_downloadLocation", | ||
5188 | AnyURIProp(), | ||
5189 | iri="https://spdx.org/rdf/3.0.1/terms/Software/downloadLocation", | ||
5190 | compact="software_downloadLocation", | ||
5191 | ) | ||
5192 | # A place for the SPDX document creator to record a website that serves as the | ||
5193 | # package's home page. | ||
5194 | cls._add_property( | ||
5195 | "software_homePage", | ||
5196 | AnyURIProp(), | ||
5197 | iri="https://spdx.org/rdf/3.0.1/terms/Software/homePage", | ||
5198 | compact="software_homePage", | ||
5199 | ) | ||
5200 | # Provides a place for the SPDX data creator to record the package URL string | ||
5201 | # (in accordance with the Package URL specification) for a software Package. | ||
5202 | cls._add_property( | ||
5203 | "software_packageUrl", | ||
5204 | AnyURIProp(), | ||
5205 | iri="https://spdx.org/rdf/3.0.1/terms/Software/packageUrl", | ||
5206 | compact="software_packageUrl", | ||
5207 | ) | ||
5208 | # Identifies the version of a package. | ||
5209 | cls._add_property( | ||
5210 | "software_packageVersion", | ||
5211 | StringProp(), | ||
5212 | iri="https://spdx.org/rdf/3.0.1/terms/Software/packageVersion", | ||
5213 | compact="software_packageVersion", | ||
5214 | ) | ||
5215 | # Records any relevant background information or additional comments | ||
5216 | # about the origin of the package. | ||
5217 | cls._add_property( | ||
5218 | "software_sourceInfo", | ||
5219 | StringProp(), | ||
5220 | iri="https://spdx.org/rdf/3.0.1/terms/Software/sourceInfo", | ||
5221 | compact="software_sourceInfo", | ||
5222 | ) | ||
5223 | |||
5224 | |||
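# Illustrative sketch mirroring how create_spdx() in spdx30_tasks.py
# instantiates this class; the ID, version, URL and "creation_info" are
# placeholder values.
#
#     pkg = software_Package(
#         _id="http://example.com/packages/busybox",
#         creationInfo=creation_info,
#         name="busybox",
#         software_packageVersion="1.36.1",
#         software_downloadLocation="https://busybox.net/downloads/busybox-1.36.1.tar.bz2",
#     )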
5225 | # A collection of SPDX Elements describing a single package. | ||
5226 | @register("https://spdx.org/rdf/3.0.1/terms/Software/Sbom", compact_type="software_Sbom", abstract=False) | ||
5227 | class software_Sbom(Bom): | ||
5228 | NODE_KIND = NodeKind.IRI | ||
5229 | ID_ALIAS = "spdxId" | ||
5230 | NAMED_INDIVIDUALS = { | ||
5231 | } | ||
5232 | |||
5233 | @classmethod | ||
5234 | def _register_props(cls): | ||
5235 | super()._register_props() | ||
5236 | # Provides information about the type of an SBOM. | ||
5237 | cls._add_property( | ||
5238 | "software_sbomType", | ||
5239 | ListProp(EnumProp([ | ||
5240 | ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/analyzed", "analyzed"), | ||
5241 | ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/build", "build"), | ||
5242 | ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/deployed", "deployed"), | ||
5243 | ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/design", "design"), | ||
5244 | ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/runtime", "runtime"), | ||
5245 | ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/source", "source"), | ||
5246 | ])), | ||
5247 | iri="https://spdx.org/rdf/3.0.1/terms/Software/sbomType", | ||
5248 | compact="software_sbomType", | ||
5249 | ) | ||
5250 | |||
5251 | |||
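# Illustrative sketch: marking a collection as a "build" SBOM. The enum class
# name software_SbomType is assumed from the generator's naming convention;
# the ID and "creation_info" are placeholders.
#
#     sbom = software_Sbom(
#         _id="http://example.com/sbom/core-image-minimal",
#         creationInfo=creation_info,
#         software_sbomType=[software_SbomType.build],
#     )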
5252 | # Describes a certain part of a file. | ||
5253 | @register("https://spdx.org/rdf/3.0.1/terms/Software/Snippet", compact_type="software_Snippet", abstract=False) | ||
5254 | class software_Snippet(software_SoftwareArtifact): | ||
5255 | NODE_KIND = NodeKind.IRI | ||
5256 | ID_ALIAS = "spdxId" | ||
5257 | NAMED_INDIVIDUALS = { | ||
5258 | } | ||
5259 | |||
5260 | @classmethod | ||
5261 | def _register_props(cls): | ||
5262 | super()._register_props() | ||
5263 | # Defines the byte range in the original host file that the snippet information | ||
5264 | # applies to. | ||
5265 | cls._add_property( | ||
5266 | "software_byteRange", | ||
5267 | ObjectProp(PositiveIntegerRange, False), | ||
5268 | iri="https://spdx.org/rdf/3.0.1/terms/Software/byteRange", | ||
5269 | compact="software_byteRange", | ||
5270 | ) | ||
5271 | # Defines the line range in the original host file that the snippet information | ||
5272 | # applies to. | ||
5273 | cls._add_property( | ||
5274 | "software_lineRange", | ||
5275 | ObjectProp(PositiveIntegerRange, False), | ||
5276 | iri="https://spdx.org/rdf/3.0.1/terms/Software/lineRange", | ||
5277 | compact="software_lineRange", | ||
5278 | ) | ||
5279 | # Defines the original host file that the snippet information applies to. | ||
5280 | cls._add_property( | ||
5281 | "software_snippetFromFile", | ||
5282 | ObjectProp(software_File, True), | ||
5283 | iri="https://spdx.org/rdf/3.0.1/terms/Software/snippetFromFile", | ||
5284 | min_count=1, | ||
5285 | compact="software_snippetFromFile", | ||
5286 | ) | ||
5287 | |||
5288 | |||
5289 | # Specifies an AI package and its associated information. | ||
5290 | @register("https://spdx.org/rdf/3.0.1/terms/AI/AIPackage", compact_type="ai_AIPackage", abstract=False) | ||
5291 | class ai_AIPackage(software_Package): | ||
5292 | NODE_KIND = NodeKind.IRI | ||
5293 | ID_ALIAS = "spdxId" | ||
5294 | NAMED_INDIVIDUALS = { | ||
5295 | } | ||
5296 | |||
5297 | @classmethod | ||
5298 | def _register_props(cls): | ||
5299 | super()._register_props() | ||
5300 | # Indicates whether the system can perform a decision or action without human | ||
5301 | # involvement or guidance. | ||
5302 | cls._add_property( | ||
5303 | "ai_autonomyType", | ||
5304 | EnumProp([ | ||
5305 | ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/no", "no"), | ||
5306 | ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/noAssertion", "noAssertion"), | ||
5307 | ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/yes", "yes"), | ||
5308 | ]), | ||
5309 | iri="https://spdx.org/rdf/3.0.1/terms/AI/autonomyType", | ||
5310 | compact="ai_autonomyType", | ||
5311 | ) | ||
5312 | # Captures the domain in which the AI package can be used. | ||
5313 | cls._add_property( | ||
5314 | "ai_domain", | ||
5315 | ListProp(StringProp()), | ||
5316 | iri="https://spdx.org/rdf/3.0.1/terms/AI/domain", | ||
5317 | compact="ai_domain", | ||
5318 | ) | ||
5319 | # Indicates the amount of energy consumption incurred by an AI model. | ||
5320 | cls._add_property( | ||
5321 | "ai_energyConsumption", | ||
5322 | ObjectProp(ai_EnergyConsumption, False), | ||
5323 | iri="https://spdx.org/rdf/3.0.1/terms/AI/energyConsumption", | ||
5324 | compact="ai_energyConsumption", | ||
5325 | ) | ||
5326 | # Records a hyperparameter used to build the AI model contained in the AI | ||
5327 | # package. | ||
5328 | cls._add_property( | ||
5329 | "ai_hyperparameter", | ||
5330 | ListProp(ObjectProp(DictionaryEntry, False)), | ||
5331 | iri="https://spdx.org/rdf/3.0.1/terms/AI/hyperparameter", | ||
5332 | compact="ai_hyperparameter", | ||
5333 | ) | ||
5334 | # Provides relevant information about the AI software, not including the model | ||
5335 | # description. | ||
5336 | cls._add_property( | ||
5337 | "ai_informationAboutApplication", | ||
5338 | StringProp(), | ||
5339 | iri="https://spdx.org/rdf/3.0.1/terms/AI/informationAboutApplication", | ||
5340 | compact="ai_informationAboutApplication", | ||
5341 | ) | ||
5342 | # Describes relevant information about different steps of the training process. | ||
5343 | cls._add_property( | ||
5344 | "ai_informationAboutTraining", | ||
5345 | StringProp(), | ||
5346 | iri="https://spdx.org/rdf/3.0.1/terms/AI/informationAboutTraining", | ||
5347 | compact="ai_informationAboutTraining", | ||
5348 | ) | ||
5349 | # Captures a limitation of the AI software. | ||
5350 | cls._add_property( | ||
5351 | "ai_limitation", | ||
5352 | StringProp(), | ||
5353 | iri="https://spdx.org/rdf/3.0.1/terms/AI/limitation", | ||
5354 | compact="ai_limitation", | ||
5355 | ) | ||
5356 | # Records the measurement of prediction quality of the AI model. | ||
5357 | cls._add_property( | ||
5358 | "ai_metric", | ||
5359 | ListProp(ObjectProp(DictionaryEntry, False)), | ||
5360 | iri="https://spdx.org/rdf/3.0.1/terms/AI/metric", | ||
5361 | compact="ai_metric", | ||
5362 | ) | ||
5363 | # Captures the threshold that was used for computation of a metric described in | ||
5364 | # the metric field. | ||
5365 | cls._add_property( | ||
5366 | "ai_metricDecisionThreshold", | ||
5367 | ListProp(ObjectProp(DictionaryEntry, False)), | ||
5368 | iri="https://spdx.org/rdf/3.0.1/terms/AI/metricDecisionThreshold", | ||
5369 | compact="ai_metricDecisionThreshold", | ||
5370 | ) | ||
5371 | # Describes all the preprocessing steps applied to the training data before the | ||
5372 | # model training. | ||
5373 | cls._add_property( | ||
5374 | "ai_modelDataPreprocessing", | ||
5375 | ListProp(StringProp()), | ||
5376 | iri="https://spdx.org/rdf/3.0.1/terms/AI/modelDataPreprocessing", | ||
5377 | compact="ai_modelDataPreprocessing", | ||
5378 | ) | ||
5379 | # Describes methods that can be used to explain the results from the AI model. | ||
5380 | cls._add_property( | ||
5381 | "ai_modelExplainability", | ||
5382 | ListProp(StringProp()), | ||
5383 | iri="https://spdx.org/rdf/3.0.1/terms/AI/modelExplainability", | ||
5384 | compact="ai_modelExplainability", | ||
5385 | ) | ||
5386 | # Records the results of general safety risk assessment of the AI system. | ||
5387 | cls._add_property( | ||
5388 | "ai_safetyRiskAssessment", | ||
5389 | EnumProp([ | ||
5390 | ("https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/high", "high"), | ||
5391 | ("https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/low", "low"), | ||
5392 | ("https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/medium", "medium"), | ||
5393 | ("https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/serious", "serious"), | ||
5394 | ]), | ||
5395 | iri="https://spdx.org/rdf/3.0.1/terms/AI/safetyRiskAssessment", | ||
5396 | compact="ai_safetyRiskAssessment", | ||
5397 | ) | ||
5398 | # Captures a standard that is being complied with. | ||
5399 | cls._add_property( | ||
5400 | "ai_standardCompliance", | ||
5401 | ListProp(StringProp()), | ||
5402 | iri="https://spdx.org/rdf/3.0.1/terms/AI/standardCompliance", | ||
5403 | compact="ai_standardCompliance", | ||
5404 | ) | ||
5405 | # Records the type of the model used in the AI software. | ||
5406 | cls._add_property( | ||
5407 | "ai_typeOfModel", | ||
5408 | ListProp(StringProp()), | ||
5409 | iri="https://spdx.org/rdf/3.0.1/terms/AI/typeOfModel", | ||
5410 | compact="ai_typeOfModel", | ||
5411 | ) | ||
5412 | # Records if sensitive personal information is used during model training or | ||
5413 | # could be used during the inference. | ||
5414 | cls._add_property( | ||
5415 | "ai_useSensitivePersonalInformation", | ||
5416 | EnumProp([ | ||
5417 | ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/no", "no"), | ||
5418 | ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/noAssertion", "noAssertion"), | ||
5419 | ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/yes", "yes"), | ||
5420 | ]), | ||
5421 | iri="https://spdx.org/rdf/3.0.1/terms/AI/useSensitivePersonalInformation", | ||
5422 | compact="ai_useSensitivePersonalInformation", | ||
5423 | ) | ||
5424 | |||
5425 | |||
5426 | # Specifies a data package and its associated information. | ||
5427 | @register("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetPackage", compact_type="dataset_DatasetPackage", abstract=False) | ||
5428 | class dataset_DatasetPackage(software_Package): | ||
5429 | NODE_KIND = NodeKind.IRI | ||
5430 | ID_ALIAS = "spdxId" | ||
5431 | NAMED_INDIVIDUALS = { | ||
5432 | } | ||
5433 | |||
5434 | @classmethod | ||
5435 | def _register_props(cls): | ||
5436 | super()._register_props() | ||
5437 | # Describes the anonymization methods used. | ||
5438 | cls._add_property( | ||
5439 | "dataset_anonymizationMethodUsed", | ||
5440 | ListProp(StringProp()), | ||
5441 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/anonymizationMethodUsed", | ||
5442 | compact="dataset_anonymizationMethodUsed", | ||
5443 | ) | ||
5444 | # Describes the confidentiality level of the data points contained in the dataset. | ||
5445 | cls._add_property( | ||
5446 | "dataset_confidentialityLevel", | ||
5447 | EnumProp([ | ||
5448 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/amber", "amber"), | ||
5449 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/clear", "clear"), | ||
5450 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/green", "green"), | ||
5451 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/red", "red"), | ||
5452 | ]), | ||
5453 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/confidentialityLevel", | ||
5454 | compact="dataset_confidentialityLevel", | ||
5455 | ) | ||
5456 | # Describes how the dataset was collected. | ||
5457 | cls._add_property( | ||
5458 | "dataset_dataCollectionProcess", | ||
5459 | StringProp(), | ||
5460 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/dataCollectionProcess", | ||
5461 | compact="dataset_dataCollectionProcess", | ||
5462 | ) | ||
5463 | # Describes the preprocessing steps that were applied to the raw data to create the given dataset. | ||
5464 | cls._add_property( | ||
5465 | "dataset_dataPreprocessing", | ||
5466 | ListProp(StringProp()), | ||
5467 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/dataPreprocessing", | ||
5468 | compact="dataset_dataPreprocessing", | ||
5469 | ) | ||
5470 | # Describes the availability of a dataset. | ||
5471 | cls._add_property( | ||
5472 | "dataset_datasetAvailability", | ||
5473 | EnumProp([ | ||
5474 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/clickthrough", "clickthrough"), | ||
5475 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/directDownload", "directDownload"), | ||
5476 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/query", "query"), | ||
5477 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/registration", "registration"), | ||
5478 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/scrapingScript", "scrapingScript"), | ||
5479 | ]), | ||
5480 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/datasetAvailability", | ||
5481 | compact="dataset_datasetAvailability", | ||
5482 | ) | ||
5483 | # Describes potentially noisy elements of the dataset. | ||
5484 | cls._add_property( | ||
5485 | "dataset_datasetNoise", | ||
5486 | StringProp(), | ||
5487 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/datasetNoise", | ||
5488 | compact="dataset_datasetNoise", | ||
5489 | ) | ||
5490 | # Captures the size of the dataset. | ||
5491 | cls._add_property( | ||
5492 | "dataset_datasetSize", | ||
5493 | NonNegativeIntegerProp(), | ||
5494 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/datasetSize", | ||
5495 | compact="dataset_datasetSize", | ||
5496 | ) | ||
5497 | # Describes the type of the given dataset. | ||
5498 | cls._add_property( | ||
5499 | "dataset_datasetType", | ||
5500 | ListProp(EnumProp([ | ||
5501 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/audio", "audio"), | ||
5502 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/categorical", "categorical"), | ||
5503 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/graph", "graph"), | ||
5504 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/image", "image"), | ||
5505 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/noAssertion", "noAssertion"), | ||
5506 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/numeric", "numeric"), | ||
5507 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/other", "other"), | ||
5508 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/sensor", "sensor"), | ||
5509 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/structured", "structured"), | ||
5510 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/syntactic", "syntactic"), | ||
5511 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/text", "text"), | ||
5512 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timeseries", "timeseries"), | ||
5513 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timestamp", "timestamp"), | ||
5514 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/video", "video"), | ||
5515 | ])), | ||
5516 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/datasetType", | ||
5517 | min_count=1, | ||
5518 | compact="dataset_datasetType", | ||
5519 | ) | ||
5520 | # Describes a mechanism to update the dataset. | ||
5521 | cls._add_property( | ||
5522 | "dataset_datasetUpdateMechanism", | ||
5523 | StringProp(), | ||
5524 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/datasetUpdateMechanism", | ||
5525 | compact="dataset_datasetUpdateMechanism", | ||
5526 | ) | ||
5527 | # Describes if any sensitive personal information is present in the dataset. | ||
5528 | cls._add_property( | ||
5529 | "dataset_hasSensitivePersonalInformation", | ||
5530 | EnumProp([ | ||
5531 | ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/no", "no"), | ||
5532 | ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/noAssertion", "noAssertion"), | ||
5533 | ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/yes", "yes"), | ||
5534 | ]), | ||
5535 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/hasSensitivePersonalInformation", | ||
5536 | compact="dataset_hasSensitivePersonalInformation", | ||
5537 | ) | ||
5538 | # Describes what the given dataset should be used for. | ||
5539 | cls._add_property( | ||
5540 | "dataset_intendedUse", | ||
5541 | StringProp(), | ||
5542 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/intendedUse", | ||
5543 | compact="dataset_intendedUse", | ||
5544 | ) | ||
5545 | # Records the biases that the dataset is known to encompass. | ||
5546 | cls._add_property( | ||
5547 | "dataset_knownBias", | ||
5548 | ListProp(StringProp()), | ||
5549 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/knownBias", | ||
5550 | compact="dataset_knownBias", | ||
5551 | ) | ||
5552 | # Describes a sensor used for collecting the data. | ||
5553 | cls._add_property( | ||
5554 | "dataset_sensor", | ||
5555 | ListProp(ObjectProp(DictionaryEntry, False)), | ||
5556 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/sensor", | ||
5557 | compact="dataset_sensor", | ||
5558 | ) | ||
5559 | |||
5560 | |||
5561 | """Format Guard""" | ||
5562 | # fmt: on | ||
5563 | |||
5564 | |||
5565 | def main(): | ||
5566 | import argparse | ||
5567 | from pathlib import Path | ||
5568 | |||
5569 | parser = argparse.ArgumentParser(description="Python SHACL model test") | ||
5570 | parser.add_argument("infile", type=Path, help="Input file") | ||
5571 | parser.add_argument("--print", action="store_true", help="Print object tree") | ||
5572 | parser.add_argument("--outfile", type=Path, help="Output file") | ||
5573 | |||
5574 | args = parser.parse_args() | ||
5575 | |||
5576 | objectset = SHACLObjectSet() | ||
5577 | with args.infile.open("r") as f: | ||
5578 | d = JSONLDDeserializer() | ||
5579 | d.read(f, objectset) | ||
5580 | |||
5581 | if args.print: | ||
5582 | print_tree(objectset.objects) | ||
5583 | |||
5584 | if args.outfile: | ||
5585 | with args.outfile.open("wb") as f: | ||
5586 | s = JSONLDSerializer() | ||
5587 | s.write(objectset, f) | ||
5588 | |||
5589 | return 0 | ||
5590 | |||
5591 | |||
5592 | if __name__ == "__main__": | ||
5593 | sys.exit(main()) | ||
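# Example invocation of the self-test entry point above: read a JSON-LD
# document, print the object tree, and round-trip it back out:
#
#     python3 spdx30.py doc.spdx.json --print --outfile roundtrip.spdx.json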
diff --git a/meta/lib/oe/spdx30_tasks.py b/meta/lib/oe/spdx30_tasks.py new file mode 100644 index 0000000000..5d9f3168d9 --- /dev/null +++ b/meta/lib/oe/spdx30_tasks.py | |||
@@ -0,0 +1,1368 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | import json | ||
8 | import oe.cve_check | ||
9 | import oe.packagedata | ||
10 | import oe.patch | ||
11 | import oe.sbom30 | ||
12 | import oe.spdx30 | ||
13 | import oe.spdx_common | ||
14 | import oe.sdk | ||
15 | import os | ||
16 | |||
17 | from contextlib import contextmanager | ||
18 | from datetime import datetime, timezone | ||
19 | from pathlib import Path | ||
20 | |||
21 | |||
22 | def walk_error(err): | ||
23 | bb.error(f"ERROR walking {err.filename}: {err}") | ||
24 | |||
25 | |||
26 | def set_timestamp_now(d, o, prop): | ||
27 | if d.getVar("SPDX_INCLUDE_TIMESTAMPS") == "1": | ||
28 | setattr(o, prop, datetime.now(timezone.utc)) | ||
29 | else: | ||
30 | # Doing this helps to validate that the property actually exists, and | ||
31 | # also that it is not mandatory | ||
32 | delattr(o, prop) | ||
33 | |||
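# Example: unless SPDX_INCLUDE_TIMESTAMPS == "1", a call such as
# set_timestamp_now(d, spdx_package, "builtTime") deletes the builtTime
# property instead of recording wall-clock time, keeping the output
# reproducible.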
34 | |||
35 | def add_license_expression(d, objset, license_expression, license_data): | ||
36 | simple_license_text = {} | ||
37 | license_text_map = {} | ||
38 | license_ref_idx = 0 | ||
39 | |||
40 | def add_license_text(name): | ||
41 | nonlocal objset | ||
42 | nonlocal simple_license_text | ||
43 | |||
44 | if name in simple_license_text: | ||
45 | return simple_license_text[name] | ||
46 | |||
47 | lic = objset.find_filter( | ||
48 | oe.spdx30.simplelicensing_SimpleLicensingText, | ||
49 | name=name, | ||
50 | ) | ||
51 | |||
52 | if lic is not None: | ||
53 | simple_license_text[name] = lic | ||
54 | return lic | ||
55 | |||
56 | lic = objset.add( | ||
57 | oe.spdx30.simplelicensing_SimpleLicensingText( | ||
58 | _id=objset.new_spdxid("license-text", name), | ||
59 | creationInfo=objset.doc.creationInfo, | ||
60 | name=name, | ||
61 | ) | ||
62 | ) | ||
63 | objset.set_element_alias(lic) | ||
64 | simple_license_text[name] = lic | ||
65 | |||
66 | if name == "PD": | ||
67 | lic.simplelicensing_licenseText = "Software released to the public domain" | ||
68 | return lic | ||
69 | |||
70 | # Search for the license in COMMON_LICENSE_DIR and LICENSE_PATH | ||
71 | for directory in [d.getVar("COMMON_LICENSE_DIR")] + ( | ||
72 | d.getVar("LICENSE_PATH") or "" | ||
73 | ).split(): | ||
74 | try: | ||
75 | with (Path(directory) / name).open(errors="replace") as f: | ||
76 | lic.simplelicensing_licenseText = f.read() | ||
77 | return lic | ||
78 | |||
79 | except FileNotFoundError: | ||
80 | pass | ||
81 | |||
82 | # If it's not SPDX or PD, then NO_GENERIC_LICENSE must be set | ||
83 | filename = d.getVarFlag("NO_GENERIC_LICENSE", name) | ||
84 | if filename: | ||
85 | filename = d.expand("${S}/" + filename) | ||
86 | with open(filename, errors="replace") as f: | ||
87 | lic.simplelicensing_licenseText = f.read() | ||
88 | return lic | ||
89 | else: | ||
90 | bb.fatal("Cannot find any text for license %s" % name) | ||
91 | |||
92 | def convert(l): | ||
93 | nonlocal license_text_map | ||
94 | nonlocal license_ref_idx | ||
95 | |||
96 | if l == "(" or l == ")": | ||
97 | return l | ||
98 | |||
99 | if l == "&": | ||
100 | return "AND" | ||
101 | |||
102 | if l == "|": | ||
103 | return "OR" | ||
104 | |||
105 | if l == "CLOSED": | ||
106 | return "NONE" | ||
107 | |||
108 | spdx_license = d.getVarFlag("SPDXLICENSEMAP", l) or l | ||
109 | if spdx_license in license_data["licenses"]: | ||
110 | return spdx_license | ||
111 | |||
112 | spdx_license = "LicenseRef-" + l | ||
113 | if spdx_license not in license_text_map: | ||
114 | license_text_map[spdx_license] = oe.sbom30.get_element_link_id( | ||
115 | add_license_text(l) | ||
116 | ) | ||
117 | |||
118 | return spdx_license | ||
119 | |||
120 | lic_split = ( | ||
121 | license_expression.replace("(", " ( ") | ||
122 | .replace(")", " ) ") | ||
123 | .replace("|", " | ") | ||
124 | .replace("&", " & ") | ||
125 | .split() | ||
126 | ) | ||
127 | spdx_license_expression = " ".join(convert(l) for l in lic_split) | ||
128 | |||
129 | o = objset.new_license_expression( | ||
130 | spdx_license_expression, license_data, license_text_map | ||
131 | ) | ||
132 | objset.set_element_alias(o) | ||
133 | return o | ||
134 | |||
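# Standalone sketch of the conversion performed by convert()/lic_split above,
# with the SPDXLICENSEMAP and license_data lookups stubbed out by a fixed set
# of known SPDX identifiers (the real code consults the loaded license data):

def _to_spdx_expression_sketch(license_expression, known=("GPL-2.0-only", "MIT")):
    def convert(tok):
        if tok in ("(", ")"):
            return tok
        if tok == "&":
            return "AND"
        if tok == "|":
            return "OR"
        if tok == "CLOSED":
            return "NONE"
        # Unknown names become LicenseRef- identifiers backed by license text
        return tok if tok in known else "LicenseRef-" + tok

    tokens = (
        license_expression.replace("(", " ( ")
        .replace(")", " ) ")
        .replace("|", " | ")
        .replace("&", " & ")
        .split()
    )
    return " ".join(convert(t) for t in tokens)

# _to_spdx_expression_sketch("GPL-2.0-only & (MIT | Proprietary)")
# -> 'GPL-2.0-only AND ( MIT OR LicenseRef-Proprietary )'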
135 | |||
136 | def add_package_files( | ||
137 | d, | ||
138 | objset, | ||
139 | topdir, | ||
140 | get_spdxid, | ||
141 | get_purposes, | ||
142 | license_data=None, | ||
143 | *, | ||
144 | archive=None, | ||
145 | ignore_dirs=[], | ||
146 | ignore_top_level_dirs=[], | ||
147 | ): | ||
148 | source_date_epoch = d.getVar("SOURCE_DATE_EPOCH") | ||
149 | if source_date_epoch: | ||
150 | source_date_epoch = int(source_date_epoch) | ||
151 | |||
152 | spdx_files = set() | ||
153 | |||
154 | file_counter = 1 | ||
155 | if not os.path.exists(topdir): | ||
156 | bb.note(f"Skip {topdir}") | ||
157 | return spdx_files | ||
158 | |||
159 | check_compiled_sources = d.getVar("SPDX_INCLUDE_COMPILED_SOURCES") == "1" | ||
160 | if check_compiled_sources: | ||
161 | compiled_sources, types = oe.spdx_common.get_compiled_sources(d) | ||
162 | bb.debug(1, f"Total compiled files: {len(compiled_sources)}") | ||
163 | |||
164 | for subdir, dirs, files in os.walk(topdir, onerror=walk_error): | ||
165 | dirs[:] = [d for d in dirs if d not in ignore_dirs] | ||
166 | if subdir == str(topdir): | ||
167 | dirs[:] = [d for d in dirs if d not in ignore_top_level_dirs] | ||
168 | |||
169 | dirs.sort() | ||
170 | files.sort() | ||
171 | for file in files: | ||
172 | filepath = Path(subdir) / file | ||
173 | if filepath.is_symlink() or not filepath.is_file(): | ||
174 | continue | ||
175 | |||
176 | filename = str(filepath.relative_to(topdir)) | ||
177 | file_purposes = get_purposes(filepath) | ||
178 | |||
179 | # Check if file is compiled | ||
180 | if check_compiled_sources: | ||
181 | if not oe.spdx_common.is_compiled_source(filename, compiled_sources, types): | ||
182 | continue | ||
183 | |||
184 | spdx_file = objset.new_file( | ||
185 | get_spdxid(file_counter), | ||
186 | filename, | ||
187 | filepath, | ||
188 | purposes=file_purposes, | ||
189 | ) | ||
190 | spdx_files.add(spdx_file) | ||
191 | |||
192 | if ( | ||
193 | oe.spdx30.software_SoftwarePurpose.source in file_purposes | ||
194 | and license_data is not None | ||
195 | ): | ||
196 | objset.scan_declared_licenses(spdx_file, filepath, license_data) | ||
197 | |||
198 | if archive is not None: | ||
199 | with filepath.open("rb") as f: | ||
200 | info = archive.gettarinfo(fileobj=f) | ||
201 | info.name = filename | ||
202 | info.uid = 0 | ||
203 | info.gid = 0 | ||
204 | info.uname = "root" | ||
205 | info.gname = "root" | ||
206 | |||
207 | if source_date_epoch is not None and info.mtime > source_date_epoch: | ||
208 | info.mtime = source_date_epoch | ||
209 | |||
210 | archive.addfile(info, f) | ||
211 | |||
212 | file_counter += 1 | ||
213 | |||
214 | bb.debug(1, "Added %d files to %s" % (len(spdx_files), objset.doc._id)) | ||
215 | |||
216 | return spdx_files | ||
217 | |||
218 | |||
219 | def get_package_sources_from_debug( | ||
220 | d, package, package_files, sources, source_hash_cache | ||
221 | ): | ||
222 | def file_path_match(file_path, pkg_file): | ||
223 | if file_path.lstrip("/") == pkg_file.name.lstrip("/"): | ||
224 | return True | ||
225 | |||
226 | for e in pkg_file.extension: | ||
227 | if isinstance(e, oe.sbom30.OEFileNameAliasExtension): | ||
228 | for a in e.aliases: | ||
229 | if file_path.lstrip("/") == a.lstrip("/"): | ||
230 | return True | ||
231 | |||
232 | return False | ||
233 | |||
234 | debug_search_paths = [ | ||
235 | Path(d.getVar("SPDXWORK")), | ||
236 | Path(d.getVar("PKGD")), | ||
237 | Path(d.getVar("STAGING_DIR_TARGET")), | ||
238 | Path(d.getVar("STAGING_DIR_NATIVE")), | ||
239 | Path(d.getVar("STAGING_KERNEL_DIR")), | ||
240 | ] | ||
241 | |||
242 | pkg_data = oe.packagedata.read_subpkgdata_extended(package, d) | ||
243 | |||
244 | if pkg_data is None: | ||
245 | return | ||
246 | |||
247 | dep_source_files = set() | ||
248 | |||
249 | for file_path, file_data in pkg_data["files_info"].items(): | ||
250 | if "debugsrc" not in file_data: | ||
251 | continue | ||
252 | |||
253 | if not any(file_path_match(file_path, pkg_file) for pkg_file in package_files): | ||
254 | bb.fatal( | ||
255 | "No package file found for %s in %s; SPDX found: %s" | ||
256 | % (str(file_path), package, " ".join(p.name for p in package_files)) | ||
257 | ) | ||
258 | continue | ||
259 | |||
260 | for debugsrc in file_data["debugsrc"]: | ||
261 | for search in debug_search_paths: | ||
262 | if debugsrc.startswith("/usr/src/kernel"): | ||
263 | debugsrc_path = search / debugsrc.replace("/usr/src/kernel/", "") | ||
264 | else: | ||
265 | debugsrc_path = search / debugsrc.lstrip("/") | ||
266 | |||
267 | if debugsrc_path in source_hash_cache: | ||
268 | file_sha256 = source_hash_cache[debugsrc_path] | ||
269 | if file_sha256 is None: | ||
270 | continue | ||
271 | else: | ||
272 | # We can only hash regular files; skip directories, links, etc. | ||
273 | if not debugsrc_path.is_file(): | ||
274 | source_hash_cache[debugsrc_path] = None | ||
275 | continue | ||
276 | |||
277 | file_sha256 = bb.utils.sha256_file(debugsrc_path) | ||
278 | source_hash_cache[debugsrc_path] = file_sha256 | ||
279 | |||
280 | if file_sha256 in sources: | ||
281 | source_file = sources[file_sha256] | ||
282 | dep_source_files.add(source_file) | ||
283 | else: | ||
284 | bb.debug( | ||
285 | 1, | ||
286 | "Debug source %s with SHA256 %s not found in any dependency" | ||
287 | % (str(debugsrc_path), file_sha256), | ||
288 | ) | ||
289 | break | ||
290 | else: | ||
291 | bb.debug(1, "Debug source %s not found" % debugsrc) | ||
292 | |||
293 | return dep_source_files | ||
294 | |||
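# For reference, pkg_data["files_info"] (from read_subpkgdata_extended) maps
# installed file paths to their metadata; the "debugsrc" lists consumed above
# look roughly like this (illustrative values):
#
#     {
#         "/usr/bin/foo": {
#             "debugsrc": ["/usr/src/debug/foo/1.0/src/main.c"],
#         },
#     }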
295 | |||
296 | def collect_dep_objsets(d, build): | ||
297 | deps = oe.spdx_common.get_spdx_deps(d) | ||
298 | |||
299 | dep_objsets = [] | ||
300 | dep_builds = set() | ||
301 | |||
302 | dep_build_spdxids = set() | ||
303 | for dep in deps: | ||
304 | bb.debug(1, "Fetching SPDX for dependency %s" % (dep.pn)) | ||
305 | dep_build, dep_objset = oe.sbom30.find_root_obj_in_jsonld( | ||
306 | d, "recipes", "recipe-" + dep.pn, oe.spdx30.build_Build | ||
307 | ) | ||
308 | # If the dependency is part of the taskhash, return it to be linked | ||
309 | # against. Otherwise, it cannot be linked against because this recipe | ||
310 | # will not be rebuilt if the dependency changes | ||
311 | if dep.in_taskhash: | ||
312 | dep_objsets.append(dep_objset) | ||
313 | |||
314 | # The build _can_ be linked against (by alias) | ||
315 | dep_builds.add(dep_build) | ||
316 | |||
317 | return dep_objsets, dep_builds | ||
318 | |||
319 | |||
320 | def index_sources_by_hash(sources, dest): | ||
321 | for s in sources: | ||
322 | if not isinstance(s, oe.spdx30.software_File): | ||
323 | continue | ||
324 | |||
325 | if s.software_primaryPurpose != oe.spdx30.software_SoftwarePurpose.source: | ||
326 | continue | ||
327 | |||
328 | for v in s.verifiedUsing: | ||
329 | if v.algorithm == oe.spdx30.HashAlgorithm.sha256: | ||
330 | if v.hashValue not in dest: | ||
331 | dest[v.hashValue] = s | ||
332 | break | ||
333 | else: | ||
334 | bb.fatal(f"No SHA256 found for {s.name}") | ||
335 | |||
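# Illustrative usage: build a SHA256 -> software_File index over a document's
# source files ("objset" is assumed to be a loaded oe.sbom30.ObjectSet):
#
#     sources_by_sha256 = {}
#     index_sources_by_hash(
#         objset.foreach_type(oe.spdx30.software_File), sources_by_sha256
#     )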
336 | |||
337 | def collect_dep_sources(dep_objsets, dest): | ||
338 | for objset in dep_objsets: | ||
339 | # Don't collect sources from native recipes as they | ||
340 | # also match non-native sources. | ||
341 | if objset.is_native(): | ||
342 | continue | ||
343 | |||
344 | bb.debug(1, "Fetching Sources for dependency %s" % (objset.doc.name)) | ||
345 | |||
346 | dep_build = objset.find_root(oe.spdx30.build_Build) | ||
347 | if not dep_build: | ||
348 | bb.fatal("Unable to find a build") | ||
349 | |||
350 | for e in objset.foreach_type(oe.spdx30.Relationship): | ||
351 | if dep_build is not e.from_: | ||
352 | continue | ||
353 | |||
354 | if e.relationshipType != oe.spdx30.RelationshipType.hasInput: | ||
355 | continue | ||
356 | |||
357 | index_sources_by_hash(e.to, dest) | ||
358 | |||
359 | |||
360 | def add_download_files(d, objset): | ||
361 | inputs = set() | ||
362 | |||
363 | urls = d.getVar("SRC_URI").split() | ||
364 | fetch = bb.fetch2.Fetch(urls, d) | ||
365 | |||
366 | for download_idx, src_uri in enumerate(urls): | ||
367 | fd = fetch.ud[src_uri] | ||
368 | |||
369 | file_name = os.path.basename(fetch.localpath(src_uri)) | ||
370 | if oe.patch.patch_path(src_uri, fetch, "", expand=False): | ||
371 | primary_purpose = oe.spdx30.software_SoftwarePurpose.patch | ||
372 | else: | ||
373 | primary_purpose = oe.spdx30.software_SoftwarePurpose.source | ||
374 | |||
375 | if fd.type == "file": | ||
376 | if os.path.isdir(fd.localpath): | ||
377 | walk_idx = 1 | ||
378 | for root, dirs, files in os.walk(fd.localpath, onerror=walk_error): | ||
379 | dirs.sort() | ||
380 | files.sort() | ||
381 | for f in files: | ||
382 | f_path = os.path.join(root, f) | ||
383 | if os.path.islink(f_path): | ||
384 | # TODO: SPDX doesn't support symlinks yet | ||
385 | continue | ||
386 | |||
387 | file = objset.new_file( | ||
388 | objset.new_spdxid( | ||
389 | "source", str(download_idx + 1), str(walk_idx) | ||
390 | ), | ||
391 | os.path.join( | ||
392 | file_name, os.path.relpath(f_path, fd.localpath) | ||
393 | ), | ||
394 | f_path, | ||
395 | purposes=[primary_purpose], | ||
396 | ) | ||
397 | |||
398 | inputs.add(file) | ||
399 | walk_idx += 1 | ||
400 | |||
401 | else: | ||
402 | file = objset.new_file( | ||
403 | objset.new_spdxid("source", str(download_idx + 1)), | ||
404 | file_name, | ||
405 | fd.localpath, | ||
406 | purposes=[primary_purpose], | ||
407 | ) | ||
408 | inputs.add(file) | ||
409 | |||
410 | else: | ||
411 | dl = objset.add( | ||
412 | oe.spdx30.software_Package( | ||
413 | _id=objset.new_spdxid("source", str(download_idx + 1)), | ||
414 | creationInfo=objset.doc.creationInfo, | ||
415 | name=file_name, | ||
416 | software_primaryPurpose=primary_purpose, | ||
417 | software_downloadLocation=oe.spdx_common.fetch_data_to_uri( | ||
418 | fd, fd.name | ||
419 | ), | ||
420 | ) | ||
421 | ) | ||
422 | |||
423 | if fd.method.supports_checksum(fd): | ||
424 | # TODO Need something better than hard coding this | ||
425 | for checksum_id in ["sha256", "sha1"]: | ||
426 | expected_checksum = getattr( | ||
427 | fd, "%s_expected" % checksum_id, None | ||
428 | ) | ||
429 | if expected_checksum is None: | ||
430 | continue | ||
431 | |||
432 | dl.verifiedUsing.append( | ||
433 | oe.spdx30.Hash( | ||
434 | algorithm=getattr(oe.spdx30.HashAlgorithm, checksum_id), | ||
435 | hashValue=expected_checksum, | ||
436 | ) | ||
437 | ) | ||
438 | |||
439 | inputs.add(dl) | ||
440 | |||
441 | return inputs | ||
442 | |||
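# Illustrative outcomes of add_download_files() for two SRC_URI entries:
#
#     SRC_URI = "https://example.com/foo-1.0.tar.gz \
#                file://fix-build.patch"
#
# The remote tarball becomes a software_Package whose downloadLocation is the
# fetch URI (plus any expected sha256/sha1 recorded as verifiedUsing hashes),
# while the local patch becomes a software_File with the "patch" purpose.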
443 | |||
444 | def set_purposes(d, element, *var_names, force_purposes=[]): | ||
445 | purposes = force_purposes[:] | ||
446 | |||
447 | for var_name in var_names: | ||
448 | val = d.getVar(var_name) | ||
449 | if val: | ||
450 | purposes.extend(val.split()) | ||
451 | break | ||
452 | |||
453 | if not purposes: | ||
454 | bb.warn("No SPDX purposes found in %s" % " ".join(var_names)) | ||
455 | return | ||
456 | |||
457 | element.software_primaryPurpose = getattr( | ||
458 | oe.spdx30.software_SoftwarePurpose, purposes[0] | ||
459 | ) | ||
460 | element.software_additionalPurpose = [ | ||
461 | getattr(oe.spdx30.software_SoftwarePurpose, p) for p in purposes[1:] | ||
462 | ] | ||
463 | |||
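# Example: set_purposes(d, element, "SPDX_PACKAGE_ADDITIONAL_PURPOSE",
# force_purposes=["install"]) with SPDX_PACKAGE_ADDITIONAL_PURPOSE = "library"
# yields:
#
#     element.software_primaryPurpose    = software_SoftwarePurpose.install
#     element.software_additionalPurpose = [software_SoftwarePurpose.library]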
464 | |||
465 | def create_spdx(d): | ||
466 | def set_var_field(var, obj, name, package=None): | ||
467 | val = None | ||
468 | if package: | ||
469 | val = d.getVar("%s:%s" % (var, package)) | ||
470 | |||
471 | if not val: | ||
472 | val = d.getVar(var) | ||
473 | |||
474 | if val: | ||
475 | setattr(obj, name, val) | ||
476 | |||
477 | license_data = oe.spdx_common.load_spdx_license_data(d) | ||
478 | |||
479 | deploydir = Path(d.getVar("SPDXDEPLOY")) | ||
480 | deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX")) | ||
481 | spdx_workdir = Path(d.getVar("SPDXWORK")) | ||
482 | include_sources = d.getVar("SPDX_INCLUDE_SOURCES") == "1" | ||
483 | pkg_arch = d.getVar("SSTATE_PKGARCH") | ||
484 | is_native = bb.data.inherits_class("native", d) or bb.data.inherits_class( | ||
485 | "cross", d | ||
486 | ) | ||
487 | include_vex = d.getVar("SPDX_INCLUDE_VEX") | ||
488 | if include_vex not in ("none", "current", "all"): | ||
489 | bb.fatal("SPDX_INCLUDE_VEX must be one of 'none', 'current', 'all'") | ||
490 | |||
491 | build_objset = oe.sbom30.ObjectSet.new_objset(d, "recipe-" + d.getVar("PN")) | ||
492 | |||
493 | build = build_objset.new_task_build("recipe", "recipe") | ||
494 | build_objset.set_element_alias(build) | ||
495 | |||
496 | build_objset.doc.rootElement.append(build) | ||
497 | |||
498 | build_objset.set_is_native(is_native) | ||
499 | |||
500 | for var in (d.getVar("SPDX_CUSTOM_ANNOTATION_VARS") or "").split(): | ||
501 | new_annotation( | ||
502 | d, | ||
503 | build_objset, | ||
504 | build, | ||
505 | "%s=%s" % (var, d.getVar(var)), | ||
506 | oe.spdx30.AnnotationType.other, | ||
507 | ) | ||
508 | |||
509 | build_inputs = set() | ||
510 | |||
511 | # Add CVEs | ||
512 | cve_by_status = {} | ||
513 | if include_vex != "none": | ||
514 | patched_cves = oe.cve_check.get_patched_cves(d) | ||
515 | for cve, patched_cve in patched_cves.items(): | ||
516 | decoded_status = { | ||
517 | "mapping": patched_cve["abbrev-status"], | ||
518 | "detail": patched_cve["status"], | ||
519 | "description": patched_cve.get("justification", None) | ||
520 | } | ||
521 | |||
522 | # If this CVE is fixed upstream, skip it unless all CVEs were | ||
523 | # requested with SPDX_INCLUDE_VEX = "all". | ||
524 | if ( | ||
525 | include_vex != "all" | ||
526 | and "detail" in decoded_status | ||
527 | and decoded_status["detail"] | ||
528 | in ( | ||
529 | "fixed-version", | ||
530 | "cpe-stable-backport", | ||
531 | ) | ||
532 | ): | ||
533 | bb.debug(1, "Skipping %s since it is already fixed upstream" % cve) | ||
534 | continue | ||
535 | |||
536 | spdx_cve = build_objset.new_cve_vuln(cve) | ||
537 | build_objset.set_element_alias(spdx_cve) | ||
538 | |||
539 | cve_by_status.setdefault(decoded_status["mapping"], {})[cve] = ( | ||
540 | spdx_cve, | ||
541 | decoded_status["detail"], | ||
542 | decoded_status["description"], | ||
543 | ) | ||
544 | |||
545 | cpe_ids = oe.cve_check.get_cpe_ids(d.getVar("CVE_PRODUCT"), d.getVar("CVE_VERSION")) | ||
546 | |||
547 | source_files = add_download_files(d, build_objset) | ||
548 | build_inputs |= source_files | ||
549 | |||
550 | recipe_spdx_license = add_license_expression( | ||
551 | d, build_objset, d.getVar("LICENSE"), license_data | ||
552 | ) | ||
553 | build_objset.new_relationship( | ||
554 | source_files, | ||
555 | oe.spdx30.RelationshipType.hasDeclaredLicense, | ||
556 | [oe.sbom30.get_element_link_id(recipe_spdx_license)], | ||
557 | ) | ||
558 | |||
559 | dep_sources = {} | ||
560 | if oe.spdx_common.process_sources(d) and include_sources: | ||
561 | bb.debug(1, "Adding source files to SPDX") | ||
562 | oe.spdx_common.get_patched_src(d) | ||
563 | |||
564 | files = add_package_files( | ||
565 | d, | ||
566 | build_objset, | ||
567 | spdx_workdir, | ||
568 | lambda file_counter: build_objset.new_spdxid( | ||
569 | "sourcefile", str(file_counter) | ||
570 | ), | ||
571 | lambda filepath: [oe.spdx30.software_SoftwarePurpose.source], | ||
572 | license_data, | ||
573 | ignore_dirs=[".git"], | ||
574 | ignore_top_level_dirs=["temp"], | ||
575 | archive=None, | ||
576 | ) | ||
577 | build_inputs |= files | ||
578 | index_sources_by_hash(files, dep_sources) | ||
579 | |||
580 | dep_objsets, dep_builds = collect_dep_objsets(d, build) | ||
581 | if dep_builds: | ||
582 | build_objset.new_scoped_relationship( | ||
583 | [build], | ||
584 | oe.spdx30.RelationshipType.dependsOn, | ||
585 | oe.spdx30.LifecycleScopeType.build, | ||
586 | sorted(oe.sbom30.get_element_link_id(b) for b in dep_builds), | ||
587 | ) | ||
588 | |||
589 | debug_source_ids = set() | ||
590 | source_hash_cache = {} | ||
591 | |||
592 | # Write out the package SPDX data now. It is not complete as we cannot | ||
593 | # write the runtime data yet, so write it to a staging area; a later task | ||
594 | # will write out the final collection | ||
595 | |||
596 | # TODO: Handle native recipe output | ||
597 | if not is_native: | ||
598 | bb.debug(1, "Collecting Dependency sources files") | ||
599 | collect_dep_sources(dep_objsets, dep_sources) | ||
600 | |||
601 | bb.build.exec_func("read_subpackage_metadata", d) | ||
602 | |||
603 | pkgdest = Path(d.getVar("PKGDEST")) | ||
604 | for package in d.getVar("PACKAGES").split(): | ||
605 | if not oe.packagedata.packaged(package, d): | ||
606 | continue | ||
607 | |||
608 | pkg_name = d.getVar("PKG:%s" % package) or package | ||
609 | |||
610 | bb.debug(1, "Creating SPDX for package %s" % pkg_name) | ||
611 | |||
612 | pkg_objset = oe.sbom30.ObjectSet.new_objset(d, "package-" + pkg_name) | ||
613 | |||
614 | spdx_package = pkg_objset.add_root( | ||
615 | oe.spdx30.software_Package( | ||
616 | _id=pkg_objset.new_spdxid("package", pkg_name), | ||
617 | creationInfo=pkg_objset.doc.creationInfo, | ||
618 | name=pkg_name, | ||
619 | software_packageVersion=d.getVar("SPDX_PACKAGE_VERSION"), | ||
620 | ) | ||
621 | ) | ||
622 | set_timestamp_now(d, spdx_package, "builtTime") | ||
623 | |||
624 | set_purposes( | ||
625 | d, | ||
626 | spdx_package, | ||
627 | "SPDX_PACKAGE_ADDITIONAL_PURPOSE:%s" % package, | ||
628 | "SPDX_PACKAGE_ADDITIONAL_PURPOSE", | ||
629 | force_purposes=["install"], | ||
630 | ) | ||
631 | |||
632 | supplier = build_objset.new_agent("SPDX_PACKAGE_SUPPLIER") | ||
633 | if supplier is not None: | ||
634 | spdx_package.suppliedBy = ( | ||
635 | supplier if isinstance(supplier, str) else supplier._id | ||
636 | ) | ||
637 | |||
638 | set_var_field( | ||
639 | "HOMEPAGE", spdx_package, "software_homePage", package=package | ||
640 | ) | ||
641 | set_var_field("SUMMARY", spdx_package, "summary", package=package) | ||
642 | set_var_field("DESCRIPTION", spdx_package, "description", package=package) | ||
643 | |||
644 | if d.getVar("SPDX_PACKAGE_URL:%s" % package) or d.getVar("SPDX_PACKAGE_URL"): | ||
645 | set_var_field( | ||
646 | "SPDX_PACKAGE_URL", | ||
647 | spdx_package, | ||
648 | "software_packageUrl", | ||
649 | package=package | ||
650 | ) | ||
651 | |||
652 | pkg_objset.new_scoped_relationship( | ||
653 | [oe.sbom30.get_element_link_id(build)], | ||
654 | oe.spdx30.RelationshipType.hasOutput, | ||
655 | oe.spdx30.LifecycleScopeType.build, | ||
656 | [spdx_package], | ||
657 | ) | ||
658 | |||
659 | for cpe_id in cpe_ids: | ||
660 | spdx_package.externalIdentifier.append( | ||
661 | oe.spdx30.ExternalIdentifier( | ||
662 | externalIdentifierType=oe.spdx30.ExternalIdentifierType.cpe23, | ||
663 | identifier=cpe_id, | ||
664 | ) | ||
665 | ) | ||
666 | |||
667 | # TODO: Generate a file for each actual IPK/DEB/RPM/TGZ file | ||
668 | # generated and link it to the package | ||
669 | # spdx_package_file = pkg_objset.add(oe.spdx30.software_File( | ||
670 | # _id=pkg_objset.new_spdxid("distribution", pkg_name), | ||
671 | # creationInfo=pkg_objset.doc.creationInfo, | ||
672 | # name=pkg_name, | ||
673 | # software_primaryPurpose=spdx_package.software_primaryPurpose, | ||
674 | # software_additionalPurpose=spdx_package.software_additionalPurpose, | ||
675 | # )) | ||
676 | # set_timestamp_now(d, spdx_package_file, "builtTime") | ||
677 | |||
678 | ## TODO add hashes | ||
679 | # pkg_objset.new_relationship( | ||
680 | # [spdx_package], | ||
681 | # oe.spdx30.RelationshipType.hasDistributionArtifact, | ||
682 | # [spdx_package_file], | ||
683 | # ) | ||
684 | |||
685 | # NOTE: licenses live in the recipe collection and are referenced | ||
686 | # by ID in the package collection(s). This helps reduce duplication | ||
687 | # (since a lot of packages will have the same license), and also | ||
688 | # prevents duplicate license SPDX IDs in the packages | ||
689 | package_license = d.getVar("LICENSE:%s" % package) | ||
690 | if package_license and package_license != d.getVar("LICENSE"): | ||
691 | package_spdx_license = add_license_expression( | ||
692 | d, build_objset, package_license, license_data | ||
693 | ) | ||
694 | else: | ||
695 | package_spdx_license = recipe_spdx_license | ||
696 | |||
697 | pkg_objset.new_relationship( | ||
698 | [spdx_package], | ||
699 | oe.spdx30.RelationshipType.hasConcludedLicense, | ||
700 | [oe.sbom30.get_element_link_id(package_spdx_license)], | ||
701 | ) | ||
702 | |||
703 | # NOTE: CVE Elements live in the recipe collection | ||
704 | all_cves = set() | ||
705 | for status, cves in cve_by_status.items(): | ||
706 | for cve, items in cves.items(): | ||
707 | spdx_cve, detail, description = items | ||
708 | spdx_cve_id = oe.sbom30.get_element_link_id(spdx_cve) | ||
709 | |||
710 | all_cves.add(spdx_cve_id) | ||
711 | |||
712 | if status == "Patched": | ||
713 | pkg_objset.new_vex_patched_relationship( | ||
714 | [spdx_cve_id], [spdx_package] | ||
715 | ) | ||
716 | elif status == "Unpatched": | ||
717 | pkg_objset.new_vex_unpatched_relationship( | ||
718 | [spdx_cve_id], [spdx_package] | ||
719 | ) | ||
720 | elif status == "Ignored": | ||
721 | spdx_vex = pkg_objset.new_vex_ignored_relationship( | ||
722 | [spdx_cve_id], | ||
723 | [spdx_package], | ||
724 | impact_statement=description, | ||
725 | ) | ||
726 | |||
727 | if detail in ( | ||
728 | "ignored", | ||
729 | "cpe-incorrect", | ||
730 | "disputed", | ||
731 | "upstream-wontfix", | ||
732 | ): | ||
733 | # VEX doesn't have justifications for this | ||
734 | pass | ||
735 | elif detail in ( | ||
736 | "not-applicable-config", | ||
737 | "not-applicable-platform", | ||
738 | ): | ||
739 | for v in spdx_vex: | ||
740 | v.security_justificationType = ( | ||
741 | oe.spdx30.security_VexJustificationType.vulnerableCodeNotPresent | ||
742 | ) | ||
743 | else: | ||
744 | bb.fatal(f"Unknown detail '{detail}' for ignored {cve}") | ||
745 | elif status == "Unknown": | ||
746 | bb.note(f"Skipping {cve} with status 'Unknown'") | ||
747 | else: | ||
748 | bb.fatal(f"Unknown {cve} status '{status}'") | ||
749 | |||
750 | if all_cves: | ||
751 | pkg_objset.new_relationship( | ||
752 | [spdx_package], | ||
753 | oe.spdx30.RelationshipType.hasAssociatedVulnerability, | ||
754 | sorted(list(all_cves)), | ||
755 | ) | ||
756 | |||
757 | bb.debug(1, "Adding package files to SPDX for package %s" % pkg_name) | ||
758 | package_files = add_package_files( | ||
759 | d, | ||
760 | pkg_objset, | ||
761 | pkgdest / package, | ||
762 | lambda file_counter: pkg_objset.new_spdxid( | ||
763 | "package", pkg_name, "file", str(file_counter) | ||
764 | ), | ||
765 | # TODO: Can we know the purpose here? | ||
766 | lambda filepath: [], | ||
767 | license_data, | ||
768 | ignore_top_level_dirs=["CONTROL", "DEBIAN"], | ||
769 | archive=None, | ||
770 | ) | ||
771 | |||
772 | if package_files: | ||
773 | pkg_objset.new_relationship( | ||
774 | [spdx_package], | ||
775 | oe.spdx30.RelationshipType.contains, | ||
776 | sorted(list(package_files)), | ||
777 | ) | ||
778 | |||
779 | if include_sources: | ||
780 | debug_sources = get_package_sources_from_debug( | ||
781 | d, package, package_files, dep_sources, source_hash_cache | ||
782 | ) | ||
783 | debug_source_ids |= set( | ||
784 | oe.sbom30.get_element_link_id(d) for d in debug_sources | ||
785 | ) | ||
786 | |||
787 | oe.sbom30.write_recipe_jsonld_doc( | ||
788 | d, pkg_objset, "packages-staging", deploydir, create_spdx_id_links=False | ||
789 | ) | ||
790 | |||
791 | if include_sources: | ||
792 | bb.debug(1, "Adding sysroot files to SPDX") | ||
793 | sysroot_files = add_package_files( | ||
794 | d, | ||
795 | build_objset, | ||
796 | d.expand("${COMPONENTS_DIR}/${PACKAGE_ARCH}/${PN}"), | ||
797 | lambda file_counter: build_objset.new_spdxid("sysroot", str(file_counter)), | ||
798 | lambda filepath: [], | ||
799 | license_data, | ||
800 | archive=None, | ||
801 | ) | ||
802 | |||
803 | if sysroot_files: | ||
804 | build_objset.new_scoped_relationship( | ||
805 | [build], | ||
806 | oe.spdx30.RelationshipType.hasOutput, | ||
807 | oe.spdx30.LifecycleScopeType.build, | ||
808 | sorted(list(sysroot_files)), | ||
809 | ) | ||
810 | |||
811 | if build_inputs or debug_source_ids: | ||
812 | build_objset.new_scoped_relationship( | ||
813 | [build], | ||
814 | oe.spdx30.RelationshipType.hasInput, | ||
815 | oe.spdx30.LifecycleScopeType.build, | ||
816 | sorted(list(build_inputs)) + sorted(list(debug_source_ids)), | ||
817 | ) | ||
818 | |||
819 | oe.sbom30.write_recipe_jsonld_doc(d, build_objset, "recipes", deploydir) | ||
820 | |||
821 | |||
822 | def create_package_spdx(d): | ||
823 | deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX")) | ||
824 | deploydir = Path(d.getVar("SPDXRUNTIMEDEPLOY")) | ||
825 | is_native = bb.data.inherits_class("native", d) or bb.data.inherits_class( | ||
826 | "cross", d | ||
827 | ) | ||
828 | |||
829 | providers = oe.spdx_common.collect_package_providers(d) | ||
830 | pkg_arch = d.getVar("SSTATE_PKGARCH") | ||
831 | |||
832 | if is_native: | ||
833 | return | ||
834 | |||
835 | bb.build.exec_func("read_subpackage_metadata", d) | ||
836 | |||
837 | dep_package_cache = {} | ||
838 | |||
839 | # Any element common to all packages that needs to be referenced by ID | ||
840 | # should be written into this object set | ||
841 | common_objset = oe.sbom30.ObjectSet.new_objset( | ||
842 | d, "%s-package-common" % d.getVar("PN") | ||
843 | ) | ||
844 | |||
845 | pkgdest = Path(d.getVar("PKGDEST")) | ||
846 | for package in d.getVar("PACKAGES").split(): | ||
847 | localdata = bb.data.createCopy(d) | ||
848 | pkg_name = d.getVar("PKG:%s" % package) or package | ||
849 | localdata.setVar("PKG", pkg_name) | ||
850 | localdata.setVar("OVERRIDES", d.getVar("OVERRIDES", False) + ":" + package) | ||
851 | |||
852 | if not oe.packagedata.packaged(package, localdata): | ||
853 | continue | ||
854 | |||
855 | spdx_package, pkg_objset = oe.sbom30.load_obj_in_jsonld( | ||
856 | d, | ||
857 | pkg_arch, | ||
858 | "packages-staging", | ||
859 | "package-" + pkg_name, | ||
860 | oe.spdx30.software_Package, | ||
861 | software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.install, | ||
862 | ) | ||
863 | |||
864 | # We will write out a new collection, so link it to the new | ||
865 | # creation info in the common package data. The old creation info | ||
866 | # should still exist and be referenced by all the existing elements | ||
867 | # in the package | ||
868 | pkg_objset.creationInfo = pkg_objset.copy_creation_info( | ||
869 | common_objset.doc.creationInfo | ||
870 | ) | ||
871 | |||
872 | runtime_spdx_deps = set() | ||
873 | |||
874 | deps = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS") or "") | ||
875 | seen_deps = set() | ||
876 | for dep, _ in deps.items(): | ||
877 | if dep in seen_deps: | ||
878 | continue | ||
879 | |||
880 | if dep not in providers: | ||
881 | continue | ||
882 | |||
883 | (dep, _) = providers[dep] | ||
884 | |||
885 | if not oe.packagedata.packaged(dep, localdata): | ||
886 | continue | ||
887 | |||
888 | dep_pkg_data = oe.packagedata.read_subpkgdata_dict(dep, d) | ||
889 | dep_pkg = dep_pkg_data["PKG"] | ||
890 | |||
891 | if dep in dep_package_cache: | ||
892 | dep_spdx_package = dep_package_cache[dep] | ||
893 | else: | ||
894 | bb.debug(1, "Searching for %s" % dep_pkg) | ||
895 | dep_spdx_package, _ = oe.sbom30.find_root_obj_in_jsonld( | ||
896 | d, | ||
897 | "packages-staging", | ||
898 | "package-" + dep_pkg, | ||
899 | oe.spdx30.software_Package, | ||
900 | software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.install, | ||
901 | ) | ||
902 | dep_package_cache[dep] = dep_spdx_package | ||
903 | |||
904 | runtime_spdx_deps.add(dep_spdx_package) | ||
905 | seen_deps.add(dep) | ||
906 | |||
907 | if runtime_spdx_deps: | ||
908 | pkg_objset.new_scoped_relationship( | ||
909 | [spdx_package], | ||
910 | oe.spdx30.RelationshipType.dependsOn, | ||
911 | oe.spdx30.LifecycleScopeType.runtime, | ||
912 | [oe.sbom30.get_element_link_id(dep) for dep in runtime_spdx_deps], | ||
913 | ) | ||
914 | |||
915 | oe.sbom30.write_recipe_jsonld_doc(d, pkg_objset, "packages", deploydir) | ||
916 | |||
917 | oe.sbom30.write_recipe_jsonld_doc(d, common_objset, "common-package", deploydir) | ||
918 | |||
919 | |||
920 | def write_bitbake_spdx(d): | ||
921 | # Set PN to "bitbake" so that SPDX IDs can be generated | ||
922 | d.setVar("PN", "bitbake") | ||
923 | d.setVar("BB_TASKHASH", "bitbake") | ||
924 | oe.spdx_common.load_spdx_license_data(d) | ||
925 | |||
926 | deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX")) | ||
927 | |||
928 | objset = oe.sbom30.ObjectSet.new_objset(d, "bitbake", False) | ||
929 | |||
930 | host_import_key = d.getVar("SPDX_BUILD_HOST") | ||
931 | invoked_by = objset.new_agent("SPDX_INVOKED_BY", add=False) | ||
932 | on_behalf_of = objset.new_agent("SPDX_ON_BEHALF_OF", add=False) | ||
933 | |||
934 | if d.getVar("SPDX_INCLUDE_BITBAKE_PARENT_BUILD") == "1": | ||
935 | # Since the Build objects are unique, we may as well set the creation | ||
936 | # time to the current time instead of the fallback SDE (SOURCE_DATE_EPOCH) | ||
937 | objset.doc.creationInfo.created = datetime.now(timezone.utc) | ||
938 | |||
939 | # Each invocation of bitbake should have a unique ID since it is a | ||
940 | # unique build | ||
941 | nonce = os.urandom(16).hex() | ||
942 | |||
943 | build = objset.add_root( | ||
944 | oe.spdx30.build_Build( | ||
945 | _id=objset.new_spdxid(nonce, include_unihash=False), | ||
946 | creationInfo=objset.doc.creationInfo, | ||
947 | build_buildType=oe.sbom30.SPDX_BUILD_TYPE, | ||
948 | ) | ||
949 | ) | ||
950 | set_timestamp_now(d, build, "build_buildStartTime") | ||
951 | |||
952 | if host_import_key: | ||
953 | objset.new_scoped_relationship( | ||
954 | [build], | ||
955 | oe.spdx30.RelationshipType.hasHost, | ||
956 | oe.spdx30.LifecycleScopeType.build, | ||
957 | [objset.new_import(host_import_key)], | ||
958 | ) | ||
959 | |||
960 | if invoked_by: | ||
961 | objset.add(invoked_by) | ||
962 | invoked_by_spdx = objset.new_scoped_relationship( | ||
963 | [build], | ||
964 | oe.spdx30.RelationshipType.invokedBy, | ||
965 | oe.spdx30.LifecycleScopeType.build, | ||
966 | [invoked_by], | ||
967 | ) | ||
968 | |||
969 | if on_behalf_of: | ||
970 | objset.add(on_behalf_of) | ||
971 | objset.new_scoped_relationship( | ||
972 | [on_behalf_of], | ||
973 | oe.spdx30.RelationshipType.delegatedTo, | ||
974 | oe.spdx30.LifecycleScopeType.build, | ||
975 | invoked_by_spdx, | ||
976 | ) | ||
977 | |||
978 | elif on_behalf_of: | ||
979 | bb.warn("SPDX_ON_BEHALF_OF has no effect if SPDX_INVOKED_BY is not set") | ||
980 | |||
981 | else: | ||
982 | if host_import_key: | ||
983 | bb.warn( | ||
984 | "SPDX_BUILD_HOST has no effect if SPDX_INCLUDE_BITBAKE_PARENT_BUILD is not set" | ||
985 | ) | ||
986 | |||
987 | if invoked_by: | ||
988 | bb.warn( | ||
989 | "SPDX_INVOKED_BY has no effect if SPDX_INCLUDE_BITBAKE_PARENT_BUILD is not set" | ||
990 | ) | ||
991 | |||
992 | if on_behalf_of: | ||
993 | bb.warn( | ||
994 | "SPDX_ON_BEHALF_OF has no effect if SPDX_INCLUDE_BITBAKE_PARENT_BUILD is not set" | ||
995 | ) | ||
996 | |||
997 | for obj in objset.foreach_type(oe.spdx30.Element): | ||
998 | obj.extension.append(oe.sbom30.OEIdAliasExtension()) | ||
999 | |||
1000 | oe.sbom30.write_jsonld_doc(d, objset, deploy_dir_spdx / "bitbake.spdx.json") | ||
1001 | |||
1002 | |||
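The uniqueness of the parent Build element above rests entirely on the nonce. A minimal sketch of the idea; the ID prefix here is illustrative, not the real SPDX ID scheme:

    import os

    def unique_build_id(prefix="http://example.com/spdx/build"):
        # os.urandom() is fresh per call, so every bitbake invocation
        # gets a distinct Build SPDX ID even for identical configurations
        return "%s/%s" % (prefix, os.urandom(16).hex())

    assert unique_build_id() != unique_build_id()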
1003 | def collect_build_package_inputs(d, objset, build, packages, files_by_hash=None): | ||
1004 | import oe.sbom30 | ||
1005 | |||
1006 | providers = oe.spdx_common.collect_package_providers(d) | ||
1007 | |||
1008 | build_deps = set() | ||
1009 | missing_providers = set() | ||
1010 | |||
1011 | for name in sorted(packages.keys()): | ||
1012 | if name not in providers: | ||
1013 | missing_providers.add(name) | ||
1014 | continue | ||
1015 | |||
1016 | pkg_name, pkg_hashfn = providers[name] | ||
1017 | |||
1018 | # Copy all of the package SPDX files into the Sbom elements | ||
1019 | pkg_spdx, pkg_objset = oe.sbom30.find_root_obj_in_jsonld( | ||
1020 | d, | ||
1021 | "packages", | ||
1022 | "package-" + pkg_name, | ||
1023 | oe.spdx30.software_Package, | ||
1024 | software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.install, | ||
1025 | ) | ||
1026 | build_deps.add(oe.sbom30.get_element_link_id(pkg_spdx)) | ||
1027 | |||
1028 | if files_by_hash is not None: | ||
1029 | for h, f in pkg_objset.by_sha256_hash.items(): | ||
1030 | files_by_hash.setdefault(h, set()).update(f) | ||
1031 | |||
1032 | if missing_providers: | ||
1033 | bb.fatal( | ||
1034 | f"Unable to find SPDX provider(s) for: {', '.join(sorted(missing_providers))}" | ||
1035 | ) | ||
1036 | |||
1037 | if build_deps: | ||
1038 | objset.new_scoped_relationship( | ||
1039 | [build], | ||
1040 | oe.spdx30.RelationshipType.hasInput, | ||
1041 | oe.spdx30.LifecycleScopeType.build, | ||
1042 | sorted(list(build_deps)), | ||
1043 | ) | ||
1044 | |||
1045 | |||
1046 | def create_rootfs_spdx(d): | ||
1047 | deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX")) | ||
1048 | deploydir = Path(d.getVar("SPDXROOTFSDEPLOY")) | ||
1049 | root_packages_file = Path(d.getVar("SPDX_ROOTFS_PACKAGES")) | ||
1050 | image_basename = d.getVar("IMAGE_BASENAME") | ||
1051 | image_rootfs = d.getVar("IMAGE_ROOTFS") | ||
1052 | machine = d.getVar("MACHINE") | ||
1053 | |||
1054 | with root_packages_file.open("r") as f: | ||
1055 | packages = json.load(f) | ||
1056 | |||
1057 | objset = oe.sbom30.ObjectSet.new_objset( | ||
1058 | d, "%s-%s-rootfs" % (image_basename, machine) | ||
1059 | ) | ||
1060 | |||
1061 | rootfs = objset.add_root( | ||
1062 | oe.spdx30.software_Package( | ||
1063 | _id=objset.new_spdxid("rootfs", image_basename), | ||
1064 | creationInfo=objset.doc.creationInfo, | ||
1065 | name=image_basename, | ||
1066 | software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.archive, | ||
1067 | ) | ||
1068 | ) | ||
1069 | set_timestamp_now(d, rootfs, "builtTime") | ||
1070 | |||
1071 | rootfs_build = objset.add_root(objset.new_task_build("rootfs", "rootfs")) | ||
1072 | set_timestamp_now(d, rootfs_build, "build_buildEndTime") | ||
1073 | |||
1074 | objset.new_scoped_relationship( | ||
1075 | [rootfs_build], | ||
1076 | oe.spdx30.RelationshipType.hasOutput, | ||
1077 | oe.spdx30.LifecycleScopeType.build, | ||
1078 | [rootfs], | ||
1079 | ) | ||
1080 | |||
1081 | files_by_hash = {} | ||
1082 | collect_build_package_inputs(d, objset, rootfs_build, packages, files_by_hash) | ||
1083 | |||
1084 | files = set() | ||
1085 | for dirpath, dirnames, filenames in os.walk(image_rootfs, onerror=walk_error): | ||
1086 | dirnames.sort() | ||
1087 | filenames.sort() | ||
1088 | for fn in filenames: | ||
1089 | fpath = Path(dirpath) / fn | ||
1090 | if fpath.is_symlink() or not fpath.is_file(): | ||
1091 | continue | ||
1092 | |||
1093 | relpath = str(fpath.relative_to(image_rootfs)) | ||
1094 | h = bb.utils.sha256_file(fpath) | ||
1095 | |||
1096 | found = False | ||
1097 | if h in files_by_hash: | ||
1098 | for f in files_by_hash[h]: | ||
1099 | if isinstance(f, oe.spdx30.software_File) and f.name == relpath: | ||
1100 | files.add(oe.sbom30.get_element_link_id(f)) | ||
1101 | found = True | ||
1102 | break | ||
1103 | |||
1104 | if not found: | ||
1105 | files.add( | ||
1106 | objset.new_file( | ||
1107 | objset.new_spdxid("rootfs-file", relpath), | ||
1108 | relpath, | ||
1109 | fpath, | ||
1110 | ) | ||
1111 | ) | ||
1112 | |||
1113 | if files: | ||
1114 | objset.new_relationship( | ||
1115 | [rootfs], | ||
1116 | oe.spdx30.RelationshipType.contains, | ||
1117 | sorted(list(files)), | ||
1118 | ) | ||
1119 | |||
1120 | oe.sbom30.write_recipe_jsonld_doc(d, objset, "rootfs", deploydir) | ||
1121 | |||
1122 | |||
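create_rootfs_spdx() above reuses a package-level software_File element when both the sha256 digest and the rootfs-relative path match, and only mints a new element otherwise. A minimal sketch of that dedup pattern, using (name, digest) tuples in place of SPDX elements:

    import hashlib

    def dedup_or_create(files_by_hash, relpath, data):
        # files_by_hash: sha256 hex digest -> set of (name, digest) records
        # already emitted by the per-package SPDX documents
        h = hashlib.sha256(data).hexdigest()
        for name, _ in files_by_hash.get(h, set()):
            if name == relpath:
                return "existing"   # link to the element we already have
        files_by_hash.setdefault(h, set()).add((relpath, h))
        return "new"                # a fresh rootfs-file element is needed

    known = {}
    assert dedup_or_create(known, "etc/hostname", b"qemux86\n") == "new"
    assert dedup_or_create(known, "etc/hostname", b"qemux86\n") == "existing"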
1123 | def create_image_spdx(d): | ||
1124 | import oe.sbom30 | ||
1125 | |||
1126 | image_deploy_dir = Path(d.getVar("IMGDEPLOYDIR")) | ||
1127 | manifest_path = Path(d.getVar("IMAGE_OUTPUT_MANIFEST")) | ||
1128 | spdx_work_dir = Path(d.getVar("SPDXIMAGEWORK")) | ||
1129 | |||
1130 | image_basename = d.getVar("IMAGE_BASENAME") | ||
1131 | machine = d.getVar("MACHINE") | ||
1132 | |||
1133 | objset = oe.sbom30.ObjectSet.new_objset( | ||
1134 | d, "%s-%s-image" % (image_basename, machine) | ||
1135 | ) | ||
1136 | |||
1137 | with manifest_path.open("r") as f: | ||
1138 | manifest = json.load(f) | ||
1139 | |||
1140 | builds = [] | ||
1141 | for task in manifest: | ||
1142 | imagetype = task["imagetype"] | ||
1143 | taskname = task["taskname"] | ||
1144 | |||
1145 | image_build = objset.add_root( | ||
1146 | objset.new_task_build(taskname, "image/%s" % imagetype) | ||
1147 | ) | ||
1148 | set_timestamp_now(d, image_build, "build_buildEndTime") | ||
1149 | builds.append(image_build) | ||
1150 | |||
1151 | artifacts = [] | ||
1152 | |||
1153 | for image in task["images"]: | ||
1154 | image_filename = image["filename"] | ||
1155 | image_path = image_deploy_dir / image_filename | ||
1156 | if os.path.isdir(image_path): | ||
1157 | a = add_package_files( | ||
1158 | d, | ||
1159 | objset, | ||
1160 | image_path, | ||
1161 | lambda file_counter: objset.new_spdxid( | ||
1162 | "imagefile", str(file_counter) | ||
1163 | ), | ||
1164 | lambda filepath: [], | ||
1165 | license_data=None, | ||
1166 | ignore_dirs=[], | ||
1167 | ignore_top_level_dirs=[], | ||
1168 | archive=None, | ||
1169 | ) | ||
1170 | artifacts.extend(a) | ||
1171 | else: | ||
1172 | a = objset.add_root( | ||
1173 | oe.spdx30.software_File( | ||
1174 | _id=objset.new_spdxid("image", image_filename), | ||
1175 | creationInfo=objset.doc.creationInfo, | ||
1176 | name=image_filename, | ||
1177 | verifiedUsing=[ | ||
1178 | oe.spdx30.Hash( | ||
1179 | algorithm=oe.spdx30.HashAlgorithm.sha256, | ||
1180 | hashValue=bb.utils.sha256_file(image_path), | ||
1181 | ) | ||
1182 | ], | ||
1183 | ) | ||
1184 | ) | ||
1185 | |||
1186 | artifacts.append(a) | ||
1187 | |||
1188 | for a in artifacts: | ||
1189 | set_purposes( | ||
1190 | d, a, "SPDX_IMAGE_PURPOSE:%s" % imagetype, "SPDX_IMAGE_PURPOSE" | ||
1191 | ) | ||
1192 | |||
1193 | set_timestamp_now(d, a, "builtTime") | ||
1194 | |||
1195 | |||
1196 | if artifacts: | ||
1197 | objset.new_scoped_relationship( | ||
1198 | [image_build], | ||
1199 | oe.spdx30.RelationshipType.hasOutput, | ||
1200 | oe.spdx30.LifecycleScopeType.build, | ||
1201 | artifacts, | ||
1202 | ) | ||
1203 | |||
1204 | if builds: | ||
1205 | rootfs_image, _ = oe.sbom30.find_root_obj_in_jsonld( | ||
1206 | d, | ||
1207 | "rootfs", | ||
1208 | "%s-%s-rootfs" % (image_basename, machine), | ||
1209 | oe.spdx30.software_Package, | ||
1210 | # TODO: Should use a purpose to filter here? | ||
1211 | ) | ||
1212 | objset.new_scoped_relationship( | ||
1213 | builds, | ||
1214 | oe.spdx30.RelationshipType.hasInput, | ||
1215 | oe.spdx30.LifecycleScopeType.build, | ||
1216 | [oe.sbom30.get_element_link_id(rootfs_image)], | ||
1217 | ) | ||
1218 | |||
1219 | objset.add_aliases() | ||
1220 | objset.link() | ||
1221 | oe.sbom30.write_recipe_jsonld_doc(d, objset, "image", spdx_work_dir) | ||
1222 | |||
1223 | |||
1224 | def create_image_sbom_spdx(d): | ||
1225 | import oe.sbom30 | ||
1226 | |||
1227 | image_name = d.getVar("IMAGE_NAME") | ||
1228 | image_basename = d.getVar("IMAGE_BASENAME") | ||
1229 | image_link_name = d.getVar("IMAGE_LINK_NAME") | ||
1230 | imgdeploydir = Path(d.getVar("SPDXIMAGEDEPLOYDIR")) | ||
1231 | machine = d.getVar("MACHINE") | ||
1232 | |||
1233 | spdx_path = imgdeploydir / (image_name + ".spdx.json") | ||
1234 | |||
1235 | root_elements = [] | ||
1236 | |||
1237 | # TODO: Do we need to add the rootfs or are the image files sufficient? | ||
1238 | rootfs_image, _ = oe.sbom30.find_root_obj_in_jsonld( | ||
1239 | d, | ||
1240 | "rootfs", | ||
1241 | "%s-%s-rootfs" % (image_basename, machine), | ||
1242 | oe.spdx30.software_Package, | ||
1243 | # TODO: Should use a purpose here? | ||
1244 | ) | ||
1245 | root_elements.append(oe.sbom30.get_element_link_id(rootfs_image)) | ||
1246 | |||
1247 | image_objset, _ = oe.sbom30.find_jsonld( | ||
1248 | d, "image", "%s-%s-image" % (image_basename, machine), required=True | ||
1249 | ) | ||
1250 | for o in image_objset.foreach_root(oe.spdx30.software_File): | ||
1251 | root_elements.append(oe.sbom30.get_element_link_id(o)) | ||
1252 | |||
1253 | objset, sbom = oe.sbom30.create_sbom(d, image_name, root_elements) | ||
1254 | |||
1255 | oe.sbom30.write_jsonld_doc(d, objset, spdx_path) | ||
1256 | |||
1257 | def make_image_link(target_path, suffix): | ||
1258 | if image_link_name: | ||
1259 | link = imgdeploydir / (image_link_name + suffix) | ||
1260 | if link != target_path: | ||
1261 | link.symlink_to(os.path.relpath(target_path, link.parent)) | ||
1262 | |||
1263 | make_image_link(spdx_path, ".spdx.json") | ||
1264 | |||
1265 | |||
1266 | def sdk_create_spdx(d, sdk_type, spdx_work_dir, toolchain_outputname): | ||
1267 | sdk_name = toolchain_outputname + "-" + sdk_type | ||
1268 | sdk_packages = oe.sdk.sdk_list_installed_packages(d, sdk_type == "target") | ||
1269 | |||
1270 | objset = oe.sbom30.ObjectSet.new_objset(d, sdk_name) | ||
1271 | |||
1272 | sdk_rootfs = objset.add_root( | ||
1273 | oe.spdx30.software_Package( | ||
1274 | _id=objset.new_spdxid("sdk-rootfs", sdk_name), | ||
1275 | creationInfo=objset.doc.creationInfo, | ||
1276 | name=sdk_name, | ||
1277 | software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.archive, | ||
1278 | ) | ||
1279 | ) | ||
1280 | set_timestamp_now(d, sdk_rootfs, "builtTime") | ||
1281 | |||
1282 | sdk_build = objset.add_root(objset.new_task_build("sdk-rootfs", "sdk-rootfs")) | ||
1283 | set_timestamp_now(d, sdk_build, "build_buildEndTime") | ||
1284 | |||
1285 | objset.new_scoped_relationship( | ||
1286 | [sdk_build], | ||
1287 | oe.spdx30.RelationshipType.hasOutput, | ||
1288 | oe.spdx30.LifecycleScopeType.build, | ||
1289 | [sdk_rootfs], | ||
1290 | ) | ||
1291 | |||
1292 | collect_build_package_inputs(d, objset, sdk_build, sdk_packages) | ||
1293 | |||
1294 | objset.add_aliases() | ||
1295 | oe.sbom30.write_jsonld_doc(d, objset, spdx_work_dir / "sdk-rootfs.spdx.json") | ||
1296 | |||
1297 | |||
1298 | def create_sdk_sbom(d, sdk_deploydir, spdx_work_dir, toolchain_outputname): | ||
1299 | # Load the document written earlier | ||
1300 | rootfs_objset = oe.sbom30.load_jsonld( | ||
1301 | d, spdx_work_dir / "sdk-rootfs.spdx.json", required=True | ||
1302 | ) | ||
1303 | |||
1304 | # Create a new build for the SDK installer | ||
1305 | sdk_build = rootfs_objset.new_task_build("sdk-populate", "sdk-populate") | ||
1306 | set_timestamp_now(d, sdk_build, "build_buildEndTime") | ||
1307 | |||
1308 | rootfs = rootfs_objset.find_root(oe.spdx30.software_Package) | ||
1309 | if rootfs is None: | ||
1310 | bb.fatal("Unable to find rootfs artifact") | ||
1311 | |||
1312 | rootfs_objset.new_scoped_relationship( | ||
1313 | [sdk_build], | ||
1314 | oe.spdx30.RelationshipType.hasInput, | ||
1315 | oe.spdx30.LifecycleScopeType.build, | ||
1316 | [rootfs], | ||
1317 | ) | ||
1318 | |||
1319 | files = set() | ||
1320 | root_files = [] | ||
1321 | |||
1322 | # NOTE: os.walk() does not follow symlinked directories; symlinks to files are filtered below | ||
1323 | for dirpath, dirnames, filenames in os.walk(sdk_deploydir, onerror=walk_error): | ||
1324 | dirnames.sort() | ||
1325 | filenames.sort() | ||
1326 | for fn in filenames: | ||
1327 | fpath = Path(dirpath) / fn | ||
1328 | if not fpath.is_file() or fpath.is_symlink(): | ||
1329 | continue | ||
1330 | |||
1331 | relpath = str(fpath.relative_to(sdk_deploydir)) | ||
1332 | |||
1333 | f = rootfs_objset.new_file( | ||
1334 | rootfs_objset.new_spdxid("sdk-installer", relpath), | ||
1335 | relpath, | ||
1336 | fpath, | ||
1337 | ) | ||
1338 | set_timestamp_now(d, f, "builtTime") | ||
1339 | |||
1340 | if fn.endswith(".manifest"): | ||
1341 | f.software_primaryPurpose = oe.spdx30.software_SoftwarePurpose.manifest | ||
1342 | elif fn.endswith(".testdata.json"): | ||
1343 | f.software_primaryPurpose = ( | ||
1344 | oe.spdx30.software_SoftwarePurpose.configuration | ||
1345 | ) | ||
1346 | else: | ||
1347 | set_purposes(d, f, "SPDX_SDK_PURPOSE") | ||
1348 | root_files.append(f) | ||
1349 | |||
1350 | files.add(f) | ||
1351 | |||
1352 | if files: | ||
1353 | rootfs_objset.new_scoped_relationship( | ||
1354 | [sdk_build], | ||
1355 | oe.spdx30.RelationshipType.hasOutput, | ||
1356 | oe.spdx30.LifecycleScopeType.build, | ||
1357 | files, | ||
1358 | ) | ||
1359 | else: | ||
1360 | bb.warn(f"No SDK output files found in {sdk_deploydir}") | ||
1361 | |||
1362 | objset, sbom = oe.sbom30.create_sbom( | ||
1363 | d, toolchain_outputname, sorted(list(files)), [rootfs_objset] | ||
1364 | ) | ||
1365 | |||
1366 | oe.sbom30.write_jsonld_doc( | ||
1367 | d, objset, sdk_deploydir / (toolchain_outputname + ".spdx.json") | ||
1368 | ) | ||
diff --git a/meta/lib/oe/spdx_common.py b/meta/lib/oe/spdx_common.py new file mode 100644 index 0000000000..c2dec65563 --- /dev/null +++ b/meta/lib/oe/spdx_common.py | |||
@@ -0,0 +1,285 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | import bb | ||
8 | import collections | ||
9 | import json | ||
10 | import oe.packagedata | ||
11 | import os | ||
12 | import re | ||
13 | import shutil | ||
14 | |||
15 | from pathlib import Path | ||
16 | from dataclasses import dataclass | ||
16 | |||
17 | LIC_REGEX = re.compile( | ||
18 | rb"^\W*SPDX-License-Identifier:\s*([ \w\d.()+-]+?)(?:\s+\W*)?$", | ||
19 | re.MULTILINE, | ||
20 | ) | ||
21 | |||
22 | |||
23 | def extract_licenses(filename): | ||
24 | """ | ||
25 | Extract SPDX License identifiers from a file | ||
26 | """ | ||
27 | try: | ||
28 | with open(filename, "rb") as f: | ||
29 | size = min(15000, os.stat(filename).st_size) | ||
30 | txt = f.read(size) | ||
31 | licenses = re.findall(LIC_REGEX, txt) | ||
32 | if licenses: | ||
33 | ascii_licenses = [lic.decode("ascii") for lic in licenses] | ||
34 | return ascii_licenses | ||
35 | except Exception as e: | ||
36 | bb.warn(f"Exception reading {filename}: {e}") | ||
37 | return [] | ||
38 | |||
39 | |||
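For reference, LIC_REGEX can be exercised on its own; a small sketch with invented file content:

    import re

    LIC_REGEX = re.compile(
        rb"^\W*SPDX-License-Identifier:\s*([ \w\d.()+-]+?)(?:\s+\W*)?$",
        re.MULTILINE,
    )

    sample = (
        b"/* SPDX-License-Identifier: GPL-2.0-only */\n"
        b"# SPDX-License-Identifier: MIT OR Apache-2.0\n"
    )
    print([lic.decode("ascii") for lic in LIC_REGEX.findall(sample)])
    # ['GPL-2.0-only', 'MIT OR Apache-2.0']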
40 | def is_work_shared_spdx(d): | ||
41 | return '/work-shared/' in d.getVar('S') | ||
42 | |||
43 | |||
44 | def load_spdx_license_data(d): | ||
45 | with open(d.getVar("SPDX_LICENSES"), "r") as f: | ||
46 | data = json.load(f) | ||
47 | # Transform the license array to a dictionary | ||
48 | data["licenses"] = {l["licenseId"]: l for l in data["licenses"]} | ||
49 | |||
50 | return data | ||
51 | |||
52 | |||
53 | def process_sources(d): | ||
54 | """ | ||
55 | Returns True if the sources for this recipe should be included in the SPDX | ||
56 | or False if not | ||
57 | """ | ||
58 | pn = d.getVar("PN") | ||
59 | assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split() | ||
60 | if pn in assume_provided: | ||
61 | for p in d.getVar("PROVIDES").split(): | ||
62 | if p != pn: | ||
63 | pn = p | ||
64 | break | ||
65 | |||
66 | # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted, | ||
67 | # so avoid archiving source here. | ||
68 | if pn.startswith("glibc-locale"): | ||
69 | return False | ||
70 | if d.getVar("PN") == "libtool-cross": | ||
71 | return False | ||
72 | if d.getVar("PN") == "libgcc-initial": | ||
73 | return False | ||
74 | if d.getVar("PN") == "shadow-sysroot": | ||
75 | return False | ||
76 | |||
77 | return True | ||
78 | |||
79 | |||
80 | @dataclass(frozen=True) | ||
81 | class Dep(object): | ||
82 | pn: str | ||
83 | hashfn: str | ||
84 | in_taskhash: bool | ||
85 | |||
86 | |||
87 | def collect_direct_deps(d, dep_task): | ||
88 | """ | ||
89 | Find direct dependencies of current task | ||
90 | |||
91 | Returns the list of recipes that have a dep_task that the current task | ||
92 | depends on | ||
93 | """ | ||
94 | current_task = "do_" + d.getVar("BB_CURRENTTASK") | ||
95 | pn = d.getVar("PN") | ||
96 | |||
97 | taskdepdata = d.getVar("BB_TASKDEPDATA", False) | ||
98 | |||
99 | for this_dep in taskdepdata.values(): | ||
100 | if this_dep[0] == pn and this_dep[1] == current_task: | ||
101 | break | ||
102 | else: | ||
103 | bb.fatal(f"Unable to find this {pn}:{current_task} in taskdepdata") | ||
104 | |||
105 | deps = set() | ||
106 | |||
107 | for dep_name in this_dep.deps: | ||
108 | dep_data = taskdepdata[dep_name] | ||
109 | if dep_data.taskname == dep_task and dep_data.pn != pn: | ||
110 | deps.add((dep_data.pn, dep_data.hashfn, dep_name in this_dep.taskhash_deps)) | ||
111 | |||
112 | return sorted(deps) | ||
113 | |||
114 | |||
115 | def get_spdx_deps(d): | ||
116 | """ | ||
117 | Reads the SPDX dependencies JSON file and returns the data | ||
118 | """ | ||
119 | spdx_deps_file = Path(d.getVar("SPDXDEPS")) | ||
120 | |||
121 | deps = [] | ||
122 | with spdx_deps_file.open("r") as f: | ||
123 | for d in json.load(f): | ||
124 | deps.append(Dep(*d)) | ||
125 | return deps | ||
126 | |||
127 | |||
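get_spdx_deps() assumes each JSON entry is a three-element list matching the Dep fields in order. A sketch of the round-trip; the serialized form is inferred from the reader, since the writer side is not shown here:

    import json
    from dataclasses import astuple, dataclass

    @dataclass(frozen=True)
    class Dep:
        pn: str
        hashfn: str
        in_taskhash: bool

    deps = [Dep("zlib", "deadbeef", True)]
    blob = json.dumps([astuple(dep) for dep in deps])  # '[["zlib", "deadbeef", true]]'
    assert [Dep(*entry) for entry in json.loads(blob)] == deps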
128 | def collect_package_providers(d): | ||
129 | """ | ||
130 | Returns a dictionary where each RPROVIDES is mapped to the package that | ||
131 | provides it | ||
132 | """ | ||
133 | deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX")) | ||
134 | |||
135 | providers = {} | ||
136 | |||
137 | deps = collect_direct_deps(d, "do_create_spdx") | ||
138 | deps.append((d.getVar("PN"), d.getVar("BB_HASHFILENAME"), True)) | ||
139 | |||
140 | for dep_pn, dep_hashfn, _ in deps: | ||
141 | localdata = d | ||
142 | recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata) | ||
143 | if not recipe_data: | ||
144 | localdata = bb.data.createCopy(d) | ||
145 | localdata.setVar("PKGDATA_DIR", "${PKGDATA_DIR_SDK}") | ||
146 | recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata) | ||
147 | |||
148 | for pkg in recipe_data.get("PACKAGES", "").split(): | ||
149 | pkg_data = oe.packagedata.read_subpkgdata_dict(pkg, localdata) | ||
150 | rprovides = set( | ||
151 | n | ||
152 | for n, _ in bb.utils.explode_dep_versions2( | ||
153 | pkg_data.get("RPROVIDES", "") | ||
154 | ).items() | ||
155 | ) | ||
156 | rprovides.add(pkg) | ||
157 | |||
158 | if "PKG" in pkg_data: | ||
159 | pkg = pkg_data["PKG"] | ||
160 | rprovides.add(pkg) | ||
161 | |||
162 | for r in rprovides: | ||
163 | providers[r] = (pkg, dep_hashfn) | ||
164 | |||
165 | return providers | ||
166 | |||
167 | |||
168 | def get_patched_src(d): | ||
169 | """ | ||
170 | Save the patched source of the recipe in SPDX_WORKDIR. | ||
171 | """ | ||
172 | spdx_workdir = d.getVar("SPDXWORK") | ||
173 | spdx_sysroot_native = d.getVar("STAGING_DIR_NATIVE") | ||
174 | pn = d.getVar("PN") | ||
175 | |||
176 | workdir = d.getVar("WORKDIR") | ||
177 | |||
178 | try: | ||
179 | # The kernel class functions require the source to be in work-shared, so we don't change WORKDIR | ||
180 | if not is_work_shared_spdx(d): | ||
181 | # Change the WORKDIR to make do_unpack and do_patch run in another dir. | ||
182 | d.setVar("WORKDIR", spdx_workdir) | ||
183 | # Restore the original path to the recipe's native sysroot (it's relative to WORKDIR). | ||
184 | d.setVar("STAGING_DIR_NATIVE", spdx_sysroot_native) | ||
185 | |||
186 | # Changing 'WORKDIR' also changes 'B', so create the 'B' directory in | ||
187 | # case it is needed by the following tasks (for example, some recipes' | ||
188 | # do_patch requires 'B' to exist). | ||
189 | bb.utils.mkdirhier(d.getVar("B")) | ||
190 | |||
191 | bb.build.exec_func("do_unpack", d) | ||
192 | |||
193 | if d.getVar("SRC_URI") != "": | ||
194 | if bb.data.inherits_class('dos2unix', d): | ||
195 | bb.build.exec_func('do_convert_crlf_to_lf', d) | ||
196 | bb.build.exec_func("do_patch", d) | ||
197 | |||
198 | # Copy source from work-share to spdx_workdir | ||
199 | if is_work_shared_spdx(d): | ||
200 | share_src = d.getVar('S') | ||
201 | d.setVar("WORKDIR", spdx_workdir) | ||
202 | d.setVar("STAGING_DIR_NATIVE", spdx_sysroot_native) | ||
203 | # Copy source to ${SPDXWORK}, using the same basename as ${S}; | ||
204 | src_dir = ( | ||
205 | spdx_workdir | ||
206 | + "/" | ||
207 | + os.path.basename(share_src) | ||
208 | ) | ||
209 | # For kernel source, rename the 'kernel-source' directory | ||
210 | # to ${BP} (${BPN}-${PV}) | ||
211 | if bb.data.inherits_class("kernel", d): | ||
212 | src_dir = spdx_workdir + "/" + d.getVar('BP') | ||
213 | |||
214 | bb.note(f"copyhardlinktree {share_src} to {src_dir}") | ||
215 | oe.path.copyhardlinktree(share_src, src_dir) | ||
216 | |||
217 | # Some userland recipes have no source. | ||
218 | if not os.path.exists(spdx_workdir): | ||
219 | bb.utils.mkdirhier(spdx_workdir) | ||
220 | finally: | ||
221 | d.setVar("WORKDIR", workdir) | ||
222 | |||
223 | |||
224 | def has_task(d, task): | ||
225 | return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False)) | ||
226 | |||
227 | |||
228 | def fetch_data_to_uri(fd, name): | ||
229 | """ | ||
230 | Translates a bitbake FetchData to a string URI | ||
231 | """ | ||
232 | uri = fd.type | ||
233 | # Map gitsm to git, since gitsm:// is not a valid URI protocol | ||
234 | if uri == "gitsm": | ||
235 | uri = "git" | ||
236 | proto = getattr(fd, "proto", None) | ||
237 | if proto is not None: | ||
238 | uri = uri + "+" + proto | ||
239 | uri = uri + "://" + fd.host + fd.path | ||
240 | |||
241 | if fd.method.supports_srcrev(): | ||
242 | uri = uri + "@" + fd.revision | ||
243 | |||
244 | return uri | ||
245 | |||
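A sketch of what fetch_data_to_uri() produces: a self-contained re-statement of its logic, with types.SimpleNamespace standing in for a real bb.fetch2 FetchData object:

    from types import SimpleNamespace

    fd = SimpleNamespace(
        type="gitsm",                 # gitsm is mapped back to git
        proto="https",
        host="git.example.com",
        path="/repo.git",
        revision="abc123",
        method=SimpleNamespace(supports_srcrev=lambda: True),
    )

    def fetch_data_to_uri(fd):
        # Same logic as above, minus the bitbake types
        uri = "git" if fd.type == "gitsm" else fd.type
        proto = getattr(fd, "proto", None)
        if proto is not None:
            uri += "+" + proto
        uri += "://" + fd.host + fd.path
        if fd.method.supports_srcrev():
            uri += "@" + fd.revision
        return uri

    assert fetch_data_to_uri(fd) == "git+https://git.example.com/repo.git@abc123"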
246 | def is_compiled_source(filename, compiled_sources, types): | ||
247 | """ | ||
248 | Check if the file is a compiled file | ||
249 | """ | ||
250 | import os | ||
251 | # If we don't have compiled source, we assume all are compiled. | ||
252 | if not compiled_sources: | ||
253 | return True | ||
254 | |||
255 | # We always return True if the file type is not in the list of compiled file | ||
256 | # types. Some files in the source directory are not compiled, for example | ||
257 | # Makefiles, but also Python .py files. We need to include them in the SPDX. | ||
258 | basename = os.path.basename(filename) | ||
259 | ext = basename.partition(".")[2] | ||
260 | if ext not in types: | ||
261 | return True | ||
262 | # Check that the file is in the list | ||
263 | return filename in compiled_sources | ||
264 | |||
265 | def get_compiled_sources(d): | ||
266 | """ | ||
267 | Get list of compiled sources from debug information and normalize the paths | ||
268 | """ | ||
269 | import itertools | ||
270 | source_info = oe.package.read_debugsources_info(d) | ||
271 | if not source_info: | ||
272 | bb.debug(1, "Do not have debugsources.list. Skipping") | ||
273 | return [], [] | ||
274 | |||
275 | # Sources are not split in SPDX for now, so we aggregate them | ||
276 | sources = set(itertools.chain.from_iterable(source_info.values())) | ||
277 | # Check extensions of files | ||
278 | types = set() | ||
279 | for src in sources: | ||
280 | basename = os.path.basename(src) | ||
281 | ext = basename.partition(".")[2] | ||
282 | if ext not in types and ext: | ||
283 | types.add(ext) | ||
284 | bb.debug(1, f"Num of sources: {len(sources)} and types: {len(types)} {str(types)}") | ||
285 | return sources, types | ||
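Taken together, the two helpers above act as a coarse filter: files whose extension is not a known compiled-source type always pass, and known types pass only when the debug information references them. A sketch with invented paths:

    import os

    def is_compiled_source(filename, compiled_sources, types):
        # Mirror of the logic above, for illustration
        if not compiled_sources:
            return True
        ext = os.path.basename(filename).partition(".")[2]
        if ext not in types:
            return True   # Makefiles, .py files etc. are always included
        return filename in compiled_sources

    compiled = {"/usr/src/debug/foo/1.0/main.c"}
    types = {"c"}
    assert is_compiled_source("/usr/src/debug/foo/1.0/main.c", compiled, types)
    assert is_compiled_source("/usr/src/debug/foo/1.0/Makefile", compiled, types)
    assert not is_compiled_source("/usr/src/debug/foo/1.0/unused.c", compiled, types)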
diff --git a/meta/lib/oe/sstatesig.py b/meta/lib/oe/sstatesig.py index a46e5502ab..ef687f5d41 100644 --- a/meta/lib/oe/sstatesig.py +++ b/meta/lib/oe/sstatesig.py | |||
@@ -3,6 +3,7 @@ | |||
3 | # | 3 | # |
4 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
5 | # | 5 | # |
6 | import bb.parse | ||
6 | import bb.siggen | 7 | import bb.siggen |
7 | import bb.runqueue | 8 | import bb.runqueue |
8 | import oe | 9 | import oe |
@@ -93,6 +94,14 @@ def sstate_lockedsigs(d): | |||
93 | sigs[pn][task] = [h, siggen_lockedsigs_var] | 94 | sigs[pn][task] = [h, siggen_lockedsigs_var] |
94 | return sigs | 95 | return sigs |
95 | 96 | ||
97 | def lockedsigs_unihashmap(d): | ||
98 | unihashmap = {} | ||
99 | data = (d.getVar("SIGGEN_UNIHASHMAP") or "").split() | ||
100 | for entry in data: | ||
101 | pn, task, taskhash, unihash = entry.split(":") | ||
102 | unihashmap[(pn, task)] = (taskhash, unihash) | ||
103 | return unihashmap | ||
104 | |||
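Each SIGGEN_UNIHASHMAP entry written by dump_lockedsigs() below is a pn:task:taskhash:unihash quadruple, and lockedsigs_unihashmap() above simply inverts it. A sketch with the datastore lookup replaced by a plain string, hashes shortened for readability:

    def parse_unihashmap(data):
        unihashmap = {}
        for entry in data.split():
            pn, task, taskhash, unihash = entry.split(":")
            unihashmap[(pn, task)] = (taskhash, unihash)
        return unihashmap

    entries = "zlib:do_configure:1a2b:3c4d m4:do_compile:5e6f:5e6f"
    assert parse_unihashmap(entries)[("zlib", "do_configure")] == ("1a2b", "3c4d")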
96 | class SignatureGeneratorOEBasicHashMixIn(object): | 105 | class SignatureGeneratorOEBasicHashMixIn(object): |
97 | supports_multiconfig_datacaches = True | 106 | supports_multiconfig_datacaches = True |
98 | 107 | ||
@@ -100,6 +109,7 @@ class SignatureGeneratorOEBasicHashMixIn(object): | |||
100 | self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split() | 109 | self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split() |
101 | self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split() | 110 | self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split() |
102 | self.lockedsigs = sstate_lockedsigs(data) | 111 | self.lockedsigs = sstate_lockedsigs(data) |
112 | self.unihashmap = lockedsigs_unihashmap(data) | ||
103 | self.lockedhashes = {} | 113 | self.lockedhashes = {} |
104 | self.lockedpnmap = {} | 114 | self.lockedpnmap = {} |
105 | self.lockedhashfn = {} | 115 | self.lockedhashfn = {} |
@@ -209,6 +219,15 @@ class SignatureGeneratorOEBasicHashMixIn(object): | |||
209 | def get_cached_unihash(self, tid): | 219 | def get_cached_unihash(self, tid): |
210 | if tid in self.lockedhashes and self.lockedhashes[tid] and not self._internal: | 220 | if tid in self.lockedhashes and self.lockedhashes[tid] and not self._internal: |
211 | return self.lockedhashes[tid] | 221 | return self.lockedhashes[tid] |
222 | |||
223 | (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid) | ||
224 | recipename = self.lockedpnmap[fn] | ||
225 | |||
226 | if (recipename, task) in self.unihashmap: | ||
227 | taskhash, unihash = self.unihashmap[(recipename, task)] | ||
228 | if taskhash == self.taskhash[tid]: | ||
229 | return unihash | ||
230 | |||
212 | return super().get_cached_unihash(tid) | 231 | return super().get_cached_unihash(tid) |
213 | 232 | ||
214 | def dump_sigtask(self, fn, task, stampbase, runtime): | 233 | def dump_sigtask(self, fn, task, stampbase, runtime): |
@@ -219,6 +238,7 @@ class SignatureGeneratorOEBasicHashMixIn(object): | |||
219 | 238 | ||
220 | def dump_lockedsigs(self, sigfile, taskfilter=None): | 239 | def dump_lockedsigs(self, sigfile, taskfilter=None): |
221 | types = {} | 240 | types = {} |
241 | unihashmap = {} | ||
222 | for tid in self.runtaskdeps: | 242 | for tid in self.runtaskdeps: |
223 | # Bitbake changed this to a tuple in newer versions | 243 | # Bitbake changed this to a tuple in newer versions |
224 | if isinstance(tid, tuple): | 244 | if isinstance(tid, tuple): |
@@ -226,13 +246,18 @@ class SignatureGeneratorOEBasicHashMixIn(object): | |||
226 | if taskfilter: | 246 | if taskfilter: |
227 | if not tid in taskfilter: | 247 | if not tid in taskfilter: |
228 | continue | 248 | continue |
229 | fn = bb.runqueue.fn_from_tid(tid) | 249 | (_, _, task, fn) = bb.runqueue.split_tid_mcfn(tid) |
230 | t = self.lockedhashfn[fn].split(" ")[1].split(":")[5] | 250 | t = self.lockedhashfn[fn].split(" ")[1].split(":")[5] |
231 | t = 't-' + t.replace('_', '-') | 251 | t = 't-' + t.replace('_', '-') |
232 | if t not in types: | 252 | if t not in types: |
233 | types[t] = [] | 253 | types[t] = [] |
234 | types[t].append(tid) | 254 | types[t].append(tid) |
235 | 255 | ||
256 | taskhash = self.taskhash[tid] | ||
257 | unihash = self.get_unihash(tid) | ||
258 | if taskhash != unihash: | ||
259 | unihashmap[tid] = " " + self.lockedpnmap[fn] + ":" + task + ":" + taskhash + ":" + unihash | ||
260 | |||
236 | with open(sigfile, "w") as f: | 261 | with open(sigfile, "w") as f: |
237 | l = sorted(types) | 262 | l = sorted(types) |
238 | for t in l: | 263 | for t in l: |
@@ -245,7 +270,12 @@ class SignatureGeneratorOEBasicHashMixIn(object): | |||
245 | continue | 270 | continue |
246 | f.write(" " + self.lockedpnmap[fn] + ":" + task + ":" + self.get_unihash(tid) + " \\\n") | 271 | f.write(" " + self.lockedpnmap[fn] + ":" + task + ":" + self.get_unihash(tid) + " \\\n") |
247 | f.write(' "\n') | 272 | f.write(' "\n') |
248 | f.write('SIGGEN_LOCKEDSIGS_TYPES:%s = "%s"' % (self.machine, " ".join(l))) | 273 | f.write('SIGGEN_LOCKEDSIGS_TYPES:%s = "%s"\n' % (self.machine, " ".join(l))) |
274 | f.write('SIGGEN_UNIHASHMAP += "\\\n') | ||
275 | sortedtid = sorted(unihashmap, key=lambda tid: self.lockedpnmap[bb.runqueue.fn_from_tid(tid)]) | ||
276 | for tid in sortedtid: | ||
277 | f.write(unihashmap[tid] + " \\\n") | ||
278 | f.write(' "\n') | ||
249 | 279 | ||
250 | def dump_siglist(self, sigfile, path_prefix_strip=None): | 280 | def dump_siglist(self, sigfile, path_prefix_strip=None): |
251 | def strip_fn(fn): | 281 | def strip_fn(fn): |
@@ -327,7 +357,6 @@ class SignatureGeneratorOEEquivHash(SignatureGeneratorOEBasicHashMixIn, bb.sigge | |||
327 | self.method = data.getVar('SSTATE_HASHEQUIV_METHOD') | 357 | self.method = data.getVar('SSTATE_HASHEQUIV_METHOD') |
328 | if not self.method: | 358 | if not self.method: |
329 | bb.fatal("OEEquivHash requires SSTATE_HASHEQUIV_METHOD to be set") | 359 | bb.fatal("OEEquivHash requires SSTATE_HASHEQUIV_METHOD to be set") |
330 | self.max_parallel = int(data.getVar('BB_HASHSERVE_MAX_PARALLEL') or 1) | ||
331 | self.username = data.getVar("BB_HASHSERVE_USERNAME") | 360 | self.username = data.getVar("BB_HASHSERVE_USERNAME") |
332 | self.password = data.getVar("BB_HASHSERVE_PASSWORD") | 361 | self.password = data.getVar("BB_HASHSERVE_PASSWORD") |
333 | if not self.username or not self.password: | 362 | if not self.username or not self.password: |
@@ -371,7 +400,13 @@ def find_siginfo(pn, taskname, taskhashlist, d): | |||
371 | return siginfo.rpartition('.')[2] | 400 | return siginfo.rpartition('.')[2] |
372 | 401 | ||
373 | def get_time(fullpath): | 402 | def get_time(fullpath): |
374 | return os.stat(fullpath).st_mtime | 403 | # NFS can end up in a weird state where the file exists but has no stat info. |
404 | # If that happens, we assume it doesn't actually exist and show a warning | ||
405 | try: | ||
406 | return os.stat(fullpath).st_mtime | ||
407 | except FileNotFoundError: | ||
408 | bb.warn("Could not obtain mtime for {}".format(fullpath)) | ||
409 | return None | ||
375 | 410 | ||
376 | # First search in stamps dir | 411 | # First search in stamps dir |
377 | localdata = d.createCopy() | 412 | localdata = d.createCopy() |
@@ -384,6 +419,9 @@ def find_siginfo(pn, taskname, taskhashlist, d): | |||
384 | if pn.startswith("gcc-source"): | 419 | if pn.startswith("gcc-source"): |
385 | # gcc-source shared workdir is a special case :( | 420 | # gcc-source shared workdir is a special case :( |
386 | stamp = localdata.expand("${STAMPS_DIR}/work-shared/gcc-${PV}-${PR}") | 421 | stamp = localdata.expand("${STAMPS_DIR}/work-shared/gcc-${PV}-${PR}") |
422 | elif pn.startswith("llvm-project-source"): | ||
423 | # llvm-project-source shared workdir is also a special case :*( | ||
424 | stamp = localdata.expand("${STAMPS_DIR}/work-shared/llvm-project-source-${PV}-${PR}") | ||
387 | 425 | ||
388 | filespec = '%s.%s.sigdata.*' % (stamp, taskname) | 426 | filespec = '%s.%s.sigdata.*' % (stamp, taskname) |
389 | foundall = False | 427 | foundall = False |
@@ -394,13 +432,17 @@ def find_siginfo(pn, taskname, taskhashlist, d): | |||
394 | if taskhashlist: | 432 | if taskhashlist: |
395 | for taskhash in taskhashlist: | 433 | for taskhash in taskhashlist: |
396 | if fullpath.endswith('.%s' % taskhash): | 434 | if fullpath.endswith('.%s' % taskhash): |
397 | hashfiles[taskhash] = {'path':fullpath, 'sstate':False, 'time':get_time(fullpath)} | 435 | mtime = get_time(fullpath) |
436 | if mtime: | ||
437 | hashfiles[taskhash] = {'path':fullpath, 'sstate':False, 'time':mtime} | ||
398 | if len(hashfiles) == len(taskhashlist): | 438 | if len(hashfiles) == len(taskhashlist): |
399 | foundall = True | 439 | foundall = True |
400 | break | 440 | break |
401 | else: | 441 | else: |
402 | hashval = get_hashval(fullpath) | 442 | hashval = get_hashval(fullpath) |
403 | hashfiles[hashval] = {'path':fullpath, 'sstate':False, 'time':get_time(fullpath)} | 443 | mtime = get_time(fullpath) |
444 | if mtime: | ||
445 | hashfiles[hashval] = {'path':fullpath, 'sstate':False, 'time':mtime} | ||
404 | 446 | ||
405 | if not taskhashlist or (len(hashfiles) < 2 and not foundall): | 447 | if not taskhashlist or (len(hashfiles) < 2 and not foundall): |
406 | # That didn't work, look in sstate-cache | 448 | # That didn't work, look in sstate-cache |
@@ -431,7 +473,9 @@ def find_siginfo(pn, taskname, taskhashlist, d): | |||
431 | actual_hashval = get_hashval(fullpath) | 473 | actual_hashval = get_hashval(fullpath) |
432 | if actual_hashval in hashfiles: | 474 | if actual_hashval in hashfiles: |
433 | continue | 475 | continue |
434 | hashfiles[actual_hashval] = {'path':fullpath, 'sstate':True, 'time':get_time(fullpath)} | 476 | mtime = get_time(fullpath) |
477 | if mtime: | ||
478 | hashfiles[actual_hashval] = {'path':fullpath, 'sstate':True, 'time':mtime} | ||
435 | 479 | ||
436 | return hashfiles | 480 | return hashfiles |
437 | 481 | ||
@@ -450,6 +494,7 @@ def sstate_get_manifest_filename(task, d): | |||
450 | d2.setVar("SSTATE_MANMACH", extrainf) | 494 | d2.setVar("SSTATE_MANMACH", extrainf) |
451 | return (d2.expand("${SSTATE_MANFILEPREFIX}.%s" % task), d2) | 495 | return (d2.expand("${SSTATE_MANFILEPREFIX}.%s" % task), d2) |
452 | 496 | ||
497 | @bb.parse.vardepsexclude("BBEXTENDCURR", "BBEXTENDVARIANT", "OVERRIDES", "PACKAGE_EXTRA_ARCHS") | ||
453 | def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache): | 498 | def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache): |
454 | d2 = d | 499 | d2 = d |
455 | variant = '' | 500 | variant = '' |
@@ -524,6 +569,7 @@ def OEOuthashBasic(path, sigfile, task, d): | |||
524 | if task == "package": | 569 | if task == "package": |
525 | include_timestamps = True | 570 | include_timestamps = True |
526 | include_root = False | 571 | include_root = False |
572 | source_date_epoch = float(d.getVar("SOURCE_DATE_EPOCH")) | ||
527 | hash_version = d.getVar('HASHEQUIV_HASH_VERSION') | 573 | hash_version = d.getVar('HASHEQUIV_HASH_VERSION') |
528 | extra_sigdata = d.getVar("HASHEQUIV_EXTRA_SIGDATA") | 574 | extra_sigdata = d.getVar("HASHEQUIV_EXTRA_SIGDATA") |
529 | 575 | ||
@@ -615,7 +661,11 @@ def OEOuthashBasic(path, sigfile, task, d): | |||
615 | raise Exception(msg).with_traceback(e.__traceback__) | 661 | raise Exception(msg).with_traceback(e.__traceback__) |
616 | 662 | ||
617 | if include_timestamps: | 663 | if include_timestamps: |
618 | update_hash(" %10d" % s.st_mtime) | 664 | # Need to clamp to SOURCE_DATE_EPOCH |
665 | if s.st_mtime > source_date_epoch: | ||
666 | update_hash(" %10d" % source_date_epoch) | ||
667 | else: | ||
668 | update_hash(" %10d" % s.st_mtime) | ||
619 | 669 | ||
620 | update_hash(" ") | 670 | update_hash(" ") |
621 | if stat.S_ISBLK(s.st_mode) or stat.S_ISCHR(s.st_mode): | 671 | if stat.S_ISBLK(s.st_mode) or stat.S_ISCHR(s.st_mode): |
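The timestamp branch above is equivalent to hashing min(st_mtime, SOURCE_DATE_EPOCH). A one-line sketch with an illustrative epoch value:

    SOURCE_DATE_EPOCH = 1700000000.0   # illustrative value

    def clamped_mtime(st_mtime):
        # mtimes newer than SOURCE_DATE_EPOCH would make otherwise identical
        # package output hash differently, so clamp them for reproducibility
        return min(st_mtime, SOURCE_DATE_EPOCH)

    assert clamped_mtime(1800000000) == SOURCE_DATE_EPOCH
    assert clamped_mtime(1600000000) == 1600000000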
diff --git a/meta/lib/oe/tune.py b/meta/lib/oe/tune.py new file mode 100644 index 0000000000..7fda19430d --- /dev/null +++ b/meta/lib/oe/tune.py | |||
@@ -0,0 +1,81 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | # riscv_isa_to_tune(isa) | ||
8 | # | ||
9 | # Automatically translate a RISC-V ISA string to TUNE_FEATURES | ||
10 | # | ||
11 | # Abbreviations, such as rv32g -> rv32imafd_zicsr_zifencei, are supported. | ||
12 | # | ||
13 | # Profiles, such as rva22u64, are NOT supported; you must use ISA strings. | ||
14 | # | ||
15 | def riscv_isa_to_tune(isa): | ||
16 | _isa = isa.lower() | ||
17 | |||
18 | feature = [] | ||
19 | iter = 0 | ||
20 | |||
21 | # rv or riscv | ||
22 | if _isa[iter:].startswith('rv'): | ||
23 | feature.append('rv') | ||
24 | iter = iter + 2 | ||
25 | elif _isa[iter:].startswith('riscv'): | ||
26 | feature.append('rv') | ||
27 | iter = iter + 5 | ||
28 | else: | ||
29 | # Not a risc-v ISA! | ||
30 | return _isa | ||
31 | |||
32 | while (_isa[iter:]): | ||
33 | # Skip _ and whitespace | ||
34 | if _isa[iter] == '_' or _isa[iter].isspace(): | ||
35 | iter = iter + 1 | ||
36 | continue | ||
37 | |||
38 | # Length, just capture numbers here | ||
39 | if _isa[iter].isdigit(): | ||
40 | iter_end = iter | ||
41 | while iter_end < len(_isa) and _isa[iter_end].isdigit(): | ||
42 | iter_end = iter_end + 1 | ||
43 | |||
44 | feature.append(_isa[iter:iter_end]) | ||
45 | iter = iter_end | ||
46 | continue | ||
47 | |||
48 | # Typically i, e or g is next, followed by extensions. | ||
49 | # Extensions are single character, except for Z, Ss, Sh, Sm, Sv, and X | ||
50 | |||
51 | # If the extension starts with 'Z', 'S' or 'X' use the name until the next _, whitespace or end | ||
52 | if _isa[iter] in ['z', 's', 'x']: | ||
53 | ext_type = _isa[iter] | ||
54 | iter_end = iter + 1 | ||
55 | |||
56 | # Multi-character extension; these are supposed to have a '_' before the next multi-character extension | ||
57 | # See 37.4 and 37.5: | ||
58 | # 37.4: Underscores "_" may be used to separate ISA extensions... | ||
59 | # 37.5: All multi-letter extensions ... must be separated from other multi-letter extensions by an underscore... | ||
60 | # Some extensions permit only alphabetic characters, while others allow alphanumeric characters | ||
61 | while iter_end < len(_isa) and _isa[iter_end] != "_" and not _isa[iter_end].isspace(): | ||
62 | iter_end = iter_end + 1 | ||
63 | |||
64 | feature.append(_isa[iter:iter_end]) | ||
65 | iter = iter_end | ||
66 | continue | ||
67 | |||
68 | # 'g' is special, it's an abbreviation for imafd_zicsr_zifencei | ||
69 | # When expanding the abbreviation, any additional letters must appear before the _z* extensions | ||
70 | if _isa[iter] == 'g': | ||
71 | _isa = 'imafd' + _isa[iter+1:] + '_zicsr_zifencei' | ||
72 | iter = 0 | ||
73 | continue | ||
74 | |||
75 | feature.append(_isa[iter]) | ||
76 | iter = iter + 1 | ||
77 | continue | ||
78 | |||
79 | # Eliminate duplicates, but preserve the order | ||
80 | feature = list(dict.fromkeys(feature)) | ||
81 | return ' '.join(feature) | ||
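Example translations, assuming the function is importable as oe.tune.riscv_isa_to_tune (i.e. meta/lib is on sys.path); outputs traced from the code above:

    from oe.tune import riscv_isa_to_tune

    # Plain ISA string: prefix, XLEN, then one TUNE_FEATURES token per extension
    print(riscv_isa_to_tune("rv64imafdc"))
    # rv 64 i m a f d c

    # 'g' expands to imafd_zicsr_zifencei, with the remaining single-letter
    # extensions kept ahead of the multi-letter z* extensions
    print(riscv_isa_to_tune("rv32gc"))
    # rv 32 i m a f d c zicsr zifencei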
diff --git a/meta/lib/oe/utils.py b/meta/lib/oe/utils.py index 14a7d07ef0..a11db5f3cd 100644 --- a/meta/lib/oe/utils.py +++ b/meta/lib/oe/utils.py | |||
@@ -9,6 +9,8 @@ import multiprocessing | |||
9 | import traceback | 9 | import traceback |
10 | import errno | 10 | import errno |
11 | 11 | ||
12 | import bb.parse | ||
13 | |||
12 | def read_file(filename): | 14 | def read_file(filename): |
13 | try: | 15 | try: |
14 | f = open( filename, "r" ) | 16 | f = open( filename, "r" ) |
@@ -265,6 +267,7 @@ def execute_pre_post_process(d, cmds): | |||
265 | bb.note("Executing %s ..." % cmd) | 267 | bb.note("Executing %s ..." % cmd) |
266 | bb.build.exec_func(cmd, d) | 268 | bb.build.exec_func(cmd, d) |
267 | 269 | ||
270 | @bb.parse.vardepsexclude("BB_NUMBER_THREADS") | ||
268 | def get_bb_number_threads(d): | 271 | def get_bb_number_threads(d): |
269 | return int(d.getVar("BB_NUMBER_THREADS") or os.cpu_count() or 1) | 272 | return int(d.getVar("BB_NUMBER_THREADS") or os.cpu_count() or 1) |
270 | 273 | ||
@@ -316,7 +319,9 @@ def multiprocess_launch_mp(target, items, max_process, extraargs=None): | |||
316 | items = list(items) | 319 | items = list(items) |
317 | while (items and not errors) or launched: | 320 | while (items and not errors) or launched: |
318 | if not errors and items and len(launched) < max_process: | 321 | if not errors and items and len(launched) < max_process: |
319 | args = (items.pop(),) | 322 | args = items.pop() |
323 | if not type(args) is tuple: | ||
324 | args = (args,) | ||
320 | if extraargs is not None: | 325 | if extraargs is not None: |
321 | args = args + extraargs | 326 | args = args + extraargs |
322 | p = ProcessLaunch(target=target, args=args) | 327 | p = ProcessLaunch(target=target, args=args) |
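The tuple check above lets a single work list mix bare items and pre-built argument tuples for the same target. A sketch of the normalization, written with isinstance here:

    def normalize_args(args, extraargs=None):
        if not isinstance(args, tuple):
            args = (args,)           # bare item: wrap it
        if extraargs is not None:
            args = args + extraargs  # shared trailing arguments
        return args

    assert normalize_args("a.patch") == ("a.patch",)
    assert normalize_args(("a.patch", 1)) == ("a.patch", 1)
    assert normalize_args("a.patch", (42,)) == ("a.patch", 42)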
@@ -465,7 +470,7 @@ def host_gcc_version(d, taskcontextonly=False): | |||
465 | version = match.group(1) | 470 | version = match.group(1) |
466 | return "-%s" % version if version in ("4.8", "4.9") else "" | 471 | return "-%s" % version if version in ("4.8", "4.9") else "" |
467 | 472 | ||
468 | 473 | @bb.parse.vardepsexclude("DEFAULTTUNE_MULTILIB_ORIGINAL", "OVERRIDES") | |
469 | def get_multilib_datastore(variant, d): | 474 | def get_multilib_datastore(variant, d): |
470 | localdata = bb.data.createCopy(d) | 475 | localdata = bb.data.createCopy(d) |
471 | if variant: | 476 | if variant: |
@@ -482,19 +487,6 @@ def get_multilib_datastore(variant, d): | |||
482 | localdata.setVar("MLPREFIX", "") | 487 | localdata.setVar("MLPREFIX", "") |
483 | return localdata | 488 | return localdata |
484 | 489 | ||
485 | class ImageQAFailed(Exception): | ||
486 | def __init__(self, description, name=None, logfile=None): | ||
487 | self.description = description | ||
488 | self.name = name | ||
489 | self.logfile=logfile | ||
490 | |||
491 | def __str__(self): | ||
492 | msg = 'Function failed: %s' % self.name | ||
493 | if self.description: | ||
494 | msg = msg + ' (%s)' % self.description | ||
495 | |||
496 | return msg | ||
497 | |||
498 | def sh_quote(string): | 490 | def sh_quote(string): |
499 | import shlex | 491 | import shlex |
500 | return shlex.quote(string) | 492 | return shlex.quote(string) |