Diffstat (limited to 'meta/lib/oe')
-rw-r--r--  meta/lib/oe/__init__.py | 9
-rw-r--r--  meta/lib/oe/bootfiles.py | 57
-rw-r--r--  meta/lib/oe/buildcfg.py | 79
-rw-r--r--  meta/lib/oe/buildhistory_analysis.py | 2
-rw-r--r--  meta/lib/oe/buildstats.py | 254
-rw-r--r--  meta/lib/oe/cachedpath.py | 6
-rw-r--r--  meta/lib/oe/classextend.py | 13
-rw-r--r--  meta/lib/oe/classutils.py | 2
-rw-r--r--  meta/lib/oe/copy_buildsystem.py | 30
-rw-r--r--  meta/lib/oe/cve_check.py | 324
-rw-r--r--  meta/lib/oe/data.py | 2
-rw-r--r--  meta/lib/oe/distro_check.py | 4
-rw-r--r--  meta/lib/oe/elf.py | 257
-rw-r--r--  meta/lib/oe/fitimage.py | 547
-rw-r--r--  meta/lib/oe/go.py | 38
-rw-r--r--  meta/lib/oe/gpg_sign.py | 58
-rw-r--r--  meta/lib/oe/license.py | 265
-rw-r--r--  meta/lib/oe/license_finder.py | 179
-rw-r--r--  meta/lib/oe/lsb.py | 2
-rw-r--r--  meta/lib/oe/maketype.py | 9
-rw-r--r--  meta/lib/oe/manifest.py | 6
-rw-r--r--  meta/lib/oe/npm_registry.py | 175
-rw-r--r--  meta/lib/oe/overlayfs.py | 54
-rw-r--r--  meta/lib/oe/package.py | 1855
-rw-r--r--  meta/lib/oe/package_manager/__init__.py | 109
-rw-r--r--  meta/lib/oe/package_manager/common_deb_ipk.py | 97
-rw-r--r--  meta/lib/oe/package_manager/deb/__init__.py | 122
-rw-r--r--  meta/lib/oe/package_manager/deb/manifest.py | 2
-rw-r--r--  meta/lib/oe/package_manager/deb/rootfs.py | 2
-rw-r--r--  meta/lib/oe/package_manager/deb/sdk.py | 11
-rw-r--r--  meta/lib/oe/package_manager/ipk/__init__.py | 113
-rw-r--r--  meta/lib/oe/package_manager/ipk/manifest.py | 3
-rw-r--r--  meta/lib/oe/package_manager/ipk/rootfs.py | 41
-rw-r--r--  meta/lib/oe/package_manager/ipk/sdk.py | 11
-rw-r--r--  meta/lib/oe/package_manager/rpm/__init__.py | 53
-rw-r--r--  meta/lib/oe/package_manager/rpm/manifest.py | 2
-rw-r--r--  meta/lib/oe/package_manager/rpm/rootfs.py | 4
-rw-r--r--  meta/lib/oe/package_manager/rpm/sdk.py | 10
-rw-r--r--  meta/lib/oe/packagedata.py | 279
-rw-r--r--  meta/lib/oe/packagegroup.py | 2
-rw-r--r--  meta/lib/oe/patch.py | 258
-rw-r--r--  meta/lib/oe/path.py | 11
-rw-r--r--  meta/lib/oe/prservice.py | 33
-rw-r--r--  meta/lib/oe/qa.py | 69
-rw-r--r--  meta/lib/oe/qemu.py | 54
-rw-r--r--  meta/lib/oe/recipeutils.py | 210
-rw-r--r--  meta/lib/oe/reproducible.py | 117
-rw-r--r--  meta/lib/oe/rootfs.py | 117
-rw-r--r--  meta/lib/oe/rootfspostcommands.py | 90
-rw-r--r--  meta/lib/oe/rust.py | 11
-rw-r--r--  meta/lib/oe/sbom.py | 120
-rw-r--r--  meta/lib/oe/sbom30.py | 1096
-rw-r--r--  meta/lib/oe/sdk.py | 14
-rw-r--r--  meta/lib/oe/spdx.py | 357
-rw-r--r--  meta/lib/oe/spdx30.py | 5593
-rw-r--r--  meta/lib/oe/spdx30_tasks.py | 1367
-rw-r--r--  meta/lib/oe/spdx_common.py | 285
-rw-r--r--  meta/lib/oe/sstatesig.py | 289
-rw-r--r--  meta/lib/oe/terminal.py | 30
-rw-r--r--  meta/lib/oe/tune.py | 81
-rw-r--r--  meta/lib/oe/types.py | 2
-rw-r--r--  meta/lib/oe/useradd.py | 4
-rw-r--r--  meta/lib/oe/utils.py | 247
63 files changed, 14609 insertions, 934 deletions
diff --git a/meta/lib/oe/__init__.py b/meta/lib/oe/__init__.py
index 4e7c09da04..73de774266 100644
--- a/meta/lib/oe/__init__.py
+++ b/meta/lib/oe/__init__.py
@@ -1,6 +1,15 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
 from pkgutil import extend_path
 __path__ = extend_path(__path__, __name__)
+
+# Modules with visitor code need to go first, else anything depending on them won't be
+# processed correctly (e.g. qa)
+BBIMPORTS = ["qa", "data", "path", "utils", "types", "package", "packagedata", \
+             "packagegroup", "sstatesig", "lsb", "cachedpath", "license", "qemu", \
+             "reproducible", "rust", "buildcfg", "go", "spdx30_tasks", "spdx_common", \
+             "cve_check", "tune"]
diff --git a/meta/lib/oe/bootfiles.py b/meta/lib/oe/bootfiles.py
new file mode 100644
index 0000000000..7ee148c4e2
--- /dev/null
+++ b/meta/lib/oe/bootfiles.py
@@ -0,0 +1,57 @@
+#
+# SPDX-License-Identifier: MIT
+#
+# Copyright (C) 2024 Marcus Folkesson
+# Author: Marcus Folkesson <marcus.folkesson@gmail.com>
+#
+# Utility functions for handling boot files
+#
+# Looks into deploy_dir and searches for boot_files.
+# Returns a list of tuples with (original filepath relative to
+# deploy_dir, desired filepath after renaming)
+#
+# Heavily inspired by bootimg_partition.py
+#
+def get_boot_files(deploy_dir, boot_files):
+    import re
+    import os
+    from glob import glob
+
+    if boot_files is None:
+        return None
+
+    # list of tuples (src_name, dst_name)
+    deploy_files = []
+    for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files):
+        if ';' in src_entry:
+            dst_entry = tuple(src_entry.split(';'))
+            if not dst_entry[0] or not dst_entry[1]:
+                raise ValueError('Malformed boot file entry: %s' % src_entry)
+        else:
+            dst_entry = (src_entry, src_entry)
+
+        deploy_files.append(dst_entry)
+
+    install_files = []
+    for deploy_entry in deploy_files:
+        src, dst = deploy_entry
+        if '*' in src:
+            # by default install files under their basename
+            entry_name_fn = os.path.basename
+            if dst != src:
+                # unless a target name was given, then treat name
+                # as a directory and append a basename
+                entry_name_fn = lambda name: \
+                                os.path.join(dst,
+                                             os.path.basename(name))
+
+            srcs = glob(os.path.join(deploy_dir, src))
+
+            for entry in srcs:
+                src = os.path.relpath(entry, deploy_dir)
+                entry_dst_name = entry_name_fn(entry)
+                install_files.append((src, entry_dst_name))
+        else:
+            install_files.append((src, dst))
+
+    return install_files
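For illustration, a minimal, hedged usage sketch for get_boot_files() (the deploy directory and the IMAGE_BOOT_FILES-style value below are made up):

    from oe.bootfiles import get_boot_files

    # "u-boot.bin" installs under its own name; "uImage;kernel" renames it
    files = get_boot_files("/path/to/deploy/images/mymachine", "u-boot.bin uImage;kernel")
    # -> [('u-boot.bin', 'u-boot.bin'), ('uImage', 'kernel')]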
diff --git a/meta/lib/oe/buildcfg.py b/meta/lib/oe/buildcfg.py
new file mode 100644
index 0000000000..85b903fab0
--- /dev/null
+++ b/meta/lib/oe/buildcfg.py
@@ -0,0 +1,79 @@
+
+import os
+import subprocess
+import bb.process
+
+def detect_revision(d):
+    path = get_scmbasepath(d)
+    return get_metadata_git_revision(path)
+
+def detect_branch(d):
+    path = get_scmbasepath(d)
+    return get_metadata_git_branch(path)
+
+def get_scmbasepath(d):
+    return os.path.join(d.getVar('COREBASE'), 'meta')
+
+def get_metadata_git_branch(path):
+    try:
+        rev, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', cwd=path)
+    except (bb.process.ExecutionError, bb.process.NotFoundError):
+        rev = '<unknown>'
+    return rev.strip()
+
+def get_metadata_git_revision(path):
+    try:
+        rev, _ = bb.process.run('git rev-parse HEAD', cwd=path)
+    except (bb.process.ExecutionError, bb.process.NotFoundError):
+        rev = '<unknown>'
+    return rev.strip()
+
+def get_metadata_git_toplevel(path):
+    try:
+        toplevel, _ = bb.process.run('git rev-parse --show-toplevel', cwd=path)
+    except (bb.process.ExecutionError, bb.process.NotFoundError):
+        return ""
+    return toplevel.strip()
+
+def get_metadata_git_remotes(path):
+    try:
+        remotes_list, _ = bb.process.run('git remote', cwd=path)
+        remotes = remotes_list.split()
+    except (bb.process.ExecutionError, bb.process.NotFoundError):
+        remotes = []
+    return remotes
+
+def get_metadata_git_remote_url(path, remote):
+    try:
+        uri, _ = bb.process.run('git remote get-url {remote}'.format(remote=remote), cwd=path)
+    except (bb.process.ExecutionError, bb.process.NotFoundError):
+        return ""
+    return uri.strip()
+
+def get_metadata_git_describe(path):
+    try:
+        describe, _ = bb.process.run('git describe --tags --dirty', cwd=path)
+    except (bb.process.ExecutionError, bb.process.NotFoundError):
+        return ""
+    return describe.strip()
+
+def is_layer_modified(path):
+    try:
+        subprocess.check_output("""cd %s; export PSEUDO_UNLOAD=1; set -e;
+                                git diff --quiet --no-ext-diff
+                                git diff --quiet --no-ext-diff --cached""" % path,
+                                shell=True,
+                                stderr=subprocess.STDOUT)
+        return ""
+    except subprocess.CalledProcessError as ex:
+        # Silently treat errors as "modified", without checking for the
+        # (expected) return code 1 in a modified git repo. For example, we get
+        # output and a 129 return code when a layer isn't a git repo at all.
+        return " -- modified"
+
+def get_layer_revisions(d):
+    layers = (d.getVar("BBLAYERS") or "").split()
+    revisions = []
+    for i in layers:
+        revisions.append((i, os.path.basename(i), get_metadata_git_branch(i).strip(), get_metadata_git_revision(i), is_layer_modified(i)))
+    return revisions
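A hedged sketch of how these helpers are typically combined (assumes a BitBake datastore 'd' and the bb module, as in class/recipe context; output values are illustrative):

    import oe.buildcfg

    branch = oe.buildcfg.detect_branch(d)       # e.g. 'master', or '<unknown>' outside a git checkout
    revision = oe.buildcfg.detect_revision(d)   # e.g. a full SHA-1, or '<unknown>'
    for path, name, branch, rev, modified in oe.buildcfg.get_layer_revisions(d):
        bb.note("%s = %s:%s%s" % (name, branch, rev, modified))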
diff --git a/meta/lib/oe/buildhistory_analysis.py b/meta/lib/oe/buildhistory_analysis.py
index b1856846b6..4edad01580 100644
--- a/meta/lib/oe/buildhistory_analysis.py
+++ b/meta/lib/oe/buildhistory_analysis.py
@@ -562,7 +562,7 @@ def compare_siglists(a_blob, b_blob, taskdiff=False):
         elif not hash2 in hashfiles:
             out.append("Unable to find matching sigdata for %s with hash %s" % (desc, hash2))
         else:
-            out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb, collapsed=True)
+            out2 = bb.siggen.compare_sigfiles(hashfiles[hash1]['path'], hashfiles[hash2]['path'], recursecb, collapsed=True)
             for line in out2:
                 m = hashlib.sha256()
                 m.update(line.encode('utf-8'))
diff --git a/meta/lib/oe/buildstats.py b/meta/lib/oe/buildstats.py
new file mode 100644
index 0000000000..2700245ec6
--- /dev/null
+++ b/meta/lib/oe/buildstats.py
@@ -0,0 +1,254 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# Implements system state sampling. Called by buildstats.bbclass.
+# Because it is a real Python module, it can hold persistent state,
+# like open log files and the time of the last sampling.
+
+import os
+import time
+import re
+import bb.event
+from collections import deque
+
+class SystemStats:
+    def __init__(self, d):
+        bn = d.getVar('BUILDNAME')
+        bsdir = os.path.join(d.getVar('BUILDSTATS_BASE'), bn)
+        bb.utils.mkdirhier(bsdir)
+        file_handlers = [('diskstats', self._reduce_diskstats),
+                         ('meminfo', self._reduce_meminfo),
+                         ('stat', self._reduce_stat),
+                         ('net/dev', self._reduce_net)]
+
+        # Some hosts like openSUSE have readable /proc/pressure files
+        # but throw errors when these files are opened. Catch these errors
+        # and ensure that the reduced_proc_pressure directory is not created.
+        if os.path.exists("/proc/pressure"):
+            try:
+                with open('/proc/pressure/cpu', 'rb') as source:
+                    source.read()
+                pressuredir = os.path.join(bsdir, 'reduced_proc_pressure')
+                bb.utils.mkdirhier(pressuredir)
+                file_handlers.extend([('pressure/cpu', self._reduce_pressure),
+                                      ('pressure/io', self._reduce_pressure),
+                                      ('pressure/memory', self._reduce_pressure)])
+            except Exception:
+                pass
+
+        self.proc_files = []
+        for filename, handler in (file_handlers):
+            # The corresponding /proc files might not exist on the host.
+            # For example, /proc/diskstats is not available in virtualized
+            # environments like Linux-VServer. Silently skip collecting
+            # the data.
+            if os.path.exists(os.path.join('/proc', filename)):
+                # In practice, this class gets instantiated only once in
+                # the bitbake cooker process. Therefore 'append' mode is
+                # not strictly necessary, but using it makes the class
+                # more robust should two processes ever write
+                # concurrently.
+                if filename == 'net/dev':
+                    destfile = os.path.join(bsdir, 'reduced_proc_net.log')
+                else:
+                    destfile = os.path.join(bsdir, '%sproc_%s.log' % ('reduced_' if handler else '', filename))
+                self.proc_files.append((filename, open(destfile, 'ab'), handler))
+        self.monitor_disk = open(os.path.join(bsdir, 'monitor_disk.log'), 'ab')
+        # Last time we sampled /proc data and recorded disk monitoring data, respectively.
+        self.last_proc = 0
+        self.last_disk_monitor = 0
+        # Minimum number of seconds between recording a sample. This becomes relevant when we get
+        # called very often while many short tasks get started. Sampling during quiet periods
+        # depends on the heartbeat event, which fires less often.
+        # By default, the Heartbeat events occur roughly once every second but the actual time
+        # between these events deviates by a few milliseconds, in most cases. Hence
+        # pick a somewhat arbitrary tolerance such that we sample a large majority
+        # of the Heartbeat events. This ignores rare events that fall outside the minimum
+        # and may lead to an extra sample in a given second every so often. However, it allows for fairly
+        # consistent intervals between samples without missing many events.
+        self.tolerance = 0.01
+        self.min_seconds = 1.0 - self.tolerance
+
+        self.meminfo_regex = re.compile(rb'^(MemTotal|MemFree|Buffers|Cached|SwapTotal|SwapFree):\s*(\d+)')
+        self.diskstats_regex = re.compile(rb'^([hsv]d.|mtdblock\d|mmcblk\d|cciss/c\d+d\d+|nvme\d+n\d+.*)$')
+        self.diskstats_ltime = None
+        self.diskstats_data = None
+        self.stat_ltimes = None
+        # Last time we sampled /proc/pressure. All resources stored in a single dict with the key as filename
+        self.last_pressure = {"pressure/cpu": None, "pressure/io": None, "pressure/memory": None}
+        self.net_stats = {}
+
+    def close(self):
+        self.monitor_disk.close()
+        for _, output, _ in self.proc_files:
+            output.close()
+
+    def _reduce_meminfo(self, time, data, filename):
+        """
+        Extracts 'MemTotal', 'MemFree', 'Buffers', 'Cached', 'SwapTotal', 'SwapFree'
+        and writes their values into a single line, in that order.
+        """
+        values = {}
+        for line in data.split(b'\n'):
+            m = self.meminfo_regex.match(line)
+            if m:
+                values[m.group(1)] = m.group(2)
+        if len(values) == 6:
+            return (time,
+                    b' '.join([values[x] for x in
+                               (b'MemTotal', b'MemFree', b'Buffers', b'Cached', b'SwapTotal', b'SwapFree')]) + b'\n')
+
+    def _reduce_net(self, time, data, filename):
+        data = data.split(b'\n')
+        for line in data[2:]:
+            if b":" not in line:
+                continue
+            try:
+                parts = line.split()
+                iface = (parts[0].strip(b':')).decode('ascii')
+                receive_bytes = int(parts[1])
+                transmit_bytes = int(parts[9])
+            except Exception:
+                continue
+
+            if iface not in self.net_stats:
+                self.net_stats[iface] = deque(maxlen=2)
+                self.net_stats[iface].append((receive_bytes, transmit_bytes, 0, 0))
+            prev = self.net_stats[iface][-1] if self.net_stats[iface] else (0, 0, 0, 0)
+            receive_diff = receive_bytes - prev[0]
+            transmit_diff = transmit_bytes - prev[1]
+            self.net_stats[iface].append((
+                receive_bytes,
+                transmit_bytes,
+                receive_diff,
+                transmit_diff
+            ))
+
+        result_str = "\n".join(
+            f"{iface}: {net_data[-1][0]} {net_data[-1][1]} {net_data[-1][2]} {net_data[-1][3]}"
+            for iface, net_data in self.net_stats.items()
+        ) + "\n"
+
+        return time, result_str.encode('ascii')
+
+    def _diskstats_is_relevant_line(self, linetokens):
+        if len(linetokens) < 14:
+            return False
+        disk = linetokens[2]
+        return self.diskstats_regex.match(disk)
+
+    def _reduce_diskstats(self, time, data, filename):
+        relevant_tokens = filter(self._diskstats_is_relevant_line, map(lambda x: x.split(), data.split(b'\n')))
+        diskdata = [0] * 3
+        reduced = None
+        for tokens in relevant_tokens:
+            # rsect
+            diskdata[0] += int(tokens[5])
+            # wsect
+            diskdata[1] += int(tokens[9])
+            # use
+            diskdata[2] += int(tokens[12])
+        if self.diskstats_ltime:
+            # We need to compute information about the time interval
+            # since the last sampling and record the result as sample
+            # for that point in the past.
+            interval = time - self.diskstats_ltime
+            if interval > 0:
+                sums = [ a - b for a, b in zip(diskdata, self.diskstats_data) ]
+                readTput = sums[0] / 2.0 * 100.0 / interval
+                writeTput = sums[1] / 2.0 * 100.0 / interval
+                util = float( sums[2] ) / 10 / interval
+                util = max(0.0, min(1.0, util))
+                reduced = (self.diskstats_ltime, (readTput, writeTput, util))
+
+        self.diskstats_ltime = time
+        self.diskstats_data = diskdata
+        return reduced
+
+
+    def _reduce_nop(self, time, data, filename):
+        return (time, data)
+
+    def _reduce_stat(self, time, data, filename):
+        if not data:
+            return None
+        # CPU times {user, nice, system, idle, io_wait, irq, softirq} from first line
+        tokens = data.split(b'\n', 1)[0].split()
+        times = [ int(token) for token in tokens[1:] ]
+        reduced = None
+        if self.stat_ltimes:
+            user = float((times[0] + times[1]) - (self.stat_ltimes[0] + self.stat_ltimes[1]))
+            system = float((times[2] + times[5] + times[6]) - (self.stat_ltimes[2] + self.stat_ltimes[5] + self.stat_ltimes[6]))
+            idle = float(times[3] - self.stat_ltimes[3])
+            iowait = float(times[4] - self.stat_ltimes[4])
+
+            aSum = max(user + system + idle + iowait, 1)
+            reduced = (time, (user/aSum, system/aSum, iowait/aSum))
+
+        self.stat_ltimes = times
+        return reduced
+
+    def _reduce_pressure(self, time, data, filename):
+        """
+        Return reduced pressure: {avg10, avg60, avg300} and delta total compared to the previous sample
+        for the cpu, io and memory resources. A common function is used for all 3 resources since the
+        format of the /proc/pressure file is the same in each case.
+        """
+        if not data:
+            return None
+        tokens = data.split(b'\n', 1)[0].split()
+        avg10 = float(tokens[1].split(b'=')[1])
+        avg60 = float(tokens[2].split(b'=')[1])
+        avg300 = float(tokens[3].split(b'=')[1])
+        total = int(tokens[4].split(b'=')[1])
+
+        reduced = None
+        if self.last_pressure[filename]:
+            delta = total - self.last_pressure[filename]
+            reduced = (time, (avg10, avg60, avg300, delta))
+        self.last_pressure[filename] = total
+        return reduced
+
+    def sample(self, event, force):
+        """
+        Collect and log proc or disk_monitor stats periodically.
+        Return True if a new sample is collected and hence the value last_proc or last_disk_monitor
+        is changed.
+        """
+        retval = False
+        now = time.time()
+        if (now - self.last_proc > self.min_seconds) or force:
+            for filename, output, handler in self.proc_files:
+                with open(os.path.join('/proc', filename), 'rb') as input:
+                    data = input.read()
+                    if handler:
+                        reduced = handler(now, data, filename)
+                    else:
+                        reduced = (now, data)
+                    if reduced:
+                        if isinstance(reduced[1], bytes):
+                            # Use as it is.
+                            data = reduced[1]
+                        else:
+                            # Convert to a single line.
+                            data = (' '.join([str(x) for x in reduced[1]]) + '\n').encode('ascii')
+                        # Unbuffered raw write, less overhead and useful
+                        # in case that we end up with concurrent writes.
+                        os.write(output.fileno(),
+                                 ('%.0f\n' % reduced[0]).encode('ascii') +
+                                 data +
+                                 b'\n')
+            self.last_proc = now
+            retval = True
+
+        if isinstance(event, bb.event.MonitorDiskEvent) and \
+           ((now - self.last_disk_monitor > self.min_seconds) or force):
+            os.write(self.monitor_disk.fileno(),
+                     ('%.0f\n' % now).encode('ascii') +
+                     ''.join(['%s: %d\n' % (dev, sample.total_bytes - sample.free_bytes)
+                              for dev, sample in event.disk_usage.items()]).encode('ascii') +
+                     b'\n')
+            self.last_disk_monitor = now
+            retval = True
+        return retval
\ No newline at end of file
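sample() writes each record as a whole-second timestamp line, one payload line, then a blank line. A hedged reader for the reduced /proc/stat log written above (the file path is hypothetical):

    def read_reduced_stat(path="reduced_proc_stat.log"):
        samples = []
        with open(path, "rb") as f:
            # records are separated by blank lines
            for block in f.read().split(b"\n\n"):
                lines = block.strip().split(b"\n")
                if len(lines) < 2:
                    continue
                timestamp = int(lines[0])
                # payload from _reduce_stat: user/system/iowait fractions
                user, system, iowait = (float(x) for x in lines[1].split())
                samples.append((timestamp, user, system, iowait))
        return samples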
diff --git a/meta/lib/oe/cachedpath.py b/meta/lib/oe/cachedpath.py
index 254257a83f..68c85807d9 100644
--- a/meta/lib/oe/cachedpath.py
+++ b/meta/lib/oe/cachedpath.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 # Based on standard python library functions but avoid
@@ -109,9 +111,13 @@ class CachedPath(object):
             return True
         return False
 
+    # WARNING - this is not currently a drop-in replacement since it returns
+    # False rather than raising exceptions.
     def stat(self, path):
         return self.callstat(path)
 
+    # WARNING - this is not currently a drop-in replacement since it returns
+    # False rather than raising exceptions.
     def lstat(self, path):
         return self.calllstat(path)
 
diff --git a/meta/lib/oe/classextend.py b/meta/lib/oe/classextend.py
index d3d8fbe724..8ae5d3b715 100644
--- a/meta/lib/oe/classextend.py
+++ b/meta/lib/oe/classextend.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
@@ -30,6 +32,9 @@ class ClassExtender(object):
         if name.endswith("-" + self.extname):
             name = name.replace("-" + self.extname, "")
         if name.startswith("virtual/"):
+            # Assume a large number of dashes means a triplet is present and we don't need to convert
+            if name.count("-") >= 3 and name.endswith(("-go",)):
+                return name
             subs = name.split("/", 1)[1]
             if not subs.startswith(self.extname):
                 return "virtual/" + self.extname + "-" + subs
@@ -87,7 +92,7 @@ class ClassExtender(object):
     def map_depends_variable(self, varname, suffix = ""):
         # We need to preserve EXTENDPKGV so it can be expanded correctly later
         if suffix:
-            varname = varname + "_" + suffix
+            varname = varname + ":" + suffix
         orig = self.d.getVar("EXTENDPKGV", False)
         self.d.setVar("EXTENDPKGV", "EXTENDPKGV")
         deps = self.d.getVar(varname)
@@ -142,15 +147,13 @@ class ClassExtender(object):
             if pkg_mapping[0].startswith("${") and pkg_mapping[0].endswith("}"):
                 continue
             for subs in variables:
-                self.d.renameVar("%s_%s" % (subs, pkg_mapping[0]), "%s_%s" % (subs, pkg_mapping[1]))
+                self.d.renameVar("%s:%s" % (subs, pkg_mapping[0]), "%s:%s" % (subs, pkg_mapping[1]))
 
 class NativesdkClassExtender(ClassExtender):
     def map_depends(self, dep):
         if dep.startswith(self.extname):
             return dep
-        if dep.endswith(("-gcc", "-g++")):
-            return dep + "-crosssdk"
-        elif dep.endswith(("-native", "-native-runtime")) or ('nativesdk-' in dep) or ('-cross-' in dep) or ('-crosssdk-' in dep):
+        if dep.endswith(("-native", "-native-runtime")) or ('nativesdk-' in dep) or ('-cross-' in dep) or ('-crosssdk-' in dep):
             return dep
         else:
             return self.extend_name(dep)
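A hedged sketch of the new short-circuit in extend_name(), assuming a ClassExtender built for 'nativesdk' with a BitBake datastore 'd':

    from oe.classextend import ClassExtender

    e = ClassExtender('nativesdk', d)
    e.extend_name('virtual/libc')                  # -> 'virtual/nativesdk-libc'
    e.extend_name('virtual/x86_64-poky-linux-go')  # enough dashes plus '-go': returned unchanged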
diff --git a/meta/lib/oe/classutils.py b/meta/lib/oe/classutils.py
index 08bb66b365..ec3f6ad720 100644
--- a/meta/lib/oe/classutils.py
+++ b/meta/lib/oe/classutils.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
diff --git a/meta/lib/oe/copy_buildsystem.py b/meta/lib/oe/copy_buildsystem.py
index 31a84f5b06..ced751b835 100644
--- a/meta/lib/oe/copy_buildsystem.py
+++ b/meta/lib/oe/copy_buildsystem.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 # This class should provide easy access to the different aspects of the
@@ -20,7 +22,7 @@ def _smart_copy(src, dest):
     mode = os.stat(src).st_mode
     if stat.S_ISDIR(mode):
         bb.utils.mkdirhier(dest)
-        cmd = "tar --exclude='.git' --xattrs --xattrs-include='*' -chf - -C %s -p . \
+        cmd = "tar --exclude='.git' --exclude='__pycache__' --xattrs --xattrs-include='*' -cf - -C %s -p . \
         | tar --xattrs --xattrs-include='*' -xf - -C %s" % (src, dest)
         subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
     else:
@@ -45,9 +47,6 @@ class BuildSystem(object):
 
         corebase = os.path.abspath(self.d.getVar('COREBASE'))
         layers.append(corebase)
-        # Get relationship between TOPDIR and COREBASE
-        # Layers should respect it
-        corebase_relative = os.path.dirname(os.path.relpath(os.path.abspath(self.d.getVar('TOPDIR')), corebase))
         # The bitbake build system uses the meta-skeleton layer as a layout
         # for common recipes, e.g: the recipetool script to create kernel recipes
         # Add the meta-skeleton layer to be included as part of the eSDK installation
@@ -100,11 +99,10 @@
             layerdestpath = destdir
             if corebase == os.path.dirname(layer):
                 layerdestpath += '/' + os.path.basename(corebase)
-            else:
-                layer_relative = os.path.relpath(layer, corebase)
-                if os.path.dirname(layer_relative) == corebase_relative:
-                    layer_relative = os.path.dirname(corebase_relative) + '/' + layernewname
-                layer_relative = os.path.basename(corebase) + '/' + layer_relative
+            # If the layer is located somewhere under the same parent directory
+            # as corebase we keep the layer structure.
+            elif os.path.commonpath([layer, corebase]) == os.path.dirname(corebase):
+                layer_relative = os.path.relpath(layer, os.path.dirname(corebase))
                 if os.path.dirname(layer_relative) != layernewname:
                     layerdestpath += '/' + os.path.dirname(layer_relative)
 
@@ -195,13 +193,17 @@ def prune_lockedsigs(excluded_tasks, excluded_targets, lockedsigs, onlynative, p
             else:
                 f.write(line)
                 invalue = False
-        elif line.startswith('SIGGEN_LOCKEDSIGS'):
+        elif line.startswith('SIGGEN_LOCKEDSIGS_t'):
             invalue = True
             f.write(line)
+        else:
+            invalue = False
+            f.write(line)
 
 def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_output, copy_output=None):
     merged = {}
     arch_order = []
+    otherdata = []
     with open(lockedsigs_main, 'r') as f:
         invalue = None
         for line in f:
@@ -214,6 +216,9 @@ def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_outpu
                 invalue = line[18:].split('=', 1)[0].rstrip()
                 merged[invalue] = []
                 arch_order.append(invalue)
+            else:
+                invalue = None
+                otherdata.append(line)
 
     with open(lockedsigs_extra, 'r') as f:
         invalue = None
@@ -248,6 +253,7 @@ def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_outpu
             f.write('    "\n')
             fulltypes.append(typename)
     f.write('SIGGEN_LOCKEDSIGS_TYPES = "%s"\n' % ' '.join(fulltypes))
+    f.write('\n' + ''.join(otherdata))
 
     if copy_output:
         write_sigs_file(copy_output, list(tocopy.keys()), tocopy)
@@ -259,7 +265,7 @@ def create_locked_sstate_cache(lockedsigs, input_sstate_cache, output_sstate_cac
     bb.note('Generating sstate-cache...')
 
     nativelsbstring = d.getVar('NATIVELSBSTRING')
-    bb.process.run("gen-lockedsig-cache %s %s %s %s %s" % (lockedsigs, input_sstate_cache, output_sstate_cache, nativelsbstring, filterfile or ''))
+    bb.process.run("PYTHONDONTWRITEBYTECODE=1 gen-lockedsig-cache %s %s %s %s %s" % (lockedsigs, input_sstate_cache, output_sstate_cache, nativelsbstring, filterfile or ''))
     if fixedlsbstring and nativelsbstring != fixedlsbstring:
         nativedir = output_sstate_cache + '/' + nativelsbstring
         if os.path.isdir(nativedir):
@@ -286,7 +292,7 @@ def check_sstate_task_list(d, targets, filteroutfile, cmdprefix='', cwd=None, lo
         logparam = '-l %s' % logfile
     else:
         logparam = ''
-    cmd = "%sBB_SETSCENE_ENFORCE=1 PSEUDO_DISABLED=1 oe-check-sstate %s -s -o %s %s" % (cmdprefix, targets, filteroutfile, logparam)
+    cmd = "%sPYTHONDONTWRITEBYTECODE=1 BB_SETSCENE_ENFORCE=1 PSEUDO_DISABLED=1 oe-check-sstate %s -s -o %s %s" % (cmdprefix, targets, filteroutfile, logparam)
     env = dict(d.getVar('BB_ORIGENV', False))
     env.pop('BUILDDIR', '')
     env.pop('BBPATH', '')
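For context on the prune_lockedsigs() change: matching the 'SIGGEN_LOCKEDSIGS_t' prefix selects only the per-type signature lists, while lines such as SIGGEN_LOCKEDSIGS_TYPES now fall through to the new else branch. A hypothetical locked-sigs.inc excerpt along these lines (recipe, task and hash are made up):

    SIGGEN_LOCKEDSIGS_t-x86-64 = "\
        gzip-native:do_populate_sysroot:0123abcd... \
        "
    SIGGEN_LOCKEDSIGS_TYPES = "t-x86-64"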
diff --git a/meta/lib/oe/cve_check.py b/meta/lib/oe/cve_check.py
index ce755f940a..ae194f27cf 100644
--- a/meta/lib/oe/cve_check.py
+++ b/meta/lib/oe/cve_check.py
@@ -1,7 +1,15 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
 import collections
-import re
-import itertools
 import functools
+import itertools
+import os.path
+import re
+import oe.patch
 
 _Version = collections.namedtuple(
     "_Version", ["release", "patch_l", "pre_l", "pre_v"]
@@ -11,8 +19,13 @@ _Version = collections.namedtuple(
 class Version():
 
     def __init__(self, version, suffix=None):
+
+        suffixes = ["alphabetical", "patch"]
+
         if str(suffix) == "alphabetical":
             version_pattern = r"""r?v?(?:(?P<release>[0-9]+(?:[-\.][0-9]+)*)(?P<patch>[-_\.]?(?P<patch_l>[a-z]))?(?P<pre>[-_\.]?(?P<pre_l>(rc|alpha|beta|pre|preview|dev))[-_\.]?(?P<pre_v>[0-9]+)?)?)(.*)?"""
+        elif str(suffix) == "patch":
+            version_pattern = r"""r?v?(?:(?P<release>[0-9]+(?:[-\.][0-9]+)*)(?P<patch>[-_\.]?(p|patch)(?P<patch_l>[0-9]+))?(?P<pre>[-_\.]?(?P<pre_l>(rc|alpha|beta|pre|preview|dev))[-_\.]?(?P<pre_v>[0-9]+)?)?)(.*)?"""
         else:
             version_pattern = r"""r?v?(?:(?P<release>[0-9]+(?:[-\.][0-9]+)*)(?P<pre>[-_\.]?(?P<pre_l>(rc|alpha|beta|pre|preview|dev))[-_\.]?(?P<pre_v>[0-9]+)?)?)(.*)?"""
         regex = re.compile(r"^\s*" + version_pattern + r"\s*$", re.VERBOSE | re.IGNORECASE)
@@ -23,7 +36,7 @@ class Version():
 
         self._version = _Version(
             release=tuple(int(i) for i in match.group("release").replace("-",".").split(".")),
-            patch_l=match.group("patch_l") if str(suffix) == "alphabetical" and match.group("patch_l") else "",
+            patch_l=match.group("patch_l") if str(suffix) in suffixes and match.group("patch_l") else "",
             pre_l=match.group("pre_l"),
             pre_v=match.group("pre_v")
         )
@@ -58,3 +71,308 @@ def _cmpkey(release, patch_l, pre_l, pre_v):
     else:
         _pre = float(pre_v) if pre_v else float('-inf')
     return _release, _patch, _pre
+
+
+def parse_cve_from_filename(patch_filename):
+    """
+    Parses CVE ID from the filename
+
+    Matches the last "CVE-YYYY-ID" in the file name, also if written
+    in lowercase. Possible to have multiple CVE IDs in a single
+    file name, but only the last one will be detected from the file name.
+
+    Returns the last CVE ID found in the filename. If no CVE ID is found
+    an empty string is returned.
+    """
+    cve_file_name_match = re.compile(r".*(CVE-\d{4}-\d{4,})", re.IGNORECASE)
+
+    # Check patch file name for CVE ID
+    fname_match = cve_file_name_match.search(patch_filename)
+    return fname_match.group(1).upper() if fname_match else ""
+
+
+def parse_cves_from_patch_contents(patch_contents):
+    """
+    Parses CVE IDs from patch contents
+
+    Matches all CVE IDs contained on a line that starts with "CVE: ". Any
+    delimiter (',', '&', "and", etc.) can be used without any issues. Multiple
+    "CVE:" lines can also exist.
+
+    Returns a set of all CVE IDs found in the patch contents.
+    """
+    cve_ids = set()
+    cve_match = re.compile(r"CVE-\d{4}-\d{4,}")
+    # Search for one or more "CVE: " lines
+    for line in patch_contents.split("\n"):
+        if not line.startswith("CVE:"):
+            continue
+        cve_ids.update(cve_match.findall(line))
+    return cve_ids
+
+
+def parse_cves_from_patch_file(patch_file):
+    """
+    Parses CVE IDs associated with a particular patch file, using both the filename
+    and patch contents.
+
+    Returns a set of all CVE IDs found in the patch filename and contents.
+    """
+    cve_ids = set()
+    filename_cve = parse_cve_from_filename(patch_file)
+    if filename_cve:
+        bb.debug(2, "Found %s from patch file name %s" % (filename_cve, patch_file))
+        cve_ids.add(filename_cve)
+
+    # Remote patches won't be present and compressed patches won't be
+    # unpacked, so say we're not scanning them
+    if not os.path.isfile(patch_file):
+        bb.note("%s is remote or compressed, not scanning content" % patch_file)
+        return cve_ids
+
+    with open(patch_file, "r", encoding="utf-8") as f:
+        try:
+            patch_text = f.read()
+        except UnicodeDecodeError:
+            bb.debug(
+                1,
+                "Failed to read patch %s using UTF-8 encoding,"
+                " trying with iso8859-1" % patch_file,
+            )
+            f.close()
+            with open(patch_file, "r", encoding="iso8859-1") as f:
+                patch_text = f.read()
+
+    cve_ids.update(parse_cves_from_patch_contents(patch_text))
+
+    if not cve_ids:
+        bb.debug(2, "Patch %s doesn't solve CVEs" % patch_file)
+    else:
+        bb.debug(2, "Patch %s solves %s" % (patch_file, ", ".join(sorted(cve_ids))))
+
+    return cve_ids
+
+
+@bb.parse.vardeps("CVE_STATUS")
+def get_patched_cves(d):
+    """
+    Determines the CVE IDs that have been solved by either patches included within
+    SRC_URI or by setting CVE_STATUS.
+
+    Returns a dictionary with the CVE IDs as keys and an associated dictionary of
+    relevant metadata as the value.
+    """
+    patched_cves = {}
+    patches = oe.patch.src_patches(d)
+    bb.debug(2, "Scanning %d patches for CVEs" % len(patches))
+
+    # Check each patch file
+    for url in patches:
+        patch_file = bb.fetch.decodeurl(url)[2]
+        for cve_id in parse_cves_from_patch_file(patch_file):
+            if cve_id not in patched_cves:
+                patched_cves[cve_id] = {
+                    "abbrev-status": "Patched",
+                    "status": "fix-file-included",
+                    "resource": [patch_file],
+                }
+            else:
+                patched_cves[cve_id]["resource"].append(patch_file)
+
+    # Search for additional patched CVEs
+    for cve_id in d.getVarFlags("CVE_STATUS") or {}:
+        decoded_status = decode_cve_status(d, cve_id)
+        products = d.getVar("CVE_PRODUCT")
+        if has_cve_product_match(decoded_status, products):
+            if cve_id in patched_cves:
+                bb.warn(
+                    'CVE_STATUS[%s] = "%s" is overwriting previous status of "%s: %s"'
+                    % (
+                        cve_id,
+                        d.getVarFlag("CVE_STATUS", cve_id),
+                        patched_cves[cve_id]["abbrev-status"],
+                        patched_cves[cve_id]["status"],
+                    )
+                )
+            patched_cves[cve_id] = {
+                "abbrev-status": decoded_status["mapping"],
+                "status": decoded_status["detail"],
+                "justification": decoded_status["description"],
+                "affected-vendor": decoded_status["vendor"],
+                "affected-product": decoded_status["product"],
+            }
+
+    return patched_cves
+
+
+def get_cpe_ids(cve_product, version):
+    """
+    Get list of CPE identifiers for the given product and version
+    """
+
+    version = version.split("+git")[0]
+
+    cpe_ids = []
+    for product in cve_product.split():
+        # CVE_PRODUCT in recipes may include vendor information for CPE identifiers. If not,
+        # use wildcard for vendor.
+        if ":" in product:
+            vendor, product = product.split(":", 1)
+        else:
+            vendor = "*"
+
+        cpe_id = 'cpe:2.3:*:{}:{}:{}:*:*:*:*:*:*:*'.format(vendor, product, version)
+        cpe_ids.append(cpe_id)
+
+    return cpe_ids
+
+def cve_check_merge_jsons(output, data):
+    """
+    Merge the data in the "package" property to the main data file
+    output
+    """
+    if output["version"] != data["version"]:
+        bb.error("Version mismatch when merging JSON outputs")
+        return
+
+    for product in output["package"]:
+        if product["name"] == data["package"][0]["name"]:
+            bb.error("Error adding the same package %s twice" % product["name"])
+            return
+
+    output["package"].append(data["package"][0])
+
+def update_symlinks(target_path, link_path):
+    """
+    Update a symbolic link link_path to point to target_path.
+    Remove the link and recreate it if it exists and is different.
+    """
+    if link_path != target_path and os.path.exists(target_path):
+        if os.path.exists(os.path.realpath(link_path)):
+            os.remove(link_path)
+        os.symlink(os.path.basename(target_path), link_path)
+
+
+def convert_cve_version(version):
+    """
+    This function converts from CVE format to Yocto version format.
+    eg 8.3_p1 -> 8.3p1, 6.2_rc1 -> 6.2-rc1
+
+    Unless it is redefined using CVE_VERSION in the recipe,
+    cve_check uses the version in the name of the recipe (${PV})
+    to check vulnerabilities against a CVE in the database downloaded from NVD.
+
+    When the version has an update, i.e.
+    "p1" in OpenSSH 8.3p1,
+    "-rc1" in linux kernel 6.2-rc1,
+    the database stores the version as version_update (8.3_p1, 6.2_rc1).
+    Therefore, we must transform this version before comparing to the
+    recipe version.
+
+    In this case, the parameter of the function is 8.3_p1.
+    If the version uses the Release Candidate format, "rc",
+    this function replaces the '_' by '-'.
+    If the version uses the Update format, "p",
+    this function removes the '_' completely.
+    """
+    import re
+
+    matches = re.match('^([0-9.]+)_((p|rc)[0-9]+)$', version)
+
+    if not matches:
+        return version
+
+    version = matches.group(1)
+    update = matches.group(2)
+
+    if matches.group(3) == "rc":
+        return version + '-' + update
+
+    return version + update
+
+@bb.parse.vardeps("CVE_STATUS", "CVE_CHECK_STATUSMAP")
+def decode_cve_status(d, cve):
+    """
+    Convert CVE_STATUS into status, vendor, product, detail and description.
+    """
+    status = d.getVarFlag("CVE_STATUS", cve)
+    if not status:
+        return {}
+
+    status_split = status.split(':', 4)
+    status_out = {}
+    status_out["detail"] = status_split[0]
+    product = "*"
+    vendor = "*"
+    description = ""
+    if len(status_split) >= 4 and status_split[1].strip() == "cpe":
+        # Both vendor and product are mandatory if cpe: present, the syntax is then:
+        # detail: cpe:vendor:product:description
+        vendor = status_split[2].strip()
+        product = status_split[3].strip()
+        description = status_split[4].strip()
+    elif len(status_split) >= 2 and status_split[1].strip() == "cpe":
+        # Malformed CPE
+        bb.warn(
+            'Invalid CPE information for CVE_STATUS[%s] = "%s", not setting CPE'
+            % (cve, status)
+        )
+    else:
+        # Other case: no CPE, the syntax is then:
+        # detail: description
+        description = status.split(':', 1)[1].strip() if (len(status_split) > 1) else ""
+
+    status_out["vendor"] = vendor
+    status_out["product"] = product
+    status_out["description"] = description
+
+    detail = status_out["detail"]
+    status_mapping = d.getVarFlag("CVE_CHECK_STATUSMAP", detail)
+    if status_mapping is None:
+        bb.warn(
+            'Invalid detail "%s" for CVE_STATUS[%s] = "%s", fallback to Unpatched'
+            % (detail, cve, status)
+        )
+        status_mapping = "Unpatched"
+    status_out["mapping"] = status_mapping
+
+    return status_out
+
+def has_cve_product_match(detailed_status, products):
+    """
+    Check product/vendor match between detailed_status from decode_cve_status and a string of
+    products (like from CVE_PRODUCT)
+    """
+    for product in products.split():
+        vendor = "*"
+        if ":" in product:
+            vendor, product = product.split(":", 1)
+
+        if (vendor == detailed_status["vendor"] or detailed_status["vendor"] == "*") and \
+           (product == detailed_status["product"] or detailed_status["product"] == "*"):
+            return True
+
+    # if no match, return False
+    return False
+
+def extend_cve_status(d):
+    # do this only once in case multiple classes use this
+    if d.getVar("CVE_STATUS_EXTENDED"):
+        return
+    d.setVar("CVE_STATUS_EXTENDED", "1")
+
+    # Fallback all CVEs from CVE_CHECK_IGNORE to CVE_STATUS
+    cve_check_ignore = d.getVar("CVE_CHECK_IGNORE")
+    if cve_check_ignore:
+        bb.warn("CVE_CHECK_IGNORE is deprecated in favor of CVE_STATUS")
+        for cve in (d.getVar("CVE_CHECK_IGNORE") or "").split():
+            d.setVarFlag("CVE_STATUS", cve, "ignored")
+
+    # Process CVE_STATUS_GROUPS to set multiple statuses and optional detail or description at once
+    for cve_status_group in (d.getVar("CVE_STATUS_GROUPS") or "").split():
+        cve_group = d.getVar(cve_status_group)
+        if cve_group is not None:
+            for cve in cve_group.split():
+                d.setVarFlag("CVE_STATUS", cve, d.getVarFlag(cve_status_group, "status"))
+        else:
+            bb.warn("CVE_STATUS_GROUPS contains undefined variable %s" % cve_status_group)
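A hedged example of the CVE_STATUS syntax that decode_cve_status() parses (CVE ID, vendor/product and description are made up; 'not-applicable-platform' maps to Ignored via CVE_CHECK_STATUSMAP in oe-core):

    CVE_STATUS[CVE-2020-12345] = "not-applicable-platform: cpe:openssh:openssh: Only applies to Windows builds"

    # decode_cve_status(d, "CVE-2020-12345") would then return roughly:
    # {'detail': 'not-applicable-platform', 'vendor': 'openssh', 'product': 'openssh',
    #  'description': 'Only applies to Windows builds', 'mapping': 'Ignored'}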
diff --git a/meta/lib/oe/data.py b/meta/lib/oe/data.py
index 602130a904..37121cfad2 100644
--- a/meta/lib/oe/data.py
+++ b/meta/lib/oe/data.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
diff --git a/meta/lib/oe/distro_check.py b/meta/lib/oe/distro_check.py
index 88e46c354d..3494520f40 100644
--- a/meta/lib/oe/distro_check.py
+++ b/meta/lib/oe/distro_check.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
@@ -26,7 +28,7 @@ def find_latest_numeric_release(url, d):
     maxstr=""
     for link in get_links_from_url(url, d):
         try:
-            # TODO use LooseVersion
+            # TODO use bb.utils.vercmp_string_op()
             release = float(link)
         except:
             release = 0
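On the TODO above, a hedged sketch of why float() comparison falls short and what the suggested helper does (signature as I recall it from bb.utils; verify before relying on it):

    import bb.utils

    float("1.10") > float("1.9")                   # False: 1.1 < 1.9 numerically
    bb.utils.vercmp_string_op("1.10", "1.9", ">")  # True: proper version comparison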
diff --git a/meta/lib/oe/elf.py b/meta/lib/oe/elf.py
index df0a4593fa..9794453092 100644
--- a/meta/lib/oe/elf.py
+++ b/meta/lib/oe/elf.py
@@ -1,133 +1,148 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
 def machine_dict(d):
-# TARGET_OS TARGET_ARCH MACHINE, OSABI, ABIVERSION, Little Endian, 32bit?
-    machdata = {
-        "darwin9" : {
-            "arm" : (40, 0, 0, True, 32),
-        },
-        "eabi" : {
-            "arm" : (40, 0, 0, True, 32),
-        },
-        "elf" : {
-            "aarch64" : (183, 0, 0, True, 64),
-            "aarch64_be" :(183, 0, 0, False, 64),
-            "i586" : (3, 0, 0, True, 32),
-            "i686" : (3, 0, 0, True, 32),
-            "x86_64": (62, 0, 0, True, 64),
-            "epiphany": (4643, 0, 0, True, 32),
-            "lm32": (138, 0, 0, False, 32),
-            "mips": ( 8, 0, 0, False, 32),
-            "mipsel": ( 8, 0, 0, True, 32),
-            "microblaze": (189, 0, 0, False, 32),
-            "microblazeel":(189, 0, 0, True, 32),
-            "powerpc": (20, 0, 0, False, 32),
-            "riscv32": (243, 0, 0, True, 32),
-            "riscv64": (243, 0, 0, True, 64),
-        },
-        "linux" : {
-            "aarch64" : (183, 0, 0, True, 64),
-            "aarch64_be" :(183, 0, 0, False, 64),
-            "arm" : (40, 97, 0, True, 32),
-            "armeb": (40, 97, 0, False, 32),
-            "powerpc": (20, 0, 0, False, 32),
-            "powerpc64": (21, 0, 0, False, 64),
-            "powerpc64le": (21, 0, 0, True, 64),
-            "i386": ( 3, 0, 0, True, 32),
-            "i486": ( 3, 0, 0, True, 32),
-            "i586": ( 3, 0, 0, True, 32),
-            "i686": ( 3, 0, 0, True, 32),
-            "x86_64": (62, 0, 0, True, 64),
-            "ia64": (50, 0, 0, True, 64),
-            "alpha": (36902, 0, 0, True, 64),
-            "hppa": (15, 3, 0, False, 32),
-            "m68k": ( 4, 0, 0, False, 32),
-            "mips": ( 8, 0, 0, False, 32),
-            "mipsel": ( 8, 0, 0, True, 32),
-            "mips64": ( 8, 0, 0, False, 64),
-            "mips64el": ( 8, 0, 0, True, 64),
-            "mipsisa32r6": ( 8, 0, 0, False, 32),
-            "mipsisa32r6el": ( 8, 0, 0, True, 32),
-            "mipsisa64r6": ( 8, 0, 0, False, 64),
-            "mipsisa64r6el": ( 8, 0, 0, True, 64),
-            "nios2": (113, 0, 0, True, 32),
-            "riscv32": (243, 0, 0, True, 32),
-            "riscv64": (243, 0, 0, True, 64),
-            "s390": (22, 0, 0, False, 32),
-            "sh4": (42, 0, 0, True, 32),
-            "sparc": ( 2, 0, 0, False, 32),
-            "microblaze": (189, 0, 0, False, 32),
-            "microblazeel":(189, 0, 0, True, 32),
-        },
-        "linux-musl" : {
-            "aarch64" : (183, 0, 0, True, 64),
-            "aarch64_be" :(183, 0, 0, False, 64),
-            "arm" : ( 40, 97, 0, True, 32),
-            "armeb": ( 40, 97, 0, False, 32),
-            "powerpc": ( 20, 0, 0, False, 32),
-            "powerpc64": ( 21, 0, 0, False, 64),
-            "powerpc64le": (21, 0, 0, True, 64),
-            "i386": ( 3, 0, 0, True, 32),
-            "i486": ( 3, 0, 0, True, 32),
-            "i586": ( 3, 0, 0, True, 32),
-            "i686": ( 3, 0, 0, True, 32),
-            "x86_64": ( 62, 0, 0, True, 64),
-            "mips": ( 8, 0, 0, False, 32),
-            "mipsel": ( 8, 0, 0, True, 32),
-            "mips64": ( 8, 0, 0, False, 64),
-            "mips64el": ( 8, 0, 0, True, 64),
-            "microblaze": (189, 0, 0, False, 32),
-            "microblazeel":(189, 0, 0, True, 32),
-            "riscv32": (243, 0, 0, True, 32),
-            "riscv64": (243, 0, 0, True, 64),
-            "sh4": ( 42, 0, 0, True, 32),
-        },
-        "uclinux-uclibc" : {
-            "bfin": ( 106, 0, 0, True, 32),
-        },
-        "linux-gnueabi" : {
-            "arm" : (40, 0, 0, True, 32),
-            "armeb" : (40, 0, 0, False, 32),
-        },
-        "linux-musleabi" : {
-            "arm" : (40, 0, 0, True, 32),
-            "armeb" : (40, 0, 0, False, 32),
-        },
-        "linux-gnuspe" : {
-            "powerpc": (20, 0, 0, False, 32),
-        },
-        "linux-muslspe" : {
-            "powerpc": (20, 0, 0, False, 32),
-        },
-        "linux-gnu" : {
-            "powerpc": (20, 0, 0, False, 32),
-            "sh4": (42, 0, 0, True, 32),
-        },
-        "linux-gnu_ilp32" : {
-            "aarch64" : (183, 0, 0, True, 32),
-        },
-        "linux-gnux32" : {
-            "x86_64": (62, 0, 0, True, 32),
-        },
-        "linux-muslx32" : {
-            "x86_64": (62, 0, 0, True, 32),
-        },
-        "linux-gnun32" : {
-            "mips64": ( 8, 0, 0, False, 32),
-            "mips64el": ( 8, 0, 0, True, 32),
-            "mipsisa64r6": ( 8, 0, 0, False, 32),
-            "mipsisa64r6el":( 8, 0, 0, True, 32),
-        },
-    }
+    # Generating this data is slow, so cache it
+    if not hasattr(machine_dict, "machdata"):
+        machine_dict.machdata = {
+            # TARGET_OS TARGET_ARCH MACHINE, OSABI, ABIVERSION, Little Endian, 32bit?
+            "darwin9" : {
+                "arm" : (40, 0, 0, True, 32),
+            },
+            "eabi" : {
+                "arm" : (40, 0, 0, True, 32),
+            },
+            "elf" : {
+                "aarch64" : (183, 0, 0, True, 64),
+                "aarch64_be" :(183, 0, 0, False, 64),
+                "i586" : (3, 0, 0, True, 32),
+                "i686" : (3, 0, 0, True, 32),
+                "x86_64": (62, 0, 0, True, 64),
+                "epiphany": (4643, 0, 0, True, 32),
+                "lm32": (138, 0, 0, False, 32),
+                "loongarch64":(258, 0, 0, True, 64),
+                "mips": ( 8, 0, 0, False, 32),
+                "mipsel": ( 8, 0, 0, True, 32),
+                "microblaze": (189, 0, 0, False, 32),
+                "microblazeel":(189, 0, 0, True, 32),
+                "powerpc": (20, 0, 0, False, 32),
+                "riscv32": (243, 0, 0, True, 32),
+                "riscv64": (243, 0, 0, True, 64),
+            },
+            "linux" : {
+                "aarch64" : (183, 0, 0, True, 64),
+                "aarch64_be" :(183, 0, 0, False, 64),
+                "arm" : (40, 97, 0, True, 32),
+                "armeb": (40, 97, 0, False, 32),
+                "powerpc": (20, 0, 0, False, 32),
+                "powerpc64": (21, 0, 0, False, 64),
+                "powerpc64le": (21, 0, 0, True, 64),
+                "i386": ( 3, 0, 0, True, 32),
+                "i486": ( 3, 0, 0, True, 32),
+                "i586": ( 3, 0, 0, True, 32),
+                "i686": ( 3, 0, 0, True, 32),
+                "x86_64": (62, 0, 0, True, 64),
+                "ia64": (50, 0, 0, True, 64),
+                "alpha": (36902, 0, 0, True, 64),
+                "hppa": (15, 3, 0, False, 32),
+                "loongarch64":(258, 0, 0, True, 64),
+                "m68k": ( 4, 0, 0, False, 32),
+                "mips": ( 8, 0, 0, False, 32),
+                "mipsel": ( 8, 0, 0, True, 32),
+                "mips64": ( 8, 0, 0, False, 64),
+                "mips64el": ( 8, 0, 0, True, 64),
+                "mipsisa32r6": ( 8, 0, 0, False, 32),
+                "mipsisa32r6el": ( 8, 0, 0, True, 32),
+                "mipsisa64r6": ( 8, 0, 0, False, 64),
+                "mipsisa64r6el": ( 8, 0, 0, True, 64),
+                "nios2": (113, 0, 0, True, 32),
+                "riscv32": (243, 0, 0, True, 32),
+                "riscv64": (243, 0, 0, True, 64),
+                "s390": (22, 0, 0, False, 32),
+                "sh4": (42, 0, 0, True, 32),
+                "sparc": ( 2, 0, 0, False, 32),
+                "microblaze": (189, 0, 0, False, 32),
+                "microblazeel":(189, 0, 0, True, 32),
+            },
+            "linux-android" : {
+                "aarch64" : (183, 0, 0, True, 64),
+                "i686": ( 3, 0, 0, True, 32),
+                "x86_64": (62, 0, 0, True, 64),
+            },
+            "linux-androideabi" : {
+                "arm" : (40, 97, 0, True, 32),
+            },
+            "linux-musl" : {
+                "aarch64" : (183, 0, 0, True, 64),
+                "aarch64_be" :(183, 0, 0, False, 64),
+                "arm" : ( 40, 97, 0, True, 32),
+                "armeb": ( 40, 97, 0, False, 32),
+                "powerpc": ( 20, 0, 0, False, 32),
+                "powerpc64": ( 21, 0, 0, False, 64),
+                "powerpc64le": (21, 0, 0, True, 64),
+                "i386": ( 3, 0, 0, True, 32),
+                "i486": ( 3, 0, 0, True, 32),
+                "i586": ( 3, 0, 0, True, 32),
+                "i686": ( 3, 0, 0, True, 32),
+                "x86_64": ( 62, 0, 0, True, 64),
+                "loongarch64":( 258, 0, 0, True, 64),
+                "mips": ( 8, 0, 0, False, 32),
+                "mipsel": ( 8, 0, 0, True, 32),
+                "mips64": ( 8, 0, 0, False, 64),
+                "mips64el": ( 8, 0, 0, True, 64),
+                "microblaze": (189, 0, 0, False, 32),
+                "microblazeel":(189, 0, 0, True, 32),
+                "riscv32": (243, 0, 0, True, 32),
+                "riscv64": (243, 0, 0, True, 64),
+                "sh4": ( 42, 0, 0, True, 32),
+            },
+            "uclinux-uclibc" : {
+                "bfin": ( 106, 0, 0, True, 32),
+            },
+            "linux-gnueabi" : {
+                "arm" : (40, 0, 0, True, 32),
+                "armeb" : (40, 0, 0, False, 32),
+            },
+            "linux-musleabi" : {
+                "arm" : (40, 0, 0, True, 32),
+                "armeb" : (40, 0, 0, False, 32),
+            },
+            "linux-gnuspe" : {
+                "powerpc": (20, 0, 0, False, 32),
+            },
+            "linux-muslspe" : {
+                "powerpc": (20, 0, 0, False, 32),
+            },
+            "linux-gnu" : {
+                "powerpc": (20, 0, 0, False, 32),
+                "sh4": (42, 0, 0, True, 32),
+            },
+            "linux-gnu_ilp32" : {
+                "aarch64" : (183, 0, 0, True, 32),
+            },
+            "linux-gnux32" : {
+                "x86_64": (62, 0, 0, True, 32),
+            },
+            "linux-muslx32" : {
+                "x86_64": (62, 0, 0, True, 32),
+            },
+            "linux-gnun32" : {
+                "mips64": ( 8, 0, 0, False, 32),
+                "mips64el": ( 8, 0, 0, True, 32),
+                "mipsisa64r6": ( 8, 0, 0, False, 32),
+                "mipsisa64r6el":( 8, 0, 0, True, 32),
+            },
+        }
 
     # Add in any extra user supplied data which may come from a BSP layer, removing the
     # need to always change this class directly
     extra_machdata = (d and d.getVar("PACKAGEQA_EXTRA_MACHDEFFUNCS" or None) or "").split()
     for m in extra_machdata:
         call = m + "(machdata, d)"
-        locs = { "machdata" : machdata, "d" : d}
-        machdata = bb.utils.better_eval(call, locs)
+        locs = { "machdata" : machine_dict.machdata, "d" : d}
+        machine_dict.machdata = bb.utils.better_eval(call, locs)
 
-    return machdata
+    return machine_dict.machdata
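A brief, hedged illustration of consuming the cached table (field meanings per the TARGET_OS/TARGET_ARCH comment above):

    import oe.elf

    machdata = oe.elf.machine_dict(None)  # None: no extra BSP data is merged in
    machine, osabi, abiversion, littleendian, bits = machdata["linux"]["aarch64"]
    # -> (183, 0, 0, True, 64): ELF machine 183, little endian, 64-bit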
diff --git a/meta/lib/oe/fitimage.py b/meta/lib/oe/fitimage.py
new file mode 100644
index 0000000000..f303799155
--- /dev/null
+++ b/meta/lib/oe/fitimage.py
@@ -0,0 +1,547 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# This file contains common functions for the fitimage generation
+
+import os
+import shlex
+import subprocess
+import bb
+
+from oeqa.utils.commands import runCmd
+
+class ItsNode:
+    INDENT_SIZE = 8
+
+    def __init__(self, name, parent_node, sub_nodes=None, properties=None):
+        self.name = name
+        self.parent_node = parent_node
+
+        self.sub_nodes = []
+        if sub_nodes:
+            self.sub_nodes = sub_nodes
+
+        self.properties = {}
+        if properties:
+            self.properties = properties
+
+        if parent_node:
+            parent_node.add_sub_node(self)
+
+    def add_sub_node(self, sub_node):
+        self.sub_nodes.append(sub_node)
+
+    def add_property(self, key, value):
+        self.properties[key] = value
+
+    def emit(self, f, indent):
+        indent_str_name = " " * indent
+        indent_str_props = " " * (indent + self.INDENT_SIZE)
+        f.write("%s%s {\n" % (indent_str_name, self.name))
+        for key, value in self.properties.items():
+            bb.debug(1, "key: %s, value: %s" % (key, str(value)))
+            # Single integer: <0x12ab>
+            if isinstance(value, int):
+                f.write(indent_str_props + key + ' = <0x%x>;\n' % value)
+            # list of strings: "string1", "string2" or integers: <0x12ab 0x34cd>
+            elif isinstance(value, list):
+                if len(value) == 0:
+                    f.write(indent_str_props + key + ' = "";\n')
+                elif isinstance(value[0], int):
+                    list_entries = ' '.join('0x%x' % entry for entry in value)
+                    f.write(indent_str_props + key + ' = <%s>;\n' % list_entries)
+                else:
+                    list_entries = ', '.join('"%s"' % entry for entry in value)
+                    f.write(indent_str_props + key + ' = %s;\n' % list_entries)
+            elif isinstance(value, str):
+                # path: /incbin/("path/to/file")
+                if key in ["data"] and value.startswith('/incbin/('):
+                    f.write(indent_str_props + key + ' = %s;\n' % value)
+                # Integers which are already string formatted
+                elif value.startswith("<") and value.endswith(">"):
+                    f.write(indent_str_props + key + ' = %s;\n' % value)
+                else:
+                    f.write(indent_str_props + key + ' = "%s";\n' % value)
+            else:
+                bb.fatal("%s has unexpected data type." % str(value))
+        for sub_node in self.sub_nodes:
+            sub_node.emit(f, indent + self.INDENT_SIZE)
+        f.write(indent_str_name + '};\n')
+
+class ItsNodeImages(ItsNode):
+    def __init__(self, parent_node):
+        super().__init__("images", parent_node)
+
+class ItsNodeConfigurations(ItsNode):
+    def __init__(self, parent_node):
+        super().__init__("configurations", parent_node)
+
+class ItsNodeHash(ItsNode):
+    def __init__(self, name, parent_node, algo, opt_props=None):
+        properties = {
+            "algo": algo
+        }
+        if opt_props:
+            properties.update(opt_props)
+        super().__init__(name, parent_node, None, properties)
+
+class ItsImageSignature(ItsNode):
+    def __init__(self, name, parent_node, algo, keyname, opt_props=None):
+        properties = {
+            "algo": algo,
+            "key-name-hint": keyname
+        }
+        if opt_props:
+            properties.update(opt_props)
+        super().__init__(name, parent_node, None, properties)
+
+class ItsNodeImage(ItsNode):
+    def __init__(self, name, parent_node, description, type, compression, sub_nodes=None, opt_props=None):
+        properties = {
+            "description": description,
+            "type": type,
+            "compression": compression,
+        }
+        if opt_props:
+            properties.update(opt_props)
+        super().__init__(name, parent_node, sub_nodes, properties)
+
+class ItsNodeDtb(ItsNodeImage):
+    def __init__(self, name, parent_node, description, type, compression,
+                 sub_nodes=None, opt_props=None, compatible=None):
+        super().__init__(name, parent_node, description, type, compression, sub_nodes, opt_props)
+        self.compatible = compatible
+
+class ItsNodeDtbAlias(ItsNode):
+    """Additional Configuration Node for a DTB
+
+    Symlinks pointing to a DTB file are handled by an additional
+    configuration node referring to another DTB image node.
+    """
+    def __init__(self, name, alias_name, compatible=None):
+        super().__init__(name, parent_node=None, sub_nodes=None, properties=None)
+        self.alias_name = alias_name
+        self.compatible = compatible
+
+class ItsNodeConfigurationSignature(ItsNode):
+    def __init__(self, name, parent_node, algo, keyname, opt_props=None):
+        properties = {
+            "algo": algo,
+            "key-name-hint": keyname
+        }
+        if opt_props:
+            properties.update(opt_props)
+        super().__init__(name, parent_node, None, properties)
+
+class ItsNodeConfiguration(ItsNode):
+    def __init__(self, name, parent_node, description, sub_nodes=None, opt_props=None):
+        properties = {
+            "description": description,
+        }
+        if opt_props:
+            properties.update(opt_props)
+        super().__init__(name, parent_node, sub_nodes, properties)
+
+class ItsNodeRootKernel(ItsNode):
+    """Create FIT images for the kernel
+
+    Currently exactly one kernel can be added to the FIT image, along with
+    zero or more device trees and zero or one ramdisk.
+
+    If a device tree is included in the FIT image, the default configuration
+    is the first DTB. If no DTB is present, the default configuration is the
+    kernel.
+    """
+    def __init__(self, description, address_cells, host_prefix, arch, conf_prefix,
+                 sign_enable=False, sign_keydir=None,
+                 mkimage=None, mkimage_dtcopts=None,
+                 mkimage_sign=None, mkimage_sign_args=None,
+                 hash_algo=None, sign_algo=None, pad_algo=None,
+                 sign_keyname_conf=None,
+                 sign_individual=False, sign_keyname_img=None):
+        props = {
+            "description": description,
+            "#address-cells": f"<{address_cells}>"
+        }
+        super().__init__("/", None, None, props)
+        self.images = ItsNodeImages(self)
+        self.configurations = ItsNodeConfigurations(self)
+
+        self._host_prefix = host_prefix
+        self._arch = arch
+        self._conf_prefix = conf_prefix
+
+        # Signature related properties
+        self._sign_enable = sign_enable
+        self._sign_keydir = sign_keydir
+        self._mkimage = mkimage
+        self._mkimage_dtcopts = mkimage_dtcopts
+        self._mkimage_sign = mkimage_sign
+        self._mkimage_sign_args = mkimage_sign_args
+        self._hash_algo = hash_algo
+        self._sign_algo = sign_algo
+        self._pad_algo = pad_algo
+        self._sign_keyname_conf = sign_keyname_conf
+        self._sign_individual = sign_individual
+        self._sign_keyname_img = sign_keyname_img
+        self._sanitize_sign_config()
+
+        self._dtbs = []
+        self._dtb_alias = []
+        self._kernel = None
+        self._ramdisk = None
+        self._bootscr = None
+        self._setup = None
+
+    def _sanitize_sign_config(self):
+        if self._sign_enable:
+            if not self._hash_algo:
+                bb.fatal("FIT image signing is enabled but no hash algorithm is provided.")
+            if not self._sign_algo:
+                bb.fatal("FIT image signing is enabled but no signature algorithm is provided.")
+            if not self._pad_algo:
+                bb.fatal("FIT image signing is enabled but no padding algorithm is provided.")
+            if not self._sign_keyname_conf:
+                bb.fatal("FIT image signing is enabled but no configuration key name is provided.")
+            if self._sign_individual and not self._sign_keyname_img:
+                bb.fatal("FIT image signing is enabled for individual images but no image key name is provided.")
+
+    def write_its_file(self, itsfile):
+        with open(itsfile, 'w') as f:
+            f.write("/dts-v1/;\n\n")
+            self.emit(f, 0)
+
+    def its_add_node_image(self, image_id, description, image_type, compression, opt_props):
+        image_node = ItsNodeImage(
+            image_id,
+            self.images,
+            description,
+            image_type,
+            compression,
+            opt_props=opt_props
+        )
+        if self._hash_algo:
+            ItsNodeHash(
+                "hash-1",
+                image_node,
+                self._hash_algo
+            )
+        if self._sign_individual:
+            ItsImageSignature(
+                "signature-1",
+                image_node,
+                f"{self._hash_algo},{self._sign_algo}",
59 # path: /incbin/("path/to/file")
60 if key in ["data"] and value.startswith('/incbin/('):
61 f.write(indent_str_props + key + ' = %s;\n' % value)
62 # Integers which are already string formatted
63 elif value.startswith("<") and value.endswith(">"):
64 f.write(indent_str_props + key + ' = %s;\n' % value)
65 else:
66 f.write(indent_str_props + key + ' = "%s";\n' % value)
67 else:
68 bb.fatal("%s has unexpexted data type." % str(value))
69 for sub_node in self.sub_nodes:
70 sub_node.emit(f, indent + self.INDENT_SIZE)
71 f.write(indent_str_name + '};\n')
72
73class ItsNodeImages(ItsNode):
74 def __init__(self, parent_node):
75 super().__init__("images", parent_node)
76
77class ItsNodeConfigurations(ItsNode):
78 def __init__(self, parent_node):
79 super().__init__("configurations", parent_node)
80
81class ItsNodeHash(ItsNode):
82 def __init__(self, name, parent_node, algo, opt_props=None):
83 properties = {
84 "algo": algo
85 }
86 if opt_props:
87 properties.update(opt_props)
88 super().__init__(name, parent_node, None, properties)
89
90class ItsImageSignature(ItsNode):
91 def __init__(self, name, parent_node, algo, keyname, opt_props=None):
92 properties = {
93 "algo": algo,
94 "key-name-hint": keyname
95 }
96 if opt_props:
97 properties.update(opt_props)
98 super().__init__(name, parent_node, None, properties)
99
100class ItsNodeImage(ItsNode):
101 def __init__(self, name, parent_node, description, type, compression, sub_nodes=None, opt_props=None):
102 properties = {
103 "description": description,
104 "type": type,
105 "compression": compression,
106 }
107 if opt_props:
108 properties.update(opt_props)
109 super().__init__(name, parent_node, sub_nodes, properties)
110
111class ItsNodeDtb(ItsNodeImage):
112 def __init__(self, name, parent_node, description, type, compression,
113 sub_nodes=None, opt_props=None, compatible=None):
114 super().__init__(name, parent_node, description, type, compression, sub_nodes, opt_props)
115 self.compatible = compatible
116
117class ItsNodeDtbAlias(ItsNode):
118 """Additional Configuration Node for a DTB
119
120 Symlinks pointing to a DTB file are handled by an addtitional
121 configuration node referring to another DTB image node.
122 """
123 def __init__(self, name, alias_name, compatible=None):
124 super().__init__(name, parent_node=None, sub_nodes=None, properties=None)
125 self.alias_name = alias_name
126 self.compatible = compatible
127
128class ItsNodeConfigurationSignature(ItsNode):
129 def __init__(self, name, parent_node, algo, keyname, opt_props=None):
130 properties = {
131 "algo": algo,
132 "key-name-hint": keyname
133 }
134 if opt_props:
135 properties.update(opt_props)
136 super().__init__(name, parent_node, None, properties)
137
138class ItsNodeConfiguration(ItsNode):
139 def __init__(self, name, parent_node, description, sub_nodes=None, opt_props=None):
140 properties = {
141 "description": description,
142 }
143 if opt_props:
144 properties.update(opt_props)
145 super().__init__(name, parent_node, sub_nodes, properties)
146
147class ItsNodeRootKernel(ItsNode):
148 """Create FIT images for the kernel
149
150 Currently exactly one kernel (no more, no less) can be added to the FIT
151 image along with 0 or more device trees and 0 or 1 ramdisk.
152
153 If a device tree is included in the FIT image, the default configuration is
154 the first DTB. If no DTB is present, the default configuration is the kernel.
155 """
156 def __init__(self, description, address_cells, host_prefix, arch, conf_prefix,
157 sign_enable=False, sign_keydir=None,
158 mkimage=None, mkimage_dtcopts=None,
159 mkimage_sign=None, mkimage_sign_args=None,
160 hash_algo=None, sign_algo=None, pad_algo=None,
161 sign_keyname_conf=None,
162 sign_individual=False, sign_keyname_img=None):
163 props = {
164 "description": description,
165 "#address-cells": f"<{address_cells}>"
166 }
167 super().__init__("/", None, None, props)
168 self.images = ItsNodeImages(self)
169 self.configurations = ItsNodeConfigurations(self)
170
171 self._host_prefix = host_prefix
172 self._arch = arch
173 self._conf_prefix = conf_prefix
174
175 # Signature related properties
176 self._sign_enable = sign_enable
177 self._sign_keydir = sign_keydir
178 self._mkimage = mkimage
179 self._mkimage_dtcopts = mkimage_dtcopts
180 self._mkimage_sign = mkimage_sign
181 self._mkimage_sign_args = mkimage_sign_args
182 self._hash_algo = hash_algo
183 self._sign_algo = sign_algo
184 self._pad_algo = pad_algo
185 self._sign_keyname_conf = sign_keyname_conf
186 self._sign_individual = sign_individual
187 self._sign_keyname_img = sign_keyname_img
188 self._sanitize_sign_config()
189
190 self._dtbs = []
191 self._dtb_alias = []
192 self._kernel = None
193 self._ramdisk = None
194 self._bootscr = None
195 self._setup = None
196
197 def _sanitize_sign_config(self):
198 if self._sign_enable:
199 if not self._hash_algo:
200 bb.fatal("FIT image signing is enabled but no hash algorithm is provided.")
201 if not self._sign_algo:
202 bb.fatal("FIT image signing is enabled but no signature algorithm is provided.")
203 if not self._pad_algo:
204 bb.fatal("FIT image signing is enabled but no padding algorithm is provided.")
205 if not self._sign_keyname_conf:
206 bb.fatal("FIT image signing is enabled but no configuration key name is provided.")
207 if self._sign_individual and not self._sign_keyname_img:
208 bb.fatal("FIT image signing is enabled for individual images but no image key name is provided.")
209
210 def write_its_file(self, itsfile):
211 with open(itsfile, 'w') as f:
212 f.write("/dts-v1/;\n\n")
213 self.emit(f, 0)
214
215 def its_add_node_image(self, image_id, description, image_type, compression, opt_props):
216 image_node = ItsNodeImage(
217 image_id,
218 self.images,
219 description,
220 image_type,
221 compression,
222 opt_props=opt_props
223 )
224 if self._hash_algo:
225 ItsNodeHash(
226 "hash-1",
227 image_node,
228 self._hash_algo
229 )
230 if self._sign_individual:
231 ItsImageSignature(
232 "signature-1",
233 image_node,
234 f"{self._hash_algo},{self._sign_algo}",
235 self._sign_keyname_img
236 )
237 return image_node
238
239 def its_add_node_dtb(self, image_id, description, image_type, compression, opt_props, compatible):
240 dtb_node = ItsNodeDtb(
241 image_id,
242 self.images,
243 description,
244 image_type,
245 compression,
246 opt_props=opt_props,
247 compatible=compatible
248 )
249 if self._hash_algo:
250 ItsNodeHash(
251 "hash-1",
252 dtb_node,
253 self._hash_algo
254 )
255 if self._sign_individual:
256 ItsImageSignature(
257 "signature-1",
258 dtb_node,
259 f"{self._hash_algo},{self._sign_algo}",
260 self._sign_keyname_img
261 )
262 return dtb_node
263
264 def fitimage_emit_section_kernel(self, kernel_id, kernel_path, compression,
265 load, entrypoint, mkimage_kernel_type, entrysymbol=None):
266 """Emit the fitImage ITS kernel section"""
267 if self._kernel:
268 bb.fatal("Kernel section already exists in the ITS file.")
269 if entrysymbol:
270 result = subprocess.run([self._host_prefix + "nm", "vmlinux"], capture_output=True, text=True)
271 for line in result.stdout.splitlines():
272 parts = line.split()
273 if len(parts) == 3 and parts[2] == entrysymbol:
274 entrypoint = "<0x%s>" % parts[0]
275 break
276 kernel_node = self.its_add_node_image(
277 kernel_id,
278 "Linux kernel",
279 mkimage_kernel_type,
280 compression,
281 {
282 "data": '/incbin/("' + kernel_path + '")',
283 "arch": self._arch,
284 "os": "linux",
285 "load": f"<{load}>",
286 "entry": f"<{entrypoint}>"
287 }
288 )
289 self._kernel = kernel_node
290
291 def fitimage_emit_section_dtb(self, dtb_id, dtb_path, dtb_loadaddress=None,
292 dtbo_loadaddress=None, add_compatible=False):
293 """Emit the fitImage ITS DTB section"""
294 load=None
295 dtb_ext = os.path.splitext(dtb_path)[1]
296 if dtb_ext == ".dtbo":
297 if dtbo_loadaddress:
298 load = dtbo_loadaddress
299 elif dtb_loadaddress:
300 load = dtb_loadaddress
301
302 opt_props = {
303 "data": '/incbin/("' + dtb_path + '")',
304 "arch": self._arch
305 }
306 if load:
307 opt_props["load"] = f"<{load}>"
308
309 # Preserve the DTB's compatible string to be added to the configuration node
310 compatible = None
311 if add_compatible:
312 compatible = get_compatible_from_dtb(dtb_path)
313
314 dtb_node = self.its_add_node_dtb(
315 "fdt-" + dtb_id,
316 "Flattened Device Tree blob",
317 "flat_dt",
318 "none",
319 opt_props,
320 compatible
321 )
322 self._dtbs.append(dtb_node)
323
324 def fitimage_emit_section_dtb_alias(self, dtb_alias_id, dtb_path, add_compatible=False):
325 """Add a configuration node referring to another DTB"""
326 # Preserve the DTB's compatible string to be added to the configuration node
327 compatible = None
328 if add_compatible:
329 compatible = get_compatible_from_dtb(dtb_path)
330
331 dtb_id = os.path.basename(dtb_path)
332 dtb_alias_node = ItsNodeDtbAlias("fdt-" + dtb_id, dtb_alias_id, compatible)
333 self._dtb_alias.append(dtb_alias_node)
334        bb.debug(1, f"compatible: {compatible}, dtb_alias_id: {dtb_alias_id}, dtb_id: {dtb_id}, dtb_path: {dtb_path}")
335
336 def fitimage_emit_section_boot_script(self, bootscr_id, bootscr_path):
337 """Emit the fitImage ITS u-boot script section"""
338 if self._bootscr:
339 bb.fatal("U-boot script section already exists in the ITS file.")
340 bootscr_node = self.its_add_node_image(
341 bootscr_id,
342 "U-boot script",
343 "script",
344 "none",
345 {
346 "data": '/incbin/("' + bootscr_path + '")',
347 "arch": self._arch,
348 "type": "script"
349 }
350 )
351 self._bootscr = bootscr_node
352
353 def fitimage_emit_section_setup(self, setup_id, setup_path):
354 """Emit the fitImage ITS setup section"""
355 if self._setup:
356 bb.fatal("Setup section already exists in the ITS file.")
357 load = "<0x00090000>"
358 entry = "<0x00090000>"
359 setup_node = self.its_add_node_image(
360 setup_id,
361 "Linux setup.bin",
362 "x86_setup",
363 "none",
364 {
365 "data": '/incbin/("' + setup_path + '")',
366 "arch": self._arch,
367 "os": "linux",
368 "load": load,
369 "entry": entry
370 }
371 )
372 self._setup = setup_node
373
374 def fitimage_emit_section_ramdisk(self, ramdisk_id, ramdisk_path, description="ramdisk", load=None, entry=None):
375 """Emit the fitImage ITS ramdisk section"""
376 if self._ramdisk:
377 bb.fatal("Ramdisk section already exists in the ITS file.")
378 opt_props = {
379 "data": '/incbin/("' + ramdisk_path + '")',
380 "type": "ramdisk",
381 "arch": self._arch,
382 "os": "linux"
383 }
384 if load:
385 opt_props["load"] = f"<{load}>"
386 if entry:
387 opt_props["entry"] = f"<{entry}>"
388
389 ramdisk_node = self.its_add_node_image(
390 ramdisk_id,
391 description,
392 "ramdisk",
393 "none",
394 opt_props
395 )
396 self._ramdisk = ramdisk_node
397
398 def _fitimage_emit_one_section_config(self, conf_node_name, dtb=None):
399 """Emit the fitImage ITS configuration section"""
400 opt_props = {}
401 conf_desc = []
402 sign_entries = []
403
404 if self._kernel:
405 conf_desc.append("Linux kernel")
406 opt_props["kernel"] = self._kernel.name
407 if self._sign_enable:
408 sign_entries.append("kernel")
409
410 if dtb:
411 conf_desc.append("FDT blob")
412 opt_props["fdt"] = dtb.name
413 if dtb.compatible:
414 opt_props["compatible"] = dtb.compatible
415 if self._sign_enable:
416 sign_entries.append("fdt")
417
418 if self._ramdisk:
419 conf_desc.append("ramdisk")
420 opt_props["ramdisk"] = self._ramdisk.name
421 if self._sign_enable:
422 sign_entries.append("ramdisk")
423
424 if self._bootscr:
425 conf_desc.append("u-boot script")
426 opt_props["bootscr"] = self._bootscr.name
427 if self._sign_enable:
428 sign_entries.append("bootscr")
429
430 if self._setup:
431 conf_desc.append("setup")
432 opt_props["setup"] = self._setup.name
433 if self._sign_enable:
434 sign_entries.append("setup")
435
436 # First added configuration is the default configuration
437 default_flag = "0"
438 if len(self.configurations.sub_nodes) == 0:
439 default_flag = "1"
440
441 conf_node = ItsNodeConfiguration(
442 conf_node_name,
443 self.configurations,
444 f"{default_flag} {', '.join(conf_desc)}",
445 opt_props=opt_props
446 )
447 if self._hash_algo:
448 ItsNodeHash(
449 "hash-1",
450 conf_node,
451 self._hash_algo
452 )
453 if self._sign_enable:
454 ItsNodeConfigurationSignature(
455 "signature-1",
456 conf_node,
457 f"{self._hash_algo},{self._sign_algo}",
458 self._sign_keyname_conf,
459 opt_props={
460 "padding": self._pad_algo,
461 "sign-images": sign_entries
462 }
463 )
464
465 def fitimage_emit_section_config(self, default_dtb_image=None):
466 if self._dtbs:
467 for dtb in self._dtbs:
468 dtb_name = dtb.name
469 if dtb.name.startswith("fdt-"):
470 dtb_name = dtb.name[len("fdt-"):]
471 self._fitimage_emit_one_section_config(self._conf_prefix + dtb_name, dtb)
472 for dtb in self._dtb_alias:
473 self._fitimage_emit_one_section_config(self._conf_prefix + dtb.alias_name, dtb)
474 else:
475 # Currently exactly one kernel is supported.
476 self._fitimage_emit_one_section_config(self._conf_prefix + "1")
477
478 default_conf = self.configurations.sub_nodes[0].name
479 if default_dtb_image and self._dtbs:
480 default_conf = self._conf_prefix + default_dtb_image
481 self.configurations.add_property('default', default_conf)
482
483 def run_mkimage_assemble(self, itsfile, fitfile):
484 cmd = [
485 self._mkimage,
486 '-f', itsfile,
487 fitfile
488 ]
489 if self._mkimage_dtcopts:
490 cmd.insert(1, '-D')
491 cmd.insert(2, self._mkimage_dtcopts)
492 try:
493 subprocess.run(cmd, check=True, capture_output=True)
494 except subprocess.CalledProcessError as e:
495            bb.fatal(f"Command '{' '.join(cmd)}' failed with return code {e.returncode}\nstdout: {e.stdout.decode()}\nstderr: {e.stderr.decode()}\nitsfile: {os.path.abspath(itsfile)}")
496
497 def run_mkimage_sign(self, fitfile):
498 if not self._sign_enable:
499 bb.debug(1, "FIT image signing is disabled. Skipping signing.")
500 return
501
502 # Some sanity checks because mkimage exits with 0 also without needed keys
503 sign_key_path = os.path.join(self._sign_keydir, self._sign_keyname_conf)
504 if not os.path.exists(sign_key_path + '.key') or not os.path.exists(sign_key_path + '.crt'):
505 bb.fatal("%s.key or .crt does not exist" % sign_key_path)
506 if self._sign_individual:
507 sign_key_img_path = os.path.join(self._sign_keydir, self._sign_keyname_img)
508 if not os.path.exists(sign_key_img_path + '.key') or not os.path.exists(sign_key_img_path + '.crt'):
509 bb.fatal("%s.key or .crt does not exist" % sign_key_img_path)
510
511 cmd = [
512 self._mkimage_sign,
513 '-F',
514 '-k', self._sign_keydir,
515 '-r', fitfile
516 ]
517 if self._mkimage_dtcopts:
518 cmd.extend(['-D', self._mkimage_dtcopts])
519 if self._mkimage_sign_args:
520 cmd.extend(shlex.split(self._mkimage_sign_args))
521 try:
522 subprocess.run(cmd, check=True, capture_output=True)
523 except subprocess.CalledProcessError as e:
524 bb.fatal(f"Command '{' '.join(cmd)}' failed with return code {e.returncode}\nstdout: {e.stdout.decode()}\nstderr: {e.stderr.decode()}")
525
526
527def symlink_points_below(file_or_symlink, expected_parent_dir):
528    """Return the symlink target relative to expected_parent_dir, or None if it points outside it"""
529 file_path = os.path.join(expected_parent_dir, file_or_symlink)
530 if not os.path.islink(file_path):
531 return None
532
533 realpath = os.path.relpath(os.path.realpath(file_path), expected_parent_dir)
534 if realpath.startswith(".."):
535 return None
536
537 return realpath
538
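
A self-contained sketch of the helper's contract, using a throwaway directory (the file names are illustrative, and it assumes this module is importable as oe.fitimage):

    import os, tempfile
    from oe.fitimage import symlink_points_below

    d = tempfile.mkdtemp()
    open(os.path.join(d, "fitImage-5.15.bin"), "w").close()
    os.symlink("fitImage-5.15.bin", os.path.join(d, "fitImage"))
    # Link resolves inside d, so the resolved relative path is returned
    assert symlink_points_below("fitImage", d) == "fitImage-5.15.bin"
    os.symlink("/etc/passwd", os.path.join(d, "escape"))
    # Link escapes d, so None is returned
    assert symlink_points_below("escape", d) is None
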
539def get_compatible_from_dtb(dtb_path, fdtget_path="fdtget"):
540 compatible = None
541 cmd = [fdtget_path, "-t", "s", dtb_path, "/", "compatible"]
542 try:
543 ret = subprocess.run(cmd, check=True, capture_output=True, text=True)
544 compatible = ret.stdout.strip().split()
545 except subprocess.CalledProcessError:
546 compatible = None
547 return compatible
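
To see how the pieces above compose, here is a minimal sketch that builds an unsigned FIT image description with one kernel and one DTB. It assumes a BitBake environment where this module imports as oe.fitimage; the file names and load/entry addresses are illustrative only.

    import oe.fitimage

    # Signing arguments are left at their defaults, so _sanitize_sign_config()
    # has nothing to check and no hash/signature nodes are emitted.
    root = oe.fitimage.ItsNodeRootKernel(
        "Kernel fitImage", address_cells=1,
        host_prefix="", arch="arm64", conf_prefix="conf-")
    root.fitimage_emit_section_kernel("kernel-1", "linux.bin", "gzip",
        load="0x80080000", entrypoint="0x80080000",
        mkimage_kernel_type="kernel")
    root.fitimage_emit_section_dtb("mydevice.dtb", "mydevice.dtb")
    # The first configuration added becomes the default one
    root.fitimage_emit_section_config()
    root.write_its_file("fit-image.its")
    # root.run_mkimage_assemble("fit-image.its", "fitImage") would then build
    # the binary, provided mkimage was passed to the constructor.
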
diff --git a/meta/lib/oe/go.py b/meta/lib/oe/go.py
new file mode 100644
index 0000000000..4559dc63b2
--- /dev/null
+++ b/meta/lib/oe/go.py
@@ -0,0 +1,38 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import re
8
9def map_arch(a):
10 """
11 Map our architecture names to Go's GOARCH names.
12 See https://github.com/golang/go/blob/master/src/internal/syslist/syslist.go for the complete list.
13 """
14 if re.match('i.86', a):
15 return '386'
16 elif a == 'x86_64':
17 return 'amd64'
18 elif re.match('arm.*', a):
19 return 'arm'
20 elif re.match('aarch64.*', a):
21 return 'arm64'
22 elif re.match('mips64el.*', a):
23 return 'mips64le'
24 elif re.match('mips64.*', a):
25 return 'mips64'
26 elif a == 'mips':
27 return 'mips'
28 elif a == 'mipsel':
29 return 'mipsle'
30 elif re.match('p(pc|owerpc)(64le)', a):
31 return 'ppc64le'
32 elif re.match('p(pc|owerpc)(64)', a):
33 return 'ppc64'
34 elif a == 'riscv64':
35 return 'riscv64'
36 elif a == 'loongarch64':
37 return 'loong64'
38 raise KeyError(f"Cannot map architecture {a}")
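
A quick sanity check of the mapping (note that the order of the regex tests matters, e.g. mips64el must be tried before mips64); this assumes meta/lib is on sys.path:

    from oe.go import map_arch

    assert map_arch("i686") == "386"
    assert map_arch("x86_64") == "amd64"
    assert map_arch("aarch64") == "arm64"
    assert map_arch("mips64el") == "mips64le"
    assert map_arch("loongarch64") == "loong64"
    # Unknown architectures raise KeyError rather than returning a guess
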
diff --git a/meta/lib/oe/gpg_sign.py b/meta/lib/oe/gpg_sign.py
index 7634d7ef1d..ede6186c84 100644
--- a/meta/lib/oe/gpg_sign.py
+++ b/meta/lib/oe/gpg_sign.py
@@ -1,13 +1,16 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
5"""Helper module for GPG signing""" 7"""Helper module for GPG signing"""
6import os
7 8
8import bb 9import bb
9import subprocess 10import os
10import shlex 11import shlex
12import subprocess
13import tempfile
11 14
12class LocalSigner(object): 15class LocalSigner(object):
13 """Class for handling local (on the build host) signing""" 16 """Class for handling local (on the build host) signing"""
@@ -58,7 +61,7 @@ class LocalSigner(object):
58 for i in range(0, len(files), sign_chunk): 61 for i in range(0, len(files), sign_chunk):
59 subprocess.check_output(shlex.split(cmd + ' '.join(files[i:i+sign_chunk])), stderr=subprocess.STDOUT) 62 subprocess.check_output(shlex.split(cmd + ' '.join(files[i:i+sign_chunk])), stderr=subprocess.STDOUT)
60 63
61 def detach_sign(self, input_file, keyid, passphrase_file, passphrase=None, armor=True): 64 def detach_sign(self, input_file, keyid, passphrase_file, passphrase=None, armor=True, output_suffix=None, use_sha256=False):
62 """Create a detached signature of a file""" 65 """Create a detached signature of a file"""
63 66
64 if passphrase_file and passphrase: 67 if passphrase_file and passphrase:
@@ -71,25 +74,35 @@ class LocalSigner(object):
71 cmd += ['--homedir', self.gpg_path] 74 cmd += ['--homedir', self.gpg_path]
72 if armor: 75 if armor:
73 cmd += ['--armor'] 76 cmd += ['--armor']
77 if use_sha256:
78 cmd += ['--digest-algo', "SHA256"]
74 79
75 #gpg > 2.1 supports password pipes only through the loopback interface 80 #gpg > 2.1 supports password pipes only through the loopback interface
76 #gpg < 2.1 errors out if given unknown parameters 81 #gpg < 2.1 errors out if given unknown parameters
77 if self.gpg_version > (2,1,): 82 if self.gpg_version > (2,1,):
78 cmd += ['--pinentry-mode', 'loopback'] 83 cmd += ['--pinentry-mode', 'loopback']
79 84
80 cmd += [input_file]
81
82 try: 85 try:
83 if passphrase_file: 86 if passphrase_file:
84 with open(passphrase_file) as fobj: 87 with open(passphrase_file) as fobj:
85 passphrase = fobj.readline(); 88 passphrase = fobj.readline();
86 89
87 job = subprocess.Popen(cmd, stdin=subprocess.PIPE, stderr=subprocess.PIPE) 90 if not output_suffix:
88 (_, stderr) = job.communicate(passphrase.encode("utf-8")) 91 output_suffix = 'asc' if armor else 'sig'
92 output_file = input_file + "." + output_suffix
93 with tempfile.TemporaryDirectory(dir=os.path.dirname(output_file)) as tmp_dir:
94 tmp_file = os.path.join(tmp_dir, os.path.basename(output_file))
95 cmd += ['-o', tmp_file]
96
97 cmd += [input_file]
98
99 job = subprocess.Popen(cmd, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
100 (_, stderr) = job.communicate(passphrase.encode("utf-8"))
89 101
90 if job.returncode: 102 if job.returncode:
91 bb.fatal("GPG exited with code %d: %s" % (job.returncode, stderr.decode("utf-8"))) 103 bb.fatal("GPG exited with code %d: %s" % (job.returncode, stderr.decode("utf-8")))
92 104
105 os.rename(tmp_file, output_file)
93 except IOError as e: 106 except IOError as e:
94 bb.error("IO error (%s): %s" % (e.errno, e.strerror)) 107 bb.error("IO error (%s): %s" % (e.errno, e.strerror))
95 raise Exception("Failed to sign '%s'" % input_file) 108 raise Exception("Failed to sign '%s'" % input_file)
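
As a usage sketch of the extended detach_sign() (the datastore d, key name and paths below are illustrative, and 'local' is assumed to select LocalSigner), the signature is now written atomically next to the input file via the temporary directory:

    from oe.gpg_sign import get_signer

    signer = get_signer(d, "local")   # d: a BitBake datastore (assumed)
    # Writes 'Packages.sig' (explicit suffix) using a SHA256 digest
    signer.detach_sign("Packages", "mykeyid", "/path/to/passphrase-file",
                       armor=False, output_suffix="sig", use_sha256=True)
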
@@ -109,16 +122,33 @@ class LocalSigner(object):
109 bb.fatal("Could not get gpg version: %s" % e) 122 bb.fatal("Could not get gpg version: %s" % e)
110 123
111 124
112 def verify(self, sig_file): 125 def verify(self, sig_file, valid_sigs = ''):
113 """Verify signature""" 126 """Verify signature"""
114 cmd = self.gpg_cmd + [" --verify", "--no-permission-warning"] 127 cmd = self.gpg_cmd + ["--verify", "--no-permission-warning", "--status-fd", "1"]
115 if self.gpg_path: 128 if self.gpg_path:
116 cmd += ["--homedir", self.gpg_path] 129 cmd += ["--homedir", self.gpg_path]
117 130
118 cmd += [sig_file] 131 cmd += [sig_file]
119 status = subprocess.call(cmd) 132 status = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
120 ret = False if status else True 133 # Valid if any key matches if unspecified
121 return ret 134 if not valid_sigs:
135 ret = False if status.returncode else True
136 return ret
137
138 import re
139 goodsigs = []
140 sigre = re.compile(r'^\[GNUPG:\] GOODSIG (\S+)\s(.*)$')
141 for l in status.stdout.decode("utf-8").splitlines():
142 s = sigre.match(l)
143 if s:
144 goodsigs += [s.group(1)]
145
146 for sig in valid_sigs.split():
147 if sig in goodsigs:
148 return True
149 if len(goodsigs):
150 bb.warn('No accepted signatures found. Good signatures found: %s.' % ' '.join(goodsigs))
151 return False
122 152
123 153
124def get_signer(d, backend): 154def get_signer(d, backend):
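
The key matching in verify() works by parsing gpg's machine-readable status lines; a self-contained sketch of that parsing step, with a fabricated status line for illustration:

    import re

    status_out = (
        "[GNUPG:] NEWSIG\n"
        "[GNUPG:] GOODSIG 0123456789ABCDEF Example Signer <signer@example.com>\n"
    )
    # Same pattern as in verify(): capture the key ID after GOODSIG
    sigre = re.compile(r'^\[GNUPG:\] GOODSIG (\S+)\s(.*)$')
    goodsigs = [m.group(1) for m in map(sigre.match, status_out.splitlines()) if m]
    assert goodsigs == ["0123456789ABCDEF"]
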
diff --git a/meta/lib/oe/license.py b/meta/lib/oe/license.py
index 665d32ecbb..6e55fa1e7f 100644
--- a/meta/lib/oe/license.py
+++ b/meta/lib/oe/license.py
@@ -1,10 +1,13 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4"""Code for parsing OpenEmbedded license strings""" 6"""Code for parsing OpenEmbedded license strings"""
5 7
6import ast 8import ast
7import re 9import re
10import oe.qa
8from fnmatch import fnmatchcase as fnmatch 11from fnmatch import fnmatchcase as fnmatch
9 12
10def license_ok(license, dont_want_licenses): 13def license_ok(license, dont_want_licenses):
@@ -14,6 +17,16 @@ def license_ok(license, dont_want_licenses):
14 return False 17 return False
15 return True 18 return True
16 19
20def obsolete_license_list():
21 return ["AGPL-3", "AGPL-3+", "AGPLv3", "AGPLv3+", "AGPLv3.0", "AGPLv3.0+", "AGPL-3.0", "AGPL-3.0+", "BSD-0-Clause",
22 "GPL-1", "GPL-1+", "GPLv1", "GPLv1+", "GPLv1.0", "GPLv1.0+", "GPL-1.0", "GPL-1.0+", "GPL-2", "GPL-2+", "GPLv2",
23 "GPLv2+", "GPLv2.0", "GPLv2.0+", "GPL-2.0", "GPL-2.0+", "GPL-3", "GPL-3+", "GPLv3", "GPLv3+", "GPLv3.0", "GPLv3.0+",
24 "GPL-3.0", "GPL-3.0+", "LGPLv2", "LGPLv2+", "LGPLv2.0", "LGPLv2.0+", "LGPL-2.0", "LGPL-2.0+", "LGPL2.1", "LGPL2.1+",
25 "LGPLv2.1", "LGPLv2.1+", "LGPL-2.1", "LGPL-2.1+", "LGPLv3", "LGPLv3+", "LGPL-3.0", "LGPL-3.0+", "MPL-1", "MPLv1",
26 "MPLv1.1", "MPLv2", "MIT-X", "MIT-style", "openssl", "PSF", "PSFv2", "Python-2", "Apachev2", "Apache-2", "Artisticv1",
27 "Artistic-1", "AFL-2", "AFL-1", "AFLv2", "AFLv1", "CDDLv1", "CDDL-1", "EPLv1.0", "FreeType", "Nauman",
28 "tcl", "vim", "SGIv1"]
29
17class LicenseError(Exception): 30class LicenseError(Exception):
18 pass 31 pass
19 32
@@ -74,6 +87,9 @@ class FlattenVisitor(LicenseVisitor):
74 def visit_Str(self, node): 87 def visit_Str(self, node):
75 self.licenses.append(node.s) 88 self.licenses.append(node.s)
76 89
90 def visit_Constant(self, node):
91 self.licenses.append(node.value)
92
77 def visit_BinOp(self, node): 93 def visit_BinOp(self, node):
78 if isinstance(node.op, ast.BitOr): 94 if isinstance(node.op, ast.BitOr):
79 left = FlattenVisitor(self.choose_licenses) 95 left = FlattenVisitor(self.choose_licenses)
@@ -96,26 +112,26 @@ def flattened_licenses(licensestr, choose_licenses):
96 raise LicenseSyntaxError(licensestr, exc) 112 raise LicenseSyntaxError(licensestr, exc)
97 return flatten.licenses 113 return flatten.licenses
98 114
99def is_included(licensestr, whitelist=None, blacklist=None): 115def is_included(licensestr, include_licenses=None, exclude_licenses=None):
100 """Given a license string and whitelist and blacklist, determine if the 116 """Given a license string, a list of licenses to include and a list of
101 license string matches the whitelist and does not match the blacklist. 117 licenses to exclude, determine if the license string matches the include
118 list and does not match the exclude list.
102 119
103 Returns a tuple holding the boolean state and a list of the applicable 120 Returns a tuple holding the boolean state and a list of the applicable
104 licenses that were excluded if state is False, or the licenses that were 121 licenses that were excluded if state is False, or the licenses that were
105 included if the state is True. 122 included if the state is True."""
106 """
107 123
108 def include_license(license): 124 def include_license(license):
109 return any(fnmatch(license, pattern) for pattern in whitelist) 125 return any(fnmatch(license, pattern) for pattern in include_licenses)
110 126
111 def exclude_license(license): 127 def exclude_license(license):
112 return any(fnmatch(license, pattern) for pattern in blacklist) 128 return any(fnmatch(license, pattern) for pattern in exclude_licenses)
113 129
114 def choose_licenses(alpha, beta): 130 def choose_licenses(alpha, beta):
115 """Select the option in an OR which is the 'best' (has the most 131 """Select the option in an OR which is the 'best' (has the most
116 included licenses and no excluded licenses).""" 132 included licenses and no excluded licenses)."""
117 # The factor 1000 below is arbitrary, just expected to be much larger 133 # The factor 1000 below is arbitrary, just expected to be much larger
118 # that the number of licenses actually specified. That way the weight 134 # than the number of licenses actually specified. That way the weight
119 # will be negative if the list of licenses contains an excluded license, 135 # will be negative if the list of licenses contains an excluded license,
120 # but still gives a higher weight to the list with the most included 136 # but still gives a higher weight to the list with the most included
121 # licenses. 137 # licenses.
@@ -128,11 +144,11 @@ def is_included(licensestr, whitelist=None, blacklist=None):
128 else: 144 else:
129 return beta 145 return beta
130 146
131 if not whitelist: 147 if not include_licenses:
132 whitelist = ['*'] 148 include_licenses = ['*']
133 149
134 if not blacklist: 150 if not exclude_licenses:
135 blacklist = [] 151 exclude_licenses = []
136 152
137 licenses = flattened_licenses(licensestr, choose_licenses) 153 licenses = flattened_licenses(licensestr, choose_licenses)
138 excluded = [lic for lic in licenses if exclude_license(lic)] 154 excluded = [lic for lic in licenses if exclude_license(lic)]
@@ -227,6 +243,9 @@ class ListVisitor(LicenseVisitor):
227 def visit_Str(self, node): 243 def visit_Str(self, node):
228 self.licenses.add(node.s) 244 self.licenses.add(node.s)
229 245
246 def visit_Constant(self, node):
247 self.licenses.add(node.value)
248
230def list_licenses(licensestr): 249def list_licenses(licensestr):
231 """Simply get a list of all licenses mentioned in a license string. 250 """Simply get a list of all licenses mentioned in a license string.
232 Binary operators are not applied or taken into account in any way""" 251 Binary operators are not applied or taken into account in any way"""
@@ -236,3 +255,225 @@ def list_licenses(licensestr):
236 except SyntaxError as exc: 255 except SyntaxError as exc:
237 raise LicenseSyntaxError(licensestr, exc) 256 raise LicenseSyntaxError(licensestr, exc)
238 return visitor.licenses 257 return visitor.licenses
258
259def apply_pkg_license_exception(pkg, bad_licenses, exceptions):
260 """Return remaining bad licenses after removing any package exceptions"""
261
262 return [lic for lic in bad_licenses if pkg + ':' + lic not in exceptions]
263
264def return_spdx(d, license):
265 """
266 This function returns the SPDX mapping of a license if it exists.
267 """
268 return d.getVarFlag('SPDXLICENSEMAP', license)
269
270def canonical_license(d, license):
271 """
272 Return the canonical (SPDX) form of the license if available (so GPLv3
273 becomes GPL-3.0-only) or the passed license if there is no canonical form.
274 """
275 return d.getVarFlag('SPDXLICENSEMAP', license) or license
276
277def expand_wildcard_licenses(d, wildcard_licenses):
278 """
279 There are some common wildcard values users may want to use. Support them
280 here.
281 """
282 licenses = set(wildcard_licenses)
283 mapping = {
284 "AGPL-3.0*" : ["AGPL-3.0-only", "AGPL-3.0-or-later"],
285 "GPL-3.0*" : ["GPL-3.0-only", "GPL-3.0-or-later"],
286 "LGPL-3.0*" : ["LGPL-3.0-only", "LGPL-3.0-or-later"],
287 }
288 for k in mapping:
289 if k in wildcard_licenses:
290 licenses.remove(k)
291 for item in mapping[k]:
292 licenses.add(item)
293
294 for l in licenses:
295 if l in obsolete_license_list():
296 bb.fatal("Error, %s is an obsolete license, please use an SPDX reference in INCOMPATIBLE_LICENSE" % l)
297 if "*" in l:
298 bb.fatal("Error, %s is an invalid license wildcard entry" % l)
299
300 return list(licenses)
301
302def incompatible_license_contains(license, truevalue, falsevalue, d):
303 license = canonical_license(d, license)
304 bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()
305 bad_licenses = expand_wildcard_licenses(d, bad_licenses)
306 return truevalue if license in bad_licenses else falsevalue
307
308def incompatible_pkg_license(d, dont_want_licenses, license):
309 # Handles an "or" or two license sets provided by
310 # flattened_licenses(), pick one that works if possible.
311 def choose_lic_set(a, b):
312 return a if all(license_ok(canonical_license(d, lic),
313 dont_want_licenses) for lic in a) else b
314
315 try:
316 licenses = flattened_licenses(license, choose_lic_set)
317 except LicenseError as exc:
318 bb.fatal('%s: %s' % (d.getVar('P'), exc))
319
320 incompatible_lic = []
321 for l in licenses:
322 license = canonical_license(d, l)
323 if not license_ok(license, dont_want_licenses):
324 incompatible_lic.append(license)
325
326 return sorted(incompatible_lic)
327
328def incompatible_license(d, dont_want_licenses, package=None):
329 """
330 This function checks if a recipe has only incompatible licenses. It also
331 takes the 'or' operand into consideration. dont_want_licenses should be passed
332 as canonical (SPDX) names.
333 """
334 license = d.getVar("LICENSE:%s" % package) if package else None
335 if not license:
336 license = d.getVar('LICENSE')
337
338 return incompatible_pkg_license(d, dont_want_licenses, license)
339
340def check_license_flags(d):
341 """
342 This function checks if a recipe has any LICENSE_FLAGS that
343 aren't acceptable.
344
345 If it does, it returns all of the LICENSE_FLAGS missing from the list
346 of acceptable license flags, or all of the LICENSE_FLAGS if there
347 is no list of acceptable flags.
348
349 If everything is acceptable, it returns None.
350 """
351
352 def license_flag_matches(flag, acceptlist, pn):
353 """
354 Return True if flag matches something in acceptlist, None if not.
355
356 Before we test a flag against the acceptlist, we append _${PN}
357 to it. We then try to match that string against the
358 acceptlist. This covers the normal case, where we expect
359 LICENSE_FLAGS to be a simple string like 'commercial', which
360 the user typically matches exactly in the acceptlist by
361 explicitly appending the package name, e.g. 'commercial_foo'.
362 If we fail the match however, we then split the flag across
363 '_' and append each fragment and test until we either match or
364 run out of fragments.
365 """
366 flag_pn = ("%s_%s" % (flag, pn))
367 for candidate in acceptlist:
368 if flag_pn == candidate:
369 return True
370
371 flag_cur = ""
372 flagments = flag_pn.split("_")
373 flagments.pop() # we've already tested the full string
374 for flagment in flagments:
375 if flag_cur:
376 flag_cur += "_"
377 flag_cur += flagment
378 for candidate in acceptlist:
379 if flag_cur == candidate:
380 return True
381 return False
382
383 def all_license_flags_match(license_flags, acceptlist):
384 """ Return all unmatched flags, None if all flags match """
385 pn = d.getVar('PN')
386 split_acceptlist = acceptlist.split()
387 flags = []
388 for flag in license_flags.split():
389 if not license_flag_matches(flag, split_acceptlist, pn):
390 flags.append(flag)
391 return flags if flags else None
392
393 license_flags = d.getVar('LICENSE_FLAGS')
394 if license_flags:
395 acceptlist = d.getVar('LICENSE_FLAGS_ACCEPTED')
396 if not acceptlist:
397 return license_flags.split()
398 unmatched_flags = all_license_flags_match(license_flags, acceptlist)
399 if unmatched_flags:
400 return unmatched_flags
401 return None
402
403def check_license_format(d):
404 """
405 This function checks that LICENSE is well formed and validates
406 the operators used in LICENSE.
407 License names must be separated by valid operators, not bare spaces.
408 """
409 pn = d.getVar('PN')
410 licenses = d.getVar('LICENSE')
411
412 elements = list(filter(lambda x: x.strip(), license_operator.split(licenses)))
413 for pos, element in enumerate(elements):
414 if license_pattern.match(element):
415 if pos > 0 and license_pattern.match(elements[pos - 1]):
416 oe.qa.handle_error('license-format',
417 '%s: LICENSE value "%s" has an invalid format - license names ' \
418 'must be separated by the following characters to indicate ' \
419 'the license selection: %s' %
420 (pn, licenses, license_operator_chars), d)
421 elif not license_operator.match(element):
422 oe.qa.handle_error('license-format',
423 '%s: LICENSE value "%s" has an invalid separator "%s" that is not ' \
424 'in the valid list of separators (%s)' %
425 (pn, licenses, element, license_operator_chars), d)
426
427def skip_incompatible_package_licenses(d, pkgs):
428 if not pkgs:
429 return {}
430
431 pn = d.getVar("PN")
432
433 check_license = not pn.startswith("nativesdk-")
434 for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}",
435 "-crosssdk-${SDK_SYS}", "-crosssdk-initial-${SDK_SYS}",
436 "-cross-canadian-${TRANSLATED_TARGET_ARCH}"]:
437 if pn.endswith(d.expand(t)):
438 check_license = False
439 if pn.startswith("gcc-source-"):
440 check_license = False
441
442 bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()
443 if not check_license or not bad_licenses:
444 return {}
445
446 bad_licenses = expand_wildcard_licenses(d, bad_licenses)
447
448 exceptions = (d.getVar("INCOMPATIBLE_LICENSE_EXCEPTIONS") or "").split()
449
450 for lic_exception in exceptions:
451 if ":" in lic_exception:
452 lic_exception = lic_exception.split(":")[1]
453 if lic_exception in obsolete_license_list():
454 bb.fatal("Obsolete license %s used in INCOMPATIBLE_LICENSE_EXCEPTIONS" % lic_exception)
455
456 skipped_pkgs = {}
457 for pkg in pkgs:
458 remaining_bad_licenses = apply_pkg_license_exception(pkg, bad_licenses, exceptions)
459
460 incompatible_lic = incompatible_license(d, remaining_bad_licenses, pkg)
461 if incompatible_lic:
462 skipped_pkgs[pkg] = incompatible_lic
463
464 return skipped_pkgs
465
466def tidy_licenses(value):
467 """
468 Flat, split and sort licenses.
469 """
470 from oe.license import flattened_licenses
471
472 def _choose(a, b):
473 str_a, str_b = sorted((" & ".join(a), " & ".join(b)), key=str.casefold)
474 return ["(%s | %s)" % (str_a, str_b)]
475
476 if not isinstance(value, str):
477 value = " & ".join(value)
478
479 return sorted(list(set(flattened_licenses(value, _choose))), key=str.casefold)
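
A short sketch of the renamed is_included() interface (run inside a BitBake environment, since oe.license now pulls in oe.qa; the license strings are illustrative):

    import oe.license

    # 'GPL-3.0-only | MIT' is a choice; MIT satisfies the include list, so
    # the OR resolves to it and the string is accepted
    ok, lics = oe.license.is_included(
        "GPL-3.0-only | MIT",
        include_licenses=["MIT", "BSD-3-Clause"],
        exclude_licenses=["GPL-3.0-only"])
    assert ok and lics == ["MIT"]
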
diff --git a/meta/lib/oe/license_finder.py b/meta/lib/oe/license_finder.py
new file mode 100644
index 0000000000..16f5d7c94c
--- /dev/null
+++ b/meta/lib/oe/license_finder.py
@@ -0,0 +1,179 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import fnmatch
8import hashlib
9import logging
10import os
11import re
12
13import bb
14import bb.utils
15
16logger = logging.getLogger("BitBake.OE.LicenseFinder")
17
18def _load_hash_csv(d):
19 """
20 Load a mapping of (checksum: license name) from all files/license-hashes.csv
21 files that can be found in the available layers.
22 """
23 import csv
24 md5sums = {}
25
26 # Read license md5sums from csv file
27 for path in d.getVar('BBPATH').split(':'):
28 csv_path = os.path.join(path, 'files', 'license-hashes.csv')
29 if os.path.isfile(csv_path):
30 with open(csv_path, newline='') as csv_file:
31 reader = csv.DictReader(csv_file, delimiter=',', fieldnames=['md5sum', 'license'])
32 for row in reader:
33 md5sums[row['md5sum']] = row['license']
34
35 return md5sums
36
37
38def _crunch_known_licenses(d):
39 """
40 Calculate the MD5 checksums for the original and "crunched" versions of all
41 known licenses.
42 """
43 md5sums = {}
44
45 lic_dirs = [d.getVar('COMMON_LICENSE_DIR')] + (d.getVar('LICENSE_PATH') or "").split()
46 for lic_dir in lic_dirs:
47 for fn in os.listdir(lic_dir):
48 path = os.path.join(lic_dir, fn)
49 # Hash the exact contents
50 md5value = bb.utils.md5_file(path)
51 md5sums[md5value] = fn
52 # Also hash a "crunched" version
53 md5value = _crunch_license(path)
54 md5sums[md5value] = fn
55
56 return md5sums
57
58
59def _crunch_license(licfile):
60 '''
61 Remove non-material text from a license file and then calculate its
62 md5sum. This works well for licenses that contain a copyright statement,
63 but is also a useful way to handle people's insistence upon reformatting
64 the license text slightly (with no material difference to the text of the
65 license).
66 '''
67
68 import oe.utils
69
70 # Note: these are carefully constructed!
71 license_title_re = re.compile(r'^#*\(? *(This is )?([Tt]he )?.{0,15} ?[Ll]icen[sc]e( \(.{1,10}\))?\)?[:\.]? ?#*$')
72 license_statement_re = re.compile(r'^((This (project|software)|.{1,10}) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$')
73 copyright_re = re.compile(r'^ *[#\*]* *(Modified work |MIT LICENSED )?Copyright ?(\([cC]\))? .*$')
74 disclaimer_re = re.compile(r'^ *\*? ?All [Rr]ights [Rr]eserved\.$')
75 email_re = re.compile(r'^.*<[\w\.-]*@[\w\.\-]*>$')
76 header_re = re.compile(r'^(\/\**!?)? ?[\-=\*]* ?(\*\/)?$')
77 tag_re = re.compile(r'^ *@?\(?([Ll]icense|MIT)\)?$')
78 url_re = re.compile(r'^ *[#\*]* *https?:\/\/[\w\.\/\-]+$')
79
80 lictext = []
81 with open(licfile, 'r', errors='surrogateescape') as f:
82 for line in f:
83 # Drop opening statements
84 if copyright_re.match(line):
85 continue
86 elif disclaimer_re.match(line):
87 continue
88 elif email_re.match(line):
89 continue
90 elif header_re.match(line):
91 continue
92 elif tag_re.match(line):
93 continue
94 elif url_re.match(line):
95 continue
96 elif license_title_re.match(line):
97 continue
98 elif license_statement_re.match(line):
99 continue
100 # Strip comment symbols
101 line = line.replace('*', '') \
102 .replace('#', '')
103 # Unify spelling
104 line = line.replace('sub-license', 'sublicense')
105 # Squash spaces
106 line = oe.utils.squashspaces(line.strip())
107 # Replace smart quotes, double quotes and backticks with single quotes
108 line = line.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u201c","'").replace(u"\u201d", "'").replace('"', '\'').replace('`', '\'')
109 # Unify brackets
110 line = line.replace("{", "[").replace("}", "]")
111 if line:
112 lictext.append(line)
113
114 m = hashlib.md5()
115 try:
116 m.update(' '.join(lictext).encode('utf-8'))
117 md5val = m.hexdigest()
118 except UnicodeEncodeError:
119 md5val = None
120 return md5val
121
122
123def find_license_files(srctree, first_only=False):
124 """
125 Search srctree for files that look like they could be licenses.
126 If first_only is True, only return the first file found.
127 """
128 licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10']
129 skip_extensions = (".html", ".js", ".json", ".svg", ".ts", ".go", ".sh")
130 licfiles = []
131 for root, dirs, files in os.walk(srctree):
132 # Sort files so that LICENSE is before LICENSE.subcomponent, which is
133 # meaningful if first_only is set.
134 for fn in sorted(files):
135 if fn.endswith(skip_extensions):
136 continue
137 for spec in licspecs:
138 if fnmatch.fnmatch(fn, spec):
139 fullpath = os.path.join(root, fn)
140                    if fullpath not in licfiles:
141 licfiles.append(fullpath)
142 if first_only:
143 return licfiles
144
145 return licfiles
146
147
148def match_licenses(licfiles, srctree, d, extra_hashes={}):
149 md5sums = {}
150 md5sums.update(_load_hash_csv(d))
151 md5sums.update(_crunch_known_licenses(d))
152 md5sums.update(extra_hashes)
153
154 licenses = []
155 for licfile in sorted(licfiles):
156 resolved_licfile = d.expand(licfile)
157 md5value = bb.utils.md5_file(resolved_licfile)
158 license = md5sums.get(md5value, None)
159 if not license:
160 crunched_md5 = _crunch_license(resolved_licfile)
161 license = md5sums.get(crunched_md5, None)
162 if not license:
163 license = 'Unknown'
164 logger.info("Please add the following line for '%s' to a 'license-hashes.csv' " \
165 "and replace `Unknown` with the license:\n" \
166 "%s,Unknown" % (os.path.relpath(licfile, srctree + "/.."), md5value))
167
168 licenses.append((license, os.path.relpath(licfile, srctree), md5value))
169
170 return licenses
171
172
173def find_licenses(srctree, d, first_only=False, extra_hashes={}):
174 licfiles = find_license_files(srctree, first_only)
175 licenses = match_licenses(licfiles, srctree, d, extra_hashes)
176
177 # FIXME should we grab at least one source file with a license header and add that too?
178
179 return licenses
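
Usage sketch for the finder (the source tree path is illustrative; find_license_files() needs no datastore, while match_licenses() and find_licenses() do):

    from oe.license_finder import find_license_files

    licfiles = find_license_files("/work/my-project", first_only=True)
    # e.g. ['/work/my-project/LICENSE'] -- file names are matched against
    # patterns such as COPYING*, *LICEN[CS]E*, before any hash matching
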
diff --git a/meta/lib/oe/lsb.py b/meta/lib/oe/lsb.py
index 43e46380d7..3ec03e5042 100644
--- a/meta/lib/oe/lsb.py
+++ b/meta/lib/oe/lsb.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
diff --git a/meta/lib/oe/maketype.py b/meta/lib/oe/maketype.py
index d929c8b3e5..7a83bdf602 100644
--- a/meta/lib/oe/maketype.py
+++ b/meta/lib/oe/maketype.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4"""OpenEmbedded variable typing support 6"""OpenEmbedded variable typing support
@@ -10,12 +12,7 @@ the arguments of the type's factory for details.
10 12
11import inspect 13import inspect
12import oe.types as types 14import oe.types as types
13try: 15from collections.abc import Callable
14 # Python 3.7+
15 from collections.abc import Callable
16except ImportError:
17 # Python < 3.7
18 from collections import Callable
19 16
20available_types = {} 17available_types = {}
21 18
diff --git a/meta/lib/oe/manifest.py b/meta/lib/oe/manifest.py
index 1a058dcd73..cf7a13c247 100644
--- a/meta/lib/oe/manifest.py
+++ b/meta/lib/oe/manifest.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -198,7 +200,3 @@ def create_manifest(d, final_manifest=False, manifest_dir=None,
198 manifest.create_final() 200 manifest.create_final()
199 else: 201 else:
200 manifest.create_initial() 202 manifest.create_initial()
201
202
203if __name__ == "__main__":
204 pass
diff --git a/meta/lib/oe/npm_registry.py b/meta/lib/oe/npm_registry.py
new file mode 100644
index 0000000000..d97ced7cda
--- /dev/null
+++ b/meta/lib/oe/npm_registry.py
@@ -0,0 +1,175 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import bb
8import json
9import os, subprocess
10
11_ALWAYS_SAFE = frozenset('ABCDEFGHIJKLMNOPQRSTUVWXYZ'
12 'abcdefghijklmnopqrstuvwxyz'
13 '0123456789'
14 '_.-~()')
15
16MISSING_OK = object()
17
18REGISTRY = "https://registry.npmjs.org"
19
20# we cannot use urllib.parse here because npm expects lowercase
21# hex-chars but urllib generates uppercase ones
22def uri_quote(s, safe = '/'):
23 res = ""
24 safe_set = set(safe)
25 for c in s:
26 if c in _ALWAYS_SAFE or c in safe_set:
27 res += c
28 else:
29 res += '%%%02x' % ord(c)
30 return res
31
32class PackageJson:
33 def __init__(self, spec):
34 self.__spec = spec
35
36 @property
37 def name(self):
38 return self.__spec['name']
39
40 @property
41 def version(self):
42 return self.__spec['version']
43
44 @property
45 def empty_manifest(self):
46 return {
47 'name': self.name,
48 'description': self.__spec.get('description', ''),
49 'versions': {},
50 }
51
52 def base_filename(self):
53 return uri_quote(self.name, safe = '@')
54
55 def as_manifest_entry(self, tarball_uri):
56 res = {}
57
58 ## NOTE: 'npm install' requires more than basic meta information;
59 ## e.g. it takes 'bin' from this manifest entry but not the actual
60 ## 'package.json'
61 for (idx,dflt) in [('name', None),
62 ('description', ""),
63 ('version', None),
64 ('bin', MISSING_OK),
65 ('man', MISSING_OK),
66 ('scripts', MISSING_OK),
67 ('directories', MISSING_OK),
68 ('dependencies', MISSING_OK),
69 ('devDependencies', MISSING_OK),
70 ('optionalDependencies', MISSING_OK),
71 ('license', "unknown")]:
72 if idx in self.__spec:
73 res[idx] = self.__spec[idx]
74 elif dflt == MISSING_OK:
75 pass
76            elif dflt is not None:
77 res[idx] = dflt
78 else:
79 raise Exception("%s-%s: missing key %s" % (self.name,
80 self.version,
81 idx))
82
83 res['dist'] = {
84 'tarball': tarball_uri,
85 }
86
87 return res
88
89class ManifestImpl:
90 def __init__(self, base_fname, spec):
91 self.__base = base_fname
92 self.__spec = spec
93
94 def load(self):
95 try:
96 with open(self.filename, "r") as f:
97 res = json.load(f)
98 except IOError:
99 res = self.__spec.empty_manifest
100
101 return res
102
103 def save(self, meta):
104 with open(self.filename, "w") as f:
105 json.dump(meta, f, indent = 2)
106
107 @property
108 def filename(self):
109 return self.__base + ".meta"
110
111class Manifest:
112 def __init__(self, base_fname, spec):
113 self.__base = base_fname
114 self.__spec = spec
115 self.__lockf = None
116 self.__impl = None
117
118 def __enter__(self):
119 self.__lockf = bb.utils.lockfile(self.__base + ".lock")
120 self.__impl = ManifestImpl(self.__base, self.__spec)
121 return self.__impl
122
123 def __exit__(self, exc_type, exc_val, exc_tb):
124 bb.utils.unlockfile(self.__lockf)
125
126class NpmCache:
127 def __init__(self, cache):
128 self.__cache = cache
129
130 @property
131 def path(self):
132 return self.__cache
133
134 def run(self, type, key, fname):
135 subprocess.run(['oe-npm-cache', self.__cache, type, key, fname],
136 check = True)
137
138class NpmRegistry:
139 def __init__(self, path, cache):
140 self.__path = path
141 self.__cache = NpmCache(cache + '/_cacache')
142 bb.utils.mkdirhier(self.__path)
143 bb.utils.mkdirhier(self.__cache.path)
144
145 @staticmethod
146 ## This function is critical and must match nodejs expectations
147 def _meta_uri(spec):
148 return REGISTRY + '/' + uri_quote(spec.name, safe = '@')
149
150 @staticmethod
151 ## Exact return value does not matter; just make it look like a
152 ## usual registry url
153 def _tarball_uri(spec):
154 return '%s/%s/-/%s-%s.tgz' % (REGISTRY,
155 uri_quote(spec.name, safe = '@'),
156 uri_quote(spec.name, safe = '@/'),
157 spec.version)
158
159 def add_pkg(self, tarball, pkg_json):
160 pkg_json = PackageJson(pkg_json)
161 base = os.path.join(self.__path, pkg_json.base_filename())
162
163 with Manifest(base, pkg_json) as manifest:
164 meta = manifest.load()
165 tarball_uri = self._tarball_uri(pkg_json)
166
167 meta['versions'][pkg_json.version] = pkg_json.as_manifest_entry(tarball_uri)
168
169 manifest.save(meta)
170
171 ## Cache entries are a little bit dependent on the nodejs
172 ## version; version specific cache implementation must
173 ## mitigate differences
174            self.__cache.run('meta', self._meta_uri(pkg_json), manifest.filename)
175            self.__cache.run('tgz', tarball_uri, tarball)
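
The quoting helper is the part npm is pickiest about; a quick check of its behaviour (note the lowercase hex escape, which is the reason urllib.parse is avoided):

    from oe.npm_registry import uri_quote

    # '/' is escaped with lowercase hex unless listed as safe
    assert uri_quote("@scope/pkg", safe="@") == "@scope%2fpkg"
    assert uri_quote("@scope/pkg", safe="@/") == "@scope/pkg"
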
diff --git a/meta/lib/oe/overlayfs.py b/meta/lib/oe/overlayfs.py
new file mode 100644
index 0000000000..8b88900f71
--- /dev/null
+++ b/meta/lib/oe/overlayfs.py
@@ -0,0 +1,54 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6# This file contains common functions for overlayfs and its QA check
7
8# this function is based on https://github.com/systemd/systemd/blob/main/src/basic/unit-name.c
9def escapeSystemdUnitName(path):
10 escapeMap = {
11 '/': '-',
12 '-': "\\x2d",
13        '\\': "\\x5c"
14 }
15 return "".join([escapeMap.get(c, c) for c in path.strip('/')])
16
17def strForBash(s):
18 return s.replace('\\', '\\\\')
19
20def allOverlaysUnitName(d):
21 return d.getVar('PN') + '-overlays.service'
22
23def mountUnitName(unit):
24 return escapeSystemdUnitName(unit) + '.mount'
25
26def helperUnitName(unit):
27 return escapeSystemdUnitName(unit) + '-create-upper-dir.service'
28
29def unitFileList(d):
30 fileList = []
31 overlayMountPoints = d.getVarFlags("OVERLAYFS_MOUNT_POINT")
32
33 if not overlayMountPoints:
34 bb.fatal("A recipe uses overlayfs class but there is no OVERLAYFS_MOUNT_POINT set in your MACHINE configuration")
35
36 # check that we have required mount points set first
37 requiredMountPoints = d.getVarFlags('OVERLAYFS_WRITABLE_PATHS')
38 for mountPoint in requiredMountPoints:
39 if mountPoint not in overlayMountPoints:
40 bb.fatal("Missing required mount point for OVERLAYFS_MOUNT_POINT[%s] in your MACHINE configuration" % mountPoint)
41
42 for mountPoint in overlayMountPoints:
43 mountPointList = d.getVarFlag('OVERLAYFS_WRITABLE_PATHS', mountPoint)
44 if not mountPointList:
45            bb.debug(1, "No mount points defined for %s flag, not adding to file list" % mountPoint)
46 continue
47 for path in mountPointList.split():
48 fileList.append(mountUnitName(path))
49 fileList.append(helperUnitName(path))
50
51 fileList.append(allOverlaysUnitName(d))
52
53 return fileList
54
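
A sketch of the unit-name escaping in practice, which mirrors systemd's unit_name_escape() for the characters it handles (the mount paths are illustrative):

    from oe.overlayfs import mountUnitName, helperUnitName

    # '/' becomes '-', and a literal '-' becomes '\x2d'
    assert mountUnitName("/data/app-storage") == "data-app\\x2dstorage.mount"
    assert helperUnitName("/mnt/overlay") == "mnt-overlay-create-upper-dir.service"
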
diff --git a/meta/lib/oe/package.py b/meta/lib/oe/package.py
index dd700cbb0c..ce69151e5d 100644
--- a/meta/lib/oe/package.py
+++ b/meta/lib/oe/package.py
@@ -1,12 +1,25 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
7import errno
8import fnmatch
9import itertools
10import os
11import shlex
12import re
13import glob
5import stat 14import stat
6import mmap 15import mmap
7import subprocess 16import subprocess
17import shutil
18
19import bb.parse
20import oe.cachedpath
8 21
9def runstrip(arg): 22def runstrip(file, elftype, strip, extra_strip_sections=''):
10 # Function to strip a single file, called from split_and_strip_files below 23 # Function to strip a single file, called from split_and_strip_files below
11 # A working 'file' (one which works on the target architecture) 24 # A working 'file' (one which works on the target architecture)
12 # 25 #
@@ -16,8 +29,6 @@ def runstrip(arg):
16 # 8 - shared library 29 # 8 - shared library
17 # 16 - kernel module 30 # 16 - kernel module
18 31
19 (file, elftype, strip) = arg
20
21 newmode = None 32 newmode = None
22 if not os.access(file, os.W_OK) or os.access(file, os.R_OK): 33 if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
23 origmode = os.stat(file)[stat.ST_MODE] 34 origmode = os.stat(file)[stat.ST_MODE]
@@ -26,7 +37,7 @@ def runstrip(arg):
26 37
27 stripcmd = [strip] 38 stripcmd = [strip]
28 skip_strip = False 39 skip_strip = False
29 # kernel module 40 # kernel module
30 if elftype & 16: 41 if elftype & 16:
31 if is_kernel_module_signed(file): 42 if is_kernel_module_signed(file):
32 bb.debug(1, "Skip strip on signed module %s" % file) 43 bb.debug(1, "Skip strip on signed module %s" % file)
@@ -40,6 +51,9 @@ def runstrip(arg):
40 # shared or executable: 51 # shared or executable:
41 elif elftype & 8 or elftype & 4: 52 elif elftype & 8 or elftype & 4:
42 stripcmd.extend(["--remove-section=.comment", "--remove-section=.note"]) 53 stripcmd.extend(["--remove-section=.comment", "--remove-section=.note"])
54 if extra_strip_sections != '':
55 for section in extra_strip_sections.split():
56 stripcmd.extend(["--remove-section=" + section])
43 57
44 stripcmd.append(file) 58 stripcmd.append(file)
45 bb.debug(1, "runstrip: %s" % stripcmd) 59 bb.debug(1, "runstrip: %s" % stripcmd)
@@ -96,7 +110,7 @@ def is_static_lib(path):
96 return start == magic 110 return start == magic
97 return False 111 return False
98 112
99def strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, d, qa_already_stripped=False): 113def strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, max_process, qa_already_stripped=False):
100 """ 114 """
101 Strip executable code (like executables, shared libraries) _in_place_ 115 Strip executable code (like executables, shared libraries) _in_place_
102 - Based on sysroot_strip in staging.bbclass 116 - Based on sysroot_strip in staging.bbclass
@@ -104,6 +118,7 @@ def strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, d, qa_already_stripp
104 :param strip_cmd: Strip command (usually ${STRIP}) 118 :param strip_cmd: Strip command (usually ${STRIP})
105 :param libdir: ${libdir} - strip .so files in this directory 119 :param libdir: ${libdir} - strip .so files in this directory
106 :param base_libdir: ${base_libdir} - strip .so files in this directory 120 :param base_libdir: ${base_libdir} - strip .so files in this directory
121 :param max_process: number of stripping processes started in parallel
107 :param qa_already_stripped: Set to True if already-stripped' in ${INSANE_SKIP} 122 :param qa_already_stripped: Set to True if already-stripped' in ${INSANE_SKIP}
108 This is for proper logging and messages only. 123 This is for proper logging and messages only.
109 """ 124 """
@@ -146,7 +161,7 @@ def strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, d, qa_already_stripp
146 # ...but is it ELF, and is it already stripped? 161 # ...but is it ELF, and is it already stripped?
147 checkelf.append(file) 162 checkelf.append(file)
148 inodecache[file] = s.st_ino 163 inodecache[file] = s.st_ino
149 results = oe.utils.multiprocess_launch(is_elf, checkelf, d) 164 results = oe.utils.multiprocess_launch_mp(is_elf, checkelf, max_process)
150 for (file, elf_file) in results: 165 for (file, elf_file) in results:
151 #elf_file = is_elf(file) 166 #elf_file = is_elf(file)
152 if elf_file & 1: 167 if elf_file & 1:
@@ -174,22 +189,35 @@ def strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, d, qa_already_stripp
174 elf_file = int(elffiles[file]) 189 elf_file = int(elffiles[file])
175 sfiles.append((file, elf_file, strip_cmd)) 190 sfiles.append((file, elf_file, strip_cmd))
176 191
177 oe.utils.multiprocess_launch(runstrip, sfiles, d) 192 oe.utils.multiprocess_launch_mp(runstrip, sfiles, max_process)
178 193
194TRANSLATE = (
195 ("@", "@at@"),
196 (" ", "@space@"),
197 ("\t", "@tab@"),
198 ("[", "@openbrace@"),
199 ("]", "@closebrace@"),
200 ("_", "@underscore@"),
201 (":", "@colon@"),
202)
179 203
180def file_translate(file): 204def file_translate(file):
181 ft = file.replace("@", "@at@") 205 ft = file
182 ft = ft.replace(" ", "@space@") 206 for s, replace in TRANSLATE:
183 ft = ft.replace("\t", "@tab@") 207 ft = ft.replace(s, replace)
184 ft = ft.replace("[", "@openbrace@") 208
185 ft = ft.replace("]", "@closebrace@") 209 return ft
186 ft = ft.replace("_", "@underscore@") 210
211def file_reverse_translate(file):
212 ft = file
213 for s, replace in reversed(TRANSLATE):
214 ft = ft.replace(replace, s)
215
187 return ft 216 return ft
188 217
189def filedeprunner(arg): 218def filedeprunner(pkg, pkgfiles, rpmdeps, pkgdest):
190 import re, subprocess, shlex 219 import re, subprocess, shlex
191 220
192 (pkg, pkgfiles, rpmdeps, pkgdest) = arg
193 provides = {} 221 provides = {}
194 requires = {} 222 requires = {}
195 223
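
The table-driven rewrite makes the encoding reversible; a round-trip sketch (requires a BitBake environment for the oe.package import; the path is illustrative):

    from oe.package import file_translate, file_reverse_translate

    path = "/usr/share/doc/my pkg_1.0 [beta]"
    enc = file_translate(path)
    assert enc == ("/usr/share/doc/my@space@pkg@underscore@1.0"
                   "@space@@openbrace@beta@closebrace@")
    # Applying the table in reverse order restores the original name
    assert file_reverse_translate(enc) == path
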
@@ -283,3 +311,1800 @@ def read_shlib_providers(d):
283 shlib_provider[s[0]] = {} 311 shlib_provider[s[0]] = {}
284 shlib_provider[s[0]][s[1]] = (dep_pkg, s[2]) 312 shlib_provider[s[0]][s[1]] = (dep_pkg, s[2])
285 return shlib_provider 313 return shlib_provider
314
315# We generate a master list of directories to process. We start by
316# seeding this list with reasonable defaults, then load from
317# the fs-perms.txt files.
318def fixup_perms(d):
319 import pwd, grp
320
321 cpath = oe.cachedpath.CachedPath()
322 dvar = d.getVar('PKGD')
323
324 # init using a string with the same format as a line as documented in
325 # the fs-perms.txt file
326 # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
327 # <path> link <link target>
328 #
329 # __str__ can be used to print out an entry in the input format
330 #
331 # if fs_perms_entry.path is None:
332 # an error occurred
333 # if fs_perms_entry.link, you can retrieve:
334 # fs_perms_entry.path = path
335 # fs_perms_entry.link = target of link
336 # if not fs_perms_entry.link, you can retrieve:
337 # fs_perms_entry.path = path
338 # fs_perms_entry.mode = expected dir mode or None
339 # fs_perms_entry.uid = expected uid or -1
340 # fs_perms_entry.gid = expected gid or -1
341 # fs_perms_entry.walk = 'true' or something else
342 # fs_perms_entry.fmode = expected file mode or None
343 # fs_perms_entry.fuid = expected file uid or -1
344    #  fs_perms_entry.fgid = expected file gid or -1
345 class fs_perms_entry():
346 def __init__(self, line):
347 lsplit = line.split()
348 if len(lsplit) == 3 and lsplit[1].lower() == "link":
349 self._setlink(lsplit[0], lsplit[2])
350 elif len(lsplit) == 8:
351 self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
352 else:
353 msg = "Fixup Perms: invalid config line %s" % line
354 oe.qa.handle_error("perm-config", msg, d)
355 self.path = None
356 self.link = None
357
358 def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
359 self.path = os.path.normpath(path)
360 self.link = None
361 self.mode = self._procmode(mode)
362 self.uid = self._procuid(uid)
363 self.gid = self._procgid(gid)
364 self.walk = walk.lower()
365 self.fmode = self._procmode(fmode)
366 self.fuid = self._procuid(fuid)
367 self.fgid = self._procgid(fgid)
368
369 def _setlink(self, path, link):
370 self.path = os.path.normpath(path)
371 self.link = link
372
373 def _procmode(self, mode):
374            if not mode or mode == "-":
375 return None
376 else:
377 return int(mode,8)
378
379 # Note uid/gid -1 has special significance in os.lchown
380 def _procuid(self, uid):
381 if uid is None or uid == "-":
382 return -1
383 elif uid.isdigit():
384 return int(uid)
385 else:
386 return pwd.getpwnam(uid).pw_uid
387
388 def _procgid(self, gid):
389 if gid is None or gid == "-":
390 return -1
391 elif gid.isdigit():
392 return int(gid)
393 else:
394 return grp.getgrnam(gid).gr_gid
395
396 # Use for debugging the entries
397 def __str__(self):
398 if self.link:
399 return "%s link %s" % (self.path, self.link)
400 else:
401 mode = "-"
402 if self.mode:
403 mode = "0%o" % self.mode
404 fmode = "-"
405 if self.fmode:
406 fmode = "0%o" % self.fmode
407 uid = self._mapugid(self.uid)
408 gid = self._mapugid(self.gid)
409 fuid = self._mapugid(self.fuid)
410 fgid = self._mapugid(self.fgid)
411 return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)
412
413 def _mapugid(self, id):
414 if id is None or id == -1:
415 return "-"
416 else:
417 return "%d" % id
418
419 # Fix the permission, owner and group of path
420 def fix_perms(path, mode, uid, gid, dir):
421 if mode and not os.path.islink(path):
422 #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
423 os.chmod(path, mode)
424 # -1 is a special value that means don't change the uid/gid
425 # if they are BOTH -1, don't bother to lchown
426 if not (uid == -1 and gid == -1):
427 #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
428 os.lchown(path, uid, gid)
429
430 # Return a space-separated string of configuration files, based on either
431 # the default files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES;
432 # paths are resolved via BBPATH
433 def get_fs_perms_list(d):
434 str = ""
435 bbpath = d.getVar('BBPATH')
436 fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES') or ""
437 for conf_file in fs_perms_tables.split():
438 confpath = bb.utils.which(bbpath, conf_file)
439 if confpath:
440 str += " %s" % confpath
441 else:
442 bb.warn("cannot find %s specified in FILESYSTEM_PERMS_TABLES" % conf_file)
443 return str
444
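 # A minimal sketch of wiring in an extra table (file name illustrative):
 # a layer sets FILESYSTEM_PERMS_TABLES += "files/fs-perms-extra.txt" in its
 # configuration, and get_fs_perms_list() resolves each entry against BBPATH
 # via bb.utils.which(), returning the matching absolute paths space-separated.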
445 fs_perms_table = {}
446 fs_link_table = {}
447
448 # By default all of the standard directories specified in
449 # bitbake.conf will get 0755 root:root.
450 target_path_vars = [ 'base_prefix',
451 'prefix',
452 'exec_prefix',
453 'base_bindir',
454 'base_sbindir',
455 'base_libdir',
456 'datadir',
457 'sysconfdir',
458 'servicedir',
459 'sharedstatedir',
460 'localstatedir',
461 'infodir',
462 'mandir',
463 'docdir',
464 'bindir',
465 'sbindir',
466 'libexecdir',
467 'libdir',
468 'includedir' ]
469
470 for path in target_path_vars:
471 dir = d.getVar(path) or ""
472 if dir == "":
473 continue
474 fs_perms_table[dir] = fs_perms_entry(d.expand("%s 0755 root root false - - -" % (dir)))
475
476 # Now we actually load from the configuration files
477 for conf in get_fs_perms_list(d).split():
478 if not os.path.exists(conf):
479 continue
480 with open(conf) as f:
481 for line in f:
482 if line.startswith('#'):
483 continue
484 lsplit = line.split()
485 if len(lsplit) == 0:
486 continue
487 if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
488 msg = "Fixup perms: %s invalid line: %s" % (conf, line)
489 oe.qa.handle_error("perm-line", msg, d)
490 continue
491 entry = fs_perms_entry(d.expand(line))
492 if entry and entry.path:
493 if entry.link:
494 fs_link_table[entry.path] = entry
495 if entry.path in fs_perms_table:
496 fs_perms_table.pop(entry.path)
497 else:
498 fs_perms_table[entry.path] = entry
499 if entry.path in fs_link_table:
500 fs_link_table.pop(entry.path)
501
502 # Debug -- list out in-memory table
503 #for dir in fs_perms_table:
504 # bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))
505 #for link in fs_link_table:
506 # bb.note("Fixup Perms: %s: %s" % (link, str(fs_link_table[link])))
507
508 # We process links first, so we can go back and fixup directory ownership
509 # for any newly created directories
510 # Process in sorted order so /run gets created before /run/lock, etc.
511 for entry in sorted(fs_link_table.values(), key=lambda x: x.link):
512 link = entry.link
513 dir = entry.path
514 origin = dvar + dir
515 if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)):
516 continue
517
518 if link[0] == "/":
519 target = dvar + link
520 ptarget = link
521 else:
522 target = os.path.join(os.path.dirname(origin), link)
523 ptarget = os.path.join(os.path.dirname(dir), link)
524 if os.path.exists(target):
525 msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)
526 oe.qa.handle_error("perm-link", msg, d)
527 continue
528
529 # Create the path to move the directory to, move it, and then set up the symlink
530 bb.utils.mkdirhier(os.path.dirname(target))
531 #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
532 bb.utils.rename(origin, target)
533 #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
534 os.symlink(link, origin)
535
536 for dir in fs_perms_table:
537 origin = dvar + dir
538 if not (cpath.exists(origin) and cpath.isdir(origin)):
539 continue
540
541 fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
542
543 if fs_perms_table[dir].walk == 'true':
544 for root, dirs, files in os.walk(origin):
545 for dr in dirs:
546 each_dir = os.path.join(root, dr)
547 fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
548 for f in files:
549 each_file = os.path.join(root, f)
550 fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)
551
552# Get a list of files from file vars by searching for files under the current working directory.
553# The list contains symlinks, directories and normal files.
554def files_from_filevars(filevars):
555 cpath = oe.cachedpath.CachedPath()
556 files = []
557 for f in filevars:
558 if os.path.isabs(f):
559 f = '.' + f
560 if not f.startswith("./"):
561 f = './' + f
562 globbed = glob.glob(f, recursive=True)
563 if globbed:
564 if [ f ] != globbed:
565 files += globbed
566 continue
567 files.append(f)
568
569 symlink_paths = []
570 for ind, f in enumerate(files):
571 # Handle directory symlinks. Truncate path to the lowest level symlink
572 parent = ''
573 for dirname in f.split('/')[:-1]:
574 parent = os.path.join(parent, dirname)
575 if dirname == '.':
576 continue
577 if cpath.islink(parent):
578 bb.warn("FILES contains file '%s' which resides under a "
579 "directory symlink. Please fix the recipe and use the "
580 "real path for the file." % f[1:])
581 symlink_paths.append(f)
582 files[ind] = parent
583 f = parent
584 break
585
586 if not cpath.islink(f):
587 if cpath.isdir(f):
588 newfiles = [ os.path.join(f,x) for x in os.listdir(f) ]
589 if newfiles:
590 files += newfiles
591
592 return files, symlink_paths
593
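# A minimal usage sketch, assuming the current directory is PKGD and the
# paths below exist (names illustrative):
#
# os.chdir(d.getVar('PKGD'))
# files, symlink_paths = files_from_filevars(["/usr/bin", "/etc/foo.conf"])
# # absolute paths are rewritten relative to ".", and directories are
# # expanded to include their direct children, e.g. "./usr/bin", "./usr/bin/foo"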
594# Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files
595def get_conffiles(pkg, d):
596 pkgdest = d.getVar('PKGDEST')
597 root = os.path.join(pkgdest, pkg)
598 cwd = os.getcwd()
599 os.chdir(root)
600
601 conffiles = d.getVar('CONFFILES:%s' % pkg)
602 if conffiles is None:
603 conffiles = d.getVar('CONFFILES')
604 if conffiles is None:
605 conffiles = ""
606 conffiles = conffiles.split()
607 conf_orig_list = files_from_filevars(conffiles)[0]
608
609 # Remove links and directories from conf_orig_list to get conf_list which only contains normal files
610 conf_list = []
611 for f in conf_orig_list:
612 if os.path.isdir(f):
613 continue
614 if os.path.islink(f):
615 continue
616 if not os.path.exists(f):
617 continue
618 conf_list.append(f)
619
620 # Remove the leading '.' so entries become absolute paths
621 for i in range(0, len(conf_list)):
622 conf_list[i] = conf_list[i][1:]
623
624 os.chdir(cwd)
625 return sorted(conf_list)
626
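# Typical recipe-side setup consumed by get_conffiles() (values illustrative):
#
# CONFFILES:${PN} = "${sysconfdir}/myapp.conf"
#
# get_conffiles("myapp", d) would then return ["/etc/myapp.conf"], provided
# the file exists under PKGDEST/myapp and is a regular file.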
627def legitimize_package_name(s):
628 """
629 Make sure package names are legitimate strings
630 """
631
632 def fixutf(m):
633 cp = m.group(1)
634 if cp:
635 return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape')
636
637 # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
638 s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', fixutf, s)
639
640 # Remaining package name validity fixes
641 return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')
642
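# Examples following directly from the substitutions above (inputs illustrative):
#
# legitimize_package_name("Foo_Bar@1,2/x") == "foo-bar+1+2-x"
# legitimize_package_name("en_GB") == "en-gb"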
643def split_locales(d):
644 cpath = oe.cachedpath.CachedPath()
645 if (d.getVar('PACKAGE_NO_LOCALE') == '1'):
646 bb.debug(1, "package requested not splitting locales")
647 return
648
649 packages = (d.getVar('PACKAGES') or "").split()
650
651 dvar = d.getVar('PKGD')
652 pn = d.getVar('LOCALEBASEPN')
653
654 try:
655 locale_index = packages.index(pn + '-locale')
656 packages.pop(locale_index)
657 except ValueError:
658 locale_index = len(packages)
659
660 lic = d.getVar("LICENSE:" + pn + "-locale")
661
662 localepaths = []
663 locales = set()
664 for localepath in (d.getVar('LOCALE_PATHS') or "").split():
665 localedir = dvar + localepath
666 if not cpath.isdir(localedir):
667 bb.debug(1, 'No locale files in %s' % localepath)
668 continue
669
670 localepaths.append(localepath)
671 with os.scandir(localedir) as it:
672 for entry in it:
673 if entry.is_dir():
674 locales.add(entry.name)
675
676 if len(locales) == 0:
677 bb.debug(1, "No locale files in this package")
678 return
679
680 summary = d.getVar('SUMMARY') or pn
681 description = d.getVar('DESCRIPTION') or ""
682 locale_section = d.getVar('LOCALE_SECTION')
683 mlprefix = d.getVar('MLPREFIX') or ""
684 for l in sorted(locales):
685 ln = legitimize_package_name(l)
686 pkg = pn + '-locale-' + ln
687 packages.insert(locale_index, pkg)
688 locale_index += 1
689 files = []
690 for localepath in localepaths:
691 files.append(os.path.join(localepath, l))
692 d.setVar('FILES:' + pkg, " ".join(files))
693 d.setVar('RRECOMMENDS:' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
694 d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
695 d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l))
696 d.setVar('DESCRIPTION:' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l))
697 if lic:
698 d.setVar('LICENSE:' + pkg, lic)
699 if locale_section:
700 d.setVar('SECTION:' + pkg, locale_section)
701
702 d.setVar('PACKAGES', ' '.join(packages))
703
704 # Disabled by RP 18/06/07
705 # Wildcards aren't supported in debian
706 # They break with ipkg since glibc-locale* will mean that
707 # glibc-localedata-translit* won't install as a dependency
708 # for some other package which breaks meta-toolchain
709 # Probably breaks since virtual-locale- isn't provided anywhere
710 #rdep = (d.getVar('RDEPENDS:%s' % pn) or "").split()
711 #rdep.append('%s-locale*' % pn)
712 #d.setVar('RDEPENDS:%s' % pn, ' '.join(rdep))
713
714def package_debug_vars(d):
715 # We default to '.debug' style
716 if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory':
717 # Single debug-file-directory style debug info
718 debug_vars = {
719 "append": ".debug",
720 "staticappend": "",
721 "dir": "",
722 "staticdir": "",
723 "libdir": "/usr/lib/debug",
724 "staticlibdir": "/usr/lib/debug-static",
725 "srcdir": "/usr/src/debug",
726 }
727 elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-without-src':
728 # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
729 debug_vars = {
730 "append": "",
731 "staticappend": "",
732 "dir": "/.debug",
733 "staticdir": "/.debug-static",
734 "libdir": "",
735 "staticlibdir": "",
736 "srcdir": "",
737 }
738 elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg':
739 debug_vars = {
740 "append": "",
741 "staticappend": "",
742 "dir": "/.debug",
743 "staticdir": "/.debug-static",
744 "libdir": "",
745 "staticlibdir": "",
746 "srcdir": "/usr/src/debug",
747 }
748 else:
749 # Original OE-core, a.k.a. ".debug", style debug info
750 debug_vars = {
751 "append": "",
752 "staticappend": "",
753 "dir": "/.debug",
754 "staticdir": "/.debug-static",
755 "libdir": "",
756 "staticlibdir": "",
757 "srcdir": "/usr/src/debug",
758 }
759
760 return debug_vars
761
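# A small sketch of how the returned mapping builds debug paths, following the
# dest computation in splitdebuginfo() below (paths illustrative):
#
# dv = package_debug_vars(d)
# # default style: "" + "/usr/bin" + "/.debug" + "/foo" + "" -> /usr/bin/.debug/foo
# # debug-file-directory: "/usr/lib/debug" + "/usr/bin" + "" + "/foo" + ".debug"
# # -> /usr/lib/debug/usr/bin/foo.debug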
762
763def parse_debugsources_from_dwarfsrcfiles_output(dwarfsrcfiles_output):
764 debugfiles = {}
765
766 for line in dwarfsrcfiles_output.splitlines():
767 if line.startswith("\t"):
768 debugfiles[os.path.normpath(line.split()[0])] = ""
769
770 return debugfiles.keys()
771
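# A minimal sketch of the output shape this parser expects (paths illustrative):
#
# sample = "/usr/bin/foo\n\t../src/foo.c\n\t../src/util.c\n"
# list(parse_debugsources_from_dwarfsrcfiles_output(sample))
# # -> ['../src/foo.c', '../src/util.c']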
772def source_info(file, d, fatal=True):
773 cmd = ["dwarfsrcfiles", file]
774 try:
775 output = subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.STDOUT)
776 retval = 0
777 except subprocess.CalledProcessError as exc:
778 output = exc.output
779 retval = exc.returncode
780
781 # 255 means a specific file wasn't fully parsed to get the debug file list, which is not a fatal failure
782 if retval != 0 and retval != 255:
783 msg = "dwarfsrcfiles failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else "")
784 if fatal:
785 bb.fatal(msg)
786 bb.note(msg)
787
788 debugsources = parse_debugsources_from_dwarfsrcfiles_output(output)
789
790 return list(debugsources)
791
792def splitdebuginfo(file, dvar, dv, d):
793 # Function to split a single file into two components: one is the stripped
794 # target system binary, the other contains any debugging information. The
795 # two files are linked to reference each other.
796 #
797 # return a mapping of files:debugsources
798
799 src = file[len(dvar):]
800 dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
801 debugfile = dvar + dest
802 sources = []
803
804 if file.endswith(".ko") and file.find("/lib/modules/") != -1:
805 if oe.package.is_kernel_module_signed(file):
806 bb.debug(1, "Skip strip on signed module %s" % file)
807 return (file, sources)
808
809 # Split the file...
810 bb.utils.mkdirhier(os.path.dirname(debugfile))
811 #bb.note("Split %s -> %s" % (file, debugfile))
812 # Only store off the hard link reference if we successfully split!
813
814 dvar = d.getVar('PKGD')
815 objcopy = d.getVar("OBJCOPY")
816
817 newmode = None
818 if not os.access(file, os.W_OK) or not os.access(file, os.R_OK):
819 origmode = os.stat(file)[stat.ST_MODE]
820 newmode = origmode | stat.S_IWRITE | stat.S_IREAD
821 os.chmod(file, newmode)
822
823 # We need to extract the debug src information here...
824 if dv["srcdir"]:
825 sources = source_info(file, d)
826
827 bb.utils.mkdirhier(os.path.dirname(debugfile))
828
829 subprocess.check_output([objcopy, '--only-keep-debug', file, debugfile], stderr=subprocess.STDOUT)
830
831 # Set the debuglink to have the view of the file path on the target
832 subprocess.check_output([objcopy, '--add-gnu-debuglink', debugfile, file], stderr=subprocess.STDOUT)
833
834 if newmode:
835 os.chmod(file, origmode)
836
837 return (file, sources)
838
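# The split above is the standard objcopy two-step; a rough shell equivalent
# for the default ".debug" style (paths illustrative, actual stripping happens
# later in process_split_and_strip_files):
#
# objcopy --only-keep-debug usr/bin/foo usr/bin/.debug/foo
# objcopy --add-gnu-debuglink=usr/bin/.debug/foo usr/bin/foo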
839def splitstaticdebuginfo(file, dvar, dv, d):
840 # Unlike the function above, there is no way to split a static library
841 # into two components. So to get similar results we will copy the unmodified
842 # static library (containing the debug symbols) into a new directory.
843 # We will then strip (preserving symbols) the static library in the
844 # typical location.
845 #
846 # return a mapping of files:debugsources
847
848 src = file[len(dvar):]
849 dest = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(src) + dv["staticappend"]
850 debugfile = dvar + dest
851 sources = []
852
853 # Copy the file...
854 bb.utils.mkdirhier(os.path.dirname(debugfile))
855 #bb.note("Copy %s -> %s" % (file, debugfile))
856
857 dvar = d.getVar('PKGD')
858
859 newmode = None
860 if not os.access(file, os.W_OK) or not os.access(file, os.R_OK):
861 origmode = os.stat(file)[stat.ST_MODE]
862 newmode = origmode | stat.S_IWRITE | stat.S_IREAD
863 os.chmod(file, newmode)
864
865 # We need to extract the debug src information here...
866 if dv["srcdir"]:
867 sources = source_info(file, d)
868
869 bb.utils.mkdirhier(os.path.dirname(debugfile))
870
871 # Copy the unmodified item to the debug directory
872 shutil.copy2(file, debugfile)
873
874 if newmode:
875 os.chmod(file, origmode)
876
877 return (file, sources)
878
879def inject_minidebuginfo(file, dvar, dv, d):
880 # Extract just the symbols from debuginfo into minidebuginfo,
881 # compress it with xz and inject it back into the binary in a .gnu_debugdata section.
882 # https://sourceware.org/gdb/onlinedocs/gdb/MiniDebugInfo.html
883
884 readelf = d.getVar('READELF')
885 nm = d.getVar('NM')
886 objcopy = d.getVar('OBJCOPY')
887
888 minidebuginfodir = d.expand('${WORKDIR}/minidebuginfo')
889
890 src = file[len(dvar):]
891 dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
892 debugfile = dvar + dest
893 minidebugfile = minidebuginfodir + src + '.minidebug'
894 bb.utils.mkdirhier(os.path.dirname(minidebugfile))
895
896 # If we didn't produce debuginfo for any reason, we can't produce minidebuginfo either
897 # so skip it.
898 if not os.path.exists(debugfile):
899 bb.debug(1, 'ELF file {} has no debuginfo, skipping minidebuginfo injection'.format(file))
900 return
901
902 # minidebuginfo does not make sense to apply to ELF objects other than
903 # executables and shared libraries, so skip applying the minidebuginfo
904 # generation for objects like kernel modules.
905 for line in subprocess.check_output([readelf, '-h', debugfile], universal_newlines=True).splitlines():
906 if not line.strip().startswith("Type:"):
907 continue
908 elftype = line.split(":")[1].strip()
909 if not any(elftype.startswith(i) for i in ["EXEC", "DYN"]):
910 bb.debug(1, 'ELF file {} is not executable/shared, skipping minidebuginfo injection'.format(file))
911 return
912 break
913
914 # Find non-allocated PROGBITS, NOTE, and NOBITS sections in the debuginfo.
915 # We will exclude all of these from minidebuginfo to save space.
916 remove_section_names = []
917 for line in subprocess.check_output([readelf, '-W', '-S', debugfile], universal_newlines=True).splitlines():
918 # strip the leading " [ 1]" section index to allow splitting on space
919 if ']' not in line:
920 continue
921 fields = line[line.index(']') + 1:].split()
922 if len(fields) < 7:
923 continue
924 name = fields[0]
925 type = fields[1]
926 flags = fields[6]
927 # .debug_ sections will be removed by objcopy -S so no need to explicitly remove them
928 if name.startswith('.debug_'):
929 continue
930 if 'A' not in flags and type in ['PROGBITS', 'NOTE', 'NOBITS']:
931 remove_section_names.append(name)
932
933 # List dynamic symbols in the binary. We can exclude these from minidebuginfo
934 # because they are always present in the binary.
935 dynsyms = set()
936 for line in subprocess.check_output([nm, '-D', file, '--format=posix', '--defined-only'], universal_newlines=True).splitlines():
937 dynsyms.add(line.split()[0])
938
939 # Find all function symbols from debuginfo which aren't in the dynamic symbols table.
940 # These are the ones we want to keep in minidebuginfo.
941 keep_symbols_file = minidebugfile + '.symlist'
942 found_any_symbols = False
943 with open(keep_symbols_file, 'w') as f:
944 for line in subprocess.check_output([nm, debugfile, '--format=sysv', '--defined-only'], universal_newlines=True).splitlines():
945 fields = line.split('|')
946 if len(fields) < 7:
947 continue
948 name = fields[0].strip()
949 type = fields[3].strip()
950 if type == 'FUNC' and name not in dynsyms:
951 f.write('{}\n'.format(name))
952 found_any_symbols = True
953
954 if not found_any_symbols:
955 bb.debug(1, 'ELF file {} contains no symbols, skipping minidebuginfo injection'.format(file))
956 return
957
958 bb.utils.remove(minidebugfile)
959 bb.utils.remove(minidebugfile + '.xz')
960
961 subprocess.check_call([objcopy, '-S'] +
962 ['--remove-section={}'.format(s) for s in remove_section_names] +
963 ['--keep-symbols={}'.format(keep_symbols_file), debugfile, minidebugfile])
964
965 subprocess.check_call(['xz', '--keep', minidebugfile])
966
967 subprocess.check_call([objcopy, '--add-section', '.gnu_debugdata={}.xz'.format(minidebugfile), file])
968
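# Rough shell outline of the pipeline above (section and file names illustrative):
#
# objcopy -S --remove-section=<non-alloc section> --keep-symbols=foo.symlist \
#     foo.debug foo.minidebug
# xz --keep foo.minidebug
# objcopy --add-section .gnu_debugdata=foo.minidebug.xz usr/bin/foo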
969def copydebugsources(debugsrcdir, sources, d):
970 # The debug src information written out to sourcefile is further processed
971 # and copied to the destination here.
972
973 cpath = oe.cachedpath.CachedPath()
974
975 if debugsrcdir and sources:
976 sourcefile = d.expand("${WORKDIR}/debugsources.list")
977 bb.utils.remove(sourcefile)
978
979 # filenames are null-separated - this is an artefact of the previous use
980 # of rpm's debugedit, which was writing them out that way, and the code elsewhere
981 # is still assuming that.
982 debuglistoutput = '\0'.join(sources) + '\0'
983 with open(sourcefile, 'a') as sf:
984 sf.write(debuglistoutput)
985
986 dvar = d.getVar('PKGD')
987 strip = d.getVar("STRIP")
988 objcopy = d.getVar("OBJCOPY")
989 workdir = d.getVar("WORKDIR")
990 sdir = d.getVar("S")
991 cflags = d.expand("${CFLAGS}")
992
993 prefixmap = {}
994 for flag in cflags.split():
995 if not flag.startswith("-ffile-prefix-map"):
996 continue
997 if "recipe-sysroot" in flag:
998 continue
999 flag = flag.split("=")
1000 prefixmap[flag[1]] = flag[2]
1001
1002 nosuchdir = []
1003 basepath = dvar
1004 for p in debugsrcdir.split("/"):
1005 basepath = basepath + "/" + p
1006 if not cpath.exists(basepath):
1007 nosuchdir.append(basepath)
1008 bb.utils.mkdirhier(basepath)
1009 cpath.updatecache(basepath)
1010
1011 for pmap in prefixmap:
1012 # Ignore files from the recipe sysroots (target and native)
1013 cmd = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | " % sourcefile
1014 # We need to ignore files that are not actually ours
1015 # we do this by only paying attention to items from this package
1016 cmd += "fgrep -zw '%s' | " % prefixmap[pmap]
1017 # Remove prefix in the source paths
1018 cmd += "sed 's#%s/##g' | " % (prefixmap[pmap])
1019 cmd += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)" % (pmap, dvar, prefixmap[pmap])
1020
1021 try:
1022 subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
1023 except subprocess.CalledProcessError:
1024 # Can "fail" if internal headers/transient sources are attempted
1025 pass
1026 # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
1027 # Work around this by manually finding and copying any symbolic links that made it through.
1028 cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s')" % \
1029 (dvar, prefixmap[pmap], dvar, prefixmap[pmap], pmap, dvar, prefixmap[pmap])
1030 subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
1031
1032 # debugsources.list may be polluted from the host if we used externalsrc;
1033 # cpio uses copy-pass and may have just created a directory structure
1034 # matching the one from the host. If that's the case, move those files to
1035 # debugsrcdir to avoid host contamination.
1036 # Empty dir structure will be deleted in the next step.
1037
1038 # Same check as above for externalsrc
1039 if workdir not in sdir:
1040 if os.path.exists(dvar + debugsrcdir + sdir):
1041 cmd = "mv %s%s%s/* %s%s" % (dvar, debugsrcdir, sdir, dvar,debugsrcdir)
1042 subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
1043
1044 # The copy by cpio may have resulted in some empty directories! Remove these
1045 cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
1046 subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
1047
1048 # Also remove debugsrcdir if it's empty
1049 for p in nosuchdir[::-1]:
1050 if os.path.exists(p) and not os.listdir(p):
1051 os.rmdir(p)
1052
1053@bb.parse.vardepsexclude("BB_NUMBER_THREADS")
1054def save_debugsources_info(debugsrcdir, sources_raw, d):
1055 import json
1056 import bb.compress.zstd
1057 if debugsrcdir and sources_raw:
1058 debugsources_file = d.expand("${PKGDESTWORK}/debugsources/${PN}-debugsources.json.zstd")
1059 debugsources_dir = os.path.dirname(debugsources_file)
1060 if not os.path.isdir(debugsources_dir):
1061 bb.utils.mkdirhier(debugsources_dir)
1062 bb.utils.remove(debugsources_file)
1063
1064 workdir = d.getVar("WORKDIR")
1065 pn = d.getVar('PN')
1066
1067 # Kernel sources are in a different directory and are a special case:
1068 # we format the sources as expected by SPDX by replacing /usr/src/kernel/
1069 # with BP/
1070 kernel_src = d.getVar('KERNEL_SRC_PATH')
1071 bp = d.getVar('BP')
1072 sources_dict = {}
1073 for file, src_files in sources_raw:
1074 file_clean = file.replace(f"{workdir}/package/","")
1075 sources_clean = [
1076 src.replace(f"{debugsrcdir}/{pn}/", "")
1077 if not kernel_src else src.replace(f"{kernel_src}/", f"{bp}/")
1078 for src in src_files
1079 if not any(keyword in src for keyword in ("<internal>", "<built-in>")) and not src.endswith("/")
1080 ]
1081 sources_dict[file_clean] = sorted(sources_clean)
1082 num_threads = int(d.getVar("BB_NUMBER_THREADS"))
1083 with bb.compress.zstd.open(debugsources_file, "wt", encoding="utf-8", num_threads=num_threads) as f:
1084 json.dump(sources_dict, f, sort_keys=True)
1085
1086@bb.parse.vardepsexclude("BB_NUMBER_THREADS")
1087def read_debugsources_info(d):
1088 import json
1089 import bb.compress.zstd
1090 try:
1091 fn = d.expand("${PKGDESTWORK}/debugsources/${PN}-debugsources.json.zstd")
1092 num_threads = int(d.getVar("BB_NUMBER_THREADS"))
1093 with bb.compress.zstd.open(fn, "rt", encoding="utf-8", num_threads=num_threads) as f:
1094 return json.load(f)
1095 except FileNotFoundError:
1096 bb.debug(1, f"File not found: {fn}")
1097 return None
1098
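# A minimal consumer sketch (logging only, names illustrative):
#
# srcs = read_debugsources_info(d)
# for binary, files in (srcs or {}).items():
#     bb.debug(2, "%s was built from %d source files" % (binary, len(files)))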
1099def process_split_and_strip_files(d):
1100 cpath = oe.cachedpath.CachedPath()
1101
1102 dvar = d.getVar('PKGD')
1103 pn = d.getVar('PN')
1104 hostos = d.getVar('HOST_OS')
1105
1106 oldcwd = os.getcwd()
1107 os.chdir(dvar)
1108
1109 dv = package_debug_vars(d)
1110
1111 #
1112 # First let's figure out all of the files we may have to process ... do this only once!
1113 #
1114 elffiles = {}
1115 symlinks = {}
1116 staticlibs = []
1117 inodes = {}
1118 libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
1119 baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir"))
1120 skipfiles = (d.getVar("INHIBIT_PACKAGE_STRIP_FILES") or "").split()
1121 if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \
1122 d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
1123 checkelf = {}
1124 checkelflinks = {}
1125 checkstatic = {}
1126 for root, dirs, files in cpath.walk(dvar):
1127 for f in files:
1128 file = os.path.join(root, f)
1129
1130 # Skip debug files
1131 if dv["append"] and file.endswith(dv["append"]):
1132 continue
1133 if dv["dir"] and dv["dir"] in os.path.dirname(file[len(dvar):]):
1134 continue
1135
1136 if file in skipfiles:
1137 continue
1138
1139 try:
1140 ltarget = cpath.realpath(file, dvar, False)
1141 s = cpath.lstat(ltarget)
1142 except OSError as e:
1143 (err, strerror) = e.args
1144 if err != errno.ENOENT:
1145 raise
1146 # Skip broken symlinks
1147 continue
1148 if not s:
1149 continue
1150
1151 if oe.package.is_static_lib(file):
1152 # Use a reference of device ID and inode number to identify files
1153 file_reference = "%d_%d" % (s.st_dev, s.st_ino)
1154 checkstatic[file] = (file, file_reference)
1155 continue
1156
1157 # Check it's an executable
1158 if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \
1159 or (s[stat.ST_MODE] & stat.S_IXOTH) \
1160 or ((file.startswith(libdir) or file.startswith(baselibdir)) \
1161 and (".so" in f or ".node" in f)) \
1162 or (f.startswith('vmlinux') or ".ko" in f):
1163
1164 if cpath.islink(file):
1165 checkelflinks[file] = ltarget
1166 continue
1167 # Use a reference of device ID and inode number to identify files
1168 file_reference = "%d_%d" % (s.st_dev, s.st_ino)
1169 checkelf[file] = (file, file_reference)
1170
1171 results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelflinks.values(), d)
1172 results_map = {}
1173 for (ltarget, elf_file) in results:
1174 results_map[ltarget] = elf_file
1175 for file in checkelflinks:
1176 ltarget = checkelflinks[file]
1177 # If it's a symlink, and points to an ELF file, we capture the readlink target
1178 if results_map[ltarget]:
1179 target = os.readlink(file)
1180 #bb.note("Sym: %s (%d)" % (ltarget, results_map[ltarget]))
1181 symlinks[file] = target
1182
1183 results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelf.keys(), d)
1184
1185 # Sort results by file path. This ensures that the files are always
1186 # processed in the same order, which is important to make sure builds
1187 # are reproducible when dealing with hardlinks
1188 results.sort(key=lambda x: x[0])
1189
1190 for (file, elf_file) in results:
1191 # It's a file (or hardlink), not a link
1192 # ...but is it ELF, and is it already stripped?
1193 if elf_file & 1:
1194 if elf_file & 2:
1195 if 'already-stripped' in (d.getVar('INSANE_SKIP:' + pn) or "").split():
1196 bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
1197 else:
1198 msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
1199 oe.qa.handle_error("already-stripped", msg, d)
1200 continue
1201
1202 # At this point we have an unstripped elf file. We need to:
1203 # a) Make sure any file we strip is not hardlinked to anything else outside this tree
1204 # b) Only strip any hardlinked file once (no races)
1205 # c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks
1206
1207 # Use a reference of device ID and inode number to identify files
1208 file_reference = checkelf[file][1]
1209 if file_reference in inodes:
1210 os.unlink(file)
1211 os.link(inodes[file_reference][0], file)
1212 inodes[file_reference].append(file)
1213 else:
1214 inodes[file_reference] = [file]
1215 # break hardlink
1216 bb.utils.break_hardlinks(file)
1217 elffiles[file] = elf_file
1218 # Modified the file so clear the cache
1219 cpath.updatecache(file)
1220
1221 # Do the same hardlink processing as above, but for static libraries
1222 results = list(checkstatic.keys())
1223
1224 # As above, sort the results.
1225 results.sort(key=lambda x: x[0])
1226
1227 for file in results:
1228 # Use a reference of device ID and inode number to identify files
1229 file_reference = checkstatic[file][1]
1230 if file_reference in inodes:
1231 os.unlink(file)
1232 os.link(inodes[file_reference][0], file)
1233 inodes[file_reference].append(file)
1234 else:
1235 inodes[file_reference] = [file]
1236 # break hardlink
1237 bb.utils.break_hardlinks(file)
1238 staticlibs.append(file)
1239 # Modified the file so clear the cache
1240 cpath.updatecache(file)
1241
1242 def strip_pkgd_prefix(f):
1243 nonlocal dvar
1244
1245 if f.startswith(dvar):
1246 return f[len(dvar):]
1247
1248 return f
1249
1250 #
1251 # First let's process debug splitting
1252 #
1253 if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
1254 results = oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, dv, d))
1255
1256 if dv["srcdir"] and not hostos.startswith("mingw"):
1257 if (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
1258 results = oe.utils.multiprocess_launch(splitstaticdebuginfo, staticlibs, d, extraargs=(dvar, dv, d))
1259 else:
1260 for file in staticlibs:
1261 results.append( (file,source_info(file, d)) )
1262
1263 d.setVar("PKGDEBUGSOURCES", {strip_pkgd_prefix(f): sorted(s) for f, s in results})
1264
1265 sources = set()
1266 for r in results:
1267 sources.update(r[1])
1268
1269 # Hardlink our debug symbols to the other hardlink copies
1270 for ref in inodes:
1271 if len(inodes[ref]) == 1:
1272 continue
1273
1274 target = inodes[ref][0][len(dvar):]
1275 for file in inodes[ref][1:]:
1276 src = file[len(dvar):]
1277 dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
1278 fpath = dvar + dest
1279 ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
1280 if os.access(ftarget, os.R_OK):
1281 bb.utils.mkdirhier(os.path.dirname(fpath))
1282 # Only one hardlink of separated debug info file in each directory
1283 if not os.access(fpath, os.R_OK):
1284 #bb.note("Link %s -> %s" % (fpath, ftarget))
1285 os.link(ftarget, fpath)
1286 elif (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
1287 deststatic = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(file) + dv["staticappend"]
1288 fpath = dvar + deststatic
1289 ftarget = dvar + dv["staticlibdir"] + os.path.dirname(target) + dv["staticdir"] + "/" + os.path.basename(target) + dv["staticappend"]
1290 if os.access(ftarget, os.R_OK):
1291 bb.utils.mkdirhier(os.path.dirname(fpath))
1292 # Only one hardlink of separated debug info file in each directory
1293 if not os.access(fpath, os.R_OK):
1294 #bb.note("Link %s -> %s" % (fpath, ftarget))
1295 os.link(ftarget, fpath)
1296 else:
1297 bb.note("Unable to find inode link target %s" % (target))
1298
1299 # Create symlinks for all cases we were able to split symbols
1300 for file in symlinks:
1301 src = file[len(dvar):]
1302 dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
1303 fpath = dvar + dest
1304 # Skip it if the target doesn't exist
1305 try:
1306 s = os.stat(fpath)
1307 except OSError as e:
1308 (err, strerror) = e.args
1309 if err != errno.ENOENT:
1310 raise
1311 continue
1312
1313 ltarget = symlinks[file]
1314 lpath = os.path.dirname(ltarget)
1315 lbase = os.path.basename(ltarget)
1316 ftarget = ""
1317 if lpath and lpath != ".":
1318 ftarget += lpath + dv["dir"] + "/"
1319 ftarget += lbase + dv["append"]
1320 if lpath.startswith(".."):
1321 ftarget = os.path.join("..", ftarget)
1322 bb.utils.mkdirhier(os.path.dirname(fpath))
1323 #bb.note("Symlink %s -> %s" % (fpath, ftarget))
1324 os.symlink(ftarget, fpath)
1325
1326 # Process the dv["srcdir"] if requested...
1327 # This copies and places the referenced sources for later debugging...
1328 copydebugsources(dv["srcdir"], sources, d)
1329
1330 # Save source info to be accessible to other tasks
1331 save_debugsources_info(dv["srcdir"], results, d)
1332 #
1333 # End of debug splitting
1334 #
1335
1336 #
1337 # Now let's go back over things and strip them
1338 #
1339 if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'):
1340 strip = d.getVar("STRIP")
1341 sfiles = []
1342 for file in elffiles:
1343 elf_file = int(elffiles[file])
1344 #bb.note("Strip %s" % file)
1345 sfiles.append((file, elf_file, strip))
1346 if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
1347 for f in staticlibs:
1348 sfiles.append((f, 16, strip))
1349
1350 oe.utils.multiprocess_launch(oe.package.runstrip, sfiles, d)
1351
1352 # Build "minidebuginfo" and reinject it back into the stripped binaries
1353 if bb.utils.contains('DISTRO_FEATURES', 'minidebuginfo', True, False, d):
1354 oe.utils.multiprocess_launch(inject_minidebuginfo, list(elffiles), d,
1355 extraargs=(dvar, dv, d))
1356
1357 #
1358 # End of strip
1359 #
1360 os.chdir(oldcwd)
1361
1362
1363def populate_packages(d):
1364 cpath = oe.cachedpath.CachedPath()
1365
1366 workdir = d.getVar('WORKDIR')
1367 outdir = d.getVar('DEPLOY_DIR')
1368 dvar = d.getVar('PKGD')
1369 packages = d.getVar('PACKAGES').split()
1370 pn = d.getVar('PN')
1371
1372 bb.utils.mkdirhier(outdir)
1373 os.chdir(dvar)
1374
1375 autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False)
1376
1377 split_source_package = (d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg')
1378
1379 # If debug-with-srcpkg mode is enabled then add the source package if it
1380 # doesn't exist and add the source file contents to the source package.
1381 if split_source_package:
1382 src_package_name = ('%s-src' % d.getVar('PN'))
1383 if not src_package_name in packages:
1384 packages.append(src_package_name)
1385 d.setVar('FILES:%s' % src_package_name, '/usr/src/debug')
1386
1387 # Sanity check PACKAGES for duplicates
1388 # This check should be moved to sanity.bbclass once we have the infrastructure
1389 package_dict = {}
1390
1391 for i, pkg in enumerate(packages):
1392 if pkg in package_dict:
1393 msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
1394 oe.qa.handle_error("packages-list", msg, d)
1395 # Ensure the source package gets the chance to pick up the source files
1396 # before the debug package by ordering it first in PACKAGES. Whether it
1397 # actually picks up any source files is controlled by
1398 # PACKAGE_DEBUG_SPLIT_STYLE.
1399 elif pkg.endswith("-src"):
1400 package_dict[pkg] = (10, i)
1401 elif autodebug and pkg.endswith("-dbg"):
1402 package_dict[pkg] = (30, i)
1403 else:
1404 package_dict[pkg] = (50, i)
1405 packages = sorted(package_dict.keys(), key=package_dict.get)
1406 d.setVar('PACKAGES', ' '.join(packages))
1407 pkgdest = d.getVar('PKGDEST')
1408
1409 seen = []
1410
1411 # os.mkdir masks the permissions with umask so we have to unset it first
1412 oldumask = os.umask(0)
1413
1414 debug = []
1415 for root, dirs, files in cpath.walk(dvar):
1416 dir = root[len(dvar):]
1417 if not dir:
1418 dir = os.sep
1419 for f in (files + dirs):
1420 path = "." + os.path.join(dir, f)
1421 if "/.debug/" in path or "/.debug-static/" in path or path.endswith("/.debug"):
1422 debug.append(path)
1423
1424 for pkg in packages:
1425 root = os.path.join(pkgdest, pkg)
1426 bb.utils.mkdirhier(root)
1427
1428 filesvar = d.getVar('FILES:%s' % pkg) or ""
1429 if "//" in filesvar:
1430 msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
1431 oe.qa.handle_error("files-invalid", msg, d)
1432 filesvar = filesvar.replace("//", "/")
1433
1434 origfiles = filesvar.split()
1435 files, symlink_paths = oe.package.files_from_filevars(origfiles)
1436
1437 if autodebug and pkg.endswith("-dbg"):
1438 files.extend(debug)
1439
1440 for file in files:
1441 if (not cpath.islink(file)) and (not cpath.exists(file)):
1442 continue
1443 if file in seen:
1444 continue
1445 seen.append(file)
1446
1447 def mkdir(src, dest, p):
1448 src = os.path.join(src, p)
1449 dest = os.path.join(dest, p)
1450 fstat = cpath.stat(src)
1451 os.mkdir(dest)
1452 os.chmod(dest, fstat.st_mode)
1453 os.chown(dest, fstat.st_uid, fstat.st_gid)
1454 if p not in seen:
1455 seen.append(p)
1456 cpath.updatecache(dest)
1457
1458 def mkdir_recurse(src, dest, paths):
1459 if cpath.exists(dest + '/' + paths):
1460 return
1461 while paths.startswith("./"):
1462 paths = paths[2:]
1463 p = "."
1464 for c in paths.split("/"):
1465 p = os.path.join(p, c)
1466 if not cpath.exists(os.path.join(dest, p)):
1467 mkdir(src, dest, p)
1468
1469 if cpath.isdir(file) and not cpath.islink(file):
1470 mkdir_recurse(dvar, root, file)
1471 continue
1472
1473 mkdir_recurse(dvar, root, os.path.dirname(file))
1474 fpath = os.path.join(root,file)
1475 if not cpath.islink(file):
1476 os.link(file, fpath)
1477 continue
1478 ret = bb.utils.copyfile(file, fpath)
1479 if ret is False or ret == 0:
1480 bb.fatal("File population failed")
1481
1482 # Check if symlink paths exist
1483 for file in symlink_paths:
1484 if not os.path.exists(os.path.join(root,file)):
1485 bb.fatal("File '%s' cannot be packaged into '%s' because its "
1486 "parent directory structure does not exist. One of "
1487 "its parent directories is a symlink whose target "
1488 "directory is not included in the package." %
1489 (file, pkg))
1490
1491 os.umask(oldumask)
1492 os.chdir(workdir)
1493
1494 # Handle excluding packages with incompatible licenses
1495 package_list = []
1496 skipped_pkgs = oe.license.skip_incompatible_package_licenses(d, packages)
1497 for pkg in packages:
1498 if pkg in skipped_pkgs:
1499 msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, skipped_pkgs[pkg])
1500 oe.qa.handle_error("incompatible-license", msg, d)
1501 else:
1502 package_list.append(pkg)
1503 d.setVar('PACKAGES', ' '.join(package_list))
1504
1505 unshipped = []
1506 for root, dirs, files in cpath.walk(dvar):
1507 dir = root[len(dvar):]
1508 if not dir:
1509 dir = os.sep
1510 for f in (files + dirs):
1511 path = os.path.join(dir, f)
1512 if ('.' + path) not in seen:
1513 unshipped.append(path)
1514
1515 if unshipped:
1516 msg = pn + ": Files/directories were installed but not shipped in any package:"
1517 if "installed-vs-shipped" in (d.getVar('INSANE_SKIP:' + pn) or "").split():
1518 bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
1519 else:
1520 for f in unshipped:
1521 msg = msg + "\n " + f
1522 msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n"
1523 msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped))
1524 oe.qa.handle_error("installed-vs-shipped", msg, d)
1525
1526def process_fixsymlinks(pkgfiles, d):
1527 cpath = oe.cachedpath.CachedPath()
1528 pkgdest = d.getVar('PKGDEST')
1529 packages = d.getVar("PACKAGES", False).split()
1530
1531 dangling_links = {}
1532 pkg_files = {}
1533 for pkg in packages:
1534 dangling_links[pkg] = []
1535 pkg_files[pkg] = []
1536 inst_root = os.path.join(pkgdest, pkg)
1537 for path in pkgfiles[pkg]:
1538 rpath = path[len(inst_root):]
1539 pkg_files[pkg].append(rpath)
1540 rtarget = cpath.realpath(path, inst_root, True, assume_dir = True)
1541 if not cpath.lexists(rtarget):
1542 dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):]))
1543
1544 newrdepends = {}
1545 for pkg in dangling_links:
1546 for l in dangling_links[pkg]:
1547 found = False
1548 bb.debug(1, "%s contains dangling link %s" % (pkg, l))
1549 for p in packages:
1550 if l in pkg_files[p]:
1551 found = True
1552 bb.debug(1, "target found in %s" % p)
1553 if p == pkg:
1554 break
1555 if pkg not in newrdepends:
1556 newrdepends[pkg] = []
1557 newrdepends[pkg].append(p)
1558 break
1559 if not found:
1560 bb.note("%s contains dangling symlink to %s" % (pkg, l))
1561
1562 for pkg in newrdepends:
1563 rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "")
1564 for p in newrdepends[pkg]:
1565 if p not in rdepends:
1566 rdepends[p] = []
1567 d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False))
1568
1569def process_filedeps(pkgfiles, d):
1570 """
1571 Collect perfile run-time dependency metadata
1572 Output:
1573 FILERPROVIDESFLIST:pkg - list of all files w/ deps
1574 FILERPROVIDES:filepath:pkg - per file dep
1575
1576 FILERDEPENDSFLIST:pkg - list of all files w/ deps
1577 FILERDEPENDS:filepath:pkg - per file dep
1578 """
1579 if d.getVar('SKIP_FILEDEPS') == '1':
1580 return
1581
1582 pkgdest = d.getVar('PKGDEST')
1583 packages = d.getVar('PACKAGES')
1584 rpmdeps = d.getVar('RPMDEPS')
1585
1586 def chunks(files, n):
1587 return [files[i:i+n] for i in range(0, len(files), n)]
1588
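    # e.g. chunks([1, 2, 3, 4, 5], 2) -> [[1, 2], [3, 4], [5]]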
1589 pkglist = []
1590 for pkg in packages.split():
1591 if d.getVar('SKIP_FILEDEPS:' + pkg) == '1':
1592 continue
1593 if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-') or pkg.endswith('-src'):
1594 continue
1595 for files in chunks(pkgfiles[pkg], 100):
1596 pkglist.append((pkg, files, rpmdeps, pkgdest))
1597
1598 processed = oe.utils.multiprocess_launch(oe.package.filedeprunner, pkglist, d)
1599
1600 provides_files = {}
1601 requires_files = {}
1602
1603 for result in processed:
1604 (pkg, provides, requires) = result
1605
1606 if pkg not in provides_files:
1607 provides_files[pkg] = []
1608 if pkg not in requires_files:
1609 requires_files[pkg] = []
1610
1611 for file in sorted(provides):
1612 provides_files[pkg].append(file)
1613 key = "FILERPROVIDES:" + file + ":" + pkg
1614 d.appendVar(key, " " + " ".join(provides[file]))
1615
1616 for file in sorted(requires):
1617 requires_files[pkg].append(file)
1618 key = "FILERDEPENDS:" + file + ":" + pkg
1619 d.appendVar(key, " " + " ".join(requires[file]))
1620
1621 for pkg in requires_files:
1622 d.setVar("FILERDEPENDSFLIST:" + pkg, " ".join(sorted(requires_files[pkg])))
1623 for pkg in provides_files:
1624 d.setVar("FILERPROVIDESFLIST:" + pkg, " ".join(sorted(provides_files[pkg])))
1625
1626def process_shlibs(pkgfiles, d):
1627 cpath = oe.cachedpath.CachedPath()
1628
1629 exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', False)
1630 if exclude_shlibs:
1631 bb.note("not generating shlibs")
1632 return
1633
1634 lib_re = re.compile(r"^.*\.so")
1635 libdir_re = re.compile(r".*/%s$" % d.getVar('baselib'))
1636
1637 packages = d.getVar('PACKAGES')
1638
1639 shlib_pkgs = []
1640 exclusion_list = d.getVar("EXCLUDE_PACKAGES_FROM_SHLIBS")
1641 if exclusion_list:
1642 for pkg in packages.split():
1643 if pkg not in exclusion_list.split():
1644 shlib_pkgs.append(pkg)
1645 else:
1646 bb.note("not generating shlibs for %s" % pkg)
1647 else:
1648 shlib_pkgs = packages.split()
1649
1650 hostos = d.getVar('HOST_OS')
1651
1652 workdir = d.getVar('WORKDIR')
1653
1654 ver = d.getVar('PKGV')
1655 if not ver:
1656 msg = "PKGV not defined"
1657 oe.qa.handle_error("pkgv-undefined", msg, d)
1658 return
1659
1660 pkgdest = d.getVar('PKGDEST')
1661
1662 shlibswork_dir = d.getVar('SHLIBSWORKDIR')
1663
1664 def linux_so(file, pkg, pkgver, d):
1665 needs_ldconfig = False
1666 needed = set()
1667 sonames = set()
1668 ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
1669 cmd = d.getVar('OBJDUMP') + " -p " + shlex.quote(file) + " 2>/dev/null"
1670 fd = os.popen(cmd)
1671 lines = fd.readlines()
1672 fd.close()
1673 rpath = tuple()
1674 for l in lines:
1675 m = re.match(r"\s+RPATH\s+([^\s]*)", l)
1676 if m:
1677 rpaths = m.group(1).replace("$ORIGIN", ldir).split(":")
1678 rpath = tuple(map(os.path.normpath, rpaths))
1679 for l in lines:
1680 m = re.match(r"\s+NEEDED\s+([^\s]*)", l)
1681 if m:
1682 dep = m.group(1)
1683 if (dep, file, rpath) not in needed:
1684 needed.add((dep, file, rpath))
1685 m = re.match(r"\s+SONAME\s+([^\s]*)", l)
1686 if m:
1687 this_soname = m.group(1)
1688 prov = (this_soname, ldir, pkgver)
1689 if not prov in sonames:
1690 # if library is private (only used by package) then do not build shlib for it
1691 if not private_libs or len([i for i in private_libs if fnmatch.fnmatch(this_soname, i)]) == 0:
1692 sonames.add(prov)
1693 if libdir_re.match(os.path.dirname(file)):
1694 needs_ldconfig = True
1695 return (needs_ldconfig, needed, sonames)
1696
1697 def darwin_so(file, needed, sonames, pkgver):
1698 if not os.path.exists(file):
1699 return
1700 ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
1701
1702 def get_combinations(base):
1703 #
1704 # Given a base library name, find all combinations of this split by "." and "-"
1705 #
1706 combos = []
1707 options = base.split(".")
1708 for i in range(1, len(options) + 1):
1709 combos.append(".".join(options[0:i]))
1710 options = base.split("-")
1711 for i in range(1, len(options) + 1):
1712 combos.append("-".join(options[0:i]))
1713 return combos
1714
1715 if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.endswith('-src'):
1716 # Drop suffix
1717 name = os.path.basename(file).rsplit(".",1)[0]
1718 # Find all combinations
1719 combos = get_combinations(name)
1720 for combo in combos:
1721 if not combo in sonames:
1722 prov = (combo, ldir, pkgver)
1723 sonames.add(prov)
1724 if file.endswith('.dylib') or file.endswith('.so'):
1725 rpath = []
1726 p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
1727 out, err = p.communicate()
1728 # If returned successfully, process stdout for results
1729 if p.returncode == 0:
1730 for l in out.split("\n"):
1731 l = l.strip()
1732 if l.startswith('path '):
1733 rpath.append(l.split()[1])
1734
1735 p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
1736 out, err = p.communicate()
1737 # If returned successfully, process stdout for results
1738 if p.returncode == 0:
1739 for l in out.split("\n"):
1740 l = l.strip()
1741 if not l or l.endswith(":"):
1742 continue
1743 if "is not an object file" in l:
1744 continue
1745 name = os.path.basename(l.split()[0]).rsplit(".", 1)[0]
1746 if name and name not in needed[pkg]:
1747 needed[pkg].add((name, file, tuple()))
1748
1749 def mingw_dll(file, needed, sonames, pkgver):
1750 if not os.path.exists(file):
1751 return
1752
1753 if file.endswith(".dll"):
1754 # assume all dlls are shared objects provided by the package
1755 sonames.add((os.path.basename(file), os.path.dirname(file).replace(pkgdest + "/" + pkg, ''), pkgver))
1756
1757 if (file.endswith(".dll") or file.endswith(".exe")):
1758 # use objdump to search for "DLL Name: .*\.dll"
1759 p = subprocess.Popen([d.expand("${OBJDUMP}"), "-p", file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
1760 out, err = p.communicate()
1761 # process the output, grabbing all .dll names
1762 if p.returncode == 0:
1763 for m in re.finditer(r"DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE):
1764 dllname = m.group(1)
1765 if dllname:
1766 needed[pkg].add((dllname, file, tuple()))
1767
1768 needed = {}
1769
1770 shlib_provider = oe.package.read_shlib_providers(d)
1771
1772 for pkg in shlib_pkgs:
1773 private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
1774 private_libs = private_libs.split()
1775 needs_ldconfig = False
1776 bb.debug(2, "calculating shlib provides for %s" % pkg)
1777
1778 pkgver = d.getVar('PKGV:' + pkg)
1779 if not pkgver:
1780 pkgver = d.getVar('PV_' + pkg)
1781 if not pkgver:
1782 pkgver = ver
1783
1784 needed[pkg] = set()
1785 sonames = set()
1786 linuxlist = []
1787 for file in pkgfiles[pkg]:
1788 soname = None
1789 if cpath.islink(file):
1790 continue
1791 if hostos.startswith("darwin"):
1792 darwin_so(file, needed, sonames, pkgver)
1793 elif hostos.startswith("mingw"):
1794 mingw_dll(file, needed, sonames, pkgver)
1795 elif os.access(file, os.X_OK) or lib_re.match(file):
1796 linuxlist.append(file)
1797
1798 if linuxlist:
1799 results = oe.utils.multiprocess_launch(linux_so, linuxlist, d, extraargs=(pkg, pkgver, d))
1800 for r in results:
1801 ldconfig = r[0]
1802 needed[pkg] |= r[1]
1803 sonames |= r[2]
1804 needs_ldconfig = needs_ldconfig or ldconfig
1805
1806 shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
1807 if len(sonames):
1808 with open(shlibs_file, 'w') as fd:
1809 for s in sorted(sonames):
1810 if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]:
1811 (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]]
1812 if old_pkg != pkg:
1813 bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver))
1814 bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0]))
1815 fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n')
1816 if s[0] not in shlib_provider:
1817 shlib_provider[s[0]] = {}
1818 shlib_provider[s[0]][s[1]] = (pkg, pkgver)
1819 if needs_ldconfig:
1820 bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
1821 postinst = d.getVar('pkg_postinst:%s' % pkg)
1822 if not postinst:
1823 postinst = '#!/bin/sh\n'
1824 postinst += d.getVar('ldconfig_postinst_fragment')
1825 d.setVar('pkg_postinst:%s' % pkg, postinst)
1826 bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))
1827
1828 assumed_libs = d.getVar('ASSUME_SHLIBS')
1829 if assumed_libs:
1830 libdir = d.getVar("libdir")
1831 for e in assumed_libs.split():
1832 l, dep_pkg = e.split(":")
1833 lib_ver = None
1834 dep_pkg = dep_pkg.rsplit("_", 1)
1835 if len(dep_pkg) == 2:
1836 lib_ver = dep_pkg[1]
1837 dep_pkg = dep_pkg[0]
1838 if l not in shlib_provider:
1839 shlib_provider[l] = {}
1840 shlib_provider[l][libdir] = (dep_pkg, lib_ver)
1841
1842 libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')]
1843
1844 for pkg in shlib_pkgs:
1845 bb.debug(2, "calculating shlib requirements for %s" % pkg)
1846
1847 private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
1848 private_libs = private_libs.split()
1849
1850 deps = list()
1851 for n in needed[pkg]:
1852 # if n is in private libraries, don't try to search provider for it
1853 # this could cause a problem in case some abc.bb provides private
1854 # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on system library libfoo.so.1
1855 # but skipping it is still a better alternative than providing our own
1856 # version and then adding a runtime dependency on the same system library
1857 if private_libs and len([i for i in private_libs if fnmatch.fnmatch(n[0], i)]) > 0:
1858 bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0]))
1859 continue
1860 if n[0] in shlib_provider.keys():
1861 shlib_provider_map = shlib_provider[n[0]]
1862 matches = set()
1863 for p in itertools.chain(list(n[2]), sorted(shlib_provider_map.keys()), libsearchpath):
1864 if p in shlib_provider_map:
1865 matches.add(p)
1866 if len(matches) > 1:
1867 matchpkgs = ', '.join([shlib_provider_map[match][0] for match in matches])
1868 bb.error("%s: Multiple shlib providers for %s: %s (used by files: %s)" % (pkg, n[0], matchpkgs, n[1]))
1869 elif len(matches) == 1:
1870 (dep_pkg, ver_needed) = shlib_provider_map[matches.pop()]
1871
1872 bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1]))
1873
1874 if dep_pkg == pkg:
1875 continue
1876
1877 if ver_needed:
1878 dep = "%s (>= %s)" % (dep_pkg, ver_needed)
1879 else:
1880 dep = dep_pkg
1881 if not dep in deps:
1882 deps.append(dep)
1883 continue
1884 bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1]))
1885
1886 deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
1887 if os.path.exists(deps_file):
1888 os.remove(deps_file)
1889 if deps:
1890 with open(deps_file, 'w') as fd:
1891 for dep in sorted(deps):
1892 fd.write(dep + '\n')
1893
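# File formats produced above (values illustrative):
#   SHLIBSWORKDIR/<pkg>.list, one provider per line: libfoo.so.1:/usr/lib:1.2.3
#   PKGDEST/<pkg>.shlibdeps, one runtime dependency per line: libbar (>= 1.2)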
1894def process_pkgconfig(pkgfiles, d):
1895 packages = d.getVar('PACKAGES')
1896 workdir = d.getVar('WORKDIR')
1897 pkgdest = d.getVar('PKGDEST')
1898
1899 shlibs_dirs = d.getVar('SHLIBSDIRS').split()
1900 shlibswork_dir = d.getVar('SHLIBSWORKDIR')
1901
1902 pc_re = re.compile(r'(.*)\.pc$')
1903 var_re = re.compile(r'(.*)=(.*)')
1904 field_re = re.compile(r'(.*): (.*)')
1905
1906 pkgconfig_provided = {}
1907 pkgconfig_needed = {}
1908 for pkg in packages.split():
1909 pkgconfig_provided[pkg] = []
1910 pkgconfig_needed[pkg] = []
1911 for file in sorted(pkgfiles[pkg]):
1912 m = pc_re.match(file)
1913 if m:
1914 pd = bb.data.init()
1915 name = m.group(1)
1916 pkgconfig_provided[pkg].append(os.path.basename(name))
1917 if not os.access(file, os.R_OK):
1918 continue
1919 with open(file, 'r') as f:
1920 lines = f.readlines()
1921 for l in lines:
1922 m = field_re.match(l)
1923 if m:
1924 hdr = m.group(1)
1925 exp = pd.expand(m.group(2))
1926 if hdr == 'Requires' or hdr == 'Requires.private':
1927 pkgconfig_needed[pkg] += exp.replace(',', ' ').split()
1928 continue
1929 m = var_re.match(l)
1930 if m:
1931 name = m.group(1)
1932 val = m.group(2)
1933 pd.setVar(name, pd.expand(val))
1934
1935 for pkg in packages.split():
1936 pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
1937 if pkgconfig_provided[pkg] != []:
1938 with open(pkgs_file, 'w') as f:
1939 for p in sorted(pkgconfig_provided[pkg]):
1940 f.write('%s\n' % p)
1941
1942 # Go from least to most specific since the last one found wins
1943 for dir in reversed(shlibs_dirs):
1944 if not os.path.exists(dir):
1945 continue
1946 for file in sorted(os.listdir(dir)):
1947 m = re.match(r'^(.*)\.pclist$', file)
1948 if m:
1949 pkg = m.group(1)
1950 with open(os.path.join(dir, file)) as fd:
1951 lines = fd.readlines()
1952 pkgconfig_provided[pkg] = []
1953 for l in lines:
1954 pkgconfig_provided[pkg].append(l.rstrip())
1955
1956 for pkg in packages.split():
1957 deps = []
1958 for n in pkgconfig_needed[pkg]:
1959 found = False
1960 for k in pkgconfig_provided.keys():
1961 if n in pkgconfig_provided[k]:
1962 if k != pkg and not (k in deps):
1963 deps.append(k)
1964 found = True
1965 if not found:
1966 bb.note("couldn't find pkgconfig module '%s' in any package" % n)
1967 deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
1968 if len(deps):
1969 with open(deps_file, 'w') as fd:
1970 for dep in deps:
1971 fd.write(dep + '\n')
1972
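# Sketch of the .pc fields consumed above (contents illustrative):
#
#   prefix=/usr
#   Requires: glib-2.0 zlib
#
# variable lines such as "prefix=/usr" feed expansion via pd, while
# Requires/Requires.private entries land in pkgconfig_needed and are later
# resolved to the packages whose .pclist provides them.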
1973def read_libdep_files(d):
1974 pkglibdeps = {}
1975 packages = d.getVar('PACKAGES').split()
1976 for pkg in packages:
1977 pkglibdeps[pkg] = {}
1978 for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
1979 depsfile = d.expand("${PKGDEST}/" + pkg + extension)
1980 if os.access(depsfile, os.R_OK):
1981 with open(depsfile) as fd:
1982 lines = fd.readlines()
1983 for l in lines:
1984 l = l.rstrip()
1985 deps = bb.utils.explode_dep_versions2(l)
1986 for dep in deps:
1987 if not dep in pkglibdeps[pkg]:
1988 pkglibdeps[pkg][dep] = deps[dep]
1989 return pkglibdeps
1990
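# A minimal sketch of the mapping produced (file contents illustrative):
# a PKGDEST/foo.shlibdeps containing the line "libbar (>= 1.2)" yields
# read_libdep_files(d)["foo"] == {"libbar": [">= 1.2"]}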
1991def process_depchains(pkgfiles, d):
1992 """
1993 For a given set of prefix and postfix modifiers, make those packages
1994 RRECOMMENDS on the corresponding packages for its RDEPENDS.
1995
1996 Example: If package A depends upon package B, and A's .bb emits an
1997 A-dev package, this would make A-dev Recommends: B-dev.
1998
1999 If only one of a given suffix is specified, it will take the RRECOMMENDS
2000 based on the RDEPENDS of *all* other packages. If more than one of a given
2001 suffix is specified, it will only use the RDEPENDS of the single parent
2002 package.
2003 """
2004
2005 packages = d.getVar('PACKAGES')
2006 postfixes = (d.getVar('DEPCHAIN_POST') or '').split()
2007 prefixes = (d.getVar('DEPCHAIN_PRE') or '').split()
2008
2009 def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
2010
2011 #bb.note('depends for %s is %s' % (base, depends))
2012 rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")
2013
2014 for depend in sorted(depends):
2015 if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
2016 #bb.note("Skipping %s" % depend)
2017 continue
2018 if depend.endswith('-dev'):
2019 depend = depend[:-4]
2020 if depend.endswith('-dbg'):
2021 depend = depend[:-4]
2022 pkgname = getname(depend, suffix)
2023 #bb.note("Adding %s for %s" % (pkgname, depend))
2024 if pkgname not in rreclist and pkgname != pkg:
2025 rreclist[pkgname] = []
2026
2027 #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
2028 d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
2029
2030 def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
2031
2032 #bb.note('rdepends for %s is %s' % (base, rdepends))
2033 rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")
2034
2035 for depend in sorted(rdepends):
2036 if depend.find('virtual-locale-') != -1:
2037 #bb.note("Skipping %s" % depend)
2038 continue
2039 if depend.endswith('-dev'):
2040 depend = depend[:-4]
2041 if depend.endswith('-dbg'):
2042 depend = depend[:-4]
2043 pkgname = getname(depend, suffix)
2044 #bb.note("Adding %s for %s" % (pkgname, depend))
2045 if pkgname not in rreclist and pkgname != pkg:
2046 rreclist[pkgname] = []
2047
2048 #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
2049 d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
2050
2051 def add_dep(list, dep):
2052 if dep not in list:
2053 list.append(dep)
2054
2055 depends = []
2056 for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""):
2057 add_dep(depends, dep)
2058
2059 rdepends = []
2060 for pkg in packages.split():
2061 for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + pkg) or ""):
2062 add_dep(rdepends, dep)
2063
2064 #bb.note('rdepends is %s' % rdepends)
2065
2066 def post_getname(name, suffix):
2067 return '%s%s' % (name, suffix)
2068 def pre_getname(name, suffix):
2069 return '%s%s' % (suffix, name)
2070
2071 pkgs = {}
2072 for pkg in packages.split():
2073 for postfix in postfixes:
2074 if pkg.endswith(postfix):
2075 if not postfix in pkgs:
2076 pkgs[postfix] = {}
2077 pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)
2078
2079 for prefix in prefixes:
2080 if pkg.startswith(prefix):
2081 if not prefix in pkgs:
2082 pkgs[prefix] = {}
2083 pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)
2084
2085 if "-dbg" in pkgs:
2086 pkglibdeps = read_libdep_files(d)
2087 pkglibdeplist = []
2088 for pkg in pkglibdeps:
2089 for k in pkglibdeps[pkg]:
2090 add_dep(pkglibdeplist, k)
2091 dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d)))
2092
2093 for suffix in pkgs:
2094 for pkg in pkgs[suffix]:
2095 if d.getVarFlag('RRECOMMENDS:' + pkg, 'nodeprrecs'):
2096 continue
2097 (base, func) = pkgs[suffix][pkg]
2098 if suffix == "-dev":
2099 pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
2100 elif suffix == "-dbg":
2101 if not dbgdefaultdeps:
2102 pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d)
2103 continue
2104 if len(pkgs[suffix]) == 1:
2105 pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
2106 else:
2107 rdeps = []
2108 for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + base) or ""):
2109 add_dep(rdeps, dep)
2110 pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
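
A toy rendering of the name mapping pkg_adddeprrecs() performs above: each dependency is filtered, normalized by stripping any -dev/-dbg tail, and re-suffixed before landing in RRECOMMENDS. Package names here are invented:

def post_getname(name, suffix):
    return '%s%s' % (name, suffix)

depends = ['libfoo', 'libbar-dev', 'something-native']
suffix = '-dev'
recs = []
for depend in sorted(depends):
    if '-native' in depend or '-cross' in depend or depend.startswith('virtual/'):
        continue
    if depend.endswith('-dev') or depend.endswith('-dbg'):
        depend = depend[:-4]
    recs.append(post_getname(depend, suffix))

print(recs)  # ['libbar-dev', 'libfoo-dev']
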
diff --git a/meta/lib/oe/package_manager/__init__.py b/meta/lib/oe/package_manager/__init__.py
index 8e7128b195..2100a97c12 100644
--- a/meta/lib/oe/package_manager/__init__.py
+++ b/meta/lib/oe/package_manager/__init__.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -90,7 +92,7 @@ def opkg_query(cmd_output):
90 92
91def failed_postinsts_abort(pkgs, log_path): 93def failed_postinsts_abort(pkgs, log_path):
92 bb.fatal("""Postinstall scriptlets of %s have failed. If the intention is to defer them to first boot, 94 bb.fatal("""Postinstall scriptlets of %s have failed. If the intention is to defer them to first boot,
93then please place them into pkg_postinst_ontarget_${PN} (). 95then please place them into pkg_postinst_ontarget:${PN} ().
94Deferring to first boot via 'exit 1' is no longer supported. 96Deferring to first boot via 'exit 1' is no longer supported.
95Details of the failure are in %s.""" %(pkgs, log_path)) 97Details of the failure are in %s.""" %(pkgs, log_path))
96 98
@@ -120,7 +122,8 @@ def generate_locale_archive(d, rootfs, target_arch, localedir):
120 "riscv32": ["--uint32-align=4", "--little-endian"], 122 "riscv32": ["--uint32-align=4", "--little-endian"],
121 "i586": ["--uint32-align=4", "--little-endian"], 123 "i586": ["--uint32-align=4", "--little-endian"],
122 "i686": ["--uint32-align=4", "--little-endian"], 124 "i686": ["--uint32-align=4", "--little-endian"],
123 "x86_64": ["--uint32-align=4", "--little-endian"] 125 "x86_64": ["--uint32-align=4", "--little-endian"],
126 "loongarch64": ["--uint32-align=4", "--little-endian"]
124 } 127 }
125 if target_arch in locale_arch_options: 128 if target_arch in locale_arch_options:
126 arch_options = locale_arch_options[target_arch] 129 arch_options = locale_arch_options[target_arch]
@@ -189,7 +192,7 @@ class PackageManager(object, metaclass=ABCMeta):
189 bb.utils.remove(self.intercepts_dir, True) 192 bb.utils.remove(self.intercepts_dir, True)
190 bb.utils.mkdirhier(self.intercepts_dir) 193 bb.utils.mkdirhier(self.intercepts_dir)
191 for intercept in postinst_intercepts: 194 for intercept in postinst_intercepts:
192 bb.utils.copyfile(intercept, os.path.join(self.intercepts_dir, os.path.basename(intercept))) 195 shutil.copy(intercept, os.path.join(self.intercepts_dir, os.path.basename(intercept)))
193 196
194 @abstractmethod 197 @abstractmethod
195 def _handle_intercept_failure(self, failed_script): 198 def _handle_intercept_failure(self, failed_script):
@@ -266,7 +269,7 @@ class PackageManager(object, metaclass=ABCMeta):
266 pass 269 pass
267 270
268 @abstractmethod 271 @abstractmethod
269 def install(self, pkgs, attempt_only=False): 272 def install(self, pkgs, attempt_only=False, hard_depends_only=False):
270 """ 273 """
271 Install a list of packages. 'pkgs' is a list object. If 'attempt_only' is 274 Install a list of packages. 'pkgs' is a list object. If 'attempt_only' is
272 True, installation failures are ignored. 275 True, installation failures are ignored.
@@ -321,7 +324,7 @@ class PackageManager(object, metaclass=ABCMeta):
321 # TODO don't have sdk here but have a property on the superclass 324 # TODO don't have sdk here but have a property on the superclass
322 # (and respect in install_complementary) 325 # (and respect in install_complementary)
323 if sdk: 326 if sdk:
324 pkgdatadir = self.d.expand("${TMPDIR}/pkgdata/${SDK_SYS}") 327 pkgdatadir = self.d.getVar("PKGDATA_DIR_SDK")
325 else: 328 else:
326 pkgdatadir = self.d.getVar("PKGDATA_DIR") 329 pkgdatadir = self.d.getVar("PKGDATA_DIR")
327 330
@@ -344,10 +347,8 @@ class PackageManager(object, metaclass=ABCMeta):
344 def install_complementary(self, globs=None): 347 def install_complementary(self, globs=None):
345 """ 348 """
346 Install complementary packages based upon the list of currently installed 349 Install complementary packages based upon the list of currently installed
347 packages e.g. locales, *-dev, *-dbg, etc. This will only attempt to install 350 packages e.g. locales, *-dev, *-dbg, etc. Note: every backend needs to
348 these packages, if they don't exist then no error will occur. Note: every 351 call this function explicitly after the normal package installation.
349 backend needs to call this function explicitly after the normal package
350 installation
351 """ 352 """
352 if globs is None: 353 if globs is None:
353 globs = self.d.getVar('IMAGE_INSTALL_COMPLEMENTARY') 354 globs = self.d.getVar('IMAGE_INSTALL_COMPLEMENTARY')
@@ -364,45 +365,43 @@ class PackageManager(object, metaclass=ABCMeta):
364 for complementary_linguas in (self.d.getVar('IMAGE_LINGUAS_COMPLEMENTARY') or "").split(): 365 for complementary_linguas in (self.d.getVar('IMAGE_LINGUAS_COMPLEMENTARY') or "").split():
365 globs += (" " + complementary_linguas) % lang 366 globs += (" " + complementary_linguas) % lang
366 367
367 if globs is None: 368 if globs:
368 return 369 # we need to write the list of installed packages to a file because the
369 370 # oe-pkgdata-util reads it from a file
370 # we need to write the list of installed packages to a file because the 371 with tempfile.NamedTemporaryFile(mode="w+", prefix="installed-pkgs") as installed_pkgs:
371 # oe-pkgdata-util reads it from a file 372 pkgs = self.list_installed()
372 with tempfile.NamedTemporaryFile(mode="w+", prefix="installed-pkgs") as installed_pkgs: 373
373 pkgs = self.list_installed() 374 provided_pkgs = set()
374 375 for pkg in pkgs.values():
375 provided_pkgs = set() 376 provided_pkgs |= set(pkg.get('provs', []))
376 for pkg in pkgs.values(): 377
377 provided_pkgs |= set(pkg.get('provs', [])) 378 output = oe.utils.format_pkg_list(pkgs, "arch")
378 379 installed_pkgs.write(output)
379 output = oe.utils.format_pkg_list(pkgs, "arch") 380 installed_pkgs.flush()
380 installed_pkgs.write(output) 381
381 installed_pkgs.flush() 382 cmd = ["oe-pkgdata-util",
382 383 "-p", self.d.getVar('PKGDATA_DIR'), "glob", installed_pkgs.name,
383 cmd = ["oe-pkgdata-util", 384 globs]
384 "-p", self.d.getVar('PKGDATA_DIR'), "glob", installed_pkgs.name, 385 exclude = self.d.getVar('PACKAGE_EXCLUDE_COMPLEMENTARY')
385 globs] 386 if exclude:
386 exclude = self.d.getVar('PACKAGE_EXCLUDE_COMPLEMENTARY') 387 cmd.extend(['--exclude=' + '|'.join(exclude.split())])
387 if exclude: 388 try:
388 cmd.extend(['--exclude=' + '|'.join(exclude.split())]) 389 bb.note('Running %s' % cmd)
389 try: 390 proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
390 bb.note('Running %s' % cmd) 391 stdout, stderr = proc.communicate()
391 proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 392 if stderr: bb.note(stderr.decode("utf-8"))
392 stdout, stderr = proc.communicate() 393 complementary_pkgs = stdout.decode("utf-8")
393 if stderr: bb.note(stderr.decode("utf-8")) 394 complementary_pkgs = set(complementary_pkgs.split())
394 complementary_pkgs = stdout.decode("utf-8") 395 skip_pkgs = sorted(complementary_pkgs & provided_pkgs)
395 complementary_pkgs = set(complementary_pkgs.split()) 396 install_pkgs = sorted(complementary_pkgs - provided_pkgs)
396 skip_pkgs = sorted(complementary_pkgs & provided_pkgs) 397 bb.note("Installing complementary packages ... %s (skipped already provided packages %s)" % (
397 install_pkgs = sorted(complementary_pkgs - provided_pkgs) 398 ' '.join(install_pkgs),
398 bb.note("Installing complementary packages ... %s (skipped already provided packages %s)" % ( 399 ' '.join(skip_pkgs)))
399 ' '.join(install_pkgs), 400 self.install(install_pkgs, hard_depends_only=True)
400 ' '.join(skip_pkgs))) 401 except subprocess.CalledProcessError as e:
401 self.install(install_pkgs, attempt_only=True) 402 bb.fatal("Could not compute complementary packages list. Command "
402 except subprocess.CalledProcessError as e: 403 "'%s' returned %d:\n%s" %
403 bb.fatal("Could not compute complementary packages list. Command " 404 (' '.join(cmd), e.returncode, e.output.decode("utf-8")))
404 "'%s' returned %d:\n%s" %
405 (' '.join(cmd), e.returncode, e.output.decode("utf-8")))
406 405
407 if self.d.getVar('IMAGE_LOCALES_ARCHIVE') == '1': 406 if self.d.getVar('IMAGE_LOCALES_ARCHIVE') == '1':
408 target_arch = self.d.getVar('TARGET_ARCH') 407 target_arch = self.d.getVar('TARGET_ARCH')
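
A sketch of the set arithmetic introduced above: complementary packages already satisfied by an installed package's RPROVIDES are skipped, and only the remainder is installed (now with hard dependencies only). The names are made up:

complementary_pkgs = {'foo-dev', 'bar-dev', 'locale-en-gb'}
provided_pkgs = {'bar-dev'}  # gathered from the 'provs' of installed packages

skip_pkgs = sorted(complementary_pkgs & provided_pkgs)
install_pkgs = sorted(complementary_pkgs - provided_pkgs)

print(skip_pkgs)     # ['bar-dev']
print(install_pkgs)  # ['foo-dev', 'locale-en-gb']
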
@@ -448,7 +447,7 @@ class PackageManager(object, metaclass=ABCMeta):
448 return res 447 return res
449 return _append(uris, base_paths) 448 return _append(uris, base_paths)
450 449
451def create_packages_dir(d, subrepo_dir, deploydir, taskname, filterbydependencies): 450def create_packages_dir(d, subrepo_dir, deploydir, taskname, filterbydependencies, include_self=False):
452 """ 451 """
453 Go through our do_package_write_X dependencies and hardlink the packages we depend 452 Go through our do_package_write_X dependencies and hardlink the packages we depend
454 upon into the repo directory. This prevents us seeing other packages that may 453 upon into the repo directory. This prevents us seeing other packages that may
@@ -469,7 +468,10 @@ def create_packages_dir(d, subrepo_dir, deploydir, taskname, filterbydependencie
469 # Detect bitbake -b usage 468 # Detect bitbake -b usage
470 nodeps = d.getVar("BB_LIMITEDDEPS") or False 469 nodeps = d.getVar("BB_LIMITEDDEPS") or False
471 if nodeps or not filterbydependencies: 470 if nodeps or not filterbydependencies:
472 oe.path.symlink(deploydir, subrepo_dir, True) 471 for arch in d.getVar("ALL_MULTILIB_PACKAGE_ARCHS").split() + d.getVar("ALL_MULTILIB_PACKAGE_ARCHS").replace("-", "_").split():
472 target = os.path.join(deploydir, arch)
473 if os.path.exists(target):
474 oe.path.symlink(target, subrepo_dir + "/" + arch, True)
473 return 475 return
474 476
475 start = None 477 start = None
@@ -482,14 +484,17 @@ def create_packages_dir(d, subrepo_dir, deploydir, taskname, filterbydependencie
482 bb.fatal("Couldn't find ourself in BB_TASKDEPDATA?") 484 bb.fatal("Couldn't find ourself in BB_TASKDEPDATA?")
483 pkgdeps = set() 485 pkgdeps = set()
484 start = [start] 486 start = [start]
485 seen = set(start) 487 if include_self:
488 seen = set()
489 else:
490 seen = set(start)
486 # Support direct dependencies (do_rootfs -> do_package_write_X) 491 # Support direct dependencies (do_rootfs -> do_package_write_X)
487 # or indirect dependencies within PN (do_populate_sdk_ext -> do_rootfs -> do_package_write_X) 492 # or indirect dependencies within PN (do_populate_sdk_ext -> do_rootfs -> do_package_write_X)
488 while start: 493 while start:
489 next = [] 494 next = []
490 for dep2 in start: 495 for dep2 in start:
491 for dep in taskdepdata[dep2][3]: 496 for dep in taskdepdata[dep2][3]:
492 if taskdepdata[dep][0] != pn: 497 if include_self or taskdepdata[dep][0] != pn:
493 if "do_" + taskname in dep: 498 if "do_" + taskname in dep:
494 pkgdeps.add(dep) 499 pkgdeps.add(dep)
495 elif dep not in seen: 500 elif dep not in seen:
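
A self-contained sketch of the dependency walk above, assuming (as BB_TASKDEPDATA provides) that each task id maps to a tuple whose first element is the recipe name and whose fourth is the list of dependency task ids:

taskdepdata = {
    'a:do_rootfs': ('a', 'do_rootfs', None, ['b:do_package_write_ipk', 'a:do_package_write_ipk']),
    'a:do_package_write_ipk': ('a', 'do_package_write_ipk', None, []),
    'b:do_package_write_ipk': ('b', 'do_package_write_ipk', None, []),
}

def collect(start_id, pn, taskname, include_self=False):
    pkgdeps = set()
    start = [start_id]
    seen = set() if include_self else set(start)
    while start:
        nxt = []
        for dep2 in start:
            for dep in taskdepdata[dep2][3]:
                if include_self or taskdepdata[dep][0] != pn:
                    # A packaging task of another recipe (or, with
                    # include_self, of any recipe) is a repo candidate
                    if 'do_' + taskname in dep:
                        pkgdeps.add(dep)
                elif dep not in seen:
                    nxt.append(dep)
                    seen.add(dep)
        start = nxt
    return pkgdeps

print(collect('a:do_rootfs', 'a', 'package_write_ipk'))
# {'b:do_package_write_ipk'}
print(collect('a:do_rootfs', 'a', 'package_write_ipk', include_self=True))
# both write tasks
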
diff --git a/meta/lib/oe/package_manager/common_deb_ipk.py b/meta/lib/oe/package_manager/common_deb_ipk.py
new file mode 100644
index 0000000000..6a1e28ee6f
--- /dev/null
+++ b/meta/lib/oe/package_manager/common_deb_ipk.py
@@ -0,0 +1,97 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import glob
8import os
9import subprocess
10import tempfile
11
12import bb
13
14from oe.package_manager import opkg_query, PackageManager
15
16class OpkgDpkgPM(PackageManager):
17 def __init__(self, d, target_rootfs):
18 """
19 This is an abstract class. Do not instantiate this directly.
20 """
21 super(OpkgDpkgPM, self).__init__(d, target_rootfs)
22
23 def package_info(self, pkg):
24 """
25 Returns a dictionary with the package info.
26 """
27 raise NotImplementedError
28
29 def _common_package_info(self, cmd):
30 """
31 "Returns a dictionary with the package info.
32
33 This method extracts the common parts for Opkg and Dpkg
34 """
35
36 proc = subprocess.run(cmd, capture_output=True, encoding="utf-8", shell=True)
37 if proc.returncode:
38 bb.fatal("Unable to list available packages. Command '%s' "
39 "returned %d:\n%s" % (cmd, proc.returncode, proc.stderr))
40 elif proc.stderr:
41 bb.note("Command '%s' returned stderr: %s" % (cmd, proc.stderr))
42
43 return opkg_query(proc.stdout)
44
45 def extract(self, pkg):
46 """
47 Returns the path to a tmpdir containing the extracted contents of a package.
48
49 Deleting the tmpdir is the responsibility of the caller.
50 """
51 pkg_info = self.package_info(pkg)
52 if not pkg_info:
53 bb.fatal("Unable to get information for package '%s' while "
54 "trying to extract the package." % pkg)
55
56 ar_cmd = bb.utils.which(os.getenv("PATH"), "ar")
57 tar_cmd = bb.utils.which(os.getenv("PATH"), "tar")
58 pkg_path = pkg_info[pkg]["filepath"]
59
60 if not os.path.isfile(pkg_path):
61 bb.fatal("Unable to extract package for '%s'. "
62 "File %s doesn't exist" % (pkg, pkg_path))
63
64 tmp_dir = tempfile.mkdtemp()
65 current_dir = os.getcwd()
66 os.chdir(tmp_dir)
67
68 try:
69 cmd = [ar_cmd, 'x', pkg_path]
70 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
71 data_tar = glob.glob("data.tar.*")
72 if len(data_tar) != 1:
73 bb.fatal("Unable to extract %s package. Failed to identify "
74 "data tarball (found tarballs '%s')."
75 % (pkg_path, data_tar))
76 data_tar = data_tar[0]
77 cmd = [tar_cmd, 'xf', data_tar]
78 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
79 except subprocess.CalledProcessError as e:
80 bb.utils.remove(tmp_dir, recurse=True)
81 bb.fatal("Unable to extract %s package. Command '%s' "
82 "returned %d:\n%s" % (pkg_path, ' '.join(cmd), e.returncode, e.output.decode("utf-8")))
83 except OSError as e:
84 bb.utils.remove(tmp_dir, recurse=True)
85 bb.fatal("Unable to extract %s package. Command '%s' "
86 "returned %d:\n%s at %s" % (pkg_path, ' '.join(cmd), e.errno, e.strerror, e.filename))
87
88 bb.note("Extracted %s to %s" % (pkg_path, tmp_dir))
89 bb.utils.remove(os.path.join(tmp_dir, "debian-binary"))
90 bb.utils.remove(os.path.join(tmp_dir, "control.tar.gz"))
91 bb.utils.remove(os.path.join(tmp_dir, data_tar))
92 os.chdir(current_dir)
93
94 return tmp_dir
95
96 def _handle_intercept_failure(self, registered_pkgs):
97 self.mark_packages("unpacked", registered_pkgs.split())
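
A stripped-down sketch of the two-stage unpack extract() performs above: a .deb/.ipk is an ar archive whose payload is a single data.tar.* member. This version uses cwd= rather than os.chdir(), assumes an absolute package path, and drops the error handling:

import glob
import subprocess
import tempfile

def unpack_deb(pkg_path):
    tmp_dir = tempfile.mkdtemp()
    # Stage 1: pull the ar members (debian-binary, control.tar.*, data.tar.*)
    subprocess.check_output(['ar', 'x', pkg_path], cwd=tmp_dir)
    data_tar = glob.glob(tmp_dir + '/data.tar.*')
    if len(data_tar) != 1:
        raise RuntimeError('expected one data tarball, found %s' % data_tar)
    # Stage 2: unpack the (possibly compressed) payload tarball
    subprocess.check_output(['tar', 'xf', data_tar[0]], cwd=tmp_dir)
    return tmp_dir

# unpack_deb('/path/to/example_1.0_all.ipk')
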
diff --git a/meta/lib/oe/package_manager/deb/__init__.py b/meta/lib/oe/package_manager/deb/__init__.py
index 2ee68fefb1..e09e81e490 100644
--- a/meta/lib/oe/package_manager/deb/__init__.py
+++ b/meta/lib/oe/package_manager/deb/__init__.py
@@ -1,10 +1,13 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
5import re 7import re
6import subprocess 8import subprocess
7from oe.package_manager import * 9from oe.package_manager import *
10from oe.package_manager.common_deb_ipk import OpkgDpkgPM
8 11
9class DpkgIndexer(Indexer): 12class DpkgIndexer(Indexer):
10 def _create_configs(self): 13 def _create_configs(self):
@@ -53,6 +56,7 @@ class DpkgIndexer(Indexer):
53 56
54 index_cmds = [] 57 index_cmds = []
55 deb_dirs_found = False 58 deb_dirs_found = False
59 index_sign_files = set()
56 for arch in arch_list: 60 for arch in arch_list:
57 arch_dir = os.path.join(self.deploy_dir, arch) 61 arch_dir = os.path.join(self.deploy_dir, arch)
58 if not os.path.isdir(arch_dir): 62 if not os.path.isdir(arch_dir):
@@ -62,7 +66,10 @@ class DpkgIndexer(Indexer):
62 66
63 cmd += "%s -fcn Packages > Packages.gz;" % gzip 67 cmd += "%s -fcn Packages > Packages.gz;" % gzip
64 68
65 with open(os.path.join(arch_dir, "Release"), "w+") as release: 69 release_file = os.path.join(arch_dir, "Release")
70 index_sign_files.add(release_file)
71
72 with open(release_file, "w+") as release:
66 release.write("Label: %s\n" % arch) 73 release.write("Label: %s\n" % arch)
67 74
68 cmd += "PSEUDO_UNLOAD=1 %s release . >> Release" % apt_ftparchive 75 cmd += "PSEUDO_UNLOAD=1 %s release . >> Release" % apt_ftparchive
@@ -77,7 +84,16 @@ class DpkgIndexer(Indexer):
77 84
78 oe.utils.multiprocess_launch(create_index, index_cmds, self.d) 85 oe.utils.multiprocess_launch(create_index, index_cmds, self.d)
79 if self.d.getVar('PACKAGE_FEED_SIGN') == '1': 86 if self.d.getVar('PACKAGE_FEED_SIGN') == '1':
80 raise NotImplementedError('Package feed signing not implementd for dpkg') 87 signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND'))
88 else:
89 signer = None
90 if signer:
91 for f in index_sign_files:
92 signer.detach_sign(f,
93 self.d.getVar('PACKAGE_FEED_GPG_NAME'),
94 self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE'),
95 output_suffix="gpg",
96 use_sha256=True)
81 97
82class PMPkgsList(PkgsList): 98class PMPkgsList(PkgsList):
83 99
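
Roughly what a GPG backend's detach_sign() amounts to for each collected Release file, shown as a plain gpg invocation with an invented key id; the real signer in oe.gpg_sign also handles passphrase files and backend selection:

import subprocess

def detach_sign(path, keyid, output_suffix='gpg'):
    subprocess.check_call([
        'gpg', '--batch', '--yes',
        '--digest-algo', 'SHA256',   # the use_sha256=True above
        '--local-user', keyid,
        '--output', '%s.%s' % (path, output_suffix),
        '--detach-sign', path,
    ])

# detach_sign('Release', 'feed-signing-key')
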
@@ -96,72 +112,6 @@ class PMPkgsList(PkgsList):
96 112
97 return opkg_query(cmd_output) 113 return opkg_query(cmd_output)
98 114
99class OpkgDpkgPM(PackageManager):
100 def __init__(self, d, target_rootfs):
101 """
102 This is an abstract class. Do not instantiate this directly.
103 """
104 super(OpkgDpkgPM, self).__init__(d, target_rootfs)
105
106 def package_info(self, pkg, cmd):
107 """
108 Returns a dictionary with the package info.
109
110 This method extracts the common parts for Opkg and Dpkg
111 """
112
113 try:
114 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8")
115 except subprocess.CalledProcessError as e:
116 bb.fatal("Unable to list available packages. Command '%s' "
117 "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8")))
118 return opkg_query(output)
119
120 def extract(self, pkg, pkg_info):
121 """
122 Returns the path to a tmpdir where resides the contents of a package.
123
124 Deleting the tmpdir is responsability of the caller.
125
126 This method extracts the common parts for Opkg and Dpkg
127 """
128
129 ar_cmd = bb.utils.which(os.getenv("PATH"), "ar")
130 tar_cmd = bb.utils.which(os.getenv("PATH"), "tar")
131 pkg_path = pkg_info[pkg]["filepath"]
132
133 if not os.path.isfile(pkg_path):
134 bb.fatal("Unable to extract package for '%s'."
135 "File %s doesn't exists" % (pkg, pkg_path))
136
137 tmp_dir = tempfile.mkdtemp()
138 current_dir = os.getcwd()
139 os.chdir(tmp_dir)
140 data_tar = 'data.tar.xz'
141
142 try:
143 cmd = [ar_cmd, 'x', pkg_path]
144 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
145 cmd = [tar_cmd, 'xf', data_tar]
146 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
147 except subprocess.CalledProcessError as e:
148 bb.utils.remove(tmp_dir, recurse=True)
149 bb.fatal("Unable to extract %s package. Command '%s' "
150 "returned %d:\n%s" % (pkg_path, ' '.join(cmd), e.returncode, e.output.decode("utf-8")))
151 except OSError as e:
152 bb.utils.remove(tmp_dir, recurse=True)
153 bb.fatal("Unable to extract %s package. Command '%s' "
154 "returned %d:\n%s at %s" % (pkg_path, ' '.join(cmd), e.errno, e.strerror, e.filename))
155
156 bb.note("Extracted %s to %s" % (pkg_path, tmp_dir))
157 bb.utils.remove(os.path.join(tmp_dir, "debian-binary"))
158 bb.utils.remove(os.path.join(tmp_dir, "control.tar.gz"))
159 os.chdir(current_dir)
160
161 return tmp_dir
162
163 def _handle_intercept_failure(self, registered_pkgs):
164 self.mark_packages("unpacked", registered_pkgs.split())
165 115
166class DpkgPM(OpkgDpkgPM): 116class DpkgPM(OpkgDpkgPM):
167 def __init__(self, d, target_rootfs, archs, base_archs, apt_conf_dir=None, deb_repo_workdir="oe-rootfs-repo", filterbydependencies=True): 117 def __init__(self, d, target_rootfs, archs, base_archs, apt_conf_dir=None, deb_repo_workdir="oe-rootfs-repo", filterbydependencies=True):
@@ -214,7 +164,7 @@ class DpkgPM(OpkgDpkgPM):
214 164
215 tmp_sf.write(status) 165 tmp_sf.write(status)
216 166
217 os.rename(status_file + ".tmp", status_file) 167 bb.utils.rename(status_file + ".tmp", status_file)
218 168
219 def run_pre_post_installs(self, package_name=None): 169 def run_pre_post_installs(self, package_name=None):
220 """ 170 """
@@ -276,14 +226,18 @@ class DpkgPM(OpkgDpkgPM):
276 226
277 self.deploy_dir_unlock() 227 self.deploy_dir_unlock()
278 228
279 def install(self, pkgs, attempt_only=False): 229 def install(self, pkgs, attempt_only=False, hard_depends_only=False):
280 if attempt_only and len(pkgs) == 0: 230 if attempt_only and len(pkgs) == 0:
281 return 231 return
282 232
283 os.environ['APT_CONFIG'] = self.apt_conf_file 233 os.environ['APT_CONFIG'] = self.apt_conf_file
284 234
285 cmd = "%s %s install --allow-downgrades --allow-remove-essential --allow-change-held-packages --allow-unauthenticated --no-remove %s" % \ 235 extra_args = ""
286 (self.apt_get_cmd, self.apt_args, ' '.join(pkgs)) 236 if hard_depends_only:
237 extra_args = "--no-install-recommends"
238
239 cmd = "%s %s install --allow-downgrades --allow-remove-essential --allow-change-held-packages --allow-unauthenticated --no-remove %s %s" % \
240 (self.apt_get_cmd, self.apt_args, extra_args, ' '.join(pkgs))
287 241
288 try: 242 try:
289 bb.note("Installing the following packages: %s" % ' '.join(pkgs)) 243 bb.note("Installing the following packages: %s" % ' '.join(pkgs))
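
The flag plumbing added above, in isolation; the binary name and package names are placeholders. As in the original, an empty extra_args merely leaves a harmless double space in the command:

def build_install_cmd(pkgs, hard_depends_only=False):
    extra_args = "--no-install-recommends" if hard_depends_only else ""
    return "apt-get install --no-remove %s %s" % (extra_args, ' '.join(pkgs))

print(build_install_cmd(['foo', 'bar'], hard_depends_only=True))
# apt-get install --no-remove --no-install-recommends foo bar
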
@@ -299,13 +253,13 @@ class DpkgPM(OpkgDpkgPM):
299 for dir in dirs: 253 for dir in dirs:
300 new_dir = re.sub(r"\.dpkg-new", "", dir) 254 new_dir = re.sub(r"\.dpkg-new", "", dir)
301 if dir != new_dir: 255 if dir != new_dir:
302 os.rename(os.path.join(root, dir), 256 bb.utils.rename(os.path.join(root, dir),
303 os.path.join(root, new_dir)) 257 os.path.join(root, new_dir))
304 258
305 for file in files: 259 for file in files:
306 new_file = re.sub(r"\.dpkg-new", "", file) 260 new_file = re.sub(r"\.dpkg-new", "", file)
307 if file != new_file: 261 if file != new_file:
308 os.rename(os.path.join(root, file), 262 bb.utils.rename(os.path.join(root, file),
309 os.path.join(root, new_file)) 263 os.path.join(root, new_file))
310 264
311 265
@@ -422,7 +376,7 @@ class DpkgPM(OpkgDpkgPM):
422 multilib_variants = self.d.getVar("MULTILIB_VARIANTS"); 376 multilib_variants = self.d.getVar("MULTILIB_VARIANTS");
423 for variant in multilib_variants.split(): 377 for variant in multilib_variants.split():
424 localdata = bb.data.createCopy(self.d) 378 localdata = bb.data.createCopy(self.d)
425 variant_tune = localdata.getVar("DEFAULTTUNE_virtclass-multilib-" + variant, False) 379 variant_tune = localdata.getVar("DEFAULTTUNE:virtclass-multilib-" + variant, False)
426 orig_arch = localdata.getVar("DPKG_ARCH") 380 orig_arch = localdata.getVar("DPKG_ARCH")
427 localdata.setVar("DEFAULTTUNE", variant_tune) 381 localdata.setVar("DEFAULTTUNE", variant_tune)
428 variant_arch = localdata.getVar("DPKG_ARCH") 382 variant_arch = localdata.getVar("DPKG_ARCH")
@@ -477,7 +431,7 @@ class DpkgPM(OpkgDpkgPM):
477 Returns a dictionary with the package info. 431 Returns a dictionary with the package info.
478 """ 432 """
479 cmd = "%s show %s" % (self.apt_cache_cmd, pkg) 433 cmd = "%s show %s" % (self.apt_cache_cmd, pkg)
480 pkg_info = super(DpkgPM, self).package_info(pkg, cmd) 434 pkg_info = self._common_package_info(cmd)
481 435
482 pkg_arch = pkg_info[pkg]["pkgarch"] 436 pkg_arch = pkg_info[pkg]["pkgarch"]
483 pkg_filename = pkg_info[pkg]["filename"] 437 pkg_filename = pkg_info[pkg]["filename"]
@@ -485,19 +439,3 @@ class DpkgPM(OpkgDpkgPM):
485 os.path.join(self.deploy_dir, pkg_arch, pkg_filename) 439 os.path.join(self.deploy_dir, pkg_arch, pkg_filename)
486 440
487 return pkg_info 441 return pkg_info
488
489 def extract(self, pkg):
490 """
491 Returns the path to a tmpdir where resides the contents of a package.
492
493 Deleting the tmpdir is responsability of the caller.
494 """
495 pkg_info = self.package_info(pkg)
496 if not pkg_info:
497 bb.fatal("Unable to get information for package '%s' while "
498 "trying to extract the package." % pkg)
499
500 tmp_dir = super(DpkgPM, self).extract(pkg, pkg_info)
501 bb.utils.remove(os.path.join(tmp_dir, "data.tar.xz"))
502
503 return tmp_dir
diff --git a/meta/lib/oe/package_manager/deb/manifest.py b/meta/lib/oe/package_manager/deb/manifest.py
index d8eab24a06..72983bae98 100644
--- a/meta/lib/oe/package_manager/deb/manifest.py
+++ b/meta/lib/oe/package_manager/deb/manifest.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
diff --git a/meta/lib/oe/package_manager/deb/rootfs.py b/meta/lib/oe/package_manager/deb/rootfs.py
index 8fbaca11d6..1e25b64ed9 100644
--- a/meta/lib/oe/package_manager/deb/rootfs.py
+++ b/meta/lib/oe/package_manager/deb/rootfs.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
diff --git a/meta/lib/oe/package_manager/deb/sdk.py b/meta/lib/oe/package_manager/deb/sdk.py
index 9859d8f32d..6f3005053e 100644
--- a/meta/lib/oe/package_manager/deb/sdk.py
+++ b/meta/lib/oe/package_manager/deb/sdk.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -65,7 +67,14 @@ class PkgSdk(Sdk):
65 67
66 self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY')) 68 self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY'))
67 69
70 self.target_pm.run_pre_post_installs()
71
72 env_bkp = os.environ.copy()
73 os.environ['PATH'] = self.d.expand("${COREBASE}/scripts/nativesdk-intercept") + \
74 os.pathsep + os.environ["PATH"]
75
68 self.target_pm.run_intercepts(populate_sdk='target') 76 self.target_pm.run_intercepts(populate_sdk='target')
77 os.environ.update(env_bkp)
69 78
70 execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND")) 79 execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND"))
71 80
@@ -78,6 +87,8 @@ class PkgSdk(Sdk):
78 self._populate_sysroot(self.host_pm, self.host_manifest) 87 self._populate_sysroot(self.host_pm, self.host_manifest)
79 self.install_locales(self.host_pm) 88 self.install_locales(self.host_pm)
80 89
90 self.host_pm.run_pre_post_installs()
91
81 self.host_pm.run_intercepts(populate_sdk='host') 92 self.host_pm.run_intercepts(populate_sdk='host')
82 93
83 execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND")) 94 execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND"))
diff --git a/meta/lib/oe/package_manager/ipk/__init__.py b/meta/lib/oe/package_manager/ipk/__init__.py
index da488c1c7f..3d998e52ff 100644
--- a/meta/lib/oe/package_manager/ipk/__init__.py
+++ b/meta/lib/oe/package_manager/ipk/__init__.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -6,6 +8,7 @@ import re
6import shutil 8import shutil
7import subprocess 9import subprocess
8from oe.package_manager import * 10from oe.package_manager import *
11from oe.package_manager.common_deb_ipk import OpkgDpkgPM
9 12
10class OpkgIndexer(Indexer): 13class OpkgIndexer(Indexer):
11 def write_index(self): 14 def write_index(self):
@@ -14,6 +17,7 @@ class OpkgIndexer(Indexer):
14 ] 17 ]
15 18
16 opkg_index_cmd = bb.utils.which(os.getenv('PATH'), "opkg-make-index") 19 opkg_index_cmd = bb.utils.which(os.getenv('PATH'), "opkg-make-index")
20 opkg_index_cmd_extra_params = self.d.getVar('OPKG_MAKE_INDEX_EXTRA_PARAMS') or ""
17 if self.d.getVar('PACKAGE_FEED_SIGN') == '1': 21 if self.d.getVar('PACKAGE_FEED_SIGN') == '1':
18 signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND')) 22 signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND'))
19 else: 23 else:
@@ -39,8 +43,8 @@ class OpkgIndexer(Indexer):
39 if not os.path.exists(pkgs_file): 43 if not os.path.exists(pkgs_file):
40 open(pkgs_file, "w").close() 44 open(pkgs_file, "w").close()
41 45
42 index_cmds.add('%s --checksum md5 --checksum sha256 -r %s -p %s -m %s' % 46 index_cmds.add('%s --checksum md5 --checksum sha256 -r %s -p %s -m %s %s' %
43 (opkg_index_cmd, pkgs_file, pkgs_file, pkgs_dir)) 47 (opkg_index_cmd, pkgs_file, pkgs_file, pkgs_dir, opkg_index_cmd_extra_params))
44 48
45 index_sign_files.add(pkgs_file) 49 index_sign_files.add(pkgs_file)
46 50
@@ -87,74 +91,6 @@ class PMPkgsList(PkgsList):
87 return opkg_query(cmd_output) 91 return opkg_query(cmd_output)
88 92
89 93
90
91class OpkgDpkgPM(PackageManager):
92 def __init__(self, d, target_rootfs):
93 """
94 This is an abstract class. Do not instantiate this directly.
95 """
96 super(OpkgDpkgPM, self).__init__(d, target_rootfs)
97
98 def package_info(self, pkg, cmd):
99 """
100 Returns a dictionary with the package info.
101
102 This method extracts the common parts for Opkg and Dpkg
103 """
104
105 try:
106 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8")
107 except subprocess.CalledProcessError as e:
108 bb.fatal("Unable to list available packages. Command '%s' "
109 "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8")))
110 return opkg_query(output)
111
112 def extract(self, pkg, pkg_info):
113 """
114 Returns the path to a tmpdir where resides the contents of a package.
115
116 Deleting the tmpdir is responsability of the caller.
117
118 This method extracts the common parts for Opkg and Dpkg
119 """
120
121 ar_cmd = bb.utils.which(os.getenv("PATH"), "ar")
122 tar_cmd = bb.utils.which(os.getenv("PATH"), "tar")
123 pkg_path = pkg_info[pkg]["filepath"]
124
125 if not os.path.isfile(pkg_path):
126 bb.fatal("Unable to extract package for '%s'."
127 "File %s doesn't exists" % (pkg, pkg_path))
128
129 tmp_dir = tempfile.mkdtemp()
130 current_dir = os.getcwd()
131 os.chdir(tmp_dir)
132 data_tar = 'data.tar.xz'
133
134 try:
135 cmd = [ar_cmd, 'x', pkg_path]
136 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
137 cmd = [tar_cmd, 'xf', data_tar]
138 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
139 except subprocess.CalledProcessError as e:
140 bb.utils.remove(tmp_dir, recurse=True)
141 bb.fatal("Unable to extract %s package. Command '%s' "
142 "returned %d:\n%s" % (pkg_path, ' '.join(cmd), e.returncode, e.output.decode("utf-8")))
143 except OSError as e:
144 bb.utils.remove(tmp_dir, recurse=True)
145 bb.fatal("Unable to extract %s package. Command '%s' "
146 "returned %d:\n%s at %s" % (pkg_path, ' '.join(cmd), e.errno, e.strerror, e.filename))
147
148 bb.note("Extracted %s to %s" % (pkg_path, tmp_dir))
149 bb.utils.remove(os.path.join(tmp_dir, "debian-binary"))
150 bb.utils.remove(os.path.join(tmp_dir, "control.tar.gz"))
151 os.chdir(current_dir)
152
153 return tmp_dir
154
155 def _handle_intercept_failure(self, registered_pkgs):
156 self.mark_packages("unpacked", registered_pkgs.split())
157
158class OpkgPM(OpkgDpkgPM): 94class OpkgPM(OpkgDpkgPM):
159 def __init__(self, d, target_rootfs, config_file, archs, task_name='target', ipk_repo_workdir="oe-rootfs-repo", filterbydependencies=True, prepare_index=True): 95 def __init__(self, d, target_rootfs, config_file, archs, task_name='target', ipk_repo_workdir="oe-rootfs-repo", filterbydependencies=True, prepare_index=True):
160 super(OpkgPM, self).__init__(d, target_rootfs) 96 super(OpkgPM, self).__init__(d, target_rootfs)
@@ -213,7 +149,7 @@ class OpkgPM(OpkgDpkgPM):
213 149
214 tmp_sf.write(status) 150 tmp_sf.write(status)
215 151
216 os.rename(status_file + ".tmp", status_file) 152 bb.utils.rename(status_file + ".tmp", status_file)
217 153
218 def _create_custom_config(self): 154 def _create_custom_config(self):
219 bb.note("Building from feeds activated!") 155 bb.note("Building from feeds activated!")
@@ -243,7 +179,7 @@ class OpkgPM(OpkgDpkgPM):
243 """ 179 """
244 if (self.d.getVar('FEED_DEPLOYDIR_BASE_URI') or "") != "": 180 if (self.d.getVar('FEED_DEPLOYDIR_BASE_URI') or "") != "":
245 for arch in self.pkg_archs.split(): 181 for arch in self.pkg_archs.split():
246 cfg_file_name = os.path.join(self.target_rootfs, 182 cfg_file_name = oe.path.join(self.target_rootfs,
247 self.d.getVar("sysconfdir"), 183 self.d.getVar("sysconfdir"),
248 "opkg", 184 "opkg",
249 "local-%s-feed.conf" % arch) 185 "local-%s-feed.conf" % arch)
@@ -337,7 +273,7 @@ class OpkgPM(OpkgDpkgPM):
337 273
338 self.deploy_dir_unlock() 274 self.deploy_dir_unlock()
339 275
340 def install(self, pkgs, attempt_only=False): 276 def install(self, pkgs, attempt_only=False, hard_depends_only=False):
341 if not pkgs: 277 if not pkgs:
342 return 278 return
343 279
@@ -346,6 +282,8 @@ class OpkgPM(OpkgDpkgPM):
346 cmd += " --add-exclude %s" % exclude 282 cmd += " --add-exclude %s" % exclude
347 for bad_recommendation in (self.d.getVar("BAD_RECOMMENDATIONS") or "").split(): 283 for bad_recommendation in (self.d.getVar("BAD_RECOMMENDATIONS") or "").split():
348 cmd += " --add-ignore-recommends %s" % bad_recommendation 284 cmd += " --add-ignore-recommends %s" % bad_recommendation
285 if hard_depends_only:
286 cmd += " --no-install-recommends"
349 cmd += " install " 287 cmd += " install "
350 cmd += " ".join(pkgs) 288 cmd += " ".join(pkgs)
351 289
@@ -443,15 +381,16 @@ class OpkgPM(OpkgDpkgPM):
443 cmd = "%s %s --noaction install %s " % (self.opkg_cmd, 381 cmd = "%s %s --noaction install %s " % (self.opkg_cmd,
444 opkg_args, 382 opkg_args,
445 ' '.join(pkgs)) 383 ' '.join(pkgs))
446 try: 384 proc = subprocess.run(cmd, capture_output=True, encoding="utf-8", shell=True)
447 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) 385 if proc.returncode:
448 except subprocess.CalledProcessError as e:
449 bb.fatal("Unable to dummy install packages. Command '%s' " 386 bb.fatal("Unable to dummy install packages. Command '%s' "
450 "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8"))) 387 "returned %d:\n%s" % (cmd, proc.returncode, proc.stderr))
388 elif proc.stderr:
389 bb.note("Command '%s' returned stderr: %s" % (cmd, proc.stderr))
451 390
452 bb.utils.remove(temp_rootfs, True) 391 bb.utils.remove(temp_rootfs, True)
453 392
454 return output 393 return proc.stdout
455 394
456 def backup_packaging_data(self): 395 def backup_packaging_data(self):
457 # Save the opkglib for incremental ipk image generation 396
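
The error-handling pattern adopted above, extracted into a standalone helper: capture both streams, fail hard on a non-zero exit status, but still surface any stderr chatter from a successful run:

import subprocess

def run_and_check(cmd):
    proc = subprocess.run(cmd, capture_output=True, encoding="utf-8", shell=True)
    if proc.returncode:
        raise RuntimeError("Command '%s' returned %d:\n%s"
                           % (cmd, proc.returncode, proc.stderr))
    elif proc.stderr:
        print("Command '%s' returned stderr: %s" % (cmd, proc.stderr))
    return proc.stdout

print(run_and_check("echo hello"), end="")  # hello
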
@@ -477,7 +416,7 @@ class OpkgPM(OpkgDpkgPM):
477 Returns a dictionary with the package info. 416 Returns a dictionary with the package info.
478 """ 417 """
479 cmd = "%s %s info %s" % (self.opkg_cmd, self.opkg_args, pkg) 418 cmd = "%s %s info %s" % (self.opkg_cmd, self.opkg_args, pkg)
480 pkg_info = super(OpkgPM, self).package_info(pkg, cmd) 419 pkg_info = self._common_package_info(cmd)
481 420
482 pkg_arch = pkg_info[pkg]["arch"] 421 pkg_arch = pkg_info[pkg]["arch"]
483 pkg_filename = pkg_info[pkg]["filename"] 422 pkg_filename = pkg_info[pkg]["filename"]
@@ -485,19 +424,3 @@ class OpkgPM(OpkgDpkgPM):
485 os.path.join(self.deploy_dir, pkg_arch, pkg_filename) 424 os.path.join(self.deploy_dir, pkg_arch, pkg_filename)
486 425
487 return pkg_info 426 return pkg_info
488
489 def extract(self, pkg):
490 """
491 Returns the path to a tmpdir where resides the contents of a package.
492
493 Deleting the tmpdir is responsability of the caller.
494 """
495 pkg_info = self.package_info(pkg)
496 if not pkg_info:
497 bb.fatal("Unable to get information for package '%s' while "
498 "trying to extract the package." % pkg)
499
500 tmp_dir = super(OpkgPM, self).extract(pkg, pkg_info)
501 bb.utils.remove(os.path.join(tmp_dir, "data.tar.xz"))
502
503 return tmp_dir
diff --git a/meta/lib/oe/package_manager/ipk/manifest.py b/meta/lib/oe/package_manager/ipk/manifest.py
index ee4b57bcb0..3549d7428d 100644
--- a/meta/lib/oe/package_manager/ipk/manifest.py
+++ b/meta/lib/oe/package_manager/ipk/manifest.py
@@ -1,8 +1,11 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
5from oe.manifest import Manifest 7from oe.manifest import Manifest
8import re
6 9
7class PkgManifest(Manifest): 10class PkgManifest(Manifest):
8 """ 11 """
diff --git a/meta/lib/oe/package_manager/ipk/rootfs.py b/meta/lib/oe/package_manager/ipk/rootfs.py
index 26dbee6f6a..ba93eb62ea 100644
--- a/meta/lib/oe/package_manager/ipk/rootfs.py
+++ b/meta/lib/oe/package_manager/ipk/rootfs.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -145,51 +147,14 @@ class PkgRootfs(DpkgOpkgRootfs):
145 self.pm.recover_packaging_data() 147 self.pm.recover_packaging_data()
146 148
147 bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS'), True) 149 bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS'), True)
148
149 def _prelink_file(self, root_dir, filename):
150 bb.note('prelink %s in %s' % (filename, root_dir))
151 prelink_cfg = oe.path.join(root_dir,
152 self.d.expand('${sysconfdir}/prelink.conf'))
153 if not os.path.exists(prelink_cfg):
154 shutil.copy(self.d.expand('${STAGING_DIR_NATIVE}${sysconfdir_native}/prelink.conf'),
155 prelink_cfg)
156
157 cmd_prelink = self.d.expand('${STAGING_DIR_NATIVE}${sbindir_native}/prelink')
158 self._exec_shell_cmd([cmd_prelink,
159 '--root',
160 root_dir,
161 '-amR',
162 '-N',
163 '-c',
164 self.d.expand('${sysconfdir}/prelink.conf')])
165
166 ''' 150 '''
167 Compare two files with the same key twice to see if they are equal. 151 Compare two files with the same key twice to see if they are equal.
168 If they are not equal, it means they are duplicated and come from 152 If they are not equal, it means they are duplicated and come from
169 different packages. 153 different packages.
170 1st: Comapre them directly;
171 2nd: While incremental image creation is enabled, one of the
172 files could be probaly prelinked in the previous image
173 creation and the file has been changed, so we need to
174 prelink the other one and compare them.
175 ''' 154 '''
176 def _file_equal(self, key, f1, f2): 155 def _file_equal(self, key, f1, f2):
177
178 # Both of them are not prelinked
179 if filecmp.cmp(f1, f2): 156 if filecmp.cmp(f1, f2):
180 return True 157 return True
181
182 if bb.data.inherits_class('image-prelink', self.d):
183 if self.image_rootfs not in f1:
184 self._prelink_file(f1.replace(key, ''), f1)
185
186 if self.image_rootfs not in f2:
187 self._prelink_file(f2.replace(key, ''), f2)
188
189 # Both of them are prelinked
190 if filecmp.cmp(f1, f2):
191 return True
192
193 # Not equal 158 # Not equal
194 return False 159 return False
195 160
@@ -200,7 +165,7 @@ class PkgRootfs(DpkgOpkgRootfs):
200 """ 165 """
201 def _multilib_sanity_test(self, dirs): 166 def _multilib_sanity_test(self, dirs):
202 167
203 allow_replace = self.d.getVar("MULTILIBRE_ALLOW_REP") 168 allow_replace = "|".join((self.d.getVar("MULTILIBRE_ALLOW_REP") or "").split())
204 if allow_replace is None: 169 if allow_replace is None:
205 allow_replace = "" 170 allow_replace = ""
206 171
diff --git a/meta/lib/oe/package_manager/ipk/sdk.py b/meta/lib/oe/package_manager/ipk/sdk.py
index e2ca415c8e..3acd55f548 100644
--- a/meta/lib/oe/package_manager/ipk/sdk.py
+++ b/meta/lib/oe/package_manager/ipk/sdk.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -61,12 +63,19 @@ class PkgSdk(Sdk):
61 63
62 self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY')) 64 self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY'))
63 65
66 env_bkp = os.environ.copy()
67 os.environ['PATH'] = self.d.expand("${COREBASE}/scripts/nativesdk-intercept") + \
68 os.pathsep + os.environ["PATH"]
69
64 self.target_pm.run_intercepts(populate_sdk='target') 70 self.target_pm.run_intercepts(populate_sdk='target')
71 os.environ.update(env_bkp)
65 72
66 execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND")) 73 execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND"))
67 74
68 if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): 75 if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d):
69 self.target_pm.remove_packaging_data() 76 self.target_pm.remove_packaging_data()
77 else:
78 self.target_pm.remove_lists()
70 79
71 bb.note("Installing NATIVESDK packages") 80 bb.note("Installing NATIVESDK packages")
72 self._populate_sysroot(self.host_pm, self.host_manifest) 81 self._populate_sysroot(self.host_pm, self.host_manifest)
@@ -78,6 +87,8 @@ class PkgSdk(Sdk):
78 87
79 if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): 88 if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d):
80 self.host_pm.remove_packaging_data() 89 self.host_pm.remove_packaging_data()
90 else:
91 self.host_pm.remove_lists()
81 92
82 target_sysconfdir = os.path.join(self.sdk_target_sysroot, self.sysconfdir) 93 target_sysconfdir = os.path.join(self.sdk_target_sysroot, self.sysconfdir)
83 host_sysconfdir = os.path.join(self.sdk_host_sysroot, self.sysconfdir) 94 host_sysconfdir = os.path.join(self.sdk_host_sysroot, self.sysconfdir)
diff --git a/meta/lib/oe/package_manager/rpm/__init__.py b/meta/lib/oe/package_manager/rpm/__init__.py
index 6df0092281..323ec5008f 100644
--- a/meta/lib/oe/package_manager/rpm/__init__.py
+++ b/meta/lib/oe/package_manager/rpm/__init__.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -96,11 +98,15 @@ class RpmPM(PackageManager):
96 archs = ["sdk_provides_dummy_target"] + archs 98 archs = ["sdk_provides_dummy_target"] + archs
97 confdir = "%s/%s" %(self.target_rootfs, "etc/dnf/vars/") 99 confdir = "%s/%s" %(self.target_rootfs, "etc/dnf/vars/")
98 bb.utils.mkdirhier(confdir) 100 bb.utils.mkdirhier(confdir)
99 open(confdir + "arch", 'w').write(":".join(archs)) 101 with open(confdir + "arch", 'w') as f:
102 f.write(":".join(archs))
103
100 distro_codename = self.d.getVar('DISTRO_CODENAME') 104 distro_codename = self.d.getVar('DISTRO_CODENAME')
101 open(confdir + "releasever", 'w').write(distro_codename if distro_codename is not None else '') 105 with open(confdir + "releasever", 'w') as f:
106 f.write(distro_codename if distro_codename is not None else '')
102 107
103 open(oe.path.join(self.target_rootfs, "etc/dnf/dnf.conf"), 'w').write("") 108 with open(oe.path.join(self.target_rootfs, "etc/dnf/dnf.conf"), 'w') as f:
109 f.write("")
104 110
105 111
106 def _configure_rpm(self): 112 def _configure_rpm(self):
@@ -110,14 +116,17 @@ class RpmPM(PackageManager):
110 platformconfdir = "%s/%s" %(self.target_rootfs, "etc/rpm/") 116 platformconfdir = "%s/%s" %(self.target_rootfs, "etc/rpm/")
111 rpmrcconfdir = "%s/%s" %(self.target_rootfs, "etc/") 117 rpmrcconfdir = "%s/%s" %(self.target_rootfs, "etc/")
112 bb.utils.mkdirhier(platformconfdir) 118 bb.utils.mkdirhier(platformconfdir)
113 open(platformconfdir + "platform", 'w').write("%s-pc-linux" % self.primary_arch) 119 with open(platformconfdir + "platform", 'w') as f:
120 f.write("%s-pc-linux" % self.primary_arch)
114 with open(rpmrcconfdir + "rpmrc", 'w') as f: 121 with open(rpmrcconfdir + "rpmrc", 'w') as f:
115 f.write("arch_compat: %s: %s\n" % (self.primary_arch, self.archs if len(self.archs) > 0 else self.primary_arch)) 122 f.write("arch_compat: %s: %s\n" % (self.primary_arch, self.archs if len(self.archs) > 0 else self.primary_arch))
116 f.write("buildarch_compat: %s: noarch\n" % self.primary_arch) 123 f.write("buildarch_compat: %s: noarch\n" % self.primary_arch)
117 124
118 open(platformconfdir + "macros", 'w').write("%_transaction_color 7\n") 125 with open(platformconfdir + "macros", 'w') as f:
126 f.write("%_transaction_color 7\n")
119 if self.d.getVar('RPM_PREFER_ELF_ARCH'): 127 if self.d.getVar('RPM_PREFER_ELF_ARCH'):
120 open(platformconfdir + "macros", 'a').write("%%_prefer_color %s" % (self.d.getVar('RPM_PREFER_ELF_ARCH'))) 128 with open(platformconfdir + "macros", 'a') as f:
129 f.write("%%_prefer_color %s" % (self.d.getVar('RPM_PREFER_ELF_ARCH')))
121 130
122 if self.d.getVar('RPM_SIGN_PACKAGES') == '1': 131 if self.d.getVar('RPM_SIGN_PACKAGES') == '1':
123 signer = get_signer(self.d, self.d.getVar('RPM_GPG_BACKEND')) 132 signer = get_signer(self.d, self.d.getVar('RPM_GPG_BACKEND'))
@@ -164,13 +173,13 @@ class RpmPM(PackageManager):
164 repo_uri = uri + "/" + arch 173 repo_uri = uri + "/" + arch
165 repo_id = "oe-remote-repo" + "-".join(urlparse(repo_uri).path.split("/")) 174 repo_id = "oe-remote-repo" + "-".join(urlparse(repo_uri).path.split("/"))
166 repo_name = "OE Remote Repo:" + " ".join(urlparse(repo_uri).path.split("/")) 175 repo_name = "OE Remote Repo:" + " ".join(urlparse(repo_uri).path.split("/"))
167 open(oe.path.join(self.target_rootfs, "etc", "yum.repos.d", repo_base + ".repo"), 'a').write( 176 with open(oe.path.join(self.target_rootfs, "etc", "yum.repos.d", repo_base + ".repo"), 'a') as f:
168 "[%s]\nname=%s\nbaseurl=%s\n%s\n" % (repo_id, repo_name, repo_uri, gpg_opts)) 177 f.write("[%s]\nname=%s\nbaseurl=%s\n%s\n" % (repo_id, repo_name, repo_uri, gpg_opts))
169 else: 178 else:
170 repo_name = "OE Remote Repo:" + " ".join(urlparse(uri).path.split("/")) 179 repo_name = "OE Remote Repo:" + " ".join(urlparse(uri).path.split("/"))
171 repo_uri = uri 180 repo_uri = uri
172 open(oe.path.join(self.target_rootfs, "etc", "yum.repos.d", repo_base + ".repo"), 'w').write( 181 with open(oe.path.join(self.target_rootfs, "etc", "yum.repos.d", repo_base + ".repo"), 'w') as f:
173 "[%s]\nname=%s\nbaseurl=%s\n%s" % (repo_base, repo_name, repo_uri, gpg_opts)) 182 f.write("[%s]\nname=%s\nbaseurl=%s\n%s" % (repo_base, repo_name, repo_uri, gpg_opts))
174 183
175 def _prepare_pkg_transaction(self): 184 def _prepare_pkg_transaction(self):
176 os.environ['D'] = self.target_rootfs 185 os.environ['D'] = self.target_rootfs
@@ -181,7 +190,7 @@ class RpmPM(PackageManager):
181 os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE') 190 os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE')
182 191
183 192
184 def install(self, pkgs, attempt_only = False): 193 def install(self, pkgs, attempt_only=False, hard_depends_only=False):
185 if len(pkgs) == 0: 194 if len(pkgs) == 0:
186 return 195 return
187 self._prepare_pkg_transaction() 196 self._prepare_pkg_transaction()
@@ -192,13 +201,16 @@ class RpmPM(PackageManager):
192 201
193 output = self._invoke_dnf((["--skip-broken"] if attempt_only else []) + 202 output = self._invoke_dnf((["--skip-broken"] if attempt_only else []) +
194 (["-x", ",".join(exclude_pkgs)] if len(exclude_pkgs) > 0 else []) + 203 (["-x", ",".join(exclude_pkgs)] if len(exclude_pkgs) > 0 else []) +
195 (["--setopt=install_weak_deps=False"] if self.d.getVar('NO_RECOMMENDATIONS') == "1" else []) + 204 (["--setopt=install_weak_deps=False"] if (hard_depends_only or self.d.getVar('NO_RECOMMENDATIONS') == "1") else []) +
196 (["--nogpgcheck"] if self.d.getVar('RPM_SIGN_PACKAGES') != '1' else ["--setopt=gpgcheck=True"]) + 205 (["--nogpgcheck"] if self.d.getVar('RPM_SIGN_PACKAGES') != '1' else ["--setopt=gpgcheck=True"]) +
197 ["install"] + 206 ["install"] +
198 pkgs) 207 pkgs)
199 208
200 failed_scriptlets_pkgnames = collections.OrderedDict() 209 failed_scriptlets_pkgnames = collections.OrderedDict()
201 for line in output.splitlines(): 210 for line in output.splitlines():
211 if line.startswith("Error: Systemctl"):
212 bb.error(line)
213
202 if line.startswith("Error in POSTIN scriptlet in rpm package"): 214 if line.startswith("Error in POSTIN scriptlet in rpm package"):
203 failed_scriptlets_pkgnames[line.split()[-1]] = True 215 failed_scriptlets_pkgnames[line.split()[-1]] = True
204 216
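
The conditional argument assembly above, evaluated standalone with invented values to show how hard_depends_only now folds into dnf's weak-dependency switch:

attempt_only = False
hard_depends_only = True
no_recommendations = "0"
exclude_pkgs = ["badpkg"]
sign_packages = False
pkgs = ["foo", "bar"]

args = ((["--skip-broken"] if attempt_only else []) +
        (["-x", ",".join(exclude_pkgs)] if len(exclude_pkgs) > 0 else []) +
        (["--setopt=install_weak_deps=False"]
         if (hard_depends_only or no_recommendations == "1") else []) +
        (["--nogpgcheck"] if not sign_packages else ["--setopt=gpgcheck=True"]) +
        ["install"] + pkgs)

print(args)
# ['-x', 'badpkg', '--setopt=install_weak_deps=False', '--nogpgcheck', 'install', 'foo', 'bar']
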
@@ -326,7 +338,8 @@ class RpmPM(PackageManager):
326 return e.output.decode("utf-8") 338 return e.output.decode("utf-8")
327 339
328 def dump_install_solution(self, pkgs): 340 def dump_install_solution(self, pkgs):
329 open(self.solution_manifest, 'w').write(" ".join(pkgs)) 341 with open(self.solution_manifest, 'w') as f:
342 f.write(" ".join(pkgs))
330 return pkgs 343 return pkgs
331 344
332 def load_old_install_solution(self): 345 def load_old_install_solution(self):
@@ -360,7 +373,8 @@ class RpmPM(PackageManager):
360 bb.utils.mkdirhier(target_path) 373 bb.utils.mkdirhier(target_path)
361 num = self._script_num_prefix(target_path) 374 num = self._script_num_prefix(target_path)
362 saved_script_name = oe.path.join(target_path, "%d-%s" % (num, pkg)) 375 saved_script_name = oe.path.join(target_path, "%d-%s" % (num, pkg))
363 open(saved_script_name, 'w').write(output) 376 with open(saved_script_name, 'w') as f:
377 f.write(output)
364 os.chmod(saved_script_name, 0o755) 378 os.chmod(saved_script_name, 0o755)
365 379
366 def _handle_intercept_failure(self, registered_pkgs): 380 def _handle_intercept_failure(self, registered_pkgs):
@@ -372,14 +386,15 @@ class RpmPM(PackageManager):
372 self.save_rpmpostinst(pkg) 386 self.save_rpmpostinst(pkg)
373 387
374 def extract(self, pkg): 388 def extract(self, pkg):
375 output = self._invoke_dnf(["repoquery", "--queryformat", "%{location}", pkg]) 389 output = self._invoke_dnf(["repoquery", "--location", pkg])
376 pkg_name = output.splitlines()[-1] 390 pkg_name = output.splitlines()[-1]
377 if not pkg_name.endswith(".rpm"): 391 if not pkg_name.endswith(".rpm"):
378 bb.fatal("dnf could not find package %s in repository: %s" %(pkg, output)) 392 bb.fatal("dnf could not find package %s in repository: %s" %(pkg, output))
379 pkg_path = oe.path.join(self.rpm_repo_dir, pkg_name) 393 # Strip file: prefix
394 pkg_path = pkg_name[5:]
380 395
381 cpio_cmd = bb.utils.which(os.getenv("PATH"), "cpio") 396 tar_cmd = bb.utils.which(os.getenv("PATH"), "tar")
382 rpm2cpio_cmd = bb.utils.which(os.getenv("PATH"), "rpm2cpio") 397 rpm2archive_cmd = bb.utils.which(os.getenv("PATH"), "rpm2archive")
383 398
384 if not os.path.isfile(pkg_path): 399 if not os.path.isfile(pkg_path):
385 bb.fatal("Unable to extract package for '%s'." 400 bb.fatal("Unable to extract package for '%s'."
@@ -390,7 +405,7 @@ class RpmPM(PackageManager):
390 os.chdir(tmp_dir) 405 os.chdir(tmp_dir)
391 406
392 try: 407 try:
393 cmd = "%s %s | %s -idmv" % (rpm2cpio_cmd, pkg_path, cpio_cmd) 408 cmd = "%s -n %s | %s xv" % (rpm2archive_cmd, pkg_path, tar_cmd)
394 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) 409 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
395 except subprocess.CalledProcessError as e: 410 except subprocess.CalledProcessError as e:
396 bb.utils.remove(tmp_dir, recurse=True) 411 bb.utils.remove(tmp_dir, recurse=True)
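
The replacement pipeline, mirrored as a small helper. It reuses the exact command shape above and assumes rpm2archive (shipped with rpm 4.16 and later) and tar are on PATH; the package path is a made-up example:

import subprocess
import tempfile

def unpack_rpm(pkg_path):
    tmp_dir = tempfile.mkdtemp()
    # -n skips recompression; the archive is streamed straight into tar
    cmd = "rpm2archive -n %s | tar xv" % pkg_path
    subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True, cwd=tmp_dir)
    return tmp_dir

# unpack_rpm('/path/to/example-1.0-r0.corei7_64.rpm')
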
diff --git a/meta/lib/oe/package_manager/rpm/manifest.py b/meta/lib/oe/package_manager/rpm/manifest.py
index e6604b301f..6ee7c329f0 100644
--- a/meta/lib/oe/package_manager/rpm/manifest.py
+++ b/meta/lib/oe/package_manager/rpm/manifest.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
diff --git a/meta/lib/oe/package_manager/rpm/rootfs.py b/meta/lib/oe/package_manager/rpm/rootfs.py
index 00d07cd9cc..3ba5396320 100644
--- a/meta/lib/oe/package_manager/rpm/rootfs.py
+++ b/meta/lib/oe/package_manager/rpm/rootfs.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -108,7 +110,7 @@ class PkgRootfs(Rootfs):
108 if self.progress_reporter: 110 if self.progress_reporter:
109 self.progress_reporter.next_stage() 111 self.progress_reporter.next_stage()
110 112
111 self._setup_dbg_rootfs(['/etc', '/var/lib/rpm', '/var/cache/dnf', '/var/lib/dnf']) 113 self._setup_dbg_rootfs(['/etc/rpm', '/etc/rpmrc', '/etc/dnf', '/var/lib/rpm', '/var/cache/dnf', '/var/lib/dnf'])
112 114
113 execute_pre_post_process(self.d, rpm_post_process_cmds) 115 execute_pre_post_process(self.d, rpm_post_process_cmds)
114 116
diff --git a/meta/lib/oe/package_manager/rpm/sdk.py b/meta/lib/oe/package_manager/rpm/sdk.py
index c5f232431f..ea79fe050b 100644
--- a/meta/lib/oe/package_manager/rpm/sdk.py
+++ b/meta/lib/oe/package_manager/rpm/sdk.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -65,7 +67,12 @@ class PkgSdk(Sdk):
65 67
66 self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY')) 68 self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY'))
67 69
70 env_bkp = os.environ.copy()
71 os.environ['PATH'] = self.d.expand("${COREBASE}/scripts/nativesdk-intercept") + \
72 os.pathsep + os.environ["PATH"]
73
68 self.target_pm.run_intercepts(populate_sdk='target') 74 self.target_pm.run_intercepts(populate_sdk='target')
75 os.environ.update(env_bkp)
69 76
70 execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND")) 77 execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND"))
71 78
@@ -110,5 +117,6 @@ class PkgSdk(Sdk):
110 for f in glob.glob(os.path.join(self.sdk_output, "etc", "rpm*")): 117 for f in glob.glob(os.path.join(self.sdk_output, "etc", "rpm*")):
111 self.movefile(f, native_sysconf_dir) 118 self.movefile(f, native_sysconf_dir)
112 for f in glob.glob(os.path.join(self.sdk_output, "etc", "dnf", "*")): 119 for f in glob.glob(os.path.join(self.sdk_output, "etc", "dnf", "*")):
113 self.movefile(f, native_sysconf_dir) 120 self.mkdirhier(native_sysconf_dir + "/dnf")
121 self.movefile(f, native_sysconf_dir + "/dnf")
114 self.remove(os.path.join(self.sdk_output, "etc"), True) 122 self.remove(os.path.join(self.sdk_output, "etc"), True)
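The PATH juggling added above (back up os.environ, prepend the nativesdk-intercept scripts directory, restore after run_intercepts) is a reusable pattern. A minimal sketch as a context manager (the name and directory are illustrative):

    import os
    from contextlib import contextmanager

    @contextmanager
    def prepended_path(directory):
        """Temporarily prepend 'directory' to PATH, restoring the old
        environment afterwards (mirrors the env_bkp pattern above)."""
        env_bkp = os.environ.copy()
        os.environ['PATH'] = directory + os.pathsep + os.environ['PATH']
        try:
            yield
        finally:
            os.environ.clear()
            os.environ.update(env_bkp)

    # usage sketch (path is illustrative):
    # with prepended_path("/path/to/scripts/nativesdk-intercept"):
    #     target_pm.run_intercepts(populate_sdk='target')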
diff --git a/meta/lib/oe/packagedata.py b/meta/lib/oe/packagedata.py
index a82085a792..b6a10a930a 100644
--- a/meta/lib/oe/packagedata.py
+++ b/meta/lib/oe/packagedata.py
@@ -1,9 +1,17 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
5import codecs 7import codecs
6import os 8import os
9import json
10import bb.parse
11import bb.compress.zstd
12import oe.path
13
14from glob import glob
7 15
8def packaged(pkg, d): 16def packaged(pkg, d):
9 return os.access(get_subpkgedata_fn(pkg, d) + '.packaged', os.R_OK) 17 return os.access(get_subpkgedata_fn(pkg, d) + '.packaged', os.R_OK)
@@ -19,7 +27,7 @@ def read_pkgdatafile(fn):
19 import re 27 import re
20 with open(fn, 'r') as f: 28 with open(fn, 'r') as f:
21 lines = f.readlines() 29 lines = f.readlines()
22 r = re.compile("([^:]+):\s*(.*)") 30 r = re.compile(r"(^.+?):\s+(.*)")
23 for l in lines: 31 for l in lines:
24 m = r.match(l) 32 m = r.match(l)
25 if m: 33 if m:
@@ -45,18 +53,31 @@ def read_pkgdata(pn, d):
45 return read_pkgdatafile(fn) 53 return read_pkgdatafile(fn)
46 54
47# 55#
48# Collapse FOO_pkg variables into FOO 56# Collapse FOO:pkg variables into FOO
49# 57#
50def read_subpkgdata_dict(pkg, d): 58def read_subpkgdata_dict(pkg, d):
51 ret = {} 59 ret = {}
52 subd = read_pkgdatafile(get_subpkgedata_fn(pkg, d)) 60 subd = read_pkgdatafile(get_subpkgedata_fn(pkg, d))
53 for var in subd: 61 for var in subd:
54 newvar = var.replace("_" + pkg, "") 62 newvar = var.replace(":" + pkg, "")
55 if newvar == var and var + "_" + pkg in subd: 63 if newvar == var and var + ":" + pkg in subd:
56 continue 64 continue
57 ret[newvar] = subd[var] 65 ret[newvar] = subd[var]
58 return ret 66 return ret
59 67
68@bb.parse.vardepsexclude("BB_NUMBER_THREADS")
69def read_subpkgdata_extended(pkg, d):
70 import json
71 import bb.compress.zstd
72
73 fn = d.expand("${PKGDATA_DIR}/extended/%s.json.zstd" % pkg)
74 try:
75 num_threads = int(d.getVar("BB_NUMBER_THREADS"))
76 with bb.compress.zstd.open(fn, "rt", encoding="utf-8", num_threads=num_threads) as f:
77 return json.load(f)
78 except FileNotFoundError:
79 return None
80
60def _pkgmap(d): 81def _pkgmap(d):
61 """Return a dictionary mapping package to recipe name.""" 82 """Return a dictionary mapping package to recipe name."""
62 83
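The new read_subpkgdata_extended() helper is the read side of a zstd-compressed JSON round trip; the write side appears in emit_pkgdata() later in this file. A minimal sketch of the pair, assuming a BitBake environment where bb.compress.zstd is importable (file name and thread count are illustrative):

    import json
    import bb.compress.zstd

    def write_extended(fn, data, num_threads=4):
        with bb.compress.zstd.open(fn, "wt", encoding="utf-8",
                                   num_threads=num_threads) as f:
            json.dump(data, f, sort_keys=True, separators=(",", ":"))

    def read_extended(fn, num_threads=4):
        try:
            with bb.compress.zstd.open(fn, "rt", encoding="utf-8",
                                       num_threads=num_threads) as f:
                return json.load(f)
        except FileNotFoundError:
            return None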
@@ -96,3 +117,253 @@ def recipename(pkg, d):
96 """Return the recipe name for the given binary package name.""" 117 """Return the recipe name for the given binary package name."""
97 118
98 return pkgmap(d).get(pkg) 119 return pkgmap(d).get(pkg)
120
121def foreach_runtime_provider_pkgdata(d, rdep, include_rdep=False):
122 pkgdata_dir = d.getVar("PKGDATA_DIR")
123 possibles = set()
124 try:
125 possibles |= set(os.listdir("%s/runtime-rprovides/%s/" % (pkgdata_dir, rdep)))
126 except OSError:
127 pass
128
129 if include_rdep:
130 possibles.add(rdep)
131
132 for p in sorted(list(possibles)):
133 rdep_data = read_subpkgdata(p, d)
134 yield p, rdep_data
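Usage sketch for the generator above (assumes a BitBake datastore d with PKGDATA_DIR set; the package name is illustrative):

    # Iterate over every package that rprovides "virtual-locale", plus
    # the name itself, and inspect its pkgdata:
    for provider, pkgdata in foreach_runtime_provider_pkgdata(d, "virtual-locale", include_rdep=True):
        print(provider, pkgdata.get("PKG:%s" % provider))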
135
136def get_package_mapping(pkg, basepkg, d, depversions=None):
137 import oe.packagedata
138
139 data = oe.packagedata.read_subpkgdata(pkg, d)
140 key = "PKG:%s" % pkg
141
142 if key in data:
143 if bb.data.inherits_class('allarch', d) and bb.data.inherits_class('packagegroup', d) and pkg != data[key]:
144 bb.error("An allarch packagegroup shouldn't depend on packages which are dynamically renamed (%s to %s)" % (pkg, data[key]))
145 # Have to avoid undoing the write_extra_pkgs(global_variants...)
146 if bb.data.inherits_class('allarch', d) and not d.getVar('MULTILIB_VARIANTS') \
147 and data[key] == basepkg:
148 return pkg
149 if depversions == []:
150 # Avoid returning a mapping if the renamed package rprovides its original name
151 rprovkey = "RPROVIDES:%s" % pkg
152 if rprovkey in data:
153 if pkg in bb.utils.explode_dep_versions2(data[rprovkey]):
154 bb.note("%s rprovides %s, not replacing the latter" % (data[key], pkg))
155 return pkg
156 # Do map to rewritten package name
157 return data[key]
158
159 return pkg
160
161def get_package_additional_metadata(pkg_type, d):
162 base_key = "PACKAGE_ADD_METADATA"
163 for key in ("%s_%s" % (base_key, pkg_type.upper()), base_key):
164 if d.getVar(key, False) is None:
165 continue
166 d.setVarFlag(key, "type", "list")
167 if d.getVarFlag(key, "separator") is None:
168 d.setVarFlag(key, "separator", "\\n")
169 metadata_fields = [field.strip() for field in oe.data.typed_value(key, d)]
170 return "\n".join(metadata_fields).strip()
171
172def runtime_mapping_rename(varname, pkg, d):
173 #bb.note("%s before: %s" % (varname, d.getVar(varname)))
174
175 new_depends = {}
176 deps = bb.utils.explode_dep_versions2(d.getVar(varname) or "")
177 for depend, depversions in deps.items():
178 new_depend = get_package_mapping(depend, pkg, d, depversions)
179 if depend != new_depend:
180 bb.note("package name mapping done: %s -> %s" % (depend, new_depend))
181 new_depends[new_depend] = deps[depend]
182
183 d.setVar(varname, bb.utils.join_deps(new_depends, commasep=False))
184
185 #bb.note("%s after: %s" % (varname, d.getVar(varname)))
186
187@bb.parse.vardepsexclude("BB_NUMBER_THREADS")
188def emit_pkgdata(pkgfiles, d):
189 def process_postinst_on_target(pkg, mlprefix):
190 pkgval = d.getVar('PKG:%s' % pkg)
191 if pkgval is None:
192 pkgval = pkg
193
194 defer_fragment = """
195if [ -n "$D" ]; then
196 $INTERCEPT_DIR/postinst_intercept delay_to_first_boot %s mlprefix=%s
197 exit 0
198fi
199""" % (pkgval, mlprefix)
200
201 postinst = d.getVar('pkg_postinst:%s' % pkg)
202 postinst_ontarget = d.getVar('pkg_postinst_ontarget:%s' % pkg)
203
204 if postinst_ontarget:
205 bb.debug(1, 'adding deferred pkg_postinst_ontarget() to pkg_postinst() for %s' % pkg)
206 if not postinst:
207 postinst = '#!/bin/sh\n'
208 postinst += defer_fragment
209 postinst += postinst_ontarget
210 d.setVar('pkg_postinst:%s' % pkg, postinst)
211
212 def add_set_e_to_scriptlets(pkg):
213 for scriptlet_name in ('pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm'):
214 scriptlet = d.getVar('%s:%s' % (scriptlet_name, pkg))
215 if scriptlet:
216 scriptlet_split = scriptlet.split('\n')
217 if scriptlet_split[0].startswith("#!"):
218 scriptlet = scriptlet_split[0] + "\nset -e\n" + "\n".join(scriptlet_split[1:])
219 else:
220 scriptlet = "set -e\n" + "\n".join(scriptlet_split[0:])
221 d.setVar('%s:%s' % (scriptlet_name, pkg), scriptlet)
222
223 def write_if_exists(f, pkg, var):
224 def encode(str):
225 import codecs
226 c = codecs.getencoder("unicode_escape")
227 return c(str)[0].decode("latin1")
228
229 val = d.getVar('%s:%s' % (var, pkg))
230 if val:
231 f.write('%s:%s: %s\n' % (var, pkg, encode(val)))
232 return val
233 val = d.getVar('%s' % (var))
234 if val:
235 f.write('%s: %s\n' % (var, encode(val)))
236 return val
237
238 def write_extra_pkgs(variants, pn, packages, pkgdatadir):
239 for variant in variants:
240 with open("%s/%s-%s" % (pkgdatadir, variant, pn), 'w') as fd:
241 fd.write("PACKAGES: %s\n" % ' '.join(
242 map(lambda pkg: '%s-%s' % (variant, pkg), packages.split())))
243
244 def write_extra_runtime_pkgs(variants, packages, pkgdatadir):
245 for variant in variants:
246 for pkg in packages.split():
247 ml_pkg = "%s-%s" % (variant, pkg)
248 subdata_file = "%s/runtime/%s" % (pkgdatadir, ml_pkg)
249 with open(subdata_file, 'w') as fd:
250 fd.write("PKG:%s: %s" % (ml_pkg, pkg))
251
252 packages = d.getVar('PACKAGES')
253 pkgdest = d.getVar('PKGDEST')
254 pkgdatadir = d.getVar('PKGDESTWORK')
255
256 data_file = pkgdatadir + d.expand("/${PN}")
257 with open(data_file, 'w') as fd:
258 fd.write("PACKAGES: %s\n" % packages)
259
260 pkgdebugsource = d.getVar("PKGDEBUGSOURCES") or []
261
262 pn = d.getVar('PN')
263 global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS') or "").split()
264 variants = (d.getVar('MULTILIB_VARIANTS') or "").split()
265
266 if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
267 write_extra_pkgs(variants, pn, packages, pkgdatadir)
268
269 if bb.data.inherits_class('allarch', d) and not variants \
270 and not bb.data.inherits_class('packagegroup', d):
271 write_extra_pkgs(global_variants, pn, packages, pkgdatadir)
272
273 workdir = d.getVar('WORKDIR')
274
275 for pkg in packages.split():
276 pkgval = d.getVar('PKG:%s' % pkg)
277 if pkgval is None:
278 pkgval = pkg
279 d.setVar('PKG:%s' % pkg, pkg)
280
281 extended_data = {
282 "files_info": {}
283 }
284
285 pkgdestpkg = os.path.join(pkgdest, pkg)
286 files = {}
287 files_extra = {}
288 total_size = 0
289 seen = set()
290 for f in pkgfiles[pkg]:
291 fpath = os.sep + os.path.relpath(f, pkgdestpkg)
292
293 fstat = os.lstat(f)
294 files[fpath] = fstat.st_size
295
296 extended_data["files_info"].setdefault(fpath, {})
297 extended_data["files_info"][fpath]['size'] = fstat.st_size
298
299 if fstat.st_ino not in seen:
300 seen.add(fstat.st_ino)
301 total_size += fstat.st_size
302
303 if fpath in pkgdebugsource:
304 extended_data["files_info"][fpath]['debugsrc'] = pkgdebugsource[fpath]
305 del pkgdebugsource[fpath]
306
307 d.setVar('FILES_INFO:' + pkg , json.dumps(files, sort_keys=True))
308
309 process_postinst_on_target(pkg, d.getVar("MLPREFIX"))
310 add_set_e_to_scriptlets(pkg)
311
312 subdata_file = pkgdatadir + "/runtime/%s" % pkg
313 with open(subdata_file, 'w') as sf:
314 for var in (d.getVar('PKGDATA_VARS') or "").split():
315 val = write_if_exists(sf, pkg, var)
316
317 write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
318 for dfile in sorted((d.getVar('FILERPROVIDESFLIST:' + pkg) or "").split()):
319 write_if_exists(sf, pkg, 'FILERPROVIDES:' + dfile)
320
321 write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
322 for dfile in sorted((d.getVar('FILERDEPENDSFLIST:' + pkg) or "").split()):
323 write_if_exists(sf, pkg, 'FILERDEPENDS:' + dfile)
324
325 sf.write('%s:%s: %d\n' % ('PKGSIZE', pkg, total_size))
326
327 subdata_extended_file = pkgdatadir + "/extended/%s.json.zstd" % pkg
328 num_threads = int(d.getVar("BB_NUMBER_THREADS"))
329 with bb.compress.zstd.open(subdata_extended_file, "wt", encoding="utf-8", num_threads=num_threads) as f:
330 json.dump(extended_data, f, sort_keys=True, separators=(",", ":"))
331
332 # Symlinks needed for rprovides lookup
333 rprov = d.getVar('RPROVIDES:%s' % pkg) or d.getVar('RPROVIDES')
334 if rprov:
335 for p in bb.utils.explode_deps(rprov):
336 subdata_sym = pkgdatadir + "/runtime-rprovides/%s/%s" % (p, pkg)
337 bb.utils.mkdirhier(os.path.dirname(subdata_sym))
338 oe.path.relsymlink(subdata_file, subdata_sym, True)
339
340 allow_empty = d.getVar('ALLOW_EMPTY:%s' % pkg)
341 if not allow_empty:
342 allow_empty = d.getVar('ALLOW_EMPTY')
343 root = "%s/%s" % (pkgdest, pkg)
344 os.chdir(root)
345 g = glob('*')
346 if g or allow_empty == "1":
347 # Symlinks needed for reverse lookups (from the final package name)
348 subdata_sym = pkgdatadir + "/runtime-reverse/%s" % pkgval
349 oe.path.relsymlink(subdata_file, subdata_sym, True)
350
351 packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg
352 open(packagedfile, 'w').close()
353
354 if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
355 write_extra_runtime_pkgs(variants, packages, pkgdatadir)
356
357 if bb.data.inherits_class('allarch', d) and not variants \
358 and not bb.data.inherits_class('packagegroup', d):
359 write_extra_runtime_pkgs(global_variants, packages, pkgdatadir)
360
361def mapping_rename_hook(d):
362 """
363 Rewrite variables to account for package renaming in things
364 like debian.bbclass or manual PKG variable name changes
365 """
366 pkg = d.getVar("PKG")
367 oe.packagedata.runtime_mapping_rename("RDEPENDS", pkg, d)
368 oe.packagedata.runtime_mapping_rename("RRECOMMENDS", pkg, d)
369 oe.packagedata.runtime_mapping_rename("RSUGGESTS", pkg, d)
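One detail of emit_pkgdata() above worth highlighting: PKGSIZE counts each inode only once, so hardlinked copies inside a package do not inflate the reported size. The accounting, reduced to a standalone helper (the function name is illustrative):

    import os

    def package_size(files):
        """Sum file sizes, counting each inode once so hardlinked copies
        are not double-counted (as emit_pkgdata does for PKGSIZE)."""
        seen = set()
        total = 0
        for f in files:
            fstat = os.lstat(f)
            if fstat.st_ino not in seen:
                seen.add(fstat.st_ino)
                total += fstat.st_size
        return total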
diff --git a/meta/lib/oe/packagegroup.py b/meta/lib/oe/packagegroup.py
index 8fcaecde82..7b7594751a 100644
--- a/meta/lib/oe/packagegroup.py
+++ b/meta/lib/oe/packagegroup.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
diff --git a/meta/lib/oe/patch.py b/meta/lib/oe/patch.py
index fccbedb519..edd77196ee 100644
--- a/meta/lib/oe/patch.py
+++ b/meta/lib/oe/patch.py
@@ -1,7 +1,12 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
7import os
8import shlex
9import subprocess
5import oe.path 10import oe.path
6import oe.types 11import oe.types
7 12
@@ -24,9 +29,6 @@ class CmdError(bb.BBHandledException):
24 29
25 30
26def runcmd(args, dir = None): 31def runcmd(args, dir = None):
27 import pipes
28 import subprocess
29
30 if dir: 32 if dir:
31 olddir = os.path.abspath(os.curdir) 33 olddir = os.path.abspath(os.curdir)
32 if not os.path.exists(dir): 34 if not os.path.exists(dir):
@@ -35,7 +37,7 @@ def runcmd(args, dir = None):
35 # print("cwd: %s -> %s" % (olddir, dir)) 37 # print("cwd: %s -> %s" % (olddir, dir))
36 38
37 try: 39 try:
38 args = [ pipes.quote(str(arg)) for arg in args ] 40 args = [ shlex.quote(str(arg)) for arg in args ]
39 cmd = " ".join(args) 41 cmd = " ".join(args)
40 # print("cmd: %s" % cmd) 42 # print("cmd: %s" % cmd)
41 proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) 43 proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
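pipes.quote was a long-deprecated alias; the pipes module is removed as of Python 3.13 and shlex.quote is the drop-in replacement used above. For example:

    import shlex

    args = ["patch", "-p1", "-i", "my patch.diff"]   # illustrative arguments
    cmd = " ".join(shlex.quote(str(arg)) for arg in args)
    print(cmd)   # patch -p1 -i 'my patch.diff'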
@@ -56,6 +58,7 @@ def runcmd(args, dir = None):
56 if dir: 58 if dir:
57 os.chdir(olddir) 59 os.chdir(olddir)
58 60
61
59class PatchError(Exception): 62class PatchError(Exception):
60 def __init__(self, msg): 63 def __init__(self, msg):
61 self.msg = msg 64 self.msg = msg
@@ -214,7 +217,7 @@ class PatchTree(PatchSet):
214 with open(self.seriespath, 'w') as f: 217 with open(self.seriespath, 'w') as f:
215 for p in patches: 218 for p in patches:
216 f.write(p) 219 f.write(p)
217 220
218 def Import(self, patch, force = None): 221 def Import(self, patch, force = None):
219 """""" 222 """"""
220 PatchSet.Import(self, patch, force) 223 PatchSet.Import(self, patch, force)
@@ -291,13 +294,32 @@ class PatchTree(PatchSet):
291 self.Pop(all=True) 294 self.Pop(all=True)
292 295
293class GitApplyTree(PatchTree): 296class GitApplyTree(PatchTree):
294 patch_line_prefix = '%% original patch' 297 notes_ref = "refs/notes/devtool"
295 ignore_commit_prefix = '%% ignore' 298 original_patch = 'original patch'
299 ignore_commit = 'ignore'
296 300
297 def __init__(self, dir, d): 301 def __init__(self, dir, d):
298 PatchTree.__init__(self, dir, d) 302 PatchTree.__init__(self, dir, d)
299 self.commituser = d.getVar('PATCH_GIT_USER_NAME') 303 self.commituser = d.getVar('PATCH_GIT_USER_NAME')
300 self.commitemail = d.getVar('PATCH_GIT_USER_EMAIL') 304 self.commitemail = d.getVar('PATCH_GIT_USER_EMAIL')
305 if not self._isInitialized(d):
306 self._initRepo()
307
308 def _isInitialized(self, d):
309 cmd = "git rev-parse --show-toplevel"
310 try:
311 output = runcmd(cmd.split(), self.dir).strip()
312 except CmdError as err:
 313 ## runcmd returned non-zero, which most likely means exit code 128
 314 ## (not a git directory)
315 return False
 316 ## Make sure the repo is inside the build directory (so we don't break top-level git repos) or under the workdir
317 return os.path.samefile(output, self.dir) or oe.path.is_path_parent(d.getVar('WORKDIR'), output)
318
319 def _initRepo(self):
320 runcmd("git init".split(), self.dir)
321 runcmd("git add .".split(), self.dir)
322 runcmd("git commit -a --allow-empty -m bitbake_patching_started".split(), self.dir)
301 323
302 @staticmethod 324 @staticmethod
303 def extractPatchHeader(patchfile): 325 def extractPatchHeader(patchfile):
@@ -431,7 +453,7 @@ class GitApplyTree(PatchTree):
431 # Prepare git command 453 # Prepare git command
432 cmd = ["git"] 454 cmd = ["git"]
433 GitApplyTree.gitCommandUserOptions(cmd, commituser, commitemail) 455 GitApplyTree.gitCommandUserOptions(cmd, commituser, commitemail)
434 cmd += ["commit", "-F", tmpfile] 456 cmd += ["commit", "-F", tmpfile, "--no-verify"]
435 # git doesn't like plain email addresses as authors 457 # git doesn't like plain email addresses as authors
436 if author and '<' in author: 458 if author and '<' in author:
437 cmd.append('--author="%s"' % author) 459 cmd.append('--author="%s"' % author)
@@ -440,44 +462,133 @@ class GitApplyTree(PatchTree):
440 return (tmpfile, cmd) 462 return (tmpfile, cmd)
441 463
442 @staticmethod 464 @staticmethod
443 def extractPatches(tree, startcommit, outdir, paths=None): 465 def addNote(repo, ref, key, value=None, commituser=None, commitemail=None):
466 note = key + (": %s" % value if value else "")
467 notes_ref = GitApplyTree.notes_ref
468 runcmd(["git", "config", "notes.rewriteMode", "ignore"], repo)
469 runcmd(["git", "config", "notes.displayRef", notes_ref, notes_ref], repo)
470 runcmd(["git", "config", "notes.rewriteRef", notes_ref, notes_ref], repo)
471 cmd = ["git"]
472 GitApplyTree.gitCommandUserOptions(cmd, commituser, commitemail)
473 runcmd(cmd + ["notes", "--ref", notes_ref, "append", "-m", note, ref], repo)
474
475 @staticmethod
476 def removeNote(repo, ref, key, commituser=None, commitemail=None):
477 notes = GitApplyTree.getNotes(repo, ref)
478 notes = {k: v for k, v in notes.items() if k != key and not k.startswith(key + ":")}
479 runcmd(["git", "notes", "--ref", GitApplyTree.notes_ref, "remove", "--ignore-missing", ref], repo)
480 for note, value in notes.items():
481 GitApplyTree.addNote(repo, ref, note, value, commituser, commitemail)
482
483 @staticmethod
484 def getNotes(repo, ref):
485 import re
486
487 note = None
488 try:
489 note = runcmd(["git", "notes", "--ref", GitApplyTree.notes_ref, "show", ref], repo)
490 prefix = ""
491 except CmdError:
492 note = runcmd(['git', 'show', '-s', '--format=%B', ref], repo)
493 prefix = "%% "
494
495 note_re = re.compile(r'^%s(.*?)(?::\s*(.*))?$' % prefix)
496 notes = dict()
497 for line in note.splitlines():
498 m = note_re.match(line)
499 if m:
500 notes[m.group(1)] = m.group(2)
501
502 return notes
503
504 @staticmethod
505 def commitIgnored(subject, dir=None, files=None, d=None):
506 if files:
507 runcmd(['git', 'add'] + files, dir)
508 cmd = ["git"]
509 GitApplyTree.gitCommandUserOptions(cmd, d=d)
510 cmd += ["commit", "-m", subject, "--no-verify"]
511 runcmd(cmd, dir)
 512 GitApplyTree.addNote(dir, "HEAD", GitApplyTree.ignore_commit, None, d.getVar('PATCH_GIT_USER_NAME'), d.getVar('PATCH_GIT_USER_EMAIL'))
513
514 @staticmethod
515 def extractPatches(tree, startcommits, outdir, paths=None):
444 import tempfile 516 import tempfile
445 import shutil 517 import shutil
446 tempdir = tempfile.mkdtemp(prefix='oepatch') 518 tempdir = tempfile.mkdtemp(prefix='oepatch')
447 try: 519 try:
448 shellcmd = ["git", "format-patch", "--no-signature", "--no-numbered", startcommit, "-o", tempdir] 520 for name, rev in startcommits.items():
449 if paths: 521 shellcmd = ["git", "format-patch", "--no-signature", "--no-numbered", rev, "-o", tempdir]
450 shellcmd.append('--') 522 if paths:
451 shellcmd.extend(paths) 523 shellcmd.append('--')
452 out = runcmd(["sh", "-c", " ".join(shellcmd)], tree) 524 shellcmd.extend(paths)
453 if out: 525 out = runcmd(["sh", "-c", " ".join(shellcmd)], os.path.join(tree, name))
454 for srcfile in out.split(): 526 if out:
455 for encoding in ['utf-8', 'latin-1']: 527 for srcfile in out.split():
456 patchlines = [] 528 # This loop, which is used to remove any line that
457 outfile = None 529 # starts with "%% original patch", is kept for backwards
458 try: 530 # compatibility. If/when that compatibility is dropped,
459 with open(srcfile, 'r', encoding=encoding) as f: 531 # it can be replaced with code to just read the first
460 for line in f: 532 # line of the patch file to get the SHA-1, and the code
461 if line.startswith(GitApplyTree.patch_line_prefix): 533 # below that writes the modified patch file can be
462 outfile = line.split()[-1].strip() 534 # replaced with a simple file move.
463 continue 535 for encoding in ['utf-8', 'latin-1']:
464 if line.startswith(GitApplyTree.ignore_commit_prefix): 536 patchlines = []
465 continue 537 try:
466 patchlines.append(line) 538 with open(srcfile, 'r', encoding=encoding, newline='') as f:
467 except UnicodeDecodeError: 539 for line in f:
540 if line.startswith("%% " + GitApplyTree.original_patch):
541 continue
542 patchlines.append(line)
543 except UnicodeDecodeError:
544 continue
545 break
546 else:
547 raise PatchError('Unable to find a character encoding to decode %s' % srcfile)
548
549 sha1 = patchlines[0].split()[1]
550 notes = GitApplyTree.getNotes(os.path.join(tree, name), sha1)
551 if GitApplyTree.ignore_commit in notes:
468 continue 552 continue
469 break 553 outfile = notes.get(GitApplyTree.original_patch, os.path.basename(srcfile))
470 else: 554
471 raise PatchError('Unable to find a character encoding to decode %s' % srcfile) 555 bb.utils.mkdirhier(os.path.join(outdir, name))
472 556 with open(os.path.join(outdir, name, outfile), 'w') as of:
473 if not outfile: 557 for line in patchlines:
474 outfile = os.path.basename(srcfile) 558 of.write(line)
475 with open(os.path.join(outdir, outfile), 'w') as of:
476 for line in patchlines:
477 of.write(line)
478 finally: 559 finally:
479 shutil.rmtree(tempdir) 560 shutil.rmtree(tempdir)
480 561
562 def _need_dirty_check(self):
563 fetch = bb.fetch2.Fetch([], self.d)
564 check_dirtyness = False
565 for url in fetch.urls:
566 url_data = fetch.ud[url]
567 parm = url_data.parm
 568 # a git url with a subpath param will surely be dirty
 569 # since the git tree from which we clone will be stripped
 570 # of all files that are not in the subpath
571 if url_data.type == 'git' and parm.get('subpath'):
572 check_dirtyness = True
573 return check_dirtyness
574
575 def _commitpatch(self, patch, patchfilevar):
576 output = ""
577 # Add all files
578 shellcmd = ["git", "add", "-f", "-A", "."]
579 output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
580 # Exclude the patches directory
581 shellcmd = ["git", "reset", "HEAD", self.patchdir]
582 output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
583 # Commit the result
584 (tmpfile, shellcmd) = self.prepareCommit(patch['file'], self.commituser, self.commitemail)
585 try:
586 shellcmd.insert(0, patchfilevar)
587 output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
588 finally:
589 os.remove(tmpfile)
590 return output
591
481 def _applypatch(self, patch, force = False, reverse = False, run = True): 592 def _applypatch(self, patch, force = False, reverse = False, run = True):
482 import shutil 593 import shutil
483 594
@@ -492,27 +603,26 @@ class GitApplyTree(PatchTree):
492 603
493 return runcmd(["sh", "-c", " ".join(shellcmd)], self.dir) 604 return runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
494 605
495 # Add hooks which add a pointer to the original patch file name in the commit message
496 reporoot = (runcmd("git rev-parse --show-toplevel".split(), self.dir) or '').strip() 606 reporoot = (runcmd("git rev-parse --show-toplevel".split(), self.dir) or '').strip()
497 if not reporoot: 607 if not reporoot:
498 raise Exception("Cannot get repository root for directory %s" % self.dir) 608 raise Exception("Cannot get repository root for directory %s" % self.dir)
499 hooks_dir = os.path.join(reporoot, '.git', 'hooks') 609
500 hooks_dir_backup = hooks_dir + '.devtool-orig' 610 patch_applied = True
501 if os.path.lexists(hooks_dir_backup):
502 raise Exception("Git hooks backup directory already exists: %s" % hooks_dir_backup)
503 if os.path.lexists(hooks_dir):
504 shutil.move(hooks_dir, hooks_dir_backup)
505 os.mkdir(hooks_dir)
506 commithook = os.path.join(hooks_dir, 'commit-msg')
507 applyhook = os.path.join(hooks_dir, 'applypatch-msg')
508 with open(commithook, 'w') as f:
509 # NOTE: the formatting here is significant; if you change it you'll also need to
510 # change other places which read it back
511 f.write('echo "\n%s: $PATCHFILE" >> $1' % GitApplyTree.patch_line_prefix)
512 os.chmod(commithook, 0o755)
513 shutil.copy2(commithook, applyhook)
514 try: 611 try:
515 patchfilevar = 'PATCHFILE="%s"' % os.path.basename(patch['file']) 612 patchfilevar = 'PATCHFILE="%s"' % os.path.basename(patch['file'])
613 if self._need_dirty_check():
614 # Check dirtyness of the tree
615 try:
616 output = runcmd(["git", "--work-tree=%s" % reporoot, "status", "--short"])
617 except CmdError:
618 pass
619 else:
620 if output:
 621 # The tree is dirty, so there is no point trying to apply patches
 622 # with git anymore since they would fail; fall back directly to patch
623 output = PatchTree._applypatch(self, patch, force, reverse, run)
624 output += self._commitpatch(patch, patchfilevar)
625 return output
516 try: 626 try:
517 shellcmd = [patchfilevar, "git", "--work-tree=%s" % reporoot] 627 shellcmd = [patchfilevar, "git", "--work-tree=%s" % reporoot]
518 self.gitCommandUserOptions(shellcmd, self.commituser, self.commitemail) 628 self.gitCommandUserOptions(shellcmd, self.commituser, self.commitemail)
@@ -539,24 +649,14 @@ class GitApplyTree(PatchTree):
539 except CmdError: 649 except CmdError:
540 # Fall back to patch 650 # Fall back to patch
541 output = PatchTree._applypatch(self, patch, force, reverse, run) 651 output = PatchTree._applypatch(self, patch, force, reverse, run)
542 # Add all files 652 output += self._commitpatch(patch, patchfilevar)
543 shellcmd = ["git", "add", "-f", "-A", "."]
544 output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
545 # Exclude the patches directory
546 shellcmd = ["git", "reset", "HEAD", self.patchdir]
547 output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
548 # Commit the result
549 (tmpfile, shellcmd) = self.prepareCommit(patch['file'], self.commituser, self.commitemail)
550 try:
551 shellcmd.insert(0, patchfilevar)
552 output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
553 finally:
554 os.remove(tmpfile)
555 return output 653 return output
654 except:
655 patch_applied = False
656 raise
556 finally: 657 finally:
557 shutil.rmtree(hooks_dir) 658 if patch_applied:
558 if os.path.lexists(hooks_dir_backup): 659 GitApplyTree.addNote(self.dir, "HEAD", GitApplyTree.original_patch, os.path.basename(patch['file']), self.commituser, self.commitemail)
559 shutil.move(hooks_dir_backup, hooks_dir)
560 660
561 661
562class QuiltTree(PatchSet): 662class QuiltTree(PatchSet):
@@ -579,6 +679,8 @@ class QuiltTree(PatchSet):
579 679
580 def Clean(self): 680 def Clean(self):
581 try: 681 try:
682 # make sure that patches/series file exists before quilt pop to keep quilt-0.67 happy
683 open(os.path.join(self.dir, "patches","series"), 'a').close()
582 self._runcmd(["pop", "-a", "-f"]) 684 self._runcmd(["pop", "-a", "-f"])
583 oe.path.remove(os.path.join(self.dir, "patches","series")) 685 oe.path.remove(os.path.join(self.dir, "patches","series"))
584 except Exception: 686 except Exception:
@@ -715,8 +817,9 @@ class NOOPResolver(Resolver):
715 self.patchset.Push() 817 self.patchset.Push()
716 except Exception: 818 except Exception:
717 import sys 819 import sys
718 os.chdir(olddir)
719 raise 820 raise
821 finally:
822 os.chdir(olddir)
720 823
721# Patch resolver which relies on the user doing all the work involved in the 824# Patch resolver which relies on the user doing all the work involved in the
722# resolution, with the exception of refreshing the remote copy of the patch 825# resolution, with the exception of refreshing the remote copy of the patch
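The fix above moves os.chdir(olddir) into a finally block so the working directory is restored whether Push() succeeds or raises (the same change is applied to UserResolver below). The guarantee, expressed as a reusable context manager (name is illustrative):

    import os
    from contextlib import contextmanager

    @contextmanager
    def pushd(directory):
        """Enter 'directory', then restore the previous working directory
        on both success and error."""
        olddir = os.path.abspath(os.curdir)
        os.chdir(directory)
        try:
            yield
        finally:
            os.chdir(olddir)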
@@ -776,12 +879,12 @@ class UserResolver(Resolver):
776 # User did not fix the problem. Abort. 879 # User did not fix the problem. Abort.
777 raise PatchError("Patch application failed, and user did not fix and refresh the patch.") 880 raise PatchError("Patch application failed, and user did not fix and refresh the patch.")
778 except Exception: 881 except Exception:
779 os.chdir(olddir)
780 raise 882 raise
781 os.chdir(olddir) 883 finally:
884 os.chdir(olddir)
782 885
783 886
784def patch_path(url, fetch, workdir, expand=True): 887def patch_path(url, fetch, unpackdir, expand=True):
785 """Return the local path of a patch, or return nothing if this isn't a patch""" 888 """Return the local path of a patch, or return nothing if this isn't a patch"""
786 889
787 local = fetch.localpath(url) 890 local = fetch.localpath(url)
@@ -790,7 +893,7 @@ def patch_path(url, fetch, workdir, expand=True):
790 base, ext = os.path.splitext(os.path.basename(local)) 893 base, ext = os.path.splitext(os.path.basename(local))
791 if ext in ('.gz', '.bz2', '.xz', '.Z'): 894 if ext in ('.gz', '.bz2', '.xz', '.Z'):
792 if expand: 895 if expand:
793 local = os.path.join(workdir, base) 896 local = os.path.join(unpackdir, base)
794 ext = os.path.splitext(base)[1] 897 ext = os.path.splitext(base)[1]
795 898
796 urldata = fetch.ud[url] 899 urldata = fetch.ud[url]
@@ -804,12 +907,12 @@ def patch_path(url, fetch, workdir, expand=True):
804 return local 907 return local
805 908
806def src_patches(d, all=False, expand=True): 909def src_patches(d, all=False, expand=True):
807 workdir = d.getVar('WORKDIR') 910 unpackdir = d.getVar('UNPACKDIR')
808 fetch = bb.fetch2.Fetch([], d) 911 fetch = bb.fetch2.Fetch([], d)
809 patches = [] 912 patches = []
810 sources = [] 913 sources = []
811 for url in fetch.urls: 914 for url in fetch.urls:
812 local = patch_path(url, fetch, workdir, expand) 915 local = patch_path(url, fetch, unpackdir, expand)
813 if not local: 916 if not local:
814 if all: 917 if all:
815 local = fetch.localpath(url) 918 local = fetch.localpath(url)
@@ -898,4 +1001,3 @@ def should_apply(parm, d):
898 return False, "applies to later version" 1001 return False, "applies to later version"
899 1002
900 return True, None 1003 return True, None
901
diff --git a/meta/lib/oe/path.py b/meta/lib/oe/path.py
index c8d8ad05b9..a1efe97d88 100644
--- a/meta/lib/oe/path.py
+++ b/meta/lib/oe/path.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -8,6 +10,8 @@ import shutil
8import subprocess 10import subprocess
9import os.path 11import os.path
10 12
13import bb.parse
14
11def join(*paths): 15def join(*paths):
12 """Like os.path.join but doesn't treat absolute RHS specially""" 16 """Like os.path.join but doesn't treat absolute RHS specially"""
13 return os.path.normpath("/".join(paths)) 17 return os.path.normpath("/".join(paths))
@@ -75,6 +79,7 @@ def replace_absolute_symlinks(basedir, d):
75 os.remove(path) 79 os.remove(path)
76 os.symlink(base, path) 80 os.symlink(base, path)
77 81
82@bb.parse.vardepsexclude("TOPDIR")
78def format_display(path, metadata): 83def format_display(path, metadata):
79 """ Prepare a path for display to the user. """ 84 """ Prepare a path for display to the user. """
80 rel = relative(metadata.getVar("TOPDIR"), path) 85 rel = relative(metadata.getVar("TOPDIR"), path)
@@ -123,7 +128,8 @@ def copyhardlinktree(src, dst):
123 if os.path.isdir(src): 128 if os.path.isdir(src):
124 if len(glob.glob('%s/.??*' % src)) > 0: 129 if len(glob.glob('%s/.??*' % src)) > 0:
125 source = './.??* ' 130 source = './.??* '
126 source += './*' 131 if len(glob.glob('%s/**' % src)) > 0:
132 source += './*'
127 s_dir = src 133 s_dir = src
128 else: 134 else:
129 source = src 135 source = src
@@ -169,6 +175,9 @@ def symlink(source, destination, force=False):
169 if e.errno != errno.EEXIST or os.readlink(destination) != source: 175 if e.errno != errno.EEXIST or os.readlink(destination) != source:
170 raise 176 raise
171 177
178def relsymlink(target, name, force=False):
179 symlink(os.path.relpath(target, os.path.dirname(name)), name, force=force)
180
172def find(dir, **walkoptions): 181def find(dir, **walkoptions):
173 """ Given a directory, recurses into that directory, 182 """ Given a directory, recurses into that directory,
174 returning all files as absolute paths. """ 183 returning all files as absolute paths. """
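The new relsymlink() helper creates a link pointing at its target via a relative path, which keeps trees such as pkgdata relocatable. A standalone sketch with illustrative paths:

    import os

    def relsymlink(target, name):
        # Create 'name' as a symlink to 'target' using a path relative
        # to the link's own directory.
        os.symlink(os.path.relpath(target, os.path.dirname(name)), name)

    # relsymlink("/pkgdata/runtime/libfoo1", "/pkgdata/runtime-reverse/libfoo1")
    # creates runtime-reverse/libfoo1 -> ../runtime/libfoo1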
diff --git a/meta/lib/oe/prservice.py b/meta/lib/oe/prservice.py
index fcdbe66c19..c41242c878 100644
--- a/meta/lib/oe/prservice.py
+++ b/meta/lib/oe/prservice.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -7,11 +9,10 @@ def prserv_make_conn(d, check = False):
7 host_params = list([_f for _f in (d.getVar("PRSERV_HOST") or '').split(':') if _f]) 9 host_params = list([_f for _f in (d.getVar("PRSERV_HOST") or '').split(':') if _f])
8 try: 10 try:
9 conn = None 11 conn = None
10 conn = prserv.serv.PRServerConnection(host_params[0], int(host_params[1])) 12 conn = prserv.serv.connect(host_params[0], int(host_params[1]))
11 if check: 13 if check:
12 if not conn.ping(): 14 if not conn.ping():
13 raise Exception('service not available') 15 raise Exception('service not available')
14 d.setVar("__PRSERV_CONN",conn)
15 except Exception as exc: 16 except Exception as exc:
16 bb.fatal("Connecting to PR service %s:%s failed: %s" % (host_params[0], host_params[1], str(exc))) 17 bb.fatal("Connecting to PR service %s:%s failed: %s" % (host_params[0], host_params[1], str(exc)))
17 18
@@ -22,31 +23,29 @@ def prserv_dump_db(d):
22 bb.error("Not using network based PR service") 23 bb.error("Not using network based PR service")
23 return None 24 return None
24 25
25 conn = d.getVar("__PRSERV_CONN") 26 conn = prserv_make_conn(d)
26 if conn is None: 27 if conn is None:
27 conn = prserv_make_conn(d) 28 bb.error("Making connection failed to remote PR service")
28 if conn is None: 29 return None
29 bb.error("Making connection failed to remote PR service")
30 return None
31 30
32 #dump db 31 #dump db
33 opt_version = d.getVar('PRSERV_DUMPOPT_VERSION') 32 opt_version = d.getVar('PRSERV_DUMPOPT_VERSION')
34 opt_pkgarch = d.getVar('PRSERV_DUMPOPT_PKGARCH') 33 opt_pkgarch = d.getVar('PRSERV_DUMPOPT_PKGARCH')
35 opt_checksum = d.getVar('PRSERV_DUMPOPT_CHECKSUM') 34 opt_checksum = d.getVar('PRSERV_DUMPOPT_CHECKSUM')
36 opt_col = ("1" == d.getVar('PRSERV_DUMPOPT_COL')) 35 opt_col = ("1" == d.getVar('PRSERV_DUMPOPT_COL'))
37 return conn.export(opt_version, opt_pkgarch, opt_checksum, opt_col) 36 d = conn.export(opt_version, opt_pkgarch, opt_checksum, opt_col)
37 conn.close()
38 return d
38 39
39def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksum=None): 40def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksum=None):
40 if not d.getVar('PRSERV_HOST'): 41 if not d.getVar('PRSERV_HOST'):
41 bb.error("Not using network based PR service") 42 bb.error("Not using network based PR service")
42 return None 43 return None
43 44
44 conn = d.getVar("__PRSERV_CONN") 45 conn = prserv_make_conn(d)
45 if conn is None: 46 if conn is None:
46 conn = prserv_make_conn(d) 47 bb.error("Making connection failed to remote PR service")
47 if conn is None: 48 return None
48 bb.error("Making connection failed to remote PR service")
49 return None
50 #get the entry values 49 #get the entry values
51 imported = [] 50 imported = []
52 prefix = "PRAUTO$" 51 prefix = "PRAUTO$"
@@ -70,6 +69,7 @@ def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksu
70 bb.error("importing(%s,%s,%s,%d) failed. DB may have larger value %d" % (version,pkgarch,checksum,value,ret)) 69 bb.error("importing(%s,%s,%s,%d) failed. DB may have larger value %d" % (version,pkgarch,checksum,value,ret))
71 else: 70 else:
72 imported.append((version,pkgarch,checksum,value)) 71 imported.append((version,pkgarch,checksum,value))
72 conn.close()
73 return imported 73 return imported
74 74
75def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False): 75def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False):
@@ -78,8 +78,7 @@ def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False):
78 bb.utils.mkdirhier(d.getVar('PRSERV_DUMPDIR')) 78 bb.utils.mkdirhier(d.getVar('PRSERV_DUMPDIR'))
79 df = d.getVar('PRSERV_DUMPFILE') 79 df = d.getVar('PRSERV_DUMPFILE')
80 #write data 80 #write data
81 lf = bb.utils.lockfile("%s.lock" % df) 81 with open(df, "a") as f, bb.utils.fileslocked(["%s.lock" % df]) as locks:
82 with open(df, "a") as f:
83 if metainfo: 82 if metainfo:
84 #dump column info 83 #dump column info
85 f.write("#PR_core_ver = \"%s\"\n\n" % metainfo['core_ver']); 84 f.write("#PR_core_ver = \"%s\"\n\n" % metainfo['core_ver']);
@@ -113,7 +112,6 @@ def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False):
113 if not nomax: 112 if not nomax:
114 for i in idx: 113 for i in idx:
115 f.write("PRAUTO_%s_%s = \"%s\"\n" % (str(datainfo[idx[i]]['version']),str(datainfo[idx[i]]['pkgarch']),str(datainfo[idx[i]]['value']))) 114 f.write("PRAUTO_%s_%s = \"%s\"\n" % (str(datainfo[idx[i]]['version']),str(datainfo[idx[i]]['pkgarch']),str(datainfo[idx[i]]['value'])))
116 bb.utils.unlockfile(lf)
117 115
118def prserv_check_avail(d): 116def prserv_check_avail(d):
119 host_params = list([_f for _f in (d.getVar("PRSERV_HOST") or '').split(':') if _f]) 117 host_params = list([_f for _f in (d.getVar("PRSERV_HOST") or '').split(':') if _f])
@@ -125,4 +123,5 @@ def prserv_check_avail(d):
125 except TypeError: 123 except TypeError:
126 bb.fatal('Undefined/incorrect PRSERV_HOST value. Format: "host:port"') 124 bb.fatal('Undefined/incorrect PRSERV_HOST value. Format: "host:port"')
127 else: 125 else:
128 prserv_make_conn(d, True) 126 conn = prserv_make_conn(d, True)
127 conn.close()
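The connection is no longer cached in the datastore as __PRSERV_CONN; each helper now opens its own connection via prserv.serv.connect() and closes it when done. The lifecycle, sketched with illustrative host/port and dump options (the export arguments follow the version/pkgarch/checksum/colinfo order used above):

    import prserv.serv

    def dump_pr_db(host, port):
        conn = prserv.serv.connect(host, port)
        try:
            if not conn.ping():
                raise Exception("service not available")
            return conn.export(None, None, None, False)
        finally:
            conn.close()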
diff --git a/meta/lib/oe/qa.py b/meta/lib/oe/qa.py
index e8a854a302..cd36cb5070 100644
--- a/meta/lib/oe/qa.py
+++ b/meta/lib/oe/qa.py
@@ -1,7 +1,10 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
7import ast
5import os, struct, mmap 8import os, struct, mmap
6 9
7class NotELFFileError(Exception): 10class NotELFFileError(Exception):
@@ -48,6 +51,9 @@ class ELFFile:
48 return self 51 return self
49 52
50 def __exit__(self, exc_type, exc_value, traceback): 53 def __exit__(self, exc_type, exc_value, traceback):
54 self.close()
55
56 def close(self):
51 if self.data: 57 if self.data:
52 self.data.close() 58 self.data.close()
53 59
@@ -128,6 +134,9 @@ class ELFFile:
128 """ 134 """
129 return self.getShort(ELFFile.E_MACHINE) 135 return self.getShort(ELFFile.E_MACHINE)
130 136
137 def set_objdump(self, cmd, output):
138 self.objdump_output[cmd] = output
139
131 def run_objdump(self, cmd, d): 140 def run_objdump(self, cmd, d):
132 import bb.process 141 import bb.process
133 import sys 142 import sys
@@ -171,6 +180,66 @@ def elf_machine_to_string(machine):
171 except: 180 except:
172 return "Unknown (%s)" % repr(machine) 181 return "Unknown (%s)" % repr(machine)
173 182
183def write_error(type, error, d):
184 logfile = d.getVar('QA_LOGFILE')
185 if logfile:
186 p = d.getVar('P')
187 with open(logfile, "a+") as f:
188 f.write("%s: %s [%s]\n" % (p, error, type))
189
190def handle_error_visitorcode(name, args):
191 execs = set()
192 contains = {}
193 warn = None
194 if isinstance(args[0], ast.Constant) and isinstance(args[0].value, str):
195 for i in ["ERROR_QA", "WARN_QA"]:
196 if i not in contains:
197 contains[i] = set()
198 contains[i].add(args[0].value)
199 else:
200 warn = args[0]
201 execs.add(name)
202 return contains, execs, warn
203
204def handle_error(error_class, error_msg, d):
205 if error_class in (d.getVar("ERROR_QA") or "").split():
206 write_error(error_class, error_msg, d)
207 bb.error("QA Issue: %s [%s]" % (error_msg, error_class))
208 d.setVar("QA_ERRORS_FOUND", "True")
209 return False
210 elif error_class in (d.getVar("WARN_QA") or "").split():
211 write_error(error_class, error_msg, d)
212 bb.warn("QA Issue: %s [%s]" % (error_msg, error_class))
213 else:
214 bb.note("QA Issue: %s [%s]" % (error_msg, error_class))
215 return True
216handle_error.visitorcode = handle_error_visitorcode
217
218def exit_with_message_if_errors(message, d):
219 qa_fatal_errors = bb.utils.to_boolean(d.getVar("QA_ERRORS_FOUND"), False)
220 if qa_fatal_errors:
221 bb.fatal(message)
222
223def exit_if_errors(d):
224 exit_with_message_if_errors("Fatal QA errors were found, failing task.", d)
225
226def check_upstream_status(fullpath):
227 import re
228 kinda_status_re = re.compile(r"^.*upstream.*status.*$", re.IGNORECASE | re.MULTILINE)
229 strict_status_re = re.compile(r"^Upstream-Status: (Pending|Submitted|Denied|Inappropriate|Backport|Inactive-Upstream)( .+)?$", re.MULTILINE)
230 guidelines = "https://docs.yoctoproject.org/contributor-guide/recipe-style-guide.html#patch-upstream-status"
231
232 with open(fullpath, encoding='utf-8', errors='ignore') as f:
233 file_content = f.read()
234 match_kinda = kinda_status_re.search(file_content)
235 match_strict = strict_status_re.search(file_content)
236
237 if not match_strict:
238 if match_kinda:
239 return "Malformed Upstream-Status in patch\n%s\nPlease correct according to %s :\n%s" % (fullpath, guidelines, match_kinda.group(0))
240 else:
241 return "Missing Upstream-Status in patch\n%s\nPlease add according to %s ." % (fullpath, guidelines)
242
174if __name__ == "__main__": 243if __name__ == "__main__":
175 import sys 244 import sys
176 245
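check_upstream_status() above enforces the tag format from the contributor guide; a patch passes the strict check only with a header line such as:

    Upstream-Status: Backport [https://github.com/example/project/commit/abcd123]

A usage sketch (the patch path and datastore d are illustrative; "patch-status" is assumed here as the QA class name):

    issue = check_upstream_status("/path/to/0001-fix.patch")
    if issue:
        handle_error("patch-status", issue, d)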
diff --git a/meta/lib/oe/qemu.py b/meta/lib/oe/qemu.py
new file mode 100644
index 0000000000..769865036c
--- /dev/null
+++ b/meta/lib/oe/qemu.py
@@ -0,0 +1,54 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7def qemu_target_binary(d):
8 package_arch = d.getVar("PACKAGE_ARCH")
9 qemu_target_binary = (d.getVar("QEMU_TARGET_BINARY_%s" % package_arch) or "")
10 if qemu_target_binary:
11 return qemu_target_binary
12
13 target_arch = d.getVar("TARGET_ARCH")
14 if target_arch in ("i486", "i586", "i686"):
15 target_arch = "i386"
16 elif target_arch == "powerpc":
17 target_arch = "ppc"
18 elif target_arch == "powerpc64":
19 target_arch = "ppc64"
20 elif target_arch == "powerpc64le":
21 target_arch = "ppc64le"
22
23 return "qemu-" + target_arch
24
25def qemu_wrapper_cmdline(d, rootfs_path, library_paths, qemu_options=None):
26 import string
27
28 package_arch = d.getVar("PACKAGE_ARCH")
29 if package_arch == "all":
30 return "false"
31
32 qemu_binary = qemu_target_binary(d)
33 if qemu_binary == "qemu-allarch":
34 qemu_binary = "qemuwrapper"
35
 36 if qemu_options is None:
37 qemu_options = d.getVar("QEMU_OPTIONS") or ""
38
39 return "PSEUDO_UNLOAD=1 " + qemu_binary + " " + qemu_options + " -L " + rootfs_path\
40 + " -E LD_LIBRARY_PATH=" + ":".join(library_paths) + " "
41
 42# The following function returns a string containing the command needed to
 43# run a certain binary through qemu. For example, if a postinstall scriptlet
 44# has to run at do_rootfs time but the postinstall is architecture
 45# dependent, it can be run through qemu. For example, in the
 46# postinstall scriptlet, we could use the following:
47#
48# ${@qemu_run_binary(d, '$D', '/usr/bin/test_app')} [test_app arguments]
49#
50def qemu_run_binary(d, rootfs_path, binary):
51 libdir = rootfs_path + d.getVar("libdir", False)
52 base_libdir = rootfs_path + d.getVar("base_libdir", False)
53
54 return qemu_wrapper_cmdline(d, rootfs_path, [libdir, base_libdir]) + rootfs_path + binary
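For illustration, with TARGET_ARCH set to "powerpc64le" (and no QEMU_TARGET_BINARY override or "all" PACKAGE_ARCH), qemu_wrapper_cmdline(d, '/rootfs', ['/rootfs/usr/lib', '/rootfs/lib']) expands to roughly:

    PSEUDO_UNLOAD=1 qemu-ppc64le <QEMU_OPTIONS> -L /rootfs -E LD_LIBRARY_PATH=/rootfs/usr/lib:/rootfs/lib 

(values illustrative; the trailing space lets callers append the binary path, as qemu_run_binary() does).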
diff --git a/meta/lib/oe/recipeutils.py b/meta/lib/oe/recipeutils.py
index 407d168894..044f1bfa61 100644
--- a/meta/lib/oe/recipeutils.py
+++ b/meta/lib/oe/recipeutils.py
@@ -24,9 +24,9 @@ from collections import OrderedDict, defaultdict
24from bb.utils import vercmp_string 24from bb.utils import vercmp_string
25 25
26# Help us to find places to insert values 26# Help us to find places to insert values
27recipe_progression = ['SUMMARY', 'DESCRIPTION', 'AUTHOR', 'HOMEPAGE', 'BUGTRACKER', 'SECTION', 'LICENSE', 'LICENSE_FLAGS', 'LIC_FILES_CHKSUM', 'PROVIDES', 'DEPENDS', 'PR', 'PV', 'SRCREV', 'SRCPV', 'SRC_URI', 'S', 'do_fetch()', 'do_unpack()', 'do_patch()', 'EXTRA_OECONF', 'EXTRA_OECMAKE', 'EXTRA_OESCONS', 'do_configure()', 'EXTRA_OEMAKE', 'do_compile()', 'do_install()', 'do_populate_sysroot()', 'INITSCRIPT', 'USERADD', 'GROUPADD', 'PACKAGES', 'FILES', 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RPROVIDES', 'RREPLACES', 'RCONFLICTS', 'ALLOW_EMPTY', 'populate_packages()', 'do_package()', 'do_deploy()', 'BBCLASSEXTEND'] 27recipe_progression = ['SUMMARY', 'DESCRIPTION', 'HOMEPAGE', 'BUGTRACKER', 'SECTION', 'LICENSE', 'LICENSE_FLAGS', 'LIC_FILES_CHKSUM', 'PROVIDES', 'DEPENDS', 'PR', 'PV', 'SRCREV', 'SRC_URI', 'S', 'do_fetch()', 'do_unpack()', 'do_patch()', 'EXTRA_OECONF', 'EXTRA_OECMAKE', 'EXTRA_OESCONS', 'do_configure()', 'EXTRA_OEMAKE', 'do_compile()', 'do_install()', 'do_populate_sysroot()', 'INITSCRIPT', 'USERADD', 'GROUPADD', 'PACKAGES', 'FILES', 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RPROVIDES', 'RREPLACES', 'RCONFLICTS', 'ALLOW_EMPTY', 'populate_packages()', 'do_package()', 'do_deploy()', 'BBCLASSEXTEND']
28# Variables that sometimes are a bit long but shouldn't be wrapped 28# Variables that sometimes are a bit long but shouldn't be wrapped
29nowrap_vars = ['SUMMARY', 'HOMEPAGE', 'BUGTRACKER', r'SRC_URI\[(.+\.)?md5sum\]', r'SRC_URI\[(.+\.)?sha256sum\]'] 29nowrap_vars = ['SUMMARY', 'HOMEPAGE', 'BUGTRACKER', r'SRC_URI\[(.+\.)?md5sum\]', r'SRC_URI\[(.+\.)?sha[0-9]+sum\]']
30list_vars = ['SRC_URI', 'LIC_FILES_CHKSUM'] 30list_vars = ['SRC_URI', 'LIC_FILES_CHKSUM']
31meta_vars = ['SUMMARY', 'DESCRIPTION', 'HOMEPAGE', 'BUGTRACKER', 'SECTION'] 31meta_vars = ['SUMMARY', 'DESCRIPTION', 'HOMEPAGE', 'BUGTRACKER', 'SECTION']
32 32
@@ -47,7 +47,7 @@ def simplify_history(history, d):
47 continue 47 continue
48 has_set = True 48 has_set = True
49 elif event['op'] in ('append', 'prepend', 'postdot', 'predot'): 49 elif event['op'] in ('append', 'prepend', 'postdot', 'predot'):
50 # Reminder: "append" and "prepend" mean += and =+ respectively, NOT _append / _prepend 50 # Reminder: "append" and "prepend" mean += and =+ respectively, NOT :append / :prepend
51 if has_set: 51 if has_set:
52 continue 52 continue
53 ret_history.insert(0, event) 53 ret_history.insert(0, event)
@@ -342,7 +342,7 @@ def patch_recipe(d, fn, varvalues, patch=False, relpath='', redirect_output=None
342 def override_applicable(hevent): 342 def override_applicable(hevent):
343 op = hevent['op'] 343 op = hevent['op']
344 if '[' in op: 344 if '[' in op:
345 opoverrides = op.split('[')[1].split(']')[0].split('_') 345 opoverrides = op.split('[')[1].split(']')[0].split(':')
346 for opoverride in opoverrides: 346 for opoverride in opoverrides:
347 if not opoverride in overrides: 347 if not opoverride in overrides:
348 return False 348 return False
@@ -368,13 +368,13 @@ def patch_recipe(d, fn, varvalues, patch=False, relpath='', redirect_output=None
368 recipe_set = True 368 recipe_set = True
369 if not recipe_set: 369 if not recipe_set:
370 for event in history: 370 for event in history:
371 if event['op'].startswith('_remove'): 371 if event['op'].startswith(':remove'):
372 continue 372 continue
373 if not override_applicable(event): 373 if not override_applicable(event):
374 continue 374 continue
375 newvalue = value.replace(event['detail'], '') 375 newvalue = value.replace(event['detail'], '')
376 if newvalue == value and os.path.abspath(event['file']) == fn and event['op'].startswith('_'): 376 if newvalue == value and os.path.abspath(event['file']) == fn and event['op'].startswith(':'):
377 op = event['op'].replace('[', '_').replace(']', '') 377 op = event['op'].replace('[', ':').replace(']', '')
378 extravals[var + op] = None 378 extravals[var + op] = None
379 value = newvalue 379 value = newvalue
380 vals[var] = ('+=', value) 380 vals[var] = ('+=', value)
@@ -414,15 +414,13 @@ def copy_recipe_files(d, tgt_dir, whole_dir=False, download=True, all_variants=F
414 414
415 fetch_urls(d) 415 fetch_urls(d)
416 if all_variants: 416 if all_variants:
417 # Get files for other variants e.g. in the case of a SRC_URI_append 417 # Get files for other variants e.g. in the case of a SRC_URI:append
418 localdata = bb.data.createCopy(d) 418 localdata = bb.data.createCopy(d)
419 variants = (localdata.getVar('BBCLASSEXTEND') or '').split() 419 variants = (localdata.getVar('BBCLASSEXTEND') or '').split()
420 if variants: 420 if variants:
421 # Ensure we handle class-target if we're dealing with one of the variants 421 # Ensure we handle class-target if we're dealing with one of the variants
422 variants.append('target') 422 variants.append('target')
423 for variant in variants: 423 for variant in variants:
424 if variant.startswith("devupstream"):
425 localdata.setVar('SRCPV', 'git')
426 localdata.setVar('CLASSOVERRIDE', 'class-%s' % variant) 424 localdata.setVar('CLASSOVERRIDE', 'class-%s' % variant)
427 fetch_urls(localdata) 425 fetch_urls(localdata)
428 426
@@ -666,19 +664,23 @@ def get_bbappend_path(d, destlayerdir, wildcardver=False):
666 return (appendpath, pathok) 664 return (appendpath, pathok)
667 665
668 666
669def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, machine=None, extralines=None, removevalues=None, redirect_output=None): 667def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, machine=None, extralines=None, removevalues=None, redirect_output=None, params=None, update_original_recipe=False):
670 """ 668 """
671 Writes a bbappend file for a recipe 669 Writes a bbappend file for a recipe
672 Parameters: 670 Parameters:
673 rd: data dictionary for the recipe 671 rd: data dictionary for the recipe
674 destlayerdir: base directory of the layer to place the bbappend in 672 destlayerdir: base directory of the layer to place the bbappend in
675 (subdirectory path from there will be determined automatically) 673 (subdirectory path from there will be determined automatically)
676 srcfiles: dict of source files to add to SRC_URI, where the value 674 srcfiles: dict of source files to add to SRC_URI, where the key
677 is the full path to the file to be added, and the value is the 675 is the full path to the file to be added, and the value is a
 678 original filename as it would appear in SRC_URI or None if it 676 dict with the following optional keys:
679 isn't already present. You may pass None for this parameter if 677 path: the original filename as it would appear in SRC_URI
680 you simply want to specify your own content via the extralines 678 or None if it isn't already present.
681 parameter. 679 patchdir: the patchdir parameter
680 newname: the name to give to the new added file. None to use
681 the default value: basename(path)
682 You may pass None for this parameter if you simply want to specify
683 your own content via the extralines parameter.
682 install: dict mapping entries in srcfiles to a tuple of two elements: 684 install: dict mapping entries in srcfiles to a tuple of two elements:
683 install path (*without* ${D} prefix) and permission value (as a 685 install path (*without* ${D} prefix) and permission value (as a
684 string, e.g. '0644'). 686 string, e.g. '0644').
@@ -696,18 +698,32 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
696 redirect_output: 698 redirect_output:
697 If specified, redirects writing the output file to the 699 If specified, redirects writing the output file to the
698 specified directory (for dry-run purposes) 700 specified directory (for dry-run purposes)
701 params:
702 Parameters to use when adding entries to SRC_URI. If specified,
703 should be a list of dicts with the same length as srcfiles.
704 update_original_recipe:
 705 Force updating the original recipe instead of creating/updating
 706 a bbappend. destlayerdir must contain the original recipe
699 """ 707 """
700 708
701 if not removevalues: 709 if not removevalues:
702 removevalues = {} 710 removevalues = {}
703 711
704 # Determine how the bbappend should be named 712 recipefile = rd.getVar('FILE')
705 appendpath, pathok = get_bbappend_path(rd, destlayerdir, wildcardver) 713 if update_original_recipe:
706 if not appendpath: 714 if destlayerdir not in recipefile:
707 bb.error('Unable to determine layer directory containing %s' % recipefile) 715 bb.error("destlayerdir %s doesn't contain the original recipe (%s), cannot update it" % (destlayerdir, recipefile))
708 return (None, None) 716 return (None, None)
709 if not pathok: 717
710 bb.warn('Unable to determine correct subdirectory path for bbappend file - check that what %s adds to BBFILES also matches .bbappend files. Using %s for now, but until you fix this the bbappend will not be applied.' % (os.path.join(destlayerdir, 'conf', 'layer.conf'), os.path.dirname(appendpath))) 718 appendpath = recipefile
719 else:
720 # Determine how the bbappend should be named
721 appendpath, pathok = get_bbappend_path(rd, destlayerdir, wildcardver)
722 if not appendpath:
723 bb.error('Unable to determine layer directory containing %s' % recipefile)
724 return (None, None)
725 if not pathok:
726 bb.warn('Unable to determine correct subdirectory path for bbappend file - check that what %s adds to BBFILES also matches .bbappend files. Using %s for now, but until you fix this the bbappend will not be applied.' % (os.path.join(destlayerdir, 'conf', 'layer.conf'), os.path.dirname(appendpath)))
711 727
712 appenddir = os.path.dirname(appendpath) 728 appenddir = os.path.dirname(appendpath)
713 if not redirect_output: 729 if not redirect_output:
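A call shaped by the updated docstring might look like this (every path and parameter value is illustrative):

    srcfiles = {
        '/work/devtool/0001-fix.patch': {
            'path': None,                  # not yet present in SRC_URI
            'patchdir': 'src',             # optional patchdir parameter
            'newname': '0001-fix.patch',   # defaults to basename(path)
        },
    }
    params = [{'striplevel': '2'}]         # one dict per srcfiles entry
    bbappend_recipe(rd, '/path/to/meta-custom', srcfiles, params=params)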
@@ -752,30 +768,48 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
752 bbappendlines.append((varname, op, value)) 768 bbappendlines.append((varname, op, value))
753 769
754 destsubdir = rd.getVar('PN') 770 destsubdir = rd.getVar('PN')
755 if srcfiles: 771 if not update_original_recipe and srcfiles:
756 bbappendlines.append(('FILESEXTRAPATHS_prepend', ':=', '${THISDIR}/${PN}:')) 772 bbappendlines.append(('FILESEXTRAPATHS:prepend', ':=', '${THISDIR}/${PN}:'))
757 773
758 appendoverride = '' 774 appendoverride = ''
759 if machine: 775 if machine:
760 bbappendlines.append(('PACKAGE_ARCH', '=', '${MACHINE_ARCH}')) 776 bbappendlines.append(('PACKAGE_ARCH', '=', '${MACHINE_ARCH}'))
761 appendoverride = '_%s' % machine 777 appendoverride = ':%s' % machine
762 copyfiles = {} 778 copyfiles = {}
763 if srcfiles: 779 if srcfiles:
764 instfunclines = [] 780 instfunclines = []
765 for newfile, origsrcfile in srcfiles.items(): 781 for i, (newfile, param) in enumerate(srcfiles.items()):
766 srcfile = origsrcfile
767 srcurientry = None 782 srcurientry = None
768 if not srcfile: 783 if not 'path' in param or not param['path']:
769 srcfile = os.path.basename(newfile) 784 if 'newname' in param and param['newname']:
785 srcfile = param['newname']
786 else:
787 srcfile = os.path.basename(newfile)
770 srcurientry = 'file://%s' % srcfile 788 srcurientry = 'file://%s' % srcfile
789 oldentry = None
790 for uri in rd.getVar('SRC_URI').split():
791 if srcurientry in uri:
792 oldentry = uri
793 if params and params[i]:
794 srcurientry = '%s;%s' % (srcurientry, ';'.join('%s=%s' % (k,v) for k,v in params[i].items()))
771 # Double-check it's not there already 795 # Double-check it's not there already
772 # FIXME do we care if the entry is added by another bbappend that might go away? 796 # FIXME do we care if the entry is added by another bbappend that might go away?
773 if not srcurientry in rd.getVar('SRC_URI').split(): 797 if not srcurientry in rd.getVar('SRC_URI').split():
774 if machine: 798 if machine:
775 appendline('SRC_URI_append%s' % appendoverride, '=', ' ' + srcurientry) 799 if oldentry:
800 appendline('SRC_URI:remove%s' % appendoverride, '=', ' ' + oldentry)
801 appendline('SRC_URI:append%s' % appendoverride, '=', ' ' + srcurientry)
776 else: 802 else:
803 if oldentry:
804 if update_original_recipe:
805 removevalues['SRC_URI'] = oldentry
806 else:
807 appendline('SRC_URI:remove', '=', oldentry)
777 appendline('SRC_URI', '+=', srcurientry) 808 appendline('SRC_URI', '+=', srcurientry)
778 copyfiles[newfile] = srcfile 809 param['path'] = srcfile
810 else:
811 srcfile = param['path']
812 copyfiles[newfile] = param
779 if install: 813 if install:
780 institem = install.pop(newfile, None) 814 institem = install.pop(newfile, None)
781 if institem: 815 if institem:
@@ -784,9 +818,9 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
784 instdirline = 'install -d ${D}%s' % os.path.dirname(instdestpath) 818 instdirline = 'install -d ${D}%s' % os.path.dirname(instdestpath)
785 if not instdirline in instfunclines: 819 if not instdirline in instfunclines:
786 instfunclines.append(instdirline) 820 instfunclines.append(instdirline)
787 instfunclines.append('install -m %s ${WORKDIR}/%s ${D}%s' % (perms, os.path.basename(srcfile), instdestpath)) 821 instfunclines.append('install -m %s ${UNPACKDIR}/%s ${D}%s' % (perms, os.path.basename(srcfile), instdestpath))
788 if instfunclines: 822 if instfunclines:
789 bbappendlines.append(('do_install_append%s()' % appendoverride, '', instfunclines)) 823 bbappendlines.append(('do_install:append%s()' % appendoverride, '', instfunclines))
790 824
791 if redirect_output: 825 if redirect_output:
792 bb.note('Writing append file %s (dry-run)' % appendpath) 826 bb.note('Writing append file %s (dry-run)' % appendpath)
@@ -795,6 +829,8 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
795 # multiple times per operation when we're handling overrides) 829 # multiple times per operation when we're handling overrides)
796 if os.path.exists(appendpath) and not os.path.exists(outfile): 830 if os.path.exists(appendpath) and not os.path.exists(outfile):
797 shutil.copy2(appendpath, outfile) 831 shutil.copy2(appendpath, outfile)
832 elif update_original_recipe:
833 outfile = recipefile
798 else: 834 else:
799 bb.note('Writing append file %s' % appendpath) 835 bb.note('Writing append file %s' % appendpath)
800 outfile = appendpath 836 outfile = appendpath
@@ -804,15 +840,15 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
804 extvars = {'destsubdir': destsubdir} 840 extvars = {'destsubdir': destsubdir}
805 841
806 def appendfile_varfunc(varname, origvalue, op, newlines): 842 def appendfile_varfunc(varname, origvalue, op, newlines):
807 if varname == 'FILESEXTRAPATHS_prepend': 843 if varname == 'FILESEXTRAPATHS:prepend':
808 if origvalue.startswith('${THISDIR}/'): 844 if origvalue.startswith('${THISDIR}/'):
809 popline('FILESEXTRAPATHS_prepend') 845 popline('FILESEXTRAPATHS:prepend')
810 extvars['destsubdir'] = rd.expand(origvalue.split('${THISDIR}/', 1)[1].rstrip(':')) 846 extvars['destsubdir'] = rd.expand(origvalue.split('${THISDIR}/', 1)[1].rstrip(':'))
811 elif varname == 'PACKAGE_ARCH': 847 elif varname == 'PACKAGE_ARCH':
812 if machine: 848 if machine:
813 popline('PACKAGE_ARCH') 849 popline('PACKAGE_ARCH')
814 return (machine, None, 4, False) 850 return (machine, None, 4, False)
815 elif varname.startswith('do_install_append'): 851 elif varname.startswith('do_install:append'):
816 func = popline(varname) 852 func = popline(varname)
817 if func: 853 if func:
818 instfunclines = [line.strip() for line in origvalue.strip('\n').splitlines()] 854 instfunclines = [line.strip() for line in origvalue.strip('\n').splitlines()]
@@ -824,7 +860,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
824 splitval = split_var_value(origvalue, assignment=False) 860 splitval = split_var_value(origvalue, assignment=False)
825 changed = False 861 changed = False
826 removevar = varname 862 removevar = varname
827 if varname in ['SRC_URI', 'SRC_URI_append%s' % appendoverride]: 863 if varname in ['SRC_URI', 'SRC_URI:append%s' % appendoverride]:
828 removevar = 'SRC_URI' 864 removevar = 'SRC_URI'
829 line = popline(varname) 865 line = popline(varname)
830 if line: 866 if line:
@@ -853,11 +889,11 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
853 newvalue = splitval 889 newvalue = splitval
854 if len(newvalue) == 1: 890 if len(newvalue) == 1:
855 # Ensure it's written out as one line 891 # Ensure it's written out as one line
856 if '_append' in varname: 892 if ':append' in varname:
857 newvalue = ' ' + newvalue[0] 893 newvalue = ' ' + newvalue[0]
858 else: 894 else:
859 newvalue = newvalue[0] 895 newvalue = newvalue[0]
860 if not newvalue and (op in ['+=', '.='] or '_append' in varname): 896 if not newvalue and (op in ['+=', '.='] or ':append' in varname):
861 # There's no point appending nothing 897 # There's no point appending nothing
862 newvalue = None 898 newvalue = None
863 if varname.endswith('()'): 899 if varname.endswith('()'):
@@ -898,7 +934,12 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
898 outdir = redirect_output 934 outdir = redirect_output
899 else: 935 else:
900 outdir = appenddir 936 outdir = appenddir
901 for newfile, srcfile in copyfiles.items(): 937 for newfile, param in copyfiles.items():
938 srcfile = param['path']
939 patchdir = param.get('patchdir', ".")
940
941 if patchdir != ".":
942 newfile = os.path.join(os.path.split(newfile)[0], patchdir, os.path.split(newfile)[1])
902 filedest = os.path.join(outdir, destsubdir, os.path.basename(srcfile)) 943 filedest = os.path.join(outdir, destsubdir, os.path.basename(srcfile))
903 if os.path.abspath(newfile) != os.path.abspath(filedest): 944 if os.path.abspath(newfile) != os.path.abspath(filedest):
904 if newfile.startswith(tempfile.gettempdir()): 945 if newfile.startswith(tempfile.gettempdir()):
@@ -942,10 +983,9 @@ def replace_dir_vars(path, d):
942 path = path.replace(dirpath, '${%s}' % dirvars[dirpath]) 983 path = path.replace(dirpath, '${%s}' % dirvars[dirpath])
943 return path 984 return path
944 985
945def get_recipe_pv_without_srcpv(pv, uri_type): 986def get_recipe_pv_with_pfx_sfx(pv, uri_type):
946 """ 987 """
947 Get PV without SRCPV common in SCM's for now only 988 Get PV separating prefix and suffix components.
948 support git.
949 989
950 Returns tuple with pv, prefix and suffix. 990 Returns tuple with pv, prefix and suffix.
951 """ 991 """
@@ -953,7 +993,7 @@ def get_recipe_pv_without_srcpv(pv, uri_type):
953 sfx = '' 993 sfx = ''
954 994
955 if uri_type == 'git': 995 if uri_type == 'git':
956 git_regex = re.compile(r"(?P<pfx>v?)(?P<ver>.*?)(?P<sfx>\+[^\+]*(git)?r?(AUTOINC\+))(?P<rev>.*)") 996 git_regex = re.compile(r"(?P<pfx>v?)(?P<ver>.*?)(?P<sfx>\+[^\+]*(git)?r?(AUTOINC\+)?)(?P<rev>.*)")
957 m = git_regex.match(pv) 997 m = git_regex.match(pv)
958 998
959 if m: 999 if m:
@@ -1005,7 +1045,7 @@ def get_recipe_upstream_version(rd):
1005 src_uri = src_uris.split()[0] 1045 src_uri = src_uris.split()[0]
1006 uri_type, _, _, _, _, _ = decodeurl(src_uri) 1046 uri_type, _, _, _, _, _ = decodeurl(src_uri)
1007 1047
1008 (pv, pfx, sfx) = get_recipe_pv_without_srcpv(rd.getVar('PV'), uri_type) 1048 (pv, pfx, sfx) = get_recipe_pv_with_pfx_sfx(rd.getVar('PV'), uri_type)
1009 ru['current_version'] = pv 1049 ru['current_version'] = pv
1010 1050
1011 manual_upstream_version = rd.getVar("RECIPE_UPSTREAM_VERSION") 1051 manual_upstream_version = rd.getVar("RECIPE_UPSTREAM_VERSION")
@@ -1029,10 +1069,16 @@ def get_recipe_upstream_version(rd):
1029 else: 1069 else:
1030 ud = bb.fetch2.FetchData(src_uri, rd) 1070 ud = bb.fetch2.FetchData(src_uri, rd)
1031 if rd.getVar("UPSTREAM_CHECK_COMMITS") == "1": 1071 if rd.getVar("UPSTREAM_CHECK_COMMITS") == "1":
1032 revision = ud.method.latest_revision(ud, rd, 'default') 1072 bb.fetch2.get_srcrev(rd)
1033 upversion = pv 1073 upversion = None
1034 if revision != rd.getVar("SRCREV"): 1074 revision = None
1035 upversion = upversion + "-new-commits-available" 1075 try:
1076 revision = ud.method.latest_revision(ud, rd, 'default')
1077 upversion = pv
1078 if revision != rd.getVar("SRCREV"):
1079 upversion = upversion + "-new-commits-available"
1080 except bb.fetch2.FetchError as e:
1081 bb.warn("Unable to obtain latest revision: {}".format(e))
1036 else: 1082 else:
1037 pupver = ud.method.latest_versionstring(ud, rd) 1083 pupver = ud.method.latest_versionstring(ud, rd)
1038 (upversion, revision) = pupver 1084 (upversion, revision) = pupver
@@ -1071,7 +1117,7 @@ def _get_recipe_upgrade_status(data):
1071 maintainer = data.getVar('RECIPE_MAINTAINER') 1117 maintainer = data.getVar('RECIPE_MAINTAINER')
1072 no_upgrade_reason = data.getVar('RECIPE_NO_UPDATE_REASON') 1118 no_upgrade_reason = data.getVar('RECIPE_NO_UPDATE_REASON')
1073 1119
1074 return (pn, status, cur_ver, next_ver, maintainer, revision, no_upgrade_reason) 1120 return {'pn':pn, 'status':status, 'cur_ver':cur_ver, 'next_ver':next_ver, 'maintainer':maintainer, 'revision':revision, 'no_upgrade_reason':no_upgrade_reason}
1075 1121
1076def get_recipe_upgrade_status(recipes=None): 1122def get_recipe_upgrade_status(recipes=None):
1077 pkgs_list = [] 1123 pkgs_list = []
@@ -1113,6 +1159,7 @@ def get_recipe_upgrade_status(recipes=None):
1113 if not recipes: 1159 if not recipes:
1114 recipes = tinfoil.all_recipe_files(variants=False) 1160 recipes = tinfoil.all_recipe_files(variants=False)
1115 1161
1162 recipeincludes = {}
1116 for fn in recipes: 1163 for fn in recipes:
1117 try: 1164 try:
1118 if fn.startswith("/"): 1165 if fn.startswith("/"):
@@ -1137,8 +1184,65 @@ def get_recipe_upgrade_status(recipes=None):
1137 1184
1138 data_copy_list.append(data_copy) 1185 data_copy_list.append(data_copy)
1139 1186
1187 recipeincludes[data.getVar('FILE')] = {'bbincluded':data.getVar('BBINCLUDED').split(),'pn':data.getVar('PN')}
1188
1140 from concurrent.futures import ProcessPoolExecutor 1189 from concurrent.futures import ProcessPoolExecutor
1141 with ProcessPoolExecutor(max_workers=utils.cpu_count()) as executor: 1190 with ProcessPoolExecutor(max_workers=utils.cpu_count()) as executor:
1142 pkgs_list = executor.map(_get_recipe_upgrade_status, data_copy_list) 1191 pkgs_list = executor.map(_get_recipe_upgrade_status, data_copy_list)
1143 1192
1144 return pkgs_list 1193 return _group_recipes(pkgs_list, _get_common_include_recipes(recipeincludes))
1194
1195def get_common_include_recipes():
1196 with bb.tinfoil.Tinfoil() as tinfoil:
1197 tinfoil.prepare(config_only=False)
1198
1199 recipes = tinfoil.all_recipe_files(variants=False)
1200
1201 recipeincludes = {}
1202 for fn in recipes:
1203 data = tinfoil.parse_recipe_file(fn)
1204 recipeincludes[fn] = {'bbincluded':data.getVar('BBINCLUDED').split(),'pn':data.getVar('PN')}
1205 return _get_common_include_recipes(recipeincludes)
1206
1207def _get_common_include_recipes(recipeincludes_all):
1208 recipeincludes = {}
1209 for fn,data in recipeincludes_all.items():
1210 bbincluded_filtered = [i for i in data['bbincluded'] if os.path.dirname(i) == os.path.dirname(fn) and i != fn]
1211 if bbincluded_filtered:
1212 recipeincludes[data['pn']] = bbincluded_filtered
1213
1214 recipeincludes_inverted = {}
1215 for k,v in recipeincludes.items():
1216 for i in v:
1217 recipeincludes_inverted.setdefault(i,set()).add(k)
1218
1219 recipeincludes_inverted_filtered = {k:v for k,v in recipeincludes_inverted.items() if len(v) > 1}
1220
1221 recipes_with_shared_includes = list()
1222 for v in recipeincludes_inverted_filtered.values():
1223 recipeset = v
1224 for v1 in recipeincludes_inverted_filtered.values():
1225 if recipeset.intersection(v1):
1226 recipeset.update(v1)
1227 if recipeset not in recipes_with_shared_includes:
1228 recipes_with_shared_includes.append(recipeset)
1229
1230 return recipes_with_shared_includes
1231
1232def _group_recipes(recipes, groups):
1233 recipedict = {}
1234 for r in recipes:
1235 recipedict[r['pn']] = r
1236
1237 recipegroups = []
1238 for g in groups:
1239 recipeset = []
1240 for r in g:
1241 if r in recipedict.keys():
1242 recipeset.append(recipedict[r])
1243 del recipedict[r]
1244 recipegroups.append(recipeset)
1245
1246 for r in recipedict.values():
1247 recipegroups.append([r])
1248 return recipegroups
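
A minimal sketch of consuming the new grouped return value (assumes a configured
BitBake environment; recipe names and versions are illustrative). Recipes that
share local include files now land in the same group; every other recipe becomes
a group of one:

    import oe.recipeutils

    # get_recipe_upgrade_status() now returns a list of groups of dicts
    # rather than a flat list of tuples, so callers iterate two levels deep.
    for group in oe.recipeutils.get_recipe_upgrade_status():
        for r in group:
            print("%(pn)s: %(cur_ver)s -> %(next_ver)s (%(status)s)" % r)
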
diff --git a/meta/lib/oe/reproducible.py b/meta/lib/oe/reproducible.py
index 204b9bd734..0270024a83 100644
--- a/meta/lib/oe/reproducible.py
+++ b/meta/lib/oe/reproducible.py
@@ -1,10 +1,63 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4import os 6import os
5import subprocess 7import subprocess
6import bb 8import bb
7 9
10# For reproducible builds, this code sets the default SOURCE_DATE_EPOCH in each
11# component's build environment. The format is number of seconds since the
12# system epoch.
13#
14# Upstream components (generally) respect this environment variable,
15# using it in place of the "current" date and time.
16# See https://reproducible-builds.org/specs/source-date-epoch/
17#
18# The default value of SOURCE_DATE_EPOCH comes from the function
19# get_source_date_epoch_value which reads from the SDE_FILE, or if the file
20# is not available will use the fallback of SOURCE_DATE_EPOCH_FALLBACK.
21#
22# The SDE_FILE is normally constructed from the function
 23# create_source_date_epoch_stamp which is typically added as a postfunc to

24# the do_unpack task. If a recipe does NOT have do_unpack, it should be added
25# to a task that runs after the source is available and before the
26# do_deploy_source_date_epoch task is executed.
27#
 28# If a recipe wishes to override the default behavior it should set its own
29# SOURCE_DATE_EPOCH or override the do_deploy_source_date_epoch_stamp task
30# with recipe-specific functionality to write the appropriate
31# SOURCE_DATE_EPOCH into the SDE_FILE.
32#
33# SOURCE_DATE_EPOCH is intended to be a reproducible value. This value should
34# be reproducible for anyone who builds the same revision from the same
35# sources.
36#
 37# There are 5 ways the create_source_date_epoch_stamp function determines what
38# becomes SOURCE_DATE_EPOCH:
39#
40# 1. Use the value from __source_date_epoch.txt file if this file exists.
41# This file was most likely created in the previous build by one of the
42# following methods 2,3,4.
43# Alternatively, it can be provided by a recipe via SRC_URI.
44#
45# If the file does not exist:
46#
47# 2. If there is a git checkout, use the last git commit timestamp.
48# Git does not preserve file timestamps on checkout.
49#
50# 3. Use the mtime of "known" files such as NEWS, CHANGELOG, ...
51# This works for well-kept repositories distributed via tarball.
52#
53# 4. Use the modification time of the youngest file in the source tree, if
54# there is one.
55# This will be the newest file from the distribution tarball, if any.
56#
57# 5. Fall back to a fixed timestamp (SOURCE_DATE_EPOCH_FALLBACK).
58#
59# Once the value is determined, it is stored in the recipe's SDE_FILE.
60
8def get_source_date_epoch_from_known_files(d, sourcedir): 61def get_source_date_epoch_from_known_files(d, sourcedir):
9 source_date_epoch = None 62 source_date_epoch = None
10 newest_file = None 63 newest_file = None
@@ -22,10 +75,11 @@ def get_source_date_epoch_from_known_files(d, sourcedir):
22 return source_date_epoch 75 return source_date_epoch
23 76
24def find_git_folder(d, sourcedir): 77def find_git_folder(d, sourcedir):
25 # First guess: WORKDIR/git 78 # First guess: UNPACKDIR/BB_GIT_DEFAULT_DESTSUFFIX
26 # This is the default git fetcher unpack path 79 # This is the default git fetcher unpack path
27 workdir = d.getVar('WORKDIR') 80 unpackdir = d.getVar('UNPACKDIR')
28 gitpath = os.path.join(workdir, "git/.git") 81 default_destsuffix = d.getVar('BB_GIT_DEFAULT_DESTSUFFIX')
82 gitpath = os.path.join(unpackdir, default_destsuffix, ".git")
29 if os.path.isdir(gitpath): 83 if os.path.isdir(gitpath):
30 return gitpath 84 return gitpath
31 85
@@ -35,15 +89,16 @@ def find_git_folder(d, sourcedir):
35 return gitpath 89 return gitpath
36 90
37 # Perhaps there was a subpath or destsuffix specified. 91 # Perhaps there was a subpath or destsuffix specified.
38 # Go looking in the WORKDIR 92 # Go looking in the UNPACKDIR
39 exclude = set(["build", "image", "license-destdir", "patches", "pseudo", 93 for root, dirs, files in os.walk(unpackdir, topdown=True):
40 "recipe-sysroot", "recipe-sysroot-native", "sysroot-destdir", "temp"])
41 for root, dirs, files in os.walk(workdir, topdown=True):
42 dirs[:] = [d for d in dirs if d not in exclude]
43 if '.git' in dirs: 94 if '.git' in dirs:
44 return root 95 return os.path.join(root, ".git")
45 96
46 bb.warn("Failed to find a git repository in WORKDIR: %s" % workdir) 97 for root, dirs, files in os.walk(sourcedir, topdown=True):
98 if '.git' in dirs:
99 return os.path.join(root, ".git")
100
101 bb.warn("Failed to find a git repository in UNPACKDIR: %s" % unpackdir)
47 return None 102 return None
48 103
49def get_source_date_epoch_from_git(d, sourcedir): 104def get_source_date_epoch_from_git(d, sourcedir):
@@ -62,11 +117,12 @@ def get_source_date_epoch_from_git(d, sourcedir):
62 return None 117 return None
63 118
64 bb.debug(1, "git repository: %s" % gitpath) 119 bb.debug(1, "git repository: %s" % gitpath)
65 p = subprocess.run(['git', '--git-dir', gitpath, 'log', '-1', '--pretty=%ct'], check=True, stdout=subprocess.PIPE) 120 p = subprocess.run(['git', '-c', 'log.showSignature=false', '--git-dir', gitpath, 'log', '-1', '--pretty=%ct'],
121 check=True, stdout=subprocess.PIPE)
66 return int(p.stdout.decode('utf-8')) 122 return int(p.stdout.decode('utf-8'))
67 123
68def get_source_date_epoch_from_youngest_file(d, sourcedir): 124def get_source_date_epoch_from_youngest_file(d, sourcedir):
69 if sourcedir == d.getVar('WORKDIR'): 125 if sourcedir == d.getVar('UNPACKDIR'):
70 # These sources are almost certainly not from a tarball 126 # These sources are almost certainly not from a tarball
71 return None 127 return None
72 128
@@ -77,6 +133,9 @@ def get_source_date_epoch_from_youngest_file(d, sourcedir):
77 files = [f for f in files if not f[0] == '.'] 133 files = [f for f in files if not f[0] == '.']
78 134
79 for fname in files: 135 for fname in files:
136 if fname == "singletask.lock":
137 # Ignore externalsrc/devtool lockfile [YOCTO #14921]
138 continue
80 filename = os.path.join(root, fname) 139 filename = os.path.join(root, fname)
81 try: 140 try:
82 mtime = int(os.lstat(filename).st_mtime) 141 mtime = int(os.lstat(filename).st_mtime)
@@ -101,8 +160,40 @@ def fixed_source_date_epoch(d):
101def get_source_date_epoch(d, sourcedir): 160def get_source_date_epoch(d, sourcedir):
102 return ( 161 return (
103 get_source_date_epoch_from_git(d, sourcedir) or 162 get_source_date_epoch_from_git(d, sourcedir) or
104 get_source_date_epoch_from_known_files(d, sourcedir) or
105 get_source_date_epoch_from_youngest_file(d, sourcedir) or 163 get_source_date_epoch_from_youngest_file(d, sourcedir) or
106 fixed_source_date_epoch(d) # Last resort 164 fixed_source_date_epoch(d) # Last resort
107 ) 165 )
108 166
167def epochfile_read(epochfile, d):
168 cached, efile = d.getVar('__CACHED_SOURCE_DATE_EPOCH') or (None, None)
169 if cached and efile == epochfile:
170 return cached
171
172 if cached and epochfile != efile:
173 bb.debug(1, "Epoch file changed from %s to %s" % (efile, epochfile))
174
175 source_date_epoch = int(d.getVar('SOURCE_DATE_EPOCH_FALLBACK'))
176 try:
177 with open(epochfile, 'r') as f:
178 s = f.read()
179 try:
180 source_date_epoch = int(s)
181 except ValueError:
182 bb.warn("SOURCE_DATE_EPOCH value '%s' is invalid. Reverting to SOURCE_DATE_EPOCH_FALLBACK" % s)
183 source_date_epoch = int(d.getVar('SOURCE_DATE_EPOCH_FALLBACK'))
184 bb.debug(1, "SOURCE_DATE_EPOCH: %d" % source_date_epoch)
185 except FileNotFoundError:
186 bb.debug(1, "Cannot find %s. SOURCE_DATE_EPOCH will default to %d" % (epochfile, source_date_epoch))
187
188 d.setVar('__CACHED_SOURCE_DATE_EPOCH', (str(source_date_epoch), epochfile))
189 return str(source_date_epoch)
190
191def epochfile_write(source_date_epoch, epochfile, d):
192
193 bb.debug(1, "SOURCE_DATE_EPOCH: %d" % source_date_epoch)
194 bb.utils.mkdirhier(os.path.dirname(epochfile))
195
196 tmp_file = "%s.new" % epochfile
197 with open(tmp_file, 'w') as f:
198 f.write(str(source_date_epoch))
199 os.rename(tmp_file, epochfile)
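
A short round-trip sketch of the new epochfile helpers (assumes 'd' is a BitBake
datastore with SDE_FILE and SOURCE_DATE_EPOCH_FALLBACK set; the timestamp is
illustrative):

    import oe.reproducible

    sde_file = d.getVar("SDE_FILE")
    # Writes atomically via a ".new" temp file, then renames into place.
    oe.reproducible.epochfile_write(1672531200, sde_file, d)
    # Returns the value as a string, falling back to SOURCE_DATE_EPOCH_FALLBACK
    # if the file is missing or invalid; the result is cached in the datastore.
    assert oe.reproducible.epochfile_read(sde_file, d) == "1672531200"
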
diff --git a/meta/lib/oe/rootfs.py b/meta/lib/oe/rootfs.py
index 249c685dcf..14befac8fa 100644
--- a/meta/lib/oe/rootfs.py
+++ b/meta/lib/oe/rootfs.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4from abc import ABCMeta, abstractmethod 6from abc import ABCMeta, abstractmethod
@@ -104,7 +106,7 @@ class Rootfs(object, metaclass=ABCMeta):
104 def _cleanup(self): 106 def _cleanup(self):
105 pass 107 pass
106 108
107 def _setup_dbg_rootfs(self, dirs): 109 def _setup_dbg_rootfs(self, package_paths):
108 gen_debugfs = self.d.getVar('IMAGE_GEN_DEBUGFS') or '0' 110 gen_debugfs = self.d.getVar('IMAGE_GEN_DEBUGFS') or '0'
109 if gen_debugfs != '1': 111 if gen_debugfs != '1':
110 return 112 return
@@ -114,17 +116,18 @@ class Rootfs(object, metaclass=ABCMeta):
114 shutil.rmtree(self.image_rootfs + '-orig') 116 shutil.rmtree(self.image_rootfs + '-orig')
115 except: 117 except:
116 pass 118 pass
117 os.rename(self.image_rootfs, self.image_rootfs + '-orig') 119 bb.utils.rename(self.image_rootfs, self.image_rootfs + '-orig')
118 120
119 bb.note(" Creating debug rootfs...") 121 bb.note(" Creating debug rootfs...")
120 bb.utils.mkdirhier(self.image_rootfs) 122 bb.utils.mkdirhier(self.image_rootfs)
121 123
122 bb.note(" Copying back package database...") 124 bb.note(" Copying back package database...")
123 for dir in dirs: 125 for path in package_paths:
124 if not os.path.isdir(self.image_rootfs + '-orig' + dir): 126 bb.utils.mkdirhier(self.image_rootfs + os.path.dirname(path))
125 continue 127 if os.path.isdir(self.image_rootfs + '-orig' + path):
126 bb.utils.mkdirhier(self.image_rootfs + os.path.dirname(dir)) 128 shutil.copytree(self.image_rootfs + '-orig' + path, self.image_rootfs + path, symlinks=True)
127 shutil.copytree(self.image_rootfs + '-orig' + dir, self.image_rootfs + dir, symlinks=True) 129 elif os.path.isfile(self.image_rootfs + '-orig' + path):
130 shutil.copyfile(self.image_rootfs + '-orig' + path, self.image_rootfs + path)
128 131
129 # Copy files located in /usr/lib/debug or /usr/src/debug 132 # Copy files located in /usr/lib/debug or /usr/src/debug
130 for dir in ["/usr/lib/debug", "/usr/src/debug"]: 133 for dir in ["/usr/lib/debug", "/usr/src/debug"]:
@@ -160,25 +163,26 @@ class Rootfs(object, metaclass=ABCMeta):
160 bb.note(" Install extra debug packages...") 163 bb.note(" Install extra debug packages...")
161 self.pm.install(extra_debug_pkgs.split(), True) 164 self.pm.install(extra_debug_pkgs.split(), True)
162 165
166 bb.note(" Removing package database...")
167 for path in package_paths:
168 if os.path.isdir(self.image_rootfs + path):
169 shutil.rmtree(self.image_rootfs + path)
170 elif os.path.isfile(self.image_rootfs + path):
171 os.remove(self.image_rootfs + path)
172
163 bb.note(" Rename debug rootfs...") 173 bb.note(" Rename debug rootfs...")
164 try: 174 try:
165 shutil.rmtree(self.image_rootfs + '-dbg') 175 shutil.rmtree(self.image_rootfs + '-dbg')
166 except: 176 except:
167 pass 177 pass
168 os.rename(self.image_rootfs, self.image_rootfs + '-dbg') 178 bb.utils.rename(self.image_rootfs, self.image_rootfs + '-dbg')
169 179
170 bb.note(" Restoreing original rootfs...") 180 bb.note(" Restoring original rootfs...")
171 os.rename(self.image_rootfs + '-orig', self.image_rootfs) 181 bb.utils.rename(self.image_rootfs + '-orig', self.image_rootfs)
172 182
173 def _exec_shell_cmd(self, cmd): 183 def _exec_shell_cmd(self, cmd):
174 fakerootcmd = self.d.getVar('FAKEROOT')
175 if fakerootcmd is not None:
176 exec_cmd = [fakerootcmd, cmd]
177 else:
178 exec_cmd = cmd
179
180 try: 184 try:
181 subprocess.check_output(exec_cmd, stderr=subprocess.STDOUT) 185 subprocess.check_output(cmd, stderr=subprocess.STDOUT)
182 except subprocess.CalledProcessError as e: 186 except subprocess.CalledProcessError as e:
183 return("Command '%s' returned %d:\n%s" % (e.cmd, e.returncode, e.output)) 187 return("Command '%s' returned %d:\n%s" % (e.cmd, e.returncode, e.output))
184 188
@@ -190,9 +194,17 @@ class Rootfs(object, metaclass=ABCMeta):
190 post_process_cmds = self.d.getVar("ROOTFS_POSTPROCESS_COMMAND") 194 post_process_cmds = self.d.getVar("ROOTFS_POSTPROCESS_COMMAND")
191 rootfs_post_install_cmds = self.d.getVar('ROOTFS_POSTINSTALL_COMMAND') 195 rootfs_post_install_cmds = self.d.getVar('ROOTFS_POSTINSTALL_COMMAND')
192 196
193 bb.utils.mkdirhier(self.image_rootfs) 197 def make_last(command, commands):
198 commands = commands.split()
199 if command in commands:
200 commands.remove(command)
201 commands.append(command)
202 return " ".join(commands)
194 203
195 bb.utils.mkdirhier(self.deploydir) 204 # We want this to run as late as possible, in particular after
205 # systemd_sysusers_create and set_user_group. Using :append is not enough
206 post_process_cmds = make_last("tidy_shadowutils_files", post_process_cmds)
207 post_process_cmds = make_last("rootfs_reproducible", post_process_cmds)
196 208
197 execute_pre_post_process(self.d, pre_process_cmds) 209 execute_pre_post_process(self.d, pre_process_cmds)
198 210
@@ -250,16 +262,18 @@ class Rootfs(object, metaclass=ABCMeta):
250 262
251 263
252 def _uninstall_unneeded(self): 264 def _uninstall_unneeded(self):
253 # Remove unneeded init script symlinks 265 # Remove the run-postinsts package if no delayed postinsts are found
254 delayed_postinsts = self._get_delayed_postinsts() 266 delayed_postinsts = self._get_delayed_postinsts()
255 if delayed_postinsts is None: 267 if delayed_postinsts is None:
256 if os.path.exists(self.d.expand("${IMAGE_ROOTFS}${sysconfdir}/init.d/run-postinsts")): 268 if os.path.exists(self.d.expand("${IMAGE_ROOTFS}${sysconfdir}/init.d/run-postinsts")) or os.path.exists(self.d.expand("${IMAGE_ROOTFS}${systemd_system_unitdir}/run-postinsts.service")):
257 self._exec_shell_cmd(["update-rc.d", "-f", "-r", 269 self.pm.remove(["run-postinsts"])
258 self.d.getVar('IMAGE_ROOTFS'),
259 "run-postinsts", "remove"])
260 270
261 image_rorfs = bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs", 271 image_rorfs = bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs",
272 True, False, self.d) and \
273 not bb.utils.contains("IMAGE_FEATURES",
274 "read-only-rootfs-delayed-postinsts",
262 True, False, self.d) 275 True, False, self.d)
276
263 image_rorfs_force = self.d.getVar('FORCE_RO_REMOVE') 277 image_rorfs_force = self.d.getVar('FORCE_RO_REMOVE')
264 278
265 if image_rorfs or image_rorfs_force == "1": 279 if image_rorfs or image_rorfs_force == "1":
@@ -304,10 +318,20 @@ class Rootfs(object, metaclass=ABCMeta):
304 self._exec_shell_cmd(['ldconfig', '-r', self.image_rootfs, '-c', 318 self._exec_shell_cmd(['ldconfig', '-r', self.image_rootfs, '-c',
305 'new', '-v', '-X']) 319 'new', '-v', '-X'])
306 320
321 image_rorfs = bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs",
322 True, False, self.d)
323 ldconfig_in_features = bb.utils.contains("DISTRO_FEATURES", "ldconfig",
324 True, False, self.d)
325 if image_rorfs or not ldconfig_in_features:
326 ldconfig_cache_dir = os.path.join(self.image_rootfs, "var/cache/ldconfig")
327 if os.path.exists(ldconfig_cache_dir):
328 bb.note("Removing ldconfig auxiliary cache...")
329 shutil.rmtree(ldconfig_cache_dir)
330
307 def _check_for_kernel_modules(self, modules_dir): 331 def _check_for_kernel_modules(self, modules_dir):
308 for root, dirs, files in os.walk(modules_dir, topdown=True): 332 for root, dirs, files in os.walk(modules_dir, topdown=True):
309 for name in files: 333 for name in files:
310 found_ko = name.endswith(".ko") 334 found_ko = name.endswith((".ko", ".ko.gz", ".ko.xz", ".ko.zst"))
311 if found_ko: 335 if found_ko:
312 return found_ko 336 return found_ko
313 return False 337 return False
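
The extended suffix tuple means compressed kernel modules now count when deciding
whether to run depmod; a quick standalone illustration of the check:

    # str.endswith accepts a tuple, so one test covers plain and compressed
    # kernel modules:
    suffixes = (".ko", ".ko.gz", ".ko.xz", ".ko.zst")
    assert "nls_cp437.ko.zst".endswith(suffixes)
    assert not "README".endswith(suffixes)
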
@@ -319,17 +343,31 @@ class Rootfs(object, metaclass=ABCMeta):
319 bb.note("No Kernel Modules found, not running depmod") 343 bb.note("No Kernel Modules found, not running depmod")
320 return 344 return
321 345
322 kernel_abi_ver_file = oe.path.join(self.d.getVar('PKGDATA_DIR'), "kernel-depmod", 346 pkgdatadir = self.d.getVar('PKGDATA_DIR')
323 'kernel-abiversion')
324 if not os.path.exists(kernel_abi_ver_file):
325 bb.fatal("No kernel-abiversion file found (%s), cannot run depmod, aborting" % kernel_abi_ver_file)
326 347
327 kernel_ver = open(kernel_abi_ver_file).read().strip(' \n') 348 # PKGDATA_DIR can include multiple kernels so we run depmod for each
328 versioned_modules_dir = os.path.join(self.image_rootfs, modules_dir, kernel_ver) 349 # one of them.
350 for direntry in os.listdir(pkgdatadir):
351 match = re.match('(.*)-depmod', direntry)
352 if not match:
353 continue
354 kernel_package_name = match.group(1)
355
356 kernel_abi_ver_file = oe.path.join(pkgdatadir, direntry, kernel_package_name + '-abiversion')
357 if not os.path.exists(kernel_abi_ver_file):
358 bb.fatal("No kernel-abiversion file found (%s), cannot run depmod, aborting" % kernel_abi_ver_file)
329 359
330 bb.utils.mkdirhier(versioned_modules_dir) 360 with open(kernel_abi_ver_file) as f:
361 kernel_ver = f.read().strip(' \n')
331 362
332 self._exec_shell_cmd(['depmodwrapper', '-a', '-b', self.image_rootfs, kernel_ver]) 363 versioned_modules_dir = os.path.join(self.image_rootfs, modules_dir, kernel_ver)
364
365 if os.path.exists(versioned_modules_dir):
366 bb.note("Running depmodwrapper for %s ..." % versioned_modules_dir)
367 if self._exec_shell_cmd(['depmodwrapper', '-a', '-b', self.image_rootfs, kernel_ver, kernel_package_name]):
368 bb.fatal("Kernel modules dependency generation failed")
369 else:
370 bb.note("Not running depmodwrapper for %s since directory does not exist" % versioned_modules_dir)
333 371
334 """ 372 """
335 Create devfs: 373 Create devfs:
@@ -378,6 +416,10 @@ def create_rootfs(d, manifest_dir=None, progress_reporter=None, logcatcher=None)
378 416
379 417
380def image_list_installed_packages(d, rootfs_dir=None): 418def image_list_installed_packages(d, rootfs_dir=None):
 419 # There's no rootfs for baremetal images
420 if bb.data.inherits_class('baremetal-image', d):
421 return ""
422
381 if not rootfs_dir: 423 if not rootfs_dir:
382 rootfs_dir = d.getVar('IMAGE_ROOTFS') 424 rootfs_dir = d.getVar('IMAGE_ROOTFS')
383 425
@@ -386,12 +428,3 @@ def image_list_installed_packages(d, rootfs_dir=None):
386 import importlib 428 import importlib
387 cls = importlib.import_module('oe.package_manager.' + img_type) 429 cls = importlib.import_module('oe.package_manager.' + img_type)
388 return cls.PMPkgsList(d, rootfs_dir).list_pkgs() 430 return cls.PMPkgsList(d, rootfs_dir).list_pkgs()
389
390if __name__ == "__main__":
391 """
392 We should be able to run this as a standalone script, from outside bitbake
393 environment.
394 """
395 """
396 TBD
397 """
diff --git a/meta/lib/oe/rootfspostcommands.py b/meta/lib/oe/rootfspostcommands.py
new file mode 100644
index 0000000000..5386eea409
--- /dev/null
+++ b/meta/lib/oe/rootfspostcommands.py
@@ -0,0 +1,90 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import os
8
9def sort_shadowutils_file(filename, mapping):
10 """
11 Sorts a passwd or group file based on the numeric ID in the third column.
12 If a mapping is given, the name from the first column is mapped via that
13 dictionary instead (necessary for /etc/shadow and /etc/gshadow). If not,
14 a new mapping is created on the fly and returned.
15 """
16
17 new_mapping = {}
18 with open(filename, 'rb+') as f:
19 lines = f.readlines()
20 # No explicit error checking for the sake of simplicity. /etc
21 # files are assumed to be well-formed, causing exceptions if
22 # not.
23 for line in lines:
24 entries = line.split(b':')
25 name = entries[0]
26 if mapping is None:
27 id = int(entries[2])
28 else:
29 id = mapping[name]
30 new_mapping[name] = id
31 # Sort by numeric id first, with entire line as secondary key
 32 # (just in case there is more than one entry for the same id).
33 lines.sort(key=lambda line: (new_mapping[line.split(b':')[0]], line))
34 # We overwrite the entire file, i.e. no truncate() necessary.
35 f.seek(0)
36 f.write(b''.join(lines))
37
38 return new_mapping
39
40def sort_shadowutils_files(sysconfdir):
41 """
42 Sorts shadow-utils 'passwd' and 'group' files in a rootfs' /etc directory
43 by ID.
44 """
45
46 for main, shadow in (('passwd', 'shadow'),
47 ('group', 'gshadow')):
48 filename = os.path.join(sysconfdir, main)
49 if os.path.exists(filename):
50 mapping = sort_shadowutils_file(filename, None)
51 filename = os.path.join(sysconfdir, shadow)
52 if os.path.exists(filename):
53 sort_shadowutils_file(filename, mapping)
54
55def remove_shadowutils_backup_file(filename):
56 """
57 Remove shadow-utils backup file for files like /etc/passwd.
58 """
59
60 backup_filename = filename + '-'
61 if os.path.exists(backup_filename):
62 os.unlink(backup_filename)
63
64def remove_shadowutils_backup_files(sysconfdir):
65 """
66 Remove shadow-utils backup files in a rootfs /etc directory. They are not
67 needed in the initial root filesystem and sorting them can be inconsistent
68 (YOCTO #11043).
69 """
70
71 for filename in (
72 'group',
73 'gshadow',
74 'passwd',
75 'shadow',
76 'subgid',
77 'subuid',
78 ):
79 filepath = os.path.join(sysconfdir, filename)
80 remove_shadowutils_backup_file(filepath)
81
82def tidy_shadowutils_files(sysconfdir):
83 """
84 Tidy up shadow-utils files.
85 """
86
87 remove_shadowutils_backup_files(sysconfdir)
88 sort_shadowutils_files(sysconfdir)
89
90 return True
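
A self-contained sketch of the new module in action (file contents are
illustrative): entries end up ordered by the numeric ID in the third column, and
any shadow-utils backup files would be removed first.

    import os
    import tempfile
    import oe.rootfspostcommands

    with tempfile.TemporaryDirectory() as sysconfdir:
        with open(os.path.join(sysconfdir, "passwd"), "wb") as f:
            f.write(b"daemon:x:2:2::/:/bin/sh\nroot:x:0:0:root:/root:/bin/sh\n")
        oe.rootfspostcommands.tidy_shadowutils_files(sysconfdir)
        with open(os.path.join(sysconfdir, "passwd"), "rb") as f:
            assert f.read().startswith(b"root:")  # root (uid 0) sorted first
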
diff --git a/meta/lib/oe/rust.py b/meta/lib/oe/rust.py
new file mode 100644
index 0000000000..1dc9cf150d
--- /dev/null
+++ b/meta/lib/oe/rust.py
@@ -0,0 +1,11 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7# Handle mismatches between `uname -m`-style output and Rust's arch names
8def arch_to_rust_arch(arch):
9 if arch == "ppc64le":
10 return "powerpc64le"
11 return arch
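
Usage is a straight string mapping between `uname -m`-style names and Rust's
target arch names:

    import oe.rust

    assert oe.rust.arch_to_rust_arch("ppc64le") == "powerpc64le"
    # Everything else passes through unchanged:
    assert oe.rust.arch_to_rust_arch("aarch64") == "aarch64"
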
diff --git a/meta/lib/oe/sbom.py b/meta/lib/oe/sbom.py
new file mode 100644
index 0000000000..fd4b6895d8
--- /dev/null
+++ b/meta/lib/oe/sbom.py
@@ -0,0 +1,120 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import collections
8
9DepRecipe = collections.namedtuple("DepRecipe", ("doc", "doc_sha1", "recipe"))
10DepSource = collections.namedtuple("DepSource", ("doc", "doc_sha1", "recipe", "file"))
11
12
13def get_recipe_spdxid(d):
14 return "SPDXRef-%s-%s" % ("Recipe", d.getVar("PN"))
15
16
17def get_download_spdxid(d, idx):
18 return "SPDXRef-Download-%s-%d" % (d.getVar("PN"), idx)
19
20
21def get_package_spdxid(pkg):
22 return "SPDXRef-Package-%s" % pkg
23
24
25def get_source_file_spdxid(d, idx):
26 return "SPDXRef-SourceFile-%s-%d" % (d.getVar("PN"), idx)
27
28
29def get_packaged_file_spdxid(pkg, idx):
30 return "SPDXRef-PackagedFile-%s-%d" % (pkg, idx)
31
32
33def get_image_spdxid(img):
34 return "SPDXRef-Image-%s" % img
35
36
37def get_sdk_spdxid(sdk):
38 return "SPDXRef-SDK-%s" % sdk
39
40
41def _doc_path_by_namespace(spdx_deploy, arch, doc_namespace):
42 return spdx_deploy / "by-namespace" / arch / doc_namespace.replace("/", "_")
43
44
45def doc_find_by_namespace(spdx_deploy, search_arches, doc_namespace):
46 for pkgarch in search_arches:
47 p = _doc_path_by_namespace(spdx_deploy, pkgarch, doc_namespace)
48 if os.path.exists(p):
49 return p
50 return None
51
52
53def _doc_path_by_hashfn(spdx_deploy, arch, doc_name, hashfn):
54 return (
55 spdx_deploy / "by-hash" / arch / hashfn.split()[1] / (doc_name + ".spdx.json")
56 )
57
58
59def doc_find_by_hashfn(spdx_deploy, search_arches, doc_name, hashfn):
60 for pkgarch in search_arches:
61 p = _doc_path_by_hashfn(spdx_deploy, pkgarch, doc_name, hashfn)
62 if os.path.exists(p):
63 return p
64 return None
65
66
67def doc_path(spdx_deploy, doc_name, arch, subdir):
68 return spdx_deploy / arch / subdir / (doc_name + ".spdx.json")
69
70
71def write_doc(d, spdx_doc, arch, subdir, spdx_deploy=None, indent=None):
72 from pathlib import Path
73
74 if spdx_deploy is None:
75 spdx_deploy = Path(d.getVar("SPDXDEPLOY"))
76
77 dest = doc_path(spdx_deploy, spdx_doc.name, arch, subdir)
78 dest.parent.mkdir(exist_ok=True, parents=True)
79 with dest.open("wb") as f:
80 doc_sha1 = spdx_doc.to_json(f, sort_keys=True, indent=indent)
81
82 l = _doc_path_by_namespace(spdx_deploy, arch, spdx_doc.documentNamespace)
83 l.parent.mkdir(exist_ok=True, parents=True)
84 l.symlink_to(os.path.relpath(dest, l.parent))
85
86 l = _doc_path_by_hashfn(
87 spdx_deploy, arch, spdx_doc.name, d.getVar("BB_HASHFILENAME")
88 )
89 l.parent.mkdir(exist_ok=True, parents=True)
90 l.symlink_to(os.path.relpath(dest, l.parent))
91
92 return doc_sha1
93
94
95def read_doc(fn):
96 import hashlib
97 import oe.spdx
98 import io
99 import contextlib
100
101 @contextlib.contextmanager
102 def get_file():
103 if isinstance(fn, io.IOBase):
104 yield fn
105 else:
106 with fn.open("rb") as f:
107 yield f
108
109 with get_file() as f:
110 sha1 = hashlib.sha1()
111 while True:
112 chunk = f.read(4096)
113 if not chunk:
114 break
115 sha1.update(chunk)
116
117 f.seek(0)
118 doc = oe.spdx.SPDXDocument.from_json(f)
119
120 return (doc, sha1.hexdigest())
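
The ID helpers just compose "SPDXRef-..." strings from the recipe or package
name; a small sketch with illustrative values:

    import oe.sbom

    assert oe.sbom.get_package_spdxid("busybox") == "SPDXRef-Package-busybox"
    assert (oe.sbom.get_packaged_file_spdxid("busybox", 3)
            == "SPDXRef-PackagedFile-busybox-3")
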
diff --git a/meta/lib/oe/sbom30.py b/meta/lib/oe/sbom30.py
new file mode 100644
index 0000000000..227ac51877
--- /dev/null
+++ b/meta/lib/oe/sbom30.py
@@ -0,0 +1,1096 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7from pathlib import Path
8
9import oe.spdx30
10import bb
11import re
12import hashlib
13import uuid
14import os
15import oe.spdx_common
16from datetime import datetime, timezone
17
18OE_SPDX_BASE = "https://rdf.openembedded.org/spdx/3.0/"
19
20VEX_VERSION = "1.0.0"
21
22SPDX_BUILD_TYPE = "http://openembedded.org/bitbake"
23
24OE_ALIAS_PREFIX = "http://spdxdocs.org/openembedded-alias/by-doc-hash/"
25OE_DOC_ALIAS_PREFIX = "http://spdxdocs.org/openembedded-alias/doc/"
26
27
28@oe.spdx30.register(OE_SPDX_BASE + "id-alias")
29class OEIdAliasExtension(oe.spdx30.extension_Extension):
30 """
31 This extension allows an Element to provide an internal alias for the SPDX
32 ID. Since SPDX requires unique URIs for each SPDX ID, most of the objects
33 created have a unique UUID namespace and the unihash of the task encoded in
34 their SPDX ID. However, this causes a problem for referencing documents
35 across recipes, since the taskhash of a dependency may not factor into the
36 taskhash of the current task and thus the current task won't rebuild and
37 see the new SPDX ID when the dependency changes (e.g. ABI safe recipes and
38 tasks).
39
40 To help work around this, this extension provides a non-unique alias for an
41 Element by which it can be referenced from other tasks/recipes. When a
42 final SBoM is created, references to these aliases will be replaced with
43 the actual unique SPDX ID.
44
45 Most Elements will automatically get an alias created when they are written
46 out if they do not already have one. To suppress the creation of an alias,
47 add an extension with a blank `alias` property.
48
49
 50 It is an internal extension that should be removed when writing out a final
51 SBoM
52 """
53
54 CLOSED = True
55 INTERNAL = True
56
57 @classmethod
58 def _register_props(cls):
59 super()._register_props()
60 cls._add_property(
61 "alias",
62 oe.spdx30.StringProp(),
63 OE_SPDX_BASE + "alias",
64 max_count=1,
65 )
66
67 cls._add_property(
68 "link_name",
69 oe.spdx30.StringProp(),
70 OE_SPDX_BASE + "link-name",
71 max_count=1,
72 )
73
74
75@oe.spdx30.register(OE_SPDX_BASE + "file-name-alias")
76class OEFileNameAliasExtension(oe.spdx30.extension_Extension):
77 CLOSED = True
78 INTERNAL = True
79
80 @classmethod
81 def _register_props(cls):
82 super()._register_props()
83 cls._add_property(
84 "aliases",
85 oe.spdx30.ListProp(oe.spdx30.StringProp()),
86 OE_SPDX_BASE + "filename-alias",
87 )
88
89
90@oe.spdx30.register(OE_SPDX_BASE + "license-scanned")
91class OELicenseScannedExtension(oe.spdx30.extension_Extension):
92 """
93 The presence of this extension means the file has already been scanned for
94 license information
95 """
96
97 CLOSED = True
98 INTERNAL = True
99
100
101@oe.spdx30.register(OE_SPDX_BASE + "document-extension")
102class OEDocumentExtension(oe.spdx30.extension_Extension):
103 """
104 This extension is added to a SpdxDocument to indicate various useful bits
105 of information about its contents
106 """
107
108 CLOSED = True
109
110 @classmethod
111 def _register_props(cls):
112 super()._register_props()
113 cls._add_property(
114 "is_native",
115 oe.spdx30.BooleanProp(),
116 OE_SPDX_BASE + "is-native",
117 max_count=1,
118 )
119
120
121def spdxid_hash(*items):
122 h = hashlib.md5()
123 for i in items:
124 if isinstance(i, oe.spdx30.Element):
125 h.update(i._id.encode("utf-8"))
126 else:
127 h.update(i.encode("utf-8"))
128 return h.hexdigest()
129
130
131def spdx_sde(d):
132 sde = d.getVar("SOURCE_DATE_EPOCH")
133 if not sde:
134 return datetime.now(timezone.utc)
135
136 return datetime.fromtimestamp(int(sde), timezone.utc)
137
138
139def get_element_link_id(e):
140 """
141 Get the string ID which should be used to link to an Element. If the
142 element has an alias, that will be preferred, otherwise its SPDX ID will be
143 used.
144 """
145 ext = get_alias(e)
146 if ext is not None and ext.alias:
147 return ext.alias
148 return e._id
149
150
151def get_alias(obj):
152 for ext in obj.extension:
153 if not isinstance(ext, OEIdAliasExtension):
154 continue
155 return ext
156
157 return None
158
159
160def hash_id(_id):
161 return hashlib.sha256(_id.encode("utf-8")).hexdigest()
162
163
164def to_list(l):
165 if isinstance(l, set):
166 l = sorted(list(l))
167
168 if not isinstance(l, (list, tuple)):
169 raise TypeError("Must be a list or tuple. Got %s" % type(l))
170
171 return l
172
173
174class ObjectSet(oe.spdx30.SHACLObjectSet):
175 def __init__(self, d):
176 super().__init__()
177 self.d = d
178 self.alias_prefix = None
179
180 def create_index(self):
181 self.by_sha256_hash = {}
182 super().create_index()
183
184 def add_index(self, obj):
185 # Check that all elements are given an ID before being inserted
186 if isinstance(obj, oe.spdx30.Element):
187 if not obj._id:
188 raise ValueError("Element missing ID")
189
190 alias_ext = get_alias(obj)
191 if alias_ext is not None and alias_ext.alias:
192 self.obj_by_id[alias_ext.alias] = obj
193
194 for v in obj.verifiedUsing:
195 if not isinstance(v, oe.spdx30.Hash):
196 continue
197
198 if v.algorithm != oe.spdx30.HashAlgorithm.sha256:
199 continue
200
201 self.by_sha256_hash.setdefault(v.hashValue, set()).add(obj)
202
203 super().add_index(obj)
204 if isinstance(obj, oe.spdx30.SpdxDocument):
205 self.doc = obj
206 alias_ext = get_alias(obj)
207 if alias_ext is not None and alias_ext.alias:
208 self.alias_prefix = OE_ALIAS_PREFIX + hash_id(alias_ext.alias) + "/"
209
210 def __filter_obj(self, obj, attr_filter):
211 return all(getattr(obj, k) == v for k, v in attr_filter.items())
212
213 def foreach_filter(self, typ, *, match_subclass=True, **attr_filter):
214 for obj in self.foreach_type(typ, match_subclass=match_subclass):
215 if self.__filter_obj(obj, attr_filter):
216 yield obj
217
218 def find_filter(self, typ, *, match_subclass=True, **attr_filter):
219 for obj in self.foreach_filter(
220 typ, match_subclass=match_subclass, **attr_filter
221 ):
222 return obj
223 return None
224
225 def foreach_root(self, typ, **attr_filter):
226 for obj in self.doc.rootElement:
227 if not isinstance(obj, typ):
228 continue
229
230 if self.__filter_obj(obj, attr_filter):
231 yield obj
232
233 def find_root(self, typ, **attr_filter):
234 for obj in self.foreach_root(typ, **attr_filter):
235 return obj
236 return None
237
238 def add_root(self, obj):
239 self.add(obj)
240 self.doc.rootElement.append(obj)
241 return obj
242
243 def is_native(self):
244 for e in self.doc.extension:
245 if not isinstance(e, oe.sbom30.OEDocumentExtension):
246 continue
247
248 if e.is_native is not None:
249 return e.is_native
250
251 return False
252
253 def set_is_native(self, is_native):
254 for e in self.doc.extension:
255 if not isinstance(e, oe.sbom30.OEDocumentExtension):
256 continue
257
258 e.is_native = is_native
259 return
260
261 if is_native:
262 self.doc.extension.append(oe.sbom30.OEDocumentExtension(is_native=True))
263
264 def add_aliases(self):
265 for o in self.foreach_type(oe.spdx30.Element):
266 self.set_element_alias(o)
267
268 def new_alias_id(self, obj, replace):
269 unihash = self.d.getVar("BB_UNIHASH")
270 namespace = self.get_namespace()
271 if unihash not in obj._id:
272 bb.warn(f"Unihash {unihash} not found in {obj._id}")
273 return None
274
275 if namespace not in obj._id:
276 bb.warn(f"Namespace {namespace} not found in {obj._id}")
277 return None
278
279 return obj._id.replace(unihash, "UNIHASH").replace(
280 namespace, replace + self.d.getVar("PN")
281 )
282
283 def remove_internal_extensions(self):
284 def remove(o):
285 o.extension = [e for e in o.extension if not getattr(e, "INTERNAL", False)]
286
287 for o in self.foreach_type(oe.spdx30.Element):
288 remove(o)
289
290 if self.doc:
291 remove(self.doc)
292
293 def get_namespace(self):
294 namespace_uuid = uuid.uuid5(
295 uuid.NAMESPACE_DNS, self.d.getVar("SPDX_UUID_NAMESPACE")
296 )
297 pn = self.d.getVar("PN")
298 return "%s/%s-%s" % (
299 self.d.getVar("SPDX_NAMESPACE_PREFIX"),
300 pn,
301 str(uuid.uuid5(namespace_uuid, pn)),
302 )
303
304 def set_element_alias(self, e):
305 if not e._id or e._id.startswith("_:"):
306 return
307
308 alias_ext = get_alias(e)
309 if alias_ext is None:
310 alias_id = self.new_alias_id(e, self.alias_prefix)
311 if alias_id is not None:
312 e.extension.append(OEIdAliasExtension(alias=alias_id))
313 elif (
314 alias_ext.alias
315 and not isinstance(e, oe.spdx30.SpdxDocument)
316 and not alias_ext.alias.startswith(self.alias_prefix)
317 ):
318 bb.warn(
319 f"Element {e._id} has alias {alias_ext.alias}, but it should have prefix {self.alias_prefix}"
320 )
321
322 def new_spdxid(self, *suffix, include_unihash=True):
323 items = [self.get_namespace()]
324 if include_unihash:
325 unihash = self.d.getVar("BB_UNIHASH")
326 items.append(unihash)
327 items.extend(re.sub(r"[^a-zA-Z0-9_-]", "_", s) for s in suffix)
328 return "/".join(items)
329
330 def new_import(self, key):
331 base = f"SPDX_IMPORTS_{key}"
332 spdxid = self.d.getVar(f"{base}_spdxid")
333 if not spdxid:
334 bb.fatal(f"{key} is not a valid SPDX_IMPORTS key")
335
336 for i in self.doc.import_:
337 if i.externalSpdxId == spdxid:
338 # Already imported
339 return spdxid
340
341 m = oe.spdx30.ExternalMap(externalSpdxId=spdxid)
342
343 uri = self.d.getVar(f"{base}_uri")
344 if uri:
345 m.locationHint = uri
346
347 for pyname, algorithm in oe.spdx30.HashAlgorithm.NAMED_INDIVIDUALS.items():
348 value = self.d.getVar(f"{base}_hash_{pyname}")
349 if value:
350 m.verifiedUsing.append(
351 oe.spdx30.Hash(
352 algorithm=algorithm,
353 hashValue=value,
354 )
355 )
356
357 self.doc.import_.append(m)
358 return spdxid
359
360 def new_agent(self, varname, *, creation_info=None, add=True):
361 ref_varname = self.d.getVar(f"{varname}_ref")
362 if ref_varname:
363 if ref_varname == varname:
364 bb.fatal(f"{varname} cannot reference itself")
365 return self.new_agent(ref_varname, creation_info=creation_info)
366
367 import_key = self.d.getVar(f"{varname}_import")
368 if import_key:
369 return self.new_import(import_key)
370
371 name = self.d.getVar(f"{varname}_name")
372 if not name:
373 return None
374
375 spdxid = self.new_spdxid("agent", name)
376 agent = self.find_by_id(spdxid)
377 if agent is not None:
378 return agent
379
380 agent_type = self.d.getVar("%s_type" % varname)
381 if agent_type == "person":
382 agent = oe.spdx30.Person()
383 elif agent_type == "software":
384 agent = oe.spdx30.SoftwareAgent()
385 elif agent_type == "organization":
386 agent = oe.spdx30.Organization()
387 elif not agent_type or agent_type == "agent":
388 agent = oe.spdx30.Agent()
389 else:
390 bb.fatal("Unknown agent type '%s' in %s_type" % (agent_type, varname))
391
392 agent._id = spdxid
393 agent.creationInfo = creation_info or self.doc.creationInfo
394 agent.name = name
395
396 comment = self.d.getVar("%s_comment" % varname)
397 if comment:
398 agent.comment = comment
399
400 for (
401 pyname,
402 idtype,
403 ) in oe.spdx30.ExternalIdentifierType.NAMED_INDIVIDUALS.items():
404 value = self.d.getVar("%s_id_%s" % (varname, pyname))
405 if value:
406 agent.externalIdentifier.append(
407 oe.spdx30.ExternalIdentifier(
408 externalIdentifierType=idtype,
409 identifier=value,
410 )
411 )
412
413 if add:
414 self.add(agent)
415
416 return agent
417
418 def new_creation_info(self):
419 creation_info = oe.spdx30.CreationInfo()
420
421 name = "%s %s" % (
422 self.d.getVar("SPDX_TOOL_NAME"),
423 self.d.getVar("SPDX_TOOL_VERSION"),
424 )
425 tool = self.add(
426 oe.spdx30.Tool(
427 _id=self.new_spdxid("tool", name),
428 creationInfo=creation_info,
429 name=name,
430 )
431 )
432
433 authors = []
434 for a in self.d.getVar("SPDX_AUTHORS").split():
435 varname = "SPDX_AUTHORS_%s" % a
436 author = self.new_agent(varname, creation_info=creation_info)
437
438 if not author:
439 bb.fatal("Unable to find or create author %s" % a)
440
441 authors.append(author)
442
443 creation_info.created = spdx_sde(self.d)
444 creation_info.specVersion = self.d.getVar("SPDX_VERSION")
445 creation_info.createdBy = authors
446 creation_info.createdUsing = [tool]
447
448 return creation_info
449
450 def copy_creation_info(self, copy):
451 c = oe.spdx30.CreationInfo(
452 created=spdx_sde(self.d),
453 specVersion=self.d.getVar("SPDX_VERSION"),
454 )
455
456 for author in copy.createdBy:
457 if isinstance(author, str):
458 c.createdBy.append(author)
459 else:
460 c.createdBy.append(author._id)
461
462 for tool in copy.createdUsing:
463 if isinstance(tool, str):
464 c.createdUsing.append(tool)
465 else:
466 c.createdUsing.append(tool._id)
467
468 return c
469
470 def new_annotation(self, subject, comment, typ):
471 return self.add(
472 oe.spdx30.Annotation(
473 _id=self.new_spdxid("annotation", spdxid_hash(comment, typ)),
474 creationInfo=self.doc.creationInfo,
475 annotationType=typ,
476 subject=subject,
477 statement=comment,
478 )
479 )
480
481 def _new_relationship(
482 self,
483 cls,
484 from_,
485 typ,
486 to,
487 *,
488 spdxid_name="relationship",
489 **props,
490 ):
491 from_ = to_list(from_)
492 to = to_list(to)
493
494 if not from_:
495 return []
496
497 if not to:
498 to = [oe.spdx30.IndividualElement.NoneElement]
499
500 ret = []
501
502 for f in from_:
503 hash_args = [typ, f]
504 for k in sorted(props.keys()):
505 hash_args.append(props[k])
506 hash_args.extend(to)
507
508 relationship = self.add(
509 cls(
510 _id=self.new_spdxid(spdxid_name, spdxid_hash(*hash_args)),
511 creationInfo=self.doc.creationInfo,
512 from_=f,
513 relationshipType=typ,
514 to=to,
515 **props,
516 )
517 )
518 ret.append(relationship)
519
520 return ret
521
522 def new_relationship(self, from_, typ, to):
523 return self._new_relationship(oe.spdx30.Relationship, from_, typ, to)
524
525 def new_scoped_relationship(self, from_, typ, scope, to):
526 return self._new_relationship(
527 oe.spdx30.LifecycleScopedRelationship,
528 from_,
529 typ,
530 to,
531 scope=scope,
532 )
533
534 def new_license_expression(
535 self, license_expression, license_data, license_text_map={}
536 ):
537 license_list_version = license_data["licenseListVersion"]
538 # SPDX 3 requires that the license list version be a semver
539 # MAJOR.MINOR.MICRO, but the actual license version might be
540 # MAJOR.MINOR on some older versions. As such, manually append a .0
541 # micro version if its missing to keep SPDX happy
542 if license_list_version.count(".") < 2:
543 license_list_version += ".0"
544
545 spdxid = [
546 "license",
547 license_list_version,
548 re.sub(r"[^a-zA-Z0-9_-]", "_", license_expression),
549 ]
550
551 license_text = [
552 (k, license_text_map[k]) for k in sorted(license_text_map.keys())
553 ]
554
555 if not license_text:
556 lic = self.find_filter(
557 oe.spdx30.simplelicensing_LicenseExpression,
558 simplelicensing_licenseExpression=license_expression,
559 simplelicensing_licenseListVersion=license_list_version,
560 )
561 if lic is not None:
562 return lic
563 else:
564 spdxid.append(spdxid_hash(*(v for _, v in license_text)))
565 lic = self.find_by_id(self.new_spdxid(*spdxid))
566 if lic is not None:
567 return lic
568
569 lic = self.add(
570 oe.spdx30.simplelicensing_LicenseExpression(
571 _id=self.new_spdxid(*spdxid),
572 creationInfo=self.doc.creationInfo,
573 simplelicensing_licenseExpression=license_expression,
574 simplelicensing_licenseListVersion=license_list_version,
575 )
576 )
577
578 for key, value in license_text:
579 lic.simplelicensing_customIdToUri.append(
580 oe.spdx30.DictionaryEntry(key=key, value=value)
581 )
582
583 return lic
584
585 def scan_declared_licenses(self, spdx_file, filepath, license_data):
586 for e in spdx_file.extension:
587 if isinstance(e, OELicenseScannedExtension):
588 return
589
590 file_licenses = set()
591 for extracted_lic in oe.spdx_common.extract_licenses(filepath):
592 lic = self.new_license_expression(extracted_lic, license_data)
593 self.set_element_alias(lic)
594 file_licenses.add(lic)
595
596 self.new_relationship(
597 [spdx_file],
598 oe.spdx30.RelationshipType.hasDeclaredLicense,
599 [oe.sbom30.get_element_link_id(lic_alias) for lic_alias in file_licenses],
600 )
601 spdx_file.extension.append(OELicenseScannedExtension())
602
603 def new_file(self, _id, name, path, *, purposes=[]):
604 sha256_hash = bb.utils.sha256_file(path)
605
606 for f in self.by_sha256_hash.get(sha256_hash, []):
607 if not isinstance(f, oe.spdx30.software_File):
608 continue
609
610 if purposes:
611 new_primary = purposes[0]
612 new_additional = []
613
614 if f.software_primaryPurpose:
615 new_additional.append(f.software_primaryPurpose)
616 new_additional.extend(f.software_additionalPurpose)
617
618 new_additional = sorted(
619 list(set(p for p in new_additional if p != new_primary))
620 )
621
622 f.software_primaryPurpose = new_primary
623 f.software_additionalPurpose = new_additional
624
625 if f.name != name:
626 for e in f.extension:
627 if isinstance(e, OEFileNameAliasExtension):
628 e.aliases.append(name)
629 break
630 else:
631 f.extension.append(OEFileNameAliasExtension(aliases=[name]))
632
633 return f
634
635 spdx_file = oe.spdx30.software_File(
636 _id=_id,
637 creationInfo=self.doc.creationInfo,
638 name=name,
639 )
640 if purposes:
641 spdx_file.software_primaryPurpose = purposes[0]
642 spdx_file.software_additionalPurpose = purposes[1:]
643
644 spdx_file.verifiedUsing.append(
645 oe.spdx30.Hash(
646 algorithm=oe.spdx30.HashAlgorithm.sha256,
647 hashValue=sha256_hash,
648 )
649 )
650
651 return self.add(spdx_file)
652
653 def new_cve_vuln(self, cve):
654 v = oe.spdx30.security_Vulnerability()
655 v._id = self.new_spdxid("vulnerability", cve)
656 v.creationInfo = self.doc.creationInfo
657
658 v.externalIdentifier.append(
659 oe.spdx30.ExternalIdentifier(
660 externalIdentifierType=oe.spdx30.ExternalIdentifierType.cve,
661 identifier=cve,
662 identifierLocator=[
663 f"https://cveawg.mitre.org/api/cve/{cve}",
664 f"https://www.cve.org/CVERecord?id={cve}",
665 ],
666 )
667 )
668 return self.add(v)
669
670 def new_vex_patched_relationship(self, from_, to):
671 return self._new_relationship(
672 oe.spdx30.security_VexFixedVulnAssessmentRelationship,
673 from_,
674 oe.spdx30.RelationshipType.fixedIn,
675 to,
676 spdxid_name="vex-fixed",
677 security_vexVersion=VEX_VERSION,
678 )
679
680 def new_vex_unpatched_relationship(self, from_, to):
681 return self._new_relationship(
682 oe.spdx30.security_VexAffectedVulnAssessmentRelationship,
683 from_,
684 oe.spdx30.RelationshipType.affects,
685 to,
686 spdxid_name="vex-affected",
687 security_vexVersion=VEX_VERSION,
688 security_actionStatement="Mitigation action unknown",
689 )
690
691 def new_vex_ignored_relationship(self, from_, to, *, impact_statement):
692 return self._new_relationship(
693 oe.spdx30.security_VexNotAffectedVulnAssessmentRelationship,
694 from_,
695 oe.spdx30.RelationshipType.doesNotAffect,
696 to,
697 spdxid_name="vex-not-affected",
698 security_vexVersion=VEX_VERSION,
699 security_impactStatement=impact_statement,
700 )
701
702 def import_bitbake_build_objset(self):
703 deploy_dir_spdx = Path(self.d.getVar("DEPLOY_DIR_SPDX"))
704 bb_objset = load_jsonld(
705 self.d, deploy_dir_spdx / "bitbake.spdx.json", required=True
706 )
707 self.doc.import_.extend(bb_objset.doc.import_)
708 self.update(bb_objset.objects)
709
710 return bb_objset
711
712 def import_bitbake_build(self):
713 def find_bitbake_build(objset):
714 return objset.find_filter(
715 oe.spdx30.build_Build,
716 build_buildType=SPDX_BUILD_TYPE,
717 )
718
719 build = find_bitbake_build(self)
720 if build:
721 return build
722
723 bb_objset = self.import_bitbake_build_objset()
724 build = find_bitbake_build(bb_objset)
725 if build is None:
726 bb.fatal(f"No build found in {deploy_dir_spdx}")
727
728 return build
729
730 def new_task_build(self, name, typ):
731 current_task = self.d.getVar("BB_CURRENTTASK")
732 pn = self.d.getVar("PN")
733
734 build = self.add(
735 oe.spdx30.build_Build(
736 _id=self.new_spdxid("build", name),
737 creationInfo=self.doc.creationInfo,
738 name=f"{pn}:do_{current_task}:{name}",
739 build_buildType=f"{SPDX_BUILD_TYPE}/do_{current_task}/{typ}",
740 )
741 )
742
743 if self.d.getVar("SPDX_INCLUDE_BITBAKE_PARENT_BUILD") == "1":
744 bitbake_build = self.import_bitbake_build()
745
746 self.new_relationship(
747 [bitbake_build],
748 oe.spdx30.RelationshipType.ancestorOf,
749 [build],
750 )
751
752 if self.d.getVar("SPDX_INCLUDE_BUILD_VARIABLES") == "1":
753 for varname in sorted(self.d.keys()):
754 if varname.startswith("__"):
755 continue
756
757 value = self.d.getVar(varname, expand=False)
758
759 # TODO: Deal with non-string values
760 if not isinstance(value, str):
761 continue
762
763 build.build_parameter.append(
764 oe.spdx30.DictionaryEntry(key=varname, value=value)
765 )
766
767 return build
768
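As a sketch, a task-specific build element might be created like this (the name and type arguments are illustrative, not the exact values used by the SPDX tasks):

    # Creates e.g. "<PN>:do_<task>:recipe" with a matching build_buildType
    build = objset.new_task_build("recipe", "recipe")
    # With SPDX_INCLUDE_BITBAKE_PARENT_BUILD = "1", the global bitbake
    # build is also linked as ancestorOf this build
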
769 def new_archive(self, archive_name):
770 return self.add(
771 oe.spdx30.software_File(
772 _id=self.new_spdxid("archive", str(archive_name)),
773 creationInfo=self.doc.creationInfo,
774 name=str(archive_name),
775 software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.archive,
776 )
777 )
778
779 @classmethod
780 def new_objset(cls, d, name, copy_from_bitbake_doc=True):
781 objset = cls(d)
782
783 document = oe.spdx30.SpdxDocument(
784 _id=objset.new_spdxid("document", name),
785 name=name,
786 )
787
788 document.extension.append(
789 OEIdAliasExtension(
790 alias=objset.new_alias_id(
791 document,
792 OE_DOC_ALIAS_PREFIX + d.getVar("PN") + "/" + name + "/",
793 ),
794 )
795 )
796 objset.doc = document
797 objset.add_index(document)
798
799 if copy_from_bitbake_doc:
800 bb_objset = objset.import_bitbake_build_objset()
801 document.creationInfo = objset.copy_creation_info(
802 bb_objset.doc.creationInfo
803 )
804 else:
805 document.creationInfo = objset.new_creation_info()
806
807 return objset
808
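A hedged sketch of creating a fresh object set and document for the current recipe:

    # copy_from_bitbake_doc=True (the default) reuses the creation info
    # from the global bitbake document; False mints new creation info
    objset = ObjectSet.new_objset(d, d.getVar("PN"))
    doc = objset.doc  # SpdxDocument with an OEIdAliasExtension attached
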
809 def expand_collection(self, *, add_objectsets=[]):
810 """
811 Expands a collection to pull in all missing elements
812
813 Returns the set of IDs that could not be found to link into the document
814 """
815 missing_spdxids = set()
816 imports = {e.externalSpdxId: e for e in self.doc.import_}
817
818 def merge_doc(other):
819 nonlocal imports
820
821 for e in other.doc.import_:
822 if e.externalSpdxId not in imports:
823 imports[e.externalSpdxId] = e
824
825 self.objects |= other.objects
826
827 for o in add_objectsets:
828 merge_doc(o)
829
830 needed_spdxids = self.link()
831 provided_spdxids = set(self.obj_by_id.keys())
832
833 while True:
834 import_spdxids = set(imports.keys())
835 searching_spdxids = (
836 needed_spdxids - provided_spdxids - missing_spdxids - import_spdxids
837 )
838 if not searching_spdxids:
839 break
840
841 spdxid = searching_spdxids.pop()
842 bb.debug(
843 1,
844 f"Searching for {spdxid}. Remaining: {len(searching_spdxids)}, Total: {len(provided_spdxids)}, Missing: {len(missing_spdxids)}, Imports: {len(import_spdxids)}",
845 )
846 dep_objset, dep_path = find_by_spdxid(self.d, spdxid)
847
848 if dep_objset:
849 dep_provided = set(dep_objset.obj_by_id.keys())
850 if spdxid not in dep_provided:
851 bb.fatal(f"{spdxid} not found in {dep_path}")
852 provided_spdxids |= dep_provided
853 needed_spdxids |= dep_objset.missing_ids
854 merge_doc(dep_objset)
855 else:
856 missing_spdxids.add(spdxid)
857
858 self.doc.import_ = sorted(imports.values(), key=lambda e: e.externalSpdxId)
859 bb.debug(1, "Linking...")
860 self.link()
861
862 # Manually go through all of the simplelicensing_customIdToUri DictionaryEntry
863 # items and resolve any aliases to actual objects.
864 for lic in self.foreach_type(oe.spdx30.simplelicensing_LicenseExpression):
865 for d in lic.simplelicensing_customIdToUri:
866 if d.value.startswith(OE_ALIAS_PREFIX):
867 obj = self.find_by_id(d.value)
868 if obj is not None:
869 d.value = obj._id
870 else:
871 self.missing_ids.add(d.value)
872
873 self.missing_ids -= set(imports.keys())
874 return self.missing_ids
875
876
877def load_jsonld(d, path, required=False):
878 deserializer = oe.spdx30.JSONLDDeserializer()
879 objset = ObjectSet(d)
880 try:
881 with path.open("rb") as f:
882 deserializer.read(f, objset)
883 except FileNotFoundError:
884 if required:
885 bb.fatal("No SPDX document named %s found" % path)
886 return None
887
888 if not objset.doc:
889 bb.fatal("SPDX Document %s has no SPDXDocument element" % path)
890 return None
891
892 objset.objects.remove(objset.doc)
893 return objset
894
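For example, the bitbake document import used earlier roughly boils down to this sketch:

    # Load a deployed document; required=True turns a missing file into
    # bb.fatal() instead of returning None
    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
    objset = load_jsonld(d, deploy_dir_spdx / "bitbake.spdx.json", required=True)
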
895
896def jsonld_arch_path(d, arch, subdir, name, deploydir=None):
897 if deploydir is None:
898 deploydir = Path(d.getVar("DEPLOY_DIR_SPDX"))
899 return deploydir / arch / subdir / (name + ".spdx.json")
900
901
902def jsonld_hash_path(h):
903 return Path("by-spdxid-hash") / h[:2], h
904
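A short sketch of what these path helpers produce (the arch and names are illustrative):

    # DEPLOY_DIR_SPDX/core2-64/recipes/recipe-foo.spdx.json
    p = jsonld_arch_path(d, "core2-64", "recipes", "recipe-foo")

    # SPDX IDs are looked up via a two-level hash directory
    subdir, name = jsonld_hash_path("d1e2a3...")
    # subdir == Path("by-spdxid-hash/d1"), name == "d1e2a3..."
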
905
906def load_jsonld_by_arch(d, arch, subdir, name, *, required=False):
907 path = jsonld_arch_path(d, arch, subdir, name)
908 objset = load_jsonld(d, path, required=required)
909 if objset is not None:
910 return (objset, path)
911 return (None, None)
912
913
914def find_jsonld(d, subdir, name, *, required=False):
915 package_archs = d.getVar("SPDX_MULTILIB_SSTATE_ARCHS").split()
916 package_archs.reverse()
917
918 for arch in package_archs:
919 objset, path = load_jsonld_by_arch(d, arch, subdir, name)
920 if objset is not None:
921 return (objset, path)
922
923 if required:
924 bb.fatal("Could not find a %s SPDX document named %s" % (subdir, name))
925
926 return (None, None)
927
928
929def write_jsonld_doc(d, objset, dest):
930 if not isinstance(objset, ObjectSet):
931 bb.fatal("Only an ObjsetSet can be serialized")
932 return
933
934 if not objset.doc:
935 bb.fatal("ObjectSet is missing a SpdxDocument")
936 return
937
938 objset.doc.rootElement = sorted(list(set(objset.doc.rootElement)))
939 objset.doc.profileConformance = sorted(
940 list(
941 getattr(oe.spdx30.ProfileIdentifierType, p)
942 for p in d.getVar("SPDX_PROFILES").split()
943 )
944 )
945
946 dest.parent.mkdir(exist_ok=True, parents=True)
947
948 if d.getVar("SPDX_PRETTY") == "1":
949 serializer = oe.spdx30.JSONLDSerializer(
950 indent=2,
951 )
952 else:
953 serializer = oe.spdx30.JSONLDInlineSerializer()
954
955 objset.objects.add(objset.doc)
956 with dest.open("wb") as f:
957 serializer.write(objset, f, force_at_graph=True)
958 objset.objects.remove(objset.doc)
959
960
961def write_recipe_jsonld_doc(
962 d,
963 objset,
964 subdir,
965 deploydir,
966 *,
967 create_spdx_id_links=True,
968):
969 pkg_arch = d.getVar("SSTATE_PKGARCH")
970
971 dest = jsonld_arch_path(d, pkg_arch, subdir, objset.doc.name, deploydir=deploydir)
972
973 def link_id(_id):
974 hash_path = jsonld_hash_path(hash_id(_id))
975
976 link_name = jsonld_arch_path(
977 d,
978 pkg_arch,
979 *hash_path,
980 deploydir=deploydir,
981 )
982 try:
983 link_name.parent.mkdir(exist_ok=True, parents=True)
984 link_name.symlink_to(os.path.relpath(dest, link_name.parent))
985 except FileExistsError:
986 target = link_name.readlink()
987 bb.warn(
988 f"Unable to link {_id} in {dest} as {link_name}. Already points to {target}"
989 )
990 raise
991
992 return hash_path[-1]
993
994 objset.add_aliases()
995
996 try:
997 if create_spdx_id_links:
998 alias_ext = get_alias(objset.doc)
999 if alias_ext is not None and alias_ext.alias:
1000 alias_ext.link_name = link_id(alias_ext.alias)
1001
1002 finally:
1003 # It is really helpful for debugging if the JSON document is written
1004 # out, so always do that even if there is an error making the links
1005 write_jsonld_doc(d, objset, dest)
1006
1007
1008def find_root_obj_in_jsonld(d, subdir, fn_name, obj_type, **attr_filter):
1009 objset, fn = find_jsonld(d, subdir, fn_name, required=True)
1010
1011 spdx_obj = objset.find_root(obj_type, **attr_filter)
1012 if not spdx_obj:
1013 bb.fatal("No root %s found in %s" % (obj_type.__name__, fn))
1014
1015 return spdx_obj, objset
1016
1017
1018def load_obj_in_jsonld(d, arch, subdir, fn_name, obj_type, **attr_filter):
1019 objset, fn = load_jsonld_by_arch(d, arch, subdir, fn_name, required=True)
1020
1021 spdx_obj = objset.find_filter(obj_type, **attr_filter)
1022 if not spdx_obj:
1023 bb.fatal("No %s found in %s" % (obj_type.__name__, fn))
1024
1025 return spdx_obj, objset
1026
1027
1028def find_by_spdxid(d, spdxid, *, required=False):
1029 if spdxid.startswith(OE_ALIAS_PREFIX):
1030 h = spdxid[len(OE_ALIAS_PREFIX) :].split("/", 1)[0]
1031 return find_jsonld(d, *jsonld_hash_path(h), required=required)
1032 return find_jsonld(d, *jsonld_hash_path(hash_id(spdxid)), required=required)
1033
1034
1035def create_sbom(d, name, root_elements, add_objectsets=[]):
1036 objset = ObjectSet.new_objset(d, name)
1037
1038 sbom = objset.add(
1039 oe.spdx30.software_Sbom(
1040 _id=objset.new_spdxid("sbom", name),
1041 name=name,
1042 creationInfo=objset.doc.creationInfo,
1043 software_sbomType=[oe.spdx30.software_SbomType.build],
1044 rootElement=root_elements,
1045 )
1046 )
1047
1048 missing_spdxids = objset.expand_collection(add_objectsets=add_objectsets)
1049 if missing_spdxids:
1050 bb.warn(
1051 "The following SPDX IDs were unable to be resolved:\n "
1052 + "\n ".join(sorted(list(missing_spdxids)))
1053 )
1054
1055 # Filter out internal extensions from final SBoMs
1056 objset.remove_internal_extensions()
1057
1058 # SBoM should be the only root element of the document
1059 objset.doc.rootElement = [sbom]
1060
1061 # De-duplicate licenses
1062 unique = set()
1063 dedup = {}
1064 for lic in objset.foreach_type(oe.spdx30.simplelicensing_LicenseExpression):
1065 for u in unique:
1066 if (
1067 u.simplelicensing_licenseExpression
1068 == lic.simplelicensing_licenseExpression
1069 and u.simplelicensing_licenseListVersion
1070 == lic.simplelicensing_licenseListVersion
1071 ):
1072 dedup[lic] = u
1073 break
1074 else:
1075 unique.add(lic)
1076
1077 if dedup:
1078 for rel in objset.foreach_filter(
1079 oe.spdx30.Relationship,
1080 relationshipType=oe.spdx30.RelationshipType.hasDeclaredLicense,
1081 ):
1082 rel.to = [dedup.get(to, to) for to in rel.to]
1083
1084 for rel in objset.foreach_filter(
1085 oe.spdx30.Relationship,
1086 relationshipType=oe.spdx30.RelationshipType.hasConcludedLicense,
1087 ):
1088 rel.to = [dedup.get(to, to) for to in rel.to]
1089
1090 for k, v in dedup.items():
1091 bb.debug(1, f"Removing duplicate License {k._id} -> {v._id}")
1092 objset.objects.remove(k)
1093
1094 objset.create_index()
1095
1096 return objset, sbom
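Putting the pieces together, an image task might build and deploy an SBoM roughly as follows (the names and destination are assumptions for illustration, not the exact task code):

    # `image_pkg` is the root software_Package element for the image
    objset, sbom = create_sbom(d, "core-image-minimal", [image_pkg])
    dest = Path(d.getVar("DEPLOY_DIR_IMAGE")) / "core-image-minimal.spdx.json"
    write_jsonld_doc(d, objset, dest)
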
diff --git a/meta/lib/oe/sdk.py b/meta/lib/oe/sdk.py
index 37b59afd1a..9fe0fbb752 100644
--- a/meta/lib/oe/sdk.py
+++ b/meta/lib/oe/sdk.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -68,7 +70,7 @@ class Sdk(object, metaclass=ABCMeta):
68 #FIXME: using umbrella exc catching because bb.utils method raises it 70 #FIXME: using umbrella exc catching because bb.utils method raises it
69 except Exception as e: 71 except Exception as e:
70 bb.debug(1, "printing the stack trace\n %s" %traceback.format_exc()) 72 bb.debug(1, "printing the stack trace\n %s" %traceback.format_exc())
71 bb.error("unable to place %s in final SDK location" % sourcefile) 73 bb.fatal("unable to place %s in final SDK location" % sourcefile)
72 74
73 def mkdirhier(self, dirpath): 75 def mkdirhier(self, dirpath):
74 try: 76 try:
@@ -115,6 +117,10 @@ def sdk_list_installed_packages(d, target, rootfs_dir=None):
115 117
116 rootfs_dir = [sdk_output, os.path.join(sdk_output, target_path)][target is True] 118 rootfs_dir = [sdk_output, os.path.join(sdk_output, target_path)][target is True]
117 119
120 if target is False:
121 ipkgconf_sdk_target = d.getVar("IPKGCONF_SDK")
122 d.setVar("IPKGCONF_TARGET", ipkgconf_sdk_target)
123
118 img_type = d.getVar('IMAGE_PKGTYPE') 124 img_type = d.getVar('IMAGE_PKGTYPE')
119 import importlib 125 import importlib
120 cls = importlib.import_module('oe.package_manager.' + img_type) 126 cls = importlib.import_module('oe.package_manager.' + img_type)
@@ -142,13 +148,11 @@ def get_extra_sdkinfo(sstate_dir):
142 extra_info['filesizes'] = {} 148 extra_info['filesizes'] = {}
143 for root, _, files in os.walk(sstate_dir): 149 for root, _, files in os.walk(sstate_dir):
144 for fn in files: 150 for fn in files:
145 if fn.endswith('.tgz'): 151 # Note that this makes an assumption about the sstate filenames
152 if '.tar.' in fn and not fn.endswith('.siginfo'):
146 fsize = int(math.ceil(float(os.path.getsize(os.path.join(root, fn))) / 1024)) 153 fsize = int(math.ceil(float(os.path.getsize(os.path.join(root, fn))) / 1024))
147 task = fn.rsplit(':',1)[1].split('_',1)[1].split(',')[0] 154 task = fn.rsplit(':',1)[1].split('_',1)[1].split(',')[0]
148 origtotal = extra_info['tasksizes'].get(task, 0) 155 origtotal = extra_info['tasksizes'].get(task, 0)
149 extra_info['tasksizes'][task] = origtotal + fsize 156 extra_info['tasksizes'][task] = origtotal + fsize
150 extra_info['filesizes'][fn] = fsize 157 extra_info['filesizes'][fn] = fsize
151 return extra_info 158 return extra_info
152
153if __name__ == "__main__":
154 pass
diff --git a/meta/lib/oe/spdx.py b/meta/lib/oe/spdx.py
new file mode 100644
index 0000000000..7aaf2af5ed
--- /dev/null
+++ b/meta/lib/oe/spdx.py
@@ -0,0 +1,357 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7#
8# This library is intended to capture the JSON SPDX specification in a
9# type-safe manner. It is not intended to encode any OE-specific behaviors;
10# see sbom.py for that.
11#
12# The prose SPDX specification doesn't cover the JSON syntax for particular
13# constructs, which can make it hard to determine what the JSON should look
14# like. It is often much simpler to read the official SPDX JSON schema, which
15# can be found in the https://github.com/spdx/spdx-spec repository
16# in schemas/spdx-schema.json
17#
18
19import hashlib
20import itertools
21import json
22
23SPDX_VERSION = "2.2"
24
25
26#
27# The following are the support classes that are used to implement SPDX objects
28#
29
30class _Property(object):
31 """
32 A generic SPDX object property. The different types will derive from this
33 class
34 """
35
36 def __init__(self, *, default=None):
37 self.default = default
38
39 def setdefault(self, dest, name):
40 if self.default is not None:
41 dest.setdefault(name, self.default)
42
43
44class _String(_Property):
45 """
46 A scalar string property for an SPDX object
47 """
48
49 def __init__(self, **kwargs):
50 super().__init__(**kwargs)
51
52 def set_property(self, attrs, name):
53 def get_helper(obj):
54 return obj._spdx[name]
55
56 def set_helper(obj, value):
57 obj._spdx[name] = value
58
59 def del_helper(obj):
60 del obj._spdx[name]
61
62 attrs[name] = property(get_helper, set_helper, del_helper)
63
64 def init(self, source):
65 return source
66
67
68class _Object(_Property):
69 """
70 A scalar SPDX object property of a SPDX object
71 """
72
73 def __init__(self, cls, **kwargs):
74 super().__init__(**kwargs)
75 self.cls = cls
76
77 def set_property(self, attrs, name):
78 def get_helper(obj):
79 if name not in obj._spdx:
80 obj._spdx[name] = self.cls()
81 return obj._spdx[name]
82
83 def set_helper(obj, value):
84 obj._spdx[name] = value
85
86 def del_helper(obj):
87 del obj._spdx[name]
88
89 attrs[name] = property(get_helper, set_helper, del_helper)
90
91 def init(self, source):
92 return self.cls(**source)
93
94
95class _ListProperty(_Property):
96 """
97 A list of SPDX properties
98 """
99
100 def __init__(self, prop, **kwargs):
101 super().__init__(**kwargs)
102 self.prop = prop
103
104 def set_property(self, attrs, name):
105 def get_helper(obj):
106 if name not in obj._spdx:
107 obj._spdx[name] = []
108 return obj._spdx[name]
109
110 def set_helper(obj, value):
111 obj._spdx[name] = list(value)
112
113 def del_helper(obj):
114 del obj._spdx[name]
115
116 attrs[name] = property(get_helper, set_helper, del_helper)
117
118 def init(self, source):
119 return [self.prop.init(o) for o in source]
120
121
122class _StringList(_ListProperty):
123 """
124 A list of strings as a property for an SPDX object
125 """
126
127 def __init__(self, **kwargs):
128 super().__init__(_String(), **kwargs)
129
130
131class _ObjectList(_ListProperty):
132 """
133 A list of SPDX objects as a property for an SPDX object
134 """
135
136 def __init__(self, cls, **kwargs):
137 super().__init__(_Object(cls), **kwargs)
138
139
140class MetaSPDXObject(type):
141 """
142 A metaclass that allows properties (anything derived from a _Property
143 class) to be defined for a SPDX object
144 """
145 def __new__(mcls, name, bases, attrs):
146 attrs["_properties"] = {}
147
148 for key in attrs.keys():
149 if isinstance(attrs[key], _Property):
150 prop = attrs[key]
151 attrs["_properties"][key] = prop
152 prop.set_property(attrs, key)
153
154 return super().__new__(mcls, name, bases, attrs)
155
156
157class SPDXObject(metaclass=MetaSPDXObject):
158 """
159 The base SPDX object; all SPDX spec classes must derive from this class
160 """
161 def __init__(self, **d):
162 self._spdx = {}
163
164 for name, prop in self._properties.items():
165 prop.setdefault(self._spdx, name)
166 if name in d:
167 self._spdx[name] = prop.init(d[name])
168
169 def serializer(self):
170 return self._spdx
171
172 def __setattr__(self, name, value):
173 if name in self._properties or name == "_spdx":
174 super().__setattr__(name, value)
175 return
176 raise KeyError("%r is not a valid SPDX property" % name)
177
178#
179# These are the SPDX objects implemented from the spec. The *only* properties
180# that can be added to these objects are ones directly specified in the SPDX
181# spec; however, you may add helper functions to make operations easier.
182#
183# Defaults should *only* be specified if the SPDX spec says there is a certain
184# required value for a field (e.g. dataLicense), or if the field is mandatory
185# and has a sane "this field is unknown" value (e.g. "NOASSERTION")
186#
187
188class SPDXAnnotation(SPDXObject):
189 annotationDate = _String()
190 annotationType = _String()
191 annotator = _String()
192 comment = _String()
193
194class SPDXChecksum(SPDXObject):
195 algorithm = _String()
196 checksumValue = _String()
197
198
199class SPDXRelationship(SPDXObject):
200 spdxElementId = _String()
201 relatedSpdxElement = _String()
202 relationshipType = _String()
203 comment = _String()
204 annotations = _ObjectList(SPDXAnnotation)
205
206
207class SPDXExternalReference(SPDXObject):
208 referenceCategory = _String()
209 referenceType = _String()
210 referenceLocator = _String()
211
212
213class SPDXPackageVerificationCode(SPDXObject):
214 packageVerificationCodeValue = _String()
215 packageVerificationCodeExcludedFiles = _StringList()
216
217
218class SPDXPackage(SPDXObject):
219 ALLOWED_CHECKSUMS = [
220 "SHA1",
221 "SHA224",
222 "SHA256",
223 "SHA384",
224 "SHA512",
225 "MD2",
226 "MD4",
227 "MD5",
228 "MD6",
229 ]
230
231 name = _String()
232 SPDXID = _String()
233 versionInfo = _String()
234 downloadLocation = _String(default="NOASSERTION")
235 supplier = _String(default="NOASSERTION")
236 homepage = _String()
237 licenseConcluded = _String(default="NOASSERTION")
238 licenseDeclared = _String(default="NOASSERTION")
239 summary = _String()
240 description = _String()
241 sourceInfo = _String()
242 copyrightText = _String(default="NOASSERTION")
243 licenseInfoFromFiles = _StringList(default=["NOASSERTION"])
244 externalRefs = _ObjectList(SPDXExternalReference)
245 packageVerificationCode = _Object(SPDXPackageVerificationCode)
246 hasFiles = _StringList()
247 packageFileName = _String()
248 annotations = _ObjectList(SPDXAnnotation)
249 checksums = _ObjectList(SPDXChecksum)
250
251
252class SPDXFile(SPDXObject):
253 SPDXID = _String()
254 fileName = _String()
255 licenseConcluded = _String(default="NOASSERTION")
256 copyrightText = _String(default="NOASSERTION")
257 licenseInfoInFiles = _StringList(default=["NOASSERTION"])
258 checksums = _ObjectList(SPDXChecksum)
259 fileTypes = _StringList()
260
261
262class SPDXCreationInfo(SPDXObject):
263 created = _String()
264 licenseListVersion = _String()
265 comment = _String()
266 creators = _StringList()
267
268
269class SPDXExternalDocumentRef(SPDXObject):
270 externalDocumentId = _String()
271 spdxDocument = _String()
272 checksum = _Object(SPDXChecksum)
273
274
275class SPDXExtractedLicensingInfo(SPDXObject):
276 name = _String()
277 comment = _String()
278 licenseId = _String()
279 extractedText = _String()
280
281
282class SPDXDocument(SPDXObject):
283 spdxVersion = _String(default="SPDX-" + SPDX_VERSION)
284 dataLicense = _String(default="CC0-1.0")
285 SPDXID = _String(default="SPDXRef-DOCUMENT")
286 name = _String()
287 documentNamespace = _String()
288 creationInfo = _Object(SPDXCreationInfo)
289 packages = _ObjectList(SPDXPackage)
290 files = _ObjectList(SPDXFile)
291 relationships = _ObjectList(SPDXRelationship)
292 externalDocumentRefs = _ObjectList(SPDXExternalDocumentRef)
293 hasExtractedLicensingInfos = _ObjectList(SPDXExtractedLicensingInfo)
294
295 def __init__(self, **d):
296 super().__init__(**d)
297
298 def to_json(self, f, *, sort_keys=False, indent=None, separators=None):
299 class Encoder(json.JSONEncoder):
300 def default(self, o):
301 if isinstance(o, SPDXObject):
302 return o.serializer()
303
304 return super().default(o)
305
306 sha1 = hashlib.sha1()
307 for chunk in Encoder(
308 sort_keys=sort_keys,
309 indent=indent,
310 separators=separators,
311 ).iterencode(self):
312 chunk = chunk.encode("utf-8")
313 f.write(chunk)
314 sha1.update(chunk)
315
316 return sha1.hexdigest()
317
318 @classmethod
319 def from_json(cls, f):
320 return cls(**json.load(f))
321
322 def add_relationship(self, _from, relationship, _to, *, comment=None, annotation=None):
323 if isinstance(_from, SPDXObject):
324 from_spdxid = _from.SPDXID
325 else:
326 from_spdxid = _from
327
328 if isinstance(_to, SPDXObject):
329 to_spdxid = _to.SPDXID
330 else:
331 to_spdxid = _to
332
333 r = SPDXRelationship(
334 spdxElementId=from_spdxid,
335 relatedSpdxElement=to_spdxid,
336 relationshipType=relationship,
337 )
338
339 if comment is not None:
340 r.comment = comment
341
342 if annotation is not None:
343 r.annotations.append(annotation)
344
345 self.relationships.append(r)
346
347 def find_by_spdxid(self, spdxid):
348 for o in itertools.chain(self.packages, self.files):
349 if o.SPDXID == spdxid:
350 return o
351 return None
352
353 def find_external_document_ref(self, namespace):
354 for r in self.externalDocumentRefs:
355 if r.spdxDocument == namespace:
356 return r
357 return None
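A self-contained sketch of using these SPDX 2.2 bindings to build and serialize a small document (names and values are illustrative):

    import oe.spdx

    doc = oe.spdx.SPDXDocument(name="example-doc")
    doc.documentNamespace = "http://spdx.org/spdxdocs/example"

    pkg = oe.spdx.SPDXPackage(name="busybox", versionInfo="1.36.1")
    pkg.SPDXID = "SPDXRef-busybox"
    doc.packages.append(pkg)

    # DESCRIBES relationship from the document to the package
    doc.add_relationship(doc, "DESCRIBES", pkg)

    with open("example.spdx.json", "wb") as f:
        # to_json() writes the document and returns its SHA1 hex digest
        doc_sha1 = doc.to_json(f, sort_keys=True, indent=2)
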
diff --git a/meta/lib/oe/spdx30.py b/meta/lib/oe/spdx30.py
new file mode 100644
index 0000000000..cd97eebd18
--- /dev/null
+++ b/meta/lib/oe/spdx30.py
@@ -0,0 +1,5593 @@
1#! /usr/bin/env python3
2#
3# Generated Python bindings from a SHACL model
4#
5# This file was automatically generated by shacl2code. DO NOT MANUALLY MODIFY IT
6#
7# SPDX-License-Identifier: MIT
8
9import functools
10import hashlib
11import json
12import re
13import sys
14import threading
15import time
16from contextlib import contextmanager
17from datetime import datetime, timezone, timedelta
18from enum import Enum
19from abc import ABC, abstractmethod
20
21
22def check_type(obj, types):
23 if not isinstance(obj, types):
24 if isinstance(types, (list, tuple)):
25 raise TypeError(
26 f"Value must be one of type: {', '.join(t.__name__ for t in types)}. Got {type(obj)}"
27 )
28 raise TypeError(f"Value must be of type {types.__name__}. Got {type(obj)}")
29
30
31class Property(ABC):
32 """
33 A generic SHACL object property. The different types will derive from this
34 class
35 """
36
37 def __init__(self, *, pattern=None):
38 self.pattern = pattern
39
40 def init(self):
41 return None
42
43 def validate(self, value):
44 check_type(value, self.VALID_TYPES)
45 if self.pattern is not None and not re.search(
46 self.pattern, self.to_string(value)
47 ):
48 raise ValueError(
49 f"Value is not correctly formatted. Got '{self.to_string(value)}'"
50 )
51
52 def set(self, value):
53 return value
54
55 def check_min_count(self, value, min_count):
56 return min_count == 1
57
58 def check_max_count(self, value, max_count):
59 return max_count == 1
60
61 def elide(self, value):
62 return value is None
63
64 def walk(self, value, callback, path):
65 callback(value, path)
66
67 def iter_objects(self, value, recursive, visited):
68 return []
69
70 def link_prop(self, value, objectset, missing, visited):
71 return value
72
73 def to_string(self, value):
74 return str(value)
75
76 @abstractmethod
77 def encode(self, encoder, value, state):
78 pass
79
80 @abstractmethod
81 def decode(self, decoder, *, objectset=None):
82 pass
83
84
85class StringProp(Property):
86 """
87 A scalar string property for a SHACL object
88 """
89
90 VALID_TYPES = str
91
92 def set(self, value):
93 return str(value)
94
95 def encode(self, encoder, value, state):
96 encoder.write_string(value)
97
98 def decode(self, decoder, *, objectset=None):
99 return decoder.read_string()
100
101
102class AnyURIProp(StringProp):
103 def encode(self, encoder, value, state):
104 encoder.write_iri(value)
105
106 def decode(self, decoder, *, objectset=None):
107 return decoder.read_iri()
108
109
110class DateTimeProp(Property):
111 """
112 A Date/Time Object with optional timezone
113 """
114
115 VALID_TYPES = datetime
116 UTC_FORMAT_STR = "%Y-%m-%dT%H:%M:%SZ"
117 REGEX = r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(Z|[+-]\d{2}:\d{2})?$"
118
119 def set(self, value):
120 return self._normalize(value)
121
122 def encode(self, encoder, value, state):
123 encoder.write_datetime(self.to_string(value))
124
125 def decode(self, decoder, *, objectset=None):
126 s = decoder.read_datetime()
127 if s is None:
128 return None
129 v = self.from_string(s)
130 return self._normalize(v)
131
132 def _normalize(self, value):
133 if value.utcoffset() is None:
134 value = value.astimezone()
135 offset = value.utcoffset()
136 seconds = offset % timedelta(minutes=-1 if offset.total_seconds() < 0 else 1)
137 if seconds:
138 offset = offset - seconds
139 value = value.replace(tzinfo=timezone(offset))
140 value = value.replace(microsecond=0)
141 return value
142
143 def to_string(self, value):
144 value = self._normalize(value)
145 if value.tzinfo == timezone.utc:
146 return value.strftime(self.UTC_FORMAT_STR)
147 return value.isoformat()
148
149 def from_string(self, value):
150 if not re.match(self.REGEX, value):
151 raise ValueError(f"'{value}' is not a correctly formatted datetime")
152 if "Z" in value:
153 d = datetime(
154 *(time.strptime(value, self.UTC_FORMAT_STR)[0:6]),
155 tzinfo=timezone.utc,
156 )
157 else:
158 d = datetime.fromisoformat(value)
159
160 return self._normalize(d)
161
162
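A sketch of the normalization this property performs, exercised against the class directly:

    from datetime import datetime, timezone

    p = DateTimeProp()
    # Microseconds are dropped and UTC serializes with the "Z" suffix
    s = p.to_string(datetime(2024, 1, 2, 3, 4, 5, 987654, tzinfo=timezone.utc))
    # s == "2024-01-02T03:04:05Z"

    # Non-UTC offsets round-trip through isoformat()
    d = p.from_string("2024-01-02T03:04:05+05:30")
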
163class DateTimeStampProp(DateTimeProp):
164 """
165 A Date/Time Object with required timestamp
166 """
167
168 REGEX = r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(Z|[+-]\d{2}:\d{2})$"
169
170
171class IntegerProp(Property):
172 VALID_TYPES = int
173
174 def set(self, value):
175 return int(value)
176
177 def encode(self, encoder, value, state):
178 encoder.write_integer(value)
179
180 def decode(self, decoder, *, objectset=None):
181 return decoder.read_integer()
182
183
184class PositiveIntegerProp(IntegerProp):
185 def validate(self, value):
186 super().validate(value)
187 if value < 1:
188 raise ValueError(f"Value must be >=1. Got {value}")
189
190
191class NonNegativeIntegerProp(IntegerProp):
192 def validate(self, value):
193 super().validate(value)
194 if value < 0:
195 raise ValueError(f"Value must be >= 0. Got {value}")
196
197
198class BooleanProp(Property):
199 VALID_TYPES = bool
200
201 def set(self, value):
202 return bool(value)
203
204 def encode(self, encoder, value, state):
205 encoder.write_bool(value)
206
207 def decode(self, decoder, *, objectset=None):
208 return decoder.read_bool()
209
210
211class FloatProp(Property):
212 VALID_TYPES = (float, int)
213
214 def set(self, value):
215 return float(value)
216
217 def encode(self, encoder, value, state):
218 encoder.write_float(value)
219
220 def decode(self, decoder, *, objectset=None):
221 return decoder.read_float()
222
223
224class IRIProp(Property):
225 def __init__(self, context=[], *, pattern=None):
226 super().__init__(pattern=pattern)
227 self.context = context
228
229 def compact(self, value):
230 for iri, compact in self.context:
231 if value == iri:
232 return compact
233 return None
234
235 def expand(self, value):
236 for iri, compact in self.context:
237 if value == compact:
238 return iri
239 return None
240
241 def iri_values(self):
242 return (iri for iri, _ in self.context)
243
244
245class ObjectProp(IRIProp):
246 """
247 A scalar SHACL object property of a SHACL object
248 """
249
250 def __init__(self, cls, required, context=[]):
251 super().__init__(context)
252 self.cls = cls
253 self.required = required
254
255 def init(self):
256 if self.required and not self.cls.IS_ABSTRACT:
257 return self.cls()
258 return None
259
260 def validate(self, value):
261 check_type(value, (self.cls, str))
262
263 def walk(self, value, callback, path):
264 if value is None:
265 return
266
267 if not isinstance(value, str):
268 value.walk(callback, path)
269 else:
270 callback(value, path)
271
272 def iter_objects(self, value, recursive, visited):
273 if value is None or isinstance(value, str):
274 return
275
276 if value not in visited:
277 visited.add(value)
278 yield value
279
280 if recursive:
281 for c in value.iter_objects(recursive=True, visited=visited):
282 yield c
283
284 def encode(self, encoder, value, state):
285 if value is None:
286 raise ValueError("Object cannot be None")
287
288 if isinstance(value, str):
289 encoder.write_iri(value, self.compact(value))
290 return
291
292 return value.encode(encoder, state)
293
294 def decode(self, decoder, *, objectset=None):
295 iri = decoder.read_iri()
296 if iri is None:
297 return self.cls.decode(decoder, objectset=objectset)
298
299 iri = self.expand(iri) or iri
300
301 if objectset is None:
302 return iri
303
304 obj = objectset.find_by_id(iri)
305 if obj is None:
306 return iri
307
308 self.validate(obj)
309 return obj
310
311 def link_prop(self, value, objectset, missing, visited):
312 if value is None:
313 return value
314
315 if isinstance(value, str):
316 o = objectset.find_by_id(value)
317 if o is not None:
318 self.validate(o)
319 return o
320
321 if missing is not None:
322 missing.add(value)
323
324 return value
325
326 # De-duplicate IDs
327 if value._id:
328 value = objectset.find_by_id(value._id, value)
329 self.validate(value)
330
331 value.link_helper(objectset, missing, visited)
332 return value
333
334
335class ListProxy(object):
336 def __init__(self, prop, data=None):
337 if data is None:
338 self.__data = []
339 else:
340 self.__data = data
341 self.__prop = prop
342
343 def append(self, value):
344 self.__prop.validate(value)
345 self.__data.append(self.__prop.set(value))
346
347 def insert(self, idx, value):
348 self.__prop.validate(value)
349 self.__data.insert(idx, self.__prop.set(value))
350
351 def extend(self, items):
352 for i in items:
353 self.append(i)
354
355 def sort(self, *args, **kwargs):
356 self.__data.sort(*args, **kwargs)
357
358 def __getitem__(self, key):
359 return self.__data[key]
360
361 def __setitem__(self, key, value):
362 if isinstance(key, slice):
363 for v in value:
364 self.__prop.validate(v)
365 self.__data[key] = [self.__prop.set(v) for v in value]
366 else:
367 self.__prop.validate(value)
368 self.__data[key] = self.__prop.set(value)
369
370 def __delitem__(self, key):
371 del self.__data[key]
372
373 def __contains__(self, item):
374 return item in self.__data
375
376 def __iter__(self):
377 return iter(self.__data)
378
379 def __len__(self):
380 return len(self.__data)
381
382 def __str__(self):
383 return str(self.__data)
384
385 def __repr__(self):
386 return repr(self.__data)
387
388 def __eq__(self, other):
389 if isinstance(other, ListProxy):
390 return self.__data == other.__data
391
392 return self.__data == other
393
394
395class ListProp(Property):
396 """
397 A list of SHACL properties
398 """
399
400 VALID_TYPES = (list, ListProxy)
401
402 def __init__(self, prop):
403 super().__init__()
404 self.prop = prop
405
406 def init(self):
407 return ListProxy(self.prop)
408
409 def validate(self, value):
410 super().validate(value)
411
412 for i in value:
413 self.prop.validate(i)
414
415 def set(self, value):
416 if isinstance(value, ListProxy):
417 return value
418
419 return ListProxy(self.prop, [self.prop.set(d) for d in value])
420
421 def check_min_count(self, value, min_count):
422 check_type(value, ListProxy)
423 return len(value) >= min_count
424
425 def check_max_count(self, value, max_count):
426 check_type(value, ListProxy)
427 return len(value) <= max_count
428
429 def elide(self, value):
430 check_type(value, ListProxy)
431 return len(value) == 0
432
433 def walk(self, value, callback, path):
434 callback(value, path)
435 for idx, v in enumerate(value):
436 self.prop.walk(v, callback, path + [f"[{idx}]"])
437
438 def iter_objects(self, value, recursive, visited):
439 for v in value:
440 for c in self.prop.iter_objects(v, recursive, visited):
441 yield c
442
443 def link_prop(self, value, objectset, missing, visited):
444 # A ListProxy and a plain list are linked element-by-element the same way
445 data = [self.prop.link_prop(v, objectset, missing, visited) for v in value]
448
449 return ListProxy(self.prop, data=data)
450
451 def encode(self, encoder, value, state):
452 check_type(value, ListProxy)
453
454 with encoder.write_list() as list_s:
455 for v in value:
456 with list_s.write_list_item() as item_s:
457 self.prop.encode(item_s, v, state)
458
459 def decode(self, decoder, *, objectset=None):
460 data = []
461 for val_d in decoder.read_list():
462 v = self.prop.decode(val_d, objectset=objectset)
463 self.prop.validate(v)
464 data.append(v)
465
466 return ListProxy(self.prop, data=data)
467
468
469class EnumProp(IRIProp):
470 VALID_TYPES = str
471
472 def __init__(self, values, *, pattern=None):
473 super().__init__(values, pattern=pattern)
474
475 def validate(self, value):
476 super().validate(value)
477
478 valid_values = list(self.iri_values())
479 if value not in valid_values:
480 raise ValueError(
481 f"'{value}' is not a valid value. Choose one of {' '.join(valid_values)}"
482 )
483
484 def encode(self, encoder, value, state):
485 encoder.write_enum(value, self, self.compact(value))
486
487 def decode(self, decoder, *, objectset=None):
488 v = decoder.read_enum(self)
489 return self.expand(v) or v
490
491
492class NodeKind(Enum):
493 BlankNode = 1
494 IRI = 2
495 BlankNodeOrIRI = 3
496
497
498def is_IRI(s):
499 if not isinstance(s, str):
500 return False
501 if s.startswith("_:"):
502 return False
503 if ":" not in s:
504 return False
505 return True
506
507
508def is_blank_node(s):
509 if not isinstance(s, str):
510 return False
511 if not s.startswith("_:"):
512 return False
513 return True
514
515
516def register(type_iri, *, compact_type=None, abstract=False):
517 def add_class(key, c):
518 assert (
519 key not in SHACLObject.CLASSES
520 ), f"{key} already registered to {SHACLObject.CLASSES[key].__name__}"
521 SHACLObject.CLASSES[key] = c
522
523 def decorator(c):
524 global NAMED_INDIVIDUALS
525
526 assert issubclass(
527 c, SHACLObject
528 ), f"{c.__name__} is not derived from SHACLObject"
529
530 c._OBJ_TYPE = type_iri
531 c.IS_ABSTRACT = abstract
532 add_class(type_iri, c)
533
534 c._OBJ_COMPACT_TYPE = compact_type
535 if compact_type:
536 add_class(compact_type, c)
537
538 NAMED_INDIVIDUALS |= set(c.NAMED_INDIVIDUALS.values())
539
540 # Registration is deferred until the first instance of the class is
541 # created so that it has access to any other defined classes
542 c._NEEDS_REG = True
543 return c
544
545 return decorator
546
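For reference, a hedged sketch of how the generated classes further down use this decorator (the IRI, compact type, and property are made up for illustration):

    @register("http://example.com/ns#Widget", compact_type="ex:Widget")
    class ex_Widget(SHACLObject):
        NODE_KIND = NodeKind.BlankNodeOrIRI
        NAMED_INDIVIDUALS = {}

        @classmethod
        def _register_props(cls):
            super()._register_props()
            # Adds a "name" string property with its fully qualified IRI
            cls._add_property(
                "name", StringProp(), iri="http://example.com/ns#name"
            )
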
547
548register_lock = threading.Lock()
549NAMED_INDIVIDUALS = set()
550
551
552@functools.total_ordering
553class SHACLObject(object):
554 CLASSES = {}
555 NODE_KIND = NodeKind.BlankNodeOrIRI
556 ID_ALIAS = None
557 IS_ABSTRACT = True
558
559 def __init__(self, **kwargs):
560 if self._is_abstract():
561 raise NotImplementedError(
562 f"{self.__class__.__name__} is abstract and cannot be implemented"
563 )
564
565 with register_lock:
566 cls = self.__class__
567 if cls._NEEDS_REG:
568 cls._OBJ_PROPERTIES = {}
569 cls._OBJ_IRIS = {}
570 cls._register_props()
571 cls._NEEDS_REG = False
572
573 self.__dict__["_obj_data"] = {}
574 self.__dict__["_obj_metadata"] = {}
575
576 for iri, prop, _, _, _, _ in self.__iter_props():
577 self.__dict__["_obj_data"][iri] = prop.init()
578
579 for k, v in kwargs.items():
580 setattr(self, k, v)
581
582 def _is_abstract(self):
583 return self.__class__.IS_ABSTRACT
584
585 @classmethod
586 def _register_props(cls):
587 cls._add_property("_id", StringProp(), iri="@id")
588
589 @classmethod
590 def _add_property(
591 cls,
592 pyname,
593 prop,
594 iri,
595 min_count=None,
596 max_count=None,
597 compact=None,
598 ):
599 if pyname in cls._OBJ_IRIS:
600 raise KeyError(f"'{pyname}' is already defined for '{cls.__name__}'")
601 if iri in cls._OBJ_PROPERTIES:
602 raise KeyError(f"'{iri}' is already defined for '{cls.__name__}'")
603
604 while hasattr(cls, pyname):
605 pyname = pyname + "_"
606
607 pyname = sys.intern(pyname)
608 iri = sys.intern(iri)
609
610 cls._OBJ_IRIS[pyname] = iri
611 cls._OBJ_PROPERTIES[iri] = (prop, min_count, max_count, pyname, compact)
612
613 def __setattr__(self, name, value):
614 if name == self.ID_ALIAS:
615 self["@id"] = value
616 return
617
618 try:
619 iri = self._OBJ_IRIS[name]
620 self[iri] = value
621 except KeyError:
622 raise AttributeError(
623 f"'{name}' is not a valid property of {self.__class__.__name__}"
624 )
625
626 def __getattr__(self, name):
627 if name in self._OBJ_IRIS:
628 return self.__dict__["_obj_data"][self._OBJ_IRIS[name]]
629
630 if name == self.ID_ALIAS:
631 return self.__dict__["_obj_data"]["@id"]
632
633 if name == "_metadata":
634 return self.__dict__["_obj_metadata"]
635
636 if name == "_IRI":
637 return self._OBJ_IRIS
638
639 if name == "TYPE":
640 return self.__class__._OBJ_TYPE
641
642 if name == "COMPACT_TYPE":
643 return self.__class__._OBJ_COMPACT_TYPE
644
645 raise AttributeError(
646 f"'{name}' is not a valid property of {self.__class__.__name__}"
647 )
648
649 def __delattr__(self, name):
650 if name == self.ID_ALIAS:
651 del self["@id"]
652 return
653
654 try:
655 iri = self._OBJ_IRIS[name]
656 del self[iri]
657 except KeyError:
658 raise AttributeError(
659 f"'{name}' is not a valid property of {self.__class__.__name__}"
660 )
661
662 def __get_prop(self, iri):
663 if iri not in self._OBJ_PROPERTIES:
664 raise KeyError(
665 f"'{iri}' is not a valid property of {self.__class__.__name__}"
666 )
667
668 return self._OBJ_PROPERTIES[iri]
669
670 def __iter_props(self):
671 for iri, v in self._OBJ_PROPERTIES.items():
672 yield iri, *v
673
674 def __getitem__(self, iri):
675 return self.__dict__["_obj_data"][iri]
676
677 def __setitem__(self, iri, value):
678 if iri == "@id":
679 if self.NODE_KIND == NodeKind.BlankNode:
680 if not is_blank_node(value):
681 raise ValueError(
682 f"{self.__class__.__name__} ({id(self)}) can only have local reference. Property '{iri}' cannot be set to '{value}' and must start with '_:'"
683 )
684 elif self.NODE_KIND == NodeKind.IRI:
685 if not is_IRI(value):
686 raise ValueError(
687 f"{self.__class__.__name__} ({id(self)}) can only have an IRI value. Property '{iri}' cannot be set to '{value}'"
688 )
689 else:
690 if not is_blank_node(value) and not is_IRI(value):
691 raise ValueError(
692 f"{self.__class__.__name__} ({id(self)}) Has invalid Property '{iri}' '{value}'. Must be a blank node or IRI"
693 )
694
695 prop, _, _, _, _ = self.__get_prop(iri)
696 prop.validate(value)
697 self.__dict__["_obj_data"][iri] = prop.set(value)
698
699 def __delitem__(self, iri):
700 prop, _, _, _, _ = self.__get_prop(iri)
701 self.__dict__["_obj_data"][iri] = prop.init()
702
703 def __iter__(self):
704 return iter(self._OBJ_PROPERTIES.keys())
705
706 def walk(self, callback, path=None):
707 """
708 Walk object tree, invoking the callback for each item
709
710 Callback has the form:
711
712 def callback(object, path):
713 """
714 if path is None:
715 path = ["."]
716
717 if callback(self, path):
718 for iri, prop, _, _, _, _ in self.__iter_props():
719 prop.walk(self.__dict__["_obj_data"][iri], callback, path + [f".{iri}"])
720
721 def property_keys(self):
722 for iri, _, _, _, pyname, compact in self.__iter_props():
723 if iri == "@id":
724 compact = self.ID_ALIAS
725 yield pyname, iri, compact
726
727 def iter_objects(self, *, recursive=False, visited=None):
728 """
729 Iterate over all objects that are children of this one
730 """
731 if visited is None:
732 visited = set()
733
734 for iri, prop, _, _, _, _ in self.__iter_props():
735 for c in prop.iter_objects(
736 self.__dict__["_obj_data"][iri], recursive=recursive, visited=visited
737 ):
738 yield c
739
740 def encode(self, encoder, state):
741 idname = self.ID_ALIAS or self._OBJ_IRIS["_id"]
742 if not self._id and self.NODE_KIND == NodeKind.IRI:
743 raise ValueError(
744 f"{self.__class__.__name__} ({id(self)}) must have a IRI for property '{idname}'"
745 )
746
747 if state.is_written(self):
748 encoder.write_iri(state.get_object_id(self))
749 return
750
751 state.add_written(self)
752
753 with encoder.write_object(
754 self,
755 state.get_object_id(self),
756 bool(self._id) or state.is_refed(self),
757 ) as obj_s:
758 self._encode_properties(obj_s, state)
759
760 def _encode_properties(self, encoder, state):
761 for iri, prop, min_count, max_count, pyname, compact in self.__iter_props():
762 value = self.__dict__["_obj_data"][iri]
763 if prop.elide(value):
764 if min_count:
765 raise ValueError(
766 f"Property '{pyname}' in {self.__class__.__name__} ({id(self)}) is required (currently {value!r})"
767 )
768 continue
769
770 if min_count is not None:
771 if not prop.check_min_count(value, min_count):
772 raise ValueError(
773 f"Property '{pyname}' in {self.__class__.__name__} ({id(self)}) requires a minimum of {min_count} elements"
774 )
775
776 if max_count is not None:
777 if not prop.check_max_count(value, max_count):
778 raise ValueError(
779 f"Property '{pyname}' in {self.__class__.__name__} ({id(self)}) requires a maximum of {max_count} elements"
780 )
781
782 if iri == self._OBJ_IRIS["_id"]:
783 continue
784
785 with encoder.write_property(iri, compact) as prop_s:
786 prop.encode(prop_s, value, state)
787
788 @classmethod
789 def _make_object(cls, typ):
790 if typ not in cls.CLASSES:
791 raise TypeError(f"Unknown type {typ}")
792
793 return cls.CLASSES[typ]()
794
795 @classmethod
796 def decode(cls, decoder, *, objectset=None):
797 typ, obj_d = decoder.read_object()
798 if typ is None:
799 raise TypeError("Unable to determine type for object")
800
801 obj = cls._make_object(typ)
802 for key in (obj.ID_ALIAS, obj._OBJ_IRIS["_id"]):
803 with obj_d.read_property(key) as prop_d:
804 if prop_d is None:
805 continue
806
807 _id = prop_d.read_iri()
808 if _id is None:
809 raise TypeError(f"Object key '{key}' is the wrong type")
810
811 obj._id = _id
812 break
813
814 if obj.NODE_KIND == NodeKind.IRI and not obj._id:
815 raise ValueError("Object is missing required IRI")
816
817 if objectset is not None:
818 if obj._id:
819 v = objectset.find_by_id(_id)
820 if v is not None:
821 return v
822
823 obj._decode_properties(obj_d, objectset=objectset)
824
825 if objectset is not None:
826 objectset.add_index(obj)
827 return obj
828
829 def _decode_properties(self, decoder, objectset=None):
830 for key in decoder.object_keys():
831 if not self._decode_prop(decoder, key, objectset=objectset):
832 raise KeyError(f"Unknown property '{key}'")
833
834 def _decode_prop(self, decoder, key, objectset=None):
835 if key in (self._OBJ_IRIS["_id"], self.ID_ALIAS):
836 return True
837
838 for iri, prop, _, _, _, compact in self.__iter_props():
839 if compact == key:
840 read_key = compact
841 elif iri == key:
842 read_key = iri
843 else:
844 continue
845
846 with decoder.read_property(read_key) as prop_d:
847 v = prop.decode(prop_d, objectset=objectset)
848 prop.validate(v)
849 self.__dict__["_obj_data"][iri] = v
850 return True
851
852 return False
853
854 def link_helper(self, objectset, missing, visited):
855 if self in visited:
856 return
857
858 visited.add(self)
859
860 for iri, prop, _, _, _, _ in self.__iter_props():
861 self.__dict__["_obj_data"][iri] = prop.link_prop(
862 self.__dict__["_obj_data"][iri],
863 objectset,
864 missing,
865 visited,
866 )
867
868 def __str__(self):
869 parts = [
870 f"{self.__class__.__name__}(",
871 ]
872 if self._id:
873 parts.append(f"@id='{self._id}'")
874 parts.append(")")
875 return "".join(parts)
876
877 def __hash__(self):
878 return super().__hash__()
879
880 def __eq__(self, other):
881 return super().__eq__(other)
882
883 def __lt__(self, other):
884 def sort_key(obj):
885 if isinstance(obj, str):
886 return (obj, "", "", "")
887 return (
888 obj._id or "",
889 obj.TYPE,
890 getattr(obj, "name", None) or "",
891 id(obj),
892 )
893
894 return sort_key(self) < sort_key(other)
895
896
897class SHACLExtensibleObject(object):
898 CLOSED = False
899
900 def __init__(self, typ=None, **kwargs):
901 if typ:
902 self.__dict__["_obj_TYPE"] = (typ, None)
903 else:
904 self.__dict__["_obj_TYPE"] = (self._OBJ_TYPE, self._OBJ_COMPACT_TYPE)
905 super().__init__(**kwargs)
906
907 def _is_abstract(self):
908 # Unknown classes are assumed to not be abstract so that they can be
909 # deserialized
910 typ = self.__dict__["_obj_TYPE"][0]
911 if typ in self.__class__.CLASSES:
912 return self.__class__.CLASSES[typ].IS_ABSTRACT
913
914 return False
915
916 @classmethod
917 def _make_object(cls, typ):
918 # Check for a known type, and if so, deserialize as that instead
919 if typ in cls.CLASSES:
920 return cls.CLASSES[typ]()
921
922 obj = cls(typ)
923 return obj
924
925 def _decode_properties(self, decoder, objectset=None):
926 def decode_value(d):
927 if not d.is_list():
928 return d.read_value()
929
930 return [decode_value(val_d) for val_d in d.read_list()]
931
932 if self.CLOSED:
933 super()._decode_properties(decoder, objectset=objectset)
934 return
935
936 for key in decoder.object_keys():
937 if self._decode_prop(decoder, key, objectset=objectset):
938 continue
939
940 if not is_IRI(key):
941 raise KeyError(
942 f"Extensible object properties must be IRIs. Got '{key}'"
943 )
944
945 with decoder.read_property(key) as prop_d:
946 self.__dict__["_obj_data"][key] = decode_value(prop_d)
947
948 def _encode_properties(self, encoder, state):
949 def encode_value(encoder, v):
950 if isinstance(v, bool):
951 encoder.write_bool(v)
952 elif isinstance(v, str):
953 encoder.write_string(v)
954 elif isinstance(v, int):
955 encoder.write_integer(v)
956 elif isinstance(v, float):
957 encoder.write_float(v)
958 elif isinstance(v, list):
959 with encoder.write_list() as list_s:
960 for i in v:
961 with list_s.write_list_item() as item_s:
962 encode_value(item_s, i)
963 else:
964 raise TypeError(
965 f"Unsupported serialized type {type(v)} with value '{v}'"
966 )
967
968 super()._encode_properties(encoder, state)
969 if self.CLOSED:
970 return
971
972 for iri, value in self.__dict__["_obj_data"].items():
973 if iri in self._OBJ_PROPERTIES:
974 continue
975
976 with encoder.write_property(iri) as prop_s:
977 encode_value(prop_s, value)
978
979 def __setitem__(self, iri, value):
980 try:
981 super().__setitem__(iri, value)
982 except KeyError:
983 if self.CLOSED:
984 raise
985
986 if not is_IRI(iri):
987 raise KeyError(f"Key '{iri}' must be an IRI")
988 self.__dict__["_obj_data"][iri] = value
989
990 def __delitem__(self, iri):
991 try:
992 super().__delitem__(iri)
993 except KeyError:
994 if self.CLOSED:
995 raise
996
997 if not is_IRI(iri):
998 raise KeyError(f"Key '{iri}' must be an IRI")
999 del self.__dict__["_obj_data"][iri]
1000
1001 def __getattr__(self, name):
1002 if name == "TYPE":
1003 return self.__dict__["_obj_TYPE"][0]
1004 if name == "COMPACT_TYPE":
1005 return self.__dict__["_obj_TYPE"][1]
1006 return super().__getattr__(name)
1007
1008 def property_keys(self):
1009 iris = set()
1010 for pyname, iri, compact in super().property_keys():
1011 iris.add(iri)
1012 yield pyname, iri, compact
1013
1014 if self.CLOSED:
1015 return
1016
1017 for iri in self.__dict__["_obj_data"].keys():
1018 if iri not in iris:
1019 yield None, iri, None
1020
1021
1022class SHACLObjectSet(object):
1023 def __init__(self, objects=[], *, link=False):
1024 self.objects = set()
1025 self.missing_ids = set()
1026 for o in objects:
1027 self.objects.add(o)
1028 self.create_index()
1029 if link:
1030 self._link()
1031
1032 def create_index(self):
1033 """
1034 (re)Create object index
1035
1036 Creates or recreates the indices for the object set to enable fast
1037 lookup. All objects and their children are walked and indexed
1038 """
1039 self.obj_by_id = {}
1040 self.obj_by_type = {}
1041 for o in self.foreach():
1042 self.add_index(o)
1043
1044 def add_index(self, obj):
1045 """
1046 Add object to index
1047
1048 Adds the object to all appropriate indices
1049 """
1050
1051 def reg_type(typ, compact, o, exact):
1052 self.obj_by_type.setdefault(typ, set()).add((exact, o))
1053 if compact:
1054 self.obj_by_type.setdefault(compact, set()).add((exact, o))
1055
1056 if not isinstance(obj, SHACLObject):
1057 raise TypeError("Object is not of type SHACLObject")
1058
1059 for typ in SHACLObject.CLASSES.values():
1060 if isinstance(obj, typ):
1061 reg_type(
1062 typ._OBJ_TYPE, typ._OBJ_COMPACT_TYPE, obj, obj.__class__ is typ
1063 )
1064
1065 # This covers custom extensions
1066 reg_type(obj.TYPE, obj.COMPACT_TYPE, obj, True)
1067
1068 if not obj._id:
1069 return
1070
1071 self.missing_ids.discard(obj._id)
1072
1073 if obj._id in self.obj_by_id:
1074 return
1075
1076 self.obj_by_id[obj._id] = obj
1077
1078 def add(self, obj):
1079 """
1080 Add object to object set
1081
1082 Adds a SHACLObject to the object set and indexes it.
1083
1084 NOTE: Child objects of the attached object are not indexed
1085 """
1086 if not isinstance(obj, SHACLObject):
1087 raise TypeError("Object is not of type SHACLObject")
1088
1089 if obj not in self.objects:
1090 self.objects.add(obj)
1091 self.add_index(obj)
1092 return obj
1093
1094 def update(self, *others):
1095 """
1096 Update the object set, adding all objects from each iterable
1097 """
1098 for o in others:
1099 for obj in o:
1100 self.add(obj)
1101
1102 def __contains__(self, item):
1103 """
1104 Returns True if the item is in the object set
1105 """
1106 return item in self.objects
1107
1108 def link(self):
1109 """
1110 Link object set
1111
1112 Links the objects in the object set by replacing string object
1113 references with references to the objects themselves. e.g.
1114 a property that references object "https://foo/bar" by a string
1115 reference will be replaced with an actual reference to the object in
1116 the object set with the same ID if it exists in the object set
1117
1118 If multiple objects with the same ID are found, the duplicates are
1119 eliminated
1120 """
1121 self.create_index()
1122 return self._link()
1123
1124 def _link(self):
1125 global NAMED_INDIVIDUALS
1126
1127 self.missing_ids = set()
1128 visited = set()
1129
1130 new_objects = set()
1131
1132 for o in self.objects:
1133 if o._id:
1134 o = self.find_by_id(o._id, o)
1135 o.link_helper(self, self.missing_ids, visited)
1136 new_objects.add(o)
1137
1138 self.objects = new_objects
1139
1140 # Remove blank nodes
1141 obj_by_id = {}
1142 for _id, obj in self.obj_by_id.items():
1143 if _id.startswith("_:"):
1144 del obj._id
1145 else:
1146 obj_by_id[_id] = obj
1147 self.obj_by_id = obj_by_id
1148
1149 # Named individuals aren't considered missing
1150 self.missing_ids -= NAMED_INDIVIDUALS
1151
1152 return self.missing_ids
1153
1154 def find_by_id(self, _id, default=None):
1155 """
1156 Find object by ID
1157
1158 Returns the object that matches the specified ID, or default if there is no
1159 object with the specified ID
1160 """
1161 if _id not in self.obj_by_id:
1162 return default
1163 return self.obj_by_id[_id]
1164
1165 def foreach(self):
1166 """
1167 Iterate over every object in the object set, and all child objects
1168 """
1169 visited = set()
1170 for o in self.objects:
1171 if o not in visited:
1172 yield o
1173 visited.add(o)
1174
1175 for child in o.iter_objects(recursive=True, visited=visited):
1176 yield child
1177
1178 def foreach_type(self, typ, *, match_subclass=True):
1179 """
1180 Iterate over each object of a specified type (or subclasses thereof)
1181
1182 If match_subclass is True, any class derived from typ will also match
1183 (similar to isinstance()). If False, only exact matches will be
1184 returned
1185 """
1186 if not isinstance(typ, str):
1187 if not issubclass(typ, SHACLObject):
1188 raise TypeError(f"Type must be derived from SHACLObject, got {typ}")
1189 typ = typ._OBJ_TYPE
1190
1191 if typ not in self.obj_by_type:
1192 return
1193
1194 for exact, o in self.obj_by_type[typ]:
1195 if match_subclass or exact:
1196 yield o
1197
1198 def merge(self, *objectsets):
1199 """
1200 Merge object sets
1201
1202 Returns a new object set that is the combination of this object set and
1203 all provided arguments
1204 """
1205 new_objects = set()
1206 new_objects |= self.objects
1207 for d in objectsets:
1208 new_objects |= d.objects
1209
1210 return SHACLObjectSet(new_objects, link=True)
1211
1212 def encode(self, encoder, force_list=False, *, key=None):
1213 """
1214 Serialize a list of objects to a serialization encoder
1215
1216 If force_list is true, a list will always be written using the encoder.
1217 """
1218 ref_counts = {}
1219 state = EncodeState()
1220
1221 def walk_callback(value, path):
1222 nonlocal state
1223 nonlocal ref_counts
1224
1225 if not isinstance(value, SHACLObject):
1226 return True
1227
1228 # Remove blank node ID for re-assignment
1229 if value._id and value._id.startswith("_:"):
1230 del value._id
1231
1232 if value._id:
1233 state.add_refed(value)
1234
1235 # If the object is referenced more than once, add it to the set of
1236 # referenced objects
1237 ref_counts.setdefault(value, 0)
1238 ref_counts[value] += 1
1239 if ref_counts[value] > 1:
1240 state.add_refed(value)
1241 return False
1242
1243 return True
1244
1245 for o in self.objects:
1246 if o._id:
1247 state.add_refed(o)
1248 o.walk(walk_callback)
1249
1250 use_list = force_list or len(self.objects) > 1
1251
1252 if use_list:
1253 # If we are making a list add all the objects referred to by reference
1254 # to the list
1255 objects = list(self.objects | state.ref_objects)
1256 else:
1257 objects = list(self.objects)
1258
1259 objects.sort(key=key)
1260
1261 if use_list:
1262 # Ensure top level objects are only written in the top level graph
1263 # node, and referenced by ID everywhere else. This is done by setting
1264 # the flag that indicates this object has been written for all the top
1265 # level objects, then clearing it right before serializing the object.
1266 #
1267 # In this way, if an object is referenced before it is supposed to be
1268 # serialized into the @graph, it will serialize as a string instead of
1269 # the actual object
1270 for o in objects:
1271 state.written_objects.add(o)
1272
1273 with encoder.write_list() as list_s:
1274 for o in objects:
1275 # Allow this specific object to be written now
1276 state.written_objects.remove(o)
1277 with list_s.write_list_item() as item_s:
1278 o.encode(item_s, state)
1279
1280 elif objects:
1281 objects[0].encode(encoder, state)
1282
1283 def decode(self, decoder):
1284 self.create_index()
1285
1286 for obj_d in decoder.read_list():
1287 o = SHACLObject.decode(obj_d, objectset=self)
1288 self.objects.add(o)
1289
1290 self._link()
1291
1292
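A brief sketch of typical object-set usage (`a` and `b` are assumed SHACLObject instances, and the IRIs are illustrative):

    objset = SHACLObjectSet([a, b])
    missing = objset.link()   # resolve string IDs into object references
    obj = objset.find_by_id("http://example.com/ns#widget-1")
    # foreach_type() accepts either a class or a string type IRI
    for o in objset.foreach_type("http://example.com/ns#Widget"):
        print(o)
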
1293class EncodeState(object):
1294 def __init__(self):
1295 self.ref_objects = set()
1296 self.written_objects = set()
1297 self.blank_objects = {}
1298
1299 def get_object_id(self, o):
1300 if o._id:
1301 return o._id
1302
1303 if o not in self.blank_objects:
1304 _id = f"_:{o.__class__.__name__}{len(self.blank_objects)}"
1305 self.blank_objects[o] = _id
1306
1307 return self.blank_objects[o]
1308
1309 def is_refed(self, o):
1310 return o in self.ref_objects
1311
1312 def add_refed(self, o):
1313 self.ref_objects.add(o)
1314
1315 def is_written(self, o):
1316 return o in self.written_objects
1317
1318 def add_written(self, o):
1319 self.written_objects.add(o)
1320
1321
1322class Decoder(ABC):
1323 @abstractmethod
1324 def read_value(self):
1325 """
1326 Consume next item
1327
1328 Consumes the next item of any type
1329 """
1330 pass
1331
1332 @abstractmethod
1333 def read_string(self):
1334 """
1335 Consume the next item as a string.
1336
1337 Returns the string value of the next item, or `None` if the next item
1338 is not a string
1339 """
1340 pass
1341
1342 @abstractmethod
1343 def read_datetime(self):
1344 """
1345 Consumes the next item as a date & time string
1346
1347 Returns the string value of the next item, if it is an ISO datetime, or
1348 `None` if the next item is not an ISO datetime string.
1349
1350 Note that validation of the string is done by the caller, so a minimal
1351 implementation can just check if the next item is a string without
1352 worrying about the format
1353 """
1354 pass
1355
1356 @abstractmethod
1357 def read_integer(self):
1358 """
1359 Consumes the next item as an integer
1360
1361 Returns the integer value of the next item, or `None` if the next item
1362 is not an integer
1363 """
1364 pass
1365
1366 @abstractmethod
1367 def read_iri(self):
1368 """
1369 Consumes the next item as an IRI string
1370
1371 Returns the string value of the next item if it is an IRI, or `None` if
1372 the next item is not an IRI.
1373
1374 The returned string should be either a fully-qualified IRI, or a blank
1375 node ID
1376 """
1377 pass
1378
1379 @abstractmethod
1380 def read_enum(self, e):
1381 """
1382 Consumes the next item as an Enum value string
1383
1384 Returns the fully qualified IRI of the next enum item, or `None` if the
1385 next item is not an enum value.
1386
1387 The caller is responsible for validating that the returned IRI is
1388 actually a member of the specified Enum, so the `Decoder` does not need
1389 to check that, but can if it wishes
1390 """
1391 pass
1392
1393 @abstractmethod
1394 def read_bool(self):
1395 """
1396 Consume the next item as a boolean value
1397
1398 Returns the boolean value of the next item, or `None` if the next item
1399 is not a boolean
1400 """
1401 pass
1402
1403 @abstractmethod
1404 def read_float(self):
1405 """
1406 Consume the next item as a float value
1407
1408 Returns the float value of the next item, or `None` if the next item is
1409 not a float
1410 """
1411 pass
1412
1413 @abstractmethod
1414 def read_list(self):
1415 """
1416 Consume the next item as a list generator
1417
1418 This should generate a `Decoder` object for each item in the list. The
1419 generated `Decoder` can be used to read the corresponding item from the
1420 list
1421 """
1422 pass
1423
1424 @abstractmethod
1425 def is_list(self):
1426 """
1427 Checks if the next item is a list
1428
1429 Returns True if the next item is a list, or False if it is a scalar
1430 """
1431 pass
1432
1433 @abstractmethod
1434 def read_object(self):
1435 """
1436 Consume the next item as an object
1437
1438 A context manager that "enters" the next item as an object and yields a
1439 `Decoder` that can read properties from it. If the next item is not an
1440 object, yields `None`
1441
1442 Properties will be read out of the object using `read_property` and
1443 `read_object_id`
1444 """
1445 pass
1446
1447 @abstractmethod
1448 @contextmanager
1449 def read_property(self, key):
1450 """
1451 Read property from object
1452
1453 A context manager that yields a `Decoder` that can be used to read the
1454 value of the property with the given key in the current object, or `None`
1455 if the property does not exist in the current object.
1456 """
1457 pass
1458
1459 @abstractmethod
1460 def object_keys(self):
1461 """
1462 Read property keys from an object
1463
1464 Iterates over all the serialized keys for the current object
1465 """
1466 pass
1467
1468 @abstractmethod
1469 def read_object_id(self, alias=None):
1470 """
1471 Read current object ID property
1472
1473 Returns the ID of the current object if one is defined, or `None` if
1474 the current object has no ID.
1475
1476 The ID must be a fully qualified IRI or a blank node
1477
1478 If `alias` is provided, it is a hint as to another name by which the ID
1479 might be found, if the `Decoder` supports aliases for an ID
1480 """
1481 pass
1482
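# Alternative input formats can be supported by subclassing Decoder and
# implementing the abstract read_* methods; JSONLDDecoder below is the
# reference implementation for JSON-LD shaped data.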
1483
1484class JSONLDDecoder(Decoder):
1485 def __init__(self, data, root=False):
1486 self.data = data
1487 self.root = root
1488
1489 def read_value(self):
1490 if isinstance(self.data, str):
1491 try:
1492 return float(self.data)
1493 except ValueError:
1494 pass
1495 return self.data
1496
1497 def read_string(self):
1498 if isinstance(self.data, str):
1499 return self.data
1500 return None
1501
1502 def read_datetime(self):
1503 return self.read_string()
1504
1505 def read_integer(self):
1506 if isinstance(self.data, int):
1507 return self.data
1508 return None
1509
1510 def read_bool(self):
1511 if isinstance(self.data, bool):
1512 return self.data
1513 return None
1514
1515 def read_float(self):
1516 if isinstance(self.data, (int, float, str)):
1517 return float(self.data)
1518 return None
1519
1520 def read_iri(self):
1521 if isinstance(self.data, str):
1522 return self.data
1523 return None
1524
1525 def read_enum(self, e):
1526 if isinstance(self.data, str):
1527 return self.data
1528 return None
1529
1530 def read_list(self):
1531 if self.is_list():
1532 for v in self.data:
1533 yield self.__class__(v)
1534 else:
1535 yield self
1536
1537 def is_list(self):
1538 return isinstance(self.data, (list, tuple, set))
1539
1540 def __get_value(self, *keys):
1541 for k in keys:
1542 if k and k in self.data:
1543 return self.data[k]
1544 return None
1545
1546 @contextmanager
1547 def read_property(self, key):
1548 v = self.__get_value(key)
1549 if v is not None:
1550 yield self.__class__(v)
1551 else:
1552 yield None
1553
1554 def object_keys(self):
1555 for key in self.data.keys():
1556 if key in ("@type", "type"):
1557 continue
1558 if self.root and key == "@context":
1559 continue
1560 yield key
1561
1562 def read_object(self):
1563 typ = self.__get_value("@type", "type")
1564 if typ is not None:
1565 return typ, self
1566
1567 return None, self
1568
1569 def read_object_id(self, alias=None):
1570 return self.__get_value(alias, "@id")
1571
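# A rough illustration of JSONLDDecoder over plain parsed JSON (hypothetical
# data; input normally arrives via JSONLDDeserializer below):
#
#   d = JSONLDDecoder({"@id": "http://example.com/thing", "name": "x"})
#   typ, obj_d = d.read_object()   # typ is None here, since there is no "@type"
#   obj_d.read_object_id()         # -> "http://example.com/thing"
#   with obj_d.read_property("name") as p:
#       p.read_string()            # -> "x"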
1572
1573class JSONLDDeserializer(object):
1574 def deserialize_data(self, data, objectset: SHACLObjectSet):
1575 if "@graph" in data:
1576 h = JSONLDDecoder(data["@graph"], True)
1577 else:
1578 h = JSONLDDecoder(data, True)
1579
1580 objectset.decode(h)
1581
1582 def read(self, f, objectset: SHACLObjectSet):
1583 data = json.load(f)
1584 self.deserialize_data(data, objectset)
1585
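# Pre-parsed data can also be fed in directly, e.g. (sketch; "text" is a
# hypothetical JSON string):
#
#   objectset = SHACLObjectSet()
#   JSONLDDeserializer().deserialize_data(json.loads(text), objectset)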
1586
1587class Encoder(ABC):
1588 @abstractmethod
1589 def write_string(self, v):
1590 """
1591 Write a string value
1592
1593 Encodes the value as a string in the output
1594 """
1595 pass
1596
1597 @abstractmethod
1598 def write_datetime(self, v):
1599 """
1600 Write a date & time string
1601
1602 Encodes the value as an ISO datetime string
1603
1604 Note: The provided string is already correctly encoded as an ISO datetime
1605 """
1606 pass
1607
1608 @abstractmethod
1609 def write_integer(self, v):
1610 """
1611 Write an integer value
1612
1613 Encodes the value as an integer in the output
1614 """
1615 pass
1616
1617 @abstractmethod
1618 def write_iri(self, v, compact=None):
1619 """
1620 Write IRI
1621
1622 Encodes the string as an IRI. Note that the string will be either a
1623 fully qualified IRI or a blank node ID. If `compact` is provided and
1624 the serialization supports compacted IRIs, it should be preferred to
1625 the full IRI
1626 """
1627 pass
1628
1629 @abstractmethod
1630 def write_enum(self, v, e, compact=None):
1631 """
1632 Write enum value IRI
1633
1634 Encodes the string enum value IRI. Note that the string will be a fully
1635 qualified IRI. If `compact` is provided and the serialization supports
1636 compacted IRIs, it should be preferred to the full IRI.
1637 """
1638 pass
1639
1640 @abstractmethod
1641 def write_bool(self, v):
1642 """
1643 Write boolean
1644
1645 Encodes the value as a boolean in the output
1646 """
1647 pass
1648
1649 @abstractmethod
1650 def write_float(self, v):
1651 """
1652 Write float
1653
1654 Encodes the value as a floating point number in the output
1655 """
1656 pass
1657
1658 @abstractmethod
1659 @contextmanager
1660 def write_object(self, o, _id, needs_id):
1661 """
1662 Write object
1663
1664 A context manager that yields an `Encoder` that can be used to encode
1665 the given object properties.
1666
1667 The provided ID will always be a valid ID (even if o._id is `None`), in
1668 case the `Encoder` _must_ have an ID. `needs_id` is a hint to indicate
1669 to the `Encoder` if an ID must be written or not (if that is even an
1670 option). If it is `True`, the `Encoder` must encode an ID for the
1671 object. If `False`, the encoder is not required to encode an ID and may
1672 omit it.
1673
1674 The ID will be either a fully qualified IRI, or a blank node IRI.
1675
1676 Properties will be written to the object using `write_property`
1677 """
1678 pass
1679
1680 @abstractmethod
1681 @contextmanager
1682 def write_property(self, iri, compact=None):
1683 """
1684 Write object property
1685
1686 A context manager that yields an `Encoder` that can be used to encode
1687 the value for the property with the given IRI in the current object
1688
1689 Note that the IRI will be fully qualified. If `compact` is provided and
1690 the serialization supports compacted IRIs, it should be preferred to
1691 the full IRI.
1692 """
1693 pass
1694
1695 @abstractmethod
1696 @contextmanager
1697 def write_list(self):
1698 """
1699 Write list
1700
1701 A context manager that yields an `Encoder` that can be used to encode a
1702 list.
1703
1704 Each item of the list will be added using `write_list_item`
1705 """
1706 pass
1707
1708 @abstractmethod
1709 @contextmanager
1710 def write_list_item(self):
1711 """
1712 Write list item
1713
1714 A context manager that yields an `Encoder` that can be used to encode
1715 the value for a list item
1716 """
1717 pass
1718
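# As with Decoder, alternative output formats can be supported by subclassing
# Encoder; JSONLDEncoder and JSONLDInlineEncoder below are the two provided
# implementations.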
1719
1720class JSONLDEncoder(Encoder):
1721 def __init__(self, data=None):
1722 self.data = data
1723
1724 def write_string(self, v):
1725 self.data = v
1726
1727 def write_datetime(self, v):
1728 self.data = v
1729
1730 def write_integer(self, v):
1731 self.data = v
1732
1733 def write_iri(self, v, compact=None):
1734 self.write_string(compact or v)
1735
1736 def write_enum(self, v, e, compact=None):
1737 self.write_string(compact or v)
1738
1739 def write_bool(self, v):
1740 self.data = v
1741
1742 def write_float(self, v):
1743 self.data = str(v)
1744
1745 @contextmanager
1746 def write_property(self, iri, compact=None):
1747 s = self.__class__(None)
1748 yield s
1749 if s.data is not None:
1750 self.data[compact or iri] = s.data
1751
1752 @contextmanager
1753 def write_object(self, o, _id, needs_id):
1754 self.data = {
1755 "type": o.COMPACT_TYPE or o.TYPE,
1756 }
1757 if needs_id:
1758 self.data[o.ID_ALIAS or "@id"] = _id
1759 yield self
1760
1761 @contextmanager
1762 def write_list(self):
1763 self.data = []
1764 yield self
1765 if not self.data:
1766 self.data = None
1767
1768 @contextmanager
1769 def write_list_item(self):
1770 s = self.__class__(None)
1771 yield s
1772 if s.data is not None:
1773 self.data.append(s.data)
1774
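# Note that JSONLDEncoder builds a plain Python structure in self.data, and
# write_float() stores the value as a string (JSONLDDecoder.read_float()
# accepts strings on the way back in). For example:
#
#   e = JSONLDEncoder()
#   e.write_float(1.5)
#   e.data  # -> "1.5"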
1775
1776class JSONLDSerializer(object):
1777 def __init__(self, **args):
1778 self.args = args
1779
1780 def serialize_data(
1781 self,
1782 objectset: SHACLObjectSet,
1783 force_at_graph=False,
1784 ):
1785 h = JSONLDEncoder()
1786 objectset.encode(h, force_at_graph)
1787 data = {}
1788 if len(CONTEXT_URLS) == 1:
1789 data["@context"] = CONTEXT_URLS[0]
1790 elif CONTEXT_URLS:
1791 data["@context"] = CONTEXT_URLS
1792
1793 if isinstance(h.data, list):
1794 data["@graph"] = h.data
1795 else:
1796 for k, v in h.data.items():
1797 data[k] = v
1798
1799 return data
1800
1801 def write(
1802 self,
1803 objectset: SHACLObjectSet,
1804 f,
1805 force_at_graph=False,
1806 **kwargs,
1807 ):
1808 """
1809 Write a SHACLObjectSet to a JSON LD file
1810
1811 If force_at_graph is True, a @graph node will always be written
1812 """
1813 data = self.serialize_data(objectset, force_at_graph)
1814
1815 args = {**self.args, **kwargs}
1816
1817 sha1 = hashlib.sha1()
1818 for chunk in json.JSONEncoder(**args).iterencode(data):
1819 chunk = chunk.encode("utf-8")
1820 f.write(chunk)
1821 sha1.update(chunk)
1822
1823 return sha1.hexdigest()
1824
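# Usage sketch (extra keyword arguments are passed through to
# json.JSONEncoder, and the stream must be binary since encoded chunks are
# bytes; "objectset" is assumed to be a populated SHACLObjectSet):
#
#   import io
#   buf = io.BytesIO()
#   digest = JSONLDSerializer(indent=2).write(objectset, buf)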
1825
1826class JSONLDInlineEncoder(Encoder):
1827 def __init__(self, f, sha1):
1828 self.f = f
1829 self.comma = False
1830 self.sha1 = sha1
1831
1832 def write(self, s):
1833 s = s.encode("utf-8")
1834 self.f.write(s)
1835 self.sha1.update(s)
1836
1837 def _write_comma(self):
1838 if self.comma:
1839 self.write(",")
1840 self.comma = False
1841
1842 def write_string(self, v):
1843 self.write(json.dumps(v))
1844
1845 def write_datetime(self, v):
1846 self.write_string(v)
1847
1848 def write_integer(self, v):
1849 self.write(f"{v}")
1850
1851 def write_iri(self, v, compact=None):
1852 self.write_string(compact or v)
1853
1854 def write_enum(self, v, e, compact=None):
1855 self.write_iri(v, compact)
1856
1857 def write_bool(self, v):
1858 if v:
1859 self.write("true")
1860 else:
1861 self.write("false")
1862
1863 def write_float(self, v):
1864 self.write(json.dumps(str(v)))
1865
1866 @contextmanager
1867 def write_property(self, iri, compact=None):
1868 self._write_comma()
1869 self.write_string(compact or iri)
1870 self.write(":")
1871 yield self
1872 self.comma = True
1873
1874 @contextmanager
1875 def write_object(self, o, _id, needs_id):
1876 self._write_comma()
1877
1878 self.write("{")
1879 self.write_string("type")
1880 self.write(":")
1881 self.write_string(o.COMPACT_TYPE or o.TYPE)
1882 self.comma = True
1883
1884 if needs_id:
1885 self._write_comma()
1886 self.write_string(o.ID_ALIAS or "@id")
1887 self.write(":")
1888 self.write_string(_id)
1889 self.comma = True
1890
1891 self.comma = True
1892 yield self
1893
1894 self.write("}")
1895 self.comma = True
1896
1897 @contextmanager
1898 def write_list(self):
1899 self._write_comma()
1900 self.write("[")
1901 yield self.__class__(self.f, self.sha1)
1902 self.write("]")
1903 self.comma = True
1904
1905 @contextmanager
1906 def write_list_item(self):
1907 self._write_comma()
1908 yield self.__class__(self.f, self.sha1)
1909 self.comma = True
1910
1911
1912class JSONLDInlineSerializer(object):
1913 def write(
1914 self,
1915 objectset: SHACLObjectSet,
1916 f,
1917 force_at_graph=False,
1918 ):
1919 """
1920 Write a SHACLObjectSet to a JSON LD file
1921
1922 Note: force_at_graph is included for compatibility, but ignored. This
1923 serializer always writes out a graph
1924 """
1925 sha1 = hashlib.sha1()
1926 h = JSONLDInlineEncoder(f, sha1)
1927 h.write('{"@context":')
1928 if len(CONTEXT_URLS) == 1:
1929 h.write(f'"{CONTEXT_URLS[0]}"')
1930 elif CONTEXT_URLS:
1931 h.write('["')
1932 h.write('","'.join(CONTEXT_URLS))
1933 h.write('"]')
1934 h.write(",")
1935
1936 h.write('"@graph":')
1937
1938 objectset.encode(h, True)
1939 h.write("}")
1940 return sha1.hexdigest()
1941
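# Usage matches JSONLDSerializer, but the output is streamed as compact JSON
# rather than built in memory first (sketch, with "objectset" as above):
#
#   buf = io.BytesIO()
#   digest = JSONLDInlineSerializer().write(objectset, buf)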
1942
1943def print_tree(objects, all_fields=False):
1944 """
1945 Print object tree
1946 """
1947 seen = set()
1948
1949 def callback(value, path):
1950 nonlocal seen
1951
1952 s = (" " * (len(path) - 1)) + f"{path[-1]}"
1953 if isinstance(value, SHACLObject):
1954 s += f" {value} ({id(value)})"
1955 is_empty = False
1956 elif isinstance(value, ListProxy):
1957 is_empty = len(value) == 0
1958 if is_empty:
1959 s += " []"
1960 else:
1961 s += f" {value!r}"
1962 is_empty = value is None
1963
1964 if all_fields or not is_empty:
1965 print(s)
1966
1967 if isinstance(value, SHACLObject):
1968 if value in seen:
1969 return False
1970 seen.add(value)
1971 return True
1972
1973 return True
1974
1975 for o in objects:
1976 o.walk(callback)
1977
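# Debugging sketch: dump every object reachable from an object set, e.g.
#
#   print_tree(objectset.objects)                   # only populated fields
#   print_tree(objectset.objects, all_fields=True)  # include empty fields too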
1978
1979# fmt: off
1980"""Format Guard"""
1981
1982
1983CONTEXT_URLS = [
1984 "https://spdx.org/rdf/3.0.1/spdx-context.jsonld",
1985]
1986
1987
1988# CLASSES
1989# A class for describing the energy consumption incurred by an AI model in
1990# different stages of its lifecycle.
1991@register("https://spdx.org/rdf/3.0.1/terms/AI/EnergyConsumption", compact_type="ai_EnergyConsumption", abstract=False)
1992class ai_EnergyConsumption(SHACLObject):
1993 NODE_KIND = NodeKind.BlankNodeOrIRI
1994 NAMED_INDIVIDUALS = {
1995 }
1996
1997 @classmethod
1998 def _register_props(cls):
1999 super()._register_props()
2000 # Specifies the amount of energy consumed when finetuning the AI model that is
2001 # being used in the AI system.
2002 cls._add_property(
2003 "ai_finetuningEnergyConsumption",
2004 ListProp(ObjectProp(ai_EnergyConsumptionDescription, False)),
2005 iri="https://spdx.org/rdf/3.0.1/terms/AI/finetuningEnergyConsumption",
2006 compact="ai_finetuningEnergyConsumption",
2007 )
2008 # Specifies the amount of energy consumed during inference time by an AI model
2009 # that is being used in the AI system.
2010 cls._add_property(
2011 "ai_inferenceEnergyConsumption",
2012 ListProp(ObjectProp(ai_EnergyConsumptionDescription, False)),
2013 iri="https://spdx.org/rdf/3.0.1/terms/AI/inferenceEnergyConsumption",
2014 compact="ai_inferenceEnergyConsumption",
2015 )
2016 # Specifies the amount of energy consumed when training the AI model that is
2017 # being used in the AI system.
2018 cls._add_property(
2019 "ai_trainingEnergyConsumption",
2020 ListProp(ObjectProp(ai_EnergyConsumptionDescription, False)),
2021 iri="https://spdx.org/rdf/3.0.1/terms/AI/trainingEnergyConsumption",
2022 compact="ai_trainingEnergyConsumption",
2023 )
2024
2025
2026# The class that helps note down the quantity of energy consumption and the unit
2027# used for measurement.
2028@register("https://spdx.org/rdf/3.0.1/terms/AI/EnergyConsumptionDescription", compact_type="ai_EnergyConsumptionDescription", abstract=False)
2029class ai_EnergyConsumptionDescription(SHACLObject):
2030 NODE_KIND = NodeKind.BlankNodeOrIRI
2031 NAMED_INDIVIDUALS = {
2032 }
2033
2034 @classmethod
2035 def _register_props(cls):
2036 super()._register_props()
2037 # Represents the energy quantity.
2038 cls._add_property(
2039 "ai_energyQuantity",
2040 FloatProp(),
2041 iri="https://spdx.org/rdf/3.0.1/terms/AI/energyQuantity",
2042 min_count=1,
2043 compact="ai_energyQuantity",
2044 )
2045 # Specifies the unit in which energy is measured.
2046 cls._add_property(
2047 "ai_energyUnit",
2048 EnumProp([
2049 ("https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/kilowattHour", "kilowattHour"),
2050 ("https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/megajoule", "megajoule"),
2051 ("https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/other", "other"),
2052 ]),
2053 iri="https://spdx.org/rdf/3.0.1/terms/AI/energyUnit",
2054 min_count=1,
2055 compact="ai_energyUnit",
2056 )
2057
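# Construction sketch (property validation is handled by the SHACLObject
# machinery earlier in this file; the values are hypothetical):
#
#   desc = ai_EnergyConsumptionDescription()
#   desc.ai_energyQuantity = 5.2
#   desc.ai_energyUnit = ai_EnergyUnitType.kilowattHour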
2058
2059# Specifies the unit of energy consumption.
2060@register("https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType", compact_type="ai_EnergyUnitType", abstract=False)
2061class ai_EnergyUnitType(SHACLObject):
2062 NODE_KIND = NodeKind.BlankNodeOrIRI
2063 NAMED_INDIVIDUALS = {
2064 "kilowattHour": "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/kilowattHour",
2065 "megajoule": "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/megajoule",
2066 "other": "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/other",
2067 }
2068 # Kilowatt-hour.
2069 kilowattHour = "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/kilowattHour"
2070 # Megajoule.
2071 megajoule = "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/megajoule"
2072 # Any other units of energy measurement.
2073 other = "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/other"
2074
2075
2076# Specifies the safety risk level.
2077@register("https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType", compact_type="ai_SafetyRiskAssessmentType", abstract=False)
2078class ai_SafetyRiskAssessmentType(SHACLObject):
2079 NODE_KIND = NodeKind.BlankNodeOrIRI
2080 NAMED_INDIVIDUALS = {
2081 "high": "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/high",
2082 "low": "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/low",
2083 "medium": "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/medium",
2084 "serious": "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/serious",
2085 }
2086 # The second-highest level of risk posed by an AI system.
2087 high = "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/high"
2088 # Low/no risk is posed by an AI system.
2089 low = "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/low"
2090 # The third-highest level of risk posed by an AI system.
2091 medium = "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/medium"
2092 # The highest level of risk posed by an AI system.
2093 serious = "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/serious"
2094
2095
2096# Specifies the type of an annotation.
2097@register("https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType", compact_type="AnnotationType", abstract=False)
2098class AnnotationType(SHACLObject):
2099 NODE_KIND = NodeKind.BlankNodeOrIRI
2100 NAMED_INDIVIDUALS = {
2101 "other": "https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/other",
2102 "review": "https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/review",
2103 }
2104 # Used to store extra information about an Element which is not part of a review (e.g. extra information provided during the creation of the Element).
2105 other = "https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/other"
2106 # Used when someone reviews the Element.
2107 review = "https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/review"
2108
2109
2110# Provides information about the creation of the Element.
2111@register("https://spdx.org/rdf/3.0.1/terms/Core/CreationInfo", compact_type="CreationInfo", abstract=False)
2112class CreationInfo(SHACLObject):
2113 NODE_KIND = NodeKind.BlankNodeOrIRI
2114 NAMED_INDIVIDUALS = {
2115 }
2116
2117 @classmethod
2118 def _register_props(cls):
2119 super()._register_props()
2120 # Provide consumers with comments by the creator of the Element about the
2121 # Element.
2122 cls._add_property(
2123 "comment",
2124 StringProp(),
2125 iri="https://spdx.org/rdf/3.0.1/terms/Core/comment",
2126 compact="comment",
2127 )
2128 # Identifies when the Element was originally created.
2129 cls._add_property(
2130 "created",
2131 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
2132 iri="https://spdx.org/rdf/3.0.1/terms/Core/created",
2133 min_count=1,
2134 compact="created",
2135 )
2136 # Identifies who or what created the Element.
2137 cls._add_property(
2138 "createdBy",
2139 ListProp(ObjectProp(Agent, False, context=[
2140 ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"),
2141 ],)),
2142 iri="https://spdx.org/rdf/3.0.1/terms/Core/createdBy",
2143 min_count=1,
2144 compact="createdBy",
2145 )
2146 # Identifies the tooling that was used during the creation of the Element.
2147 cls._add_property(
2148 "createdUsing",
2149 ListProp(ObjectProp(Tool, False)),
2150 iri="https://spdx.org/rdf/3.0.1/terms/Core/createdUsing",
2151 compact="createdUsing",
2152 )
2153 # Provides a reference number that can be used to understand how to parse and
2154 # interpret an Element.
2155 cls._add_property(
2156 "specVersion",
2157 StringProp(pattern=r"^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$",),
2158 iri="https://spdx.org/rdf/3.0.1/terms/Core/specVersion",
2159 min_count=1,
2160 compact="specVersion",
2161 )
2162
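# Construction sketch ("agent" is a hypothetical Agent instance; "created"
# must match the pattern above, i.e. UTC with a trailing "Z"):
#
#   ci = CreationInfo()
#   ci.specVersion = "3.0.1"
#   ci.created = "2024-01-01T00:00:00Z"
#   ci.createdBy = [agent]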
2163
2164# A key with an associated value.
2165@register("https://spdx.org/rdf/3.0.1/terms/Core/DictionaryEntry", compact_type="DictionaryEntry", abstract=False)
2166class DictionaryEntry(SHACLObject):
2167 NODE_KIND = NodeKind.BlankNodeOrIRI
2168 NAMED_INDIVIDUALS = {
2169 }
2170
2171 @classmethod
2172 def _register_props(cls):
2173 super()._register_props()
2174 # A key used in a generic key-value pair.
2175 cls._add_property(
2176 "key",
2177 StringProp(),
2178 iri="https://spdx.org/rdf/3.0.1/terms/Core/key",
2179 min_count=1,
2180 compact="key",
2181 )
2182 # A value used in a generic key-value pair.
2183 cls._add_property(
2184 "value",
2185 StringProp(),
2186 iri="https://spdx.org/rdf/3.0.1/terms/Core/value",
2187 compact="value",
2188 )
2189
2190
2191# Base domain class from which all other SPDX-3.0 domain classes derive.
2192@register("https://spdx.org/rdf/3.0.1/terms/Core/Element", compact_type="Element", abstract=True)
2193class Element(SHACLObject):
2194 NODE_KIND = NodeKind.IRI
2195 ID_ALIAS = "spdxId"
2196 NAMED_INDIVIDUALS = {
2197 }
2198
2199 @classmethod
2200 def _register_props(cls):
2201 super()._register_props()
2202 # Provide consumers with comments by the creator of the Element about the
2203 # Element.
2204 cls._add_property(
2205 "comment",
2206 StringProp(),
2207 iri="https://spdx.org/rdf/3.0.1/terms/Core/comment",
2208 compact="comment",
2209 )
2210 # Provides information about the creation of the Element.
2211 cls._add_property(
2212 "creationInfo",
2213 ObjectProp(CreationInfo, True),
2214 iri="https://spdx.org/rdf/3.0.1/terms/Core/creationInfo",
2215 min_count=1,
2216 compact="creationInfo",
2217 )
2218 # Provides a detailed description of the Element.
2219 cls._add_property(
2220 "description",
2221 StringProp(),
2222 iri="https://spdx.org/rdf/3.0.1/terms/Core/description",
2223 compact="description",
2224 )
2225 # Specifies an Extension characterization of some aspect of an Element.
2226 cls._add_property(
2227 "extension",
2228 ListProp(ObjectProp(extension_Extension, False)),
2229 iri="https://spdx.org/rdf/3.0.1/terms/Core/extension",
2230 compact="extension",
2231 )
2232 # Provides a reference to a resource outside the scope of SPDX-3.0 content
2233 # that uniquely identifies an Element.
2234 cls._add_property(
2235 "externalIdentifier",
2236 ListProp(ObjectProp(ExternalIdentifier, False)),
2237 iri="https://spdx.org/rdf/3.0.1/terms/Core/externalIdentifier",
2238 compact="externalIdentifier",
2239 )
2240 # Points to a resource outside the scope of the SPDX-3.0 content
2241 # that provides additional characteristics of an Element.
2242 cls._add_property(
2243 "externalRef",
2244 ListProp(ObjectProp(ExternalRef, False)),
2245 iri="https://spdx.org/rdf/3.0.1/terms/Core/externalRef",
2246 compact="externalRef",
2247 )
2248 # Identifies the name of an Element as designated by the creator.
2249 cls._add_property(
2250 "name",
2251 StringProp(),
2252 iri="https://spdx.org/rdf/3.0.1/terms/Core/name",
2253 compact="name",
2254 )
2255 # A short description of an Element.
2256 cls._add_property(
2257 "summary",
2258 StringProp(),
2259 iri="https://spdx.org/rdf/3.0.1/terms/Core/summary",
2260 compact="summary",
2261 )
2262 # Provides an IntegrityMethod with which the integrity of an Element can be
2263 # asserted.
2264 cls._add_property(
2265 "verifiedUsing",
2266 ListProp(ObjectProp(IntegrityMethod, False)),
2267 iri="https://spdx.org/rdf/3.0.1/terms/Core/verifiedUsing",
2268 compact="verifiedUsing",
2269 )
2270
2271
2272# A collection of Elements, not necessarily with unifying context.
2273@register("https://spdx.org/rdf/3.0.1/terms/Core/ElementCollection", compact_type="ElementCollection", abstract=True)
2274class ElementCollection(Element):
2275 NODE_KIND = NodeKind.IRI
2276 ID_ALIAS = "spdxId"
2277 NAMED_INDIVIDUALS = {
2278 }
2279
2280 @classmethod
2281 def _register_props(cls):
2282 super()._register_props()
2283 # Refers to one or more Elements that are part of an ElementCollection.
2284 cls._add_property(
2285 "element",
2286 ListProp(ObjectProp(Element, False, context=[
2287 ("https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", "NoneElement"),
2288 ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"),
2289 ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"),
2290 ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"),
2291 ("https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", "NoAssertionElement"),
2292 ],)),
2293 iri="https://spdx.org/rdf/3.0.1/terms/Core/element",
2294 compact="element",
2295 )
2296 # Describes a profile to which the creator of this ElementCollection intends
2297 # to conform.
2298 cls._add_property(
2299 "profileConformance",
2300 ListProp(EnumProp([
2301 ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/ai", "ai"),
2302 ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/build", "build"),
2303 ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/core", "core"),
2304 ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/dataset", "dataset"),
2305 ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/expandedLicensing", "expandedLicensing"),
2306 ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/extension", "extension"),
2307 ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/lite", "lite"),
2308 ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/security", "security"),
2309 ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/simpleLicensing", "simpleLicensing"),
2310 ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/software", "software"),
2311 ])),
2312 iri="https://spdx.org/rdf/3.0.1/terms/Core/profileConformance",
2313 compact="profileConformance",
2314 )
2315 # This property is used to denote the root Element(s) of a tree of elements contained in a BOM.
2316 cls._add_property(
2317 "rootElement",
2318 ListProp(ObjectProp(Element, False, context=[
2319 ("https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", "NoneElement"),
2320 ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"),
2321 ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"),
2322 ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"),
2323 ("https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", "NoAssertionElement"),
2324 ],)),
2325 iri="https://spdx.org/rdf/3.0.1/terms/Core/rootElement",
2326 compact="rootElement",
2327 )
2328
2329
2330# A reference to a resource identifier defined outside the scope of SPDX-3.0 content that uniquely identifies an Element.
2331@register("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifier", compact_type="ExternalIdentifier", abstract=False)
2332class ExternalIdentifier(SHACLObject):
2333 NODE_KIND = NodeKind.BlankNodeOrIRI
2334 NAMED_INDIVIDUALS = {
2335 }
2336
2337 @classmethod
2338 def _register_props(cls):
2339 super()._register_props()
2340 # Provide consumers with comments by the creator of the Element about the
2341 # Element.
2342 cls._add_property(
2343 "comment",
2344 StringProp(),
2345 iri="https://spdx.org/rdf/3.0.1/terms/Core/comment",
2346 compact="comment",
2347 )
2348 # Specifies the type of the external identifier.
2349 cls._add_property(
2350 "externalIdentifierType",
2351 EnumProp([
2352 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe22", "cpe22"),
2353 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe23", "cpe23"),
2354 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cve", "cve"),
2355 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/email", "email"),
2356 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/gitoid", "gitoid"),
2357 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/other", "other"),
2358 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/packageUrl", "packageUrl"),
2359 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/securityOther", "securityOther"),
2360 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swhid", "swhid"),
2361 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swid", "swid"),
2362 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/urlScheme", "urlScheme"),
2363 ]),
2364 iri="https://spdx.org/rdf/3.0.1/terms/Core/externalIdentifierType",
2365 min_count=1,
2366 compact="externalIdentifierType",
2367 )
2368 # Uniquely identifies an external element.
2369 cls._add_property(
2370 "identifier",
2371 StringProp(),
2372 iri="https://spdx.org/rdf/3.0.1/terms/Core/identifier",
2373 min_count=1,
2374 compact="identifier",
2375 )
2376 # Provides the location for more information regarding an external identifier.
2377 cls._add_property(
2378 "identifierLocator",
2379 ListProp(AnyURIProp()),
2380 iri="https://spdx.org/rdf/3.0.1/terms/Core/identifierLocator",
2381 compact="identifierLocator",
2382 )
2383 # An entity that is authorized to issue identification credentials.
2384 cls._add_property(
2385 "issuingAuthority",
2386 StringProp(),
2387 iri="https://spdx.org/rdf/3.0.1/terms/Core/issuingAuthority",
2388 compact="issuingAuthority",
2389 )
2390
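# Construction sketch (the package URL value is hypothetical):
#
#   ident = ExternalIdentifier()
#   ident.externalIdentifierType = ExternalIdentifierType.packageUrl
#   ident.identifier = "pkg:pypi/example@1.0.0"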
2391
2392# Specifies the type of an external identifier.
2393@register("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType", compact_type="ExternalIdentifierType", abstract=False)
2394class ExternalIdentifierType(SHACLObject):
2395 NODE_KIND = NodeKind.BlankNodeOrIRI
2396 NAMED_INDIVIDUALS = {
2397 "cpe22": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe22",
2398 "cpe23": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe23",
2399 "cve": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cve",
2400 "email": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/email",
2401 "gitoid": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/gitoid",
2402 "other": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/other",
2403 "packageUrl": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/packageUrl",
2404 "securityOther": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/securityOther",
2405 "swhid": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swhid",
2406 "swid": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swid",
2407 "urlScheme": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/urlScheme",
2408 }
2409 # [Common Platform Enumeration Specification 2.2](https://cpe.mitre.org/files/cpe-specification_2.2.pdf)
2410 cpe22 = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe22"
2411 # [Common Platform Enumeration: Naming Specification Version 2.3](https://csrc.nist.gov/publications/detail/nistir/7695/final)
2412 cpe23 = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe23"
2413 # Common Vulnerabilities and Exposures identifiers, an identifier for a specific software flaw defined within the official CVE Dictionary and that conforms to the [CVE specification](https://csrc.nist.gov/glossary/term/cve_id).
2414 cve = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cve"
2415 # Email address, as defined in [RFC 3696](https://datatracker.ietf.org/doc/rfc3696/) Section 3.
2416 email = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/email"
2417 # [Gitoid](https://www.iana.org/assignments/uri-schemes/prov/gitoid), stands for [Git Object ID](https://git-scm.com/book/en/v2/Git-Internals-Git-Objects). A gitoid of type blob is a unique hash of a binary artifact. A gitoid may represent either an [Artifact Identifier](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-identifier-types) for the software artifact or an [Input Manifest Identifier](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#input-manifest-identifier) for the software artifact's associated [Artifact Input Manifest](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-input-manifest); this ambiguity exists because the Artifact Input Manifest is itself an artifact, and the gitoid of that artifact is its valid identifier. Gitoids calculated on software artifacts (Snippet, File, or Package Elements) should be recorded in the SPDX 3.0 SoftwareArtifact's contentIdentifier property. Gitoids calculated on the Artifact Input Manifest (Input Manifest Identifier) should be recorded in the SPDX 3.0 Element's externalIdentifier property. See [OmniBOR Specification](https://github.com/omnibor/spec/), a minimalistic specification for describing software [Artifact Dependency Graphs](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-dependency-graph-adg).
2418 gitoid = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/gitoid"
2419 # Used when the type does not match any of the other options.
2420 other = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/other"
2421 # Package URL, as defined in the corresponding [Annex](../../../annexes/pkg-url-specification.md) of this specification.
2422 packageUrl = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/packageUrl"
2423 # Used when there is a security related identifier of unspecified type.
2424 securityOther = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/securityOther"
2425 # SoftWare Hash IDentifier, a persistent intrinsic identifier for digital artifacts, such as files, trees (also known as directories or folders), commits, and other objects typically found in version control systems. The format of the identifiers is defined in the [SWHID specification](https://www.swhid.org/specification/v1.1/4.Syntax) (ISO/IEC DIS 18670). They typically look like `swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2`.
2426 swhid = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swhid"
2427 # Concise Software Identification (CoSWID) tag, as defined in [RFC 9393](https://datatracker.ietf.org/doc/rfc9393/) Section 2.3.
2428 swid = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swid"
2429 # [Uniform Resource Identifier (URI) Schemes](https://www.iana.org/assignments/uri-schemes/uri-schemes.xhtml). The scheme used in order to locate a resource.
2430 urlScheme = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/urlScheme"
2431
2432
2433# A map of Element identifiers that are used within an SpdxDocument but defined
2434# external to that SpdxDocument.
2435@register("https://spdx.org/rdf/3.0.1/terms/Core/ExternalMap", compact_type="ExternalMap", abstract=False)
2436class ExternalMap(SHACLObject):
2437 NODE_KIND = NodeKind.BlankNodeOrIRI
2438 NAMED_INDIVIDUALS = {
2439 }
2440
2441 @classmethod
2442 def _register_props(cls):
2443 super()._register_props()
2444 # Artifact representing a serialization instance of SPDX data containing the
2445 # definition of a particular Element.
2446 cls._add_property(
2447 "definingArtifact",
2448 ObjectProp(Artifact, False),
2449 iri="https://spdx.org/rdf/3.0.1/terms/Core/definingArtifact",
2450 compact="definingArtifact",
2451 )
2452 # Identifies an external Element used within an SpdxDocument but defined
2453 # external to that SpdxDocument.
2454 cls._add_property(
2455 "externalSpdxId",
2456 AnyURIProp(),
2457 iri="https://spdx.org/rdf/3.0.1/terms/Core/externalSpdxId",
2458 min_count=1,
2459 compact="externalSpdxId",
2460 )
2461 # Provides an indication of where to retrieve an external Element.
2462 cls._add_property(
2463 "locationHint",
2464 AnyURIProp(),
2465 iri="https://spdx.org/rdf/3.0.1/terms/Core/locationHint",
2466 compact="locationHint",
2467 )
2468 # Provides an IntegrityMethod with which the integrity of an Element can be
2469 # asserted.
2470 cls._add_property(
2471 "verifiedUsing",
2472 ListProp(ObjectProp(IntegrityMethod, False)),
2473 iri="https://spdx.org/rdf/3.0.1/terms/Core/verifiedUsing",
2474 compact="verifiedUsing",
2475 )
2476
2477
2478# A reference to a resource outside the scope of SPDX-3.0 content related to an Element.
2479@register("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRef", compact_type="ExternalRef", abstract=False)
2480class ExternalRef(SHACLObject):
2481 NODE_KIND = NodeKind.BlankNodeOrIRI
2482 NAMED_INDIVIDUALS = {
2483 }
2484
2485 @classmethod
2486 def _register_props(cls):
2487 super()._register_props()
2488 # Provide consumers with comments by the creator of the Element about the
2489 # Element.
2490 cls._add_property(
2491 "comment",
2492 StringProp(),
2493 iri="https://spdx.org/rdf/3.0.1/terms/Core/comment",
2494 compact="comment",
2495 )
2496 # Provides information about the content type of an Element or a Property.
2497 cls._add_property(
2498 "contentType",
2499 StringProp(pattern=r"^[^\/]+\/[^\/]+$",),
2500 iri="https://spdx.org/rdf/3.0.1/terms/Core/contentType",
2501 compact="contentType",
2502 )
2503 # Specifies the type of the external reference.
2504 cls._add_property(
2505 "externalRefType",
2506 EnumProp([
2507 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altDownloadLocation", "altDownloadLocation"),
2508 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altWebPage", "altWebPage"),
2509 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/binaryArtifact", "binaryArtifact"),
2510 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/bower", "bower"),
2511 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildMeta", "buildMeta"),
2512 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildSystem", "buildSystem"),
2513 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/certificationReport", "certificationReport"),
2514 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/chat", "chat"),
2515 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/componentAnalysisReport", "componentAnalysisReport"),
2516 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/cwe", "cwe"),
2517 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/documentation", "documentation"),
2518 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/dynamicAnalysisReport", "dynamicAnalysisReport"),
2519 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/eolNotice", "eolNotice"),
2520 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/exportControlAssessment", "exportControlAssessment"),
2521 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/funding", "funding"),
2522 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/issueTracker", "issueTracker"),
2523 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/license", "license"),
2524 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mailingList", "mailingList"),
2525 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mavenCentral", "mavenCentral"),
2526 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/metrics", "metrics"),
2527 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/npm", "npm"),
2528 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/nuget", "nuget"),
2529 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/other", "other"),
2530 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/privacyAssessment", "privacyAssessment"),
2531 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/productMetadata", "productMetadata"),
2532 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/purchaseOrder", "purchaseOrder"),
2533 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/qualityAssessmentReport", "qualityAssessmentReport"),
2534 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseHistory", "releaseHistory"),
2535 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseNotes", "releaseNotes"),
2536 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/riskAssessment", "riskAssessment"),
2537 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/runtimeAnalysisReport", "runtimeAnalysisReport"),
2538 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/secureSoftwareAttestation", "secureSoftwareAttestation"),
2539 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdversaryModel", "securityAdversaryModel"),
2540 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdvisory", "securityAdvisory"),
2541 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityFix", "securityFix"),
2542 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityOther", "securityOther"),
2543 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPenTestReport", "securityPenTestReport"),
2544 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPolicy", "securityPolicy"),
2545 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityThreatModel", "securityThreatModel"),
2546 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/socialMedia", "socialMedia"),
2547 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/sourceArtifact", "sourceArtifact"),
2548 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/staticAnalysisReport", "staticAnalysisReport"),
2549 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/support", "support"),
2550 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vcs", "vcs"),
2551 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityDisclosureReport", "vulnerabilityDisclosureReport"),
2552 ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityExploitabilityAssessment", "vulnerabilityExploitabilityAssessment"),
2553 ]),
2554 iri="https://spdx.org/rdf/3.0.1/terms/Core/externalRefType",
2555 compact="externalRefType",
2556 )
2557 # Provides the location of an external reference.
2558 cls._add_property(
2559 "locator",
2560 ListProp(StringProp()),
2561 iri="https://spdx.org/rdf/3.0.1/terms/Core/locator",
2562 compact="locator",
2563 )
2564
2565
2566# Specifies the type of an external reference.
2567@register("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType", compact_type="ExternalRefType", abstract=False)
2568class ExternalRefType(SHACLObject):
2569 NODE_KIND = NodeKind.BlankNodeOrIRI
2570 NAMED_INDIVIDUALS = {
2571 "altDownloadLocation": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altDownloadLocation",
2572 "altWebPage": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altWebPage",
2573 "binaryArtifact": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/binaryArtifact",
2574 "bower": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/bower",
2575 "buildMeta": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildMeta",
2576 "buildSystem": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildSystem",
2577 "certificationReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/certificationReport",
2578 "chat": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/chat",
2579 "componentAnalysisReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/componentAnalysisReport",
2580 "cwe": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/cwe",
2581 "documentation": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/documentation",
2582 "dynamicAnalysisReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/dynamicAnalysisReport",
2583 "eolNotice": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/eolNotice",
2584 "exportControlAssessment": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/exportControlAssessment",
2585 "funding": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/funding",
2586 "issueTracker": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/issueTracker",
2587 "license": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/license",
2588 "mailingList": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mailingList",
2589 "mavenCentral": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mavenCentral",
2590 "metrics": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/metrics",
2591 "npm": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/npm",
2592 "nuget": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/nuget",
2593 "other": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/other",
2594 "privacyAssessment": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/privacyAssessment",
2595 "productMetadata": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/productMetadata",
2596 "purchaseOrder": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/purchaseOrder",
2597 "qualityAssessmentReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/qualityAssessmentReport",
2598 "releaseHistory": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseHistory",
2599 "releaseNotes": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseNotes",
2600 "riskAssessment": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/riskAssessment",
2601 "runtimeAnalysisReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/runtimeAnalysisReport",
2602 "secureSoftwareAttestation": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/secureSoftwareAttestation",
2603 "securityAdversaryModel": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdversaryModel",
2604 "securityAdvisory": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdvisory",
2605 "securityFix": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityFix",
2606 "securityOther": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityOther",
2607 "securityPenTestReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPenTestReport",
2608 "securityPolicy": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPolicy",
2609 "securityThreatModel": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityThreatModel",
2610 "socialMedia": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/socialMedia",
2611 "sourceArtifact": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/sourceArtifact",
2612 "staticAnalysisReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/staticAnalysisReport",
2613 "support": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/support",
2614 "vcs": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vcs",
2615 "vulnerabilityDisclosureReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityDisclosureReport",
2616 "vulnerabilityExploitabilityAssessment": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityExploitabilityAssessment",
2617 }
2618 # A reference to an alternative download location.
2619 altDownloadLocation = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altDownloadLocation"
2620 # A reference to an alternative web page.
2621 altWebPage = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altWebPage"
2622 # A reference to binary artifacts related to a package.
2623 binaryArtifact = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/binaryArtifact"
2624 # A reference to a Bower package. The package locator format, which looks like `package#version`, is defined in the "install" section of the [Bower API documentation](https://bower.io/docs/api/#install).
2625 bower = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/bower"
2626 # A reference to build metadata related to a published package.
2627 buildMeta = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildMeta"
2628 # A reference to the build system used to create or publish the package.
2629 buildSystem = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildSystem"
2630 # A reference to a certification report for a package from an accredited/independent body.
2631 certificationReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/certificationReport"
2632 # A reference to the instant messaging system used by the maintainer for a package.
2633 chat = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/chat"
2634 # A reference to a Software Composition Analysis (SCA) report.
2635 componentAnalysisReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/componentAnalysisReport"
2636 # [Common Weakness Enumeration](https://csrc.nist.gov/glossary/term/common_weakness_enumeration). A reference to a source of software flaw defined within the official [CWE List](https://cwe.mitre.org/data/) that conforms to the [CWE specification](https://cwe.mitre.org/).
2637 cwe = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/cwe"
2638 # A reference to the documentation for a package.
2639 documentation = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/documentation"
2640 # A reference to a dynamic analysis report for a package.
2641 dynamicAnalysisReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/dynamicAnalysisReport"
2642 # A reference to the End Of Sale (EOS) and/or End Of Life (EOL) information related to a package.
2643 eolNotice = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/eolNotice"
2644 # A reference to an export control assessment for a package.
2645 exportControlAssessment = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/exportControlAssessment"
2646 # A reference to funding information related to a package.
2647 funding = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/funding"
2648 # A reference to the issue tracker for a package.
2649 issueTracker = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/issueTracker"
2650 # A reference to additional license information related to an artifact.
2651 license = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/license"
2652 # A reference to the mailing list used by the maintainer for a package.
2653 mailingList = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mailingList"
2654 # A reference to a Maven repository artifact. The artifact locator format is defined in the [Maven documentation](https://maven.apache.org/guides/mini/guide-naming-conventions.html) and looks like `groupId:artifactId[:version]`.
2655 mavenCentral = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mavenCentral"
2656 # A reference to metrics related to a package, such as OpenSSF scorecards.
2657 metrics = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/metrics"
2658 # A reference to an npm package. The package locator format is defined in the [npm documentation](https://docs.npmjs.com/cli/v10/configuring-npm/package-json) and looks like `package@version`.
2659 npm = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/npm"
2660 # A reference to a NuGet package. The package locator format is defined in the [NuGet documentation](https://docs.nuget.org) and looks like `package/version`.
2661 nuget = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/nuget"
2662 # Used when the type does not match any of the other options.
2663 other = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/other"
2664 # A reference to a privacy assessment for a package.
2665 privacyAssessment = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/privacyAssessment"
2666 # A reference to additional product metadata, such as a reference within an organization's product catalog.
2667 productMetadata = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/productMetadata"
2668 # A reference to a purchase order for a package.
2669 purchaseOrder = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/purchaseOrder"
2670 # A reference to a quality assessment for a package.
2671 qualityAssessmentReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/qualityAssessmentReport"
2672 # A reference to a published list of releases for a package.
2673 releaseHistory = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseHistory"
2674 # A reference to the release notes for a package.
2675 releaseNotes = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseNotes"
2676 # A reference to a risk assessment for a package.
2677 riskAssessment = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/riskAssessment"
2678 # A reference to a runtime analysis report for a package.
2679 runtimeAnalysisReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/runtimeAnalysisReport"
2680 # A reference to information assuring that the software is developed using security practices as defined by [NIST SP 800-218 Secure Software Development Framework (SSDF) Version 1.1](https://csrc.nist.gov/pubs/sp/800/218/final) or [CISA Secure Software Development Attestation Form](https://www.cisa.gov/resources-tools/resources/secure-software-development-attestation-form).
2681 secureSoftwareAttestation = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/secureSoftwareAttestation"
2682 # A reference to the security adversary model for a package.
2683 securityAdversaryModel = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdversaryModel"
2684 # A reference to a published security advisory (where advisory as defined per [ISO 29147:2018](https://www.iso.org/standard/72311.html)) that may affect one or more elements, e.g., vendor advisories or specific NVD entries.
2685 securityAdvisory = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdvisory"
2686 # A reference to the patch or source code that fixes a vulnerability.
2687 securityFix = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityFix"
2688 # A reference to related security information of unspecified type.
2689 securityOther = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityOther"
2690 # A reference to a [penetration test](https://en.wikipedia.org/wiki/Penetration_test) report for a package.
2691 securityPenTestReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPenTestReport"
2692 # A reference to instructions for reporting newly discovered security vulnerabilities for a package.
2693 securityPolicy = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPolicy"
2694 # A reference to the [security threat model](https://en.wikipedia.org/wiki/Threat_model) for a package.
2695 securityThreatModel = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityThreatModel"
2696 # A reference to a social media channel for a package.
2697 socialMedia = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/socialMedia"
2698 # A reference to an artifact containing the sources for a package.
2699 sourceArtifact = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/sourceArtifact"
2700 # A reference to a static analysis report for a package.
2701 staticAnalysisReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/staticAnalysisReport"
2702 # A reference to the software support channel or other support information for a package.
2703 support = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/support"
2704 # A reference to a version control system related to a software artifact.
2705 vcs = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vcs"
2706 # A reference to a Vulnerability Disclosure Report (VDR) which provides the software supplier's analysis and findings describing the impact (or lack of impact) that reported vulnerabilities have on packages or products in the supplier's SBOM as defined in [NIST SP 800-161 Cybersecurity Supply Chain Risk Management Practices for Systems and Organizations](https://csrc.nist.gov/pubs/sp/800/161/r1/final).
2707 vulnerabilityDisclosureReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityDisclosureReport"
2708 # A reference to a Vulnerability Exploitability eXchange (VEX) statement which provides information on whether a product is impacted by a specific vulnerability in an included package and, if affected, whether there are actions recommended to remediate. See also [NTIA VEX one-page summary](https://ntia.gov/files/ntia/publications/vex_one-page_summary.pdf).
2709 vulnerabilityExploitabilityAssessment = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityExploitabilityAssessment"
2710
2711
2712# A mathematical algorithm that maps data of arbitrary size to a bit string.
2713@register("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm", compact_type="HashAlgorithm", abstract=False)
2714class HashAlgorithm(SHACLObject):
2715 NODE_KIND = NodeKind.BlankNodeOrIRI
2716 NAMED_INDIVIDUALS = {
2717 "adler32": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/adler32",
2718 "blake2b256": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b256",
2719 "blake2b384": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b384",
2720 "blake2b512": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b512",
2721 "blake3": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake3",
2722 "crystalsDilithium": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsDilithium",
2723 "crystalsKyber": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsKyber",
2724 "falcon": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/falcon",
2725 "md2": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md2",
2726 "md4": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md4",
2727 "md5": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md5",
2728 "md6": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md6",
2729 "other": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/other",
2730 "sha1": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha1",
2731 "sha224": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha224",
2732 "sha256": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha256",
2733 "sha384": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha384",
2734 "sha3_224": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_224",
2735 "sha3_256": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_256",
2736 "sha3_384": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_384",
2737 "sha3_512": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_512",
2738 "sha512": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha512",
2739 }
2740 # Adler-32 checksum is part of the widely used zlib compression library as defined in [RFC 1950](https://datatracker.ietf.org/doc/rfc1950/) Section 2.3.
2741 adler32 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/adler32"
2742 # BLAKE2b algorithm with a digest size of 256, as defined in [RFC 7693](https://datatracker.ietf.org/doc/rfc7693/) Section 4.
2743 blake2b256 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b256"
2744 # BLAKE2b algorithm with a digest size of 384, as defined in [RFC 7693](https://datatracker.ietf.org/doc/rfc7693/) Section 4.
2745 blake2b384 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b384"
2746 # BLAKE2b algorithm with a digest size of 512, as defined in [RFC 7693](https://datatracker.ietf.org/doc/rfc7693/) Section 4.
2747 blake2b512 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b512"
2748 # [BLAKE3](https://github.com/BLAKE3-team/BLAKE3-specs/blob/master/blake3.pdf)
2749 blake3 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake3"
2750 # [Dilithium](https://pq-crystals.org/dilithium/)
2751 crystalsDilithium = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsDilithium"
2752 # [Kyber](https://pq-crystals.org/kyber/)
2753 crystalsKyber = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsKyber"
2754 # [FALCON](https://falcon-sign.info/falcon.pdf)
2755 falcon = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/falcon"
2756 # MD2 message-digest algorithm, as defined in [RFC 1319](https://datatracker.ietf.org/doc/rfc1319/).
2757 md2 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md2"
2758 # MD4 message-digest algorithm, as defined in [RFC 1186](https://datatracker.ietf.org/doc/rfc1186/).
2759 md4 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md4"
2760 # MD5 message-digest algorithm, as defined in [RFC 1321](https://datatracker.ietf.org/doc/rfc1321/).
2761 md5 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md5"
2762 # [MD6 hash function](https://people.csail.mit.edu/rivest/pubs/RABCx08.pdf)
2763 md6 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md6"
2764 # any hashing algorithm that does not exist in this list of entries
2765 other = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/other"
2766 # SHA-1, a secure hashing algorithm, as defined in [RFC 3174](https://datatracker.ietf.org/doc/rfc3174/).
2767 sha1 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha1"
2768 # SHA-2 with a digest length of 224, as defined in [RFC 3874](https://datatracker.ietf.org/doc/rfc3874/).
2769 sha224 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha224"
2770 # SHA-2 with a digest length of 256, as defined in [RFC 6234](https://datatracker.ietf.org/doc/rfc6234/).
2771 sha256 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha256"
2772 # SHA-2 with a digest length of 384, as defined in [RFC 6234](https://datatracker.ietf.org/doc/rfc6234/).
2773 sha384 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha384"
2774 # SHA-3 with a digest length of 224, as defined in [FIPS 202](https://csrc.nist.gov/pubs/fips/202/final).
2775 sha3_224 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_224"
2776 # SHA-3 with a digest length of 256, as defined in [FIPS 202](https://csrc.nist.gov/pubs/fips/202/final).
2777 sha3_256 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_256"
2778 # SHA-3 with a digest length of 384, as defined in [FIPS 202](https://csrc.nist.gov/pubs/fips/202/final).
2779 sha3_384 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_384"
2780 # SHA-3 with a digest length of 512, as defined in [FIPS 202](https://csrc.nist.gov/pubs/fips/202/final).
2781 sha3_512 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_512"
2782 # SHA-2 with a digest length of 512, as defined in [RFC 6234](https://datatracker.ietf.org/doc/rfc6234/).
2783 sha512 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha512"
2784
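# --- Editor's sketch (not part of the generated bindings): named
# individuals such as HashAlgorithm.sha256 are plain string constants
# holding the IRI of the enum entry, mirrored in NAMED_INDIVIDUALS above.
# hashlib is used only to show a digest the sha256 entry would describe.
def _example_hash_algorithm():
    import hashlib

    digest = hashlib.sha256(b"example").hexdigest()
    # The class attribute and the NAMED_INDIVIDUALS table hold the same IRI.
    assert HashAlgorithm.sha256 == HashAlgorithm.NAMED_INDIVIDUALS["sha256"]
    return HashAlgorithm.sha256, digest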
2785
2786# A concrete subclass of Element used by Individuals in the
2787# Core profile.
2788@register("https://spdx.org/rdf/3.0.1/terms/Core/IndividualElement", compact_type="IndividualElement", abstract=False)
2789class IndividualElement(Element):
2790 NODE_KIND = NodeKind.IRI
2791 ID_ALIAS = "spdxId"
2792 NAMED_INDIVIDUALS = {
2793 "NoAssertionElement": "https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement",
2794 "NoneElement": "https://spdx.org/rdf/3.0.1/terms/Core/NoneElement",
2795 }
2796 # An Individual Value for Element representing a set of Elements of unknown
2797 # identity or cardinality (number).
2798 NoAssertionElement = "https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement"
2799 # An Individual Value for Element representing a set of Elements with
2800 # cardinality (number/count) of zero.
2801 NoneElement = "https://spdx.org/rdf/3.0.1/terms/Core/NoneElement"
2802
2803
2804# Provides an independently reproducible mechanism that permits verification of a specific Element.
2805@register("https://spdx.org/rdf/3.0.1/terms/Core/IntegrityMethod", compact_type="IntegrityMethod", abstract=True)
2806class IntegrityMethod(SHACLObject):
2807 NODE_KIND = NodeKind.BlankNodeOrIRI
2808 NAMED_INDIVIDUALS = {
2809 }
2810
2811 @classmethod
2812 def _register_props(cls):
2813 super()._register_props()
2814 # Provide consumers with comments by the creator of the Element about the
2815 # Element.
2816 cls._add_property(
2817 "comment",
2818 StringProp(),
2819 iri="https://spdx.org/rdf/3.0.1/terms/Core/comment",
2820 compact="comment",
2821 )
2822
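# --- Editor's sketch (not part of the generated bindings): abstract=True
# means IntegrityMethod is never instantiated directly; concrete methods
# such as PackageVerificationCode (defined below) subclass it and inherit
# the optional `comment` property registered above.
def _example_integrity_method():
    pvc = PackageVerificationCode()  # concrete subclass, resolved at call time
    pvc.comment = "editor's example of the inherited comment property"
    return isinstance(pvc, IntegrityMethod)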
2823
2824# Provide an enumerated set of lifecycle phases that can provide context to relationships.
2825@register("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType", compact_type="LifecycleScopeType", abstract=False)
2826class LifecycleScopeType(SHACLObject):
2827 NODE_KIND = NodeKind.BlankNodeOrIRI
2828 NAMED_INDIVIDUALS = {
2829 "build": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/build",
2830 "design": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/design",
2831 "development": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/development",
2832 "other": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/other",
2833 "runtime": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/runtime",
2834 "test": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/test",
2835 }
2836 # A relationship has specific context implications during an element's build phase, during development.
2837 build = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/build"
2838 # A relationship has specific context implications during an element's design.
2839 design = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/design"
2840 # A relationship has specific context implications during the development phase of an element.
2841 development = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/development"
2842 # A relationship has other specific context information necessary to capture that the above set of enumerations does not handle.
2843 other = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/other"
2844 # A relationship has specific context implications during the execution phase of an element.
2845 runtime = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/runtime"
2846 # A relationship has specific context implications during an element's testing phase, during development.
2847 test = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/test"
2848
2849
2850# A mapping between prefixes and namespace partial URIs.
2851@register("https://spdx.org/rdf/3.0.1/terms/Core/NamespaceMap", compact_type="NamespaceMap", abstract=False)
2852class NamespaceMap(SHACLObject):
2853 NODE_KIND = NodeKind.BlankNodeOrIRI
2854 NAMED_INDIVIDUALS = {
2855 }
2856
2857 @classmethod
2858 def _register_props(cls):
2859 super()._register_props()
2860 # Provides an unambiguous mechanism for conveying a URI fragment portion of an
2861 # Element ID.
2862 cls._add_property(
2863 "namespace",
2864 AnyURIProp(),
2865 iri="https://spdx.org/rdf/3.0.1/terms/Core/namespace",
2866 min_count=1,
2867 compact="namespace",
2868 )
2869 # A substitute for a URI.
2870 cls._add_property(
2871 "prefix",
2872 StringProp(),
2873 iri="https://spdx.org/rdf/3.0.1/terms/Core/prefix",
2874 min_count=1,
2875 compact="prefix",
2876 )
2877
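# --- Editor's sketch (not part of the generated bindings): both properties
# are mandatory (min_count=1). The prefix and namespace values below are
# invented for illustration.
def _example_namespace_map():
    nm = NamespaceMap()
    nm.prefix = "oe"  # short token usable in place of the full URI
    nm.namespace = "http://spdx.org/spdxdocs/openembedded-example/"
    return nm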
2878
2879# An SPDX version 2.X compatible verification method for software packages.
2880@register("https://spdx.org/rdf/3.0.1/terms/Core/PackageVerificationCode", compact_type="PackageVerificationCode", abstract=False)
2881class PackageVerificationCode(IntegrityMethod):
2882 NODE_KIND = NodeKind.BlankNodeOrIRI
2883 NAMED_INDIVIDUALS = {
2884 }
2885
2886 @classmethod
2887 def _register_props(cls):
2888 super()._register_props()
2889 # Specifies the algorithm used for calculating the hash value.
2890 cls._add_property(
2891 "algorithm",
2892 EnumProp([
2893 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/adler32", "adler32"),
2894 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b256", "blake2b256"),
2895 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b384", "blake2b384"),
2896 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b512", "blake2b512"),
2897 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake3", "blake3"),
2898 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsDilithium", "crystalsDilithium"),
2899 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsKyber", "crystalsKyber"),
2900 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/falcon", "falcon"),
2901 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md2", "md2"),
2902 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md4", "md4"),
2903 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md5", "md5"),
2904 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md6", "md6"),
2905 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/other", "other"),
2906 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha1", "sha1"),
2907 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha224", "sha224"),
2908 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha256", "sha256"),
2909 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha384", "sha384"),
2910 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_224", "sha3_224"),
2911 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_256", "sha3_256"),
2912 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_384", "sha3_384"),
2913 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_512", "sha3_512"),
2914 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha512", "sha512"),
2915 ]),
2916 iri="https://spdx.org/rdf/3.0.1/terms/Core/algorithm",
2917 min_count=1,
2918 compact="algorithm",
2919 )
2920 # The result of applying a hash algorithm to an Element.
2921 cls._add_property(
2922 "hashValue",
2923 StringProp(),
2924 iri="https://spdx.org/rdf/3.0.1/terms/Core/hashValue",
2925 min_count=1,
2926 compact="hashValue",
2927 )
2928 # The relative file name of a file to be excluded from the
2929 # `PackageVerificationCode`.
2930 cls._add_property(
2931 "packageVerificationCodeExcludedFile",
2932 ListProp(StringProp()),
2933 iri="https://spdx.org/rdf/3.0.1/terms/Core/packageVerificationCodeExcludedFile",
2934 compact="packageVerificationCodeExcludedFile",
2935 )
2936
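# --- Editor's sketch (not part of the generated bindings): `algorithm`
# takes one of the HashAlgorithm IRIs enumerated above, and `algorithm` and
# `hashValue` are both mandatory (min_count=1). SPDX 2.x computes the
# package verification code with SHA-1, hence the choice here; the digest
# and excluded file name are invented.
def _example_package_verification_code():
    pvc = PackageVerificationCode()
    pvc.algorithm = HashAlgorithm.sha1
    pvc.hashValue = "d6a770ba38583ed4bb4525bd96e50461655d2758"
    pvc.packageVerificationCodeExcludedFile.append("./package.spdx")
    return pvc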
2937
2938# A tuple of two positive integers that define a range.
2939@register("https://spdx.org/rdf/3.0.1/terms/Core/PositiveIntegerRange", compact_type="PositiveIntegerRange", abstract=False)
2940class PositiveIntegerRange(SHACLObject):
2941 NODE_KIND = NodeKind.BlankNodeOrIRI
2942 NAMED_INDIVIDUALS = {
2943 }
2944
2945 @classmethod
2946 def _register_props(cls):
2947 super()._register_props()
2948 # Defines the beginning of a range.
2949 cls._add_property(
2950 "beginIntegerRange",
2951 PositiveIntegerProp(),
2952 iri="https://spdx.org/rdf/3.0.1/terms/Core/beginIntegerRange",
2953 min_count=1,
2954 compact="beginIntegerRange",
2955 )
2956 # Defines the end of a range.
2957 cls._add_property(
2958 "endIntegerRange",
2959 PositiveIntegerProp(),
2960 iri="https://spdx.org/rdf/3.0.1/terms/Core/endIntegerRange",
2961 min_count=1,
2962 compact="endIntegerRange",
2963 )
2964
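# --- Editor's sketch (not part of the generated bindings): both ends are
# mandatory (min_count=1) and are validated as positive integers, e.g. for
# describing a byte or line range within a file.
def _example_positive_integer_range():
    rng = PositiveIntegerRange()
    rng.beginIntegerRange = 1
    rng.endIntegerRange = 10
    return rng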
2965
2966# Categories of presence or absence.
2967@register("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType", compact_type="PresenceType", abstract=False)
2968class PresenceType(SHACLObject):
2969 NODE_KIND = NodeKind.BlankNodeOrIRI
2970 NAMED_INDIVIDUALS = {
2971 "no": "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/no",
2972 "noAssertion": "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/noAssertion",
2973 "yes": "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/yes",
2974 }
2975 # Indicates absence of the field.
2976 no = "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/no"
2977 # Makes no assertion about the field.
2978 noAssertion = "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/noAssertion"
2979 # Indicates presence of the field.
2980 yes = "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/yes"
2981
2982
2983# Enumeration of the valid profiles.
2984@register("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType", compact_type="ProfileIdentifierType", abstract=False)
2985class ProfileIdentifierType(SHACLObject):
2986 NODE_KIND = NodeKind.BlankNodeOrIRI
2987 NAMED_INDIVIDUALS = {
2988 "ai": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/ai",
2989 "build": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/build",
2990 "core": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/core",
2991 "dataset": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/dataset",
2992 "expandedLicensing": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/expandedLicensing",
2993 "extension": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/extension",
2994 "lite": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/lite",
2995 "security": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/security",
2996 "simpleLicensing": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/simpleLicensing",
2997 "software": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/software",
2998 }
2999 # the element follows the AI profile specification
3000 ai = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/ai"
3001 # the element follows the Build profile specification
3002 build = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/build"
3003 # the element follows the Core profile specification
3004 core = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/core"
3005 # the element follows the Dataset profile specification
3006 dataset = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/dataset"
3007 # the element follows the ExpandedLicensing profile specification
3008 expandedLicensing = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/expandedLicensing"
3009 # the element follows the Extension profile specification
3010 extension = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/extension"
3011 # the element follows the Lite profile specification
3012 lite = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/lite"
3013 # the element follows the Security profile specification
3014 security = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/security"
3015 # the element follows the SimpleLicensing profile specification
3016 simpleLicensing = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/simpleLicensing"
3017 # the element follows the Software profile specification
3018 software = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/software"
3019
3020
3021# Describes a relationship between one or more elements.
3022@register("https://spdx.org/rdf/3.0.1/terms/Core/Relationship", compact_type="Relationship", abstract=False)
3023class Relationship(Element):
3024 NODE_KIND = NodeKind.IRI
3025 ID_ALIAS = "spdxId"
3026 NAMED_INDIVIDUALS = {
3027 }
3028
3029 @classmethod
3030 def _register_props(cls):
3031 super()._register_props()
3032 # Provides information about the completeness of relationships.
3033 cls._add_property(
3034 "completeness",
3035 EnumProp([
3036 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/complete", "complete"),
3037 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/incomplete", "incomplete"),
3038 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/noAssertion", "noAssertion"),
3039 ]),
3040 iri="https://spdx.org/rdf/3.0.1/terms/Core/completeness",
3041 compact="completeness",
3042 )
3043 # Specifies the time from which an element is no longer applicable / valid.
3044 cls._add_property(
3045 "endTime",
3046 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
3047 iri="https://spdx.org/rdf/3.0.1/terms/Core/endTime",
3048 compact="endTime",
3049 )
3050 # References the Element on the left-hand side of a relationship.
3051 cls._add_property(
3052 "from_",
3053 ObjectProp(Element, True, context=[
3054 ("https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", "NoneElement"),
3055 ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"),
3056 ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"),
3057 ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"),
3058 ("https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", "NoAssertionElement"),
3059 ],),
3060 iri="https://spdx.org/rdf/3.0.1/terms/Core/from",
3061 min_count=1,
3062 compact="from",
3063 )
3064 # Information about the relationship between two Elements.
3065 cls._add_property(
3066 "relationshipType",
3067 EnumProp([
3068 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/affects", "affects"),
3069 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/amendedBy", "amendedBy"),
3070 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/ancestorOf", "ancestorOf"),
3071 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/availableFrom", "availableFrom"),
3072 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/configures", "configures"),
3073 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/contains", "contains"),
3074 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/coordinatedBy", "coordinatedBy"),
3075 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/copiedTo", "copiedTo"),
3076 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/delegatedTo", "delegatedTo"),
3077 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/dependsOn", "dependsOn"),
3078 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/descendantOf", "descendantOf"),
3079 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/describes", "describes"),
3080 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/doesNotAffect", "doesNotAffect"),
3081 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/expandsTo", "expandsTo"),
3082 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/exploitCreatedBy", "exploitCreatedBy"),
3083 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedBy", "fixedBy"),
3084 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedIn", "fixedIn"),
3085 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/foundBy", "foundBy"),
3086 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/generates", "generates"),
3087 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAddedFile", "hasAddedFile"),
3088 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssessmentFor", "hasAssessmentFor"),
3089 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssociatedVulnerability", "hasAssociatedVulnerability"),
3090 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasConcludedLicense", "hasConcludedLicense"),
3091 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDataFile", "hasDataFile"),
3092 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeclaredLicense", "hasDeclaredLicense"),
3093 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeletedFile", "hasDeletedFile"),
3094 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDependencyManifest", "hasDependencyManifest"),
3095 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDistributionArtifact", "hasDistributionArtifact"),
3096 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDocumentation", "hasDocumentation"),
3097 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDynamicLink", "hasDynamicLink"),
3098 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasEvidence", "hasEvidence"),
3099 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasExample", "hasExample"),
3100 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasHost", "hasHost"),
3101 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasInput", "hasInput"),
3102 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasMetadata", "hasMetadata"),
3103 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalComponent", "hasOptionalComponent"),
3104 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalDependency", "hasOptionalDependency"),
3105 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOutput", "hasOutput"),
3106 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasPrerequisite", "hasPrerequisite"),
3107 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasProvidedDependency", "hasProvidedDependency"),
3108 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasRequirement", "hasRequirement"),
3109 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasSpecification", "hasSpecification"),
3110 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasStaticLink", "hasStaticLink"),
3111 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTest", "hasTest"),
3112 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTestCase", "hasTestCase"),
3113 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasVariant", "hasVariant"),
3114 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/invokedBy", "invokedBy"),
3115 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/modifiedBy", "modifiedBy"),
3116 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/other", "other"),
3117 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/packagedBy", "packagedBy"),
3118 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/patchedBy", "patchedBy"),
3119 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/publishedBy", "publishedBy"),
3120 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/reportedBy", "reportedBy"),
3121 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/republishedBy", "republishedBy"),
3122 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/serializedInArtifact", "serializedInArtifact"),
3123 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/testedOn", "testedOn"),
3124 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/trainedOn", "trainedOn"),
3125 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/underInvestigationFor", "underInvestigationFor"),
3126 ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/usesTool", "usesTool"),
3127 ]),
3128 iri="https://spdx.org/rdf/3.0.1/terms/Core/relationshipType",
3129 min_count=1,
3130 compact="relationshipType",
3131 )
3132 # Specifies the time from which an element is applicable / valid.
3133 cls._add_property(
3134 "startTime",
3135 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
3136 iri="https://spdx.org/rdf/3.0.1/terms/Core/startTime",
3137 compact="startTime",
3138 )
3139 # References an Element on the right-hand side of a relationship.
3140 cls._add_property(
3141 "to",
3142 ListProp(ObjectProp(Element, False, context=[
3143 ("https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", "NoneElement"),
3144 ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"),
3145 ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"),
3146 ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"),
3147 ("https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", "NoAssertionElement"),
3148 ],)),
3149 iri="https://spdx.org/rdf/3.0.1/terms/Core/to",
3150 min_count=1,
3151 compact="to",
3152 )
3153
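# --- Editor's sketch (not part of the generated bindings): `from_` carries
# a trailing underscore because `from` is a Python keyword; the compact
# (serialized) name above is still "from". The sketch assumes `pkg` is an
# Element created elsewhere. NoneElement is passed as its IRI string, which
# the context table above compacts back to "NoneElement"; here it asserts
# that the package contains no files.
def _example_relationship(pkg):
    rel = Relationship()
    rel.from_ = pkg
    rel.relationshipType = RelationshipType.contains
    rel.to.append(IndividualElement.NoneElement)
    rel.completeness = RelationshipCompleteness.complete
    return rel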
3154
3155# Indicates whether a relationship is known to be complete, incomplete, or if no assertion is made with respect to relationship completeness.
3156@register("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness", compact_type="RelationshipCompleteness", abstract=False)
3157class RelationshipCompleteness(SHACLObject):
3158 NODE_KIND = NodeKind.BlankNodeOrIRI
3159 NAMED_INDIVIDUALS = {
3160 "complete": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/complete",
3161 "incomplete": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/incomplete",
3162 "noAssertion": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/noAssertion",
3163 }
3164 # The relationship is known to be exhaustive.
3165 complete = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/complete"
3166 # The relationship is known not to be exhaustive.
3167 incomplete = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/incomplete"
3168 # No assertion can be made about the completeness of the relationship.
3169 noAssertion = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/noAssertion"
3170
3171
3172# Information about the relationship between two Elements.
3173@register("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType", compact_type="RelationshipType", abstract=False)
3174class RelationshipType(SHACLObject):
3175 NODE_KIND = NodeKind.BlankNodeOrIRI
3176 NAMED_INDIVIDUALS = {
3177 "affects": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/affects",
3178 "amendedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/amendedBy",
3179 "ancestorOf": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/ancestorOf",
3180 "availableFrom": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/availableFrom",
3181 "configures": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/configures",
3182 "contains": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/contains",
3183 "coordinatedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/coordinatedBy",
3184 "copiedTo": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/copiedTo",
3185 "delegatedTo": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/delegatedTo",
3186 "dependsOn": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/dependsOn",
3187 "descendantOf": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/descendantOf",
3188 "describes": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/describes",
3189 "doesNotAffect": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/doesNotAffect",
3190 "expandsTo": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/expandsTo",
3191 "exploitCreatedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/exploitCreatedBy",
3192 "fixedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedBy",
3193 "fixedIn": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedIn",
3194 "foundBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/foundBy",
3195 "generates": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/generates",
3196 "hasAddedFile": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAddedFile",
3197 "hasAssessmentFor": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssessmentFor",
3198 "hasAssociatedVulnerability": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssociatedVulnerability",
3199 "hasConcludedLicense": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasConcludedLicense",
3200 "hasDataFile": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDataFile",
3201 "hasDeclaredLicense": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeclaredLicense",
3202 "hasDeletedFile": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeletedFile",
3203 "hasDependencyManifest": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDependencyManifest",
3204 "hasDistributionArtifact": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDistributionArtifact",
3205 "hasDocumentation": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDocumentation",
3206 "hasDynamicLink": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDynamicLink",
3207 "hasEvidence": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasEvidence",
3208 "hasExample": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasExample",
3209 "hasHost": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasHost",
3210 "hasInput": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasInput",
3211 "hasMetadata": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasMetadata",
3212 "hasOptionalComponent": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalComponent",
3213 "hasOptionalDependency": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalDependency",
3214 "hasOutput": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOutput",
3215 "hasPrerequisite": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasPrerequisite",
3216 "hasProvidedDependency": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasProvidedDependency",
3217 "hasRequirement": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasRequirement",
3218 "hasSpecification": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasSpecification",
3219 "hasStaticLink": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasStaticLink",
3220 "hasTest": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTest",
3221 "hasTestCase": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTestCase",
3222 "hasVariant": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasVariant",
3223 "invokedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/invokedBy",
3224 "modifiedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/modifiedBy",
3225 "other": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/other",
3226 "packagedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/packagedBy",
3227 "patchedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/patchedBy",
3228 "publishedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/publishedBy",
3229 "reportedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/reportedBy",
3230 "republishedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/republishedBy",
3231 "serializedInArtifact": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/serializedInArtifact",
3232 "testedOn": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/testedOn",
3233 "trainedOn": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/trainedOn",
3234 "underInvestigationFor": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/underInvestigationFor",
3235 "usesTool": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/usesTool",
3236 }
3237 # The `from` Vulnerability affects each `to` Element. The use of the `affects` type is constrained to `VexAffectedVulnAssessmentRelationship` classed relationships.
3238 affects = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/affects"
3239 # The `from` Element is amended by each `to` Element.
3240 amendedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/amendedBy"
3241 # The `from` Element is an ancestor of each `to` Element.
3242 ancestorOf = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/ancestorOf"
3243 # The `from` Element is available from the additional supplier described by each `to` Element.
3244 availableFrom = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/availableFrom"
3245 # The `from` Element is a configuration applied to each `to` Element, during a LifecycleScopeType period.
3246 configures = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/configures"
3247 # The `from` Element contains each `to` Element.
3248 contains = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/contains"
3249 # The `from` Vulnerability is coordinatedBy the `to` Agent(s) (vendor, researcher, or consumer agent).
3250 coordinatedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/coordinatedBy"
3251 # The `from` Element has been copied to each `to` Element.
3252 copiedTo = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/copiedTo"
3253 # The `from` Agent is delegating an action to the Agent of the `to` Relationship (which must be of type invokedBy), during a LifecycleScopeType (e.g. the `to` invokedBy Relationship is being done on behalf of `from`).
3254 delegatedTo = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/delegatedTo"
3255 # The `from` Element depends on each `to` Element, during a LifecycleScopeType period.
3256 dependsOn = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/dependsOn"
3257 # The `from` Element is a descendant of each `to` Element.
3258 descendantOf = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/descendantOf"
3259 # The `from` Element describes each `to` Element. To denote the root(s) of a tree of elements in a collection, the rootElement property should be used.
3260 describes = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/describes"
3261 # The `from` Vulnerability has no impact on each `to` Element. The use of the `doesNotAffect` type is constrained to `VexNotAffectedVulnAssessmentRelationship` classed relationships.
3262 doesNotAffect = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/doesNotAffect"
3263 # The `from` archive expands out as an artifact described by each `to` Element.
3264 expandsTo = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/expandsTo"
3265 # The `from` Vulnerability has had an exploit created against it by each `to` Agent.
3266 exploitCreatedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/exploitCreatedBy"
3267 # Designates a `from` Vulnerability has been fixed by the `to` Agent(s).
3268 fixedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedBy"
3269 # A `from` Vulnerability has been fixed in each `to` Element. The use of the `fixedIn` type is constrained to `VexFixedVulnAssessmentRelationship` classed relationships.
3270 fixedIn = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedIn"
3271 # Designates a `from` Vulnerability was originally discovered by the `to` Agent(s).
3272 foundBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/foundBy"
3273 # The `from` Element generates each `to` Element.
3274 generates = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/generates"
3275 # Every `to` Element is a file added to the `from` Element (`from` hasAddedFile `to`).
3276 hasAddedFile = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAddedFile"
3277 # Relates a `from` Vulnerability and each `to` Element with a security assessment. To be used with `VulnAssessmentRelationship` types.
3278 hasAssessmentFor = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssessmentFor"
3279 # Used to associate a `from` Artifact with each `to` Vulnerability.
3280 hasAssociatedVulnerability = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssociatedVulnerability"
3281 # The `from` SoftwareArtifact is concluded by the SPDX data creator to be governed by each `to` license.
3282 hasConcludedLicense = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasConcludedLicense"
3283 # The `from` Element treats each `to` Element as a data file. A data file is an artifact that stores data required or optional for the `from` Element's functionality. A data file can be a database file, an index file, a log file, an AI model file, a calibration data file, a temporary file, a backup file, and more. For AI training dataset, test dataset, test artifact, configuration data, build input data, and build output data, please consider using the more specific relationship types: `trainedOn`, `testedOn`, `hasTest`, `configures`, `hasInput`, and `hasOutput`, respectively. This relationship does not imply dependency.
3284 hasDataFile = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDataFile"
3285 # The `from` SoftwareArtifact was discovered to actually contain each `to` license, for example as detected by use of automated tooling.
3286 hasDeclaredLicense = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeclaredLicense"
3287 # Every `to` Element is a file deleted from the `from` Element (`from` hasDeletedFile `to`).
3288 hasDeletedFile = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeletedFile"
3289 # The `from` Element has manifest files that contain dependency information in each `to` Element.
3290 hasDependencyManifest = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDependencyManifest"
3291 # The `from` Element is distributed as an artifact in each `to` Element (e.g. an RPM or archive file).
3292 hasDistributionArtifact = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDistributionArtifact"
3293 # The `from` Element is documented by each `to` Element.
3294 hasDocumentation = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDocumentation"
3295 # The `from` Element dynamically links in each `to` Element, during a LifecycleScopeType period.
3296 hasDynamicLink = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDynamicLink"
3297 # Every `to` Element is considered as evidence for the `from` Element (`from` hasEvidence `to`).
3298 hasEvidence = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasEvidence"
3299 # Every `to` Element is an example for the `from` Element (`from` hasExample `to`).
3300 hasExample = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasExample"
3301 # The `from` Build was run on the `to` Element during a LifecycleScopeType period (e.g. the host that the build runs on).
3302 hasHost = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasHost"
3303 # The `from` Build has each `to` Element as an input, during a LifecycleScopeType period.
3304 hasInput = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasInput"
3305 # Every `to` Element is metadata about the `from` Element (`from` hasMetadata `to`).
3306 hasMetadata = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasMetadata"
3307 # Every `to` Element is an optional component of the `from` Element (`from` hasOptionalComponent `to`).
3308 hasOptionalComponent = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalComponent"
3309 # The `from` Element optionally depends on each `to` Element, during a LifecycleScopeType period.
3310 hasOptionalDependency = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalDependency"
3311 # The `from` Build element generates each `to` Element as an output, during a LifecycleScopeType period.
3312 hasOutput = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOutput"
3313 # The `from` Element has a prerequisite on each `to` Element, during a LifecycleScopeType period.
3314 hasPrerequisite = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasPrerequisite"
3315 # The `from` Element has a dependency on each `to` Element; the dependency is not in the distributed artifact but is assumed to be provided, during a LifecycleScopeType period.
3316 hasProvidedDependency = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasProvidedDependency"
3317 # The `from` Element has a requirement on each `to` Element, during a LifecycleScopeType period.
3318 hasRequirement = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasRequirement"
3319 # Every `to` Element is a specification for the `from` Element (`from` hasSpecification `to`), during a LifecycleScopeType period.
3320 hasSpecification = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasSpecification"
3321 # The `from` Element statically links in each `to` Element, during a LifecycleScopeType period.
3322 hasStaticLink = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasStaticLink"
3323 # Every `to` Element is a test artifact for the `from` Element (`from` hasTest `to`), during a LifecycleScopeType period.
3324 hasTest = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTest"
3325 # Every `to` Element is a test case for the `from` Element (`from` hasTestCase `to`).
3326 hasTestCase = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTestCase"
3327 # Every `to` Element is a variant of the `from` Element (`from` hasVariant `to`).
3328 hasVariant = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasVariant"
3329 # The `from` Element was invoked by the `to` Agent, during a LifecycleScopeType period (for example, a Build element that describes a build step).
3330 invokedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/invokedBy"
3331 # The `from` Element is modified by each `to` Element.
3332 modifiedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/modifiedBy"
3333 # Every `to` Element is related to the `from` Element where the relationship type is not described by any of the SPDX relationship types (this relationship is directionless).
3334 other = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/other"
3335 # Every `to` Element is a packaged instance of the `from` Element (`from` packagedBy `to`).
3336 packagedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/packagedBy"
3337 # Every `to` Element is a patch for the `from` Element (`from` patchedBy `to`).
3338 patchedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/patchedBy"
3339 # Designates a `from` Vulnerability was made available for public use or reference by each `to` Agent.
3340 publishedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/publishedBy"
3341 # Designates a `from` Vulnerability was first reported to a project, vendor, or tracking database for formal identification by each `to` Agent.
3342 reportedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/reportedBy"
3343 # Designates a `from` Vulnerability's details were tracked, aggregated, and/or enriched to improve context (e.g., NVD) by each `to` Agent.
3344 republishedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/republishedBy"
3345 # The `from` SpdxDocument can be found in a serialized form in each `to` Artifact.
3346 serializedInArtifact = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/serializedInArtifact"
3347 # The `from` Element has been tested on the `to` Element(s).
3348 testedOn = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/testedOn"
3349 # The `from` Element has been trained on the `to` Element(s).
3350 trainedOn = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/trainedOn"
3351 # The `from` Vulnerability impact is being investigated for each `to` Element. The use of the `underInvestigationFor` type is constrained to `VexUnderInvestigationVulnAssessmentRelationship` classed relationships.
3352 underInvestigationFor = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/underInvestigationFor"
3353 # The `from` Element uses each `to` Element as a tool, during a LifecycleScopeType period.
3354 usesTool = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/usesTool"
3355
3356
3357# A collection of SPDX Elements that could potentially be serialized.
3358@register("https://spdx.org/rdf/3.0.1/terms/Core/SpdxDocument", compact_type="SpdxDocument", abstract=False)
3359class SpdxDocument(ElementCollection):
3360 NODE_KIND = NodeKind.IRI
3361 ID_ALIAS = "spdxId"
3362 NAMED_INDIVIDUALS = {
3363 }
3364
3365 @classmethod
3366 def _register_props(cls):
3367 super()._register_props()
3368 # Provides the license under which the SPDX documentation of the Element can be
3369 # used.
3370 cls._add_property(
3371 "dataLicense",
3372 ObjectProp(simplelicensing_AnyLicenseInfo, False, context=[
3373 ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"),
3374 ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"),
3375 ],),
3376 iri="https://spdx.org/rdf/3.0.1/terms/Core/dataLicense",
3377 compact="dataLicense",
3378 )
3379 # Provides an ExternalMap of Element identifiers.
3380 cls._add_property(
3381 "import_",
3382 ListProp(ObjectProp(ExternalMap, False)),
3383 iri="https://spdx.org/rdf/3.0.1/terms/Core/import",
3384 compact="import",
3385 )
3386 # Provides a NamespaceMap of prefixes and associated namespace partial URIs applicable to an SpdxDocument and independent of any specific serialization format or instance.
3387 cls._add_property(
3388 "namespaceMap",
3389 ListProp(ObjectProp(NamespaceMap, False)),
3390 iri="https://spdx.org/rdf/3.0.1/terms/Core/namespaceMap",
3391 compact="namespaceMap",
3392 )
3393
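# --- Editor's sketch (not part of the generated bindings): `import_`
# likewise avoids the Python keyword `import` while still serializing
# compactly as "import". This reuses the NamespaceMap sketch above to
# attach a prefix map to a document.
def _example_spdx_document():
    doc = SpdxDocument()
    doc.namespaceMap.append(_example_namespace_map())
    return doc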
3394
3395# Indicates the type of support that is associated with an artifact.
3396@register("https://spdx.org/rdf/3.0.1/terms/Core/SupportType", compact_type="SupportType", abstract=False)
3397class SupportType(SHACLObject):
3398 NODE_KIND = NodeKind.BlankNodeOrIRI
3399 NAMED_INDIVIDUALS = {
3400 "deployed": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/deployed",
3401 "development": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/development",
3402 "endOfSupport": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/endOfSupport",
3403 "limitedSupport": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/limitedSupport",
3404 "noAssertion": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noAssertion",
3405 "noSupport": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noSupport",
3406 "support": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/support",
3407 }
3408 # in addition to being supported by the supplier, the software is known to have been deployed and is in use. For a software as a service provider, this implies the software is now available as a service.
3409 deployed = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/deployed"
3410 # the artifact is in active development and is not considered ready for formal support from the supplier.
3411 development = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/development"
3412 # there is a defined end of support for the artifact from the supplier. This may also be referred to as end of life. There is a validUntilDate that can be used to signal when support ends for the artifact.
3413 endOfSupport = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/endOfSupport"
3414 # the artifact has been released, and there is limited support available from the supplier. There is a validUntilDate that can provide additional information about the duration of support.
3415 limitedSupport = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/limitedSupport"
3416 # no assertion about the type of support is made. This is considered the default if no other support type is used.
3417 noAssertion = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noAssertion"
3418 # there is no support for the artifact from the supplier; the consumer assumes any support obligations.
3419 noSupport = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noSupport"
3420 # the artifact has been released, and is supported from the supplier. There is a validUntilDate that can provide additional information about the duration of support.
3421 support = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/support"
3422
3423
3424# An element of hardware and/or software utilized to carry out a particular function.
3425@register("https://spdx.org/rdf/3.0.1/terms/Core/Tool", compact_type="Tool", abstract=False)
3426class Tool(Element):
3427 NODE_KIND = NodeKind.IRI
3428 ID_ALIAS = "spdxId"
3429 NAMED_INDIVIDUALS = {
3430 }
3431
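# --- Editor's sketch (not part of the generated bindings): Tool adds no
# properties of its own; it exists so tools can be identified distinctly
# from Agents. The `name` property used below is assumed to be inherited
# from the Element definition earlier in this module.
def _example_tool():
    t = Tool()
    t.name = "bitbake"
    return t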
3432
3433# Categories of confidentiality level.
3434@register("https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType", compact_type="dataset_ConfidentialityLevelType", abstract=False)
3435class dataset_ConfidentialityLevelType(SHACLObject):
3436 NODE_KIND = NodeKind.BlankNodeOrIRI
3437 NAMED_INDIVIDUALS = {
3438 "amber": "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/amber",
3439 "clear": "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/clear",
3440 "green": "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/green",
3441 "red": "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/red",
3442 }
3443 # Data points in the dataset can be shared only with specific organizations and their clients on a need to know basis.
3444 amber = "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/amber"
3445 # Dataset may be distributed freely, without restriction.
3446 clear = "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/clear"
3447 # Dataset can be shared within a community of peers and partners.
3448 green = "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/green"
3449 # Data points in the dataset are highly confidential and can only be shared with named recipients.
3450 red = "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/red"
3451
3452
3453# Availability of dataset.
3454@register("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType", compact_type="dataset_DatasetAvailabilityType", abstract=False)
3455class dataset_DatasetAvailabilityType(SHACLObject):
3456 NODE_KIND = NodeKind.BlankNodeOrIRI
3457 NAMED_INDIVIDUALS = {
3458 "clickthrough": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/clickthrough",
3459 "directDownload": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/directDownload",
3460 "query": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/query",
3461 "registration": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/registration",
3462 "scrapingScript": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/scrapingScript",
3463 }
3464 # the dataset is not publicly available and can only be accessed after affirmatively accepting terms on a clickthrough webpage.
3465 clickthrough = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/clickthrough"
3466 # the dataset is publicly available and can be downloaded directly.
3467 directDownload = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/directDownload"
3468 # the dataset is publicly available, but not all at once, and can only be accessed through queries which return parts of the dataset.
3469 query = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/query"
3470 # the dataset is not publicly available and an email registration is required before accessing the dataset, although without an affirmative acceptance of terms.
3471 registration = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/registration"
3472 # the dataset provider is not making available the underlying data and the dataset must be reassembled, typically using the provided script for scraping the data.
3473 scrapingScript = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/scrapingScript"
3474
3475
3476# Enumeration of dataset types.
3477@register("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType", compact_type="dataset_DatasetType", abstract=False)
3478class dataset_DatasetType(SHACLObject):
3479 NODE_KIND = NodeKind.BlankNodeOrIRI
3480 NAMED_INDIVIDUALS = {
3481 "audio": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/audio",
3482 "categorical": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/categorical",
3483 "graph": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/graph",
3484 "image": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/image",
3485 "noAssertion": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/noAssertion",
3486 "numeric": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/numeric",
3487 "other": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/other",
3488 "sensor": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/sensor",
3489 "structured": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/structured",
3490 "syntactic": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/syntactic",
3491 "text": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/text",
3492 "timeseries": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timeseries",
3493 "timestamp": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timestamp",
3494 "video": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/video",
3495 }
3496 # data is audio-based, such as a collection of music from the 80s.
3497 audio = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/audio"
3498 # data that is classified into a discrete number of categories, such as the eye color of a population of people.
3499 categorical = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/categorical"
3500 # data is in the form of a graph where entries are somehow related to each other through edges, such as a social network of friends.
3501 graph = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/graph"
3502 # data is a collection of images such as pictures of animals.
3503 image = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/image"
3504 # data type is not known.
3505 noAssertion = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/noAssertion"
3506 # data consists only of numeric entries.
3507 numeric = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/numeric"
3508 # data is of a type not included in this list.
3509 other = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/other"
3510 # data is recorded from a physical sensor, such as a thermometer reading or biometric device.
3511 sensor = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/sensor"
3512 # data is stored in tabular format or retrieved from a relational database.
3513 structured = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/structured"
3514 # data describes the syntax or semantics of a language or text, such as a parse tree used for natural language processing.
3515 syntactic = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/syntactic"
3516 # data consists of unstructured text, such as a book, Wikipedia article (without images), or transcript.
3517 text = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/text"
3518 # data is recorded in an ordered sequence of timestamped entries, such as the price of a stock over the course of a day.
3519 timeseries = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timeseries"
3520 # data is recorded with a timestamp for each entry, but not necessarily ordered or at specific intervals, such as when a taxi ride starts and ends.
3521 timestamp = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timestamp"
3522 # data is video-based, such as a collection of movie clips featuring Tom Hanks.
3523 video = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/video"
3524
3525
3526# Abstract class for additional text intended to be added to a License, but
3527# which is not itself a standalone License.
3528@register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/LicenseAddition", compact_type="expandedlicensing_LicenseAddition", abstract=True)
3529class expandedlicensing_LicenseAddition(Element):
3530 NODE_KIND = NodeKind.IRI
3531 ID_ALIAS = "spdxId"
3532 NAMED_INDIVIDUALS = {
3533 }
3534
3535 @classmethod
3536 def _register_props(cls):
3537 super()._register_props()
3538 # Identifies the full text of a LicenseAddition.
3539 cls._add_property(
3540 "expandedlicensing_additionText",
3541 StringProp(),
3542 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/additionText",
3543 min_count=1,
3544 compact="expandedlicensing_additionText",
3545 )
3546 # Specifies whether an additional text identifier has been marked as deprecated.
3547 cls._add_property(
3548 "expandedlicensing_isDeprecatedAdditionId",
3549 BooleanProp(),
3550 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/isDeprecatedAdditionId",
3551 compact="expandedlicensing_isDeprecatedAdditionId",
3552 )
3553 # Identifies all the text and metadata associated with a license in the license
3554 # XML format.
3555 cls._add_property(
3556 "expandedlicensing_licenseXml",
3557 StringProp(),
3558 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/licenseXml",
3559 compact="expandedlicensing_licenseXml",
3560 )
3561 # Specifies the licenseId that is preferred to be used in place of a deprecated
3562 # License or LicenseAddition.
3563 cls._add_property(
3564 "expandedlicensing_obsoletedBy",
3565 StringProp(),
3566 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/obsoletedBy",
3567 compact="expandedlicensing_obsoletedBy",
3568 )
3569 # Contains a URL where the License or LicenseAddition can be found in use.
3570 cls._add_property(
3571 "expandedlicensing_seeAlso",
3572 ListProp(AnyURIProp()),
3573 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/seeAlso",
3574 compact="expandedlicensing_seeAlso",
3575 )
3576 # Identifies the full text of a LicenseAddition, in SPDX templating format.
3577 cls._add_property(
3578 "expandedlicensing_standardAdditionTemplate",
3579 StringProp(),
3580 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/standardAdditionTemplate",
3581 compact="expandedlicensing_standardAdditionTemplate",
3582 )
3583
3584
3585# A license exception that is listed on the SPDX Exceptions list.
3586@register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/ListedLicenseException", compact_type="expandedlicensing_ListedLicenseException", abstract=False)
3587class expandedlicensing_ListedLicenseException(expandedlicensing_LicenseAddition):
3588 NODE_KIND = NodeKind.IRI
3589 ID_ALIAS = "spdxId"
3590 NAMED_INDIVIDUALS = {
3591 }
3592
3593 @classmethod
3594 def _register_props(cls):
3595 super()._register_props()
3596 # Specifies the SPDX License List version in which this license or exception
3597 # identifier was deprecated.
3598 cls._add_property(
3599 "expandedlicensing_deprecatedVersion",
3600 StringProp(),
3601 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/deprecatedVersion",
3602 compact="expandedlicensing_deprecatedVersion",
3603 )
3604 # Specifies the SPDX License List version in which this ListedLicense or
3605 # ListedLicenseException identifier was first added.
3606 cls._add_property(
3607 "expandedlicensing_listVersionAdded",
3608 StringProp(),
3609 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/listVersionAdded",
3610 compact="expandedlicensing_listVersionAdded",
3611 )
3612
3613
3614# A property name with an associated value.
3615@register("https://spdx.org/rdf/3.0.1/terms/Extension/CdxPropertyEntry", compact_type="extension_CdxPropertyEntry", abstract=False)
3616class extension_CdxPropertyEntry(SHACLObject):
3617 NODE_KIND = NodeKind.BlankNodeOrIRI
3618 NAMED_INDIVIDUALS = {
3619 }
3620
3621 @classmethod
3622 def _register_props(cls):
3623 super()._register_props()
3624 # A name used in a CdxPropertyEntry name-value pair.
3625 cls._add_property(
3626 "extension_cdxPropName",
3627 StringProp(),
3628 iri="https://spdx.org/rdf/3.0.1/terms/Extension/cdxPropName",
3629 min_count=1,
3630 compact="extension_cdxPropName",
3631 )
3632 # A value used in a CdxPropertyEntry name-value pair.
3633 cls._add_property(
3634 "extension_cdxPropValue",
3635 StringProp(),
3636 iri="https://spdx.org/rdf/3.0.1/terms/Extension/cdxPropValue",
3637 compact="extension_cdxPropValue",
3638 )
3639
3640
3641# A characterization of some aspect of an Element that is associated with the Element in a generalized fashion.
3642@register("https://spdx.org/rdf/3.0.1/terms/Extension/Extension", compact_type="extension_Extension", abstract=True)
3643class extension_Extension(SHACLExtensibleObject, SHACLObject):
3644 NODE_KIND = NodeKind.BlankNodeOrIRI
3645 NAMED_INDIVIDUALS = {
3646 }
3647
3648
3649# Specifies the CVSS base, temporal, threat, or environmental severity type.
3650@register("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType", compact_type="security_CvssSeverityType", abstract=False)
3651class security_CvssSeverityType(SHACLObject):
3652 NODE_KIND = NodeKind.BlankNodeOrIRI
3653 NAMED_INDIVIDUALS = {
3654 "critical": "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/critical",
3655 "high": "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/high",
3656 "low": "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/low",
3657 "medium": "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/medium",
3658 "none": "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/none",
3659 }
3660 # When a CVSS score is between 9.0 and 10.0
3661 critical = "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/critical"
3662 # When a CVSS score is between 7.0 and 8.9
3663 high = "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/high"
3664 # When a CVSS score is between 0.1 and 3.9
3665 low = "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/low"
3666 # When a CVSS score is between 4.0 and 6.9
3667 medium = "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/medium"
3668 # When a CVSS score is 0.0
3669 none = "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/none"
3670
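# --- Editorial sketch (hypothetical helper, not part of the SPDX bindings) ---
# Maps a numeric CVSS score onto the severity IRIs defined above, following the
# documented ranges (0.0 none, 0.1-3.9 low, 4.0-6.9 medium, 7.0-8.9 high,
# 9.0-10.0 critical):
def _example_severity_for_score(score):
    if score == 0.0:
        return security_CvssSeverityType.none
    if score < 4.0:
        return security_CvssSeverityType.low
    if score < 7.0:
        return security_CvssSeverityType.medium
    if score < 9.0:
        return security_CvssSeverityType.high
    return security_CvssSeverityType.critical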
3671
3672# Specifies the exploit catalog type.
3673@register("https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType", compact_type="security_ExploitCatalogType", abstract=False)
3674class security_ExploitCatalogType(SHACLObject):
3675 NODE_KIND = NodeKind.BlankNodeOrIRI
3676 NAMED_INDIVIDUALS = {
3677 "kev": "https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/kev",
3678 "other": "https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/other",
3679 }
3680 # CISA's Known Exploited Vulnerability (KEV) Catalog
3681 kev = "https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/kev"
3682 # Other exploit catalogs
3683 other = "https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/other"
3684
3685
3686# Specifies the SSVC decision type.
3687@register("https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType", compact_type="security_SsvcDecisionType", abstract=False)
3688class security_SsvcDecisionType(SHACLObject):
3689 NODE_KIND = NodeKind.BlankNodeOrIRI
3690 NAMED_INDIVIDUALS = {
3691 "act": "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/act",
3692 "attend": "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/attend",
3693 "track": "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/track",
3694 "trackStar": "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/trackStar",
3695 }
3696 # The vulnerability requires attention from the organization's internal, supervisory-level and leadership-level individuals. Necessary actions include requesting assistance or information about the vulnerability, as well as publishing a notification internally and/or externally. Typically, internal groups would meet to determine the overall response and then execute agreed-upon actions. CISA recommends remediating Act vulnerabilities as soon as possible.
3697 act = "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/act"
3698 # The vulnerability requires attention from the organization's internal, supervisory-level individuals. Necessary actions include requesting assistance or information about the vulnerability, and may involve publishing a notification internally and/or externally. CISA recommends remediating Attend vulnerabilities sooner than standard update timelines.
3699 attend = "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/attend"
3700 # The vulnerability does not require action at this time. The organization would continue to track the vulnerability and reassess it if new information becomes available. CISA recommends remediating Track vulnerabilities within standard update timelines.
3701 track = "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/track"
3702 # ("Track\*" in the SSVC spec) The vulnerability contains specific characteristics that may require closer monitoring for changes. CISA recommends remediating Track\* vulnerabilities within standard update timelines.
3703 trackStar = "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/trackStar"
3704
3705
3706# Specifies the VEX justification type.
3707@register("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType", compact_type="security_VexJustificationType", abstract=False)
3708class security_VexJustificationType(SHACLObject):
3709 NODE_KIND = NodeKind.BlankNodeOrIRI
3710 NAMED_INDIVIDUALS = {
3711 "componentNotPresent": "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/componentNotPresent",
3712 "inlineMitigationsAlreadyExist": "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/inlineMitigationsAlreadyExist",
3713 "vulnerableCodeCannotBeControlledByAdversary": "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeCannotBeControlledByAdversary",
3714 "vulnerableCodeNotInExecutePath": "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotInExecutePath",
3715 "vulnerableCodeNotPresent": "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotPresent",
3716 }
3717 # The software is not affected because the vulnerable component is not in the product.
3718 componentNotPresent = "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/componentNotPresent"
3719 # Built-in inline controls or mitigations prevent an adversary from leveraging the vulnerability.
3720 inlineMitigationsAlreadyExist = "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/inlineMitigationsAlreadyExist"
3721 # The vulnerable component is present, and the component contains the vulnerable code. However, vulnerable code is used in such a way that an attacker cannot mount any anticipated attack.
3722 vulnerableCodeCannotBeControlledByAdversary = "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeCannotBeControlledByAdversary"
3723 # The affected code is not reachable through the execution of the code, including non-anticipated states of the product.
3724 vulnerableCodeNotInExecutePath = "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotInExecutePath"
3725 # The product is not affected because the code underlying the vulnerability is not present in the product.
3726 vulnerableCodeNotPresent = "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotPresent"
3727
3728
3729# Abstract ancestor class for all vulnerability assessment relationships.
3730@register("https://spdx.org/rdf/3.0.1/terms/Security/VulnAssessmentRelationship", compact_type="security_VulnAssessmentRelationship", abstract=True)
3731class security_VulnAssessmentRelationship(Relationship):
3732 NODE_KIND = NodeKind.IRI
3733 ID_ALIAS = "spdxId"
3734 NAMED_INDIVIDUALS = {
3735 }
3736
3737 @classmethod
3738 def _register_props(cls):
3739 super()._register_props()
3740 # Identifies who or what supplied the artifact or VulnAssessmentRelationship
3741 # referenced by the Element.
3742 cls._add_property(
3743 "suppliedBy",
3744 ObjectProp(Agent, False, context=[
3745 ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"),
3746 ],),
3747 iri="https://spdx.org/rdf/3.0.1/terms/Core/suppliedBy",
3748 compact="suppliedBy",
3749 )
3750 # Specifies an Element contained in a piece of software where a vulnerability was
3751 # found.
3752 cls._add_property(
3753 "security_assessedElement",
3754 ObjectProp(software_SoftwareArtifact, False),
3755 iri="https://spdx.org/rdf/3.0.1/terms/Security/assessedElement",
3756 compact="security_assessedElement",
3757 )
3758 # Specifies a time when a vulnerability assessment was modified.
3759 cls._add_property(
3760 "security_modifiedTime",
3761 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
3762 iri="https://spdx.org/rdf/3.0.1/terms/Security/modifiedTime",
3763 compact="security_modifiedTime",
3764 )
3765 # Specifies the time when a vulnerability was published.
3766 cls._add_property(
3767 "security_publishedTime",
3768 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
3769 iri="https://spdx.org/rdf/3.0.1/terms/Security/publishedTime",
3770 compact="security_publishedTime",
3771 )
3772 # Specifies the time and date when a vulnerability was withdrawn.
3773 cls._add_property(
3774 "security_withdrawnTime",
3775 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
3776 iri="https://spdx.org/rdf/3.0.1/terms/Security/withdrawnTime",
3777 compact="security_withdrawnTime",
3778 )
3779
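# --- Editorial note with a small example ---
# The DateTimeStamp properties above only accept values matching
# "^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$", i.e. a UTC timestamp with second
# resolution. A conforming value can be produced as below; how strings versus
# datetime objects are coerced is decided by DateTimeStampProp (defined
# earlier in this file), so treat this only as a sketch:
from datetime import datetime, timezone  # needed only for this example
_example_timestamp = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")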
3780
3781# Abstract class representing a license combination consisting of one or more licenses.
3782@register("https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/AnyLicenseInfo", compact_type="simplelicensing_AnyLicenseInfo", abstract=True)
3783class simplelicensing_AnyLicenseInfo(Element):
3784 NODE_KIND = NodeKind.IRI
3785 ID_ALIAS = "spdxId"
3786 NAMED_INDIVIDUALS = {
3787 }
3788
3789
3790# An SPDX Element containing an SPDX license expression string.
3791@register("https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/LicenseExpression", compact_type="simplelicensing_LicenseExpression", abstract=False)
3792class simplelicensing_LicenseExpression(simplelicensing_AnyLicenseInfo):
3793 NODE_KIND = NodeKind.IRI
3794 ID_ALIAS = "spdxId"
3795 NAMED_INDIVIDUALS = {
3796 }
3797
3798 @classmethod
3799 def _register_props(cls):
3800 super()._register_props()
3801 # Maps a LicenseRef or AdditionRef string for a Custom License or a Custom
3802 # License Addition to its URI ID.
3803 cls._add_property(
3804 "simplelicensing_customIdToUri",
3805 ListProp(ObjectProp(DictionaryEntry, False)),
3806 iri="https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/customIdToUri",
3807 compact="simplelicensing_customIdToUri",
3808 )
3809 # A string in the license expression format.
3810 cls._add_property(
3811 "simplelicensing_licenseExpression",
3812 StringProp(),
3813 iri="https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/licenseExpression",
3814 min_count=1,
3815 compact="simplelicensing_licenseExpression",
3816 )
3817 # The version of the SPDX License List used in the license expression.
3818 cls._add_property(
3819 "simplelicensing_licenseListVersion",
3820 StringProp(pattern=r"^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$",),
3821 iri="https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/licenseListVersion",
3822 compact="simplelicensing_licenseListVersion",
3823 )
3824
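# --- Editorial sketch, assuming the generated constructors accept property
# values (and the node ID via _id) as keyword arguments, the usual shacl2code
# convention; the ID and expression below are illustrative ---
# simplelicensing_licenseExpression is the one required property (min_count=1):
_example_expression = simplelicensing_LicenseExpression(
    _id="http://example.org/spdx/license-expression-1",
    simplelicensing_licenseExpression="MIT OR Apache-2.0",
)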
3825
3826# A license or addition that is not listed on the SPDX License List.
3827@register("https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/SimpleLicensingText", compact_type="simplelicensing_SimpleLicensingText", abstract=False)
3828class simplelicensing_SimpleLicensingText(Element):
3829 NODE_KIND = NodeKind.IRI
3830 ID_ALIAS = "spdxId"
3831 NAMED_INDIVIDUALS = {
3832 }
3833
3834 @classmethod
3835 def _register_props(cls):
3836 super()._register_props()
3837 # Identifies the full text of a License or Addition.
3838 cls._add_property(
3839 "simplelicensing_licenseText",
3840 StringProp(),
3841 iri="https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/licenseText",
3842 min_count=1,
3843 compact="simplelicensing_licenseText",
3844 )
3845
3846
3847# A canonical, unique, immutable identifier.
3848@register("https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifier", compact_type="software_ContentIdentifier", abstract=False)
3849class software_ContentIdentifier(IntegrityMethod):
3850 NODE_KIND = NodeKind.BlankNodeOrIRI
3851 NAMED_INDIVIDUALS = {
3852 }
3853
3854 @classmethod
3855 def _register_props(cls):
3856 super()._register_props()
3857 # Specifies the type of the content identifier.
3858 cls._add_property(
3859 "software_contentIdentifierType",
3860 EnumProp([
3861 ("https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/gitoid", "gitoid"),
3862 ("https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/swhid", "swhid"),
3863 ]),
3864 iri="https://spdx.org/rdf/3.0.1/terms/Software/contentIdentifierType",
3865 min_count=1,
3866 compact="software_contentIdentifierType",
3867 )
3868 # Specifies the value of the content identifier.
3869 cls._add_property(
3870 "software_contentIdentifierValue",
3871 AnyURIProp(),
3872 iri="https://spdx.org/rdf/3.0.1/terms/Software/contentIdentifierValue",
3873 min_count=1,
3874 compact="software_contentIdentifierValue",
3875 )
3876
3877
3878# Specifies the type of a content identifier.
3879@register("https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType", compact_type="software_ContentIdentifierType", abstract=False)
3880class software_ContentIdentifierType(SHACLObject):
3881 NODE_KIND = NodeKind.BlankNodeOrIRI
3882 NAMED_INDIVIDUALS = {
3883 "gitoid": "https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/gitoid",
3884 "swhid": "https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/swhid",
3885 }
3886 # [Gitoid](https://www.iana.org/assignments/uri-schemes/prov/gitoid), stands for [Git Object ID](https://git-scm.com/book/en/v2/Git-Internals-Git-Objects). A gitoid of type blob is a unique hash of a binary artifact. A gitoid may represent either an [Artifact Identifier](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-identifier-types) for the software artifact or an [Input Manifest Identifier](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#input-manifest-identifier) for the software artifact's associated [Artifact Input Manifest](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-input-manifest); this ambiguity exists because the Artifact Input Manifest is itself an artifact, and the gitoid of that artifact is its valid identifier. Gitoids calculated on software artifacts (Snippet, File, or Package Elements) should be recorded in the SPDX 3.0 SoftwareArtifact's contentIdentifier property. Gitoids calculated on the Artifact Input Manifest (Input Manifest Identifier) should be recorded in the SPDX 3.0 Element's externalIdentifier property. See [OmniBOR Specification](https://github.com/omnibor/spec/), a minimalistic specification for describing software [Artifact Dependency Graphs](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-dependency-graph-adg).
3887 gitoid = "https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/gitoid"
3888 # SoftWare Hash IDentifier, a persistent intrinsic identifier for digital artifacts, such as files, trees (also known as directories or folders), commits, and other objects typically found in version control systems. The format of the identifiers is defined in the [SWHID specification](https://www.swhid.org/specification/v1.1/4.Syntax) (ISO/IEC DIS 18670). They typically look like `swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2`.
3889 swhid = "https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/swhid"
3890
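# --- Editorial sketch (keyword construction as assumed above; the SWHID value
# is the example quoted in the swhid documentation) ---
_example_content_id = software_ContentIdentifier(
    software_contentIdentifierType=software_ContentIdentifierType.swhid,
    software_contentIdentifierValue="swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2",
)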
3891
3892# Enumeration of the different kinds of SPDX file.
3893@register("https://spdx.org/rdf/3.0.1/terms/Software/FileKindType", compact_type="software_FileKindType", abstract=False)
3894class software_FileKindType(SHACLObject):
3895 NODE_KIND = NodeKind.BlankNodeOrIRI
3896 NAMED_INDIVIDUALS = {
3897 "directory": "https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/directory",
3898 "file": "https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/file",
3899 }
3900 # The file represents a directory and all content stored in that directory.
3901 directory = "https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/directory"
3902 # The file represents a single file (default).
3903 file = "https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/file"
3904
3905
3906# Provides a set of values to be used to describe the common types of SBOMs that
3907# tools may create.
3908@register("https://spdx.org/rdf/3.0.1/terms/Software/SbomType", compact_type="software_SbomType", abstract=False)
3909class software_SbomType(SHACLObject):
3910 NODE_KIND = NodeKind.BlankNodeOrIRI
3911 NAMED_INDIVIDUALS = {
3912 "analyzed": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/analyzed",
3913 "build": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/build",
3914 "deployed": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/deployed",
3915 "design": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/design",
3916 "runtime": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/runtime",
3917 "source": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/source",
3918 }
3919 # SBOM generated through analysis of artifacts (e.g., executables, packages, containers, and virtual machine images) after they have been built. Such analysis generally requires a variety of heuristics. In some contexts, this may also be referred to as a "3rd party" SBOM.
3920 analyzed = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/analyzed"
3921 # SBOM generated as part of the process of building the software to create a releasable artifact (e.g., executable or package) from data such as source files, dependencies, built components, build process ephemeral data, and other SBOMs.
3922 build = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/build"
3923 # SBOM provides an inventory of software that is present on a system. This may be an assembly of other SBOMs that combines analysis of configuration options and examination of execution behavior in a (potentially simulated) deployment environment.
3924 deployed = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/deployed"
3925 # SBOM of intended, planned software project or product with included components (some of which may not yet exist) for a new software artifact.
3926 design = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/design"
3927 # SBOM generated through instrumenting the system running the software, to capture only components present in the system, as well as external call-outs or dynamically loaded components. In some contexts, this may also be referred to as an "Instrumented" or "Dynamic" SBOM.
3928 runtime = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/runtime"
3929 # SBOM created directly from the development environment, source files, and included dependencies used to build a product artifact.
3930 source = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/source"
3931
3932
3933# Provides information about the primary purpose of an Element.
3934@register("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose", compact_type="software_SoftwarePurpose", abstract=False)
3935class software_SoftwarePurpose(SHACLObject):
3936 NODE_KIND = NodeKind.BlankNodeOrIRI
3937 NAMED_INDIVIDUALS = {
3938 "application": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/application",
3939 "archive": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/archive",
3940 "bom": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/bom",
3941 "configuration": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/configuration",
3942 "container": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/container",
3943 "data": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/data",
3944 "device": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/device",
3945 "deviceDriver": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/deviceDriver",
3946 "diskImage": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/diskImage",
3947 "documentation": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/documentation",
3948 "evidence": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/evidence",
3949 "executable": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/executable",
3950 "file": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/file",
3951 "filesystemImage": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/filesystemImage",
3952 "firmware": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/firmware",
3953 "framework": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/framework",
3954 "install": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/install",
3955 "library": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/library",
3956 "manifest": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/manifest",
3957 "model": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/model",
3958 "module": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/module",
3959 "operatingSystem": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/operatingSystem",
3960 "other": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/other",
3961 "patch": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/patch",
3962 "platform": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/platform",
3963 "requirement": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/requirement",
3964 "source": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/source",
3965 "specification": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/specification",
3966 "test": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/test",
3967 }
3968 # The Element is a software application.
3969 application = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/application"
3970 # The Element is an archived collection of one or more files (.tar, .zip, etc.).
3971 archive = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/archive"
3972 # The Element is a bill of materials.
3973 bom = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/bom"
3974 # The Element is configuration data.
3975 configuration = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/configuration"
3976 # The Element is a container image which can be used by a container runtime application.
3977 container = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/container"
3978 # The Element is data.
3979 data = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/data"
3980 # The Element refers to a chipset, processor, or electronic board.
3981 device = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/device"
3982 # The Element represents software that controls hardware devices.
3983 deviceDriver = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/deviceDriver"
3984 # The Element refers to a disk image that can be written to a disk, booted in a VM, etc. A disk image typically contains most or all of the components necessary to boot, such as bootloaders, kernels, firmware, userspace, etc.
3985 diskImage = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/diskImage"
3986 # The Element is documentation.
3987 documentation = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/documentation"
3988 # The Element is the evidence that a specification or requirement has been fulfilled.
3989 evidence = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/evidence"
3990 # The Element is an Artifact that can be run on a computer.
3991 executable = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/executable"
3992 # The Element is a single file which can be independently distributed (configuration file, statically linked binary, Kubernetes deployment, etc.).
3993 file = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/file"
3994 # The Element is a file system image that can be written to a physical or virtual disk partition.
3995 filesystemImage = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/filesystemImage"
3996 # The Element provides low-level control over a device's hardware.
3997 firmware = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/firmware"
3998 # The Element is a software framework.
3999 framework = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/framework"
4000 # The Element is used to install software on disk.
4001 install = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/install"
4002 # The Element is a software library.
4003 library = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/library"
4004 # The Element is a software manifest.
4005 manifest = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/manifest"
4006 # The Element is a machine learning or artificial intelligence model.
4007 model = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/model"
4008 # The Element is a module of a piece of software.
4009 module = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/module"
4010 # The Element is an operating system.
4011 operatingSystem = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/operatingSystem"
4012 # The Element doesn't fit into any of the other categories.
4013 other = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/other"
4014 # The Element contains a set of changes to update, fix, or improve another Element.
4015 patch = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/patch"
4016 # The Element represents a runtime environment.
4017 platform = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/platform"
4018 # The Element provides a requirement needed as input for another Element.
4019 requirement = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/requirement"
4020 # The Element is a single or a collection of source files.
4021 source = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/source"
4022 # The Element is a plan, guideline, or strategy for how to create, perform, or analyze an application.
4023 specification = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/specification"
4024 # The Element is a test used to verify the functionality of a software element.
4025 test = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/test"
4026
4027
4028# Class that describes a build instance of software/artifacts.
4029@register("https://spdx.org/rdf/3.0.1/terms/Build/Build", compact_type="build_Build", abstract=False)
4030class build_Build(Element):
4031 NODE_KIND = NodeKind.IRI
4032 ID_ALIAS = "spdxId"
4033 NAMED_INDIVIDUALS = {
4034 }
4035
4036 @classmethod
4037 def _register_props(cls):
4038 super()._register_props()
4039 # Property that describes the time at which a build stops.
4040 cls._add_property(
4041 "build_buildEndTime",
4042 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
4043 iri="https://spdx.org/rdf/3.0.1/terms/Build/buildEndTime",
4044 compact="build_buildEndTime",
4045 )
4046 # A buildId is a locally unique identifier used by a builder to identify a unique
4047 # instance of a build produced by it.
4048 cls._add_property(
4049 "build_buildId",
4050 StringProp(),
4051 iri="https://spdx.org/rdf/3.0.1/terms/Build/buildId",
4052 compact="build_buildId",
4053 )
4054 # Property describing the start time of a build.
4055 cls._add_property(
4056 "build_buildStartTime",
4057 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
4058 iri="https://spdx.org/rdf/3.0.1/terms/Build/buildStartTime",
4059 compact="build_buildStartTime",
4060 )
4061 # A buildType is a hint that is used to indicate the toolchain, platform, or
4062 # infrastructure that the build was invoked on.
4063 cls._add_property(
4064 "build_buildType",
4065 AnyURIProp(),
4066 iri="https://spdx.org/rdf/3.0.1/terms/Build/buildType",
4067 min_count=1,
4068 compact="build_buildType",
4069 )
4070 # Property that describes the digest of the build configuration file used to
4071 # invoke a build.
4072 cls._add_property(
4073 "build_configSourceDigest",
4074 ListProp(ObjectProp(Hash, False)),
4075 iri="https://spdx.org/rdf/3.0.1/terms/Build/configSourceDigest",
4076 compact="build_configSourceDigest",
4077 )
4078 # Property that describes the invocation entrypoint of a build.
4079 cls._add_property(
4080 "build_configSourceEntrypoint",
4081 ListProp(StringProp()),
4082 iri="https://spdx.org/rdf/3.0.1/terms/Build/configSourceEntrypoint",
4083 compact="build_configSourceEntrypoint",
4084 )
4085 # Property that describes the URI of the build configuration source file.
4086 cls._add_property(
4087 "build_configSourceUri",
4088 ListProp(AnyURIProp()),
4089 iri="https://spdx.org/rdf/3.0.1/terms/Build/configSourceUri",
4090 compact="build_configSourceUri",
4091 )
4092 # Property describing the session in which a build is invoked.
4093 cls._add_property(
4094 "build_environment",
4095 ListProp(ObjectProp(DictionaryEntry, False)),
4096 iri="https://spdx.org/rdf/3.0.1/terms/Build/environment",
4097 compact="build_environment",
4098 )
4099 # Property describing a parameter used in an instance of a build.
4100 cls._add_property(
4101 "build_parameter",
4102 ListProp(ObjectProp(DictionaryEntry, False)),
4103 iri="https://spdx.org/rdf/3.0.1/terms/Build/parameter",
4104 compact="build_parameter",
4105 )
4106
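# --- Editorial sketch (keyword construction as assumed above; the IDs and the
# buildType URI are illustrative placeholders, and required Element properties
# such as creationInfo are omitted here for brevity) ---
_example_build = build_Build(
    _id="http://example.org/spdx/build-0001",
    build_buildType="http://example.org/build-types/bitbake",  # min_count=1
    build_buildId="hypothetical-build-0001",
)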
4107
4108# Agent represents anything with the potential to act on a system.
4109@register("https://spdx.org/rdf/3.0.1/terms/Core/Agent", compact_type="Agent", abstract=False)
4110class Agent(Element):
4111 NODE_KIND = NodeKind.IRI
4112 ID_ALIAS = "spdxId"
4113 NAMED_INDIVIDUALS = {
4114 }
4115
4116
4117# An assertion made in relation to one or more elements.
4118@register("https://spdx.org/rdf/3.0.1/terms/Core/Annotation", compact_type="Annotation", abstract=False)
4119class Annotation(Element):
4120 NODE_KIND = NodeKind.IRI
4121 ID_ALIAS = "spdxId"
4122 NAMED_INDIVIDUALS = {
4123 }
4124
4125 @classmethod
4126 def _register_props(cls):
4127 super()._register_props()
4128 # Describes the type of annotation.
4129 cls._add_property(
4130 "annotationType",
4131 EnumProp([
4132 ("https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/other", "other"),
4133 ("https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/review", "review"),
4134 ]),
4135 iri="https://spdx.org/rdf/3.0.1/terms/Core/annotationType",
4136 min_count=1,
4137 compact="annotationType",
4138 )
4139 # Provides information about the content type of an Element or a Property.
4140 cls._add_property(
4141 "contentType",
4142 StringProp(pattern=r"^[^\/]+\/[^\/]+$",),
4143 iri="https://spdx.org/rdf/3.0.1/terms/Core/contentType",
4144 compact="contentType",
4145 )
4146 # Commentary on an assertion that an annotator has made.
4147 cls._add_property(
4148 "statement",
4149 StringProp(),
4150 iri="https://spdx.org/rdf/3.0.1/terms/Core/statement",
4151 compact="statement",
4152 )
4153 # An Element an annotator has made an assertion about.
4154 cls._add_property(
4155 "subject",
4156 ObjectProp(Element, True, context=[
4157 ("https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", "NoneElement"),
4158 ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"),
4159 ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"),
4160 ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"),
4161 ("https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", "NoAssertionElement"),
4162 ],),
4163 iri="https://spdx.org/rdf/3.0.1/terms/Core/subject",
4164 min_count=1,
4165 compact="subject",
4166 )
4167
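# --- Editorial sketch (keyword construction as assumed above; IDs and text are
# illustrative). Note that contentType must match the ^[^/]+/[^/]+$ pattern,
# i.e. a MIME-style "type/subtype" string ---
_example_annotation = Annotation(
    _id="http://example.org/spdx/annotation-1",
    annotationType="https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/review",
    contentType="text/plain",
    statement="Reviewed for license accuracy.",
    subject=Agent(_id="http://example.org/spdx/agent-1"),
)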
4168
4169# A distinct article or unit within the digital domain.
4170@register("https://spdx.org/rdf/3.0.1/terms/Core/Artifact", compact_type="Artifact", abstract=True)
4171class Artifact(Element):
4172 NODE_KIND = NodeKind.IRI
4173 ID_ALIAS = "spdxId"
4174 NAMED_INDIVIDUALS = {
4175 }
4176
4177 @classmethod
4178 def _register_props(cls):
4179 super()._register_props()
4180 # Specifies the time an artifact was built.
4181 cls._add_property(
4182 "builtTime",
4183 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
4184 iri="https://spdx.org/rdf/3.0.1/terms/Core/builtTime",
4185 compact="builtTime",
4186 )
4187 # Identifies from where or whom the Element originally came.
4188 cls._add_property(
4189 "originatedBy",
4190 ListProp(ObjectProp(Agent, False, context=[
4191 ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"),
4192 ],)),
4193 iri="https://spdx.org/rdf/3.0.1/terms/Core/originatedBy",
4194 compact="originatedBy",
4195 )
4196 # Specifies the time an artifact was released.
4197 cls._add_property(
4198 "releaseTime",
4199 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
4200 iri="https://spdx.org/rdf/3.0.1/terms/Core/releaseTime",
4201 compact="releaseTime",
4202 )
4203 # The name of a relevant standard that may apply to an artifact.
4204 cls._add_property(
4205 "standardName",
4206 ListProp(StringProp()),
4207 iri="https://spdx.org/rdf/3.0.1/terms/Core/standardName",
4208 compact="standardName",
4209 )
4210 # Identifies who or what supplied the artifact or VulnAssessmentRelationship
4211 # referenced by the Element.
4212 cls._add_property(
4213 "suppliedBy",
4214 ObjectProp(Agent, False, context=[
4215 ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"),
4216 ],),
4217 iri="https://spdx.org/rdf/3.0.1/terms/Core/suppliedBy",
4218 compact="suppliedBy",
4219 )
4220 # Specifies the level of support associated with an artifact.
4221 cls._add_property(
4222 "supportLevel",
4223 ListProp(EnumProp([
4224 ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/deployed", "deployed"),
4225 ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/development", "development"),
4226 ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/endOfSupport", "endOfSupport"),
4227 ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/limitedSupport", "limitedSupport"),
4228 ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noAssertion", "noAssertion"),
4229 ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noSupport", "noSupport"),
4230 ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/support", "support"),
4231 ])),
4232 iri="https://spdx.org/rdf/3.0.1/terms/Core/supportLevel",
4233 compact="supportLevel",
4234 )
4235 # Specifies until when the artifact can be used before its usage needs to be
4236 # reassessed.
4237 cls._add_property(
4238 "validUntilTime",
4239 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
4240 iri="https://spdx.org/rdf/3.0.1/terms/Core/validUntilTime",
4241 compact="validUntilTime",
4242 )
4243
4244
4245# A collection of Elements that have a shared context.
4246@register("https://spdx.org/rdf/3.0.1/terms/Core/Bundle", compact_type="Bundle", abstract=False)
4247class Bundle(ElementCollection):
4248 NODE_KIND = NodeKind.IRI
4249 ID_ALIAS = "spdxId"
4250 NAMED_INDIVIDUALS = {
4251 }
4252
4253 @classmethod
4254 def _register_props(cls):
4255 super()._register_props()
4256 # Gives information about the circumstances or unifying properties
4257 # that Elements of the bundle have been assembled under.
4258 cls._add_property(
4259 "context",
4260 StringProp(),
4261 iri="https://spdx.org/rdf/3.0.1/terms/Core/context",
4262 compact="context",
4263 )
4264
4265
4266# A mathematically calculated representation of a grouping of data.
4267@register("https://spdx.org/rdf/3.0.1/terms/Core/Hash", compact_type="Hash", abstract=False)
4268class Hash(IntegrityMethod):
4269 NODE_KIND = NodeKind.BlankNodeOrIRI
4270 NAMED_INDIVIDUALS = {
4271 }
4272
4273 @classmethod
4274 def _register_props(cls):
4275 super()._register_props()
4276 # Specifies the algorithm used for calculating the hash value.
4277 cls._add_property(
4278 "algorithm",
4279 EnumProp([
4280 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/adler32", "adler32"),
4281 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b256", "blake2b256"),
4282 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b384", "blake2b384"),
4283 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b512", "blake2b512"),
4284 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake3", "blake3"),
4285 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsDilithium", "crystalsDilithium"),
4286 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsKyber", "crystalsKyber"),
4287 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/falcon", "falcon"),
4288 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md2", "md2"),
4289 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md4", "md4"),
4290 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md5", "md5"),
4291 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md6", "md6"),
4292 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/other", "other"),
4293 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha1", "sha1"),
4294 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha224", "sha224"),
4295 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha256", "sha256"),
4296 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha384", "sha384"),
4297 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_224", "sha3_224"),
4298 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_256", "sha3_256"),
4299 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_384", "sha3_384"),
4300 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_512", "sha3_512"),
4301 ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha512", "sha512"),
4302 ]),
4303 iri="https://spdx.org/rdf/3.0.1/terms/Core/algorithm",
4304 min_count=1,
4305 compact="algorithm",
4306 )
4307 # The result of applying a hash algorithm to an Element.
4308 cls._add_property(
4309 "hashValue",
4310 StringProp(),
4311 iri="https://spdx.org/rdf/3.0.1/terms/Core/hashValue",
4312 min_count=1,
4313 compact="hashValue",
4314 )
4315
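import hashlib  # needed only for the editorial example below

# --- Editorial sketch (hypothetical helper): wraps a SHA-256 digest of a
# bytes object in a Hash, using the sha256 IRI listed in the algorithm
# EnumProp above ---
def _example_sha256_hash(data):
    return Hash(
        algorithm="https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha256",
        hashValue=hashlib.sha256(data).hexdigest(),
    )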
4316
4317# Provides context for a relationship that occurs in the lifecycle.
4318@register("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopedRelationship", compact_type="LifecycleScopedRelationship", abstract=False)
4319class LifecycleScopedRelationship(Relationship):
4320 NODE_KIND = NodeKind.IRI
4321 ID_ALIAS = "spdxId"
4322 NAMED_INDIVIDUALS = {
4323 }
4324
4325 @classmethod
4326 def _register_props(cls):
4327 super()._register_props()
4328 # Captures the scope of information about a specific relationship between elements.
4329 cls._add_property(
4330 "scope",
4331 EnumProp([
4332 ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/build", "build"),
4333 ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/design", "design"),
4334 ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/development", "development"),
4335 ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/other", "other"),
4336 ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/runtime", "runtime"),
4337 ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/test", "test"),
4338 ]),
4339 iri="https://spdx.org/rdf/3.0.1/terms/Core/scope",
4340 compact="scope",
4341 )
4342
4343
4344# A group of people who work together in an organized way for a shared purpose.
4345@register("https://spdx.org/rdf/3.0.1/terms/Core/Organization", compact_type="Organization", abstract=False)
4346class Organization(Agent):
4347 NODE_KIND = NodeKind.IRI
4348 ID_ALIAS = "spdxId"
4349 NAMED_INDIVIDUALS = {
4350 "SpdxOrganization": "https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization",
4351 }
4352 # An Organization representing the SPDX Project.
4353 SpdxOrganization = "https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization"
4354
4355
4356# An individual human being.
4357@register("https://spdx.org/rdf/3.0.1/terms/Core/Person", compact_type="Person", abstract=False)
4358class Person(Agent):
4359 NODE_KIND = NodeKind.IRI
4360 ID_ALIAS = "spdxId"
4361 NAMED_INDIVIDUALS = {
4362 }
4363
4364
4365# A software agent.
4366@register("https://spdx.org/rdf/3.0.1/terms/Core/SoftwareAgent", compact_type="SoftwareAgent", abstract=False)
4367class SoftwareAgent(Agent):
4368 NODE_KIND = NodeKind.IRI
4369 ID_ALIAS = "spdxId"
4370 NAMED_INDIVIDUALS = {
4371 }
4372
4373
4374# Portion of an AnyLicenseInfo representing a set of licensing information
4375# where all elements apply.
4376@register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/ConjunctiveLicenseSet", compact_type="expandedlicensing_ConjunctiveLicenseSet", abstract=False)
4377class expandedlicensing_ConjunctiveLicenseSet(simplelicensing_AnyLicenseInfo):
4378 NODE_KIND = NodeKind.IRI
4379 ID_ALIAS = "spdxId"
4380 NAMED_INDIVIDUALS = {
4381 }
4382
4383 @classmethod
4384 def _register_props(cls):
4385 super()._register_props()
4386 # A license expression participating in a license set.
4387 cls._add_property(
4388 "expandedlicensing_member",
4389 ListProp(ObjectProp(simplelicensing_AnyLicenseInfo, False, context=[
4390 ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"),
4391 ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"),
4392 ],)),
4393 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/member",
4394 min_count=2,
4395 compact="expandedlicensing_member",
4396 )
4397
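# --- Editorial sketch (keyword construction as assumed above; IDs are
# illustrative). expandedlicensing_member has min_count=2, so a conjunctive
# set needs at least two members, e.g. two LicenseExpression elements ---
_example_conjunction = expandedlicensing_ConjunctiveLicenseSet(
    _id="http://example.org/spdx/license-set-1",
    expandedlicensing_member=[
        simplelicensing_LicenseExpression(
            _id="http://example.org/spdx/lic-mit",
            simplelicensing_licenseExpression="MIT",
        ),
        simplelicensing_LicenseExpression(
            _id="http://example.org/spdx/lic-apache",
            simplelicensing_licenseExpression="Apache-2.0",
        ),
    ],
)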
4398
4399# A license addition that is not listed on the SPDX Exceptions List.
4400@register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/CustomLicenseAddition", compact_type="expandedlicensing_CustomLicenseAddition", abstract=False)
4401class expandedlicensing_CustomLicenseAddition(expandedlicensing_LicenseAddition):
4402 NODE_KIND = NodeKind.IRI
4403 ID_ALIAS = "spdxId"
4404 NAMED_INDIVIDUALS = {
4405 }
4406
4407
4408# Portion of an AnyLicenseInfo representing a set of licensing information where
4409# only one of the elements applies.
4410@register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/DisjunctiveLicenseSet", compact_type="expandedlicensing_DisjunctiveLicenseSet", abstract=False)
4411class expandedlicensing_DisjunctiveLicenseSet(simplelicensing_AnyLicenseInfo):
4412 NODE_KIND = NodeKind.IRI
4413 ID_ALIAS = "spdxId"
4414 NAMED_INDIVIDUALS = {
4415 }
4416
4417 @classmethod
4418 def _register_props(cls):
4419 super()._register_props()
4420 # A license expression participating in a license set.
4421 cls._add_property(
4422 "expandedlicensing_member",
4423 ListProp(ObjectProp(simplelicensing_AnyLicenseInfo, False, context=[
4424 ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"),
4425 ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"),
4426 ],)),
4427 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/member",
4428 min_count=2,
4429 compact="expandedlicensing_member",
4430 )
4431
4432
4433# Abstract class representing a License or an OrLaterOperator.
4434@register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/ExtendableLicense", compact_type="expandedlicensing_ExtendableLicense", abstract=True)
4435class expandedlicensing_ExtendableLicense(simplelicensing_AnyLicenseInfo):
4436 NODE_KIND = NodeKind.IRI
4437 ID_ALIAS = "spdxId"
4438 NAMED_INDIVIDUALS = {
4439 }
4440
4441
4442# A concrete subclass of AnyLicenseInfo used by Individuals in the
4443# ExpandedLicensing profile.
4444@register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/IndividualLicensingInfo", compact_type="expandedlicensing_IndividualLicensingInfo", abstract=False)
4445class expandedlicensing_IndividualLicensingInfo(simplelicensing_AnyLicenseInfo):
4446 NODE_KIND = NodeKind.IRI
4447 ID_ALIAS = "spdxId"
4448 NAMED_INDIVIDUALS = {
4449 "NoAssertionLicense": "https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense",
4450 "NoneLicense": "https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense",
4451 }
4452 # An Individual Value for License when no assertion can be made about its actual
4453 # value.
4454 NoAssertionLicense = "https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense"
4455 # An Individual Value for License where the SPDX data creator determines that no
4456 # license is present.
4457 NoneLicense = "https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense"
4458
4459
4460# Abstract class for the portion of an AnyLicenseInfo representing a license.
4461@register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/License", compact_type="expandedlicensing_License", abstract=True)
4462class expandedlicensing_License(expandedlicensing_ExtendableLicense):
4463 NODE_KIND = NodeKind.IRI
4464 ID_ALIAS = "spdxId"
4465 NAMED_INDIVIDUALS = {
4466 }
4467
4468 @classmethod
4469 def _register_props(cls):
4470 super()._register_props()
4471 # Specifies whether a license or additional text identifier has been marked as
4472 # deprecated.
4473 cls._add_property(
4474 "expandedlicensing_isDeprecatedLicenseId",
4475 BooleanProp(),
4476 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/isDeprecatedLicenseId",
4477 compact="expandedlicensing_isDeprecatedLicenseId",
4478 )
4479 # Specifies whether the License is listed as free by the
4480 # Free Software Foundation (FSF).
4481 cls._add_property(
4482 "expandedlicensing_isFsfLibre",
4483 BooleanProp(),
4484 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/isFsfLibre",
4485 compact="expandedlicensing_isFsfLibre",
4486 )
4487 # Specifies whether the License is listed as approved by the
4488 # Open Source Initiative (OSI).
4489 cls._add_property(
4490 "expandedlicensing_isOsiApproved",
4491 BooleanProp(),
4492 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/isOsiApproved",
4493 compact="expandedlicensing_isOsiApproved",
4494 )
4495 # Identifies all the text and metadata associated with a license in the license
4496 # XML format.
4497 cls._add_property(
4498 "expandedlicensing_licenseXml",
4499 StringProp(),
4500 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/licenseXml",
4501 compact="expandedlicensing_licenseXml",
4502 )
4503 # Specifies the licenseId that is preferred to be used in place of a deprecated
4504 # License or LicenseAddition.
4505 cls._add_property(
4506 "expandedlicensing_obsoletedBy",
4507 StringProp(),
4508 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/obsoletedBy",
4509 compact="expandedlicensing_obsoletedBy",
4510 )
4511 # Contains a URL where the License or LicenseAddition can be found in use.
4512 cls._add_property(
4513 "expandedlicensing_seeAlso",
4514 ListProp(AnyURIProp()),
4515 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/seeAlso",
4516 compact="expandedlicensing_seeAlso",
4517 )
4518 # Provides a License author's preferred text to indicate that a file is covered
4519 # by the License.
4520 cls._add_property(
4521 "expandedlicensing_standardLicenseHeader",
4522 StringProp(),
4523 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/standardLicenseHeader",
4524 compact="expandedlicensing_standardLicenseHeader",
4525 )
4526 # Identifies the full text of a License, in SPDX templating format.
4527 cls._add_property(
4528 "expandedlicensing_standardLicenseTemplate",
4529 StringProp(),
4530 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/standardLicenseTemplate",
4531 compact="expandedlicensing_standardLicenseTemplate",
4532 )
4533 # Identifies the full text of a License or Addition.
4534 cls._add_property(
4535 "simplelicensing_licenseText",
4536 StringProp(),
4537 iri="https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/licenseText",
4538 min_count=1,
4539 compact="simplelicensing_licenseText",
4540 )
4541
4542
4543# A license that is listed on the SPDX License List.
4544@register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/ListedLicense", compact_type="expandedlicensing_ListedLicense", abstract=False)
4545class expandedlicensing_ListedLicense(expandedlicensing_License):
4546 NODE_KIND = NodeKind.IRI
4547 ID_ALIAS = "spdxId"
4548 NAMED_INDIVIDUALS = {
4549 }
4550
4551 @classmethod
4552 def _register_props(cls):
4553 super()._register_props()
4554 # Specifies the SPDX License List version in which this license or exception
4555 # identifier was deprecated.
4556 cls._add_property(
4557 "expandedlicensing_deprecatedVersion",
4558 StringProp(),
4559 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/deprecatedVersion",
4560 compact="expandedlicensing_deprecatedVersion",
4561 )
4562 # Specifies the SPDX License List version in which this ListedLicense or
4563 # ListedLicenseException identifier was first added.
4564 cls._add_property(
4565 "expandedlicensing_listVersionAdded",
4566 StringProp(),
4567 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/listVersionAdded",
4568 compact="expandedlicensing_listVersionAdded",
4569 )
4570
4571
4572# Portion of an AnyLicenseInfo representing this version, or any later version,
4573# of the indicated License.
4574@register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/OrLaterOperator", compact_type="expandedlicensing_OrLaterOperator", abstract=False)
4575class expandedlicensing_OrLaterOperator(expandedlicensing_ExtendableLicense):
4576 NODE_KIND = NodeKind.IRI
4577 ID_ALIAS = "spdxId"
4578 NAMED_INDIVIDUALS = {
4579 }
4580
4581 @classmethod
4582 def _register_props(cls):
4583 super()._register_props()
4584 # A License participating in an 'or later' model.
4585 cls._add_property(
4586 "expandedlicensing_subjectLicense",
4587 ObjectProp(expandedlicensing_License, True),
4588 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/subjectLicense",
4589 min_count=1,
4590 compact="expandedlicensing_subjectLicense",
4591 )
4592
4593
4594# Portion of an AnyLicenseInfo representing a License which has additional
4595# text applied to it.
4596@register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/WithAdditionOperator", compact_type="expandedlicensing_WithAdditionOperator", abstract=False)
4597class expandedlicensing_WithAdditionOperator(simplelicensing_AnyLicenseInfo):
4598 NODE_KIND = NodeKind.IRI
4599 ID_ALIAS = "spdxId"
4600 NAMED_INDIVIDUALS = {
4601 }
4602
4603 @classmethod
4604 def _register_props(cls):
4605 super()._register_props()
4606 # A LicenseAddition participating in a 'with addition' model.
4607 cls._add_property(
4608 "expandedlicensing_subjectAddition",
4609 ObjectProp(expandedlicensing_LicenseAddition, True),
4610 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/subjectAddition",
4611 min_count=1,
4612 compact="expandedlicensing_subjectAddition",
4613 )
4614 # A License participating in a 'with addition' model.
4615 cls._add_property(
4616 "expandedlicensing_subjectExtendableLicense",
4617 ObjectProp(expandedlicensing_ExtendableLicense, True),
4618 iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/subjectExtendableLicense",
4619 min_count=1,
4620 compact="expandedlicensing_subjectExtendableLicense",
4621 )
4622
4623
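As a rough sketch of how these operator classes compose at the Python level (the spdxId values, the creation_info object, and the license text are illustrative assumptions, not part of the generated model; the keyword-argument construction follows the pattern used by meta/lib/oe/spdx30_tasks.py below):

import oe.spdx30

creation_info = ...  # assumed: a CreationInfo taken from the enclosing document

# "GPL-2.0-or-later" modeled as an OrLaterOperator wrapping a ListedLicense
gpl2 = oe.spdx30.expandedlicensing_ListedLicense(
    _id="https://spdx.org/licenses/GPL-2.0-only",        # illustrative spdxId
    creationInfo=creation_info,
    name="GNU General Public License v2.0 only",
    simplelicensing_licenseText="<full license text>",   # mandatory (min_count=1)
)
or_later = oe.spdx30.expandedlicensing_OrLaterOperator(
    _id="http://example.com/licenses/GPL-2.0-or-later",  # illustrative spdxId
    creationInfo=creation_info,
    expandedlicensing_subjectLicense=gpl2,
)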
4624# A type of extension consisting of a list of name value pairs.
4625@register("https://spdx.org/rdf/3.0.1/terms/Extension/CdxPropertiesExtension", compact_type="extension_CdxPropertiesExtension", abstract=False)
4626class extension_CdxPropertiesExtension(extension_Extension):
4627 NODE_KIND = NodeKind.BlankNodeOrIRI
4628 NAMED_INDIVIDUALS = {
4629 }
4630
4631 @classmethod
4632 def _register_props(cls):
4633 super()._register_props()
4634        # Provides a map of property names to values.
4635 cls._add_property(
4636 "extension_cdxProperty",
4637 ListProp(ObjectProp(extension_CdxPropertyEntry, False)),
4638 iri="https://spdx.org/rdf/3.0.1/terms/Extension/cdxProperty",
4639 min_count=1,
4640 compact="extension_cdxProperty",
4641 )
4642
4643
4644# Provides a CVSS version 2.0 assessment for a vulnerability.
4645@register("https://spdx.org/rdf/3.0.1/terms/Security/CvssV2VulnAssessmentRelationship", compact_type="security_CvssV2VulnAssessmentRelationship", abstract=False)
4646class security_CvssV2VulnAssessmentRelationship(security_VulnAssessmentRelationship):
4647 NODE_KIND = NodeKind.IRI
4648 ID_ALIAS = "spdxId"
4649 NAMED_INDIVIDUALS = {
4650 }
4651
4652 @classmethod
4653 def _register_props(cls):
4654 super()._register_props()
4655 # Provides a numerical (0-10) representation of the severity of a vulnerability.
4656 cls._add_property(
4657 "security_score",
4658 FloatProp(),
4659 iri="https://spdx.org/rdf/3.0.1/terms/Security/score",
4660 min_count=1,
4661 compact="security_score",
4662 )
4663 # Specifies the CVSS vector string for a vulnerability.
4664 cls._add_property(
4665 "security_vectorString",
4666 StringProp(),
4667 iri="https://spdx.org/rdf/3.0.1/terms/Security/vectorString",
4668 min_count=1,
4669 compact="security_vectorString",
4670 )
4671
4672
4673# Provides a CVSS version 3 assessment for a vulnerability.
4674@register("https://spdx.org/rdf/3.0.1/terms/Security/CvssV3VulnAssessmentRelationship", compact_type="security_CvssV3VulnAssessmentRelationship", abstract=False)
4675class security_CvssV3VulnAssessmentRelationship(security_VulnAssessmentRelationship):
4676 NODE_KIND = NodeKind.IRI
4677 ID_ALIAS = "spdxId"
4678 NAMED_INDIVIDUALS = {
4679 }
4680
4681 @classmethod
4682 def _register_props(cls):
4683 super()._register_props()
4684 # Provides a numerical (0-10) representation of the severity of a vulnerability.
4685 cls._add_property(
4686 "security_score",
4687 FloatProp(),
4688 iri="https://spdx.org/rdf/3.0.1/terms/Security/score",
4689 min_count=1,
4690 compact="security_score",
4691 )
4692 # Specifies the CVSS qualitative severity rating of a vulnerability in relation to a piece of software.
4693 cls._add_property(
4694 "security_severity",
4695 EnumProp([
4696 ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/critical", "critical"),
4697 ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/high", "high"),
4698 ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/low", "low"),
4699 ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/medium", "medium"),
4700 ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/none", "none"),
4701 ]),
4702 iri="https://spdx.org/rdf/3.0.1/terms/Security/severity",
4703 min_count=1,
4704 compact="security_severity",
4705 )
4706 # Specifies the CVSS vector string for a vulnerability.
4707 cls._add_property(
4708 "security_vectorString",
4709 StringProp(),
4710 iri="https://spdx.org/rdf/3.0.1/terms/Security/vectorString",
4711 min_count=1,
4712 compact="security_vectorString",
4713 )
4714
4715
4716# Provides a CVSS version 4 assessment for a vulnerability.
4717@register("https://spdx.org/rdf/3.0.1/terms/Security/CvssV4VulnAssessmentRelationship", compact_type="security_CvssV4VulnAssessmentRelationship", abstract=False)
4718class security_CvssV4VulnAssessmentRelationship(security_VulnAssessmentRelationship):
4719 NODE_KIND = NodeKind.IRI
4720 ID_ALIAS = "spdxId"
4721 NAMED_INDIVIDUALS = {
4722 }
4723
4724 @classmethod
4725 def _register_props(cls):
4726 super()._register_props()
4727 # Provides a numerical (0-10) representation of the severity of a vulnerability.
4728 cls._add_property(
4729 "security_score",
4730 FloatProp(),
4731 iri="https://spdx.org/rdf/3.0.1/terms/Security/score",
4732 min_count=1,
4733 compact="security_score",
4734 )
4735 # Specifies the CVSS qualitative severity rating of a vulnerability in relation to a piece of software.
4736 cls._add_property(
4737 "security_severity",
4738 EnumProp([
4739 ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/critical", "critical"),
4740 ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/high", "high"),
4741 ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/low", "low"),
4742 ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/medium", "medium"),
4743 ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/none", "none"),
4744 ]),
4745 iri="https://spdx.org/rdf/3.0.1/terms/Security/severity",
4746 min_count=1,
4747 compact="security_severity",
4748 )
4749 # Specifies the CVSS vector string for a vulnerability.
4750 cls._add_property(
4751 "security_vectorString",
4752 StringProp(),
4753 iri="https://spdx.org/rdf/3.0.1/terms/Security/vectorString",
4754 min_count=1,
4755 compact="security_vectorString",
4756 )
4757
4758
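A CVSS assessment is thus a relationship object that carries the score on itself; a minimal sketch for the v3 variant (the spdxId and creation_info are illustrative, and the inherited Relationship fields such as from_, to and relationshipType, which tie the assessment to a Vulnerability and a product, are omitted for brevity). The severity is given as one of the raw IRIs enumerated above; an accessor in the style of oe.spdx30.HashAlgorithm.sha256, used later in spdx30_tasks.py, may also exist:

import oe.spdx30

assessment = oe.spdx30.security_CvssV3VulnAssessmentRelationship(
    _id="http://example.com/cvssv3-assessment",  # illustrative spdxId
    creationInfo=creation_info,                  # assumed to exist
    security_score=9.8,
    security_severity="https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/critical",
    security_vectorString="CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
)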
4759# Provides an EPSS assessment for a vulnerability.
4760@register("https://spdx.org/rdf/3.0.1/terms/Security/EpssVulnAssessmentRelationship", compact_type="security_EpssVulnAssessmentRelationship", abstract=False)
4761class security_EpssVulnAssessmentRelationship(security_VulnAssessmentRelationship):
4762 NODE_KIND = NodeKind.IRI
4763 ID_ALIAS = "spdxId"
4764 NAMED_INDIVIDUALS = {
4765 }
4766
4767 @classmethod
4768 def _register_props(cls):
4769 super()._register_props()
4770 # The percentile of the current probability score.
4771 cls._add_property(
4772 "security_percentile",
4773 FloatProp(),
4774 iri="https://spdx.org/rdf/3.0.1/terms/Security/percentile",
4775 min_count=1,
4776 compact="security_percentile",
4777 )
4778 # A probability score between 0 and 1 of a vulnerability being exploited.
4779 cls._add_property(
4780 "security_probability",
4781 FloatProp(),
4782 iri="https://spdx.org/rdf/3.0.1/terms/Security/probability",
4783 min_count=1,
4784 compact="security_probability",
4785 )
4786
4787
4788# Provides an exploit assessment of a vulnerability.
4789@register("https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogVulnAssessmentRelationship", compact_type="security_ExploitCatalogVulnAssessmentRelationship", abstract=False)
4790class security_ExploitCatalogVulnAssessmentRelationship(security_VulnAssessmentRelationship):
4791 NODE_KIND = NodeKind.IRI
4792 ID_ALIAS = "spdxId"
4793 NAMED_INDIVIDUALS = {
4794 }
4795
4796 @classmethod
4797 def _register_props(cls):
4798 super()._register_props()
4799 # Specifies the exploit catalog type.
4800 cls._add_property(
4801 "security_catalogType",
4802 EnumProp([
4803 ("https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/kev", "kev"),
4804 ("https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/other", "other"),
4805 ]),
4806 iri="https://spdx.org/rdf/3.0.1/terms/Security/catalogType",
4807 min_count=1,
4808 compact="security_catalogType",
4809 )
4810        # Describes that a CVE is known to have an exploit because it has been listed in an exploit catalog.
4811 cls._add_property(
4812 "security_exploited",
4813 BooleanProp(),
4814 iri="https://spdx.org/rdf/3.0.1/terms/Security/exploited",
4815 min_count=1,
4816 compact="security_exploited",
4817 )
4818 # Provides the location of an exploit catalog.
4819 cls._add_property(
4820 "security_locator",
4821 AnyURIProp(),
4822 iri="https://spdx.org/rdf/3.0.1/terms/Security/locator",
4823 min_count=1,
4824 compact="security_locator",
4825 )
4826
4827
4828# Provides an SSVC assessment for a vulnerability.
4829@register("https://spdx.org/rdf/3.0.1/terms/Security/SsvcVulnAssessmentRelationship", compact_type="security_SsvcVulnAssessmentRelationship", abstract=False)
4830class security_SsvcVulnAssessmentRelationship(security_VulnAssessmentRelationship):
4831 NODE_KIND = NodeKind.IRI
4832 ID_ALIAS = "spdxId"
4833 NAMED_INDIVIDUALS = {
4834 }
4835
4836 @classmethod
4837 def _register_props(cls):
4838 super()._register_props()
4839        # Provides the enumeration of possible decisions in the
4840 # [Stakeholder-Specific Vulnerability Categorization (SSVC) decision tree](https://www.cisa.gov/stakeholder-specific-vulnerability-categorization-ssvc).
4841 cls._add_property(
4842 "security_decisionType",
4843 EnumProp([
4844 ("https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/act", "act"),
4845 ("https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/attend", "attend"),
4846 ("https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/track", "track"),
4847 ("https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/trackStar", "trackStar"),
4848 ]),
4849 iri="https://spdx.org/rdf/3.0.1/terms/Security/decisionType",
4850 min_count=1,
4851 compact="security_decisionType",
4852 )
4853
4854
4855# Abstract ancestor class for all VEX relationships.
4856@register("https://spdx.org/rdf/3.0.1/terms/Security/VexVulnAssessmentRelationship", compact_type="security_VexVulnAssessmentRelationship", abstract=True)
4857class security_VexVulnAssessmentRelationship(security_VulnAssessmentRelationship):
4858 NODE_KIND = NodeKind.IRI
4859 ID_ALIAS = "spdxId"
4860 NAMED_INDIVIDUALS = {
4861 }
4862
4863 @classmethod
4864 def _register_props(cls):
4865 super()._register_props()
4866 # Conveys information about how VEX status was determined.
4867 cls._add_property(
4868 "security_statusNotes",
4869 StringProp(),
4870 iri="https://spdx.org/rdf/3.0.1/terms/Security/statusNotes",
4871 compact="security_statusNotes",
4872 )
4873 # Specifies the version of a VEX statement.
4874 cls._add_property(
4875 "security_vexVersion",
4876 StringProp(),
4877 iri="https://spdx.org/rdf/3.0.1/terms/Security/vexVersion",
4878 compact="security_vexVersion",
4879 )
4880
4881
4882# Specifies a vulnerability and its associated information.
4883@register("https://spdx.org/rdf/3.0.1/terms/Security/Vulnerability", compact_type="security_Vulnerability", abstract=False)
4884class security_Vulnerability(Artifact):
4885 NODE_KIND = NodeKind.IRI
4886 ID_ALIAS = "spdxId"
4887 NAMED_INDIVIDUALS = {
4888 }
4889
4890 @classmethod
4891 def _register_props(cls):
4892 super()._register_props()
4893        # Specifies a time when a vulnerability assessment was modified.
4894 cls._add_property(
4895 "security_modifiedTime",
4896 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
4897 iri="https://spdx.org/rdf/3.0.1/terms/Security/modifiedTime",
4898 compact="security_modifiedTime",
4899 )
4900 # Specifies the time when a vulnerability was published.
4901 cls._add_property(
4902 "security_publishedTime",
4903 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
4904 iri="https://spdx.org/rdf/3.0.1/terms/Security/publishedTime",
4905 compact="security_publishedTime",
4906 )
4907        # Specifies the time and date when a vulnerability was withdrawn.
4908 cls._add_property(
4909 "security_withdrawnTime",
4910 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
4911 iri="https://spdx.org/rdf/3.0.1/terms/Security/withdrawnTime",
4912 compact="security_withdrawnTime",
4913 )
4914
4915
4916# A distinct article or unit related to Software.
4917@register("https://spdx.org/rdf/3.0.1/terms/Software/SoftwareArtifact", compact_type="software_SoftwareArtifact", abstract=True)
4918class software_SoftwareArtifact(Artifact):
4919 NODE_KIND = NodeKind.IRI
4920 ID_ALIAS = "spdxId"
4921 NAMED_INDIVIDUALS = {
4922 }
4923
4924 @classmethod
4925 def _register_props(cls):
4926 super()._register_props()
4927 # Provides additional purpose information of the software artifact.
4928 cls._add_property(
4929 "software_additionalPurpose",
4930 ListProp(EnumProp([
4931 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/application", "application"),
4932 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/archive", "archive"),
4933 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/bom", "bom"),
4934 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/configuration", "configuration"),
4935 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/container", "container"),
4936 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/data", "data"),
4937 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/device", "device"),
4938 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/deviceDriver", "deviceDriver"),
4939 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/diskImage", "diskImage"),
4940 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/documentation", "documentation"),
4941 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/evidence", "evidence"),
4942 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/executable", "executable"),
4943 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/file", "file"),
4944 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/filesystemImage", "filesystemImage"),
4945 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/firmware", "firmware"),
4946 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/framework", "framework"),
4947 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/install", "install"),
4948 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/library", "library"),
4949 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/manifest", "manifest"),
4950 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/model", "model"),
4951 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/module", "module"),
4952 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/operatingSystem", "operatingSystem"),
4953 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/other", "other"),
4954 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/patch", "patch"),
4955 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/platform", "platform"),
4956 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/requirement", "requirement"),
4957 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/source", "source"),
4958 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/specification", "specification"),
4959 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/test", "test"),
4960 ])),
4961 iri="https://spdx.org/rdf/3.0.1/terms/Software/additionalPurpose",
4962 compact="software_additionalPurpose",
4963 )
4964 # Provides a place for the SPDX data creator to record acknowledgement text for
4965 # a software Package, File or Snippet.
4966 cls._add_property(
4967 "software_attributionText",
4968 ListProp(StringProp()),
4969 iri="https://spdx.org/rdf/3.0.1/terms/Software/attributionText",
4970 compact="software_attributionText",
4971 )
4972        # A canonical, unique, immutable identifier of the artifact content that may be
4973 # used for verifying its identity and/or integrity.
4974 cls._add_property(
4975 "software_contentIdentifier",
4976 ListProp(ObjectProp(software_ContentIdentifier, False)),
4977 iri="https://spdx.org/rdf/3.0.1/terms/Software/contentIdentifier",
4978 compact="software_contentIdentifier",
4979 )
4980 # Identifies the text of one or more copyright notices for a software Package,
4981 # File or Snippet, if any.
4982 cls._add_property(
4983 "software_copyrightText",
4984 StringProp(),
4985 iri="https://spdx.org/rdf/3.0.1/terms/Software/copyrightText",
4986 compact="software_copyrightText",
4987 )
4988 # Provides information about the primary purpose of the software artifact.
4989 cls._add_property(
4990 "software_primaryPurpose",
4991 EnumProp([
4992 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/application", "application"),
4993 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/archive", "archive"),
4994 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/bom", "bom"),
4995 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/configuration", "configuration"),
4996 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/container", "container"),
4997 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/data", "data"),
4998 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/device", "device"),
4999 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/deviceDriver", "deviceDriver"),
5000 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/diskImage", "diskImage"),
5001 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/documentation", "documentation"),
5002 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/evidence", "evidence"),
5003 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/executable", "executable"),
5004 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/file", "file"),
5005 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/filesystemImage", "filesystemImage"),
5006 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/firmware", "firmware"),
5007 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/framework", "framework"),
5008 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/install", "install"),
5009 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/library", "library"),
5010 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/manifest", "manifest"),
5011 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/model", "model"),
5012 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/module", "module"),
5013 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/operatingSystem", "operatingSystem"),
5014 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/other", "other"),
5015 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/patch", "patch"),
5016 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/platform", "platform"),
5017 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/requirement", "requirement"),
5018 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/source", "source"),
5019 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/specification", "specification"),
5020 ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/test", "test"),
5021 ]),
5022 iri="https://spdx.org/rdf/3.0.1/terms/Software/primaryPurpose",
5023 compact="software_primaryPurpose",
5024 )
5025
5026
5027# A container for a grouping of SPDX-3.0 content characterizing details
5028# (provenance, composition, licensing, etc.) about a product.
5029@register("https://spdx.org/rdf/3.0.1/terms/Core/Bom", compact_type="Bom", abstract=False)
5030class Bom(Bundle):
5031 NODE_KIND = NodeKind.IRI
5032 ID_ALIAS = "spdxId"
5033 NAMED_INDIVIDUALS = {
5034 }
5035
5036
5037# A license that is not listed on the SPDX License List.
5038@register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/CustomLicense", compact_type="expandedlicensing_CustomLicense", abstract=False)
5039class expandedlicensing_CustomLicense(expandedlicensing_License):
5040 NODE_KIND = NodeKind.IRI
5041 ID_ALIAS = "spdxId"
5042 NAMED_INDIVIDUALS = {
5043 }
5044
5045
5046# Connects a vulnerability and an element designating the element as a product
5047# affected by the vulnerability.
5048@register("https://spdx.org/rdf/3.0.1/terms/Security/VexAffectedVulnAssessmentRelationship", compact_type="security_VexAffectedVulnAssessmentRelationship", abstract=False)
5049class security_VexAffectedVulnAssessmentRelationship(security_VexVulnAssessmentRelationship):
5050 NODE_KIND = NodeKind.IRI
5051 ID_ALIAS = "spdxId"
5052 NAMED_INDIVIDUALS = {
5053 }
5054
5055 @classmethod
5056 def _register_props(cls):
5057 super()._register_props()
5058        # Provides advice on how to mitigate or remediate a vulnerability when a VEX product
5059 # is affected by it.
5060 cls._add_property(
5061 "security_actionStatement",
5062 StringProp(),
5063 iri="https://spdx.org/rdf/3.0.1/terms/Security/actionStatement",
5064 min_count=1,
5065 compact="security_actionStatement",
5066 )
5067 # Records the time when a recommended action was communicated in a VEX statement
5068 # to mitigate a vulnerability.
5069 cls._add_property(
5070 "security_actionStatementTime",
5071 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
5072 iri="https://spdx.org/rdf/3.0.1/terms/Security/actionStatementTime",
5073 compact="security_actionStatementTime",
5074 )
5075
5076
5077# Links a vulnerability and elements representing products (in the VEX sense) where
5078# a fix has been applied and which are no longer affected.
5079@register("https://spdx.org/rdf/3.0.1/terms/Security/VexFixedVulnAssessmentRelationship", compact_type="security_VexFixedVulnAssessmentRelationship", abstract=False)
5080class security_VexFixedVulnAssessmentRelationship(security_VexVulnAssessmentRelationship):
5081 NODE_KIND = NodeKind.IRI
5082 ID_ALIAS = "spdxId"
5083 NAMED_INDIVIDUALS = {
5084 }
5085
5086
5087# Links a vulnerability and one or more elements designating the latter as products
5088# not affected by the vulnerability.
5089@register("https://spdx.org/rdf/3.0.1/terms/Security/VexNotAffectedVulnAssessmentRelationship", compact_type="security_VexNotAffectedVulnAssessmentRelationship", abstract=False)
5090class security_VexNotAffectedVulnAssessmentRelationship(security_VexVulnAssessmentRelationship):
5091 NODE_KIND = NodeKind.IRI
5092 ID_ALIAS = "spdxId"
5093 NAMED_INDIVIDUALS = {
5094 }
5095
5096 @classmethod
5097 def _register_props(cls):
5098 super()._register_props()
5099 # Explains why a VEX product is not affected by a vulnerability. It is an
5100 # alternative in VexNotAffectedVulnAssessmentRelationship to the machine-readable
5101 # justification label.
5102 cls._add_property(
5103 "security_impactStatement",
5104 StringProp(),
5105 iri="https://spdx.org/rdf/3.0.1/terms/Security/impactStatement",
5106 compact="security_impactStatement",
5107 )
5108 # Timestamp of impact statement.
5109 cls._add_property(
5110 "security_impactStatementTime",
5111 DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
5112 iri="https://spdx.org/rdf/3.0.1/terms/Security/impactStatementTime",
5113 compact="security_impactStatementTime",
5114 )
5115 # Impact justification label to be used when linking a vulnerability to an element
5116 # representing a VEX product with a VexNotAffectedVulnAssessmentRelationship
5117 # relationship.
5118 cls._add_property(
5119 "security_justificationType",
5120 EnumProp([
5121 ("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/componentNotPresent", "componentNotPresent"),
5122 ("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/inlineMitigationsAlreadyExist", "inlineMitigationsAlreadyExist"),
5123 ("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeCannotBeControlledByAdversary", "vulnerableCodeCannotBeControlledByAdversary"),
5124 ("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotInExecutePath", "vulnerableCodeNotInExecutePath"),
5125 ("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotPresent", "vulnerableCodeNotPresent"),
5126 ]),
5127 iri="https://spdx.org/rdf/3.0.1/terms/Security/justificationType",
5128 compact="security_justificationType",
5129 )
5130
5131
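In VEX terms, a not-affected statement pairs a machine-readable justification IRI with optional free-text impact wording; a hedged sketch (illustrative spdxId and creation_info, inherited Relationship fields omitted as in the CVSS example above):

import oe.spdx30

vex = oe.spdx30.security_VexNotAffectedVulnAssessmentRelationship(
    _id="http://example.com/vex-not-affected",  # illustrative spdxId
    creationInfo=creation_info,                 # assumed to exist
    security_justificationType="https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotPresent",
    security_impactStatement="The vulnerable code is not present in this build.",
)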
5132# Designates elements as products where the impact of a vulnerability is being
5133# investigated.
5134@register("https://spdx.org/rdf/3.0.1/terms/Security/VexUnderInvestigationVulnAssessmentRelationship", compact_type="security_VexUnderInvestigationVulnAssessmentRelationship", abstract=False)
5135class security_VexUnderInvestigationVulnAssessmentRelationship(security_VexVulnAssessmentRelationship):
5136 NODE_KIND = NodeKind.IRI
5137 ID_ALIAS = "spdxId"
5138 NAMED_INDIVIDUALS = {
5139 }
5140
5141
5142# Refers to any object that stores content on a computer.
5143@register("https://spdx.org/rdf/3.0.1/terms/Software/File", compact_type="software_File", abstract=False)
5144class software_File(software_SoftwareArtifact):
5145 NODE_KIND = NodeKind.IRI
5146 ID_ALIAS = "spdxId"
5147 NAMED_INDIVIDUALS = {
5148 }
5149
5150 @classmethod
5151 def _register_props(cls):
5152 super()._register_props()
5153 # Provides information about the content type of an Element or a Property.
5154 cls._add_property(
5155 "contentType",
5156 StringProp(pattern=r"^[^\/]+\/[^\/]+$",),
5157 iri="https://spdx.org/rdf/3.0.1/terms/Core/contentType",
5158 compact="contentType",
5159 )
5160 # Describes if a given file is a directory or non-directory kind of file.
5161 cls._add_property(
5162 "software_fileKind",
5163 EnumProp([
5164 ("https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/directory", "directory"),
5165 ("https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/file", "file"),
5166 ]),
5167 iri="https://spdx.org/rdf/3.0.1/terms/Software/fileKind",
5168 compact="software_fileKind",
5169 )
5170
5171
5172# Refers to any unit of content that can be associated with a distribution of
5173# software.
5174@register("https://spdx.org/rdf/3.0.1/terms/Software/Package", compact_type="software_Package", abstract=False)
5175class software_Package(software_SoftwareArtifact):
5176 NODE_KIND = NodeKind.IRI
5177 ID_ALIAS = "spdxId"
5178 NAMED_INDIVIDUALS = {
5179 }
5180
5181 @classmethod
5182 def _register_props(cls):
5183 super()._register_props()
5184 # Identifies the download Uniform Resource Identifier for the package at the time
5185 # that the document was created.
5186 cls._add_property(
5187 "software_downloadLocation",
5188 AnyURIProp(),
5189 iri="https://spdx.org/rdf/3.0.1/terms/Software/downloadLocation",
5190 compact="software_downloadLocation",
5191 )
5192 # A place for the SPDX document creator to record a website that serves as the
5193 # package's home page.
5194 cls._add_property(
5195 "software_homePage",
5196 AnyURIProp(),
5197 iri="https://spdx.org/rdf/3.0.1/terms/Software/homePage",
5198 compact="software_homePage",
5199 )
5200 # Provides a place for the SPDX data creator to record the package URL string
5201 # (in accordance with the Package URL specification) for a software Package.
5202 cls._add_property(
5203 "software_packageUrl",
5204 AnyURIProp(),
5205 iri="https://spdx.org/rdf/3.0.1/terms/Software/packageUrl",
5206 compact="software_packageUrl",
5207 )
5208 # Identify the version of a package.
5209 cls._add_property(
5210 "software_packageVersion",
5211 StringProp(),
5212 iri="https://spdx.org/rdf/3.0.1/terms/Software/packageVersion",
5213 compact="software_packageVersion",
5214 )
5215 # Records any relevant background information or additional comments
5216 # about the origin of the package.
5217 cls._add_property(
5218 "software_sourceInfo",
5219 StringProp(),
5220 iri="https://spdx.org/rdf/3.0.1/terms/Software/sourceInfo",
5221 compact="software_sourceInfo",
5222 )
5223
5224
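Instantiation follows the same keyword pattern used later by meta/lib/oe/spdx30_tasks.py (see add_download_files() and create_spdx() below); a sketch with illustrative values:

import oe.spdx30

pkg = oe.spdx30.software_Package(
    _id="http://example.com/packages/example",  # illustrative spdxId
    creationInfo=creation_info,                 # assumed to exist
    name="example",
    software_packageVersion="1.0",
    software_downloadLocation="https://example.com/example-1.0.tar.gz",
)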
5225# A collection of SPDX Elements describing a single package.
5226@register("https://spdx.org/rdf/3.0.1/terms/Software/Sbom", compact_type="software_Sbom", abstract=False)
5227class software_Sbom(Bom):
5228 NODE_KIND = NodeKind.IRI
5229 ID_ALIAS = "spdxId"
5230 NAMED_INDIVIDUALS = {
5231 }
5232
5233 @classmethod
5234 def _register_props(cls):
5235 super()._register_props()
5236 # Provides information about the type of an SBOM.
5237 cls._add_property(
5238 "software_sbomType",
5239 ListProp(EnumProp([
5240 ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/analyzed", "analyzed"),
5241 ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/build", "build"),
5242 ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/deployed", "deployed"),
5243 ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/design", "design"),
5244 ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/runtime", "runtime"),
5245 ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/source", "source"),
5246 ])),
5247 iri="https://spdx.org/rdf/3.0.1/terms/Software/sbomType",
5248 compact="software_sbomType",
5249 )
5250
5251
5252# Describes a certain part of a file.
5253@register("https://spdx.org/rdf/3.0.1/terms/Software/Snippet", compact_type="software_Snippet", abstract=False)
5254class software_Snippet(software_SoftwareArtifact):
5255 NODE_KIND = NodeKind.IRI
5256 ID_ALIAS = "spdxId"
5257 NAMED_INDIVIDUALS = {
5258 }
5259
5260 @classmethod
5261 def _register_props(cls):
5262 super()._register_props()
5263 # Defines the byte range in the original host file that the snippet information
5264 # applies to.
5265 cls._add_property(
5266 "software_byteRange",
5267 ObjectProp(PositiveIntegerRange, False),
5268 iri="https://spdx.org/rdf/3.0.1/terms/Software/byteRange",
5269 compact="software_byteRange",
5270 )
5271 # Defines the line range in the original host file that the snippet information
5272 # applies to.
5273 cls._add_property(
5274 "software_lineRange",
5275 ObjectProp(PositiveIntegerRange, False),
5276 iri="https://spdx.org/rdf/3.0.1/terms/Software/lineRange",
5277 compact="software_lineRange",
5278 )
5279 # Defines the original host file that the snippet information applies to.
5280 cls._add_property(
5281 "software_snippetFromFile",
5282 ObjectProp(software_File, True),
5283 iri="https://spdx.org/rdf/3.0.1/terms/Software/snippetFromFile",
5284 min_count=1,
5285 compact="software_snippetFromFile",
5286 )
5287
5288
5289# Specifies an AI package and its associated information.
5290@register("https://spdx.org/rdf/3.0.1/terms/AI/AIPackage", compact_type="ai_AIPackage", abstract=False)
5291class ai_AIPackage(software_Package):
5292 NODE_KIND = NodeKind.IRI
5293 ID_ALIAS = "spdxId"
5294 NAMED_INDIVIDUALS = {
5295 }
5296
5297 @classmethod
5298 def _register_props(cls):
5299 super()._register_props()
5300 # Indicates whether the system can perform a decision or action without human
5301 # involvement or guidance.
5302 cls._add_property(
5303 "ai_autonomyType",
5304 EnumProp([
5305 ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/no", "no"),
5306 ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/noAssertion", "noAssertion"),
5307 ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/yes", "yes"),
5308 ]),
5309 iri="https://spdx.org/rdf/3.0.1/terms/AI/autonomyType",
5310 compact="ai_autonomyType",
5311 )
5312 # Captures the domain in which the AI package can be used.
5313 cls._add_property(
5314 "ai_domain",
5315 ListProp(StringProp()),
5316 iri="https://spdx.org/rdf/3.0.1/terms/AI/domain",
5317 compact="ai_domain",
5318 )
5319 # Indicates the amount of energy consumption incurred by an AI model.
5320 cls._add_property(
5321 "ai_energyConsumption",
5322 ObjectProp(ai_EnergyConsumption, False),
5323 iri="https://spdx.org/rdf/3.0.1/terms/AI/energyConsumption",
5324 compact="ai_energyConsumption",
5325 )
5326 # Records a hyperparameter used to build the AI model contained in the AI
5327 # package.
5328 cls._add_property(
5329 "ai_hyperparameter",
5330 ListProp(ObjectProp(DictionaryEntry, False)),
5331 iri="https://spdx.org/rdf/3.0.1/terms/AI/hyperparameter",
5332 compact="ai_hyperparameter",
5333 )
5334 # Provides relevant information about the AI software, not including the model
5335 # description.
5336 cls._add_property(
5337 "ai_informationAboutApplication",
5338 StringProp(),
5339 iri="https://spdx.org/rdf/3.0.1/terms/AI/informationAboutApplication",
5340 compact="ai_informationAboutApplication",
5341 )
5342 # Describes relevant information about different steps of the training process.
5343 cls._add_property(
5344 "ai_informationAboutTraining",
5345 StringProp(),
5346 iri="https://spdx.org/rdf/3.0.1/terms/AI/informationAboutTraining",
5347 compact="ai_informationAboutTraining",
5348 )
5349 # Captures a limitation of the AI software.
5350 cls._add_property(
5351 "ai_limitation",
5352 StringProp(),
5353 iri="https://spdx.org/rdf/3.0.1/terms/AI/limitation",
5354 compact="ai_limitation",
5355 )
5356 # Records the measurement of prediction quality of the AI model.
5357 cls._add_property(
5358 "ai_metric",
5359 ListProp(ObjectProp(DictionaryEntry, False)),
5360 iri="https://spdx.org/rdf/3.0.1/terms/AI/metric",
5361 compact="ai_metric",
5362 )
5363 # Captures the threshold that was used for computation of a metric described in
5364 # the metric field.
5365 cls._add_property(
5366 "ai_metricDecisionThreshold",
5367 ListProp(ObjectProp(DictionaryEntry, False)),
5368 iri="https://spdx.org/rdf/3.0.1/terms/AI/metricDecisionThreshold",
5369 compact="ai_metricDecisionThreshold",
5370 )
5371 # Describes all the preprocessing steps applied to the training data before the
5372 # model training.
5373 cls._add_property(
5374 "ai_modelDataPreprocessing",
5375 ListProp(StringProp()),
5376 iri="https://spdx.org/rdf/3.0.1/terms/AI/modelDataPreprocessing",
5377 compact="ai_modelDataPreprocessing",
5378 )
5379 # Describes methods that can be used to explain the results from the AI model.
5380 cls._add_property(
5381 "ai_modelExplainability",
5382 ListProp(StringProp()),
5383 iri="https://spdx.org/rdf/3.0.1/terms/AI/modelExplainability",
5384 compact="ai_modelExplainability",
5385 )
5386 # Records the results of general safety risk assessment of the AI system.
5387 cls._add_property(
5388 "ai_safetyRiskAssessment",
5389 EnumProp([
5390 ("https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/high", "high"),
5391 ("https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/low", "low"),
5392 ("https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/medium", "medium"),
5393 ("https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/serious", "serious"),
5394 ]),
5395 iri="https://spdx.org/rdf/3.0.1/terms/AI/safetyRiskAssessment",
5396 compact="ai_safetyRiskAssessment",
5397 )
5398 # Captures a standard that is being complied with.
5399 cls._add_property(
5400 "ai_standardCompliance",
5401 ListProp(StringProp()),
5402 iri="https://spdx.org/rdf/3.0.1/terms/AI/standardCompliance",
5403 compact="ai_standardCompliance",
5404 )
5405 # Records the type of the model used in the AI software.
5406 cls._add_property(
5407 "ai_typeOfModel",
5408 ListProp(StringProp()),
5409 iri="https://spdx.org/rdf/3.0.1/terms/AI/typeOfModel",
5410 compact="ai_typeOfModel",
5411 )
5412 # Records if sensitive personal information is used during model training or
5413 # could be used during the inference.
5414 cls._add_property(
5415 "ai_useSensitivePersonalInformation",
5416 EnumProp([
5417 ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/no", "no"),
5418 ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/noAssertion", "noAssertion"),
5419 ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/yes", "yes"),
5420 ]),
5421 iri="https://spdx.org/rdf/3.0.1/terms/AI/useSensitivePersonalInformation",
5422 compact="ai_useSensitivePersonalInformation",
5423 )
5424
5425
5426# Specifies a data package and its associated information.
5427@register("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetPackage", compact_type="dataset_DatasetPackage", abstract=False)
5428class dataset_DatasetPackage(software_Package):
5429 NODE_KIND = NodeKind.IRI
5430 ID_ALIAS = "spdxId"
5431 NAMED_INDIVIDUALS = {
5432 }
5433
5434 @classmethod
5435 def _register_props(cls):
5436 super()._register_props()
5437 # Describes the anonymization methods used.
5438 cls._add_property(
5439 "dataset_anonymizationMethodUsed",
5440 ListProp(StringProp()),
5441 iri="https://spdx.org/rdf/3.0.1/terms/Dataset/anonymizationMethodUsed",
5442 compact="dataset_anonymizationMethodUsed",
5443 )
5444 # Describes the confidentiality level of the data points contained in the dataset.
5445 cls._add_property(
5446 "dataset_confidentialityLevel",
5447 EnumProp([
5448 ("https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/amber", "amber"),
5449 ("https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/clear", "clear"),
5450 ("https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/green", "green"),
5451 ("https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/red", "red"),
5452 ]),
5453 iri="https://spdx.org/rdf/3.0.1/terms/Dataset/confidentialityLevel",
5454 compact="dataset_confidentialityLevel",
5455 )
5456 # Describes how the dataset was collected.
5457 cls._add_property(
5458 "dataset_dataCollectionProcess",
5459 StringProp(),
5460 iri="https://spdx.org/rdf/3.0.1/terms/Dataset/dataCollectionProcess",
5461 compact="dataset_dataCollectionProcess",
5462 )
5463 # Describes the preprocessing steps that were applied to the raw data to create the given dataset.
5464 cls._add_property(
5465 "dataset_dataPreprocessing",
5466 ListProp(StringProp()),
5467 iri="https://spdx.org/rdf/3.0.1/terms/Dataset/dataPreprocessing",
5468 compact="dataset_dataPreprocessing",
5469 )
5470 # The field describes the availability of a dataset.
5471 cls._add_property(
5472 "dataset_datasetAvailability",
5473 EnumProp([
5474 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/clickthrough", "clickthrough"),
5475 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/directDownload", "directDownload"),
5476 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/query", "query"),
5477 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/registration", "registration"),
5478 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/scrapingScript", "scrapingScript"),
5479 ]),
5480 iri="https://spdx.org/rdf/3.0.1/terms/Dataset/datasetAvailability",
5481 compact="dataset_datasetAvailability",
5482 )
5483 # Describes potentially noisy elements of the dataset.
5484 cls._add_property(
5485 "dataset_datasetNoise",
5486 StringProp(),
5487 iri="https://spdx.org/rdf/3.0.1/terms/Dataset/datasetNoise",
5488 compact="dataset_datasetNoise",
5489 )
5490 # Captures the size of the dataset.
5491 cls._add_property(
5492 "dataset_datasetSize",
5493 NonNegativeIntegerProp(),
5494 iri="https://spdx.org/rdf/3.0.1/terms/Dataset/datasetSize",
5495 compact="dataset_datasetSize",
5496 )
5497 # Describes the type of the given dataset.
5498 cls._add_property(
5499 "dataset_datasetType",
5500 ListProp(EnumProp([
5501 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/audio", "audio"),
5502 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/categorical", "categorical"),
5503 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/graph", "graph"),
5504 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/image", "image"),
5505 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/noAssertion", "noAssertion"),
5506 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/numeric", "numeric"),
5507 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/other", "other"),
5508 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/sensor", "sensor"),
5509 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/structured", "structured"),
5510 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/syntactic", "syntactic"),
5511 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/text", "text"),
5512 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timeseries", "timeseries"),
5513 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timestamp", "timestamp"),
5514 ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/video", "video"),
5515 ])),
5516 iri="https://spdx.org/rdf/3.0.1/terms/Dataset/datasetType",
5517 min_count=1,
5518 compact="dataset_datasetType",
5519 )
5520 # Describes a mechanism to update the dataset.
5521 cls._add_property(
5522 "dataset_datasetUpdateMechanism",
5523 StringProp(),
5524 iri="https://spdx.org/rdf/3.0.1/terms/Dataset/datasetUpdateMechanism",
5525 compact="dataset_datasetUpdateMechanism",
5526 )
5527 # Describes if any sensitive personal information is present in the dataset.
5528 cls._add_property(
5529 "dataset_hasSensitivePersonalInformation",
5530 EnumProp([
5531 ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/no", "no"),
5532 ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/noAssertion", "noAssertion"),
5533 ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/yes", "yes"),
5534 ]),
5535 iri="https://spdx.org/rdf/3.0.1/terms/Dataset/hasSensitivePersonalInformation",
5536 compact="dataset_hasSensitivePersonalInformation",
5537 )
5538 # Describes what the given dataset should be used for.
5539 cls._add_property(
5540 "dataset_intendedUse",
5541 StringProp(),
5542 iri="https://spdx.org/rdf/3.0.1/terms/Dataset/intendedUse",
5543 compact="dataset_intendedUse",
5544 )
5545 # Records the biases that the dataset is known to encompass.
5546 cls._add_property(
5547 "dataset_knownBias",
5548 ListProp(StringProp()),
5549 iri="https://spdx.org/rdf/3.0.1/terms/Dataset/knownBias",
5550 compact="dataset_knownBias",
5551 )
5552 # Describes a sensor used for collecting the data.
5553 cls._add_property(
5554 "dataset_sensor",
5555 ListProp(ObjectProp(DictionaryEntry, False)),
5556 iri="https://spdx.org/rdf/3.0.1/terms/Dataset/sensor",
5557 compact="dataset_sensor",
5558 )
5559
5560
5561"""Format Guard"""
5562# fmt: on
5563
5564
5565def main():
5566 import argparse
5567 from pathlib import Path
5568
5569 parser = argparse.ArgumentParser(description="Python SHACL model test")
5570 parser.add_argument("infile", type=Path, help="Input file")
5571 parser.add_argument("--print", action="store_true", help="Print object tree")
5572 parser.add_argument("--outfile", type=Path, help="Output file")
5573
5574 args = parser.parse_args()
5575
5576 objectset = SHACLObjectSet()
5577 with args.infile.open("r") as f:
5578 d = JSONLDDeserializer()
5579 d.read(f, objectset)
5580
5581 if args.print:
5582 print_tree(objectset.objects)
5583
5584 if args.outfile:
5585 with args.outfile.open("wb") as f:
5586 s = JSONLDSerializer()
5587 s.write(objectset, f)
5588
5589 return 0
5590
5591
5592if __name__ == "__main__":
5593 sys.exit(main())
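The round trip that main() wires up for the command line can also be driven programmatically with the classes defined earlier in this module (file names are illustrative):

from pathlib import Path

objectset = SHACLObjectSet()
with Path("input.spdx.json").open("r") as f:
    JSONLDDeserializer().read(f, objectset)

# ... inspect or modify objectset.objects here ...

with Path("output.spdx.json").open("wb") as f:
    JSONLDSerializer().write(objectset, f)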
diff --git a/meta/lib/oe/spdx30_tasks.py b/meta/lib/oe/spdx30_tasks.py
new file mode 100644
index 0000000000..c352dab152
--- /dev/null
+++ b/meta/lib/oe/spdx30_tasks.py
@@ -0,0 +1,1367 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import json
8import oe.cve_check
9import oe.packagedata
10import oe.patch
11import oe.sbom30
12import oe.spdx30
13import oe.spdx_common
14import oe.sdk
15import os
16
17from contextlib import contextmanager
18from datetime import datetime, timezone
19from pathlib import Path
20
21
22def walk_error(err):
23 bb.error(f"ERROR walking {err.filename}: {err}")
24
25
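os.walk() swallows per-directory listing errors unless an onerror callback is supplied, which is why the walkers below pass this helper. A standalone illustration of the mechanism (print() stands in for bb.error()):

import os

def report(err):
    # os.walk() hands over the OSError it would otherwise ignore
    print(f"ERROR walking {err.filename}: {err}")

for subdir, dirs, files in os.walk("/no/such/path", onerror=report):
    pass  # never reached here; report() is called instead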
26def set_timestamp_now(d, o, prop):
27 if d.getVar("SPDX_INCLUDE_TIMESTAMPS") == "1":
28 setattr(o, prop, datetime.now(timezone.utc))
29 else:
30        # Doing this helps to validate that the property actually exists, and
31        # also that it is not mandatory.
32 delattr(o, prop)
33
34
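The deletion branch is what keeps the output reproducible: when timestamps are excluded, the property is removed outright rather than pinned to a fixed value, and delattr doubles as the existence/optionality check noted above. A usage sketch (spdx_package as created later in create_spdx()):

# SPDX_INCLUDE_TIMESTAMPS == "1": builtTime becomes the current UTC time.
# Otherwise builtTime is deleted, so identical inputs give identical output.
set_timestamp_now(d, spdx_package, "builtTime")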
35def add_license_expression(d, objset, license_expression, license_data):
36 simple_license_text = {}
37 license_text_map = {}
38 license_ref_idx = 0
39
40 def add_license_text(name):
41 nonlocal objset
42 nonlocal simple_license_text
43
44 if name in simple_license_text:
45 return simple_license_text[name]
46
47 lic = objset.find_filter(
48 oe.spdx30.simplelicensing_SimpleLicensingText,
49 name=name,
50 )
51
52 if lic is not None:
53 simple_license_text[name] = lic
54 return lic
55
56 lic = objset.add(
57 oe.spdx30.simplelicensing_SimpleLicensingText(
58 _id=objset.new_spdxid("license-text", name),
59 creationInfo=objset.doc.creationInfo,
60 name=name,
61 )
62 )
63 objset.set_element_alias(lic)
64 simple_license_text[name] = lic
65
66 if name == "PD":
67 lic.simplelicensing_licenseText = "Software released to the public domain"
68 return lic
69
70        # Search for the license in COMMON_LICENSE_DIR and LICENSE_PATH
71 for directory in [d.getVar("COMMON_LICENSE_DIR")] + (
72 d.getVar("LICENSE_PATH") or ""
73 ).split():
74 try:
75 with (Path(directory) / name).open(errors="replace") as f:
76 lic.simplelicensing_licenseText = f.read()
77 return lic
78
79 except FileNotFoundError:
80 pass
81
82 # If it's not SPDX or PD, then NO_GENERIC_LICENSE must be set
83 filename = d.getVarFlag("NO_GENERIC_LICENSE", name)
84 if filename:
85 filename = d.expand("${S}/" + filename)
86 with open(filename, errors="replace") as f:
87 lic.simplelicensing_licenseText = f.read()
88 return lic
89 else:
90 bb.fatal("Cannot find any text for license %s" % name)
91
92 def convert(l):
93 nonlocal license_text_map
94 nonlocal license_ref_idx
95
96 if l == "(" or l == ")":
97 return l
98
99 if l == "&":
100 return "AND"
101
102 if l == "|":
103 return "OR"
104
105 if l == "CLOSED":
106 return "NONE"
107
108 spdx_license = d.getVarFlag("SPDXLICENSEMAP", l) or l
109 if spdx_license in license_data["licenses"]:
110 return spdx_license
111
112 spdx_license = "LicenseRef-" + l
113 if spdx_license not in license_text_map:
114 license_text_map[spdx_license] = oe.sbom30.get_element_link_id(
115 add_license_text(l)
116 )
117
118 return spdx_license
119
120 lic_split = (
121 license_expression.replace("(", " ( ")
122 .replace(")", " ) ")
123 .replace("|", " | ")
124 .replace("&", " & ")
125 .split()
126 )
127 spdx_license_expression = " ".join(convert(l) for l in lic_split)
128
129 o = objset.new_license_expression(
130 spdx_license_expression, license_data, license_text_map
131 )
132 objset.set_element_alias(o)
133 return o
134
135
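Before conversion, the expression is padded and split so parentheses and operators become standalone tokens; a self-contained sketch of just the operator mapping (the SPDXLICENSEMAP and LicenseRef- handling above is omitted):

def to_spdx_operators(expression):
    # Same replace/split tokenization as add_license_expression()
    tokens = (
        expression.replace("(", " ( ")
        .replace(")", " ) ")
        .replace("|", " | ")
        .replace("&", " & ")
        .split()
    )
    mapping = {"&": "AND", "|": "OR", "CLOSED": "NONE"}
    return " ".join(mapping.get(t, t) for t in tokens)

print(to_spdx_operators("GPL-2.0-only & (MIT | CLOSED)"))
# -> GPL-2.0-only AND ( MIT OR NONE )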
136def add_package_files(
137 d,
138 objset,
139 topdir,
140 get_spdxid,
141 get_purposes,
142 license_data=None,
143 *,
144 archive=None,
145 ignore_dirs=[],
146 ignore_top_level_dirs=[],
147):
148 source_date_epoch = d.getVar("SOURCE_DATE_EPOCH")
149 if source_date_epoch:
150 source_date_epoch = int(source_date_epoch)
151
152 spdx_files = set()
153
154 file_counter = 1
155 if not os.path.exists(topdir):
156 bb.note(f"Skip {topdir}")
157 return spdx_files
158
159 check_compiled_sources = d.getVar("SPDX_INCLUDE_COMPILED_SOURCES") == "1"
160 if check_compiled_sources:
161 compiled_sources, types = oe.spdx_common.get_compiled_sources(d)
162 bb.debug(1, f"Total compiled files: {len(compiled_sources)}")
163
164 for subdir, dirs, files in os.walk(topdir, onerror=walk_error):
165 dirs[:] = [d for d in dirs if d not in ignore_dirs]
166 if subdir == str(topdir):
167 dirs[:] = [d for d in dirs if d not in ignore_top_level_dirs]
168
169 dirs.sort()
170 files.sort()
171 for file in files:
172 filepath = Path(subdir) / file
173 if filepath.is_symlink() or not filepath.is_file():
174 continue
175
176 filename = str(filepath.relative_to(topdir))
177 file_purposes = get_purposes(filepath)
178
179            # Skip files that are not among the compiled sources
180 if check_compiled_sources:
181 if not oe.spdx_common.is_compiled_source(filename, compiled_sources, types):
182 continue
183
184 spdx_file = objset.new_file(
185 get_spdxid(file_counter),
186 filename,
187 filepath,
188 purposes=file_purposes,
189 )
190 spdx_files.add(spdx_file)
191
192 if (
193 oe.spdx30.software_SoftwarePurpose.source in file_purposes
194 and license_data is not None
195 ):
196 objset.scan_declared_licenses(spdx_file, filepath, license_data)
197
198 if archive is not None:
199 with filepath.open("rb") as f:
200 info = archive.gettarinfo(fileobj=f)
201 info.name = filename
202 info.uid = 0
203 info.gid = 0
204 info.uname = "root"
205 info.gname = "root"
206
207 if source_date_epoch is not None and info.mtime > source_date_epoch:
208 info.mtime = source_date_epoch
209
210 archive.addfile(info, f)
211
212 file_counter += 1
213
214 bb.debug(1, "Added %d files to %s" % (len(spdx_files), objset.doc._id))
215
216 return spdx_files
217
218
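The slice assignment dirs[:] = ... above is load-bearing: os.walk() keeps iterating the very list object it yielded, so pruning it in place stops the walk from descending into ignored directories. The same idiom in isolation:

import os

ignore_dirs = [".git"]
for subdir, dirs, files in os.walk("."):
    # Mutating dirs in place (not rebinding the name) prunes the traversal
    dirs[:] = sorted(d for d in dirs if d not in ignore_dirs)
    for name in sorted(files):
        print(os.path.join(subdir, name))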
219def get_package_sources_from_debug(
220 d, package, package_files, sources, source_hash_cache
221):
222 def file_path_match(file_path, pkg_file):
223 if file_path.lstrip("/") == pkg_file.name.lstrip("/"):
224 return True
225
226 for e in pkg_file.extension:
227 if isinstance(e, oe.sbom30.OEFileNameAliasExtension):
228 for a in e.aliases:
229 if file_path.lstrip("/") == a.lstrip("/"):
230 return True
231
232 return False
233
234 debug_search_paths = [
235 Path(d.getVar("SPDXWORK")),
236 Path(d.getVar("PKGD")),
237 Path(d.getVar("STAGING_DIR_TARGET")),
238 Path(d.getVar("STAGING_DIR_NATIVE")),
239 Path(d.getVar("STAGING_KERNEL_DIR")),
240 ]
241
242 pkg_data = oe.packagedata.read_subpkgdata_extended(package, d)
243
244 if pkg_data is None:
245 return
246
247 dep_source_files = set()
248
249 for file_path, file_data in pkg_data["files_info"].items():
250        if "debugsrc" not in file_data:
251 continue
252
253 if not any(file_path_match(file_path, pkg_file) for pkg_file in package_files):
254 bb.fatal(
255 "No package file found for %s in %s; SPDX found: %s"
256 % (str(file_path), package, " ".join(p.name for p in package_files))
257 )
258 continue
259
260 for debugsrc in file_data["debugsrc"]:
261 for search in debug_search_paths:
262 if debugsrc.startswith("/usr/src/kernel"):
263 debugsrc_path = search / debugsrc.replace("/usr/src/kernel/", "")
264 else:
265 debugsrc_path = search / debugsrc.lstrip("/")
266
267 if debugsrc_path in source_hash_cache:
268 file_sha256 = source_hash_cache[debugsrc_path]
269 if file_sha256 is None:
270 continue
271 else:
272 # We can only hash files below, skip directories, links, etc.
273 if not debugsrc_path.is_file():
274 source_hash_cache[debugsrc_path] = None
275 continue
276
277 file_sha256 = bb.utils.sha256_file(debugsrc_path)
278 source_hash_cache[debugsrc_path] = file_sha256
279
280 if file_sha256 in sources:
281 source_file = sources[file_sha256]
282 dep_source_files.add(source_file)
283 else:
284 bb.debug(
285 1,
286 "Debug source %s with SHA256 %s not found in any dependency"
287 % (str(debugsrc_path), file_sha256),
288 )
289 break
290 else:
291 bb.debug(1, "Debug source %s not found" % debugsrc)
292
293 return dep_source_files
294
295
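source_hash_cache memoizes at most one sha256 per candidate path, caching None for paths that cannot be hashed so negative lookups are remembered too. The same pattern stands alone (hashlib.sha256 over read_bytes() replaces bb.utils.sha256_file for this sketch; large files would want chunked hashing):

import hashlib
from pathlib import Path

def cached_sha256(path: Path, cache: dict):
    if path in cache:
        return cache[path]  # may be None for directories, links, etc.
    if not path.is_file():
        cache[path] = None  # remember the negative result as well
        return None
    digest = hashlib.sha256(path.read_bytes()).hexdigest()
    cache[path] = digest
    return digest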
296def collect_dep_objsets(d, build):
297 deps = oe.spdx_common.get_spdx_deps(d)
298
299 dep_objsets = []
300 dep_builds = set()
301
302 dep_build_spdxids = set()
303 for dep in deps:
304 bb.debug(1, "Fetching SPDX for dependency %s" % (dep.pn))
305 dep_build, dep_objset = oe.sbom30.find_root_obj_in_jsonld(
306 d, "recipes", "recipe-" + dep.pn, oe.spdx30.build_Build
307 )
308 # If the dependency is part of the taskhash, return it to be linked
309 # against. Otherwise, it cannot be linked against because this recipe
310        # will not be rebuilt if the dependency changes.
311 if dep.in_taskhash:
312 dep_objsets.append(dep_objset)
313
314 # The build _can_ be linked against (by alias)
315 dep_builds.add(dep_build)
316
317 return dep_objsets, dep_builds
318
319
320def index_sources_by_hash(sources, dest):
321 for s in sources:
322 if not isinstance(s, oe.spdx30.software_File):
323 continue
324
325 if s.software_primaryPurpose != oe.spdx30.software_SoftwarePurpose.source:
326 continue
327
328 for v in s.verifiedUsing:
329 if v.algorithm == oe.spdx30.HashAlgorithm.sha256:
330                if v.hashValue not in dest:
331 dest[v.hashValue] = s
332 break
333 else:
334 bb.fatal(f"No SHA256 found for {s.name}")
335
336
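The loop above relies on Python's for/else: the else suite runs only when the loop completes without break, i.e. when none of the verifiedUsing hashes was a sha256. Reduced to its essentials:

algorithms = ["sha1", "md5"]
for algo in algorithms:
    if algo == "sha256":
        break
else:
    # reached only because no iteration hit `break`
    print("no sha256 available")  # the bb.fatal() case above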
337def collect_dep_sources(dep_objsets, dest):
338 for objset in dep_objsets:
339        # Don't collect sources from native recipes, as they would
340        # also match non-native sources.
341 if objset.is_native():
342 continue
343
344 bb.debug(1, "Fetching Sources for dependency %s" % (objset.doc.name))
345
346 dep_build = objset.find_root(oe.spdx30.build_Build)
347 if not dep_build:
348 bb.fatal("Unable to find a build")
349
350 for e in objset.foreach_type(oe.spdx30.Relationship):
351 if dep_build is not e.from_:
352 continue
353
354 if e.relationshipType != oe.spdx30.RelationshipType.hasInput:
355 continue
356
357 index_sources_by_hash(e.to, dest)
358
359
360def add_download_files(d, objset):
361 inputs = set()
362
363 urls = d.getVar("SRC_URI").split()
364 fetch = bb.fetch2.Fetch(urls, d)
365
366 for download_idx, src_uri in enumerate(urls):
367 fd = fetch.ud[src_uri]
368
369 file_name = os.path.basename(fetch.localpath(src_uri))
370 if oe.patch.patch_path(src_uri, fetch, "", expand=False):
371 primary_purpose = oe.spdx30.software_SoftwarePurpose.patch
372 else:
373 primary_purpose = oe.spdx30.software_SoftwarePurpose.source
374
375 if fd.type == "file":
376 if os.path.isdir(fd.localpath):
377 walk_idx = 1
378 for root, dirs, files in os.walk(fd.localpath, onerror=walk_error):
379 dirs.sort()
380 files.sort()
381 for f in files:
382 f_path = os.path.join(root, f)
383 if os.path.islink(f_path):
384 # TODO: SPDX doesn't support symlinks yet
385 continue
386
387 file = objset.new_file(
388 objset.new_spdxid(
389 "source", str(download_idx + 1), str(walk_idx)
390 ),
391 os.path.join(
392 file_name, os.path.relpath(f_path, fd.localpath)
393 ),
394 f_path,
395 purposes=[primary_purpose],
396 )
397
398 inputs.add(file)
399 walk_idx += 1
400
401 else:
402 file = objset.new_file(
403 objset.new_spdxid("source", str(download_idx + 1)),
404 file_name,
405 fd.localpath,
406 purposes=[primary_purpose],
407 )
408 inputs.add(file)
409
410 else:
411 dl = objset.add(
412 oe.spdx30.software_Package(
413 _id=objset.new_spdxid("source", str(download_idx + 1)),
414 creationInfo=objset.doc.creationInfo,
415 name=file_name,
416 software_primaryPurpose=primary_purpose,
417 software_downloadLocation=oe.spdx_common.fetch_data_to_uri(
418 fd, fd.name
419 ),
420 )
421 )
422
423 if fd.method.supports_checksum(fd):
424 # TODO Need something better than hard coding this
425 for checksum_id in ["sha256", "sha1"]:
426 expected_checksum = getattr(
427 fd, "%s_expected" % checksum_id, None
428 )
429 if expected_checksum is None:
430 continue
431
432 dl.verifiedUsing.append(
433 oe.spdx30.Hash(
434 algorithm=getattr(oe.spdx30.HashAlgorithm, checksum_id),
435 hashValue=expected_checksum,
436 )
437 )
438
439 inputs.add(dl)
440
441 return inputs
442
443
444def set_purposes(d, element, *var_names, force_purposes=[]):
445 purposes = force_purposes[:]
446
447 for var_name in var_names:
448 val = d.getVar(var_name)
449 if val:
450 purposes.extend(val.split())
451 break
452
453 if not purposes:
454 bb.warn("No SPDX purposes found in %s" % " ".join(var_names))
455 return
456
457 element.software_primaryPurpose = getattr(
458 oe.spdx30.software_SoftwarePurpose, purposes[0]
459 )
460 element.software_additionalPurpose = [
461 getattr(oe.spdx30.software_SoftwarePurpose, p) for p in purposes[1:]
462 ]
463
464
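The first purpose named wins the primaryPurpose slot and any remaining ones become additional purposes; later variable names act only as fallbacks when earlier ones are unset. A usage sketch (the variable name and value are illustrative):

# Assuming d.getVar("SPDX_PACKAGE_PURPOSE") == "library archive":
set_purposes(d, element, "SPDX_PACKAGE_PURPOSE")
# element.software_primaryPurpose    -> software_SoftwarePurpose.library
# element.software_additionalPurpose -> [software_SoftwarePurpose.archive]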
465def create_spdx(d):
466 def set_var_field(var, obj, name, package=None):
467 val = None
468 if package:
469 val = d.getVar("%s:%s" % (var, package))
470
471 if not val:
472 val = d.getVar(var)
473
474 if val:
475 setattr(obj, name, val)
476
477 license_data = oe.spdx_common.load_spdx_license_data(d)
478
479 deploydir = Path(d.getVar("SPDXDEPLOY"))
480 deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
481 spdx_workdir = Path(d.getVar("SPDXWORK"))
482 include_sources = d.getVar("SPDX_INCLUDE_SOURCES") == "1"
483 pkg_arch = d.getVar("SSTATE_PKGARCH")
484 is_native = bb.data.inherits_class("native", d) or bb.data.inherits_class(
485 "cross", d
486 )
487 include_vex = d.getVar("SPDX_INCLUDE_VEX")
488    if include_vex not in ("none", "current", "all"):
489 bb.fatal("SPDX_INCLUDE_VEX must be one of 'none', 'current', 'all'")
490
491 build_objset = oe.sbom30.ObjectSet.new_objset(d, "recipe-" + d.getVar("PN"))
492
493 build = build_objset.new_task_build("recipe", "recipe")
494 build_objset.set_element_alias(build)
495
496 build_objset.doc.rootElement.append(build)
497
498 build_objset.set_is_native(is_native)
499
500 for var in (d.getVar("SPDX_CUSTOM_ANNOTATION_VARS") or "").split():
501 new_annotation(
502 d,
503 build_objset,
504 build,
505 "%s=%s" % (var, d.getVar(var)),
506 oe.spdx30.AnnotationType.other,
507 )
508
509 build_inputs = set()
510
511 # Add CVEs
512 cve_by_status = {}
513 if include_vex != "none":
514 patched_cves = oe.cve_check.get_patched_cves(d)
515 for cve, patched_cve in patched_cves.items():
516 decoded_status = {
517 "mapping": patched_cve["abbrev-status"],
518 "detail": patched_cve["status"],
519 "description": patched_cve.get("justification", None)
520 }
521
522 # If this CVE is fixed upstream, skip it unless all CVEs are
523 # specified.
524 if (
525 include_vex != "all"
526 and "detail" in decoded_status
527 and decoded_status["detail"]
528 in (
529 "fixed-version",
530 "cpe-stable-backport",
531 )
532 ):
533 bb.debug(1, "Skipping %s since it is already fixed upstream" % cve)
534 continue
535
536 spdx_cve = build_objset.new_cve_vuln(cve)
537 build_objset.set_element_alias(spdx_cve)
538
539 cve_by_status.setdefault(decoded_status["mapping"], {})[cve] = (
540 spdx_cve,
541 decoded_status["detail"],
542 decoded_status["description"],
543 )
544
545 cpe_ids = oe.cve_check.get_cpe_ids(d.getVar("CVE_PRODUCT"), d.getVar("CVE_VERSION"))
546
547 source_files = add_download_files(d, build_objset)
548 build_inputs |= source_files
549
550 recipe_spdx_license = add_license_expression(
551 d, build_objset, d.getVar("LICENSE"), license_data
552 )
553 build_objset.new_relationship(
554 source_files,
555 oe.spdx30.RelationshipType.hasDeclaredLicense,
556 [oe.sbom30.get_element_link_id(recipe_spdx_license)],
557 )
558
559 dep_sources = {}
560 if oe.spdx_common.process_sources(d) and include_sources:
561 bb.debug(1, "Adding source files to SPDX")
562 oe.spdx_common.get_patched_src(d)
563
564 files = add_package_files(
565 d,
566 build_objset,
567 spdx_workdir,
568 lambda file_counter: build_objset.new_spdxid(
569 "sourcefile", str(file_counter)
570 ),
571 lambda filepath: [oe.spdx30.software_SoftwarePurpose.source],
572 license_data,
573 ignore_dirs=[".git"],
574 ignore_top_level_dirs=["temp"],
575 archive=None,
576 )
577 build_inputs |= files
578 index_sources_by_hash(files, dep_sources)
579
580 dep_objsets, dep_builds = collect_dep_objsets(d, build)
581 if dep_builds:
582 build_objset.new_scoped_relationship(
583 [build],
584 oe.spdx30.RelationshipType.dependsOn,
585 oe.spdx30.LifecycleScopeType.build,
586 sorted(oe.sbom30.get_element_link_id(b) for b in dep_builds),
587 )
588
589 debug_source_ids = set()
590 source_hash_cache = {}
591
592 # Write out the package SPDX data now. It is not complete as we cannot
593 # write the runtime data, so write it to a staging area and a later task
594 # will write out the final collection
595
596 # TODO: Handle native recipe output
597 if not is_native:
598 bb.debug(1, "Collecting dependency source files")
599 collect_dep_sources(dep_objsets, dep_sources)
600
601 bb.build.exec_func("read_subpackage_metadata", d)
602
603 pkgdest = Path(d.getVar("PKGDEST"))
604 for package in d.getVar("PACKAGES").split():
605 if not oe.packagedata.packaged(package, d):
606 continue
607
608 pkg_name = d.getVar("PKG:%s" % package) or package
609
610 bb.debug(1, "Creating SPDX for package %s" % pkg_name)
611
612 pkg_objset = oe.sbom30.ObjectSet.new_objset(d, "package-" + pkg_name)
613
614 spdx_package = pkg_objset.add_root(
615 oe.spdx30.software_Package(
616 _id=pkg_objset.new_spdxid("package", pkg_name),
617 creationInfo=pkg_objset.doc.creationInfo,
618 name=pkg_name,
619 software_packageVersion=d.getVar("SPDX_PACKAGE_VERSION"),
620 )
621 )
622 set_timestamp_now(d, spdx_package, "builtTime")
623
624 set_purposes(
625 d,
626 spdx_package,
627 "SPDX_PACKAGE_ADDITIONAL_PURPOSE:%s" % package,
628 "SPDX_PACKAGE_ADDITIONAL_PURPOSE",
629 force_purposes=["install"],
630 )
631
632 supplier = build_objset.new_agent("SPDX_PACKAGE_SUPPLIER")
633 if supplier is not None:
634 spdx_package.suppliedBy = (
635 supplier if isinstance(supplier, str) else supplier._id
636 )
637
638 set_var_field(
639 "HOMEPAGE", spdx_package, "software_homePage", package=package
640 )
641 set_var_field("SUMMARY", spdx_package, "summary", package=package)
642 set_var_field("DESCRIPTION", spdx_package, "description", package=package)
643
644 if d.getVar("SPDX_PACKAGE_URL:%s" % package) or d.getVar("SPDX_PACKAGE_URL"):
645 set_var_field(
646 "SPDX_PACKAGE_URL",
647 spdx_package,
648 "software_packageUrl",
649 package=package
650 )
651
652 pkg_objset.new_scoped_relationship(
653 [oe.sbom30.get_element_link_id(build)],
654 oe.spdx30.RelationshipType.hasOutput,
655 oe.spdx30.LifecycleScopeType.build,
656 [spdx_package],
657 )
658
659 for cpe_id in cpe_ids:
660 spdx_package.externalIdentifier.append(
661 oe.spdx30.ExternalIdentifier(
662 externalIdentifierType=oe.spdx30.ExternalIdentifierType.cpe23,
663 identifier=cpe_id,
664 )
665 )
666
667 # TODO: Generate a file for each actual IPK/DEB/RPM/TGZ file
668 # generated and link it to the package
669 # spdx_package_file = pkg_objset.add(oe.spdx30.software_File(
670 # _id=pkg_objset.new_spdxid("distribution", pkg_name),
671 # creationInfo=pkg_objset.doc.creationInfo,
672 # name=pkg_name,
673 # software_primaryPurpose=spdx_package.software_primaryPurpose,
674 # software_additionalPurpose=spdx_package.software_additionalPurpose,
675 # ))
676 # set_timestamp_now(d, spdx_package_file, "builtTime")
677
678 ## TODO add hashes
679 # pkg_objset.new_relationship(
680 # [spdx_package],
681 # oe.spdx30.RelationshipType.hasDistributionArtifact,
682 # [spdx_package_file],
683 # )
684
685 # NOTE: licenses live in the recipe collection and are referenced
686 # by ID in the package collection(s). This helps reduce duplication
687 # (since a lot of packages will have the same license), and also
688 # prevents duplicate license SPDX IDs in the packages
689 package_license = d.getVar("LICENSE:%s" % package)
690 if package_license and package_license != d.getVar("LICENSE"):
691 package_spdx_license = add_license_expression(
692 d, build_objset, package_license, license_data
693 )
694 else:
695 package_spdx_license = recipe_spdx_license
696
697 pkg_objset.new_relationship(
698 [spdx_package],
699 oe.spdx30.RelationshipType.hasConcludedLicense,
700 [oe.sbom30.get_element_link_id(package_spdx_license)],
701 )
702
703 # NOTE: CVE Elements live in the recipe collection
704 all_cves = set()
705 for status, cves in cve_by_status.items():
706 for cve, items in cves.items():
707 spdx_cve, detail, description = items
708 spdx_cve_id = oe.sbom30.get_element_link_id(spdx_cve)
709
710 all_cves.add(spdx_cve_id)
711
712 if status == "Patched":
713 pkg_objset.new_vex_patched_relationship(
714 [spdx_cve_id], [spdx_package]
715 )
716 elif status == "Unpatched":
717 pkg_objset.new_vex_unpatched_relationship(
718 [spdx_cve_id], [spdx_package]
719 )
720 elif status == "Ignored":
721 spdx_vex = pkg_objset.new_vex_ignored_relationship(
722 [spdx_cve_id],
723 [spdx_package],
724 impact_statement=description,
725 )
726
727 vex_just_type = d.getVarFlag(
728 "CVE_CHECK_VEX_JUSTIFICATION", detail
729 )
730 if vex_just_type:
731 if (
732 vex_just_type
733 not in oe.spdx30.security_VexJustificationType.NAMED_INDIVIDUALS
734 ):
735 bb.fatal(
736 f"Unknown vex justification '{vex_just_type}', detail '{detail}', for ignored {cve}"
737 )
738
739 for v in spdx_vex:
740 v.security_justificationType = oe.spdx30.security_VexJustificationType.NAMED_INDIVIDUALS[
741 vex_just_type
742 ]
743
744 elif status == "Unknown":
745 bb.note(f"Skipping {cve} with status 'Unknown'")
746 else:
747 bb.fatal(f"Unknown {cve} status '{status}'")
748
749 if all_cves:
750 pkg_objset.new_relationship(
751 [spdx_package],
752 oe.spdx30.RelationshipType.hasAssociatedVulnerability,
753 sorted(list(all_cves)),
754 )
755
756 bb.debug(1, "Adding package files to SPDX for package %s" % pkg_name)
757 package_files = add_package_files(
758 d,
759 pkg_objset,
760 pkgdest / package,
761 lambda file_counter: pkg_objset.new_spdxid(
762 "package", pkg_name, "file", str(file_counter)
763 ),
764 # TODO: Can we know the purpose here?
765 lambda filepath: [],
766 license_data,
767 ignore_top_level_dirs=["CONTROL", "DEBIAN"],
768 archive=None,
769 )
770
771 if package_files:
772 pkg_objset.new_relationship(
773 [spdx_package],
774 oe.spdx30.RelationshipType.contains,
775 sorted(list(package_files)),
776 )
777
778 if include_sources:
779 debug_sources = get_package_sources_from_debug(
780 d, package, package_files, dep_sources, source_hash_cache
781 )
782 debug_source_ids |= set(
783 oe.sbom30.get_element_link_id(d) for d in debug_sources
784 )
785
786 oe.sbom30.write_recipe_jsonld_doc(
787 d, pkg_objset, "packages-staging", deploydir, create_spdx_id_links=False
788 )
789
790 if include_sources:
791 bb.debug(1, "Adding sysroot files to SPDX")
792 sysroot_files = add_package_files(
793 d,
794 build_objset,
795 d.expand("${COMPONENTS_DIR}/${PACKAGE_ARCH}/${PN}"),
796 lambda file_counter: build_objset.new_spdxid("sysroot", str(file_counter)),
797 lambda filepath: [],
798 license_data,
799 archive=None,
800 )
801
802 if sysroot_files:
803 build_objset.new_scoped_relationship(
804 [build],
805 oe.spdx30.RelationshipType.hasOutput,
806 oe.spdx30.LifecycleScopeType.build,
807 sorted(list(sysroot_files)),
808 )
809
810 if build_inputs or debug_source_ids:
811 build_objset.new_scoped_relationship(
812 [build],
813 oe.spdx30.RelationshipType.hasInput,
814 oe.spdx30.LifecycleScopeType.build,
815 sorted(list(build_inputs)) + sorted(list(debug_source_ids)),
816 )
817
818 oe.sbom30.write_recipe_jsonld_doc(d, build_objset, "recipes", deploydir)
819
820
821def create_package_spdx(d):
822 deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
823 deploydir = Path(d.getVar("SPDXRUNTIMEDEPLOY"))
824 is_native = bb.data.inherits_class("native", d) or bb.data.inherits_class(
825 "cross", d
826 )
827
828 providers = oe.spdx_common.collect_package_providers(d)
829 pkg_arch = d.getVar("SSTATE_PKGARCH")
830
831 if is_native:
832 return
833
834 bb.build.exec_func("read_subpackage_metadata", d)
835
836 dep_package_cache = {}
837
838 # Any element common to all packages that needs to be referenced by ID
839 # should be written into this object set
840 common_objset = oe.sbom30.ObjectSet.new_objset(
841 d, "%s-package-common" % d.getVar("PN")
842 )
843
844 pkgdest = Path(d.getVar("PKGDEST"))
845 for package in d.getVar("PACKAGES").split():
846 localdata = bb.data.createCopy(d)
847 pkg_name = d.getVar("PKG:%s" % package) or package
848 localdata.setVar("PKG", pkg_name)
849 localdata.setVar("OVERRIDES", d.getVar("OVERRIDES", False) + ":" + package)
850
851 if not oe.packagedata.packaged(package, localdata):
852 continue
853
854 spdx_package, pkg_objset = oe.sbom30.load_obj_in_jsonld(
855 d,
856 pkg_arch,
857 "packages-staging",
858 "package-" + pkg_name,
859 oe.spdx30.software_Package,
860 software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.install,
861 )
862
863 # We will write out a new collection, so link it to the new
864 # creation info in the common package data. The old creation info
865 # should still exist and be referenced by all the existing elements
866 # in the package
867 pkg_objset.creationInfo = pkg_objset.copy_creation_info(
868 common_objset.doc.creationInfo
869 )
870
871 runtime_spdx_deps = set()
872
873 deps = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS") or "")
874 seen_deps = set()
875 for dep, _ in deps.items():
876 if dep in seen_deps:
877 continue
878
879 if dep not in providers:
880 continue
881
882 (dep, _) = providers[dep]
883
884 if not oe.packagedata.packaged(dep, localdata):
885 continue
886
887 dep_pkg_data = oe.packagedata.read_subpkgdata_dict(dep, d)
888 dep_pkg = dep_pkg_data["PKG"]
889
890 if dep in dep_package_cache:
891 dep_spdx_package = dep_package_cache[dep]
892 else:
893 bb.debug(1, "Searching for %s" % dep_pkg)
894 dep_spdx_package, _ = oe.sbom30.find_root_obj_in_jsonld(
895 d,
896 "packages-staging",
897 "package-" + dep_pkg,
898 oe.spdx30.software_Package,
899 software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.install,
900 )
901 dep_package_cache[dep] = dep_spdx_package
902
903 runtime_spdx_deps.add(dep_spdx_package)
904 seen_deps.add(dep)
905
906 if runtime_spdx_deps:
907 pkg_objset.new_scoped_relationship(
908 [spdx_package],
909 oe.spdx30.RelationshipType.dependsOn,
910 oe.spdx30.LifecycleScopeType.runtime,
911 [oe.sbom30.get_element_link_id(dep) for dep in runtime_spdx_deps],
912 )
913
914 oe.sbom30.write_recipe_jsonld_doc(d, pkg_objset, "packages", deploydir)
915
916 oe.sbom30.write_recipe_jsonld_doc(d, common_objset, "common-package", deploydir)
917
918
919def write_bitbake_spdx(d):
920 # Set PN to "bitbake" so that SPDX IDs can be generated
921 d.setVar("PN", "bitbake")
922 d.setVar("BB_TASKHASH", "bitbake")
923 oe.spdx_common.load_spdx_license_data(d)
924
925 deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
926
927 objset = oe.sbom30.ObjectSet.new_objset(d, "bitbake", False)
928
929 host_import_key = d.getVar("SPDX_BUILD_HOST")
930 invoked_by = objset.new_agent("SPDX_INVOKED_BY", add=False)
931 on_behalf_of = objset.new_agent("SPDX_ON_BEHALF_OF", add=False)
932
933 if d.getVar("SPDX_INCLUDE_BITBAKE_PARENT_BUILD") == "1":
934 # Since the Build objects are unique, we may as well set the creation
935 # time to the current time instead of the SOURCE_DATE_EPOCH fallback
936 objset.doc.creationInfo.created = datetime.now(timezone.utc)
937
938 # Each invocation of bitbake should have a unique ID since it is a
939 # unique build
940 nonce = os.urandom(16).hex()
941
942 build = objset.add_root(
943 oe.spdx30.build_Build(
944 _id=objset.new_spdxid(nonce, include_unihash=False),
945 creationInfo=objset.doc.creationInfo,
946 build_buildType=oe.sbom30.SPDX_BUILD_TYPE,
947 )
948 )
949 set_timestamp_now(d, build, "build_buildStartTime")
950
951 if host_import_key:
952 objset.new_scoped_relationship(
953 [build],
954 oe.spdx30.RelationshipType.hasHost,
955 oe.spdx30.LifecycleScopeType.build,
956 [objset.new_import(host_import_key)],
957 )
958
959 if invoked_by:
960 objset.add(invoked_by)
961 invoked_by_spdx = objset.new_scoped_relationship(
962 [build],
963 oe.spdx30.RelationshipType.invokedBy,
964 oe.spdx30.LifecycleScopeType.build,
965 [invoked_by],
966 )
967
968 if on_behalf_of:
969 objset.add(on_behalf_of)
970 objset.new_scoped_relationship(
971 [on_behalf_of],
972 oe.spdx30.RelationshipType.delegatedTo,
973 oe.spdx30.LifecycleScopeType.build,
974 invoked_by_spdx,
975 )
976
977 elif on_behalf_of:
978 bb.warn("SPDX_ON_BEHALF_OF has no effect if SPDX_INVOKED_BY is not set")
979
980 else:
981 if host_import_key:
982 bb.warn(
983 "SPDX_BUILD_HOST has no effect if SPDX_INCLUDE_BITBAKE_PARENT_BUILD is not set"
984 )
985
986 if invoked_by:
987 bb.warn(
988 "SPDX_INVOKED_BY has no effect if SPDX_INCLUDE_BITBAKE_PARENT_BUILD is not set"
989 )
990
991 if on_behalf_of:
992 bb.warn(
993 "SPDX_ON_BEHALF_OF has no effect if SPDX_INCLUDE_BITBAKE_PARENT_BUILD is not set"
994 )
995
996 for obj in objset.foreach_type(oe.spdx30.Element):
997 obj.extension.append(oe.sbom30.OEIdAliasExtension())
998
999 oe.sbom30.write_jsonld_doc(d, objset, deploy_dir_spdx / "bitbake.spdx.json")
1000
1001
1002def collect_build_package_inputs(d, objset, build, packages, files_by_hash=None):
1003 import oe.sbom30
1004
1005 providers = oe.spdx_common.collect_package_providers(d)
1006
1007 build_deps = set()
1008 missing_providers = set()
1009
1010 for name in sorted(packages.keys()):
1011 if name not in providers:
1012 missing_providers.add(name)
1013 continue
1014
1015 pkg_name, pkg_hashfn = providers[name]
1016
1017 # Copy all of the package SPDX files into the Sbom elements
1018 pkg_spdx, pkg_objset = oe.sbom30.find_root_obj_in_jsonld(
1019 d,
1020 "packages",
1021 "package-" + pkg_name,
1022 oe.spdx30.software_Package,
1023 software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.install,
1024 )
1025 build_deps.add(oe.sbom30.get_element_link_id(pkg_spdx))
1026
1027 if files_by_hash is not None:
1028 for h, f in pkg_objset.by_sha256_hash.items():
1029 files_by_hash.setdefault(h, set()).update(f)
1030
1031 if missing_providers:
1032 bb.fatal(
1033 f"Unable to find SPDX provider(s) for: {', '.join(sorted(missing_providers))}"
1034 )
1035
1036 if build_deps:
1037 objset.new_scoped_relationship(
1038 [build],
1039 oe.spdx30.RelationshipType.hasInput,
1040 oe.spdx30.LifecycleScopeType.build,
1041 sorted(list(build_deps)),
1042 )
1043
1044
1045def create_rootfs_spdx(d):
1046 deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
1047 deploydir = Path(d.getVar("SPDXROOTFSDEPLOY"))
1048 root_packages_file = Path(d.getVar("SPDX_ROOTFS_PACKAGES"))
1049 image_basename = d.getVar("IMAGE_BASENAME")
1050 image_rootfs = d.getVar("IMAGE_ROOTFS")
1051 machine = d.getVar("MACHINE")
1052
1053 with root_packages_file.open("r") as f:
1054 packages = json.load(f)
1055
1056 objset = oe.sbom30.ObjectSet.new_objset(
1057 d, "%s-%s-rootfs" % (image_basename, machine)
1058 )
1059
1060 rootfs = objset.add_root(
1061 oe.spdx30.software_Package(
1062 _id=objset.new_spdxid("rootfs", image_basename),
1063 creationInfo=objset.doc.creationInfo,
1064 name=image_basename,
1065 software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.archive,
1066 )
1067 )
1068 set_timestamp_now(d, rootfs, "builtTime")
1069
1070 rootfs_build = objset.add_root(objset.new_task_build("rootfs", "rootfs"))
1071 set_timestamp_now(d, rootfs_build, "build_buildEndTime")
1072
1073 objset.new_scoped_relationship(
1074 [rootfs_build],
1075 oe.spdx30.RelationshipType.hasOutput,
1076 oe.spdx30.LifecycleScopeType.build,
1077 [rootfs],
1078 )
1079
1080 files_by_hash = {}
1081 collect_build_package_inputs(d, objset, rootfs_build, packages, files_by_hash)
1082
1083 files = set()
1084 for dirpath, dirnames, filenames in os.walk(image_rootfs, onerror=walk_error):
1085 dirnames.sort()
1086 filenames.sort()
1087 for fn in filenames:
1088 fpath = Path(dirpath) / fn
1089 if fpath.is_symlink() or not fpath.is_file():
1090 continue
1091
1092 relpath = str(fpath.relative_to(image_rootfs))
1093 h = bb.utils.sha256_file(fpath)
1094
1095 found = False
1096 if h in files_by_hash:
1097 for f in files_by_hash[h]:
1098 if isinstance(f, oe.spdx30.software_File) and f.name == relpath:
1099 files.add(oe.sbom30.get_element_link_id(f))
1100 found = True
1101 break
1102
1103 if not found:
1104 files.add(
1105 objset.new_file(
1106 objset.new_spdxid("rootfs-file", relpath),
1107 relpath,
1108 fpath,
1109 )
1110 )
1111
1112 if files:
1113 objset.new_relationship(
1114 [rootfs],
1115 oe.spdx30.RelationshipType.contains,
1116 sorted(list(files)),
1117 )
1118
1119 oe.sbom30.write_recipe_jsonld_doc(d, objset, "rootfs", deploydir)
1120
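The rootfs walk above reuses the File elements already recorded for the individual packages: files_by_hash maps a sha256 to the package file elements with that content, and a new rootfs-file element is only minted when no package file matches both hash and relative path. A self-contained sketch of that pattern, with illustrative data and plain tuples standing in for the element objects:

    # files_by_hash: sha256 -> set of (name, element_id) pairs (illustrative)
    files_by_hash = {
        "ab12cd": {("usr/bin/foo", "spdxid-foo")},
    }

    def rootfs_file_id(h, relpath):
        for name, element_id in files_by_hash.get(h, set()):
            if name == relpath:
                return element_id          # reuse the package's File element
        return "spdxid-rootfs-" + relpath  # otherwise mint a new element (sketch)

    rootfs_file_id("ab12cd", "usr/bin/foo")  # -> "spdxid-foo"
    rootfs_file_id("ab12cd", "usr/bin/bar")  # -> "spdxid-rootfs-usr/bin/bar"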
1121
1122def create_image_spdx(d):
1123 import oe.sbom30
1124
1125 image_deploy_dir = Path(d.getVar("IMGDEPLOYDIR"))
1126 manifest_path = Path(d.getVar("IMAGE_OUTPUT_MANIFEST"))
1127 spdx_work_dir = Path(d.getVar("SPDXIMAGEWORK"))
1128
1129 image_basename = d.getVar("IMAGE_BASENAME")
1130 machine = d.getVar("MACHINE")
1131
1132 objset = oe.sbom30.ObjectSet.new_objset(
1133 d, "%s-%s-image" % (image_basename, machine)
1134 )
1135
1136 with manifest_path.open("r") as f:
1137 manifest = json.load(f)
1138
1139 builds = []
1140 for task in manifest:
1141 imagetype = task["imagetype"]
1142 taskname = task["taskname"]
1143
1144 image_build = objset.add_root(
1145 objset.new_task_build(taskname, "image/%s" % imagetype)
1146 )
1147 set_timestamp_now(d, image_build, "build_buildEndTime")
1148 builds.append(image_build)
1149
1150 artifacts = []
1151
1152 for image in task["images"]:
1153 image_filename = image["filename"]
1154 image_path = image_deploy_dir / image_filename
1155 if os.path.isdir(image_path):
1156 a = add_package_files(
1157 d,
1158 objset,
1159 image_path,
1160 lambda file_counter: objset.new_spdxid(
1161 "imagefile", str(file_counter)
1162 ),
1163 lambda filepath: [],
1164 license_data=None,
1165 ignore_dirs=[],
1166 ignore_top_level_dirs=[],
1167 archive=None,
1168 )
1169 artifacts.extend(a)
1170 else:
1171 a = objset.add_root(
1172 oe.spdx30.software_File(
1173 _id=objset.new_spdxid("image", image_filename),
1174 creationInfo=objset.doc.creationInfo,
1175 name=image_filename,
1176 verifiedUsing=[
1177 oe.spdx30.Hash(
1178 algorithm=oe.spdx30.HashAlgorithm.sha256,
1179 hashValue=bb.utils.sha256_file(image_path),
1180 )
1181 ],
1182 )
1183 )
1184
1185 artifacts.append(a)
1186
1187 for a in artifacts:
1188 set_purposes(
1189 d, a, "SPDX_IMAGE_PURPOSE:%s" % imagetype, "SPDX_IMAGE_PURPOSE"
1190 )
1191
1192 set_timestamp_now(d, a, "builtTime")
1193
1194
1195 if artifacts:
1196 objset.new_scoped_relationship(
1197 [image_build],
1198 oe.spdx30.RelationshipType.hasOutput,
1199 oe.spdx30.LifecycleScopeType.build,
1200 artifacts,
1201 )
1202
1203 if builds:
1204 rootfs_image, _ = oe.sbom30.find_root_obj_in_jsonld(
1205 d,
1206 "rootfs",
1207 "%s-%s-rootfs" % (image_basename, machine),
1208 oe.spdx30.software_Package,
1209 # TODO: Should use a purpose to filter here?
1210 )
1211 objset.new_scoped_relationship(
1212 builds,
1213 oe.spdx30.RelationshipType.hasInput,
1214 oe.spdx30.LifecycleScopeType.build,
1215 [oe.sbom30.get_element_link_id(rootfs_image)],
1216 )
1217
1218 objset.add_aliases()
1219 objset.link()
1220 oe.sbom30.write_recipe_jsonld_doc(d, objset, "image", spdx_work_dir)
1221
1222
1223def create_image_sbom_spdx(d):
1224 import oe.sbom30
1225
1226 image_name = d.getVar("IMAGE_NAME")
1227 image_basename = d.getVar("IMAGE_BASENAME")
1228 image_link_name = d.getVar("IMAGE_LINK_NAME")
1229 imgdeploydir = Path(d.getVar("SPDXIMAGEDEPLOYDIR"))
1230 machine = d.getVar("MACHINE")
1231
1232 spdx_path = imgdeploydir / (image_name + ".spdx.json")
1233
1234 root_elements = []
1235
1236 # TODO: Do we need to add the rootfs or are the image files sufficient?
1237 rootfs_image, _ = oe.sbom30.find_root_obj_in_jsonld(
1238 d,
1239 "rootfs",
1240 "%s-%s-rootfs" % (image_basename, machine),
1241 oe.spdx30.software_Package,
1242 # TODO: Should use a purpose here?
1243 )
1244 root_elements.append(oe.sbom30.get_element_link_id(rootfs_image))
1245
1246 image_objset, _ = oe.sbom30.find_jsonld(
1247 d, "image", "%s-%s-image" % (image_basename, machine), required=True
1248 )
1249 for o in image_objset.foreach_root(oe.spdx30.software_File):
1250 root_elements.append(oe.sbom30.get_element_link_id(o))
1251
1252 objset, sbom = oe.sbom30.create_sbom(d, image_name, root_elements)
1253
1254 oe.sbom30.write_jsonld_doc(d, objset, spdx_path)
1255
1256 def make_image_link(target_path, suffix):
1257 if image_link_name:
1258 link = imgdeploydir / (image_link_name + suffix)
1259 if link != target_path:
1260 link.symlink_to(os.path.relpath(target_path, link.parent))
1261
1262 make_image_link(spdx_path, ".spdx.json")
1263
1264
1265def sdk_create_spdx(d, sdk_type, spdx_work_dir, toolchain_outputname):
1266 sdk_name = toolchain_outputname + "-" + sdk_type
1267 sdk_packages = oe.sdk.sdk_list_installed_packages(d, sdk_type == "target")
1268
1269 objset = oe.sbom30.ObjectSet.new_objset(d, sdk_name)
1270
1271 sdk_rootfs = objset.add_root(
1272 oe.spdx30.software_Package(
1273 _id=objset.new_spdxid("sdk-rootfs", sdk_name),
1274 creationInfo=objset.doc.creationInfo,
1275 name=sdk_name,
1276 software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.archive,
1277 )
1278 )
1279 set_timestamp_now(d, sdk_rootfs, "builtTime")
1280
1281 sdk_build = objset.add_root(objset.new_task_build("sdk-rootfs", "sdk-rootfs"))
1282 set_timestamp_now(d, sdk_build, "build_buildEndTime")
1283
1284 objset.new_scoped_relationship(
1285 [sdk_build],
1286 oe.spdx30.RelationshipType.hasOutput,
1287 oe.spdx30.LifecycleScopeType.build,
1288 [sdk_rootfs],
1289 )
1290
1291 collect_build_package_inputs(d, objset, sdk_build, sdk_packages)
1292
1293 objset.add_aliases()
1294 oe.sbom30.write_jsonld_doc(d, objset, spdx_work_dir / "sdk-rootfs.spdx.json")
1295
1296
1297def create_sdk_sbom(d, sdk_deploydir, spdx_work_dir, toolchain_outputname):
1298 # Load the document written earlier
1299 rootfs_objset = oe.sbom30.load_jsonld(
1300 d, spdx_work_dir / "sdk-rootfs.spdx.json", required=True
1301 )
1302
1303 # Create a new build for the SDK installer
1304 sdk_build = rootfs_objset.new_task_build("sdk-populate", "sdk-populate")
1305 set_timestamp_now(d, sdk_build, "build_buildEndTime")
1306
1307 rootfs = rootfs_objset.find_root(oe.spdx30.software_Package)
1308 if rootfs is None:
1309 bb.fatal("Unable to find rootfs artifact")
1310
1311 rootfs_objset.new_scoped_relationship(
1312 [sdk_build],
1313 oe.spdx30.RelationshipType.hasInput,
1314 oe.spdx30.LifecycleScopeType.build,
1315 [rootfs],
1316 )
1317
1318 files = set()
1319 root_files = []
1320
1321 # NOTE: symlinks are skipped below; os.walk() does not follow directory symlinks
1322 for dirpath, dirnames, filenames in os.walk(sdk_deploydir, onerror=walk_error):
1323 dirnames.sort()
1324 filenames.sort()
1325 for fn in filenames:
1326 fpath = Path(dirpath) / fn
1327 if not fpath.is_file() or fpath.is_symlink():
1328 continue
1329
1330 relpath = str(fpath.relative_to(sdk_deploydir))
1331
1332 f = rootfs_objset.new_file(
1333 rootfs_objset.new_spdxid("sdk-installer", relpath),
1334 relpath,
1335 fpath,
1336 )
1337 set_timestamp_now(d, f, "builtTime")
1338
1339 if fn.endswith(".manifest"):
1340 f.software_primaryPurpose = oe.spdx30.software_SoftwarePurpose.manifest
1341 elif fn.endswith(".testdata.json"):
1342 f.software_primaryPurpose = (
1343 oe.spdx30.software_SoftwarePurpose.configuration
1344 )
1345 else:
1346 set_purposes(d, f, "SPDX_SDK_PURPOSE")
1347 root_files.append(f)
1348
1349 files.add(f)
1350
1351 if files:
1352 rootfs_objset.new_scoped_relationship(
1353 [sdk_build],
1354 oe.spdx30.RelationshipType.hasOutput,
1355 oe.spdx30.LifecycleScopeType.build,
1356 files,
1357 )
1358 else:
1359 bb.warn(f"No SDK output files found in {sdk_deploydir}")
1360
1361 objset, sbom = oe.sbom30.create_sbom(
1362 d, toolchain_outputname, sorted(list(files)), [rootfs_objset]
1363 )
1364
1365 oe.sbom30.write_jsonld_doc(
1366 d, objset, sdk_deploydir / (toolchain_outputname + ".spdx.json")
1367 )
diff --git a/meta/lib/oe/spdx_common.py b/meta/lib/oe/spdx_common.py
new file mode 100644
index 0000000000..c2dec65563
--- /dev/null
+++ b/meta/lib/oe/spdx_common.py
@@ -0,0 +1,285 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import bb
8import collections
9import json
10import oe.packagedata
11import os
import re
12import shutil
13
14from pathlib import Path
15from dataclasses import dataclass
16
17LIC_REGEX = re.compile(
18 rb"^\W*SPDX-License-Identifier:\s*([ \w\d.()+-]+?)(?:\s+\W*)?$",
19 re.MULTILINE,
20)
21
22
23def extract_licenses(filename):
24 """
25 Extract SPDX License identifiers from a file
26 """
27 try:
28 with open(filename, "rb") as f:
29 size = min(15000, os.stat(filename).st_size)
30 txt = f.read(size)
31 licenses = re.findall(LIC_REGEX, txt)
32 if licenses:
33 ascii_licenses = [lic.decode("ascii") for lic in licenses]
34 return ascii_licenses
35 except Exception as e:
36 bb.warn(f"Exception reading {filename}: {e}")
37 return []
38
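A usage sketch for extract_licenses() above (the path and file contents are illustrative): only the first 15000 bytes are scanned, and one identifier is captured per matching line.

    # Given /tmp/example.c starting with:
    #   /* SPDX-License-Identifier: MIT */
    #   /* SPDX-License-Identifier: GPL-2.0-only */
    extract_licenses("/tmp/example.c")
    # -> ["MIT", "GPL-2.0-only"]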
39
40def is_work_shared_spdx(d):
41 return '/work-shared/' in d.getVar('S')
42
43
44def load_spdx_license_data(d):
45 with open(d.getVar("SPDX_LICENSES"), "r") as f:
46 data = json.load(f)
47 # Transform the license array to a dictionary
48 data["licenses"] = {l["licenseId"]: l for l in data["licenses"]}
49
50 return data
51
52
53def process_sources(d):
54 """
55 Returns True if the sources for this recipe should be included in the SPDX
56 or False if not
57 """
58 pn = d.getVar("PN")
59 assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
60 if pn in assume_provided:
61 for p in d.getVar("PROVIDES").split():
62 if p != pn:
63 pn = p
64 break
65
66 # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
67 # so avoid archiving source here.
68 if pn.startswith("glibc-locale"):
69 return False
70 if d.getVar("PN") == "libtool-cross":
71 return False
72 if d.getVar("PN") == "libgcc-initial":
73 return False
74 if d.getVar("PN") == "shadow-sysroot":
75 return False
76
77 return True
78
79
80@dataclass(frozen=True)
81class Dep(object):
82 pn: str
83 hashfn: str
84 in_taskhash: bool
85
86
87def collect_direct_deps(d, dep_task):
88 """
89 Find the direct dependencies of the current task
90
91 Returns the list of recipes whose dep_task the current task
92 depends on
93 """
94 current_task = "do_" + d.getVar("BB_CURRENTTASK")
95 pn = d.getVar("PN")
96
97 taskdepdata = d.getVar("BB_TASKDEPDATA", False)
98
99 for this_dep in taskdepdata.values():
100 if this_dep[0] == pn and this_dep[1] == current_task:
101 break
102 else:
103 bb.fatal(f"Unable to find this {pn}:{current_task} in taskdepdata")
104
105 deps = set()
106
107 for dep_name in this_dep.deps:
108 dep_data = taskdepdata[dep_name]
109 if dep_data.taskname == dep_task and dep_data.pn != pn:
110 deps.add((dep_data.pn, dep_data.hashfn, dep_name in this_dep.taskhash_deps))
111
112 return sorted(deps)
113
114
115def get_spdx_deps(d):
116 """
117 Reads the SPDX dependencies JSON file and returns the data
118 """
119 spdx_deps_file = Path(d.getVar("SPDXDEPS"))
120
121 deps = []
122 with spdx_deps_file.open("r") as f:
123 for d in json.load(f):
124 deps.append(Dep(*d))
125 return deps
126
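For reference, a sketch of the on-disk shape this reader assumes: a JSON list of (pn, hashfn, in_taskhash) triples matching the Dep dataclass above (the path and values here are illustrative):

    import json

    # spdx-deps.json (stand-in for ${SPDXDEPS}):
    #   [["zlib", "deadbeef:sstate", true], ["openssl", "cafef00d:sstate", false]]
    with open("spdx-deps.json") as f:
        deps = [Dep(*entry) for entry in json.load(f)]
    # -> [Dep(pn='zlib', hashfn='deadbeef:sstate', in_taskhash=True), ...]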
127
128def collect_package_providers(d):
129 """
130 Returns a dictionary where each RPROVIDES is mapped to the package that
131 provides it
132 """
133 deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
134
135 providers = {}
136
137 deps = collect_direct_deps(d, "do_create_spdx")
138 deps.append((d.getVar("PN"), d.getVar("BB_HASHFILENAME"), True))
139
140 for dep_pn, dep_hashfn, _ in deps:
141 localdata = d
142 recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
143 if not recipe_data:
144 localdata = bb.data.createCopy(d)
145 localdata.setVar("PKGDATA_DIR", "${PKGDATA_DIR_SDK}")
146 recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
147
148 for pkg in recipe_data.get("PACKAGES", "").split():
149 pkg_data = oe.packagedata.read_subpkgdata_dict(pkg, localdata)
150 rprovides = set(
151 n
152 for n, _ in bb.utils.explode_dep_versions2(
153 pkg_data.get("RPROVIDES", "")
154 ).items()
155 )
156 rprovides.add(pkg)
157
158 if "PKG" in pkg_data:
159 pkg = pkg_data["PKG"]
160 rprovides.add(pkg)
161
162 for r in rprovides:
163 providers[r] = (pkg, dep_hashfn)
164
165 return providers
166
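The resulting mapping covers every runtime-provided name, including renamed packages (PKG), each pointing at the providing package plus the hash filename of its recipe; sketched with illustrative names:

    providers = collect_package_providers(d)
    # e.g. (names and hash illustrative):
    #   providers["libssl3"]      == ("libssl3", "deadbeef...")
    #   providers["openssl-conf"] == ("openssl-conf", "deadbeef...")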
167
168def get_patched_src(d):
169 """
170 Save patched source of the recipe in SPDX_WORKDIR.
171 """
172 spdx_workdir = d.getVar("SPDXWORK")
173 spdx_sysroot_native = d.getVar("STAGING_DIR_NATIVE")
174 pn = d.getVar("PN")
175
176 workdir = d.getVar("WORKDIR")
177
178 try:
179 # The kernel class functions require the source to be in work-shared, so we don't change WORKDIR
180 if not is_work_shared_spdx(d):
181 # Change the WORKDIR so that do_unpack and do_patch run in another dir.
182 d.setVar("WORKDIR", spdx_workdir)
183 # Restore the original path to recipe's native sysroot (it's relative to WORKDIR).
184 d.setVar("STAGING_DIR_NATIVE", spdx_sysroot_native)
185
186 # Changing 'WORKDIR' also changes 'B', so create the 'B' directory
187 # in case following tasks need it (e.g. some recipes' do_patch
188 # requires 'B' to exist).
189 bb.utils.mkdirhier(d.getVar("B"))
190
191 bb.build.exec_func("do_unpack", d)
192
193 if d.getVar("SRC_URI") != "":
194 if bb.data.inherits_class('dos2unix', d):
195 bb.build.exec_func('do_convert_crlf_to_lf', d)
196 bb.build.exec_func("do_patch", d)
197
198 # Copy source from work-share to spdx_workdir
199 if is_work_shared_spdx(d):
200 share_src = d.getVar('S')
201 d.setVar("WORKDIR", spdx_workdir)
202 d.setVar("STAGING_DIR_NATIVE", spdx_sysroot_native)
203 # Copy source to ${SPDXWORK}, using the same basename dir as ${S};
204 src_dir = (
205 spdx_workdir
206 + "/"
207 + os.path.basename(share_src)
208 )
209 # For kernel source, rename the suffix dir 'kernel-source'
210 # to ${BP} (${BPN}-${PV})
211 if bb.data.inherits_class("kernel", d):
212 src_dir = spdx_workdir + "/" + d.getVar('BP')
213
214 bb.note(f"copyhardlinktree {share_src} to {src_dir}")
215 oe.path.copyhardlinktree(share_src, src_dir)
216
217 # Some userland recipes have no source.
218 if not os.path.exists(spdx_workdir):
219 bb.utils.mkdirhier(spdx_workdir)
220 finally:
221 d.setVar("WORKDIR", workdir)
222
223
224def has_task(d, task):
225 return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False))
226
227
228def fetch_data_to_uri(fd, name):
229 """
230 Translates a bitbake FetchData to a string URI
231 """
232 uri = fd.type
233 # Map gitsm to git, since gitsm:// is not a valid URI protocol
234 if uri == "gitsm":
235 uri = "git"
236 proto = getattr(fd, "proto", None)
237 if proto is not None:
238 uri = uri + "+" + proto
239 uri = uri + "://" + fd.host + fd.path
240
241 if fd.method.supports_srcrev():
242 uri = uri + "@" + fd.revision
243
244 return uri
245
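A self-contained sketch of the translation above, using a stand-in object rather than a real bb.fetch2 FetchData (all attribute values are illustrative):

    from types import SimpleNamespace

    fd = SimpleNamespace(
        type="gitsm",                     # mapped to "git" below
        proto="https",
        host="git.example.com",
        path="/repo.git",
        revision="abc123",
        method=SimpleNamespace(supports_srcrev=lambda: True),
    )
    fetch_data_to_uri(fd, "repo")
    # -> "git+https://git.example.com/repo.git@abc123"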
246def is_compiled_source(filename, compiled_sources, types):
247 """
248 Check if the file is a compiled file
249 """
250 import os
251 # If we don't have compiled source, we assume all are compiled.
252 if not compiled_sources:
253 return True
254
255 # We always return True if the file type is not in the list of compiled files.
256 # Some files in the source directory are not compiled, for example Makefiles,
257 # but also Python .py files. We need to include them in the SPDX.
258 basename = os.path.basename(filename)
259 ext = basename.partition(".")[2]
260 if ext not in types:
261 return True
262 # Check that the file is in the list
263 return filename in compiled_sources
264
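Usage sketch (paths illustrative): the helper only returns False when the extension is a known compiled type but the file never appears in the debug info.

    compiled = {"/src/main.c"}
    types = {"c", "h"}

    is_compiled_source("/src/main.c", compiled, types)    # True: listed in debug info
    is_compiled_source("/src/other.c", compiled, types)   # False: .c but never compiled
    is_compiled_source("/src/Makefile", compiled, types)  # True: not a compiled type
    is_compiled_source("/src/x.c", set(), types)          # True: no debug info at all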
265def get_compiled_sources(d):
266 """
267 Get list of compiled sources from debug information and normalize the paths
268 """
269 import itertools
270 source_info = oe.package.read_debugsources_info(d)
271 if not source_info:
272 bb.debug(1, "Do not have debugsources.list. Skipping")
273 return [], []
274
275 # Sources are not split now in SPDX, so we aggregate them
276 sources = set(itertools.chain.from_iterable(source_info.values()))
277 # Check extensions of files
278 types = set()
279 for src in sources:
280 basename = os.path.basename(src)
281 ext = basename.partition(".")[2]
282 if ext not in types and ext:
283 types.add(ext)
284 bb.debug(1, f"Num of sources: {len(sources)} and types: {len(types)} {str(types)}")
285 return sources, types
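One subtlety shared by is_compiled_source() and get_compiled_sources() above: the extension is taken with str.partition("."), which splits on the first dot, so compound extensions stay whole (paths illustrative):

    import os

    os.path.basename("/src/libfoo.so.1.2").partition(".")[2]  # -> "so.1.2"
    os.path.basename("/src/main.c").partition(".")[2]         # -> "c"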
diff --git a/meta/lib/oe/sstatesig.py b/meta/lib/oe/sstatesig.py
index 6cd6e11acc..ef687f5d41 100644
--- a/meta/lib/oe/sstatesig.py
+++ b/meta/lib/oe/sstatesig.py
@@ -1,9 +1,13 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
6import bb.parse
4import bb.siggen 7import bb.siggen
5import bb.runqueue 8import bb.runqueue
6import oe 9import oe
10import netrc
7 11
8def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCaches): 12def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCaches):
9 # Return True if we should keep the dependency, False to drop it 13 # Return True if we should keep the dependency, False to drop it
@@ -28,6 +32,12 @@ def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCaches):
28 depmc, _, deptaskname, depmcfn = bb.runqueue.split_tid_mcfn(dep) 32 depmc, _, deptaskname, depmcfn = bb.runqueue.split_tid_mcfn(dep)
29 mc, _ = bb.runqueue.split_mc(fn) 33 mc, _ = bb.runqueue.split_mc(fn)
30 34
35 # We can skip the rm_work task signature to avoid running the task
36 # when we remove some tasks from the dependency chain,
37 # e.g. INHERIT:remove = "create-spdx" will trigger do_rm_work
38 if task == "do_rm_work":
39 return False
40
31 # (Almost) always include our own inter-task dependencies (unless it comes 41 # (Almost) always include our own inter-task dependencies (unless it comes
32 # from a mcdepends). The exception is the special 42 # from a mcdepends). The exception is the special
33 # do_kernel_configme->do_unpack_and_patch dependency from archiver.bbclass. 43 # do_kernel_configme->do_unpack_and_patch dependency from archiver.bbclass.
@@ -59,7 +69,7 @@ def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCaches):
59 return False 69 return False
60 70
61 # Kernel modules are well namespaced. We don't want to depend on the kernel's checksum 71 # Kernel modules are well namespaced. We don't want to depend on the kernel's checksum
62 # if we're just doing an RRECOMMENDS_xxx = "kernel-module-*", not least because the checksum 72 # if we're just doing an RRECOMMENDS:xxx = "kernel-module-*", not least because the checksum
63 # is machine specific. 73 # is machine specific.
64 # Therefore if we're not a kernel or a module recipe (inheriting the kernel classes) 74 # Therefore if we're not a kernel or a module recipe (inheriting the kernel classes)
65 # and we recommend a kernel-module, we exclude the dependency. 75
@@ -84,14 +94,13 @@ def sstate_lockedsigs(d):
84 sigs[pn][task] = [h, siggen_lockedsigs_var] 94 sigs[pn][task] = [h, siggen_lockedsigs_var]
85 return sigs 95 return sigs
86 96
87class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic): 97def lockedsigs_unihashmap(d):
88 name = "OEBasic" 98 unihashmap = {}
89 def init_rundepcheck(self, data): 99 data = (d.getVar("SIGGEN_UNIHASHMAP") or "").split()
90 self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split() 100 for entry in data:
91 self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split() 101 pn, task, taskhash, unihash = entry.split(":")
92 pass 102 unihashmap[(pn, task)] = (taskhash, unihash)
93 def rundep_check(self, fn, recipename, task, dep, depname, dataCaches = None): 103 return unihashmap
94 return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCaches)
95 104
96class SignatureGeneratorOEBasicHashMixIn(object): 105class SignatureGeneratorOEBasicHashMixIn(object):
97 supports_multiconfig_datacaches = True 106 supports_multiconfig_datacaches = True
@@ -100,15 +109,17 @@ class SignatureGeneratorOEBasicHashMixIn(object):
100 self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split() 109 self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
101 self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split() 110 self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
102 self.lockedsigs = sstate_lockedsigs(data) 111 self.lockedsigs = sstate_lockedsigs(data)
112 self.unihashmap = lockedsigs_unihashmap(data)
103 self.lockedhashes = {} 113 self.lockedhashes = {}
104 self.lockedpnmap = {} 114 self.lockedpnmap = {}
105 self.lockedhashfn = {} 115 self.lockedhashfn = {}
106 self.machine = data.getVar("MACHINE") 116 self.machine = data.getVar("MACHINE")
107 self.mismatch_msgs = [] 117 self.mismatch_msgs = []
118 self.mismatch_number = 0
119 self.lockedsigs_msgs = ""
108 self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES") or 120 self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES") or
109 "").split() 121 "").split()
110 self.unlockedrecipes = { k: "" for k in self.unlockedrecipes } 122 self.unlockedrecipes = { k: "" for k in self.unlockedrecipes }
111 self.buildarch = data.getVar('BUILD_ARCH')
112 self._internal = False 123 self._internal = False
113 pass 124 pass
114 125
@@ -142,18 +153,12 @@ class SignatureGeneratorOEBasicHashMixIn(object):
142 super().set_taskdata(data[3:]) 153 super().set_taskdata(data[3:])
143 154
144 def dump_sigs(self, dataCache, options): 155 def dump_sigs(self, dataCache, options):
145 sigfile = os.getcwd() + "/locked-sigs.inc" 156 if 'lockedsigs' in options:
146 bb.plain("Writing locked sigs to %s" % sigfile) 157 sigfile = os.getcwd() + "/locked-sigs.inc"
147 self.dump_lockedsigs(sigfile) 158 bb.plain("Writing locked sigs to %s" % sigfile)
159 self.dump_lockedsigs(sigfile)
148 return super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigs(dataCache, options) 160 return super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigs(dataCache, options)
149 161
150 def prep_taskhash(self, tid, deps, dataCaches):
151 super().prep_taskhash(tid, deps, dataCaches)
152 if hasattr(self, "extramethod"):
153 (mc, _, _, fn) = bb.runqueue.split_tid_mcfn(tid)
154 inherits = " ".join(dataCaches[mc].inherits[fn])
155 if inherits.find("/native.bbclass") != -1 or inherits.find("/cross.bbclass") != -1:
156 self.extramethod[tid] = ":" + self.buildarch
157 162
158 def get_taskhash(self, tid, deps, dataCaches): 163 def get_taskhash(self, tid, deps, dataCaches):
159 if tid in self.lockedhashes: 164 if tid in self.lockedhashes:
@@ -196,6 +201,7 @@ class SignatureGeneratorOEBasicHashMixIn(object):
196 #bb.warn("Using %s %s %s" % (recipename, task, h)) 201 #bb.warn("Using %s %s %s" % (recipename, task, h))
197 202
198 if h != h_locked and h_locked != unihash: 203 if h != h_locked and h_locked != unihash:
204 self.mismatch_number += 1
199 self.mismatch_msgs.append('The %s:%s sig is computed to be %s, but the sig is locked to %s in %s' 205 self.mismatch_msgs.append('The %s:%s sig is computed to be %s, but the sig is locked to %s in %s'
200 % (recipename, task, h, h_locked, var)) 206 % (recipename, task, h, h_locked, var))
201 207
@@ -210,10 +216,19 @@ class SignatureGeneratorOEBasicHashMixIn(object):
210 return self.lockedhashes[tid] 216 return self.lockedhashes[tid]
211 return super().get_stampfile_hash(tid) 217 return super().get_stampfile_hash(tid)
212 218
213 def get_unihash(self, tid): 219 def get_cached_unihash(self, tid):
214 if tid in self.lockedhashes and self.lockedhashes[tid] and not self._internal: 220 if tid in self.lockedhashes and self.lockedhashes[tid] and not self._internal:
215 return self.lockedhashes[tid] 221 return self.lockedhashes[tid]
216 return super().get_unihash(tid) 222
223 (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
224 recipename = self.lockedpnmap[fn]
225
226 if (recipename, task) in self.unihashmap:
227 taskhash, unihash = self.unihashmap[(recipename, task)]
228 if taskhash == self.taskhash[tid]:
229 return unihash
230
231 return super().get_cached_unihash(tid)
217 232
218 def dump_sigtask(self, fn, task, stampbase, runtime): 233 def dump_sigtask(self, fn, task, stampbase, runtime):
219 tid = fn + ":" + task 234 tid = fn + ":" + task
@@ -223,17 +238,26 @@ class SignatureGeneratorOEBasicHashMixIn(object):
223 238
224 def dump_lockedsigs(self, sigfile, taskfilter=None): 239 def dump_lockedsigs(self, sigfile, taskfilter=None):
225 types = {} 240 types = {}
241 unihashmap = {}
226 for tid in self.runtaskdeps: 242 for tid in self.runtaskdeps:
243 # Bitbake changed this to a tuple in newer versions
244 if isinstance(tid, tuple):
245 tid = tid[1]
227 if taskfilter: 246 if taskfilter:
228 if not tid in taskfilter: 247 if not tid in taskfilter:
229 continue 248 continue
230 fn = bb.runqueue.fn_from_tid(tid) 249 (_, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
231 t = self.lockedhashfn[fn].split(" ")[1].split(":")[5] 250 t = self.lockedhashfn[fn].split(" ")[1].split(":")[5]
232 t = 't-' + t.replace('_', '-') 251 t = 't-' + t.replace('_', '-')
233 if t not in types: 252 if t not in types:
234 types[t] = [] 253 types[t] = []
235 types[t].append(tid) 254 types[t].append(tid)
236 255
256 taskhash = self.taskhash[tid]
257 unihash = self.get_unihash(tid)
258 if taskhash != unihash:
259 unihashmap[tid] = " " + self.lockedpnmap[fn] + ":" + task + ":" + taskhash + ":" + unihash
260
237 with open(sigfile, "w") as f: 261 with open(sigfile, "w") as f:
238 l = sorted(types) 262 l = sorted(types)
239 for t in l: 263 for t in l:
@@ -246,15 +270,31 @@ class SignatureGeneratorOEBasicHashMixIn(object):
246 continue 270 continue
247 f.write(" " + self.lockedpnmap[fn] + ":" + task + ":" + self.get_unihash(tid) + " \\\n") 271 f.write(" " + self.lockedpnmap[fn] + ":" + task + ":" + self.get_unihash(tid) + " \\\n")
248 f.write(' "\n') 272 f.write(' "\n')
249 f.write('SIGGEN_LOCKEDSIGS_TYPES_%s = "%s"' % (self.machine, " ".join(l))) 273 f.write('SIGGEN_LOCKEDSIGS_TYPES:%s = "%s"\n' % (self.machine, " ".join(l)))
274 f.write('SIGGEN_UNIHASHMAP += "\\\n')
275 sortedtid = sorted(unihashmap, key=lambda tid: self.lockedpnmap[bb.runqueue.fn_from_tid(tid)])
276 for tid in sortedtid:
277 f.write(unihashmap[tid] + " \\\n")
278 f.write(' "\n')
279
280 def dump_siglist(self, sigfile, path_prefix_strip=None):
281 def strip_fn(fn):
282 nonlocal path_prefix_strip
283 if not path_prefix_strip:
284 return fn
285
286 fn_exp = fn.split(":")
287 if fn_exp[-1].startswith(path_prefix_strip):
288 fn_exp[-1] = fn_exp[-1][len(path_prefix_strip):]
289
290 return ":".join(fn_exp)
250 291
251 def dump_siglist(self, sigfile):
252 with open(sigfile, "w") as f: 292 with open(sigfile, "w") as f:
253 tasks = [] 293 tasks = []
254 for taskitem in self.taskhash: 294 for taskitem in self.taskhash:
255 (fn, task) = taskitem.rsplit(":", 1) 295 (fn, task) = taskitem.rsplit(":", 1)
256 pn = self.lockedpnmap[fn] 296 pn = self.lockedpnmap[fn]
257 tasks.append((pn, task, fn, self.taskhash[taskitem])) 297 tasks.append((pn, task, strip_fn(fn), self.taskhash[taskitem]))
258 for (pn, task, fn, taskhash) in sorted(tasks): 298 for (pn, task, fn, taskhash) in sorted(tasks):
259 f.write('%s:%s %s %s\n' % (pn, task, fn, taskhash)) 299 f.write('%s:%s %s %s\n' % (pn, task, fn, taskhash))
260 300
@@ -262,6 +302,15 @@ class SignatureGeneratorOEBasicHashMixIn(object):
262 warn_msgs = [] 302 warn_msgs = []
263 error_msgs = [] 303 error_msgs = []
264 sstate_missing_msgs = [] 304 sstate_missing_msgs = []
305 info_msgs = None
306
307 if self.lockedsigs:
308 if len(self.lockedsigs) > 10:
309 self.lockedsigs_msgs = "There are %s recipes with locked tasks (%s task(s) have non matching signature)" % (len(self.lockedsigs), self.mismatch_number)
310 else:
311 self.lockedsigs_msgs = "The following recipes have locked tasks:"
312 for pn in self.lockedsigs:
313 self.lockedsigs_msgs += " %s" % (pn)
265 314
266 for tid in sq_data['hash']: 315 for tid in sq_data['hash']:
267 if tid not in found: 316 if tid not in found:
@@ -274,7 +323,9 @@ class SignatureGeneratorOEBasicHashMixIn(object):
274 % (pn, taskname, sq_data['hash'][tid])) 323 % (pn, taskname, sq_data['hash'][tid]))
275 324
276 checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK") 325 checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK")
277 if checklevel == 'warn': 326 if checklevel == 'info':
327 info_msgs = self.lockedsigs_msgs
328 if checklevel == 'warn' or checklevel == 'info':
278 warn_msgs += self.mismatch_msgs 329 warn_msgs += self.mismatch_msgs
279 elif checklevel == 'error': 330 elif checklevel == 'error':
280 error_msgs += self.mismatch_msgs 331 error_msgs += self.mismatch_msgs
@@ -285,6 +336,8 @@ class SignatureGeneratorOEBasicHashMixIn(object):
285 elif checklevel == 'error': 336 elif checklevel == 'error':
286 error_msgs += sstate_missing_msgs 337 error_msgs += sstate_missing_msgs
287 338
339 if info_msgs:
340 bb.note(info_msgs)
288 if warn_msgs: 341 if warn_msgs:
289 bb.warn("\n".join(warn_msgs)) 342 bb.warn("\n".join(warn_msgs))
290 if error_msgs: 343 if error_msgs:
@@ -304,9 +357,20 @@ class SignatureGeneratorOEEquivHash(SignatureGeneratorOEBasicHashMixIn, bb.sigge
304 self.method = data.getVar('SSTATE_HASHEQUIV_METHOD') 357 self.method = data.getVar('SSTATE_HASHEQUIV_METHOD')
305 if not self.method: 358 if not self.method:
306 bb.fatal("OEEquivHash requires SSTATE_HASHEQUIV_METHOD to be set") 359 bb.fatal("OEEquivHash requires SSTATE_HASHEQUIV_METHOD to be set")
360 self.username = data.getVar("BB_HASHSERVE_USERNAME")
361 self.password = data.getVar("BB_HASHSERVE_PASSWORD")
362 if not self.username or not self.password:
363 try:
364 n = netrc.netrc()
365 auth = n.authenticators(self.server)
366 if auth is not None:
367 self.username, _, self.password = auth
368 except FileNotFoundError:
369 pass
370 except netrc.NetrcParseError as e:
371 bb.warn("Error parsing %s:%s: %s" % (e.filename, str(e.lineno), e.msg))
307 372
308# Insert these classes into siggen's namespace so it can see and select them 373# Insert these classes into siggen's namespace so it can see and select them
309bb.siggen.SignatureGeneratorOEBasic = SignatureGeneratorOEBasic
310bb.siggen.SignatureGeneratorOEBasicHash = SignatureGeneratorOEBasicHash 374bb.siggen.SignatureGeneratorOEBasicHash = SignatureGeneratorOEBasicHash
311bb.siggen.SignatureGeneratorOEEquivHash = SignatureGeneratorOEEquivHash 375bb.siggen.SignatureGeneratorOEEquivHash = SignatureGeneratorOEEquivHash
312 376
@@ -320,14 +384,14 @@ def find_siginfo(pn, taskname, taskhashlist, d):
320 if not taskname: 384 if not taskname:
321 # We have to derive pn and taskname 385 # We have to derive pn and taskname
322 key = pn 386 key = pn
323 splitit = key.split('.bb:') 387 if key.startswith("mc:"):
324 taskname = splitit[1] 388 # mc:<mc>:<pn>:<task>
325 pn = os.path.basename(splitit[0]).split('_')[0] 389 _, _, pn, taskname = key.split(':', 3)
326 if key.startswith('virtual:native:'): 390 else:
327 pn = pn + '-native' 391 # <pn>:<task>
392 pn, taskname = key.split(':', 1)
328 393
329 hashfiles = {} 394 hashfiles = {}
330 filedates = {}
331 395
332 def get_hashval(siginfo): 396 def get_hashval(siginfo):
333 if siginfo.endswith('.siginfo'): 397 if siginfo.endswith('.siginfo'):
@@ -335,6 +399,15 @@ def find_siginfo(pn, taskname, taskhashlist, d):
335 else: 399 else:
336 return siginfo.rpartition('.')[2] 400 return siginfo.rpartition('.')[2]
337 401
402 def get_time(fullpath):
403 # NFS can end up in a weird state where the file exists but has no stat info.
404 # If that happens, we assume it doesn't acutally exist and show a warning
405 try:
406 return os.stat(fullpath).st_mtime
407 except FileNotFoundError:
408 bb.warn("Could not obtain mtime for {}".format(fullpath))
409 return None
410
338 # First search in stamps dir 411 # First search in stamps dir
339 localdata = d.createCopy() 412 localdata = d.createCopy()
340 localdata.setVar('MULTIMACH_TARGET_SYS', '*') 413 localdata.setVar('MULTIMACH_TARGET_SYS', '*')
@@ -346,28 +419,32 @@ def find_siginfo(pn, taskname, taskhashlist, d):
346 if pn.startswith("gcc-source"): 419 if pn.startswith("gcc-source"):
347 # gcc-source shared workdir is a special case :( 420 # gcc-source shared workdir is a special case :(
348 stamp = localdata.expand("${STAMPS_DIR}/work-shared/gcc-${PV}-${PR}") 421 stamp = localdata.expand("${STAMPS_DIR}/work-shared/gcc-${PV}-${PR}")
422 elif pn.startswith("llvm-project-source"):
423 # llvm-project-source shared workdir is also a special case :*(
424 stamp = localdata.expand("${STAMPS_DIR}/work-shared/llvm-project-source-${PV}-${PR}")
349 425
350 filespec = '%s.%s.sigdata.*' % (stamp, taskname) 426 filespec = '%s.%s.sigdata.*' % (stamp, taskname)
351 foundall = False 427 foundall = False
352 import glob 428 import glob
429 bb.debug(1, "Calling glob.glob on {}".format(filespec))
353 for fullpath in glob.glob(filespec): 430 for fullpath in glob.glob(filespec):
354 match = False 431 match = False
355 if taskhashlist: 432 if taskhashlist:
356 for taskhash in taskhashlist: 433 for taskhash in taskhashlist:
357 if fullpath.endswith('.%s' % taskhash): 434 if fullpath.endswith('.%s' % taskhash):
358 hashfiles[taskhash] = fullpath 435 mtime = get_time(fullpath)
436 if mtime:
437 hashfiles[taskhash] = {'path':fullpath, 'sstate':False, 'time':mtime}
359 if len(hashfiles) == len(taskhashlist): 438 if len(hashfiles) == len(taskhashlist):
360 foundall = True 439 foundall = True
361 break 440 break
362 else: 441 else:
363 try:
364 filedates[fullpath] = os.stat(fullpath).st_mtime
365 except OSError:
366 continue
367 hashval = get_hashval(fullpath) 442 hashval = get_hashval(fullpath)
368 hashfiles[hashval] = fullpath 443 mtime = get_time(fullpath)
444 if mtime:
445 hashfiles[hashval] = {'path':fullpath, 'sstate':False, 'time':mtime}
369 446
370 if not taskhashlist or (len(filedates) < 2 and not foundall): 447 if not taskhashlist or (len(hashfiles) < 2 and not foundall):
371 # That didn't work, look in sstate-cache 448 # That didn't work, look in sstate-cache
372 hashes = taskhashlist or ['?' * 64] 449 hashes = taskhashlist or ['?' * 64]
373 localdata = bb.data.createCopy(d) 450 localdata = bb.data.createCopy(d)
@@ -376,35 +453,34 @@ def find_siginfo(pn, taskname, taskhashlist, d):
376 localdata.setVar('TARGET_VENDOR', '*') 453 localdata.setVar('TARGET_VENDOR', '*')
377 localdata.setVar('TARGET_OS', '*') 454 localdata.setVar('TARGET_OS', '*')
378 localdata.setVar('PN', pn) 455 localdata.setVar('PN', pn)
456 # gcc-source is a special case, same as with local stamps above
457 if pn.startswith("gcc-source"):
458 localdata.setVar('PN', "gcc")
379 localdata.setVar('PV', '*') 459 localdata.setVar('PV', '*')
380 localdata.setVar('PR', '*') 460 localdata.setVar('PR', '*')
381 localdata.setVar('BB_TASKHASH', hashval) 461 localdata.setVar('BB_TASKHASH', hashval)
462 localdata.setVar('SSTATE_CURRTASK', taskname[3:])
382 swspec = localdata.getVar('SSTATE_SWSPEC') 463 swspec = localdata.getVar('SSTATE_SWSPEC')
383 if taskname in ['do_fetch', 'do_unpack', 'do_patch', 'do_populate_lic', 'do_preconfigure'] and swspec: 464 if taskname in ['do_fetch', 'do_unpack', 'do_patch', 'do_populate_lic', 'do_preconfigure'] and swspec:
384 localdata.setVar('SSTATE_PKGSPEC', '${SSTATE_SWSPEC}') 465 localdata.setVar('SSTATE_PKGSPEC', '${SSTATE_SWSPEC}')
385 elif pn.endswith('-native') or "-cross-" in pn or "-crosssdk-" in pn: 466 elif pn.endswith('-native') or "-cross-" in pn or "-crosssdk-" in pn:
386 localdata.setVar('SSTATE_EXTRAPATH', "${NATIVELSBSTRING}/") 467 localdata.setVar('SSTATE_EXTRAPATH', "${NATIVELSBSTRING}/")
387 sstatename = taskname[3:] 468 filespec = '%s.siginfo' % localdata.getVar('SSTATE_PKG')
388 filespec = '%s_%s.*.siginfo' % (localdata.getVar('SSTATE_PKG'), sstatename)
389 469
470 bb.debug(1, "Calling glob.glob on {}".format(filespec))
390 matchedfiles = glob.glob(filespec) 471 matchedfiles = glob.glob(filespec)
391 for fullpath in matchedfiles: 472 for fullpath in matchedfiles:
392 actual_hashval = get_hashval(fullpath) 473 actual_hashval = get_hashval(fullpath)
393 if actual_hashval in hashfiles: 474 if actual_hashval in hashfiles:
394 continue 475 continue
395 hashfiles[hashval] = fullpath 476 mtime = get_time(fullpath)
396 if not taskhashlist: 477 if mtime:
397 try: 478 hashfiles[actual_hashval] = {'path':fullpath, 'sstate':True, 'time':mtime}
398 filedates[fullpath] = os.stat(fullpath).st_mtime
399 except:
400 continue
401 479
402 if taskhashlist: 480 return hashfiles
403 return hashfiles
404 else:
405 return filedates
406 481
407bb.siggen.find_siginfo = find_siginfo 482bb.siggen.find_siginfo = find_siginfo
483bb.siggen.find_siginfo_version = 2
408 484
409 485
410def sstate_get_manifest_filename(task, d): 486def sstate_get_manifest_filename(task, d):
@@ -418,6 +494,7 @@ def sstate_get_manifest_filename(task, d):
418 d2.setVar("SSTATE_MANMACH", extrainf) 494 d2.setVar("SSTATE_MANMACH", extrainf)
419 return (d2.expand("${SSTATE_MANFILEPREFIX}.%s" % task), d2) 495 return (d2.expand("${SSTATE_MANFILEPREFIX}.%s" % task), d2)
420 496
497@bb.parse.vardepsexclude("BBEXTENDCURR", "BBEXTENDVARIANT", "OVERRIDES", "PACKAGE_EXTRA_ARCHS")
421def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache): 498def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache):
422 d2 = d 499 d2 = d
423 variant = '' 500 variant = ''
@@ -440,7 +517,7 @@ def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache):
440 elif "-cross-canadian" in taskdata: 517 elif "-cross-canadian" in taskdata:
441 pkgarchs = ["${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}"] 518 pkgarchs = ["${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}"]
442 elif "-cross-" in taskdata: 519 elif "-cross-" in taskdata:
443 pkgarchs = ["${BUILD_ARCH}_${TARGET_ARCH}"] 520 pkgarchs = ["${BUILD_ARCH}"]
444 elif "-crosssdk" in taskdata: 521 elif "-crosssdk" in taskdata:
445 pkgarchs = ["${BUILD_ARCH}_${SDK_ARCH}_${SDK_OS}"] 522 pkgarchs = ["${BUILD_ARCH}_${SDK_ARCH}_${SDK_OS}"]
446 else: 523 else:
@@ -449,11 +526,15 @@ def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache):
449 pkgarchs.append('allarch') 526 pkgarchs.append('allarch')
450 pkgarchs.append('${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}') 527 pkgarchs.append('${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}')
451 528
529 searched_manifests = []
530
452 for pkgarch in pkgarchs: 531 for pkgarch in pkgarchs:
453 manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-%s-%s.%s" % (pkgarch, taskdata, taskname)) 532 manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-%s-%s.%s" % (pkgarch, taskdata, taskname))
454 if os.path.exists(manifest): 533 if os.path.exists(manifest):
455 return manifest, d2 534 return manifest, d2
456 bb.error("Manifest %s not found in %s (variant '%s')?" % (manifest, d2.expand(" ".join(pkgarchs)), variant)) 535 searched_manifests.append(manifest)
536 bb.fatal("The sstate manifest for task '%s:%s' (multilib variant '%s') could not be found.\nThe pkgarchs considered were: %s.\nBut none of these manifests exists:\n %s"
537 % (taskdata, taskname, variant, d2.expand(", ".join(pkgarchs)),"\n ".join(searched_manifests)))
457 return None, d2 538 return None, d2
458 539
459def OEOuthashBasic(path, sigfile, task, d): 540def OEOuthashBasic(path, sigfile, task, d):
@@ -467,6 +548,8 @@ def OEOuthashBasic(path, sigfile, task, d):
467 import stat 548 import stat
468 import pwd 549 import pwd
469 import grp 550 import grp
551 import re
552 import fnmatch
470 553
471 def update_hash(s): 554 def update_hash(s):
472 s = s.encode('utf-8') 555 s = s.encode('utf-8')
@@ -476,20 +559,38 @@ def OEOuthashBasic(path, sigfile, task, d):
476 559
477 h = hashlib.sha256() 560 h = hashlib.sha256()
478 prev_dir = os.getcwd() 561 prev_dir = os.getcwd()
562 corebase = d.getVar("COREBASE")
563 tmpdir = d.getVar("TMPDIR")
479 include_owners = os.environ.get('PSEUDO_DISABLED') == '0' 564 include_owners = os.environ.get('PSEUDO_DISABLED') == '0'
480 if "package_write_" in task or task == "package_qa": 565 if "package_write_" in task or task == "package_qa":
481 include_owners = False 566 include_owners = False
482 include_timestamps = False 567 include_timestamps = False
568 include_root = True
483 if task == "package": 569 if task == "package":
484 include_timestamps = d.getVar('BUILD_REPRODUCIBLE_BINARIES') == '1' 570 include_timestamps = True
485 extra_content = d.getVar('HASHEQUIV_HASH_VERSION') 571 include_root = False
572 source_date_epoch = float(d.getVar("SOURCE_DATE_EPOCH"))
573 hash_version = d.getVar('HASHEQUIV_HASH_VERSION')
574 extra_sigdata = d.getVar("HASHEQUIV_EXTRA_SIGDATA")
575
576 filemaps = {}
577 for m in (d.getVar('SSTATE_HASHEQUIV_FILEMAP') or '').split():
578 entry = m.split(":")
579 if len(entry) != 3 or entry[0] != task:
580 continue
581 filemaps.setdefault(entry[1], [])
582 filemaps[entry[1]].append(entry[2])
486 583
487 try: 584 try:
488 os.chdir(path) 585 os.chdir(path)
586 basepath = os.path.normpath(path)
489 587
490 update_hash("OEOuthashBasic\n") 588 update_hash("OEOuthashBasic\n")
491 if extra_content: 589 if hash_version:
492 update_hash(extra_content + "\n") 590 update_hash(hash_version + "\n")
591
592 if extra_sigdata:
593 update_hash(extra_sigdata + "\n")
493 594
494 # It is only currently useful to get equivalent hashes for things that 595 # It is only currently useful to get equivalent hashes for things that
495 # can be restored from sstate. Since the sstate object is named using 596 # can be restored from sstate. Since the sstate object is named using
@@ -534,32 +635,37 @@ def OEOuthashBasic(path, sigfile, task, d):
534 else: 635 else:
535 add_perm(stat.S_IXUSR, 'x') 636 add_perm(stat.S_IXUSR, 'x')
536 637
537 add_perm(stat.S_IRGRP, 'r') 638 if include_owners:
538 add_perm(stat.S_IWGRP, 'w') 639 # Group/other permissions are only relevant in pseudo context
539 if stat.S_ISGID & s.st_mode: 640 add_perm(stat.S_IRGRP, 'r')
540 add_perm(stat.S_IXGRP, 's', 'S') 641 add_perm(stat.S_IWGRP, 'w')
541 else: 642 if stat.S_ISGID & s.st_mode:
542 add_perm(stat.S_IXGRP, 'x') 643 add_perm(stat.S_IXGRP, 's', 'S')
644 else:
645 add_perm(stat.S_IXGRP, 'x')
543 646
544 add_perm(stat.S_IROTH, 'r') 647 add_perm(stat.S_IROTH, 'r')
545 add_perm(stat.S_IWOTH, 'w') 648 add_perm(stat.S_IWOTH, 'w')
546 if stat.S_ISVTX & s.st_mode: 649 if stat.S_ISVTX & s.st_mode:
547 update_hash('t') 650 update_hash('t')
548 else: 651 else:
549 add_perm(stat.S_IXOTH, 'x') 652 add_perm(stat.S_IXOTH, 'x')
550 653
551 if include_owners:
552 try: 654 try:
553 update_hash(" %10s" % pwd.getpwuid(s.st_uid).pw_name) 655 update_hash(" %10s" % pwd.getpwuid(s.st_uid).pw_name)
554 update_hash(" %10s" % grp.getgrgid(s.st_gid).gr_name) 656 update_hash(" %10s" % grp.getgrgid(s.st_gid).gr_name)
555 except KeyError as e: 657 except KeyError as e:
556 bb.warn("KeyError in %s" % path)
557 msg = ("KeyError: %s\nPath %s is owned by uid %d, gid %d, which doesn't match " 658 msg = ("KeyError: %s\nPath %s is owned by uid %d, gid %d, which doesn't match "
558 "any user/group on target. This may be due to host contamination." % (e, path, s.st_uid, s.st_gid)) 659 "any user/group on target. This may be due to host contamination." %
660 (e, os.path.abspath(path), s.st_uid, s.st_gid))
559 raise Exception(msg).with_traceback(e.__traceback__) 661 raise Exception(msg).with_traceback(e.__traceback__)
560 662
561 if include_timestamps: 663 if include_timestamps:
562 update_hash(" %10d" % s.st_mtime) 664 # Need to clamp to SOURCE_DATE_EPOCH
665 if s.st_mtime > source_date_epoch:
666 update_hash(" %10d" % source_date_epoch)
667 else:
668 update_hash(" %10d" % s.st_mtime)
563 669
564 update_hash(" ") 670 update_hash(" ")
565 if stat.S_ISBLK(s.st_mode) or stat.S_ISCHR(s.st_mode): 671 if stat.S_ISBLK(s.st_mode) or stat.S_ISCHR(s.st_mode):
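The SOURCE_DATE_EPOCH clamp above is equivalent to hashing min(st_mtime, source_date_epoch), so files touched after the epoch cutoff still hash identically across builds:

    # Equivalent one-liner for the clamp (same formatting as update_hash above):
    update_hash(" %10d" % min(s.st_mtime, source_date_epoch))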
@@ -567,8 +673,13 @@ def OEOuthashBasic(path, sigfile, task, d):
567 else: 673 else:
568 update_hash(" " * 9) 674 update_hash(" " * 9)
569 675
676 filterfile = False
677 for entry in filemaps:
678 if fnmatch.fnmatch(path, entry):
679 filterfile = True
680
570 update_hash(" ") 681 update_hash(" ")
571 if stat.S_ISREG(s.st_mode): 682 if stat.S_ISREG(s.st_mode) and not filterfile:
572 update_hash("%10d" % s.st_size) 683 update_hash("%10d" % s.st_size)
573 else: 684 else:
574 update_hash(" " * 10) 685 update_hash(" " * 10)
@@ -577,9 +688,24 @@ def OEOuthashBasic(path, sigfile, task, d):
577 fh = hashlib.sha256() 688 fh = hashlib.sha256()
578 if stat.S_ISREG(s.st_mode): 689 if stat.S_ISREG(s.st_mode):
579 # Hash file contents 690 # Hash file contents
580 with open(path, 'rb') as d: 691 if filterfile:
581 for chunk in iter(lambda: d.read(4096), b""): 692 # Need to ignore paths in crossscripts and postinst-useradd files.
693 with open(path, 'rb') as d:
694 chunk = d.read()
695 chunk = chunk.replace(bytes(basepath, encoding='utf8'), b'')
696 for entry in filemaps:
697 if not fnmatch.fnmatch(path, entry):
698 continue
699 for r in filemaps[entry]:
700 if r.startswith("regex-"):
701 chunk = re.sub(bytes(r[6:], encoding='utf8'), b'', chunk)
702 else:
703 chunk = chunk.replace(bytes(r, encoding='utf8'), b'')
582 fh.update(chunk) 704 fh.update(chunk)
705 else:
706 with open(path, 'rb') as d:
707 for chunk in iter(lambda: d.read(4096), b""):
708 fh.update(chunk)
583 update_hash(fh.hexdigest()) 709 update_hash(fh.hexdigest())
584 else: 710 else:
585 update_hash(" " * len(fh.hexdigest())) 711 update_hash(" " * len(fh.hexdigest()))
@@ -592,11 +718,16 @@ def OEOuthashBasic(path, sigfile, task, d):
592 update_hash("\n") 718 update_hash("\n")
593 719
594 # Process this directory and all its child files 720 # Process this directory and all its child files
595 process(root) 721 if include_root or root != ".":
722 process(root)
596 for f in files: 723 for f in files:
597 if f == 'fixmepath': 724 if f == 'fixmepath':
598 continue 725 continue
599 process(os.path.join(root, f)) 726 process(os.path.join(root, f))
727
728 for dir in dirs:
729 if os.path.islink(os.path.join(root, dir)):
730 process(os.path.join(root, dir))
600 finally: 731 finally:
601 os.chdir(prev_dir) 732 os.chdir(prev_dir)
602 733
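SSTATE_HASHEQUIV_FILEMAP entries take the form task:file-glob:replacement, and a regex- prefix on the replacement selects re.sub over a literal byte replace. A standalone sketch of the same parse-and-filter idea, with the variable value and matching task invented for illustration:

    import fnmatch
    import re

    # Hypothetical FILEMAP value; the real defaults come from configuration.
    filemap_var = 'package:*/postinst-useradd*:regex-/tmp/\\S+'
    filemaps = {}
    for m in filemap_var.split():
        task, pattern, repl = m.split(':')
        if task != 'package':   # only entries for the current task apply
            continue
        filemaps.setdefault(pattern, []).append(repl)

    def filter_chunk(path, chunk):
        # Strip mapped content from file data before hashing, as in OEOuthashBasic
        for pattern, repls in filemaps.items():
            if not fnmatch.fnmatch(path, pattern):
                continue
            for r in repls:
                if r.startswith('regex-'):
                    chunk = re.sub(bytes(r[6:], encoding='utf8'), b'', chunk)
                else:
                    chunk = chunk.replace(bytes(r, encoding='utf8'), b'')
        return chunk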
diff --git a/meta/lib/oe/terminal.py b/meta/lib/oe/terminal.py
index 61c2687ef4..4412bc14c1 100644
--- a/meta/lib/oe/terminal.py
+++ b/meta/lib/oe/terminal.py
@@ -1,11 +1,12 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4import logging 6import logging
5import oe.classutils 7import oe.classutils
6import shlex 8import shlex
7from bb.process import Popen, ExecutionError 9from bb.process import Popen, ExecutionError
8from distutils.version import LooseVersion
9 10
10logger = logging.getLogger('BitBake.OE.Terminal') 11logger = logging.getLogger('BitBake.OE.Terminal')
11 12
@@ -31,9 +32,10 @@ class Registry(oe.classutils.ClassRegistry):
31 32
32class Terminal(Popen, metaclass=Registry): 33class Terminal(Popen, metaclass=Registry):
33 def __init__(self, sh_cmd, title=None, env=None, d=None): 34 def __init__(self, sh_cmd, title=None, env=None, d=None):
35 from subprocess import STDOUT
34 fmt_sh_cmd = self.format_command(sh_cmd, title) 36 fmt_sh_cmd = self.format_command(sh_cmd, title)
35 try: 37 try:
36 Popen.__init__(self, fmt_sh_cmd, env=env) 38 Popen.__init__(self, fmt_sh_cmd, env=env, stderr=STDOUT)
37 except OSError as exc: 39 except OSError as exc:
38 import errno 40 import errno
39 if exc.errno == errno.ENOENT: 41 if exc.errno == errno.ENOENT:
@@ -86,10 +88,10 @@ class Konsole(XTerminal):
86 def __init__(self, sh_cmd, title=None, env=None, d=None): 88 def __init__(self, sh_cmd, title=None, env=None, d=None):
87 # Check version 89 # Check version
88 vernum = check_terminal_version("konsole") 90 vernum = check_terminal_version("konsole")
89 if vernum and LooseVersion(vernum) < '2.0.0': 91 if vernum and bb.utils.vercmp_string_op(vernum, "2.0.0", "<"):
90 # Konsole from KDE 3.x 92 # Konsole from KDE 3.x
91 self.command = 'konsole -T "{title}" -e {command}' 93 self.command = 'konsole -T "{title}" -e {command}'
92 elif vernum and LooseVersion(vernum) < '16.08.1': 94 elif vernum and bb.utils.vercmp_string_op(vernum, "16.08.1", "<"):
93 # Konsole pre 16.08.01 Has nofork 95 # Konsole pre 16.08.01 Has nofork
94 self.command = 'konsole --nofork --workdir . -p tabtitle="{title}" -e {command}' 96 self.command = 'konsole --nofork --workdir . -p tabtitle="{title}" -e {command}'
95 XTerminal.__init__(self, sh_cmd, title, env, d) 97 XTerminal.__init__(self, sh_cmd, title, env, d)
@@ -102,6 +104,10 @@ class Rxvt(XTerminal):
102 command = 'rxvt -T "{title}" -e {command}' 104 command = 'rxvt -T "{title}" -e {command}'
103 priority = 1 105 priority = 1
104 106
107class URxvt(XTerminal):
108 command = 'urxvt -T "{title}" -e {command}'
109 priority = 1
110
105class Screen(Terminal): 111class Screen(Terminal):
106 command = 'screen -D -m -t "{title}" -S devshell {command}' 112 command = 'screen -D -m -t "{title}" -S devshell {command}'
107 113
@@ -163,7 +169,12 @@ class Tmux(Terminal):
163 # devshells, if it's already there, add a new window to it. 169 # devshells, if it's already there, add a new window to it.
164 window_name = 'devshell-%i' % os.getpid() 170 window_name = 'devshell-%i' % os.getpid()
165 171
166 self.command = 'tmux new -c "{{cwd}}" -d -s {0} -n {0} "{{command}}"'.format(window_name) 172 self.command = 'tmux new -c "{{cwd}}" -d -s {0} -n {0} "{{command}}"'
173 if not check_tmux_version('1.9'):
174 # `tmux new-session -c` was added in 1.9;
175 # older versions fail with that flag
176 self.command = 'tmux new -d -s {0} -n {0} "{{command}}"'
177 self.command = self.command.format(window_name)
167 Terminal.__init__(self, sh_cmd, title, env, d) 178 Terminal.__init__(self, sh_cmd, title, env, d)
168 179
169 attach_cmd = 'tmux att -t {0}'.format(window_name) 180 attach_cmd = 'tmux att -t {0}'.format(window_name)
@@ -253,13 +264,18 @@ def spawn(name, sh_cmd, title=None, env=None, d=None):
253 except OSError: 264 except OSError:
254 return 265 return
255 266
267def check_tmux_version(desired):
268 vernum = check_terminal_version("tmux")
269 if vernum and bb.utils.vercmp_string_op(vernum, desired, "<"):
270 return False
271 return vernum
272
256def check_tmux_pane_size(tmux): 273def check_tmux_pane_size(tmux):
257 import subprocess as sub 274 import subprocess as sub
258 # On older tmux versions (<1.9), return False. The reason 275
259 # is that there is no easy way to get the height of the active pane 276
260 # in the current window without nested formats (available from version 1.9) 277
261 vernum = check_terminal_version("tmux") 278 if not check_tmux_version('1.9'):
262 if vernum and LooseVersion(vernum) < '1.9':
263 return False 279 return False
264 try: 280 try:
265 p = sub.Popen('%s list-panes -F "#{?pane_active,#{pane_height},}"' % tmux, 281 p = sub.Popen('%s list-panes -F "#{?pane_active,#{pane_height},}"' % tmux,
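All LooseVersion comparisons are replaced with bb.utils.vercmp_string_op, which takes two version strings and an operator string. A quick illustration of the gating used by check_tmux_version, assuming only that behaviour:

    # bb.utils.vercmp_string_op('1.8', '1.9', '<') -> True  (too old)
    # bb.utils.vercmp_string_op('3.2', '1.9', '<') -> False (new enough)
    if not check_tmux_version('1.9'):
        # fall back to a command line without 'new -c', as in Tmux above
        pass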
diff --git a/meta/lib/oe/tune.py b/meta/lib/oe/tune.py
new file mode 100644
index 0000000000..7fda19430d
--- /dev/null
+++ b/meta/lib/oe/tune.py
@@ -0,0 +1,81 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7# riscv_isa_to_tune(isa)
8#
9# Automatically translate a RISC-V ISA string to TUNE_FEATURES
10#
11# Abbreviations, such as rv32g -> rv32imafd_zicsr_zifencei, are supported.
12#
13# Profiles, such as rva22u64, are NOT supported; you must use ISA strings.
14#
15def riscv_isa_to_tune(isa):
16 _isa = isa.lower()
17
18 feature = []
19 iter = 0
20
21 # rv or riscv
22 if _isa[iter:].startswith('rv'):
23 feature.append('rv')
24 iter = iter + 2
25 elif _isa[iter:].startswith('riscv'):
26 feature.append('rv')
27 iter = iter + 5
28 else:
29 # Not a RISC-V ISA!
30 return _isa
31
32 while (_isa[iter:]):
33 # Skip _ and whitespace
34 if _isa[iter] == '_' or _isa[iter].isspace():
35 iter = iter + 1
36 continue
37
38 # Length, just capture numbers here
39 if _isa[iter].isdigit():
40 iter_end = iter
41 while iter_end < len(_isa) and _isa[iter_end].isdigit():
42 iter_end = iter_end + 1
43
44 feature.append(_isa[iter:iter_end])
45 iter = iter_end
46 continue
47
48 # Typically i, e or g is next, followed by extensions.
49 # Extensions are single character, except for Z, Ss, Sh, Sm, Sv, and X
50
51 # If the extension starts with 'z', 's' or 'x', use the name until the next '_', whitespace or end
52 if _isa[iter] in ['z', 's', 'x']:
53 ext_type = _isa[iter]
54 iter_end = iter + 1
55
56 # Multi-character extension; these must have a _ before the next multi-character extension
57 # See 37.4 and 37.5:
58 # 37.4: Underscores "_" may be used to separate ISA extensions...
59 # 37.5: All multi-letter extensions ... must be separated from other multi-letter extensions by an underscore...
60 # Some extensions permit only alphabetic characters, while others allow alphanumeric characters
61 while iter_end < len(_isa) and _isa[iter_end] != "_" and not _isa[iter_end].isspace():
62 iter_end = iter_end + 1
63
64 feature.append(_isa[iter:iter_end])
65 iter = iter_end
66 continue
67
68 # 'g' is special: it's an abbreviation for imafd_zicsr_zifencei
69 # When expanding the abbreviation, any additional letters must appear before the _z* extensions
70 if _isa[iter] == 'g':
71 _isa = 'imafd' + _isa[iter+1:] + '_zicsr_zifencei'
72 iter = 0
73 continue
74
75 feature.append(_isa[iter])
76 iter = iter + 1
77 continue
78
79 # Eliminate duplicates, but preserve the order
80 feature = list(dict.fromkeys(feature))
81 return ' '.join(feature)
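Stepping through the function by hand: 'rv64gc' first captures 'rv' and '64', then the 'g' abbreviation rewrites the remainder to imafd plus _zicsr_zifencei before the per-extension loop resumes. The expected results below are derived directly from the code above:

    from oe.tune import riscv_isa_to_tune

    print(riscv_isa_to_tune('rv64gc'))
    # -> rv 64 i m a f d c zicsr zifencei
    print(riscv_isa_to_tune('rv32imac_zicsr'))
    # -> rv 32 i m a c zicsr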
diff --git a/meta/lib/oe/types.py b/meta/lib/oe/types.py
index bbbabafbf6..b929afb1f3 100644
--- a/meta/lib/oe/types.py
+++ b/meta/lib/oe/types.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
diff --git a/meta/lib/oe/useradd.py b/meta/lib/oe/useradd.py
index 8fc77568ff..54aa86feb5 100644
--- a/meta/lib/oe/useradd.py
+++ b/meta/lib/oe/useradd.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4import argparse 6import argparse
@@ -45,7 +47,6 @@ def build_useradd_parser():
45 parser.add_argument("-N", "--no-user-group", dest="user_group", help="do not create a group with the same name as the user", action="store_const", const=False) 47 parser.add_argument("-N", "--no-user-group", dest="user_group", help="do not create a group with the same name as the user", action="store_const", const=False)
46 parser.add_argument("-o", "--non-unique", help="allow to create users with duplicate (non-unique UID)", action="store_true") 48 parser.add_argument("-o", "--non-unique", help="allow to create users with duplicate (non-unique UID)", action="store_true")
47 parser.add_argument("-p", "--password", metavar="PASSWORD", help="encrypted password of the new account") 49 parser.add_argument("-p", "--password", metavar="PASSWORD", help="encrypted password of the new account")
48 parser.add_argument("-P", "--clear-password", metavar="CLEAR_PASSWORD", help="use this clear password for the new account")
49 parser.add_argument("-R", "--root", metavar="CHROOT_DIR", help="directory to chroot into") 50 parser.add_argument("-R", "--root", metavar="CHROOT_DIR", help="directory to chroot into")
50 parser.add_argument("-r", "--system", help="create a system account", action="store_true") 51 parser.add_argument("-r", "--system", help="create a system account", action="store_true")
51 parser.add_argument("-s", "--shell", metavar="SHELL", help="login shell of the new account") 52 parser.add_argument("-s", "--shell", metavar="SHELL", help="login shell of the new account")
@@ -63,7 +64,6 @@ def build_groupadd_parser():
63 parser.add_argument("-K", "--key", metavar="KEY=VALUE", help="override /etc/login.defs defaults") 64 parser.add_argument("-K", "--key", metavar="KEY=VALUE", help="override /etc/login.defs defaults")
64 parser.add_argument("-o", "--non-unique", help="allow to create groups with duplicate (non-unique) GID", action="store_true") 65 parser.add_argument("-o", "--non-unique", help="allow to create groups with duplicate (non-unique) GID", action="store_true")
65 parser.add_argument("-p", "--password", metavar="PASSWORD", help="use this encrypted password for the new group") 66 parser.add_argument("-p", "--password", metavar="PASSWORD", help="use this encrypted password for the new group")
66 parser.add_argument("-P", "--clear-password", metavar="CLEAR_PASSWORD", help="use this clear password for the new group")
67 parser.add_argument("-R", "--root", metavar="CHROOT_DIR", help="directory to chroot into") 67 parser.add_argument("-R", "--root", metavar="CHROOT_DIR", help="directory to chroot into")
68 parser.add_argument("-r", "--system", help="create a system account", action="store_true") 68 parser.add_argument("-r", "--system", help="create a system account", action="store_true")
69 parser.add_argument("GROUP", help="Group name of the new group") 69 parser.add_argument("GROUP", help="Group name of the new group")
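With -P/--clear-password gone from both parsers, clear-text passwords are rejected at parse time. A small illustrative use, assuming the useradd parser has a positional argument (mirroring GROUP above) and that unknown options raise the parser's usual error:

    import shlex
    from oe.useradd import build_useradd_parser

    parser = build_useradd_parser()
    args = parser.parse_args(shlex.split('-r -s /bin/false service-user'))
    print(args.system, args.shell)   # -> True /bin/false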
diff --git a/meta/lib/oe/utils.py b/meta/lib/oe/utils.py
index 9a2187e36f..779c5e593f 100644
--- a/meta/lib/oe/utils.py
+++ b/meta/lib/oe/utils.py
@@ -1,10 +1,15 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
5import subprocess 7import subprocess
6import multiprocessing 8import multiprocessing
7import traceback 9import traceback
10import errno
11
12import bb.parse
8 13
9def read_file(filename): 14def read_file(filename):
10 try: 15 try:
@@ -221,12 +226,12 @@ def packages_filter_out_system(d):
221 PN-dbg PN-doc PN-locale-eb-gb removed. 226 PN-dbg PN-doc PN-locale-eb-gb removed.
222 """ 227 """
223 pn = d.getVar('PN') 228 pn = d.getVar('PN')
224 blacklist = [pn + suffix for suffix in ('', '-dbg', '-dev', '-doc', '-locale', '-staticdev', '-src')] 229 pkgfilter = [pn + suffix for suffix in ('', '-dbg', '-dev', '-doc', '-locale', '-staticdev', '-src')]
225 localepkg = pn + "-locale-" 230 localepkg = pn + "-locale-"
226 pkgs = [] 231 pkgs = []
227 232
228 for pkg in d.getVar('PACKAGES').split(): 233 for pkg in d.getVar('PACKAGES').split():
229 if pkg not in blacklist and localepkg not in pkg: 234 if pkg not in pkgfilter and localepkg not in pkg:
230 pkgs.append(pkg) 235 pkgs.append(pkg)
231 return pkgs 236 return pkgs
232 237
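The rename from blacklist to pkgfilter is behaviour-preserving: PN and its standard suffixed packages are dropped, along with any PN-locale-* package. A worked example with invented values:

    pn = 'busybox'
    pkgfilter = [pn + s for s in ('', '-dbg', '-dev', '-doc', '-locale', '-staticdev', '-src')]
    packages = 'busybox busybox-dev busybox-locale-en-gb busybox-udhcpd'.split()
    print([p for p in packages
           if p not in pkgfilter and pn + '-locale-' not in p])
    # -> ['busybox-udhcpd']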
@@ -248,24 +253,32 @@ def trim_version(version, num_parts=2):
248 trimmed = ".".join(parts[:num_parts]) 253 trimmed = ".".join(parts[:num_parts])
249 return trimmed 254 return trimmed
250 255
251def cpu_count(at_least=1): 256def cpu_count(at_least=1, at_most=64):
252 cpus = len(os.sched_getaffinity(0)) 257 cpus = len(os.sched_getaffinity(0))
253 return max(cpus, at_least) 258 return max(min(cpus, at_most), at_least)
254 259
255def execute_pre_post_process(d, cmds): 260def execute_pre_post_process(d, cmds):
256 if cmds is None: 261 if cmds is None:
257 return 262 return
258 263
259 for cmd in cmds.strip().split(';'): 264 cmds = cmds.replace(";", " ")
260 cmd = cmd.strip() 265
261 if cmd != '': 266 for cmd in cmds.split():
262 bb.note("Executing %s ..." % cmd) 267 bb.note("Executing %s ..." % cmd)
263 bb.build.exec_func(cmd, d) 268 bb.build.exec_func(cmd, d)
269
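execute_pre_post_process now treats ';' purely as whitespace, so semicolon-separated and space-separated command lists behave identically. For example (function names invented):

    # Both values run do_foo then do_bar:
    # ROOTFS_POSTPROCESS_COMMAND = 'do_foo; do_bar'
    # ROOTFS_POSTPROCESS_COMMAND = 'do_foo do_bar'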
270@bb.parse.vardepsexclude("BB_NUMBER_THREADS")
271def get_bb_number_threads(d):
272 return int(d.getVar("BB_NUMBER_THREADS") or os.cpu_count() or 1)
273
274def multiprocess_launch(target, items, d, extraargs=None):
275 max_process = get_bb_number_threads(d)
276 return multiprocess_launch_mp(target, items, max_process, extraargs)
264 277
265# For each item in items, call the function 'target' with item as the first 278# For each item in items, call the function 'target' with item as the first
266# argument, extraargs as the other arguments and handle any exceptions in the 279# argument, extraargs as the other arguments and handle any exceptions in the
267# parent thread 280# parent thread
268def multiprocess_launch(target, items, d, extraargs=None): 281def multiprocess_launch_mp(target, items, max_process, extraargs=None):
269 282
270 class ProcessLaunch(multiprocessing.Process): 283 class ProcessLaunch(multiprocessing.Process):
271 def __init__(self, *args, **kwargs): 284 def __init__(self, *args, **kwargs):
@@ -300,14 +313,15 @@ def multiprocess_launch(target, items, d, extraargs=None):
300 self.update() 313 self.update()
301 return self._result 314 return self._result
302 315
303 max_process = int(d.getVar("BB_NUMBER_THREADS") or os.cpu_count() or 1)
304 launched = [] 316 launched = []
305 errors = [] 317 errors = []
306 results = [] 318 results = []
307 items = list(items) 319 items = list(items)
308 while (items and not errors) or launched: 320 while (items and not errors) or launched:
309 if not errors and items and len(launched) < max_process: 321 if not errors and items and len(launched) < max_process:
310 args = (items.pop(),) 322 args = items.pop()
323 if not type(args) is tuple:
324 args = (args,)
311 if extraargs is not None: 325 if extraargs is not None:
312 args = args + extraargs 326 args = args + extraargs
313 p = ProcessLaunch(target=target, args=args) 327 p = ProcessLaunch(target=target, args=args)
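Items may now be tuples, which are passed through as multiple positional arguments; any other item is wrapped in a 1-tuple as before. A minimal sketch against the new multiprocess_launch_mp entry point, assuming it returns the collected results like multiprocess_launch does:

    import oe.utils

    def scale(x, factor):
        return x * factor

    # Each tuple unpacks to (x, factor); 2 is the process limit.
    results = oe.utils.multiprocess_launch_mp(scale, [(1, 10), (2, 10)], 2)
    print(sorted(results))   # -> [10, 20]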
@@ -344,7 +358,29 @@ def squashspaces(string):
344 import re 358 import re
345 return re.sub(r"\s+", " ", string).strip() 359 return re.sub(r"\s+", " ", string).strip()
346 360
347def format_pkg_list(pkg_dict, ret_format=None): 361def rprovides_map(pkgdata_dir, pkg_dict):
362 # Map file -> pkg provider
363 rprov_map = {}
364
365 for pkg in pkg_dict:
366 path_to_pkgfile = os.path.join(pkgdata_dir, 'runtime-reverse', pkg)
367 if not os.path.isfile(path_to_pkgfile):
368 continue
369 with open(path_to_pkgfile) as f:
370 for line in f:
371 if line.startswith('RPROVIDES') or line.startswith('FILERPROVIDES'):
372 # List all components provided by pkg.
373 # Exclude version strings, i.e. those starting with (
374 provides = [x for x in line.split()[1:] if not x.startswith('(')]
375 for prov in provides:
376 if prov in rprov_map:
377 rprov_map[prov].append(pkg)
378 else:
379 rprov_map[prov] = [pkg]
380
381 return rprov_map
382
383def format_pkg_list(pkg_dict, ret_format=None, pkgdata_dir=None):
348 output = [] 384 output = []
349 385
350 if ret_format == "arch": 386 if ret_format == "arch":
@@ -357,9 +393,15 @@ def format_pkg_list(pkg_dict, ret_format=None):
357 for pkg in sorted(pkg_dict): 393 for pkg in sorted(pkg_dict):
358 output.append("%s %s %s" % (pkg, pkg_dict[pkg]["arch"], pkg_dict[pkg]["ver"])) 394 output.append("%s %s %s" % (pkg, pkg_dict[pkg]["arch"], pkg_dict[pkg]["ver"]))
359 elif ret_format == "deps": 395 elif ret_format == "deps":
396 rprov_map = rprovides_map(pkgdata_dir, pkg_dict)
360 for pkg in sorted(pkg_dict): 397 for pkg in sorted(pkg_dict):
361 for dep in pkg_dict[pkg]["deps"]: 398 for dep in pkg_dict[pkg]["deps"]:
362 output.append("%s|%s" % (pkg, dep)) 399 if dep in rprov_map:
400 # There could be multiple providers within the image
401 for pkg_provider in rprov_map[dep]:
402 output.append("%s|%s * %s [RPROVIDES]" % (pkg, pkg_provider, dep))
403 else:
404 output.append("%s|%s" % (pkg, dep))
363 else: 405 else:
364 for pkg in sorted(pkg_dict): 406 for pkg in sorted(pkg_dict):
365 output.append(pkg) 407 output.append(pkg)
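In the 'deps' format a dependency satisfied through RPROVIDES is now expanded to its actual provider, one line per provider, following the "%s|%s * %s [RPROVIDES]" format string above. Illustrative output (package names invented):

    app|libbar
    app|libfoo1 * libfoo [RPROVIDES]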
@@ -373,62 +415,31 @@ def format_pkg_list(pkg_dict, ret_format=None):
373 return output_str 415 return output_str
374 416
375 417
376# Helper function to get the host compiler version 418# Helper function to get the host gcc version
377# Do not assume the compiler is gcc 419def get_host_gcc_version(d, taskcontextonly=False):
378def get_host_compiler_version(d, taskcontextonly=False):
379 import re, subprocess 420 import re, subprocess
380 421
381 if taskcontextonly and d.getVar('BB_WORKERCONTEXT') != '1': 422 if taskcontextonly and d.getVar('BB_WORKERCONTEXT') != '1':
382 return 423 return
383 424
384 compiler = d.getVar("BUILD_CC")
385 # Get rid of ccache since it is not present when parsing.
386 if compiler.startswith('ccache '):
387 compiler = compiler[7:]
388 try: 425 try:
389 env = os.environ.copy() 426 env = os.environ.copy()
390 # datastore PATH does not contain session PATH as set by environment-setup-... 427 # datastore PATH does not contain session PATH as set by environment-setup-...
391 # this breaks the install-buildtools use-case 428 # this breaks the install-buildtools use-case
392 # env["PATH"] = d.getVar("PATH") 429 # env["PATH"] = d.getVar("PATH")
393 output = subprocess.check_output("%s --version" % compiler, \ 430 output = subprocess.check_output("gcc --version", \
394 shell=True, env=env, stderr=subprocess.STDOUT).decode("utf-8") 431 shell=True, env=env, stderr=subprocess.STDOUT).decode("utf-8")
395 except subprocess.CalledProcessError as e: 432 except subprocess.CalledProcessError as e:
396 bb.fatal("Error running %s --version: %s" % (compiler, e.output.decode("utf-8"))) 433 bb.fatal("Error running gcc --version: %s" % (e.output.decode("utf-8")))
397 434
398 match = re.match(r".* (\d+\.\d+)\.\d+.*", output.split('\n')[0]) 435 match = re.match(r".* (\d+\.\d+)\.\d+.*", output.split('\n')[0])
399 if not match: 436 if not match:
400 bb.fatal("Can't get compiler version from %s --version output" % compiler) 437 bb.fatal("Can't get compiler version from gcc --version output")
401 438
402 version = match.group(1) 439 version = match.group(1)
403 return compiler, version 440 return version
404
405
406def host_gcc_version(d, taskcontextonly=False):
407 import re, subprocess
408
409 if taskcontextonly and d.getVar('BB_WORKERCONTEXT') != '1':
410 return
411
412 compiler = d.getVar("BUILD_CC")
413 # Get rid of ccache since it is not present when parsing.
414 if compiler.startswith('ccache '):
415 compiler = compiler[7:]
416 try:
417 env = os.environ.copy()
418 env["PATH"] = d.getVar("PATH")
419 output = subprocess.check_output("%s --version" % compiler, \
420 shell=True, env=env, stderr=subprocess.STDOUT).decode("utf-8")
421 except subprocess.CalledProcessError as e:
422 bb.fatal("Error running %s --version: %s" % (compiler, e.output.decode("utf-8")))
423
424 match = re.match(r".* (\d+\.\d+)\.\d+.*", output.split('\n')[0])
425 if not match:
426 bb.fatal("Can't get compiler version from %s --version output" % compiler)
427
428 version = match.group(1)
429 return "-%s" % version if version in ("4.8", "4.9") else ""
430
431 441
442@bb.parse.vardepsexclude("DEFAULTTUNE_MULTILIB_ORIGINAL", "OVERRIDES")
432def get_multilib_datastore(variant, d): 443def get_multilib_datastore(variant, d):
433 localdata = bb.data.createCopy(d) 444 localdata = bb.data.createCopy(d)
434 if variant: 445 if variant:
@@ -445,94 +456,48 @@ def get_multilib_datastore(variant, d):
445 localdata.setVar("MLPREFIX", "") 456 localdata.setVar("MLPREFIX", "")
446 return localdata 457 return localdata
447 458
448#
449# Python 2.7 doesn't have threaded pools (just multiprocessing)
450# so implement a version here
451#
452
453from queue import Queue
454from threading import Thread
455
456class ThreadedWorker(Thread):
457 """Thread executing tasks from a given tasks queue"""
458 def __init__(self, tasks, worker_init, worker_end):
459 Thread.__init__(self)
460 self.tasks = tasks
461 self.daemon = True
462
463 self.worker_init = worker_init
464 self.worker_end = worker_end
465
466 def run(self):
467 from queue import Empty
468
469 if self.worker_init is not None:
470 self.worker_init(self)
471
472 while True:
473 try:
474 func, args, kargs = self.tasks.get(block=False)
475 except Empty:
476 if self.worker_end is not None:
477 self.worker_end(self)
478 break
479
480 try:
481 func(self, *args, **kargs)
482 except Exception as e:
483 print(e)
484 finally:
485 self.tasks.task_done()
486
487class ThreadedPool:
488 """Pool of threads consuming tasks from a queue"""
489 def __init__(self, num_workers, num_tasks, worker_init=None,
490 worker_end=None):
491 self.tasks = Queue(num_tasks)
492 self.workers = []
493
494 for _ in range(num_workers):
495 worker = ThreadedWorker(self.tasks, worker_init, worker_end)
496 self.workers.append(worker)
497
498 def start(self):
499 for worker in self.workers:
500 worker.start()
501
502 def add_task(self, func, *args, **kargs):
503 """Add a task to the queue"""
504 self.tasks.put((func, args, kargs))
505
506 def wait_completion(self):
507 """Wait for completion of all the tasks in the queue"""
508 self.tasks.join()
509 for worker in self.workers:
510 worker.join()
511
512def write_ld_so_conf(d):
513 # Some utils like prelink may not have the correct target library paths
514 # so write an ld.so.conf to help them
515 ldsoconf = d.expand("${STAGING_DIR_TARGET}${sysconfdir}/ld.so.conf")
516 if os.path.exists(ldsoconf):
517 bb.utils.remove(ldsoconf)
518 bb.utils.mkdirhier(os.path.dirname(ldsoconf))
519 with open(ldsoconf, "w") as f:
520 f.write(d.getVar("base_libdir") + '\n')
521 f.write(d.getVar("libdir") + '\n')
522
523class ImageQAFailed(Exception):
524 def __init__(self, description, name=None, logfile=None):
525 self.description = description
526 self.name = name
527 self.logfile=logfile
528
529 def __str__(self):
530 msg = 'Function failed: %s' % self.name
531 if self.description:
532 msg = msg + ' (%s)' % self.description
533
534 return msg
535
536def sh_quote(string): 459def sh_quote(string):
537 import shlex 460 import shlex
538 return shlex.quote(string) 461 return shlex.quote(string)
462
463def directory_size(root, blocksize=4096):
464 """
465 Calculate the size of the directory, taking into account hard links,
466 rounding up every size to multiples of the blocksize.
467 """
468 def roundup(size):
469 """
470 Round the size up to the nearest multiple of the block size.
471 """
472 import math
473 return math.ceil(size / blocksize) * blocksize
474
475 def getsize(filename):
476 """
477 Get the size of the filename, not following symlinks, taking into
478 account hard links.
479 """
480 stat = os.lstat(filename)
481 if stat.st_ino not in inodes:
482 inodes.add(stat.st_ino)
483 return stat.st_size
484 else:
485 return 0
486
487 inodes = set()
488 total = 0
489 for root, dirs, files in os.walk(root):
490 total += sum(roundup(getsize(os.path.join(root, name))) for name in files)
491 total += roundup(getsize(root))
492 return total
493
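Since every file and directory is rounded up to whole blocks, the result can exceed the raw byte total; a 1-byte file still accounts for one 4096-byte block, and hard-linked files are counted once by inode. Hypothetical usage:

    size = oe.utils.directory_size('/tmp/rootfs-work')   # path invented
    print('%d bytes = %d 4k blocks' % (size, size // 4096))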
494# Update the mtime of a file, skipping permission errors and read-only filesystems
495def touch(filename):
496 try:
497 os.utime(filename, None)
498 except PermissionError:
499 pass
500 except OSError as e:
501 # Handle read-only file systems gracefully
502 if e.errno != errno.EROFS:
503 raise e