Diffstat (limited to 'meta/lib/oe')
63 files changed, 0 insertions, 25222 deletions
diff --git a/meta/lib/oe/__init__.py b/meta/lib/oe/__init__.py
deleted file mode 100644
index 9e4134c483..0000000000
--- a/meta/lib/oe/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | from pkgutil import extend_path | ||
| 8 | __path__ = extend_path(__path__, __name__) | ||
| 9 | |||
| 10 | # Modules with visitor code need to go first, else anything depending on them won't be | ||
| 11 | # processed correctly (e.g. qa) | ||
| 12 | BBIMPORTS = ["qa", "data", "path", "utils", "types", "package", "packagedata", \ | ||
| 13 | "packagegroup", "sstatesig", "lsb", "cachedpath", "license", "qemu", \ | ||
| 14 | "reproducible", "rust", "buildcfg", "go", "spdx30_tasks", "spdx_common", \ | ||
| 15 | "cve_check", "tune", "classextend"] | ||
diff --git a/meta/lib/oe/bootfiles.py b/meta/lib/oe/bootfiles.py
deleted file mode 100644
index 7ee148c4e2..0000000000
--- a/meta/lib/oe/bootfiles.py
+++ /dev/null
@@ -1,57 +0,0 @@
| 1 | # | ||
| 2 | # SPDX-License-Identifier: MIT | ||
| 3 | # | ||
| 4 | # Copyright (C) 2024 Marcus Folkesson | ||
| 5 | # Author: Marcus Folkesson <marcus.folkesson@gmail.com> | ||
| 6 | # | ||
| 7 | # Utility functions handling boot files | ||
| 8 | # | ||
| 9 | # Look into deploy_dir and search for boot_files. | ||
| 10 | # Returns a list of tuples with (original filepath relative to | ||
| 11 | # deploy_dir, desired filepath after renaming) | ||
| 12 | # | ||
| 13 | # Heavily inspired by bootimg_partition.py | ||
| 14 | # | ||
| 15 | def get_boot_files(deploy_dir, boot_files): | ||
| 16 | import re | ||
| 17 | import os | ||
| 18 | from glob import glob | ||
| 19 | |||
| 20 | if boot_files is None: | ||
| 21 | return None | ||
| 22 | |||
| 23 | # list of tuples (src_name, dst_name) | ||
| 24 | deploy_files = [] | ||
| 25 | for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files): | ||
| 26 | if ';' in src_entry: | ||
| 27 | dst_entry = tuple(src_entry.split(';')) | ||
| 28 | if not dst_entry[0] or not dst_entry[1]: | ||
| 29 | raise ValueError('Malformed boot file entry: %s' % src_entry) | ||
| 30 | else: | ||
| 31 | dst_entry = (src_entry, src_entry) | ||
| 32 | |||
| 33 | deploy_files.append(dst_entry) | ||
| 34 | |||
| 35 | install_files = [] | ||
| 36 | for deploy_entry in deploy_files: | ||
| 37 | src, dst = deploy_entry | ||
| 38 | if '*' in src: | ||
| 39 | # by default install files under their basename | ||
| 40 | entry_name_fn = os.path.basename | ||
| 41 | if dst != src: | ||
| 42 | # unless a target name was given, then treat name | ||
| 43 | # as a directory and append a basename | ||
| 44 | entry_name_fn = lambda name: \ | ||
| 45 | os.path.join(dst, | ||
| 46 | os.path.basename(name)) | ||
| 47 | |||
| 48 | srcs = glob(os.path.join(deploy_dir, src)) | ||
| 49 | |||
| 50 | for entry in srcs: | ||
| 51 | src = os.path.relpath(entry, deploy_dir) | ||
| 52 | entry_dst_name = entry_name_fn(entry) | ||
| 53 | install_files.append((src, entry_dst_name)) | ||
| 54 | else: | ||
| 55 | install_files.append((src, dst)) | ||
| 56 | |||
| 57 | return install_files | ||
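The function above turns an IMAGE_BOOT_FILES-style string into (source, destination) pairs. A short, purely illustrative usage sketch (the deploy directory and file names below are invented):

    from oe.bootfiles import get_boot_files

    # 'zImage' keeps its name, 'u-boot.img;boot/u-boot.img' is renamed,
    # and '*.dtb' is expanded by glob() relative to the deploy directory.
    spec = "zImage u-boot.img;boot/u-boot.img *.dtb"
    for src, dst in get_boot_files("/tmp/deploy/images/qemuarm", spec) or []:
        print(src, "->", dst)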
diff --git a/meta/lib/oe/buildcfg.py b/meta/lib/oe/buildcfg.py
deleted file mode 100644
index 85b903fab0..0000000000
--- a/meta/lib/oe/buildcfg.py
+++ /dev/null
@@ -1,79 +0,0 @@
| 1 | |||
| 2 | import os | ||
| 3 | import subprocess | ||
| 4 | import bb.process | ||
| 5 | |||
| 6 | def detect_revision(d): | ||
| 7 | path = get_scmbasepath(d) | ||
| 8 | return get_metadata_git_revision(path) | ||
| 9 | |||
| 10 | def detect_branch(d): | ||
| 11 | path = get_scmbasepath(d) | ||
| 12 | return get_metadata_git_branch(path) | ||
| 13 | |||
| 14 | def get_scmbasepath(d): | ||
| 15 | return os.path.join(d.getVar('COREBASE'), 'meta') | ||
| 16 | |||
| 17 | def get_metadata_git_branch(path): | ||
| 18 | try: | ||
| 19 | rev, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', cwd=path) | ||
| 20 | except (bb.process.ExecutionError, bb.process.NotFoundError): | ||
| 21 | rev = '<unknown>' | ||
| 22 | return rev.strip() | ||
| 23 | |||
| 24 | def get_metadata_git_revision(path): | ||
| 25 | try: | ||
| 26 | rev, _ = bb.process.run('git rev-parse HEAD', cwd=path) | ||
| 27 | except (bb.process.ExecutionError, bb.process.NotFoundError): | ||
| 28 | rev = '<unknown>' | ||
| 29 | return rev.strip() | ||
| 30 | |||
| 31 | def get_metadata_git_toplevel(path): | ||
| 32 | try: | ||
| 33 | toplevel, _ = bb.process.run('git rev-parse --show-toplevel', cwd=path) | ||
| 34 | except (bb.process.ExecutionError, bb.process.NotFoundError): | ||
| 35 | return "" | ||
| 36 | return toplevel.strip() | ||
| 37 | |||
| 38 | def get_metadata_git_remotes(path): | ||
| 39 | try: | ||
| 40 | remotes_list, _ = bb.process.run('git remote', cwd=path) | ||
| 41 | remotes = remotes_list.split() | ||
| 42 | except (bb.process.ExecutionError, bb.process.NotFoundError): | ||
| 43 | remotes = [] | ||
| 44 | return remotes | ||
| 45 | |||
| 46 | def get_metadata_git_remote_url(path, remote): | ||
| 47 | try: | ||
| 48 | uri, _ = bb.process.run('git remote get-url {remote}'.format(remote=remote), cwd=path) | ||
| 49 | except (bb.process.ExecutionError, bb.process.NotFoundError): | ||
| 50 | return "" | ||
| 51 | return uri.strip() | ||
| 52 | |||
| 53 | def get_metadata_git_describe(path): | ||
| 54 | try: | ||
| 55 | describe, _ = bb.process.run('git describe --tags --dirty', cwd=path) | ||
| 56 | except (bb.process.ExecutionError, bb.process.NotFoundError): | ||
| 57 | return "" | ||
| 58 | return describe.strip() | ||
| 59 | |||
| 60 | def is_layer_modified(path): | ||
| 61 | try: | ||
| 62 | subprocess.check_output("""cd %s; export PSEUDO_UNLOAD=1; set -e; | ||
| 63 | git diff --quiet --no-ext-diff | ||
| 64 | git diff --quiet --no-ext-diff --cached""" % path, | ||
| 65 | shell=True, | ||
| 66 | stderr=subprocess.STDOUT) | ||
| 67 | return "" | ||
| 68 | except subprocess.CalledProcessError as ex: | ||
| 69 | # Silently treat errors as "modified", without checking for the | ||
| 70 | # (expected) return code 1 in a modified git repo. For example, we get | ||
| 71 | # output and a 129 return code when a layer isn't a git repo at all. | ||
| 72 | return " -- modified" | ||
| 73 | |||
| 74 | def get_layer_revisions(d): | ||
| 75 | layers = (d.getVar("BBLAYERS") or "").split() | ||
| 76 | revisions = [] | ||
| 77 | for i in layers: | ||
| 78 | revisions.append((i, os.path.basename(i), get_metadata_git_branch(i).strip(), get_metadata_git_revision(i), is_layer_modified(i))) | ||
| 79 | return revisions | ||
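buildcfg.py shells out to git to describe the metadata checkout and each configured layer. As an illustrative sketch only, to be run from a context that has a BitBake datastore 'd', the helpers combine roughly like this:

    import oe.buildcfg

    def print_build_config(d):
        # e.g. "meta-example  my-branch:0123abcd... -- modified"
        for path, name, branch, rev, modified in oe.buildcfg.get_layer_revisions(d):
            print("%-20s %s:%s%s" % (name, branch, rev, modified))
        print("metadata revision:", oe.buildcfg.detect_revision(d))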
diff --git a/meta/lib/oe/buildhistory_analysis.py b/meta/lib/oe/buildhistory_analysis.py
deleted file mode 100644
index 4edad01580..0000000000
--- a/meta/lib/oe/buildhistory_analysis.py
+++ /dev/null
@@ -1,723 +0,0 @@
| 1 | # Report significant differences in the buildhistory repository since a specific revision | ||
| 2 | # | ||
| 3 | # Copyright (C) 2012-2013, 2016-2017 Intel Corporation | ||
| 4 | # Author: Paul Eggleton <paul.eggleton@linux.intel.com> | ||
| 5 | # | ||
| 6 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 7 | # | ||
| 8 | # Note: requires GitPython 0.3.1+ | ||
| 9 | # | ||
| 10 | # You can use this from the command line by running scripts/buildhistory-diff | ||
| 11 | # | ||
| 12 | |||
| 13 | import sys | ||
| 14 | import os.path | ||
| 15 | import difflib | ||
| 16 | import git | ||
| 17 | import re | ||
| 18 | import shlex | ||
| 19 | import hashlib | ||
| 20 | import collections | ||
| 21 | import bb.utils | ||
| 22 | import bb.tinfoil | ||
| 23 | |||
| 24 | |||
| 25 | # How to display fields | ||
| 26 | list_fields = ['DEPENDS', 'RPROVIDES', 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RREPLACES', 'RCONFLICTS', 'FILES', 'FILELIST', 'USER_CLASSES', 'IMAGE_CLASSES', 'IMAGE_FEATURES', 'IMAGE_LINGUAS', 'IMAGE_INSTALL', 'BAD_RECOMMENDATIONS', 'PACKAGE_EXCLUDE'] | ||
| 27 | list_order_fields = ['PACKAGES'] | ||
| 28 | defaultval_map = {'PKG': 'PKG', 'PKGE': 'PE', 'PKGV': 'PV', 'PKGR': 'PR'} | ||
| 29 | numeric_fields = ['PKGSIZE', 'IMAGESIZE'] | ||
| 30 | # Fields to monitor | ||
| 31 | monitor_fields = ['RPROVIDES', 'RDEPENDS', 'RRECOMMENDS', 'RREPLACES', 'RCONFLICTS', 'PACKAGES', 'FILELIST', 'PKGSIZE', 'IMAGESIZE', 'PKG'] | ||
| 32 | ver_monitor_fields = ['PKGE', 'PKGV', 'PKGR'] | ||
| 33 | # Percentage change to alert for numeric fields | ||
| 34 | monitor_numeric_threshold = 10 | ||
| 35 | # Image files to monitor (note that image-info.txt is handled separately) | ||
| 36 | img_monitor_files = ['installed-package-names.txt', 'files-in-image.txt'] | ||
| 37 | |||
| 38 | colours = { | ||
| 39 | 'colour_default': '', | ||
| 40 | 'colour_add': '', | ||
| 41 | 'colour_remove': '', | ||
| 42 | } | ||
| 43 | |||
| 44 | def init_colours(use_colours): | ||
| 45 | global colours | ||
| 46 | if use_colours: | ||
| 47 | colours = { | ||
| 48 | 'colour_default': '\033[0m', | ||
| 49 | 'colour_add': '\033[1;32m', | ||
| 50 | 'colour_remove': '\033[1;31m', | ||
| 51 | } | ||
| 52 | else: | ||
| 53 | colours = { | ||
| 54 | 'colour_default': '', | ||
| 55 | 'colour_add': '', | ||
| 56 | 'colour_remove': '', | ||
| 57 | } | ||
| 58 | |||
| 59 | class ChangeRecord: | ||
| 60 | def __init__(self, path, fieldname, oldvalue, newvalue, monitored): | ||
| 61 | self.path = path | ||
| 62 | self.fieldname = fieldname | ||
| 63 | self.oldvalue = oldvalue | ||
| 64 | self.newvalue = newvalue | ||
| 65 | self.monitored = monitored | ||
| 66 | self.filechanges = None | ||
| 67 | |||
| 68 | def __str__(self): | ||
| 69 | return self._str_internal(True) | ||
| 70 | |||
| 71 | def _str_internal(self, outer): | ||
| 72 | if outer: | ||
| 73 | if '/image-files/' in self.path: | ||
| 74 | prefix = '%s: ' % self.path.split('/image-files/')[0] | ||
| 75 | else: | ||
| 76 | prefix = '%s: ' % self.path | ||
| 77 | else: | ||
| 78 | prefix = '' | ||
| 79 | |||
| 80 | def pkglist_combine(depver): | ||
| 81 | pkglist = [] | ||
| 82 | for k,v in depver.items(): | ||
| 83 | if v: | ||
| 84 | pkglist.append("%s (%s)" % (k,v)) | ||
| 85 | else: | ||
| 86 | pkglist.append(k) | ||
| 87 | return pkglist | ||
| 88 | |||
| 89 | def detect_renamed_dirs(aitems, bitems): | ||
| 90 | adirs = set(map(os.path.dirname, aitems)) | ||
| 91 | bdirs = set(map(os.path.dirname, bitems)) | ||
| 92 | files_ab = [(name, sorted(os.path.basename(item) for item in aitems if os.path.dirname(item) == name)) \ | ||
| 93 | for name in adirs - bdirs] | ||
| 94 | files_ba = [(name, sorted(os.path.basename(item) for item in bitems if os.path.dirname(item) == name)) \ | ||
| 95 | for name in bdirs - adirs] | ||
| 96 | renamed_dirs = [] | ||
| 97 | for dir1, files1 in files_ab: | ||
| 98 | rename = False | ||
| 99 | for dir2, files2 in files_ba: | ||
| 100 | if files1 == files2 and not rename: | ||
| 101 | renamed_dirs.append((dir1,dir2)) | ||
| 102 | # Make sure that we don't use this (dir, files) pair again. | ||
| 103 | files_ba.remove((dir2,files2)) | ||
| 104 | # If a dir has already been found to have a rename, stop and go no further. | ||
| 105 | rename = True | ||
| 106 | |||
| 107 | # remove files that belong to renamed dirs from aitems and bitems | ||
| 108 | for dir1, dir2 in renamed_dirs: | ||
| 109 | aitems = [item for item in aitems if os.path.dirname(item) not in (dir1, dir2)] | ||
| 110 | bitems = [item for item in bitems if os.path.dirname(item) not in (dir1, dir2)] | ||
| 111 | return renamed_dirs, aitems, bitems | ||
| 112 | |||
| 113 | if self.fieldname in list_fields or self.fieldname in list_order_fields: | ||
| 114 | renamed_dirs = [] | ||
| 115 | changed_order = False | ||
| 116 | if self.fieldname in ['RPROVIDES', 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RREPLACES', 'RCONFLICTS']: | ||
| 117 | (depvera, depverb) = compare_pkg_lists(self.oldvalue, self.newvalue) | ||
| 118 | aitems = pkglist_combine(depvera) | ||
| 119 | bitems = pkglist_combine(depverb) | ||
| 120 | else: | ||
| 121 | if self.fieldname == 'FILELIST': | ||
| 122 | aitems = shlex.split(self.oldvalue) | ||
| 123 | bitems = shlex.split(self.newvalue) | ||
| 124 | renamed_dirs, aitems, bitems = detect_renamed_dirs(aitems, bitems) | ||
| 125 | else: | ||
| 126 | aitems = self.oldvalue.split() | ||
| 127 | bitems = self.newvalue.split() | ||
| 128 | |||
| 129 | removed = list(set(aitems) - set(bitems)) | ||
| 130 | added = list(set(bitems) - set(aitems)) | ||
| 131 | |||
| 132 | if not removed and not added and self.fieldname in ['RPROVIDES', 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RREPLACES', 'RCONFLICTS']: | ||
| 133 | depvera = bb.utils.explode_dep_versions2(self.oldvalue, sort=False) | ||
| 134 | depverb = bb.utils.explode_dep_versions2(self.newvalue, sort=False) | ||
| 135 | for i, j in zip(depvera.items(), depverb.items()): | ||
| 136 | if i[0] != j[0]: | ||
| 137 | changed_order = True | ||
| 138 | break | ||
| 139 | |||
| 140 | lines = [] | ||
| 141 | if renamed_dirs: | ||
| 142 | for dfrom, dto in renamed_dirs: | ||
| 143 | lines.append('directory renamed {colour_remove}{}{colour_default} -> {colour_add}{}{colour_default}'.format(dfrom, dto, **colours)) | ||
| 144 | if removed or added: | ||
| 145 | if removed and not bitems: | ||
| 146 | lines.append('removed all items "{colour_remove}{}{colour_default}"'.format(' '.join(removed), **colours)) | ||
| 147 | else: | ||
| 148 | if removed: | ||
| 149 | lines.append('removed "{colour_remove}{value}{colour_default}"'.format(value=' '.join(removed), **colours)) | ||
| 150 | if added: | ||
| 151 | lines.append('added "{colour_add}{value}{colour_default}"'.format(value=' '.join(added), **colours)) | ||
| 152 | else: | ||
| 153 | lines.append('changed order') | ||
| 154 | |||
| 155 | if not (removed or added or changed_order): | ||
| 156 | out = '' | ||
| 157 | else: | ||
| 158 | out = '%s: %s' % (self.fieldname, ', '.join(lines)) | ||
| 159 | |||
| 160 | elif self.fieldname in numeric_fields: | ||
| 161 | aval = int(self.oldvalue or 0) | ||
| 162 | bval = int(self.newvalue or 0) | ||
| 163 | if aval != 0: | ||
| 164 | percentchg = ((bval - aval) / float(aval)) * 100 | ||
| 165 | else: | ||
| 166 | percentchg = 100 | ||
| 167 | out = '{} changed from {colour_remove}{}{colour_default} to {colour_add}{}{colour_default} ({}{:.0f}%)'.format(self.fieldname, self.oldvalue or "''", self.newvalue or "''", '+' if percentchg > 0 else '', percentchg, **colours) | ||
| 168 | elif self.fieldname in defaultval_map: | ||
| 169 | out = '{} changed from {colour_remove}{}{colour_default} to {colour_add}{}{colour_default}'.format(self.fieldname, self.oldvalue, self.newvalue, **colours) | ||
| 170 | if self.fieldname == 'PKG' and '[default]' in self.newvalue: | ||
| 171 | out += ' - may indicate debian renaming failure' | ||
| 172 | elif self.fieldname in ['pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm']: | ||
| 173 | if self.oldvalue and self.newvalue: | ||
| 174 | out = '%s changed:\n ' % self.fieldname | ||
| 175 | elif self.newvalue: | ||
| 176 | out = '%s added:\n ' % self.fieldname | ||
| 177 | elif self.oldvalue: | ||
| 178 | out = '%s cleared:\n ' % self.fieldname | ||
| 179 | alines = self.oldvalue.splitlines() | ||
| 180 | blines = self.newvalue.splitlines() | ||
| 181 | diff = difflib.unified_diff(alines, blines, self.fieldname, self.fieldname, lineterm='') | ||
| 182 | out += '\n '.join(list(diff)[2:]) | ||
| 183 | out += '\n --' | ||
| 184 | elif self.fieldname in img_monitor_files or '/image-files/' in self.path or self.fieldname == "sysroot": | ||
| 185 | if self.filechanges or (self.oldvalue and self.newvalue): | ||
| 186 | fieldname = self.fieldname | ||
| 187 | if '/image-files/' in self.path: | ||
| 188 | fieldname = os.path.join('/' + self.path.split('/image-files/')[1], self.fieldname) | ||
| 189 | out = 'Changes to %s:\n ' % fieldname | ||
| 190 | else: | ||
| 191 | if outer: | ||
| 192 | prefix = 'Changes to %s ' % self.path | ||
| 193 | out = '(%s):\n ' % self.fieldname | ||
| 194 | if self.filechanges: | ||
| 195 | out += '\n '.join(['%s' % i for i in self.filechanges]) | ||
| 196 | else: | ||
| 197 | alines = self.oldvalue.splitlines() | ||
| 198 | blines = self.newvalue.splitlines() | ||
| 199 | diff = difflib.unified_diff(alines, blines, fieldname, fieldname, lineterm='') | ||
| 200 | out += '\n '.join(list(diff)) | ||
| 201 | out += '\n --' | ||
| 202 | else: | ||
| 203 | out = '' | ||
| 204 | else: | ||
| 205 | out = '{} changed from "{colour_remove}{}{colour_default}" to "{colour_add}{}{colour_default}"'.format(self.fieldname, self.oldvalue, self.newvalue, **colours) | ||
| 206 | |||
| 207 | return '%s%s' % (prefix, out) if out else '' | ||
| 208 | |||
| 209 | class FileChange: | ||
| 210 | changetype_add = 'A' | ||
| 211 | changetype_remove = 'R' | ||
| 212 | changetype_type = 'T' | ||
| 213 | changetype_perms = 'P' | ||
| 214 | changetype_ownergroup = 'O' | ||
| 215 | changetype_link = 'L' | ||
| 216 | changetype_move = 'M' | ||
| 217 | |||
| 218 | def __init__(self, path, changetype, oldvalue = None, newvalue = None): | ||
| 219 | self.path = path | ||
| 220 | self.changetype = changetype | ||
| 221 | self.oldvalue = oldvalue | ||
| 222 | self.newvalue = newvalue | ||
| 223 | |||
| 224 | def _ftype_str(self, ftype): | ||
| 225 | if ftype == '-': | ||
| 226 | return 'file' | ||
| 227 | elif ftype == 'd': | ||
| 228 | return 'directory' | ||
| 229 | elif ftype == 'l': | ||
| 230 | return 'symlink' | ||
| 231 | elif ftype == 'c': | ||
| 232 | return 'char device' | ||
| 233 | elif ftype == 'b': | ||
| 234 | return 'block device' | ||
| 235 | elif ftype == 'p': | ||
| 236 | return 'fifo' | ||
| 237 | elif ftype == 's': | ||
| 238 | return 'socket' | ||
| 239 | else: | ||
| 240 | return 'unknown (%s)' % ftype | ||
| 241 | |||
| 242 | def __str__(self): | ||
| 243 | if self.changetype == self.changetype_add: | ||
| 244 | return '%s was added' % self.path | ||
| 245 | elif self.changetype == self.changetype_remove: | ||
| 246 | return '%s was removed' % self.path | ||
| 247 | elif self.changetype == self.changetype_type: | ||
| 248 | return '%s changed type from %s to %s' % (self.path, self._ftype_str(self.oldvalue), self._ftype_str(self.newvalue)) | ||
| 249 | elif self.changetype == self.changetype_perms: | ||
| 250 | return '%s changed permissions from %s to %s' % (self.path, self.oldvalue, self.newvalue) | ||
| 251 | elif self.changetype == self.changetype_ownergroup: | ||
| 252 | return '%s changed owner/group from %s to %s' % (self.path, self.oldvalue, self.newvalue) | ||
| 253 | elif self.changetype == self.changetype_link: | ||
| 254 | return '%s changed symlink target from %s to %s' % (self.path, self.oldvalue, self.newvalue) | ||
| 255 | elif self.changetype == self.changetype_move: | ||
| 256 | return '%s moved to %s' % (self.path, self.oldvalue) | ||
| 257 | else: | ||
| 258 | return '%s changed (unknown)' % self.path | ||
| 259 | |||
| 260 | def blob_to_dict(blob): | ||
| 261 | alines = [line for line in blob.data_stream.read().decode('utf-8').splitlines()] | ||
| 262 | adict = {} | ||
| 263 | for line in alines: | ||
| 264 | splitv = [i.strip() for i in line.split('=',1)] | ||
| 265 | if len(splitv) > 1: | ||
| 266 | adict[splitv[0]] = splitv[1] | ||
| 267 | return adict | ||
| 268 | |||
| 269 | |||
| 270 | def file_list_to_dict(lines): | ||
| 271 | adict = {} | ||
| 272 | for line in lines: | ||
| 273 | # Leave the last few fields intact so we handle file names containing spaces | ||
| 274 | splitv = line.split(None,4) | ||
| 275 | # Grab the path and remove the leading . | ||
| 276 | path = splitv[4][1:].strip() | ||
| 277 | # Handle symlinks | ||
| 278 | if(' -> ' in path): | ||
| 279 | target = path.split(' -> ')[1] | ||
| 280 | path = path.split(' -> ')[0] | ||
| 281 | adict[path] = splitv[0:3] + [target] | ||
| 282 | else: | ||
| 283 | adict[path] = splitv[0:3] | ||
| 284 | return adict | ||
| 285 | |||
| 286 | numeric_removal = str.maketrans('0123456789', 'XXXXXXXXXX') | ||
| 287 | |||
| 288 | def compare_file_lists(alines, blines, compare_ownership=True): | ||
| 289 | adict = file_list_to_dict(alines) | ||
| 290 | bdict = file_list_to_dict(blines) | ||
| 291 | filechanges = [] | ||
| 292 | additions = [] | ||
| 293 | removals = [] | ||
| 294 | for path, splitv in adict.items(): | ||
| 295 | newsplitv = bdict.pop(path, None) | ||
| 296 | if newsplitv: | ||
| 297 | # Check type | ||
| 298 | oldvalue = splitv[0][0] | ||
| 299 | newvalue = newsplitv[0][0] | ||
| 300 | if oldvalue != newvalue: | ||
| 301 | filechanges.append(FileChange(path, FileChange.changetype_type, oldvalue, newvalue)) | ||
| 302 | |||
| 303 | # Check permissions | ||
| 304 | oldvalue = splitv[0][1:] | ||
| 305 | newvalue = newsplitv[0][1:] | ||
| 306 | if oldvalue != newvalue: | ||
| 307 | filechanges.append(FileChange(path, FileChange.changetype_perms, oldvalue, newvalue)) | ||
| 308 | |||
| 309 | if compare_ownership: | ||
| 310 | # Check owner/group | ||
| 311 | oldvalue = '%s/%s' % (splitv[1], splitv[2]) | ||
| 312 | newvalue = '%s/%s' % (newsplitv[1], newsplitv[2]) | ||
| 313 | if oldvalue != newvalue: | ||
| 314 | filechanges.append(FileChange(path, FileChange.changetype_ownergroup, oldvalue, newvalue)) | ||
| 315 | |||
| 316 | # Check symlink target | ||
| 317 | if newsplitv[0][0] == 'l': | ||
| 318 | if len(splitv) > 3: | ||
| 319 | oldvalue = splitv[3] | ||
| 320 | else: | ||
| 321 | oldvalue = None | ||
| 322 | newvalue = newsplitv[3] | ||
| 323 | if oldvalue != newvalue: | ||
| 324 | filechanges.append(FileChange(path, FileChange.changetype_link, oldvalue, newvalue)) | ||
| 325 | else: | ||
| 326 | removals.append(path) | ||
| 327 | |||
| 328 | # Whatever is left over has been added | ||
| 329 | for path in bdict: | ||
| 330 | additions.append(path) | ||
| 331 | |||
| 332 | # Rather than print additions and removals, it's nicer to print file 'moves' | ||
| 333 | # where names or paths are similar. | ||
| 334 | revmap_remove = {} | ||
| 335 | for removal in removals: | ||
| 336 | translated = removal.translate(numeric_removal) | ||
| 337 | if translated not in revmap_remove: | ||
| 338 | revmap_remove[translated] = [] | ||
| 339 | revmap_remove[translated].append(removal) | ||
| 340 | |||
| 341 | # | ||
| 342 | # We want to detect renames of large trees of files like | ||
| 343 | # /lib/modules/5.4.40-yocto-standard to /lib/modules/5.4.43-yocto-standard | ||
| 344 | # | ||
| 345 | renames = {} | ||
| 346 | for addition in additions.copy(): | ||
| 347 | if addition not in additions: | ||
| 348 | continue | ||
| 349 | translated = addition.translate(numeric_removal) | ||
| 350 | if translated in revmap_remove: | ||
| 351 | if len(revmap_remove[translated]) != 1: | ||
| 352 | continue | ||
| 353 | removal = revmap_remove[translated][0] | ||
| 354 | commondir = addition.split("/") | ||
| 355 | commondir2 = removal.split("/") | ||
| 356 | idx = None | ||
| 357 | for i in range(len(commondir)): | ||
| 358 | if commondir[i] != commondir2[i]: | ||
| 359 | idx = i | ||
| 360 | break | ||
| 361 | commondir = "/".join(commondir[:i+1]) | ||
| 362 | commondir2 = "/".join(commondir2[:i+1]) | ||
| 363 | # If the common parent is in one dict and not the other, it's likely a rename | ||
| 364 | # so iterate through those files and process as such | ||
| 365 | if commondir2 not in bdict and commondir not in adict: | ||
| 366 | if commondir not in renames: | ||
| 367 | renames[commondir] = commondir2 | ||
| 368 | for addition2 in additions.copy(): | ||
| 369 | if addition2.startswith(commondir): | ||
| 370 | removal2 = addition2.replace(commondir, commondir2) | ||
| 371 | if removal2 in removals: | ||
| 372 | additions.remove(addition2) | ||
| 373 | removals.remove(removal2) | ||
| 374 | continue | ||
| 375 | filechanges.append(FileChange(removal, FileChange.changetype_move, addition)) | ||
| 376 | if addition in additions: | ||
| 377 | additions.remove(addition) | ||
| 378 | if removal in removals: | ||
| 379 | removals.remove(removal) | ||
| 380 | for rename in renames: | ||
| 381 | filechanges.append(FileChange(renames[rename], FileChange.changetype_move, rename)) | ||
| 382 | |||
| 383 | for addition in additions: | ||
| 384 | filechanges.append(FileChange(addition, FileChange.changetype_add)) | ||
| 385 | for removal in removals: | ||
| 386 | filechanges.append(FileChange(removal, FileChange.changetype_remove)) | ||
| 387 | |||
| 388 | return filechanges | ||
| 389 | |||
| 390 | |||
| 391 | def compare_lists(alines, blines): | ||
| 392 | removed = list(set(alines) - set(blines)) | ||
| 393 | added = list(set(blines) - set(alines)) | ||
| 394 | |||
| 395 | filechanges = [] | ||
| 396 | for pkg in removed: | ||
| 397 | filechanges.append(FileChange(pkg, FileChange.changetype_remove)) | ||
| 398 | for pkg in added: | ||
| 399 | filechanges.append(FileChange(pkg, FileChange.changetype_add)) | ||
| 400 | |||
| 401 | return filechanges | ||
| 402 | |||
| 403 | |||
| 404 | def compare_pkg_lists(astr, bstr): | ||
| 405 | depvera = bb.utils.explode_dep_versions2(astr) | ||
| 406 | depverb = bb.utils.explode_dep_versions2(bstr) | ||
| 407 | |||
| 408 | # Strip out changes where the version has increased | ||
| 409 | remove = [] | ||
| 410 | for k in depvera: | ||
| 411 | if k in depverb: | ||
| 412 | dva = depvera[k] | ||
| 413 | dvb = depverb[k] | ||
| 414 | if dva and dvb and len(dva) == len(dvb): | ||
| 415 | # Since length is the same, sort so that prefixes (e.g. >=) will line up | ||
| 416 | dva.sort() | ||
| 417 | dvb.sort() | ||
| 418 | removeit = True | ||
| 419 | for dvai, dvbi in zip(dva, dvb): | ||
| 420 | if dvai != dvbi: | ||
| 421 | aiprefix = dvai.split(' ')[0] | ||
| 422 | biprefix = dvbi.split(' ')[0] | ||
| 423 | if aiprefix == biprefix and aiprefix in ['>=', '=']: | ||
| 424 | if bb.utils.vercmp(bb.utils.split_version(dvai), bb.utils.split_version(dvbi)) > 0: | ||
| 425 | removeit = False | ||
| 426 | break | ||
| 427 | else: | ||
| 428 | removeit = False | ||
| 429 | break | ||
| 430 | if removeit: | ||
| 431 | remove.append(k) | ||
| 432 | |||
| 433 | for k in remove: | ||
| 434 | depvera.pop(k) | ||
| 435 | depverb.pop(k) | ||
| 436 | |||
| 437 | return (depvera, depverb) | ||
| 438 | |||
| 439 | |||
| 440 | def compare_dict_blobs(path, ablob, bblob, report_all, report_ver): | ||
| 441 | adict = blob_to_dict(ablob) | ||
| 442 | bdict = blob_to_dict(bblob) | ||
| 443 | |||
| 444 | pkgname = os.path.basename(path) | ||
| 445 | |||
| 446 | defaultvals = {} | ||
| 447 | defaultvals['PKG'] = pkgname | ||
| 448 | defaultvals['PKGE'] = '0' | ||
| 449 | |||
| 450 | changes = [] | ||
| 451 | keys = list(set(adict.keys()) | set(bdict.keys()) | set(defaultval_map.keys())) | ||
| 452 | for key in keys: | ||
| 453 | astr = adict.get(key, '') | ||
| 454 | bstr = bdict.get(key, '') | ||
| 455 | if key in ver_monitor_fields: | ||
| 456 | monitored = report_ver or astr or bstr | ||
| 457 | else: | ||
| 458 | monitored = key in monitor_fields | ||
| 459 | mapped_key = defaultval_map.get(key, '') | ||
| 460 | if mapped_key: | ||
| 461 | if not astr: | ||
| 462 | astr = '%s [default]' % adict.get(mapped_key, defaultvals.get(key, '')) | ||
| 463 | if not bstr: | ||
| 464 | bstr = '%s [default]' % bdict.get(mapped_key, defaultvals.get(key, '')) | ||
| 465 | |||
| 466 | if astr != bstr: | ||
| 467 | if (not report_all) and key in numeric_fields: | ||
| 468 | aval = int(astr or 0) | ||
| 469 | bval = int(bstr or 0) | ||
| 470 | if aval != 0: | ||
| 471 | percentchg = ((bval - aval) / float(aval)) * 100 | ||
| 472 | else: | ||
| 473 | percentchg = 100 | ||
| 474 | if abs(percentchg) < monitor_numeric_threshold: | ||
| 475 | continue | ||
| 476 | elif (not report_all) and key in list_fields: | ||
| 477 | if key == "FILELIST" and (path.endswith("-dbg") or path.endswith("-src")) and bstr.strip() != '': | ||
| 478 | continue | ||
| 479 | if key in ['RPROVIDES', 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RREPLACES', 'RCONFLICTS']: | ||
| 480 | (depvera, depverb) = compare_pkg_lists(astr, bstr) | ||
| 481 | if depvera == depverb: | ||
| 482 | continue | ||
| 483 | if key == 'FILELIST': | ||
| 484 | alist = shlex.split(astr) | ||
| 485 | blist = shlex.split(bstr) | ||
| 486 | else: | ||
| 487 | alist = astr.split() | ||
| 488 | blist = bstr.split() | ||
| 489 | alist.sort() | ||
| 490 | blist.sort() | ||
| 491 | # We don't care about the removal of self-dependencies | ||
| 492 | if pkgname in alist and not pkgname in blist: | ||
| 493 | alist.remove(pkgname) | ||
| 494 | if ' '.join(alist) == ' '.join(blist): | ||
| 495 | continue | ||
| 496 | |||
| 497 | if key == 'PKGR' and not report_all: | ||
| 498 | vers = [] | ||
| 499 | # strip leading 'r' and dots | ||
| 500 | for ver in (astr.split()[0], bstr.split()[0]): | ||
| 501 | if ver.startswith('r'): | ||
| 502 | ver = ver[1:] | ||
| 503 | vers.append(ver.replace('.', '')) | ||
| 504 | maxlen = max(len(vers[0]), len(vers[1])) | ||
| 505 | try: | ||
| 506 | # pad with '0' and convert to int | ||
| 507 | vers = [int(ver.ljust(maxlen, '0')) for ver in vers] | ||
| 508 | except ValueError: | ||
| 509 | pass | ||
| 510 | else: | ||
| 511 | # skip decrements and increments | ||
| 512 | if abs(vers[0] - vers[1]) == 1: | ||
| 513 | continue | ||
| 514 | |||
| 515 | chg = ChangeRecord(path, key, astr, bstr, monitored) | ||
| 516 | changes.append(chg) | ||
| 517 | return changes | ||
| 518 | |||
| 519 | |||
| 520 | def compare_siglists(a_blob, b_blob, taskdiff=False): | ||
| 521 | # FIXME collapse down a recipe's tasks? | ||
| 522 | alines = a_blob.data_stream.read().decode('utf-8').splitlines() | ||
| 523 | blines = b_blob.data_stream.read().decode('utf-8').splitlines() | ||
| 524 | keys = [] | ||
| 525 | pnmap = {} | ||
| 526 | def readsigs(lines): | ||
| 527 | sigs = {} | ||
| 528 | for line in lines: | ||
| 529 | linesplit = line.split() | ||
| 530 | if len(linesplit) > 2: | ||
| 531 | sigs[linesplit[0]] = linesplit[2] | ||
| 532 | if not linesplit[0] in keys: | ||
| 533 | keys.append(linesplit[0]) | ||
| 534 | pnmap[linesplit[1]] = linesplit[0].rsplit('.', 1)[0] | ||
| 535 | return sigs | ||
| 536 | adict = readsigs(alines) | ||
| 537 | bdict = readsigs(blines) | ||
| 538 | out = [] | ||
| 539 | |||
| 540 | changecount = 0 | ||
| 541 | addcount = 0 | ||
| 542 | removecount = 0 | ||
| 543 | if taskdiff: | ||
| 544 | with bb.tinfoil.Tinfoil() as tinfoil: | ||
| 545 | tinfoil.prepare(config_only=True) | ||
| 546 | |||
| 547 | changes = collections.OrderedDict() | ||
| 548 | |||
| 549 | def compare_hashfiles(pn, taskname, hash1, hash2): | ||
| 550 | hashes = [hash1, hash2] | ||
| 551 | hashfiles = bb.siggen.find_siginfo(pn, taskname, hashes, tinfoil.config_data) | ||
| 552 | |||
| 553 | if not taskname: | ||
| 554 | (pn, taskname) = pn.rsplit('.', 1) | ||
| 555 | pn = pnmap.get(pn, pn) | ||
| 556 | desc = '%s.%s' % (pn, taskname) | ||
| 557 | |||
| 558 | if len(hashfiles) == 0: | ||
| 559 | out.append("Unable to find matching sigdata for %s with hashes %s or %s" % (desc, hash1, hash2)) | ||
| 560 | elif not hash1 in hashfiles: | ||
| 561 | out.append("Unable to find matching sigdata for %s with hash %s" % (desc, hash1)) | ||
| 562 | elif not hash2 in hashfiles: | ||
| 563 | out.append("Unable to find matching sigdata for %s with hash %s" % (desc, hash2)) | ||
| 564 | else: | ||
| 565 | out2 = bb.siggen.compare_sigfiles(hashfiles[hash1]['path'], hashfiles[hash2]['path'], recursecb, collapsed=True) | ||
| 566 | for line in out2: | ||
| 567 | m = hashlib.sha256() | ||
| 568 | m.update(line.encode('utf-8')) | ||
| 569 | entry = changes.get(m.hexdigest(), (line, [])) | ||
| 570 | if desc not in entry[1]: | ||
| 571 | changes[m.hexdigest()] = (line, entry[1] + [desc]) | ||
| 572 | |||
| 573 | # Define recursion callback | ||
| 574 | def recursecb(key, hash1, hash2): | ||
| 575 | compare_hashfiles(key, None, hash1, hash2) | ||
| 576 | return [] | ||
| 577 | |||
| 578 | for key in keys: | ||
| 579 | siga = adict.get(key, None) | ||
| 580 | sigb = bdict.get(key, None) | ||
| 581 | if siga is not None and sigb is not None and siga != sigb: | ||
| 582 | changecount += 1 | ||
| 583 | (pn, taskname) = key.rsplit('.', 1) | ||
| 584 | compare_hashfiles(pn, taskname, siga, sigb) | ||
| 585 | elif siga is None: | ||
| 586 | addcount += 1 | ||
| 587 | elif sigb is None: | ||
| 588 | removecount += 1 | ||
| 589 | for key, item in changes.items(): | ||
| 590 | line, tasks = item | ||
| 591 | if len(tasks) == 1: | ||
| 592 | desc = tasks[0] | ||
| 593 | elif len(tasks) == 2: | ||
| 594 | desc = '%s and %s' % (tasks[0], tasks[1]) | ||
| 595 | else: | ||
| 596 | desc = '%s and %d others' % (tasks[-1], len(tasks)-1) | ||
| 597 | out.append('%s: %s' % (desc, line)) | ||
| 598 | else: | ||
| 599 | for key in keys: | ||
| 600 | siga = adict.get(key, None) | ||
| 601 | sigb = bdict.get(key, None) | ||
| 602 | if siga is not None and sigb is not None and siga != sigb: | ||
| 603 | out.append('%s changed from %s to %s' % (key, siga, sigb)) | ||
| 604 | changecount += 1 | ||
| 605 | elif siga is None: | ||
| 606 | out.append('%s was added' % key) | ||
| 607 | addcount += 1 | ||
| 608 | elif sigb is None: | ||
| 609 | out.append('%s was removed' % key) | ||
| 610 | removecount += 1 | ||
| 611 | out.append('Summary: %d tasks added, %d tasks removed, %d tasks modified (%.1f%%)' % (addcount, removecount, changecount, (changecount / float(len(bdict)) * 100))) | ||
| 612 | return '\n'.join(out) | ||
| 613 | |||
| 614 | |||
| 615 | def process_changes(repopath, revision1, revision2='HEAD', report_all=False, report_ver=False, | ||
| 616 | sigs=False, sigsdiff=False, exclude_path=None): | ||
| 617 | repo = git.Repo(repopath) | ||
| 618 | assert repo.bare == False | ||
| 619 | commit = repo.commit(revision1) | ||
| 620 | diff = commit.diff(revision2) | ||
| 621 | |||
| 622 | changes = [] | ||
| 623 | |||
| 624 | if sigs or sigsdiff: | ||
| 625 | for d in diff.iter_change_type('M'): | ||
| 626 | if d.a_blob.path == 'siglist.txt': | ||
| 627 | changes.append(compare_siglists(d.a_blob, d.b_blob, taskdiff=sigsdiff)) | ||
| 628 | return changes | ||
| 629 | |||
| 630 | for d in diff.iter_change_type('M'): | ||
| 631 | path = os.path.dirname(d.a_blob.path) | ||
| 632 | if path.startswith('packages/'): | ||
| 633 | filename = os.path.basename(d.a_blob.path) | ||
| 634 | if filename == 'latest': | ||
| 635 | changes.extend(compare_dict_blobs(path, d.a_blob, d.b_blob, report_all, report_ver)) | ||
| 636 | elif filename.startswith('latest.'): | ||
| 637 | chg = ChangeRecord(path, filename, d.a_blob.data_stream.read().decode('utf-8'), d.b_blob.data_stream.read().decode('utf-8'), True) | ||
| 638 | changes.append(chg) | ||
| 639 | elif filename == 'sysroot': | ||
| 640 | alines = d.a_blob.data_stream.read().decode('utf-8').splitlines() | ||
| 641 | blines = d.b_blob.data_stream.read().decode('utf-8').splitlines() | ||
| 642 | filechanges = compare_file_lists(alines,blines, compare_ownership=False) | ||
| 643 | if filechanges: | ||
| 644 | chg = ChangeRecord(path, filename, None, None, True) | ||
| 645 | chg.filechanges = filechanges | ||
| 646 | changes.append(chg) | ||
| 647 | |||
| 648 | elif path.startswith('images/'): | ||
| 649 | filename = os.path.basename(d.a_blob.path) | ||
| 650 | if filename in img_monitor_files: | ||
| 651 | if filename == 'files-in-image.txt': | ||
| 652 | alines = d.a_blob.data_stream.read().decode('utf-8').splitlines() | ||
| 653 | blines = d.b_blob.data_stream.read().decode('utf-8').splitlines() | ||
| 654 | filechanges = compare_file_lists(alines,blines) | ||
| 655 | if filechanges: | ||
| 656 | chg = ChangeRecord(path, filename, None, None, True) | ||
| 657 | chg.filechanges = filechanges | ||
| 658 | changes.append(chg) | ||
| 659 | elif filename == 'installed-package-names.txt': | ||
| 660 | alines = d.a_blob.data_stream.read().decode('utf-8').splitlines() | ||
| 661 | blines = d.b_blob.data_stream.read().decode('utf-8').splitlines() | ||
| 662 | filechanges = compare_lists(alines,blines) | ||
| 663 | if filechanges: | ||
| 664 | chg = ChangeRecord(path, filename, None, None, True) | ||
| 665 | chg.filechanges = filechanges | ||
| 666 | changes.append(chg) | ||
| 667 | else: | ||
| 668 | chg = ChangeRecord(path, filename, d.a_blob.data_stream.read().decode('utf-8'), d.b_blob.data_stream.read().decode('utf-8'), True) | ||
| 669 | changes.append(chg) | ||
| 670 | elif filename == 'image-info.txt': | ||
| 671 | changes.extend(compare_dict_blobs(path, d.a_blob, d.b_blob, report_all, report_ver)) | ||
| 672 | elif '/image-files/' in path: | ||
| 673 | chg = ChangeRecord(path, filename, d.a_blob.data_stream.read().decode('utf-8'), d.b_blob.data_stream.read().decode('utf-8'), True) | ||
| 674 | changes.append(chg) | ||
| 675 | |||
| 676 | # Look for added preinst/postinst/prerm/postrm | ||
| 677 | # (without reporting newly added recipes) | ||
| 678 | addedpkgs = [] | ||
| 679 | addedchanges = [] | ||
| 680 | for d in diff.iter_change_type('A'): | ||
| 681 | path = os.path.dirname(d.b_blob.path) | ||
| 682 | if path.startswith('packages/'): | ||
| 683 | filename = os.path.basename(d.b_blob.path) | ||
| 684 | if filename == 'latest': | ||
| 685 | addedpkgs.append(path) | ||
| 686 | elif filename.startswith('latest.'): | ||
| 687 | chg = ChangeRecord(path, filename[7:], '', d.b_blob.data_stream.read().decode('utf-8'), True) | ||
| 688 | addedchanges.append(chg) | ||
| 689 | for chg in addedchanges: | ||
| 690 | found = False | ||
| 691 | for pkg in addedpkgs: | ||
| 692 | if chg.path.startswith(pkg): | ||
| 693 | found = True | ||
| 694 | break | ||
| 695 | if not found: | ||
| 696 | changes.append(chg) | ||
| 697 | |||
| 698 | # Look for cleared preinst/postinst/prerm/postrm | ||
| 699 | for d in diff.iter_change_type('D'): | ||
| 700 | path = os.path.dirname(d.a_blob.path) | ||
| 701 | if path.startswith('packages/'): | ||
| 702 | filename = os.path.basename(d.a_blob.path) | ||
| 703 | if filename != 'latest' and filename.startswith('latest.'): | ||
| 704 | chg = ChangeRecord(path, filename[7:], d.a_blob.data_stream.read().decode('utf-8'), '', True) | ||
| 705 | changes.append(chg) | ||
| 706 | |||
| 707 | # filter out unwanted paths | ||
| 708 | if exclude_path: | ||
| 709 | for chg in changes: | ||
| 710 | if chg.filechanges: | ||
| 711 | fchgs = [] | ||
| 712 | for fchg in chg.filechanges: | ||
| 713 | for epath in exclude_path: | ||
| 714 | if fchg.path.startswith(epath): | ||
| 715 | break | ||
| 716 | else: | ||
| 717 | fchgs.append(fchg) | ||
| 718 | chg.filechanges = fchgs | ||
| 719 | |||
| 720 | if report_all: | ||
| 721 | return changes | ||
| 722 | else: | ||
| 723 | return [chg for chg in changes if chg.monitored] | ||
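As the header notes, this module is normally driven by scripts/buildhistory-diff and needs GitPython. A hedged sketch of how a caller might invoke the entry point process_changes() (the repository path and revisions below are placeholders):

    from oe.buildhistory_analysis import init_colours, process_changes

    init_colours(False)                       # plain output, no ANSI colours
    for chg in process_changes("/path/to/buildhistory", "HEAD~1", "HEAD"):
        text = str(chg)                       # empty when nothing worth reporting
        if text:
            print(text)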
diff --git a/meta/lib/oe/buildstats.py b/meta/lib/oe/buildstats.py
deleted file mode 100644
index 2700245ec6..0000000000
--- a/meta/lib/oe/buildstats.py
+++ /dev/null
@@ -1,254 +0,0 @@
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | # Implements system state sampling. Called by buildstats.bbclass. | ||
| 7 | # Because it is a real Python module, it can hold persistent state, | ||
| 8 | # like open log files and the time of the last sampling. | ||
| 9 | |||
| 10 | import time | ||
| 11 | import re | ||
| 12 | import bb.event | ||
| 13 | from collections import deque | ||
| 14 | |||
| 15 | class SystemStats: | ||
| 16 | def __init__(self, d): | ||
| 17 | bn = d.getVar('BUILDNAME') | ||
| 18 | bsdir = os.path.join(d.getVar('BUILDSTATS_BASE'), bn) | ||
| 19 | bb.utils.mkdirhier(bsdir) | ||
| 20 | file_handlers = [('diskstats', self._reduce_diskstats), | ||
| 21 | ('meminfo', self._reduce_meminfo), | ||
| 22 | ('stat', self._reduce_stat), | ||
| 23 | ('net/dev', self._reduce_net)] | ||
| 24 | |||
| 25 | # Some hosts like openSUSE have readable /proc/pressure files | ||
| 26 | # but throw errors when these files are opened. Catch these errors | ||
| 27 | # and ensure that the reduced_proc_pressure directory is not created. | ||
| 28 | if os.path.exists("/proc/pressure"): | ||
| 29 | try: | ||
| 30 | with open('/proc/pressure/cpu', 'rb') as source: | ||
| 31 | source.read() | ||
| 32 | pressuredir = os.path.join(bsdir, 'reduced_proc_pressure') | ||
| 33 | bb.utils.mkdirhier(pressuredir) | ||
| 34 | file_handlers.extend([('pressure/cpu', self._reduce_pressure), | ||
| 35 | ('pressure/io', self._reduce_pressure), | ||
| 36 | ('pressure/memory', self._reduce_pressure)]) | ||
| 37 | except Exception: | ||
| 38 | pass | ||
| 39 | |||
| 40 | self.proc_files = [] | ||
| 41 | for filename, handler in (file_handlers): | ||
| 42 | # The corresponding /proc files might not exist on the host. | ||
| 43 | # For example, /proc/diskstats is not available in virtualized | ||
| 44 | # environments like Linux-VServer. Silently skip collecting | ||
| 45 | # the data. | ||
| 46 | if os.path.exists(os.path.join('/proc', filename)): | ||
| 47 | # In practice, this class gets instantiated only once in | ||
| 48 | # the bitbake cooker process. Therefore 'append' mode is | ||
| 49 | # not strictly necessary, but using it makes the class | ||
| 50 | # more robust should two processes ever write | ||
| 51 | # concurrently. | ||
| 52 | if filename == 'net/dev': | ||
| 53 | destfile = os.path.join(bsdir, 'reduced_proc_net.log') | ||
| 54 | else: | ||
| 55 | destfile = os.path.join(bsdir, '%sproc_%s.log' % ('reduced_' if handler else '', filename)) | ||
| 56 | self.proc_files.append((filename, open(destfile, 'ab'), handler)) | ||
| 57 | self.monitor_disk = open(os.path.join(bsdir, 'monitor_disk.log'), 'ab') | ||
| 58 | # Last time that we sampled /proc data and recorded disk monitoring data, respectively. | ||
| 59 | self.last_proc = 0 | ||
| 60 | self.last_disk_monitor = 0 | ||
| 61 | # Minimum number of seconds between recording a sample. This becomes relevant when we get | ||
| 62 | # called very often while many short tasks get started. Sampling during quiet periods | ||
| 63 | # depends on the heartbeat event, which fires less often. | ||
| 64 | # By default, the Heartbeat events occur roughly once every second but the actual time | ||
| 65 | # between these events deviates by a few milliseconds, in most cases. Hence | ||
| 66 | # pick a somewhat arbitrary tolerance such that we sample a large majority | ||
| 67 | # of the Heartbeat events. This ignores rare events that fall outside the minimum | ||
| 68 | # and may lead to an extra sample in a given second every so often. However, it allows for fairly | ||
| 69 | # consistent intervals between samples without missing many events. | ||
| 70 | self.tolerance = 0.01 | ||
| 71 | self.min_seconds = 1.0 - self.tolerance | ||
| 72 | |||
| 73 | self.meminfo_regex = re.compile(rb'^(MemTotal|MemFree|Buffers|Cached|SwapTotal|SwapFree):\s*(\d+)') | ||
| 74 | self.diskstats_regex = re.compile(rb'^([hsv]d.|mtdblock\d|mmcblk\d|cciss/c\d+d\d+|nvme\d+n\d+.*)$') | ||
| 75 | self.diskstats_ltime = None | ||
| 76 | self.diskstats_data = None | ||
| 77 | self.stat_ltimes = None | ||
| 78 | # Last time we sampled /proc/pressure. All resources stored in a single dict with the key as filename | ||
| 79 | self.last_pressure = {"pressure/cpu": None, "pressure/io": None, "pressure/memory": None} | ||
| 80 | self.net_stats = {} | ||
| 81 | |||
| 82 | def close(self): | ||
| 83 | self.monitor_disk.close() | ||
| 84 | for _, output, _ in self.proc_files: | ||
| 85 | output.close() | ||
| 86 | |||
| 87 | def _reduce_meminfo(self, time, data, filename): | ||
| 88 | """ | ||
| 89 | Extracts 'MemTotal', 'MemFree', 'Buffers', 'Cached', 'SwapTotal', 'SwapFree' | ||
| 90 | and writes their values into a single line, in that order. | ||
| 91 | """ | ||
| 92 | values = {} | ||
| 93 | for line in data.split(b'\n'): | ||
| 94 | m = self.meminfo_regex.match(line) | ||
| 95 | if m: | ||
| 96 | values[m.group(1)] = m.group(2) | ||
| 97 | if len(values) == 6: | ||
| 98 | return (time, | ||
| 99 | b' '.join([values[x] for x in | ||
| 100 | (b'MemTotal', b'MemFree', b'Buffers', b'Cached', b'SwapTotal', b'SwapFree')]) + b'\n') | ||
| 101 | |||
| 102 | def _reduce_net(self, time, data, filename): | ||
| 103 | data = data.split(b'\n') | ||
| 104 | for line in data[2:]: | ||
| 105 | if b":" not in line: | ||
| 106 | continue | ||
| 107 | try: | ||
| 108 | parts = line.split() | ||
| 109 | iface = (parts[0].strip(b':')).decode('ascii') | ||
| 110 | receive_bytes = int(parts[1]) | ||
| 111 | transmit_bytes = int(parts[9]) | ||
| 112 | except Exception: | ||
| 113 | continue | ||
| 114 | |||
| 115 | if iface not in self.net_stats: | ||
| 116 | self.net_stats[iface] = deque(maxlen=2) | ||
| 117 | self.net_stats[iface].append((receive_bytes, transmit_bytes, 0, 0)) | ||
| 118 | prev = self.net_stats[iface][-1] if self.net_stats[iface] else (0, 0, 0, 0) | ||
| 119 | receive_diff = receive_bytes - prev[0] | ||
| 120 | transmit_diff = transmit_bytes - prev[1] | ||
| 121 | self.net_stats[iface].append(( | ||
| 122 | receive_bytes, | ||
| 123 | transmit_bytes, | ||
| 124 | receive_diff, | ||
| 125 | transmit_diff | ||
| 126 | )) | ||
| 127 | |||
| 128 | result_str = "\n".join( | ||
| 129 | f"{iface}: {net_data[-1][0]} {net_data[-1][1]} {net_data[-1][2]} {net_data[-1][3]}" | ||
| 130 | for iface, net_data in self.net_stats.items() | ||
| 131 | ) + "\n" | ||
| 132 | |||
| 133 | return time, result_str.encode('ascii') | ||
| 134 | |||
| 135 | def _diskstats_is_relevant_line(self, linetokens): | ||
| 136 | if len(linetokens) < 14: | ||
| 137 | return False | ||
| 138 | disk = linetokens[2] | ||
| 139 | return self.diskstats_regex.match(disk) | ||
| 140 | |||
| 141 | def _reduce_diskstats(self, time, data, filename): | ||
| 142 | relevant_tokens = filter(self._diskstats_is_relevant_line, map(lambda x: x.split(), data.split(b'\n'))) | ||
| 143 | diskdata = [0] * 3 | ||
| 144 | reduced = None | ||
| 145 | for tokens in relevant_tokens: | ||
| 146 | # rsect | ||
| 147 | diskdata[0] += int(tokens[5]) | ||
| 148 | # wsect | ||
| 149 | diskdata[1] += int(tokens[9]) | ||
| 150 | # use | ||
| 151 | diskdata[2] += int(tokens[12]) | ||
| 152 | if self.diskstats_ltime: | ||
| 153 | # We need to compute information about the time interval | ||
| 154 | # since the last sampling and record the result as sample | ||
| 155 | # for that point in the past. | ||
| 156 | interval = time - self.diskstats_ltime | ||
| 157 | if interval > 0: | ||
| 158 | sums = [ a - b for a, b in zip(diskdata, self.diskstats_data) ] | ||
| 159 | readTput = sums[0] / 2.0 * 100.0 / interval | ||
| 160 | writeTput = sums[1] / 2.0 * 100.0 / interval | ||
| 161 | util = float( sums[2] ) / 10 / interval | ||
| 162 | util = max(0.0, min(1.0, util)) | ||
| 163 | reduced = (self.diskstats_ltime, (readTput, writeTput, util)) | ||
| 164 | |||
| 165 | self.diskstats_ltime = time | ||
| 166 | self.diskstats_data = diskdata | ||
| 167 | return reduced | ||
| 168 | |||
| 169 | |||
| 170 | def _reduce_nop(self, time, data, filename): | ||
| 171 | return (time, data) | ||
| 172 | |||
| 173 | def _reduce_stat(self, time, data, filename): | ||
| 174 | if not data: | ||
| 175 | return None | ||
| 176 | # CPU times {user, nice, system, idle, io_wait, irq, softirq} from first line | ||
| 177 | tokens = data.split(b'\n', 1)[0].split() | ||
| 178 | times = [ int(token) for token in tokens[1:] ] | ||
| 179 | reduced = None | ||
| 180 | if self.stat_ltimes: | ||
| 181 | user = float((times[0] + times[1]) - (self.stat_ltimes[0] + self.stat_ltimes[1])) | ||
| 182 | system = float((times[2] + times[5] + times[6]) - (self.stat_ltimes[2] + self.stat_ltimes[5] + self.stat_ltimes[6])) | ||
| 183 | idle = float(times[3] - self.stat_ltimes[3]) | ||
| 184 | iowait = float(times[4] - self.stat_ltimes[4]) | ||
| 185 | |||
| 186 | aSum = max(user + system + idle + iowait, 1) | ||
| 187 | reduced = (time, (user/aSum, system/aSum, iowait/aSum)) | ||
| 188 | |||
| 189 | self.stat_ltimes = times | ||
| 190 | return reduced | ||
| 191 | |||
| 192 | def _reduce_pressure(self, time, data, filename): | ||
| 193 | """ | ||
| 194 | Return reduced pressure: {avg10, avg60, avg300} and delta total compared to the previous sample | ||
| 195 | for the cpu, io and memory resources. A common function is used for all 3 resources since the | ||
| 196 | format of the /proc/pressure file is the same in each case. | ||
| 197 | """ | ||
| 198 | if not data: | ||
| 199 | return None | ||
| 200 | tokens = data.split(b'\n', 1)[0].split() | ||
| 201 | avg10 = float(tokens[1].split(b'=')[1]) | ||
| 202 | avg60 = float(tokens[2].split(b'=')[1]) | ||
| 203 | avg300 = float(tokens[3].split(b'=')[1]) | ||
| 204 | total = int(tokens[4].split(b'=')[1]) | ||
| 205 | |||
| 206 | reduced = None | ||
| 207 | if self.last_pressure[filename]: | ||
| 208 | delta = total - self.last_pressure[filename] | ||
| 209 | reduced = (time, (avg10, avg60, avg300, delta)) | ||
| 210 | self.last_pressure[filename] = total | ||
| 211 | return reduced | ||
| 212 | |||
| 213 | def sample(self, event, force): | ||
| 214 | """ | ||
| 215 | Collect and log proc or disk_monitor stats periodically. | ||
| 216 | Return True if a new sample is collected and hence the value last_proc or last_disk_monitor | ||
| 217 | is changed. | ||
| 218 | """ | ||
| 219 | retval = False | ||
| 220 | now = time.time() | ||
| 221 | if (now - self.last_proc > self.min_seconds) or force: | ||
| 222 | for filename, output, handler in self.proc_files: | ||
| 223 | with open(os.path.join('/proc', filename), 'rb') as input: | ||
| 224 | data = input.read() | ||
| 225 | if handler: | ||
| 226 | reduced = handler(now, data, filename) | ||
| 227 | else: | ||
| 228 | reduced = (now, data) | ||
| 229 | if reduced: | ||
| 230 | if isinstance(reduced[1], bytes): | ||
| 231 | # Use as it is. | ||
| 232 | data = reduced[1] | ||
| 233 | else: | ||
| 234 | # Convert to a single line. | ||
| 235 | data = (' '.join([str(x) for x in reduced[1]]) + '\n').encode('ascii') | ||
| 236 | # Unbuffered raw write, less overhead and useful | ||
| 237 | # in case that we end up with concurrent writes. | ||
| 238 | os.write(output.fileno(), | ||
| 239 | ('%.0f\n' % reduced[0]).encode('ascii') + | ||
| 240 | data + | ||
| 241 | b'\n') | ||
| 242 | self.last_proc = now | ||
| 243 | retval = True | ||
| 244 | |||
| 245 | if isinstance(event, bb.event.MonitorDiskEvent) and \ | ||
| 246 | ((now - self.last_disk_monitor > self.min_seconds) or force): | ||
| 247 | os.write(self.monitor_disk.fileno(), | ||
| 248 | ('%.0f\n' % now).encode('ascii') + | ||
| 249 | ''.join(['%s: %d\n' % (dev, sample.total_bytes - sample.free_bytes) | ||
| 250 | for dev, sample in event.disk_usage.items()]).encode('ascii') + | ||
| 251 | b'\n') | ||
| 252 | self.last_disk_monitor = now | ||
| 253 | retval = True | ||
| 254 | return retval | ||
\ No newline at end of file
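SystemStats is designed to be created once by buildstats.bbclass and fed events from the cooker. A purely illustrative sketch of that driving loop (the handler wiring is simplified and hypothetical):

    from oe.buildstats import SystemStats

    _state = {}

    def buildstats_sample(e, d):
        # Create the sampler lazily; it opens the reduced_proc_*.log files.
        if 'system_stats' not in _state:
            _state['system_stats'] = SystemStats(d)
        # Rate limiting via min_seconds happens inside sample(); force=False.
        _state['system_stats'].sample(e, force=False)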
diff --git a/meta/lib/oe/cachedpath.py b/meta/lib/oe/cachedpath.py
deleted file mode 100644
index 68c85807d9..0000000000
--- a/meta/lib/oe/cachedpath.py
+++ /dev/null
@@ -1,241 +0,0 @@
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | # Based on standard python library functions but avoiding | ||
| 7 | # repeated stat calls. It's assumed the files will not change from under us | ||
| 8 | # so we can cache stat calls. | ||
| 9 | # | ||
| 10 | |||
| 11 | import os | ||
| 12 | import errno | ||
| 13 | import stat as statmod | ||
| 14 | |||
| 15 | class CachedPath(object): | ||
| 16 | def __init__(self): | ||
| 17 | self.statcache = {} | ||
| 18 | self.lstatcache = {} | ||
| 19 | self.normpathcache = {} | ||
| 20 | return | ||
| 21 | |||
| 22 | def updatecache(self, x): | ||
| 23 | x = self.normpath(x) | ||
| 24 | if x in self.statcache: | ||
| 25 | del self.statcache[x] | ||
| 26 | if x in self.lstatcache: | ||
| 27 | del self.lstatcache[x] | ||
| 28 | |||
| 29 | def normpath(self, path): | ||
| 30 | if path in self.normpathcache: | ||
| 31 | return self.normpathcache[path] | ||
| 32 | newpath = os.path.normpath(path) | ||
| 33 | self.normpathcache[path] = newpath | ||
| 34 | return newpath | ||
| 35 | |||
| 36 | def _callstat(self, path): | ||
| 37 | if path in self.statcache: | ||
| 38 | return self.statcache[path] | ||
| 39 | try: | ||
| 40 | st = os.stat(path) | ||
| 41 | self.statcache[path] = st | ||
| 42 | return st | ||
| 43 | except os.error: | ||
| 44 | self.statcache[path] = False | ||
| 45 | return False | ||
| 46 | |||
| 47 | # We might as well call lstat and then additionally | ||
| 48 | # call stat only in the symbolic link case, | ||
| 49 | # since this turns out to be much more optimal | ||
| 50 | # in real-world usage of this cache | ||
| 51 | def callstat(self, path): | ||
| 52 | path = self.normpath(path) | ||
| 53 | self.calllstat(path) | ||
| 54 | return self.statcache[path] | ||
| 55 | |||
| 56 | def calllstat(self, path): | ||
| 57 | path = self.normpath(path) | ||
| 58 | if path in self.lstatcache: | ||
| 59 | return self.lstatcache[path] | ||
| 60 | #bb.error("LStatpath:" + path) | ||
| 61 | try: | ||
| 62 | lst = os.lstat(path) | ||
| 63 | self.lstatcache[path] = lst | ||
| 64 | if not statmod.S_ISLNK(lst.st_mode): | ||
| 65 | self.statcache[path] = lst | ||
| 66 | else: | ||
| 67 | self._callstat(path) | ||
| 68 | return lst | ||
| 69 | except (os.error, AttributeError): | ||
| 70 | self.lstatcache[path] = False | ||
| 71 | self.statcache[path] = False | ||
| 72 | return False | ||
| 73 | |||
| 74 | # This follows symbolic links, so both islink() and isdir() can be true | ||
| 75 | # for the same path on systems that support symlinks | ||
| 76 | def isfile(self, path): | ||
| 77 | """Test whether a path is a regular file""" | ||
| 78 | st = self.callstat(path) | ||
| 79 | if not st: | ||
| 80 | return False | ||
| 81 | return statmod.S_ISREG(st.st_mode) | ||
| 82 | |||
| 83 | # Is a path a directory? | ||
| 84 | # This follows symbolic links, so both islink() and isdir() | ||
| 85 | # can be true for the same path on systems that support symlinks | ||
| 86 | def isdir(self, s): | ||
| 87 | """Return true if the pathname refers to an existing directory.""" | ||
| 88 | st = self.callstat(s) | ||
| 89 | if not st: | ||
| 90 | return False | ||
| 91 | return statmod.S_ISDIR(st.st_mode) | ||
| 92 | |||
| 93 | def islink(self, path): | ||
| 94 | """Test whether a path is a symbolic link""" | ||
| 95 | st = self.calllstat(path) | ||
| 96 | if not st: | ||
| 97 | return False | ||
| 98 | return statmod.S_ISLNK(st.st_mode) | ||
| 99 | |||
| 100 | # Does a path exist? | ||
| 101 | # This is false for dangling symbolic links on systems that support them. | ||
| 102 | def exists(self, path): | ||
| 103 | """Test whether a path exists. Returns False for broken symbolic links""" | ||
| 104 | if self.callstat(path): | ||
| 105 | return True | ||
| 106 | return False | ||
| 107 | |||
| 108 | def lexists(self, path): | ||
| 109 | """Test whether a path exists. Returns True for broken symbolic links""" | ||
| 110 | if self.calllstat(path): | ||
| 111 | return True | ||
| 112 | return False | ||
| 113 | |||
| 114 | # WARNING - this is not currently a drop-in replacement since they return False | ||
| 115 | # rather than raise exceptions. | ||
| 116 | def stat(self, path): | ||
| 117 | return self.callstat(path) | ||
| 118 | |||
| 119 | # WARNING - this is not currently a drop-in replacement since they return False | ||
| 120 | # rather than raise exceptions. | ||
| 121 | def lstat(self, path): | ||
| 122 | return self.calllstat(path) | ||
| 123 | |||
| 124 | def walk(self, top, topdown=True, onerror=None, followlinks=False): | ||
| 125 | # Matches os.walk, not os.path.walk() | ||
| 126 | |||
| 127 | # We may not have read permission for top, in which case we can't | ||
| 128 | # get a list of the files the directory contains. os.path.walk | ||
| 129 | # always suppressed the exception then, rather than blow up for a | ||
| 130 | # minor reason when (say) a thousand readable directories are still | ||
| 131 | # left to visit. That logic is copied here. | ||
| 132 | try: | ||
| 133 | names = os.listdir(top) | ||
| 134 | except os.error as err: | ||
| 135 | if onerror is not None: | ||
| 136 | onerror(err) | ||
| 137 | return | ||
| 138 | |||
| 139 | dirs, nondirs = [], [] | ||
| 140 | for name in names: | ||
| 141 | if self.isdir(os.path.join(top, name)): | ||
| 142 | dirs.append(name) | ||
| 143 | else: | ||
| 144 | nondirs.append(name) | ||
| 145 | |||
| 146 | if topdown: | ||
| 147 | yield top, dirs, nondirs | ||
| 148 | for name in dirs: | ||
| 149 | new_path = os.path.join(top, name) | ||
| 150 | if followlinks or not self.islink(new_path): | ||
| 151 | for x in self.walk(new_path, topdown, onerror, followlinks): | ||
| 152 | yield x | ||
| 153 | if not topdown: | ||
| 154 | yield top, dirs, nondirs | ||
| 155 | |||
| 156 | ## realpath() related functions | ||
| 157 | def __is_path_below(self, file, root): | ||
| 158 | return (file + os.path.sep).startswith(root) | ||
| 159 | |||
| 160 | def __realpath_rel(self, start, rel_path, root, loop_cnt, assume_dir): | ||
| 161 | """Calculates real path of symlink 'start' + 'rel_path' below | ||
| 162 | 'root'; the part of 'start' below 'root' must not contain symlinks. """ | ||
| 163 | have_dir = True | ||
| 164 | |||
| 165 | for d in rel_path.split(os.path.sep): | ||
| 166 | if not have_dir and not assume_dir: | ||
| 167 | raise OSError(errno.ENOENT, "no such directory %s" % start) | ||
| 168 | |||
| 169 | if d == os.path.pardir: # '..' | ||
| 170 | if len(start) >= len(root): | ||
| 171 | # do not follow '..' before root | ||
| 172 | start = os.path.dirname(start) | ||
| 173 | else: | ||
| 174 | # emit warning? | ||
| 175 | pass | ||
| 176 | else: | ||
| 177 | (start, have_dir) = self.__realpath(os.path.join(start, d), | ||
| 178 | root, loop_cnt, assume_dir) | ||
| 179 | |||
| 180 | assert(self.__is_path_below(start, root)) | ||
| 181 | |||
| 182 | return start | ||
| 183 | |||
| 184 | def __realpath(self, file, root, loop_cnt, assume_dir): | ||
| 185 | while self.islink(file) and len(file) >= len(root): | ||
| 186 | if loop_cnt == 0: | ||
| 187 | raise OSError(errno.ELOOP, file) | ||
| 188 | |||
| 189 | loop_cnt -= 1 | ||
| 190 | target = os.path.normpath(os.readlink(file)) | ||
| 191 | |||
| 192 | if not os.path.isabs(target): | ||
| 193 | tdir = os.path.dirname(file) | ||
| 194 | assert(self.__is_path_below(tdir, root)) | ||
| 195 | else: | ||
| 196 | tdir = root | ||
| 197 | |||
| 198 | file = self.__realpath_rel(tdir, target, root, loop_cnt, assume_dir) | ||
| 199 | |||
| 200 | try: | ||
| 201 | is_dir = self.isdir(file) | ||
| 202 | except: | ||
| 203 | is_dir = False | ||
| 204 | |||
| 205 | return (file, is_dir) | ||
| 206 | |||
| 207 | def realpath(self, file, root, use_physdir = True, loop_cnt = 100, assume_dir = False): | ||
| 208 | """ Returns the canonical path of 'file', assuming a | ||
| 209 | toplevel 'root' directory. When 'use_physdir' is set, all | ||
| 210 | preceding path components of 'file' will be resolved first; | ||
| 211 | this flag should be set unless it is guaranteed that there is | ||
| 212 | no symlink in the path. When 'assume_dir' is not set, missing | ||
| 213 | path components will raise an ENOENT error""" | ||
| 214 | |||
| 215 | root = os.path.normpath(root) | ||
| 216 | file = os.path.normpath(file) | ||
| 217 | |||
| 218 | if not root.endswith(os.path.sep): | ||
| 219 | # letting root end with '/' makes some things easier | ||
| 220 | root = root + os.path.sep | ||
| 221 | |||
| 222 | if not self.__is_path_below(file, root): | ||
| 223 | raise OSError(errno.EINVAL, "file '%s' is not below root" % file) | ||
| 224 | |||
| 225 | try: | ||
| 226 | if use_physdir: | ||
| 227 | file = self.__realpath_rel(root, file[(len(root) - 1):], root, loop_cnt, assume_dir) | ||
| 228 | else: | ||
| 229 | file = self.__realpath(file, root, loop_cnt, assume_dir)[0] | ||
| 230 | except OSError as e: | ||
| 231 | if e.errno == errno.ELOOP: | ||
| 232 | # make ELOOP more readable; without catching it here, a | ||
| 233 | # backtrace with 100s of OSError exceptions would be | ||
| 234 | # printed instead | ||
| 235 | raise OSError(errno.ELOOP, | ||
| 236 | "too many recursions while resolving '%s'; loop in '%s'" % | ||
| 237 | (file, e.strerror)) | ||
| 238 | |||
| 239 | raise | ||
| 240 | |||
| 241 | return file | ||
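A rough usage sketch for the cached path helper above (assuming the class defined in this file is oe.cachedpath.CachedPath and that the oe module is importable, e.g. from a BitBake devpyshell): repeated isdir()/islink() queries during walk() are answered from the memoised stat results, and realpath() resolves symlinks without ever escaping the given root.

    import os
    import oe.cachedpath

    cpath = oe.cachedpath.CachedPath()
    rootfs = "/tmp/rootfs"   # hypothetical image root

    # walk() mirrors os.walk(), but isdir()/islink() reuse the stat cache
    for top, dirs, files in cpath.walk(rootfs):
        for fn in files:
            full = os.path.join(top, fn)
            if cpath.islink(full):
                # Canonicalise the link target, treating 'rootfs' as "/"
                target = cpath.realpath(full, rootfs, assume_dir=True)
                print(full, "->", target)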
diff --git a/meta/lib/oe/classextend.py b/meta/lib/oe/classextend.py deleted file mode 100644 index ddca10dee5..0000000000 --- a/meta/lib/oe/classextend.py +++ /dev/null | |||
| @@ -1,140 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | import collections | ||
| 8 | import bb.filter | ||
| 9 | |||
| 10 | @bb.filter.filter_proc() | ||
| 11 | def native_filter(val, pn, bpn, regex=False, selfref=True): | ||
| 12 | deps = val | ||
| 13 | if not deps: | ||
| 14 | return | ||
| 15 | deps = bb.utils.explode_deps(deps) | ||
| 16 | newdeps = [] | ||
| 17 | for dep in deps: | ||
| 18 | if regex and dep.startswith("^") and dep.endswith("$"): | ||
| 19 | if not dep.endswith("-native$"): | ||
| 20 | newdeps.append(dep[:-1].replace(pn, bpn) + "-native$") | ||
| 21 | else: | ||
| 22 | newdeps.append(dep) | ||
| 23 | elif dep == pn: | ||
| 24 | if not selfref: | ||
| 25 | continue | ||
| 26 | newdeps.append(dep) | ||
| 27 | elif "-cross-" in dep: | ||
| 28 | newdeps.append(dep.replace("-cross", "-native")) | ||
| 29 | elif not dep.endswith("-native"): | ||
| 30 | # Replace ${PN} with ${BPN} in the dependency to make sure | ||
| 31 | # dependencies on, e.g., ${PN}-foo become ${BPN}-foo-native | ||
| 32 | # rather than ${BPN}-native-foo-native. | ||
| 33 | newdeps.append(dep.replace(pn, bpn) + "-native") | ||
| 34 | else: | ||
| 35 | newdeps.append(dep) | ||
| 36 | return " ".join(newdeps) | ||
| 37 | |||
| 38 | def add_suffix(val, extname, prefixes): | ||
| 39 | if val.startswith(extname + "-"): | ||
| 40 | return val | ||
| 41 | if val.endswith(("-native", "-native-runtime")) or ('nativesdk-' in val) or ('-cross-' in val) or ('-crosssdk-' in val): | ||
| 42 | return val | ||
| 43 | # If it starts with a known prefix (e.g. multilibs), just pass it through | ||
| 44 | for prefix in prefixes: | ||
| 45 | if val.startswith(prefix + "-"): | ||
| 46 | return val | ||
| 47 | if val.startswith("kernel-") or val == "virtual/kernel": | ||
| 48 | return val | ||
| 49 | if val.startswith("rtld"): | ||
| 50 | return val | ||
| 51 | if val.endswith("-crosssdk"): | ||
| 52 | return val | ||
| 53 | if val.endswith("-" + extname): | ||
| 54 | val = val.replace("-" + extname, "") | ||
| 55 | if val.startswith("virtual/"): | ||
| 56 | # Assume a large number of dashes means a triplet is present and we don't need to convert | ||
| 57 | if val.count("-") >= 3 and val.endswith(("-go",)): | ||
| 58 | return val | ||
| 59 | subs = val.split("/", 1)[1] | ||
| 60 | if not subs.startswith(extname): | ||
| 61 | return "virtual/" + extname + "-" + subs | ||
| 62 | return val | ||
| 63 | if val.startswith("/") or (val.startswith("${") and val.endswith("}")): | ||
| 64 | return val | ||
| 65 | if not val.startswith(extname): | ||
| 66 | return extname + "-" + val | ||
| 67 | return val | ||
| 68 | |||
| 69 | def get_package_mappings(packages, extname): | ||
| 70 | pkgs_mapping = [] | ||
| 71 | for pkg in packages.split(): | ||
| 72 | if pkg.startswith(extname): | ||
| 73 | pkgs_mapping.append([pkg.split(extname + "-")[1], pkg]) | ||
| 74 | continue | ||
| 75 | pkgs_mapping.append([pkg, add_suffix(pkg, extname, [])]) | ||
| 76 | return pkgs_mapping | ||
| 77 | |||
| 78 | @bb.filter.filter_proc() | ||
| 79 | def package_suffix_filter(val, extname): | ||
| 80 | pkgs_mapping = get_package_mappings(val, extname) | ||
| 81 | return " ".join([row[1] for row in pkgs_mapping]) | ||
| 82 | |||
| 83 | @bb.filter.filter_proc() | ||
| 84 | def suffix_filter(val, extname, prefixes): | ||
| 85 | newdeps = [] | ||
| 86 | for dep in val.split(): | ||
| 87 | newdeps.append(add_suffix(dep, extname, prefixes)) | ||
| 88 | return " ".join(newdeps) | ||
| 89 | |||
| 90 | @bb.filter.filter_proc() | ||
| 91 | def suffix_filter_regex(val, extname, prefixes): | ||
| 92 | newvar = [] | ||
| 93 | for v in val.split(): | ||
| 94 | if v.startswith("^" + extname): | ||
| 95 | newvar.append(v) | ||
| 96 | elif v.startswith("^"): | ||
| 97 | newvar.append("^" + extname + "-" + v[1:]) | ||
| 98 | else: | ||
| 99 | newvar.append(add_suffix(v, extname, prefixes)) | ||
| 100 | return " ".join(newvar) | ||
| 101 | |||
| 102 | @bb.filter.filter_proc() | ||
| 103 | def suffix_filter_deps(val, extname, prefixes): | ||
| 104 | deps = bb.utils.explode_dep_versions2(val) | ||
| 105 | newdeps = collections.OrderedDict() | ||
| 106 | for dep in deps: | ||
| 107 | newdeps[add_suffix(dep, extname, prefixes)] = deps[dep] | ||
| 108 | return bb.utils.join_deps(newdeps, False) | ||
| 109 | |||
| 110 | class ClassExtender(object): | ||
| 111 | def __init__(self, extname, prefixes, d): | ||
| 112 | self.extname = extname | ||
| 113 | self.d = d | ||
| 114 | self.prefixes = prefixes | ||
| 115 | |||
| 116 | def set_filter(self, var, deps): | ||
| 117 | if deps: | ||
| 118 | self.d.setVarFilter(var, "suffix_filter_deps(val, '" + self.extname + "', " + str(self.prefixes) + ")") | ||
| 119 | else: | ||
| 120 | self.d.setVarFilter(var, "suffix_filter(val, '" + self.extname + "', " + str(self.prefixes) + ")") | ||
| 121 | |||
| 122 | def map_packagevars(self): | ||
| 123 | self.set_filter("RDEPENDS", deps=True) | ||
| 124 | self.set_filter("RRECOMMENDS", deps=True) | ||
| 125 | self.set_filter("RSUGGESTS", deps=True) | ||
| 126 | self.set_filter("RPROVIDES", deps=True) | ||
| 127 | self.set_filter("RREPLACES", deps=True) | ||
| 128 | self.set_filter("RCONFLICTS", deps=True) | ||
| 129 | self.set_filter("PKG", deps=True) | ||
| 130 | |||
| 131 | def rename_package_variables(self, variables): | ||
| 132 | pkgs_mapping = get_package_mappings(self.d.getVar('PACKAGES'), self.extname) | ||
| 133 | self.d.setVarFilter('PACKAGES', "package_suffix_filter(val, '" + self.extname + "')") | ||
| 134 | self.d.setVarFilter('PACKAGES_DYNAMIC', "suffix_filter_regex(val, '" + self.extname + "', " + str(self.prefixes) + ")") | ||
| 135 | |||
| 136 | for pkg_mapping in pkgs_mapping: | ||
| 137 | if pkg_mapping[0].startswith("${") and pkg_mapping[0].endswith("}"): | ||
| 138 | continue | ||
| 139 | for subs in variables: | ||
| 140 | self.d.renameVar("%s:%s" % (subs, pkg_mapping[0]), "%s:%s" % (subs, pkg_mapping[1])) | ||
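To make the suffix mapping implemented by the filters above concrete, a small sketch (assumed to run in a BitBake Python environment recent enough to provide bb.filter, so that oe.classextend imports cleanly):

    from oe.classextend import add_suffix, get_package_mappings

    extname = "nativesdk"
    prefixes = ["lib32"]

    print(add_suffix("curl", extname, prefixes))            # nativesdk-curl
    print(add_suffix("nativesdk-curl", extname, prefixes))  # already extended, unchanged
    print(add_suffix("lib32-curl", extname, prefixes))      # known prefix, unchanged
    print(add_suffix("virtual/libc", extname, prefixes))    # virtual/nativesdk-libc

    # The PACKAGES mapping consumed by ClassExtender.rename_package_variables()
    print(get_package_mappings("curl curl-dev", extname))
    # [['curl', 'nativesdk-curl'], ['curl-dev', 'nativesdk-curl-dev']]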
diff --git a/meta/lib/oe/classutils.py b/meta/lib/oe/classutils.py deleted file mode 100644 index ec3f6ad720..0000000000 --- a/meta/lib/oe/classutils.py +++ /dev/null | |||
| @@ -1,49 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | class ClassRegistryMeta(type): | ||
| 8 | """Give each ClassRegistry their own registry""" | ||
| 9 | def __init__(cls, name, bases, attrs): | ||
| 10 | cls.registry = {} | ||
| 11 | type.__init__(cls, name, bases, attrs) | ||
| 12 | |||
| 13 | class ClassRegistry(type, metaclass=ClassRegistryMeta): | ||
| 14 | """Maintain a registry of classes, indexed by name. | ||
| 15 | |||
| 16 | Note that this implementation requires that the names be unique, as it uses | ||
| 17 | a dictionary to hold the classes by name. | ||
| 18 | |||
| 19 | The name in the registry can be overridden via the 'name' attribute of the | ||
| 20 | class, and the 'priority' attribute controls priority. The prioritized() | ||
| 21 | method returns the registered classes in priority order. | ||
| 22 | |||
| 23 | Subclasses of ClassRegistry may define an 'implemented' property to exert | ||
| 24 | control over whether the class will be added to the registry (e.g. to keep | ||
| 25 | abstract base classes out of the registry).""" | ||
| 26 | priority = 0 | ||
| 27 | def __init__(cls, name, bases, attrs): | ||
| 28 | super(ClassRegistry, cls).__init__(name, bases, attrs) | ||
| 29 | try: | ||
| 30 | if not cls.implemented: | ||
| 31 | return | ||
| 32 | except AttributeError: | ||
| 33 | pass | ||
| 34 | |||
| 35 | try: | ||
| 36 | cls.name | ||
| 37 | except AttributeError: | ||
| 38 | cls.name = name | ||
| 39 | cls.registry[cls.name] = cls | ||
| 40 | |||
| 41 | @classmethod | ||
| 42 | def prioritized(tcls): | ||
| 43 | return sorted(list(tcls.registry.values()), | ||
| 44 | key=lambda v: (v.priority, v.name), reverse=True) | ||
| 45 | |||
| 46 | def unregister(cls): | ||
| 47 | for key in cls.registry.keys(): | ||
| 48 | if cls.registry[key] is cls: | ||
| 49 | del cls.registry[key] | ||
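A minimal, self-contained sketch of how a registry metaclass like ClassRegistry is typically consumed (plain Python, no BitBake needed; the runner classes are invented for illustration):

    from oe.classutils import ClassRegistry

    class Runner(metaclass=ClassRegistry):
        # Abstract base: 'implemented = False' keeps it out of the registry
        implemented = False

    class QemuRunner(Runner):
        implemented = True
        priority = 10

    class SimpleRunner(Runner):
        implemented = True
        name = "simple"      # overrides the registry key
        priority = 1

    # Registered classes, highest priority first
    for cls in Runner.prioritized():
        print(cls.name, cls.priority)
    # QemuRunner 10
    # simple 1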
diff --git a/meta/lib/oe/copy_buildsystem.py b/meta/lib/oe/copy_buildsystem.py deleted file mode 100644 index ced751b835..0000000000 --- a/meta/lib/oe/copy_buildsystem.py +++ /dev/null | |||
| @@ -1,301 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | # This class should provide easy access to the different aspects of the | ||
| 7 | # buildsystem such as layers, bitbake location, etc. | ||
| 8 | # | ||
| 9 | # SDK_LAYERS_EXCLUDE: Layers which will be excluded from SDK layers. | ||
| 10 | # SDK_LAYERS_EXCLUDE_PATTERN: Similar to SDK_LAYERS_EXCLUDE, but supports | ||
| 11 | # python regular expressions, using space as separator, | ||
| 12 | # e.g.: ".*-downloads closed-.*" | ||
| 13 | # | ||
| 14 | |||
| 15 | import stat | ||
| 16 | import shutil | ||
| 17 | |||
| 18 | def _smart_copy(src, dest): | ||
| 19 | import subprocess | ||
| 20 | # smart_copy will choose the correct function depending on whether the | ||
| 21 | # source is a file or a directory. | ||
| 22 | mode = os.stat(src).st_mode | ||
| 23 | if stat.S_ISDIR(mode): | ||
| 24 | bb.utils.mkdirhier(dest) | ||
| 25 | cmd = "tar --exclude='.git' --exclude='__pycache__' --xattrs --xattrs-include='*' -cf - -C %s -p . \ | ||
| 26 | | tar --xattrs --xattrs-include='*' -xf - -C %s" % (src, dest) | ||
| 27 | subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) | ||
| 28 | else: | ||
| 29 | shutil.copyfile(src, dest) | ||
| 30 | shutil.copymode(src, dest) | ||
| 31 | |||
| 32 | class BuildSystem(object): | ||
| 33 | def __init__(self, context, d): | ||
| 34 | self.d = d | ||
| 35 | self.context = context | ||
| 36 | self.layerdirs = [os.path.abspath(pth) for pth in d.getVar('BBLAYERS').split()] | ||
| 37 | self.layers_exclude = (d.getVar('SDK_LAYERS_EXCLUDE') or "").split() | ||
| 38 | self.layers_exclude_pattern = d.getVar('SDK_LAYERS_EXCLUDE_PATTERN') | ||
| 39 | |||
| 40 | def copy_bitbake_and_layers(self, destdir, workspace_name=None): | ||
| 41 | import re | ||
| 42 | # Copy in all metadata layers + bitbake (as repositories) | ||
| 43 | copied_corebase = None | ||
| 44 | layers_copied = [] | ||
| 45 | bb.utils.mkdirhier(destdir) | ||
| 46 | layers = list(self.layerdirs) | ||
| 47 | |||
| 48 | corebase = os.path.abspath(self.d.getVar('COREBASE')) | ||
| 49 | layers.append(corebase) | ||
| 50 | # The bitbake build system uses the meta-skeleton layer as a layout | ||
| 51 | # for common recipes, e.g. the recipetool script to create kernel recipes. | ||
| 52 | # Add the meta-skeleton layer to be included as part of the eSDK installation. | ||
| 53 | layers.append(os.path.join(corebase, 'meta-skeleton')) | ||
| 54 | |||
| 55 | # Exclude layers | ||
| 56 | for layer_exclude in self.layers_exclude: | ||
| 57 | if layer_exclude in layers: | ||
| 58 | bb.note('Excluded %s from sdk layers since it is in SDK_LAYERS_EXCLUDE' % layer_exclude) | ||
| 59 | layers.remove(layer_exclude) | ||
| 60 | |||
| 61 | if self.layers_exclude_pattern: | ||
| 62 | layers_cp = layers[:] | ||
| 63 | for pattern in self.layers_exclude_pattern.split(): | ||
| 64 | for layer in layers_cp: | ||
| 65 | if re.match(pattern, layer): | ||
| 66 | bb.note('Excluded %s from sdk layers since matched SDK_LAYERS_EXCLUDE_PATTERN' % layer) | ||
| 67 | layers.remove(layer) | ||
| 68 | |||
| 69 | workspace_newname = workspace_name | ||
| 70 | if workspace_newname: | ||
| 71 | layernames = [os.path.basename(layer) for layer in layers] | ||
| 72 | extranum = 0 | ||
| 73 | while workspace_newname in layernames: | ||
| 74 | extranum += 1 | ||
| 75 | workspace_newname = '%s-%d' % (workspace_name, extranum) | ||
| 76 | |||
| 77 | corebase_files = self.d.getVar('COREBASE_FILES').split() | ||
| 78 | corebase_files = [corebase + '/' +x for x in corebase_files] | ||
| 79 | # Make sure bitbake goes in | ||
| 80 | bitbake_dir = bb.__file__.rsplit('/', 3)[0] | ||
| 81 | corebase_files.append(bitbake_dir) | ||
| 82 | |||
| 83 | for layer in layers: | ||
| 84 | layerconf = os.path.join(layer, 'conf', 'layer.conf') | ||
| 85 | layernewname = os.path.basename(layer) | ||
| 86 | workspace = False | ||
| 87 | if os.path.exists(layerconf): | ||
| 88 | with open(layerconf, 'r') as f: | ||
| 89 | if f.readline().startswith("# ### workspace layer auto-generated by devtool ###"): | ||
| 90 | if workspace_newname: | ||
| 91 | layernewname = workspace_newname | ||
| 92 | workspace = True | ||
| 93 | else: | ||
| 94 | bb.plain("NOTE: Excluding local workspace layer %s from %s" % (layer, self.context)) | ||
| 95 | continue | ||
| 96 | |||
| 97 | # If the layer was already under corebase, leave it there | ||
| 98 | # since layers such as meta have issues when moved. | ||
| 99 | layerdestpath = destdir | ||
| 100 | if corebase == os.path.dirname(layer): | ||
| 101 | layerdestpath += '/' + os.path.basename(corebase) | ||
| 102 | # If the layer is located somewhere under the same parent directory | ||
| 103 | # as corebase we keep the layer structure. | ||
| 104 | elif os.path.commonpath([layer, corebase]) == os.path.dirname(corebase): | ||
| 105 | layer_relative = os.path.relpath(layer, os.path.dirname(corebase)) | ||
| 106 | if os.path.dirname(layer_relative) != layernewname: | ||
| 107 | layerdestpath += '/' + os.path.dirname(layer_relative) | ||
| 108 | |||
| 109 | layerdestpath += '/' + layernewname | ||
| 110 | |||
| 111 | layer_relative = os.path.relpath(layerdestpath, | ||
| 112 | destdir) | ||
| 113 | # Treat corebase as special since it typically will contain | ||
| 114 | # build directories or other custom items. | ||
| 115 | if corebase == layer: | ||
| 116 | copied_corebase = layer_relative | ||
| 117 | bb.utils.mkdirhier(layerdestpath) | ||
| 118 | for f in corebase_files: | ||
| 119 | f_basename = os.path.basename(f) | ||
| 120 | destname = os.path.join(layerdestpath, f_basename) | ||
| 121 | _smart_copy(f, destname) | ||
| 122 | else: | ||
| 123 | layers_copied.append(layer_relative) | ||
| 124 | |||
| 125 | if os.path.exists(os.path.join(layerdestpath, 'conf/layer.conf')): | ||
| 126 | bb.note("Skipping layer %s, already handled" % layer) | ||
| 127 | else: | ||
| 128 | _smart_copy(layer, layerdestpath) | ||
| 129 | |||
| 130 | if workspace: | ||
| 131 | # Make some adjustments to the original workspace layer | ||
| 132 | # Drop sources (recipe tasks will be locked, so we don't need them) | ||
| 133 | srcdir = os.path.join(layerdestpath, 'sources') | ||
| 134 | if os.path.isdir(srcdir): | ||
| 135 | shutil.rmtree(srcdir) | ||
| 136 | # Drop all bbappends except the one for the image the SDK is being built for | ||
| 137 | # (because of externalsrc, the workspace bbappends will interfere with the | ||
| 138 | # locked signatures if present, and we don't need them anyway) | ||
| 139 | image_bbappend = os.path.splitext(os.path.basename(self.d.getVar('FILE')))[0] + '.bbappend' | ||
| 140 | appenddir = os.path.join(layerdestpath, 'appends') | ||
| 141 | if os.path.isdir(appenddir): | ||
| 142 | for fn in os.listdir(appenddir): | ||
| 143 | if fn == image_bbappend: | ||
| 144 | continue | ||
| 145 | else: | ||
| 146 | os.remove(os.path.join(appenddir, fn)) | ||
| 147 | # Drop README | ||
| 148 | readme = os.path.join(layerdestpath, 'README') | ||
| 149 | if os.path.exists(readme): | ||
| 150 | os.remove(readme) | ||
| 151 | # Filter out comments in layer.conf and change layer name | ||
| 152 | layerconf = os.path.join(layerdestpath, 'conf', 'layer.conf') | ||
| 153 | with open(layerconf, 'r') as f: | ||
| 154 | origlines = f.readlines() | ||
| 155 | with open(layerconf, 'w') as f: | ||
| 156 | for line in origlines: | ||
| 157 | if line.startswith('#'): | ||
| 158 | continue | ||
| 159 | line = line.replace('workspacelayer', workspace_newname) | ||
| 160 | f.write(line) | ||
| 161 | |||
| 162 | # meta-skeleton layer is added as part of the build system | ||
| 163 | # but not as a layer included in the build, therefore it is | ||
| 164 | # not reported to the function caller. | ||
| 165 | for layer in layers_copied: | ||
| 166 | if layer.endswith('/meta-skeleton'): | ||
| 167 | layers_copied.remove(layer) | ||
| 168 | break | ||
| 169 | |||
| 170 | return copied_corebase, layers_copied | ||
| 171 | |||
| 172 | def generate_locked_sigs(sigfile, d): | ||
| 173 | bb.utils.mkdirhier(os.path.dirname(sigfile)) | ||
| 174 | depd = d.getVar('BB_TASKDEPDATA', False) | ||
| 175 | tasks = ['%s:%s' % (v[2], v[1]) for v in depd.values()] | ||
| 176 | bb.parse.siggen.dump_lockedsigs(sigfile, tasks) | ||
| 177 | |||
| 178 | def prune_lockedsigs(excluded_tasks, excluded_targets, lockedsigs, onlynative, pruned_output): | ||
| 179 | with open(lockedsigs, 'r') as infile: | ||
| 180 | bb.utils.mkdirhier(os.path.dirname(pruned_output)) | ||
| 181 | with open(pruned_output, 'w') as f: | ||
| 182 | invalue = False | ||
| 183 | for line in infile: | ||
| 184 | if invalue: | ||
| 185 | if line.endswith('\\\n'): | ||
| 186 | splitval = line.strip().split(':') | ||
| 187 | if not splitval[1] in excluded_tasks and not splitval[0] in excluded_targets: | ||
| 188 | if onlynative: | ||
| 189 | if 'nativesdk' in splitval[0]: | ||
| 190 | f.write(line) | ||
| 191 | else: | ||
| 192 | f.write(line) | ||
| 193 | else: | ||
| 194 | f.write(line) | ||
| 195 | invalue = False | ||
| 196 | elif line.startswith('SIGGEN_LOCKEDSIGS_t'): | ||
| 197 | invalue = True | ||
| 198 | f.write(line) | ||
| 199 | else: | ||
| 200 | invalue = False | ||
| 201 | f.write(line) | ||
| 202 | |||
| 203 | def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_output, copy_output=None): | ||
| 204 | merged = {} | ||
| 205 | arch_order = [] | ||
| 206 | otherdata = [] | ||
| 207 | with open(lockedsigs_main, 'r') as f: | ||
| 208 | invalue = None | ||
| 209 | for line in f: | ||
| 210 | if invalue: | ||
| 211 | if line.endswith('\\\n'): | ||
| 212 | merged[invalue].append(line) | ||
| 213 | else: | ||
| 214 | invalue = None | ||
| 215 | elif line.startswith('SIGGEN_LOCKEDSIGS_t-'): | ||
| 216 | invalue = line[18:].split('=', 1)[0].rstrip() | ||
| 217 | merged[invalue] = [] | ||
| 218 | arch_order.append(invalue) | ||
| 219 | else: | ||
| 220 | invalue = None | ||
| 221 | otherdata.append(line) | ||
| 222 | |||
| 223 | with open(lockedsigs_extra, 'r') as f: | ||
| 224 | invalue = None | ||
| 225 | tocopy = {} | ||
| 226 | for line in f: | ||
| 227 | if invalue: | ||
| 228 | if line.endswith('\\\n'): | ||
| 229 | if not line in merged[invalue]: | ||
| 230 | target, task = line.strip().split(':')[:2] | ||
| 231 | if not copy_tasks or task in copy_tasks: | ||
| 232 | tocopy[invalue].append(line) | ||
| 233 | merged[invalue].append(line) | ||
| 234 | else: | ||
| 235 | invalue = None | ||
| 236 | elif line.startswith('SIGGEN_LOCKEDSIGS_t-'): | ||
| 237 | invalue = line[18:].split('=', 1)[0].rstrip() | ||
| 238 | if not invalue in merged: | ||
| 239 | merged[invalue] = [] | ||
| 240 | arch_order.append(invalue) | ||
| 241 | tocopy[invalue] = [] | ||
| 242 | |||
| 243 | def write_sigs_file(fn, types, sigs): | ||
| 244 | fulltypes = [] | ||
| 245 | bb.utils.mkdirhier(os.path.dirname(fn)) | ||
| 246 | with open(fn, 'w') as f: | ||
| 247 | for typename in types: | ||
| 248 | lines = sigs[typename] | ||
| 249 | if lines: | ||
| 250 | f.write('SIGGEN_LOCKEDSIGS_%s = "\\\n' % typename) | ||
| 251 | for line in lines: | ||
| 252 | f.write(line) | ||
| 253 | f.write(' "\n') | ||
| 254 | fulltypes.append(typename) | ||
| 255 | f.write('SIGGEN_LOCKEDSIGS_TYPES = "%s"\n' % ' '.join(fulltypes)) | ||
| 256 | f.write('\n' + ''.join(otherdata)) | ||
| 257 | |||
| 258 | if copy_output: | ||
| 259 | write_sigs_file(copy_output, list(tocopy.keys()), tocopy) | ||
| 260 | if merged_output: | ||
| 261 | write_sigs_file(merged_output, arch_order, merged) | ||
| 262 | |||
| 263 | def create_locked_sstate_cache(lockedsigs, input_sstate_cache, output_sstate_cache, d, fixedlsbstring="", filterfile=None): | ||
| 264 | import shutil | ||
| 265 | bb.note('Generating sstate-cache...') | ||
| 266 | |||
| 267 | nativelsbstring = d.getVar('NATIVELSBSTRING') | ||
| 268 | bb.process.run("PYTHONDONTWRITEBYTECODE=1 gen-lockedsig-cache %s %s %s %s %s" % (lockedsigs, input_sstate_cache, output_sstate_cache, nativelsbstring, filterfile or '')) | ||
| 269 | if fixedlsbstring and nativelsbstring != fixedlsbstring: | ||
| 270 | nativedir = output_sstate_cache + '/' + nativelsbstring | ||
| 271 | if os.path.isdir(nativedir): | ||
| 272 | destdir = os.path.join(output_sstate_cache, fixedlsbstring) | ||
| 273 | for root, _, files in os.walk(nativedir): | ||
| 274 | for fn in files: | ||
| 275 | src = os.path.join(root, fn) | ||
| 276 | dest = os.path.join(destdir, os.path.relpath(src, nativedir)) | ||
| 277 | if os.path.exists(dest): | ||
| 278 | # Already exists, and it'll be the same file, so just delete it | ||
| 279 | os.unlink(src) | ||
| 280 | else: | ||
| 281 | bb.utils.mkdirhier(os.path.dirname(dest)) | ||
| 282 | shutil.move(src, dest) | ||
| 283 | |||
| 284 | def check_sstate_task_list(d, targets, filteroutfile, cmdprefix='', cwd=None, logfile=None): | ||
| 285 | import subprocess | ||
| 286 | |||
| 287 | bb.note('Generating sstate task list...') | ||
| 288 | |||
| 289 | if not cwd: | ||
| 290 | cwd = os.getcwd() | ||
| 291 | if logfile: | ||
| 292 | logparam = '-l %s' % logfile | ||
| 293 | else: | ||
| 294 | logparam = '' | ||
| 295 | cmd = "%sPYTHONDONTWRITEBYTECODE=1 BB_SETSCENE_ENFORCE=1 PSEUDO_DISABLED=1 oe-check-sstate %s -s -o %s %s" % (cmdprefix, targets, filteroutfile, logparam) | ||
| 296 | env = dict(d.getVar('BB_ORIGENV', False)) | ||
| 297 | env.pop('BUILDDIR', '') | ||
| 298 | env.pop('BBPATH', '') | ||
| 299 | pathitems = env['PATH'].split(':') | ||
| 300 | env['PATH'] = ':'.join([item for item in pathitems if not item.endswith('/bitbake/bin')]) | ||
| 301 | bb.process.run(cmd, stderr=subprocess.STDOUT, env=env, cwd=cwd, executable='/bin/bash') | ||
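As a rough illustration of the SIGGEN_LOCKEDSIGS format that prune_lockedsigs() and merge_lockedsigs() read and write (target names and hashes are made up, and this assumes a BitBake environment where bb and oe.copy_buildsystem are importable):

    import os
    import tempfile
    import oe.copy_buildsystem as cbs

    lockedsigs = (
        'SIGGEN_LOCKEDSIGS_t-core2-64 = "\\\n'
        '    zlib:do_configure:1111111111111111 \\\n'
        '    zlib:do_package:2222222222222222 \\\n'
        '    "\n'
        'SIGGEN_LOCKEDSIGS_TYPES = "t-core2-64"\n'
    )

    with tempfile.TemporaryDirectory() as tmp:
        src = os.path.join(tmp, "locked-sigs.inc")
        out = os.path.join(tmp, "locked-sigs-pruned.inc")
        with open(src, "w") as f:
            f.write(lockedsigs)
        # Drop all do_package signatures, keep everything else
        cbs.prune_lockedsigs(["do_package"], [], src, False, out)
        print(open(out).read())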
diff --git a/meta/lib/oe/cve_check.py b/meta/lib/oe/cve_check.py deleted file mode 100644 index ae194f27cf..0000000000 --- a/meta/lib/oe/cve_check.py +++ /dev/null | |||
| @@ -1,378 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: MIT | ||
| 5 | # | ||
| 6 | |||
| 7 | import collections | ||
| 8 | import functools | ||
| 9 | import itertools | ||
| 10 | import os.path | ||
| 11 | import re | ||
| 12 | import oe.patch | ||
| 13 | |||
| 14 | _Version = collections.namedtuple( | ||
| 15 | "_Version", ["release", "patch_l", "pre_l", "pre_v"] | ||
| 16 | ) | ||
| 17 | |||
| 18 | @functools.total_ordering | ||
| 19 | class Version(): | ||
| 20 | |||
| 21 | def __init__(self, version, suffix=None): | ||
| 22 | |||
| 23 | suffixes = ["alphabetical", "patch"] | ||
| 24 | |||
| 25 | if str(suffix) == "alphabetical": | ||
| 26 | version_pattern = r"""r?v?(?:(?P<release>[0-9]+(?:[-\.][0-9]+)*)(?P<patch>[-_\.]?(?P<patch_l>[a-z]))?(?P<pre>[-_\.]?(?P<pre_l>(rc|alpha|beta|pre|preview|dev))[-_\.]?(?P<pre_v>[0-9]+)?)?)(.*)?""" | ||
| 27 | elif str(suffix) == "patch": | ||
| 28 | version_pattern = r"""r?v?(?:(?P<release>[0-9]+(?:[-\.][0-9]+)*)(?P<patch>[-_\.]?(p|patch)(?P<patch_l>[0-9]+))?(?P<pre>[-_\.]?(?P<pre_l>(rc|alpha|beta|pre|preview|dev))[-_\.]?(?P<pre_v>[0-9]+)?)?)(.*)?""" | ||
| 29 | else: | ||
| 30 | version_pattern = r"""r?v?(?:(?P<release>[0-9]+(?:[-\.][0-9]+)*)(?P<pre>[-_\.]?(?P<pre_l>(rc|alpha|beta|pre|preview|dev))[-_\.]?(?P<pre_v>[0-9]+)?)?)(.*)?""" | ||
| 31 | regex = re.compile(r"^\s*" + version_pattern + r"\s*$", re.VERBOSE | re.IGNORECASE) | ||
| 32 | |||
| 33 | match = regex.search(version) | ||
| 34 | if not match: | ||
| 35 | raise Exception("Invalid version: '{0}'".format(version)) | ||
| 36 | |||
| 37 | self._version = _Version( | ||
| 38 | release=tuple(int(i) for i in match.group("release").replace("-",".").split(".")), | ||
| 39 | patch_l=match.group("patch_l") if str(suffix) in suffixes and match.group("patch_l") else "", | ||
| 40 | pre_l=match.group("pre_l"), | ||
| 41 | pre_v=match.group("pre_v") | ||
| 42 | ) | ||
| 43 | |||
| 44 | self._key = _cmpkey( | ||
| 45 | self._version.release, | ||
| 46 | self._version.patch_l, | ||
| 47 | self._version.pre_l, | ||
| 48 | self._version.pre_v | ||
| 49 | ) | ||
| 50 | |||
| 51 | def __eq__(self, other): | ||
| 52 | if not isinstance(other, Version): | ||
| 53 | return NotImplemented | ||
| 54 | return self._key == other._key | ||
| 55 | |||
| 56 | def __gt__(self, other): | ||
| 57 | if not isinstance(other, Version): | ||
| 58 | return NotImplemented | ||
| 59 | return self._key > other._key | ||
| 60 | |||
| 61 | def _cmpkey(release, patch_l, pre_l, pre_v): | ||
| 62 | # remove leading 0 | ||
| 63 | _release = tuple( | ||
| 64 | reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release)))) | ||
| 65 | ) | ||
| 66 | |||
| 67 | _patch = patch_l.upper() | ||
| 68 | |||
| 69 | if pre_l is None and pre_v is None: | ||
| 70 | _pre = float('inf') | ||
| 71 | else: | ||
| 72 | _pre = float(pre_v) if pre_v else float('-inf') | ||
| 73 | return _release, _patch, _pre | ||
| 74 | |||
| 75 | |||
| 76 | def parse_cve_from_filename(patch_filename): | ||
| 77 | """ | ||
| 78 | Parses CVE ID from the filename | ||
| 79 | |||
| 80 | Matches the last "CVE-YYYY-ID" in the file name, even if written | ||
| 81 | in lowercase. Possible to have multiple CVE IDs in a single | ||
| 82 | file name, but only the last one will be detected from the file name. | ||
| 83 | |||
| 84 | Returns the last CVE ID found in the filename. If no CVE ID is found | ||
| 85 | an empty string is returned. | ||
| 86 | """ | ||
| 87 | cve_file_name_match = re.compile(r".*(CVE-\d{4}-\d{4,})", re.IGNORECASE) | ||
| 88 | |||
| 89 | # Check patch file name for CVE ID | ||
| 90 | fname_match = cve_file_name_match.search(patch_filename) | ||
| 91 | return fname_match.group(1).upper() if fname_match else "" | ||
| 92 | |||
| 93 | |||
| 94 | def parse_cves_from_patch_contents(patch_contents): | ||
| 95 | """ | ||
| 96 | Parses CVE IDs from patch contents | ||
| 97 | |||
| 98 | Matches all CVE IDs contained on a line that starts with "CVE: ". Any | ||
| 99 | delimiter (',', '&', "and", etc.) can be used without any issues. Multiple | ||
| 100 | "CVE:" lines can also exist. | ||
| 101 | |||
| 102 | Returns a set of all CVE IDs found in the patch contents. | ||
| 103 | """ | ||
| 104 | cve_ids = set() | ||
| 105 | cve_match = re.compile(r"CVE-\d{4}-\d{4,}") | ||
| 106 | # Search for one or more "CVE: " lines | ||
| 107 | for line in patch_contents.split("\n"): | ||
| 108 | if not line.startswith("CVE:"): | ||
| 109 | continue | ||
| 110 | cve_ids.update(cve_match.findall(line)) | ||
| 111 | return cve_ids | ||
| 112 | |||
| 113 | |||
| 114 | def parse_cves_from_patch_file(patch_file): | ||
| 115 | """ | ||
| 116 | Parses CVE IDs associated with a particular patch file, using both the filename | ||
| 117 | and patch contents. | ||
| 118 | |||
| 119 | Returns a set of all CVE IDs found in the patch filename and contents. | ||
| 120 | """ | ||
| 121 | cve_ids = set() | ||
| 122 | filename_cve = parse_cve_from_filename(patch_file) | ||
| 123 | if filename_cve: | ||
| 124 | bb.debug(2, "Found %s from patch file name %s" % (filename_cve, patch_file)) | ||
| 125 | cve_ids.add(parse_cve_from_filename(patch_file)) | ||
| 126 | |||
| 127 | # Remote patches won't be present and compressed patches won't be | ||
| 128 | # unpacked, so say we're not scanning them | ||
| 129 | if not os.path.isfile(patch_file): | ||
| 130 | bb.note("%s is remote or compressed, not scanning content" % patch_file) | ||
| 131 | return cve_ids | ||
| 132 | |||
| 133 | with open(patch_file, "r", encoding="utf-8") as f: | ||
| 134 | try: | ||
| 135 | patch_text = f.read() | ||
| 136 | except UnicodeDecodeError: | ||
| 137 | bb.debug( | ||
| 138 | 1, | ||
| 139 | "Failed to read patch %s using UTF-8 encoding" | ||
| 140 | ", trying with iso8859-1" % patch_file, | ||
| 141 | ) | ||
| 142 | f.close() | ||
| 143 | with open(patch_file, "r", encoding="iso8859-1") as f: | ||
| 144 | patch_text = f.read() | ||
| 145 | |||
| 146 | cve_ids.update(parse_cves_from_patch_contents(patch_text)) | ||
| 147 | |||
| 148 | if not cve_ids: | ||
| 149 | bb.debug(2, "Patch %s doesn't solve CVEs" % patch_file) | ||
| 150 | else: | ||
| 151 | bb.debug(2, "Patch %s solves %s" % (patch_file, ", ".join(sorted(cve_ids)))) | ||
| 152 | |||
| 153 | return cve_ids | ||
| 154 | |||
| 155 | |||
| 156 | @bb.parse.vardeps("CVE_STATUS") | ||
| 157 | def get_patched_cves(d): | ||
| 158 | """ | ||
| 159 | Determines the CVE IDs that have been solved by either patches included within | ||
| 160 | SRC_URI or by setting CVE_STATUS. | ||
| 161 | |||
| 162 | Returns a dictionary with the CVE IDs as keys and an associated dictionary of | ||
| 163 | relevant metadata as the value. | ||
| 164 | """ | ||
| 165 | patched_cves = {} | ||
| 166 | patches = oe.patch.src_patches(d) | ||
| 167 | bb.debug(2, "Scanning %d patches for CVEs" % len(patches)) | ||
| 168 | |||
| 169 | # Check each patch file | ||
| 170 | for url in patches: | ||
| 171 | patch_file = bb.fetch.decodeurl(url)[2] | ||
| 172 | for cve_id in parse_cves_from_patch_file(patch_file): | ||
| 173 | if cve_id not in patched_cves: | ||
| 174 | patched_cves[cve_id] = { | ||
| 175 | "abbrev-status": "Patched", | ||
| 176 | "status": "fix-file-included", | ||
| 177 | "resource": [patch_file], | ||
| 178 | } | ||
| 179 | else: | ||
| 180 | patched_cves[cve_id]["resource"].append(patch_file) | ||
| 181 | |||
| 182 | # Search for additional patched CVEs | ||
| 183 | for cve_id in d.getVarFlags("CVE_STATUS") or {}: | ||
| 184 | decoded_status = decode_cve_status(d, cve_id) | ||
| 185 | products = d.getVar("CVE_PRODUCT") | ||
| 186 | if has_cve_product_match(decoded_status, products): | ||
| 187 | if cve_id in patched_cves: | ||
| 188 | bb.warn( | ||
| 189 | 'CVE_STATUS[%s] = "%s" is overwriting previous status of "%s: %s"' | ||
| 190 | % ( | ||
| 191 | cve_id, | ||
| 192 | d.getVarFlag("CVE_STATUS", cve_id), | ||
| 193 | patched_cves[cve_id]["abbrev-status"], | ||
| 194 | patched_cves[cve_id]["status"], | ||
| 195 | ) | ||
| 196 | ) | ||
| 197 | patched_cves[cve_id] = { | ||
| 198 | "abbrev-status": decoded_status["mapping"], | ||
| 199 | "status": decoded_status["detail"], | ||
| 200 | "justification": decoded_status["description"], | ||
| 201 | "affected-vendor": decoded_status["vendor"], | ||
| 202 | "affected-product": decoded_status["product"], | ||
| 203 | } | ||
| 204 | |||
| 205 | return patched_cves | ||
| 206 | |||
| 207 | |||
| 208 | def get_cpe_ids(cve_product, version): | ||
| 209 | """ | ||
| 210 | Get list of CPE identifiers for the given product and version | ||
| 211 | """ | ||
| 212 | |||
| 213 | version = version.split("+git")[0] | ||
| 214 | |||
| 215 | cpe_ids = [] | ||
| 216 | for product in cve_product.split(): | ||
| 217 | # CVE_PRODUCT in recipes may include vendor information for CPE identifiers. If not, | ||
| 218 | # use wildcard for vendor. | ||
| 219 | if ":" in product: | ||
| 220 | vendor, product = product.split(":", 1) | ||
| 221 | else: | ||
| 222 | vendor = "*" | ||
| 223 | |||
| 224 | cpe_id = 'cpe:2.3:*:{}:{}:{}:*:*:*:*:*:*:*'.format(vendor, product, version) | ||
| 225 | cpe_ids.append(cpe_id) | ||
| 226 | |||
| 227 | return cpe_ids | ||
| 228 | |||
| 229 | def cve_check_merge_jsons(output, data): | ||
| 230 | """ | ||
| 231 | Merge the data in the "package" property to the main data file | ||
| 232 | output | ||
| 233 | """ | ||
| 234 | if output["version"] != data["version"]: | ||
| 235 | bb.error("Version mismatch when merging JSON outputs") | ||
| 236 | return | ||
| 237 | |||
| 238 | for product in output["package"]: | ||
| 239 | if product["name"] == data["package"][0]["name"]: | ||
| 240 | bb.error("Error adding the same package %s twice" % product["name"]) | ||
| 241 | return | ||
| 242 | |||
| 243 | output["package"].append(data["package"][0]) | ||
| 244 | |||
| 245 | def update_symlinks(target_path, link_path): | ||
| 246 | """ | ||
| 247 | Update a symbolic link link_path to point to target_path. | ||
| 248 | Remove the link and recreate it if it exists and is different. | ||
| 249 | """ | ||
| 250 | if link_path != target_path and os.path.exists(target_path): | ||
| 251 | if os.path.exists(os.path.realpath(link_path)): | ||
| 252 | os.remove(link_path) | ||
| 253 | os.symlink(os.path.basename(target_path), link_path) | ||
| 254 | |||
| 255 | |||
| 256 | def convert_cve_version(version): | ||
| 257 | """ | ||
| 258 | This function converts from CVE format to Yocto version format. | ||
| 259 | eg 8.3_p1 -> 8.3p1, 6.2_rc1 -> 6.2-rc1 | ||
| 260 | |||
| 261 | Unless it is redefined using CVE_VERSION in the recipe, | ||
| 262 | cve_check uses the version in the name of the recipe (${PV}) | ||
| 263 | to check vulnerabilities against a CVE in the database downloaded from NVD. | ||
| 264 | |||
| 265 | When the version has an update, i.e. | ||
| 266 | "p1" in OpenSSH 8.3p1, | ||
| 267 | "-rc1" in linux kernel 6.2-rc1, | ||
| 268 | the database stores the version as version_update (8.3_p1, 6.2_rc1). | ||
| 269 | Therefore, we must transform this version before comparing to the | ||
| 270 | recipe version. | ||
| 271 | |||
| 272 | In this case, the parameter of the function is 8.3_p1. | ||
| 273 | If the version uses the Release Candidate format, "rc", | ||
| 274 | this function replaces the '_' by '-'. | ||
| 275 | If the version uses the Update format, "p", | ||
| 276 | this function removes the '_' completely. | ||
| 277 | """ | ||
| 278 | import re | ||
| 279 | |||
| 280 | matches = re.match('^([0-9.]+)_((p|rc)[0-9]+)$', version) | ||
| 281 | |||
| 282 | if not matches: | ||
| 283 | return version | ||
| 284 | |||
| 285 | version = matches.group(1) | ||
| 286 | update = matches.group(2) | ||
| 287 | |||
| 288 | if matches.group(3) == "rc": | ||
| 289 | return version + '-' + update | ||
| 290 | |||
| 291 | return version + update | ||
| 292 | |||
| 293 | @bb.parse.vardeps("CVE_STATUS", "CVE_CHECK_STATUSMAP") | ||
| 294 | def decode_cve_status(d, cve): | ||
| 295 | """ | ||
| 296 | Convert CVE_STATUS into status, vendor, product, detail and description. | ||
| 297 | """ | ||
| 298 | status = d.getVarFlag("CVE_STATUS", cve) | ||
| 299 | if not status: | ||
| 300 | return {} | ||
| 301 | |||
| 302 | status_split = status.split(':', 4) | ||
| 303 | status_out = {} | ||
| 304 | status_out["detail"] = status_split[0] | ||
| 305 | product = "*" | ||
| 306 | vendor = "*" | ||
| 307 | description = "" | ||
| 308 | if len(status_split) >= 5 and status_split[1].strip() == "cpe": | ||
| 309 | # Both vendor and product are mandatory if cpe: present, the syntax is then: | ||
| 310 | # detail: cpe:vendor:product:description | ||
| 311 | vendor = status_split[2].strip() | ||
| 312 | product = status_split[3].strip() | ||
| 313 | description = status_split[4].strip() | ||
| 314 | elif len(status_split) >= 2 and status_split[1].strip() == "cpe": | ||
| 315 | # Malformed CPE | ||
| 316 | bb.warn( | ||
| 317 | 'Invalid CPE information for CVE_STATUS[%s] = "%s", not setting CPE' | ||
| 318 | % (cve, status) | ||
| 319 | ) | ||
| 320 | else: | ||
| 321 | # Other case: no CPE, the syntax is then: | ||
| 322 | # detail: description | ||
| 323 | description = status.split(':', 1)[1].strip() if (len(status_split) > 1) else "" | ||
| 324 | |||
| 325 | status_out["vendor"] = vendor | ||
| 326 | status_out["product"] = product | ||
| 327 | status_out["description"] = description | ||
| 328 | |||
| 329 | detail = status_out["detail"] | ||
| 330 | status_mapping = d.getVarFlag("CVE_CHECK_STATUSMAP", detail) | ||
| 331 | if status_mapping is None: | ||
| 332 | bb.warn( | ||
| 333 | 'Invalid detail "%s" for CVE_STATUS[%s] = "%s", fallback to Unpatched' | ||
| 334 | % (detail, cve, status) | ||
| 335 | ) | ||
| 336 | status_mapping = "Unpatched" | ||
| 337 | status_out["mapping"] = status_mapping | ||
| 338 | |||
| 339 | return status_out | ||
| 340 | |||
| 341 | def has_cve_product_match(detailed_status, products): | ||
| 342 | """ | ||
| 343 | Check product/vendor match between detailed_status from decode_cve_status and a string of | ||
| 344 | products (like from CVE_PRODUCT) | ||
| 345 | """ | ||
| 346 | for product in products.split(): | ||
| 347 | vendor = "*" | ||
| 348 | if ":" in product: | ||
| 349 | vendor, product = product.split(":", 1) | ||
| 350 | |||
| 351 | if (vendor == detailed_status["vendor"] or detailed_status["vendor"] == "*") and \ | ||
| 352 | (product == detailed_status["product"] or detailed_status["product"] == "*"): | ||
| 353 | return True | ||
| 354 | |||
| 355 | #if no match, return False | ||
| 356 | return False | ||
| 357 | |||
| 358 | def extend_cve_status(d): | ||
| 359 | # do this only once in case multiple classes use this | ||
| 360 | if d.getVar("CVE_STATUS_EXTENDED"): | ||
| 361 | return | ||
| 362 | d.setVar("CVE_STATUS_EXTENDED", "1") | ||
| 363 | |||
| 364 | # Fallback all CVEs from CVE_CHECK_IGNORE to CVE_STATUS | ||
| 365 | cve_check_ignore = d.getVar("CVE_CHECK_IGNORE") | ||
| 366 | if cve_check_ignore: | ||
| 367 | bb.warn("CVE_CHECK_IGNORE is deprecated in favor of CVE_STATUS") | ||
| 368 | for cve in (d.getVar("CVE_CHECK_IGNORE") or "").split(): | ||
| 369 | d.setVarFlag("CVE_STATUS", cve, "ignored") | ||
| 370 | |||
| 371 | # Process CVE_STATUS_GROUPS to set multiple statuses and optional detail or description at once | ||
| 372 | for cve_status_group in (d.getVar("CVE_STATUS_GROUPS") or "").split(): | ||
| 373 | cve_group = d.getVar(cve_status_group) | ||
| 374 | if cve_group is not None: | ||
| 375 | for cve in cve_group.split(): | ||
| 376 | d.setVarFlag("CVE_STATUS", cve, d.getVarFlag(cve_status_group, "status")) | ||
| 377 | else: | ||
| 378 | bb.warn("CVE_STATUS_GROUPS contains undefined variable %s" % cve_status_group) | ||
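A short sketch of the version and status handling above (the imports assume a BitBake Python environment so that oe.cve_check resolves; the CVE IDs, product names and CVE_STATUS values are illustrative):

    from oe.cve_check import Version, convert_cve_version, parse_cve_from_filename

    # NVD stores "update" suffixes with an underscore; normalise before comparing
    print(convert_cve_version("8.3_p1"))    # 8.3p1
    print(convert_cve_version("6.2_rc1"))   # 6.2-rc1

    # Suffix-aware ordering, as used when checking CVE version ranges
    print(Version("1.2.3") < Version("1.2.4"))                  # True
    print(Version("8.3p1", "patch") > Version("8.3", "patch"))  # True
    print(Version("6.2-rc1") < Version("6.2"))                  # True, pre-releases sort lower

    print(parse_cve_from_filename("0001-fix-CVE-2023-1234.patch"))  # CVE-2023-1234

    # CVE_STATUS syntax decoded by decode_cve_status(), set in recipe metadata:
    #   CVE_STATUS[CVE-2023-1234] = "not-applicable-platform: only affects Windows"
    #   CVE_STATUS[CVE-2023-5678] = "backported-patch: cpe:somevendor:someproduct: fixed by local patch"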
diff --git a/meta/lib/oe/data.py b/meta/lib/oe/data.py deleted file mode 100644 index 37121cfad2..0000000000 --- a/meta/lib/oe/data.py +++ /dev/null | |||
| @@ -1,53 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | import json | ||
| 8 | import oe.maketype | ||
| 9 | |||
| 10 | def typed_value(key, d): | ||
| 11 | """Construct a value for the specified metadata variable, using its flags | ||
| 12 | to determine the type and parameters for construction.""" | ||
| 13 | var_type = d.getVarFlag(key, 'type') | ||
| 14 | flags = d.getVarFlags(key) | ||
| 15 | if flags is not None: | ||
| 16 | flags = dict((flag, d.expand(value)) | ||
| 17 | for flag, value in list(flags.items())) | ||
| 18 | else: | ||
| 19 | flags = {} | ||
| 20 | |||
| 21 | try: | ||
| 22 | return oe.maketype.create(d.getVar(key) or '', var_type, **flags) | ||
| 23 | except (TypeError, ValueError) as exc: | ||
| 24 | bb.msg.fatal("Data", "%s: %s" % (key, str(exc))) | ||
| 25 | |||
| 26 | def export2json(d, json_file, expand=True, searchString="",replaceString=""): | ||
| 27 | data2export = {} | ||
| 28 | keys2export = [] | ||
| 29 | |||
| 30 | for key in d.keys(): | ||
| 31 | if key.startswith("_"): | ||
| 32 | continue | ||
| 33 | elif key.startswith("BB"): | ||
| 34 | continue | ||
| 35 | elif key.startswith("B_pn"): | ||
| 36 | continue | ||
| 37 | elif key.startswith("do_"): | ||
| 38 | continue | ||
| 39 | elif d.getVarFlag(key, "func"): | ||
| 40 | continue | ||
| 41 | |||
| 42 | keys2export.append(key) | ||
| 43 | |||
| 44 | for key in keys2export: | ||
| 45 | try: | ||
| 46 | data2export[key] = d.getVar(key, expand).replace(searchString,replaceString) | ||
| 47 | except bb.data_smart.ExpansionError: | ||
| 48 | data2export[key] = '' | ||
| 49 | except AttributeError: | ||
| 50 | pass | ||
| 51 | |||
| 52 | with open(json_file, "w") as f: | ||
| 53 | json.dump(data2export, f, skipkeys=True, indent=4, sort_keys=True) | ||
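A sketch of how typed_value() is normally used; 'd' is the usual datastore available in recipe Python code or a devpyshell, and the variable name here is hypothetical. The 'type' varflag selects the constructor from oe.maketype:

    import oe.data

    # Hypothetical variable; in a recipe this would be written as:
    #   FOO_ENABLED = "1"
    #   FOO_ENABLED[type] = "boolean"
    d.setVar("FOO_ENABLED", "1")
    d.setVarFlag("FOO_ENABLED", "type", "boolean")

    value = oe.data.typed_value("FOO_ENABLED", d)
    print(type(value), value)   # <class 'bool'> True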
diff --git a/meta/lib/oe/distro_check.py b/meta/lib/oe/distro_check.py deleted file mode 100644 index 3494520f40..0000000000 --- a/meta/lib/oe/distro_check.py +++ /dev/null | |||
| @@ -1,314 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | def create_socket(url, d): | ||
| 8 | import urllib | ||
| 9 | from bb.utils import export_proxies | ||
| 10 | |||
| 11 | export_proxies(d) | ||
| 12 | return urllib.request.urlopen(url) | ||
| 13 | |||
| 14 | def get_links_from_url(url, d): | ||
| 15 | "Return all the href links found on the web location" | ||
| 16 | |||
| 17 | from bs4 import BeautifulSoup, SoupStrainer | ||
| 18 | |||
| 19 | soup = BeautifulSoup(create_socket(url,d), "html.parser", parse_only=SoupStrainer("a")) | ||
| 20 | hyperlinks = [] | ||
| 21 | for line in soup.find_all('a', href=True): | ||
| 22 | hyperlinks.append(line['href'].strip('/')) | ||
| 23 | return hyperlinks | ||
| 24 | |||
| 25 | def find_latest_numeric_release(url, d): | ||
| 26 | "Find the latest listed numeric release on the given url" | ||
| 27 | max=0 | ||
| 28 | maxstr="" | ||
| 29 | for link in get_links_from_url(url, d): | ||
| 30 | try: | ||
| 31 | # TODO use bb.utils.vercmp_string_op() | ||
| 32 | release = float(link) | ||
| 33 | except: | ||
| 34 | release = 0 | ||
| 35 | if release > max: | ||
| 36 | max = release | ||
| 37 | maxstr = link | ||
| 38 | return maxstr | ||
| 39 | |||
| 40 | def is_src_rpm(name): | ||
| 41 | "Check if the link is pointing to a src.rpm file" | ||
| 42 | return name.endswith(".src.rpm") | ||
| 43 | |||
| 44 | def package_name_from_srpm(srpm): | ||
| 45 | "Strip out the package name from the src.rpm filename" | ||
| 46 | |||
| 47 | # ca-certificates-2016.2.7-1.0.fc24.src.rpm | ||
| 48 | # ^name ^ver ^release^removed | ||
| 49 | (name, version, release) = srpm.replace(".src.rpm", "").rsplit("-", 2) | ||
| 50 | return name | ||
| 51 | |||
| 52 | def get_source_package_list_from_url(url, section, d): | ||
| 53 | "Return a sectioned list of package names from a URL list" | ||
| 54 | |||
| 55 | bb.note("Reading %s: %s" % (url, section)) | ||
| 56 | links = get_links_from_url(url, d) | ||
| 57 | srpms = filter(is_src_rpm, links) | ||
| 58 | names_list = map(package_name_from_srpm, srpms) | ||
| 59 | |||
| 60 | new_pkgs = set() | ||
| 61 | for pkgs in names_list: | ||
| 62 | new_pkgs.add(pkgs + ":" + section) | ||
| 63 | return new_pkgs | ||
| 64 | |||
| 65 | def get_source_package_list_from_url_by_letter(url, section, d): | ||
| 66 | import string | ||
| 67 | from urllib.error import HTTPError | ||
| 68 | packages = set() | ||
| 69 | for letter in (string.ascii_lowercase + string.digits): | ||
| 70 | # Not all subfolders may exist, so silently handle 404 | ||
| 71 | try: | ||
| 72 | packages |= get_source_package_list_from_url(url + "/" + letter, section, d) | ||
| 73 | except HTTPError as e: | ||
| 74 | if e.code != 404: raise | ||
| 75 | return packages | ||
| 76 | |||
| 77 | def get_latest_released_fedora_source_package_list(d): | ||
| 78 | "Returns list of all the names of packages in the latest Fedora distro" | ||
| 79 | latest = find_latest_numeric_release("http://archive.fedoraproject.org/pub/fedora/linux/releases/", d) | ||
| 80 | package_names = get_source_package_list_from_url_by_letter("http://archive.fedoraproject.org/pub/fedora/linux/releases/%s/Everything/source/tree/Packages/" % latest, "main", d) | ||
| 81 | package_names |= get_source_package_list_from_url_by_letter("http://archive.fedoraproject.org/pub/fedora/linux/updates/%s/SRPMS/" % latest, "updates", d) | ||
| 82 | return latest, package_names | ||
| 83 | |||
| 84 | def get_latest_released_opensuse_source_package_list(d): | ||
| 85 | "Returns list of all the names of packages in the latest openSUSE distro" | ||
| 86 | latest = find_latest_numeric_release("http://download.opensuse.org/source/distribution/leap", d) | ||
| 87 | |||
| 88 | package_names = get_source_package_list_from_url("http://download.opensuse.org/source/distribution/leap/%s/repo/oss/suse/src/" % latest, "main", d) | ||
| 89 | package_names |= get_source_package_list_from_url("http://download.opensuse.org/update/leap/%s/oss/src/" % latest, "updates", d) | ||
| 90 | return latest, package_names | ||
| 91 | |||
| 92 | def get_latest_released_clear_source_package_list(d): | ||
| 93 | latest = find_latest_numeric_release("https://download.clearlinux.org/releases/", d) | ||
| 94 | package_names = get_source_package_list_from_url("https://download.clearlinux.org/releases/%s/clear/source/SRPMS/" % latest, "main", d) | ||
| 95 | return latest, package_names | ||
| 96 | |||
| 97 | def find_latest_debian_release(url, d): | ||
| 98 | "Find the latest listed debian release on the given url" | ||
| 99 | |||
| 100 | releases = [link.replace("Debian", "") | ||
| 101 | for link in get_links_from_url(url, d) | ||
| 102 | if link.startswith("Debian")] | ||
| 103 | releases.sort() | ||
| 104 | try: | ||
| 105 | return releases[-1] | ||
| 106 | except: | ||
| 107 | return "_NotFound_" | ||
| 108 | |||
| 109 | def get_debian_style_source_package_list(url, section, d): | ||
| 110 | "Return the list of package-names stored in the debian style Sources.gz file" | ||
| 111 | import gzip | ||
| 112 | |||
| 113 | package_names = set() | ||
| 114 | for line in gzip.open(create_socket(url, d), mode="rt"): | ||
| 115 | if line.startswith("Package:"): | ||
| 116 | pkg = line.split(":", 1)[1].strip() | ||
| 117 | package_names.add(pkg + ":" + section) | ||
| 118 | return package_names | ||
| 119 | |||
| 120 | def get_latest_released_debian_source_package_list(d): | ||
| 121 | "Returns list of all the names of packages in the latest Debian distro" | ||
| 122 | latest = find_latest_debian_release("http://ftp.debian.org/debian/dists/", d) | ||
| 123 | url = "http://ftp.debian.org/debian/dists/stable/main/source/Sources.gz" | ||
| 124 | package_names = get_debian_style_source_package_list(url, "main", d) | ||
| 125 | url = "http://ftp.debian.org/debian/dists/stable-proposed-updates/main/source/Sources.gz" | ||
| 126 | package_names |= get_debian_style_source_package_list(url, "updates", d) | ||
| 127 | return latest, package_names | ||
| 128 | |||
| 129 | def find_latest_ubuntu_release(url, d): | ||
| 130 | """ | ||
| 131 | Find the latest listed Ubuntu release on the given ubuntu/dists/ URL. | ||
| 132 | |||
| 133 | To avoid matching development releases look for distributions that have | ||
| 134 | updates, so the resulting distro could be any supported release. | ||
| 135 | """ | ||
| 136 | url += "?C=M;O=D" # Descending Sort by Last Modified | ||
| 137 | for link in get_links_from_url(url, d): | ||
| 138 | if "-updates" in link: | ||
| 139 | distro = link.replace("-updates", "") | ||
| 140 | return distro | ||
| 141 | return "_NotFound_" | ||
| 142 | |||
| 143 | def get_latest_released_ubuntu_source_package_list(d): | ||
| 144 | "Returns list of all the names of packages in the latest Ubuntu distro" | ||
| 145 | latest = find_latest_ubuntu_release("http://archive.ubuntu.com/ubuntu/dists/", d) | ||
| 146 | url = "http://archive.ubuntu.com/ubuntu/dists/%s/main/source/Sources.gz" % latest | ||
| 147 | package_names = get_debian_style_source_package_list(url, "main", d) | ||
| 148 | url = "http://archive.ubuntu.com/ubuntu/dists/%s-updates/main/source/Sources.gz" % latest | ||
| 149 | package_names |= get_debian_style_source_package_list(url, "updates", d) | ||
| 150 | return latest, package_names | ||
| 151 | |||
| 152 | def create_distro_packages_list(distro_check_dir, d): | ||
| 153 | import shutil | ||
| 154 | |||
| 155 | pkglst_dir = os.path.join(distro_check_dir, "package_lists") | ||
| 156 | bb.utils.remove(pkglst_dir, True) | ||
| 157 | bb.utils.mkdirhier(pkglst_dir) | ||
| 158 | |||
| 159 | per_distro_functions = ( | ||
| 160 | ("Debian", get_latest_released_debian_source_package_list), | ||
| 161 | ("Ubuntu", get_latest_released_ubuntu_source_package_list), | ||
| 162 | ("Fedora", get_latest_released_fedora_source_package_list), | ||
| 163 | ("openSUSE", get_latest_released_opensuse_source_package_list), | ||
| 164 | ("Clear", get_latest_released_clear_source_package_list), | ||
| 165 | ) | ||
| 166 | |||
| 167 | for name, fetcher_func in per_distro_functions: | ||
| 168 | try: | ||
| 169 | release, package_list = fetcher_func(d) | ||
| 170 | except Exception as e: | ||
| 171 | bb.warn("Cannot fetch packages for %s: %s" % (name, e)) | ||
| 172 | bb.note("Distro: %s, Latest Release: %s, # src packages: %d" % (name, release, len(package_list))) | ||
| 173 | if len(package_list) == 0: | ||
| 174 | bb.error("Didn't fetch any packages for %s %s" % (name, release)) | ||
| 175 | |||
| 176 | package_list_file = os.path.join(pkglst_dir, name + "-" + release) | ||
| 177 | with open(package_list_file, 'w') as f: | ||
| 178 | for pkg in sorted(package_list): | ||
| 179 | f.write(pkg + "\n") | ||
| 180 | |||
| 181 | def update_distro_data(distro_check_dir, datetime, d): | ||
| 182 | """ | ||
| 183 | If the distro package list data is old then rebuild it. | ||
| 184 | The operation has to be protected by a lock so that | ||
| 185 | only one thread performs it at a time. | ||
| 186 | """ | ||
| 187 | if not os.path.isdir (distro_check_dir): | ||
| 188 | try: | ||
| 189 | bb.note ("Making new directory: %s" % distro_check_dir) | ||
| 190 | os.makedirs (distro_check_dir) | ||
| 191 | except OSError: | ||
| 192 | raise Exception('Unable to create directory %s' % (distro_check_dir)) | ||
| 193 | |||
| 194 | |||
| 195 | datetime_file = os.path.join(distro_check_dir, "build_datetime") | ||
| 196 | saved_datetime = "_invalid_" | ||
| 197 | import fcntl | ||
| 198 | try: | ||
| 199 | if not os.path.exists(datetime_file): | ||
| 200 | open(datetime_file, 'w+').close() # touch the file so that the next open won't fail | ||
| 201 | |||
| 202 | f = open(datetime_file, "r+") | ||
| 203 | fcntl.lockf(f, fcntl.LOCK_EX) | ||
| 204 | saved_datetime = f.read() | ||
| 205 | if saved_datetime[0:8] != datetime[0:8]: | ||
| 206 | bb.note("The build datetime did not match: saved:%s current:%s" % (saved_datetime, datetime)) | ||
| 207 | bb.note("Regenerating distro package lists") | ||
| 208 | create_distro_packages_list(distro_check_dir, d) | ||
| 209 | f.seek(0) | ||
| 210 | f.write(datetime) | ||
| 211 | |||
| 212 | except OSError as e: | ||
| 213 | raise Exception('Unable to open timestamp: %s' % e) | ||
| 214 | finally: | ||
| 215 | fcntl.lockf(f, fcntl.LOCK_UN) | ||
| 216 | f.close() | ||
| 217 | |||
| 218 | def compare_in_distro_packages_list(distro_check_dir, d): | ||
| 219 | if not os.path.isdir(distro_check_dir): | ||
| 220 | raise Exception("compare_in_distro_packages_list: invalid distro_check_dir passed") | ||
| 221 | |||
| 222 | localdata = bb.data.createCopy(d) | ||
| 223 | pkglst_dir = os.path.join(distro_check_dir, "package_lists") | ||
| 224 | matching_distros = [] | ||
| 225 | pn = recipe_name = d.getVar('PN') | ||
| 226 | bb.note("Checking: %s" % pn) | ||
| 227 | |||
| 228 | if pn.find("-native") != -1: | ||
| 229 | pnstripped = pn.split("-native") | ||
| 230 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) | ||
| 231 | recipe_name = pnstripped[0] | ||
| 232 | |||
| 233 | if pn.startswith("nativesdk-"): | ||
| 234 | pnstripped = pn.split("nativesdk-") | ||
| 235 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[1] + ":" + d.getVar('OVERRIDES')) | ||
| 236 | recipe_name = pnstripped[1] | ||
| 237 | |||
| 238 | if pn.find("-cross") != -1: | ||
| 239 | pnstripped = pn.split("-cross") | ||
| 240 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) | ||
| 241 | recipe_name = pnstripped[0] | ||
| 242 | |||
| 243 | if pn.find("-initial") != -1: | ||
| 244 | pnstripped = pn.split("-initial") | ||
| 245 | localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) | ||
| 246 | recipe_name = pnstripped[0] | ||
| 247 | |||
| 248 | bb.note("Recipe: %s" % recipe_name) | ||
| 249 | |||
| 250 | distro_exceptions = dict({"OE-Core":'OE-Core', "OpenedHand":'OpenedHand', "Intel":'Intel', "Upstream":'Upstream', "Windriver":'Windriver', "OSPDT":'OSPDT Approved', "Poky":'poky'}) | ||
| 251 | tmp = localdata.getVar('DISTRO_PN_ALIAS') or "" | ||
| 252 | for str in tmp.split(): | ||
| 253 | if str and str.find("=") == -1 and distro_exceptions.get(str): | ||
| 254 | matching_distros.append(str) | ||
| 255 | |||
| 256 | distro_pn_aliases = {} | ||
| 257 | for str in tmp.split(): | ||
| 258 | if "=" in str: | ||
| 259 | (dist, pn_alias) = str.split('=') | ||
| 260 | distro_pn_aliases[dist.strip().lower()] = pn_alias.strip() | ||
| 261 | |||
| 262 | for file in os.listdir(pkglst_dir): | ||
| 263 | (distro, distro_release) = file.split("-") | ||
| 264 | f = open(os.path.join(pkglst_dir, file), "r") | ||
| 265 | for line in f: | ||
| 266 | (pkg, section) = line.split(":") | ||
| 267 | if distro.lower() in distro_pn_aliases: | ||
| 268 | pn = distro_pn_aliases[distro.lower()] | ||
| 269 | else: | ||
| 270 | pn = recipe_name | ||
| 271 | if pn == pkg: | ||
| 272 | matching_distros.append(distro + "-" + section[:-1]) # strip the \n at the end | ||
| 273 | f.close() | ||
| 274 | break | ||
| 275 | f.close() | ||
| 276 | |||
| 277 | for item in tmp.split(): | ||
| 278 | matching_distros.append(item) | ||
| 279 | bb.note("Matching: %s" % matching_distros) | ||
| 280 | return matching_distros | ||
| 281 | |||
| 282 | def create_log_file(d, logname): | ||
| 283 | logpath = d.getVar('LOG_DIR') | ||
| 284 | bb.utils.mkdirhier(logpath) | ||
| 285 | logfn, logsuffix = os.path.splitext(logname) | ||
| 286 | logfile = os.path.join(logpath, "%s.%s%s" % (logfn, d.getVar('DATETIME'), logsuffix)) | ||
| 287 | if not os.path.exists(logfile): | ||
| 288 | slogfile = os.path.join(logpath, logname) | ||
| 289 | if os.path.exists(slogfile): | ||
| 290 | os.remove(slogfile) | ||
| 291 | open(logfile, 'w+').close() | ||
| 292 | os.symlink(logfile, slogfile) | ||
| 293 | d.setVar('LOG_FILE', logfile) | ||
| 294 | return logfile | ||
| 295 | |||
| 296 | |||
| 297 | def save_distro_check_result(result, datetime, result_file, d): | ||
| 298 | pn = d.getVar('PN') | ||
| 299 | logdir = d.getVar('LOG_DIR') | ||
| 300 | if not logdir: | ||
| 301 | bb.error("LOG_DIR variable is not defined, can't write the distro_check results") | ||
| 302 | return | ||
| 303 | bb.utils.mkdirhier(logdir) | ||
| 304 | |||
| 305 | line = pn | ||
| 306 | for i in result: | ||
| 307 | line = line + "," + i | ||
| 308 | f = open(result_file, "a") | ||
| 309 | import fcntl | ||
| 310 | fcntl.lockf(f, fcntl.LOCK_EX) | ||
| 311 | f.seek(0, os.SEEK_END) # seek to the end of file | ||
| 312 | f.write(line + "\n") | ||
| 313 | fcntl.lockf(f, fcntl.LOCK_UN) | ||
| 314 | f.close() | ||
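The function above appends one comma-separated line per recipe to the shared result file while holding the file lock. A minimal sketch of reading that file back (the path is a placeholder, not taken from the original code):

    # Each line is "<recipe>,<matching-distro-entry>,..." as written by
    # save_distro_check_result(); the path below is hypothetical.
    with open("/tmp/distro_check_results.csv") as f:
        for line in f:
            recipe, *distros = line.rstrip("\n").split(",")
            print(recipe, distros)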
diff --git a/meta/lib/oe/elf.py b/meta/lib/oe/elf.py deleted file mode 100644 index 9794453092..0000000000 --- a/meta/lib/oe/elf.py +++ /dev/null | |||
| @@ -1,148 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | def machine_dict(d): | ||
| 8 | # Generating this data is slow, so cache it | ||
| 9 | if not hasattr(machine_dict, "machdata"): | ||
| 10 | machine_dict.machdata = { | ||
| 11 | # TARGET_OS TARGET_ARCH MACHINE, OSABI, ABIVERSION, Little Endian, 32bit? | ||
| 12 | "darwin9" : { | ||
| 13 | "arm" : (40, 0, 0, True, 32), | ||
| 14 | }, | ||
| 15 | "eabi" : { | ||
| 16 | "arm" : (40, 0, 0, True, 32), | ||
| 17 | }, | ||
| 18 | "elf" : { | ||
| 19 | "aarch64" : (183, 0, 0, True, 64), | ||
| 20 | "aarch64_be" :(183, 0, 0, False, 64), | ||
| 21 | "i586" : (3, 0, 0, True, 32), | ||
| 22 | "i686" : (3, 0, 0, True, 32), | ||
| 23 | "x86_64": (62, 0, 0, True, 64), | ||
| 24 | "epiphany": (4643, 0, 0, True, 32), | ||
| 25 | "lm32": (138, 0, 0, False, 32), | ||
| 26 | "loongarch64":(258, 0, 0, True, 64), | ||
| 27 | "mips": ( 8, 0, 0, False, 32), | ||
| 28 | "mipsel": ( 8, 0, 0, True, 32), | ||
| 29 | "microblaze": (189, 0, 0, False, 32), | ||
| 30 | "microblazeel":(189, 0, 0, True, 32), | ||
| 31 | "powerpc": (20, 0, 0, False, 32), | ||
| 32 | "riscv32": (243, 0, 0, True, 32), | ||
| 33 | "riscv64": (243, 0, 0, True, 64), | ||
| 34 | }, | ||
| 35 | "linux" : { | ||
| 36 | "aarch64" : (183, 0, 0, True, 64), | ||
| 37 | "aarch64_be" :(183, 0, 0, False, 64), | ||
| 38 | "arm" : (40, 97, 0, True, 32), | ||
| 39 | "armeb": (40, 97, 0, False, 32), | ||
| 40 | "powerpc": (20, 0, 0, False, 32), | ||
| 41 | "powerpc64": (21, 0, 0, False, 64), | ||
| 42 | "powerpc64le": (21, 0, 0, True, 64), | ||
| 43 | "i386": ( 3, 0, 0, True, 32), | ||
| 44 | "i486": ( 3, 0, 0, True, 32), | ||
| 45 | "i586": ( 3, 0, 0, True, 32), | ||
| 46 | "i686": ( 3, 0, 0, True, 32), | ||
| 47 | "x86_64": (62, 0, 0, True, 64), | ||
| 48 | "ia64": (50, 0, 0, True, 64), | ||
| 49 | "alpha": (36902, 0, 0, True, 64), | ||
| 50 | "hppa": (15, 3, 0, False, 32), | ||
| 51 | "loongarch64":(258, 0, 0, True, 64), | ||
| 52 | "m68k": ( 4, 0, 0, False, 32), | ||
| 53 | "mips": ( 8, 0, 0, False, 32), | ||
| 54 | "mipsel": ( 8, 0, 0, True, 32), | ||
| 55 | "mips64": ( 8, 0, 0, False, 64), | ||
| 56 | "mips64el": ( 8, 0, 0, True, 64), | ||
| 57 | "mipsisa32r6": ( 8, 0, 0, False, 32), | ||
| 58 | "mipsisa32r6el": ( 8, 0, 0, True, 32), | ||
| 59 | "mipsisa64r6": ( 8, 0, 0, False, 64), | ||
| 60 | "mipsisa64r6el": ( 8, 0, 0, True, 64), | ||
| 61 | "nios2": (113, 0, 0, True, 32), | ||
| 62 | "riscv32": (243, 0, 0, True, 32), | ||
| 63 | "riscv64": (243, 0, 0, True, 64), | ||
| 64 | "s390": (22, 0, 0, False, 32), | ||
| 65 | "sh4": (42, 0, 0, True, 32), | ||
| 66 | "sparc": ( 2, 0, 0, False, 32), | ||
| 67 | "microblaze": (189, 0, 0, False, 32), | ||
| 68 | "microblazeel":(189, 0, 0, True, 32), | ||
| 69 | }, | ||
| 70 | "linux-android" : { | ||
| 71 | "aarch64" : (183, 0, 0, True, 64), | ||
| 72 | "i686": ( 3, 0, 0, True, 32), | ||
| 73 | "x86_64": (62, 0, 0, True, 64), | ||
| 74 | }, | ||
| 75 | "linux-androideabi" : { | ||
| 76 | "arm" : (40, 97, 0, True, 32), | ||
| 77 | }, | ||
| 78 | "linux-musl" : { | ||
| 79 | "aarch64" : (183, 0, 0, True, 64), | ||
| 80 | "aarch64_be" :(183, 0, 0, False, 64), | ||
| 81 | "arm" : ( 40, 97, 0, True, 32), | ||
| 82 | "armeb": ( 40, 97, 0, False, 32), | ||
| 83 | "powerpc": ( 20, 0, 0, False, 32), | ||
| 84 | "powerpc64": ( 21, 0, 0, False, 64), | ||
| 85 | "powerpc64le": (21, 0, 0, True, 64), | ||
| 86 | "i386": ( 3, 0, 0, True, 32), | ||
| 87 | "i486": ( 3, 0, 0, True, 32), | ||
| 88 | "i586": ( 3, 0, 0, True, 32), | ||
| 89 | "i686": ( 3, 0, 0, True, 32), | ||
| 90 | "x86_64": ( 62, 0, 0, True, 64), | ||
| 91 | "loongarch64":( 258, 0, 0, True, 64), | ||
| 92 | "mips": ( 8, 0, 0, False, 32), | ||
| 93 | "mipsel": ( 8, 0, 0, True, 32), | ||
| 94 | "mips64": ( 8, 0, 0, False, 64), | ||
| 95 | "mips64el": ( 8, 0, 0, True, 64), | ||
| 96 | "microblaze": (189, 0, 0, False, 32), | ||
| 97 | "microblazeel":(189, 0, 0, True, 32), | ||
| 98 | "riscv32": (243, 0, 0, True, 32), | ||
| 99 | "riscv64": (243, 0, 0, True, 64), | ||
| 100 | "sh4": ( 42, 0, 0, True, 32), | ||
| 101 | }, | ||
| 102 | "uclinux-uclibc" : { | ||
| 103 | "bfin": ( 106, 0, 0, True, 32), | ||
| 104 | }, | ||
| 105 | "linux-gnueabi" : { | ||
| 106 | "arm" : (40, 0, 0, True, 32), | ||
| 107 | "armeb" : (40, 0, 0, False, 32), | ||
| 108 | }, | ||
| 109 | "linux-musleabi" : { | ||
| 110 | "arm" : (40, 0, 0, True, 32), | ||
| 111 | "armeb" : (40, 0, 0, False, 32), | ||
| 112 | }, | ||
| 113 | "linux-gnuspe" : { | ||
| 114 | "powerpc": (20, 0, 0, False, 32), | ||
| 115 | }, | ||
| 116 | "linux-muslspe" : { | ||
| 117 | "powerpc": (20, 0, 0, False, 32), | ||
| 118 | }, | ||
| 119 | "linux-gnu" : { | ||
| 120 | "powerpc": (20, 0, 0, False, 32), | ||
| 121 | "sh4": (42, 0, 0, True, 32), | ||
| 122 | }, | ||
| 123 | "linux-gnu_ilp32" : { | ||
| 124 | "aarch64" : (183, 0, 0, True, 32), | ||
| 125 | }, | ||
| 126 | "linux-gnux32" : { | ||
| 127 | "x86_64": (62, 0, 0, True, 32), | ||
| 128 | }, | ||
| 129 | "linux-muslx32" : { | ||
| 130 | "x86_64": (62, 0, 0, True, 32), | ||
| 131 | }, | ||
| 132 | "linux-gnun32" : { | ||
| 133 | "mips64": ( 8, 0, 0, False, 32), | ||
| 134 | "mips64el": ( 8, 0, 0, True, 32), | ||
| 135 | "mipsisa64r6": ( 8, 0, 0, False, 32), | ||
| 136 | "mipsisa64r6el":( 8, 0, 0, True, 32), | ||
| 137 | }, | ||
| 138 | } | ||
| 139 | |||
| 140 | # Add in any extra user supplied data which may come from a BSP layer, removing the | ||
| 141 | # need to always change this file directly | ||
| 142 | extra_machdata = (d and d.getVar("PACKAGEQA_EXTRA_MACHDEFFUNCS") or "").split() | ||
| 143 | for m in extra_machdata: | ||
| 144 | call = m + "(machdata, d)" | ||
| 145 | locs = { "machdata" : machine_dict.machdata, "d" : d} | ||
| 146 | machine_dict.machdata = bb.utils.better_eval(call, locs) | ||
| 147 | |||
| 148 | return machine_dict.machdata | ||
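The PACKAGEQA_EXTRA_MACHDEFFUNCS hook above calls each listed function with the machdata dictionary and the datastore, and keeps whatever the function returns. A minimal sketch of a BSP-layer extension, assuming a hypothetical function name and architecture entry:

    # In a layer conf file (illustrative):
    #   PACKAGEQA_EXTRA_MACHDEFFUNCS += "mylayer_update_machdata"
    #
    # In a lib/ Python module of that layer:
    def mylayer_update_machdata(machdata, d):
        # (ELF machine, OSABI, ABI version, little endian?, bitness)
        machdata.setdefault("linux", {})["myarch"] = (0x1234, 0, 0, True, 64)
        return machdata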
diff --git a/meta/lib/oe/fitimage.py b/meta/lib/oe/fitimage.py deleted file mode 100644 index f303799155..0000000000 --- a/meta/lib/oe/fitimage.py +++ /dev/null | |||
| @@ -1,547 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | # This file contains common functions for fitImage generation | ||
| 7 | |||
| 8 | import os | ||
| 9 | import shlex | ||
| 10 | import subprocess | ||
| 11 | import bb | ||
| 12 | |||
| 13 | from oeqa.utils.commands import runCmd | ||
| 14 | |||
| 15 | class ItsNode: | ||
| 16 | INDENT_SIZE = 8 | ||
| 17 | |||
| 18 | def __init__(self, name, parent_node, sub_nodes=None, properties=None): | ||
| 19 | self.name = name | ||
| 20 | self.parent_node = parent_node | ||
| 21 | |||
| 22 | self.sub_nodes = [] | ||
| 23 | if sub_nodes: | ||
| 24 | self.sub_nodes = sub_nodes | ||
| 25 | |||
| 26 | self.properties = {} | ||
| 27 | if properties: | ||
| 28 | self.properties = properties | ||
| 29 | |||
| 30 | if parent_node: | ||
| 31 | parent_node.add_sub_node(self) | ||
| 32 | |||
| 33 | def add_sub_node(self, sub_node): | ||
| 34 | self.sub_nodes.append(sub_node) | ||
| 35 | |||
| 36 | def add_property(self, key, value): | ||
| 37 | self.properties[key] = value | ||
| 38 | |||
| 39 | def emit(self, f, indent): | ||
| 40 | indent_str_name = " " * indent | ||
| 41 | indent_str_props = " " * (indent + self.INDENT_SIZE) | ||
| 42 | f.write("%s%s {\n" % (indent_str_name, self.name)) | ||
| 43 | for key, value in self.properties.items(): | ||
| 44 | bb.debug(1, "key: %s, value: %s" % (key, str(value))) | ||
| 45 | # Single integer: <0x12ab> | ||
| 46 | if isinstance(value, int): | ||
| 47 | f.write(indent_str_props + key + ' = <0x%x>;\n' % value) | ||
| 48 | # list of strings: "string1", "string2" or integers: <0x12ab 0x34cd> | ||
| 49 | elif isinstance(value, list): | ||
| 50 | if len(value) == 0: | ||
| 51 | f.write(indent_str_props + key + ' = "";\n') | ||
| 52 | elif isinstance(value[0], int): | ||
| 53 | list_entries = ' '.join('0x%x' % entry for entry in value) | ||
| 54 | f.write(indent_str_props + key + ' = <%s>;\n' % list_entries) | ||
| 55 | else: | ||
| 56 | list_entries = ', '.join('"%s"' % entry for entry in value) | ||
| 57 | f.write(indent_str_props + key + ' = %s;\n' % list_entries) | ||
| 58 | elif isinstance(value, str): | ||
| 59 | # path: /incbin/("path/to/file") | ||
| 60 | if key in ["data"] and value.startswith('/incbin/('): | ||
| 61 | f.write(indent_str_props + key + ' = %s;\n' % value) | ||
| 62 | # Integers which are already string formatted | ||
| 63 | elif value.startswith("<") and value.endswith(">"): | ||
| 64 | f.write(indent_str_props + key + ' = %s;\n' % value) | ||
| 65 | else: | ||
| 66 | f.write(indent_str_props + key + ' = "%s";\n' % value) | ||
| 67 | else: | ||
| 68 | bb.fatal("%s has unexpected data type." % str(value)) | ||
| 69 | for sub_node in self.sub_nodes: | ||
| 70 | sub_node.emit(f, indent + self.INDENT_SIZE) | ||
| 71 | f.write(indent_str_name + '};\n') | ||
| 72 | |||
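For illustration, a minimal sketch of the device-tree-source text that emit() produces for a tiny node tree (the names and values are placeholders):

    root = ItsNode("/", None, properties={"#address-cells": "<1>"})
    ItsNode("images", root, properties={"description": "example"})
    with open("example.its", "w") as f:
        f.write("/dts-v1/;\n\n")
        root.emit(f, 0)
    # example.its now contains roughly:
    #   / {
    #           #address-cells = <1>;
    #           images {
    #                   description = "example";
    #           };
    #   };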
| 73 | class ItsNodeImages(ItsNode): | ||
| 74 | def __init__(self, parent_node): | ||
| 75 | super().__init__("images", parent_node) | ||
| 76 | |||
| 77 | class ItsNodeConfigurations(ItsNode): | ||
| 78 | def __init__(self, parent_node): | ||
| 79 | super().__init__("configurations", parent_node) | ||
| 80 | |||
| 81 | class ItsNodeHash(ItsNode): | ||
| 82 | def __init__(self, name, parent_node, algo, opt_props=None): | ||
| 83 | properties = { | ||
| 84 | "algo": algo | ||
| 85 | } | ||
| 86 | if opt_props: | ||
| 87 | properties.update(opt_props) | ||
| 88 | super().__init__(name, parent_node, None, properties) | ||
| 89 | |||
| 90 | class ItsImageSignature(ItsNode): | ||
| 91 | def __init__(self, name, parent_node, algo, keyname, opt_props=None): | ||
| 92 | properties = { | ||
| 93 | "algo": algo, | ||
| 94 | "key-name-hint": keyname | ||
| 95 | } | ||
| 96 | if opt_props: | ||
| 97 | properties.update(opt_props) | ||
| 98 | super().__init__(name, parent_node, None, properties) | ||
| 99 | |||
| 100 | class ItsNodeImage(ItsNode): | ||
| 101 | def __init__(self, name, parent_node, description, type, compression, sub_nodes=None, opt_props=None): | ||
| 102 | properties = { | ||
| 103 | "description": description, | ||
| 104 | "type": type, | ||
| 105 | "compression": compression, | ||
| 106 | } | ||
| 107 | if opt_props: | ||
| 108 | properties.update(opt_props) | ||
| 109 | super().__init__(name, parent_node, sub_nodes, properties) | ||
| 110 | |||
| 111 | class ItsNodeDtb(ItsNodeImage): | ||
| 112 | def __init__(self, name, parent_node, description, type, compression, | ||
| 113 | sub_nodes=None, opt_props=None, compatible=None): | ||
| 114 | super().__init__(name, parent_node, description, type, compression, sub_nodes, opt_props) | ||
| 115 | self.compatible = compatible | ||
| 116 | |||
| 117 | class ItsNodeDtbAlias(ItsNode): | ||
| 118 | """Additional Configuration Node for a DTB | ||
| 119 | |||
| 120 | Symlinks pointing to a DTB file are handled by an additional | ||
| 121 | configuration node referring to another DTB image node. | ||
| 122 | """ | ||
| 123 | def __init__(self, name, alias_name, compatible=None): | ||
| 124 | super().__init__(name, parent_node=None, sub_nodes=None, properties=None) | ||
| 125 | self.alias_name = alias_name | ||
| 126 | self.compatible = compatible | ||
| 127 | |||
| 128 | class ItsNodeConfigurationSignature(ItsNode): | ||
| 129 | def __init__(self, name, parent_node, algo, keyname, opt_props=None): | ||
| 130 | properties = { | ||
| 131 | "algo": algo, | ||
| 132 | "key-name-hint": keyname | ||
| 133 | } | ||
| 134 | if opt_props: | ||
| 135 | properties.update(opt_props) | ||
| 136 | super().__init__(name, parent_node, None, properties) | ||
| 137 | |||
| 138 | class ItsNodeConfiguration(ItsNode): | ||
| 139 | def __init__(self, name, parent_node, description, sub_nodes=None, opt_props=None): | ||
| 140 | properties = { | ||
| 141 | "description": description, | ||
| 142 | } | ||
| 143 | if opt_props: | ||
| 144 | properties.update(opt_props) | ||
| 145 | super().__init__(name, parent_node, sub_nodes, properties) | ||
| 146 | |||
| 147 | class ItsNodeRootKernel(ItsNode): | ||
| 148 | """Create FIT images for the kernel | ||
| 149 | |||
| 150 | Currently exactly one kernel (no more, no fewer) can be added to the FIT | ||
| 151 | image, along with zero or more device trees and zero or one ramdisk. | ||
| 152 | |||
| 153 | If a device tree is included in the FIT image, the default configuration is | ||
| 154 | the first DTB. If no DTB is present, the default configuration is the kernel. | ||
| 155 | """ | ||
| 156 | def __init__(self, description, address_cells, host_prefix, arch, conf_prefix, | ||
| 157 | sign_enable=False, sign_keydir=None, | ||
| 158 | mkimage=None, mkimage_dtcopts=None, | ||
| 159 | mkimage_sign=None, mkimage_sign_args=None, | ||
| 160 | hash_algo=None, sign_algo=None, pad_algo=None, | ||
| 161 | sign_keyname_conf=None, | ||
| 162 | sign_individual=False, sign_keyname_img=None): | ||
| 163 | props = { | ||
| 164 | "description": description, | ||
| 165 | "#address-cells": f"<{address_cells}>" | ||
| 166 | } | ||
| 167 | super().__init__("/", None, None, props) | ||
| 168 | self.images = ItsNodeImages(self) | ||
| 169 | self.configurations = ItsNodeConfigurations(self) | ||
| 170 | |||
| 171 | self._host_prefix = host_prefix | ||
| 172 | self._arch = arch | ||
| 173 | self._conf_prefix = conf_prefix | ||
| 174 | |||
| 175 | # Signature related properties | ||
| 176 | self._sign_enable = sign_enable | ||
| 177 | self._sign_keydir = sign_keydir | ||
| 178 | self._mkimage = mkimage | ||
| 179 | self._mkimage_dtcopts = mkimage_dtcopts | ||
| 180 | self._mkimage_sign = mkimage_sign | ||
| 181 | self._mkimage_sign_args = mkimage_sign_args | ||
| 182 | self._hash_algo = hash_algo | ||
| 183 | self._sign_algo = sign_algo | ||
| 184 | self._pad_algo = pad_algo | ||
| 185 | self._sign_keyname_conf = sign_keyname_conf | ||
| 186 | self._sign_individual = sign_individual | ||
| 187 | self._sign_keyname_img = sign_keyname_img | ||
| 188 | self._sanitize_sign_config() | ||
| 189 | |||
| 190 | self._dtbs = [] | ||
| 191 | self._dtb_alias = [] | ||
| 192 | self._kernel = None | ||
| 193 | self._ramdisk = None | ||
| 194 | self._bootscr = None | ||
| 195 | self._setup = None | ||
| 196 | |||
| 197 | def _sanitize_sign_config(self): | ||
| 198 | if self._sign_enable: | ||
| 199 | if not self._hash_algo: | ||
| 200 | bb.fatal("FIT image signing is enabled but no hash algorithm is provided.") | ||
| 201 | if not self._sign_algo: | ||
| 202 | bb.fatal("FIT image signing is enabled but no signature algorithm is provided.") | ||
| 203 | if not self._pad_algo: | ||
| 204 | bb.fatal("FIT image signing is enabled but no padding algorithm is provided.") | ||
| 205 | if not self._sign_keyname_conf: | ||
| 206 | bb.fatal("FIT image signing is enabled but no configuration key name is provided.") | ||
| 207 | if self._sign_individual and not self._sign_keyname_img: | ||
| 208 | bb.fatal("FIT image signing is enabled for individual images but no image key name is provided.") | ||
| 209 | |||
| 210 | def write_its_file(self, itsfile): | ||
| 211 | with open(itsfile, 'w') as f: | ||
| 212 | f.write("/dts-v1/;\n\n") | ||
| 213 | self.emit(f, 0) | ||
| 214 | |||
| 215 | def its_add_node_image(self, image_id, description, image_type, compression, opt_props): | ||
| 216 | image_node = ItsNodeImage( | ||
| 217 | image_id, | ||
| 218 | self.images, | ||
| 219 | description, | ||
| 220 | image_type, | ||
| 221 | compression, | ||
| 222 | opt_props=opt_props | ||
| 223 | ) | ||
| 224 | if self._hash_algo: | ||
| 225 | ItsNodeHash( | ||
| 226 | "hash-1", | ||
| 227 | image_node, | ||
| 228 | self._hash_algo | ||
| 229 | ) | ||
| 230 | if self._sign_individual: | ||
| 231 | ItsImageSignature( | ||
| 232 | "signature-1", | ||
| 233 | image_node, | ||
| 234 | f"{self._hash_algo},{self._sign_algo}", | ||
| 235 | self._sign_keyname_img | ||
| 236 | ) | ||
| 237 | return image_node | ||
| 238 | |||
| 239 | def its_add_node_dtb(self, image_id, description, image_type, compression, opt_props, compatible): | ||
| 240 | dtb_node = ItsNodeDtb( | ||
| 241 | image_id, | ||
| 242 | self.images, | ||
| 243 | description, | ||
| 244 | image_type, | ||
| 245 | compression, | ||
| 246 | opt_props=opt_props, | ||
| 247 | compatible=compatible | ||
| 248 | ) | ||
| 249 | if self._hash_algo: | ||
| 250 | ItsNodeHash( | ||
| 251 | "hash-1", | ||
| 252 | dtb_node, | ||
| 253 | self._hash_algo | ||
| 254 | ) | ||
| 255 | if self._sign_individual: | ||
| 256 | ItsImageSignature( | ||
| 257 | "signature-1", | ||
| 258 | dtb_node, | ||
| 259 | f"{self._hash_algo},{self._sign_algo}", | ||
| 260 | self._sign_keyname_img | ||
| 261 | ) | ||
| 262 | return dtb_node | ||
| 263 | |||
| 264 | def fitimage_emit_section_kernel(self, kernel_id, kernel_path, compression, | ||
| 265 | load, entrypoint, mkimage_kernel_type, entrysymbol=None): | ||
| 266 | """Emit the fitImage ITS kernel section""" | ||
| 267 | if self._kernel: | ||
| 268 | bb.fatal("Kernel section already exists in the ITS file.") | ||
| 269 | if entrysymbol: | ||
| 270 | result = subprocess.run([self._host_prefix + "nm", "vmlinux"], capture_output=True, text=True) | ||
| 271 | for line in result.stdout.splitlines(): | ||
| 272 | parts = line.split() | ||
| 273 | if len(parts) == 3 and parts[2] == entrysymbol: | ||
| 274 | entrypoint = "<0x%s>" % parts[0] | ||
| 275 | break | ||
| 276 | kernel_node = self.its_add_node_image( | ||
| 277 | kernel_id, | ||
| 278 | "Linux kernel", | ||
| 279 | mkimage_kernel_type, | ||
| 280 | compression, | ||
| 281 | { | ||
| 282 | "data": '/incbin/("' + kernel_path + '")', | ||
| 283 | "arch": self._arch, | ||
| 284 | "os": "linux", | ||
| 285 | "load": f"<{load}>", | ||
| 286 | "entry": f"<{entrypoint}>" | ||
| 287 | } | ||
| 288 | ) | ||
| 289 | self._kernel = kernel_node | ||
| 290 | |||
| 291 | def fitimage_emit_section_dtb(self, dtb_id, dtb_path, dtb_loadaddress=None, | ||
| 292 | dtbo_loadaddress=None, add_compatible=False): | ||
| 293 | """Emit the fitImage ITS DTB section""" | ||
| 294 | load=None | ||
| 295 | dtb_ext = os.path.splitext(dtb_path)[1] | ||
| 296 | if dtb_ext == ".dtbo": | ||
| 297 | if dtbo_loadaddress: | ||
| 298 | load = dtbo_loadaddress | ||
| 299 | elif dtb_loadaddress: | ||
| 300 | load = dtb_loadaddress | ||
| 301 | |||
| 302 | opt_props = { | ||
| 303 | "data": '/incbin/("' + dtb_path + '")', | ||
| 304 | "arch": self._arch | ||
| 305 | } | ||
| 306 | if load: | ||
| 307 | opt_props["load"] = f"<{load}>" | ||
| 308 | |||
| 309 | # Preserve the DTB's compatible string to be added to the configuration node | ||
| 310 | compatible = None | ||
| 311 | if add_compatible: | ||
| 312 | compatible = get_compatible_from_dtb(dtb_path) | ||
| 313 | |||
| 314 | dtb_node = self.its_add_node_dtb( | ||
| 315 | "fdt-" + dtb_id, | ||
| 316 | "Flattened Device Tree blob", | ||
| 317 | "flat_dt", | ||
| 318 | "none", | ||
| 319 | opt_props, | ||
| 320 | compatible | ||
| 321 | ) | ||
| 322 | self._dtbs.append(dtb_node) | ||
| 323 | |||
| 324 | def fitimage_emit_section_dtb_alias(self, dtb_alias_id, dtb_path, add_compatible=False): | ||
| 325 | """Add a configuration node referring to another DTB""" | ||
| 326 | # Preserve the DTB's compatible string to be added to the configuration node | ||
| 327 | compatible = None | ||
| 328 | if add_compatible: | ||
| 329 | compatible = get_compatible_from_dtb(dtb_path) | ||
| 330 | |||
| 331 | dtb_id = os.path.basename(dtb_path) | ||
| 332 | dtb_alias_node = ItsNodeDtbAlias("fdt-" + dtb_id, dtb_alias_id, compatible) | ||
| 333 | self._dtb_alias.append(dtb_alias_node) | ||
| 334 | bb.warn(f"compatible: {compatible}, dtb_alias_id: {dtb_alias_id}, dtb_id: {dtb_id}, dtb_path: {dtb_path}") | ||
| 335 | |||
| 336 | def fitimage_emit_section_boot_script(self, bootscr_id, bootscr_path): | ||
| 337 | """Emit the fitImage ITS u-boot script section""" | ||
| 338 | if self._bootscr: | ||
| 339 | bb.fatal("U-boot script section already exists in the ITS file.") | ||
| 340 | bootscr_node = self.its_add_node_image( | ||
| 341 | bootscr_id, | ||
| 342 | "U-boot script", | ||
| 343 | "script", | ||
| 344 | "none", | ||
| 345 | { | ||
| 346 | "data": '/incbin/("' + bootscr_path + '")', | ||
| 347 | "arch": self._arch, | ||
| 348 | "type": "script" | ||
| 349 | } | ||
| 350 | ) | ||
| 351 | self._bootscr = bootscr_node | ||
| 352 | |||
| 353 | def fitimage_emit_section_setup(self, setup_id, setup_path): | ||
| 354 | """Emit the fitImage ITS setup section""" | ||
| 355 | if self._setup: | ||
| 356 | bb.fatal("Setup section already exists in the ITS file.") | ||
| 357 | load = "<0x00090000>" | ||
| 358 | entry = "<0x00090000>" | ||
| 359 | setup_node = self.its_add_node_image( | ||
| 360 | setup_id, | ||
| 361 | "Linux setup.bin", | ||
| 362 | "x86_setup", | ||
| 363 | "none", | ||
| 364 | { | ||
| 365 | "data": '/incbin/("' + setup_path + '")', | ||
| 366 | "arch": self._arch, | ||
| 367 | "os": "linux", | ||
| 368 | "load": load, | ||
| 369 | "entry": entry | ||
| 370 | } | ||
| 371 | ) | ||
| 372 | self._setup = setup_node | ||
| 373 | |||
| 374 | def fitimage_emit_section_ramdisk(self, ramdisk_id, ramdisk_path, description="ramdisk", load=None, entry=None): | ||
| 375 | """Emit the fitImage ITS ramdisk section""" | ||
| 376 | if self._ramdisk: | ||
| 377 | bb.fatal("Ramdisk section already exists in the ITS file.") | ||
| 378 | opt_props = { | ||
| 379 | "data": '/incbin/("' + ramdisk_path + '")', | ||
| 380 | "type": "ramdisk", | ||
| 381 | "arch": self._arch, | ||
| 382 | "os": "linux" | ||
| 383 | } | ||
| 384 | if load: | ||
| 385 | opt_props["load"] = f"<{load}>" | ||
| 386 | if entry: | ||
| 387 | opt_props["entry"] = f"<{entry}>" | ||
| 388 | |||
| 389 | ramdisk_node = self.its_add_node_image( | ||
| 390 | ramdisk_id, | ||
| 391 | description, | ||
| 392 | "ramdisk", | ||
| 393 | "none", | ||
| 394 | opt_props | ||
| 395 | ) | ||
| 396 | self._ramdisk = ramdisk_node | ||
| 397 | |||
| 398 | def _fitimage_emit_one_section_config(self, conf_node_name, dtb=None): | ||
| 399 | """Emit the fitImage ITS configuration section""" | ||
| 400 | opt_props = {} | ||
| 401 | conf_desc = [] | ||
| 402 | sign_entries = [] | ||
| 403 | |||
| 404 | if self._kernel: | ||
| 405 | conf_desc.append("Linux kernel") | ||
| 406 | opt_props["kernel"] = self._kernel.name | ||
| 407 | if self._sign_enable: | ||
| 408 | sign_entries.append("kernel") | ||
| 409 | |||
| 410 | if dtb: | ||
| 411 | conf_desc.append("FDT blob") | ||
| 412 | opt_props["fdt"] = dtb.name | ||
| 413 | if dtb.compatible: | ||
| 414 | opt_props["compatible"] = dtb.compatible | ||
| 415 | if self._sign_enable: | ||
| 416 | sign_entries.append("fdt") | ||
| 417 | |||
| 418 | if self._ramdisk: | ||
| 419 | conf_desc.append("ramdisk") | ||
| 420 | opt_props["ramdisk"] = self._ramdisk.name | ||
| 421 | if self._sign_enable: | ||
| 422 | sign_entries.append("ramdisk") | ||
| 423 | |||
| 424 | if self._bootscr: | ||
| 425 | conf_desc.append("u-boot script") | ||
| 426 | opt_props["bootscr"] = self._bootscr.name | ||
| 427 | if self._sign_enable: | ||
| 428 | sign_entries.append("bootscr") | ||
| 429 | |||
| 430 | if self._setup: | ||
| 431 | conf_desc.append("setup") | ||
| 432 | opt_props["setup"] = self._setup.name | ||
| 433 | if self._sign_enable: | ||
| 434 | sign_entries.append("setup") | ||
| 435 | |||
| 436 | # First added configuration is the default configuration | ||
| 437 | default_flag = "0" | ||
| 438 | if len(self.configurations.sub_nodes) == 0: | ||
| 439 | default_flag = "1" | ||
| 440 | |||
| 441 | conf_node = ItsNodeConfiguration( | ||
| 442 | conf_node_name, | ||
| 443 | self.configurations, | ||
| 444 | f"{default_flag} {', '.join(conf_desc)}", | ||
| 445 | opt_props=opt_props | ||
| 446 | ) | ||
| 447 | if self._hash_algo: | ||
| 448 | ItsNodeHash( | ||
| 449 | "hash-1", | ||
| 450 | conf_node, | ||
| 451 | self._hash_algo | ||
| 452 | ) | ||
| 453 | if self._sign_enable: | ||
| 454 | ItsNodeConfigurationSignature( | ||
| 455 | "signature-1", | ||
| 456 | conf_node, | ||
| 457 | f"{self._hash_algo},{self._sign_algo}", | ||
| 458 | self._sign_keyname_conf, | ||
| 459 | opt_props={ | ||
| 460 | "padding": self._pad_algo, | ||
| 461 | "sign-images": sign_entries | ||
| 462 | } | ||
| 463 | ) | ||
| 464 | |||
| 465 | def fitimage_emit_section_config(self, default_dtb_image=None): | ||
| 466 | if self._dtbs: | ||
| 467 | for dtb in self._dtbs: | ||
| 468 | dtb_name = dtb.name | ||
| 469 | if dtb.name.startswith("fdt-"): | ||
| 470 | dtb_name = dtb.name[len("fdt-"):] | ||
| 471 | self._fitimage_emit_one_section_config(self._conf_prefix + dtb_name, dtb) | ||
| 472 | for dtb in self._dtb_alias: | ||
| 473 | self._fitimage_emit_one_section_config(self._conf_prefix + dtb.alias_name, dtb) | ||
| 474 | else: | ||
| 475 | # Currently exactly one kernel is supported. | ||
| 476 | self._fitimage_emit_one_section_config(self._conf_prefix + "1") | ||
| 477 | |||
| 478 | default_conf = self.configurations.sub_nodes[0].name | ||
| 479 | if default_dtb_image and self._dtbs: | ||
| 480 | default_conf = self._conf_prefix + default_dtb_image | ||
| 481 | self.configurations.add_property('default', default_conf) | ||
| 482 | |||
| 483 | def run_mkimage_assemble(self, itsfile, fitfile): | ||
| 484 | cmd = [ | ||
| 485 | self._mkimage, | ||
| 486 | '-f', itsfile, | ||
| 487 | fitfile | ||
| 488 | ] | ||
| 489 | if self._mkimage_dtcopts: | ||
| 490 | cmd.insert(1, '-D') | ||
| 491 | cmd.insert(2, self._mkimage_dtcopts) | ||
| 492 | try: | ||
| 493 | subprocess.run(cmd, check=True, capture_output=True) | ||
| 494 | except subprocess.CalledProcessError as e: | ||
| 495 | bb.fatal(f"Command '{' '.join(cmd)}' failed with return code {e.returncode}\nstdout: {e.stdout.decode()}\nstderr: {e.stderr.decode()}\nitsfile: {os.path.abspath(itsfile)}") | ||
| 496 | |||
| 497 | def run_mkimage_sign(self, fitfile): | ||
| 498 | if not self._sign_enable: | ||
| 499 | bb.debug(1, "FIT image signing is disabled. Skipping signing.") | ||
| 500 | return | ||
| 501 | |||
| 502 | # Some sanity checks because mkimage exits with 0 also without needed keys | ||
| 503 | sign_key_path = os.path.join(self._sign_keydir, self._sign_keyname_conf) | ||
| 504 | if not os.path.exists(sign_key_path + '.key') or not os.path.exists(sign_key_path + '.crt'): | ||
| 505 | bb.fatal("%s.key or .crt does not exist" % sign_key_path) | ||
| 506 | if self._sign_individual: | ||
| 507 | sign_key_img_path = os.path.join(self._sign_keydir, self._sign_keyname_img) | ||
| 508 | if not os.path.exists(sign_key_img_path + '.key') or not os.path.exists(sign_key_img_path + '.crt'): | ||
| 509 | bb.fatal("%s.key or .crt does not exist" % sign_key_img_path) | ||
| 510 | |||
| 511 | cmd = [ | ||
| 512 | self._mkimage_sign, | ||
| 513 | '-F', | ||
| 514 | '-k', self._sign_keydir, | ||
| 515 | '-r', fitfile | ||
| 516 | ] | ||
| 517 | if self._mkimage_dtcopts: | ||
| 518 | cmd.extend(['-D', self._mkimage_dtcopts]) | ||
| 519 | if self._mkimage_sign_args: | ||
| 520 | cmd.extend(shlex.split(self._mkimage_sign_args)) | ||
| 521 | try: | ||
| 522 | subprocess.run(cmd, check=True, capture_output=True) | ||
| 523 | except subprocess.CalledProcessError as e: | ||
| 524 | bb.fatal(f"Command '{' '.join(cmd)}' failed with return code {e.returncode}\nstdout: {e.stdout.decode()}\nstderr: {e.stderr.decode()}") | ||
| 525 | |||
| 526 | |||
| 527 | def symlink_points_below(file_or_symlink, expected_parent_dir): | ||
| 528 | """returns symlink destination if it points below directory""" | ||
| 529 | file_path = os.path.join(expected_parent_dir, file_or_symlink) | ||
| 530 | if not os.path.islink(file_path): | ||
| 531 | return None | ||
| 532 | |||
| 533 | realpath = os.path.relpath(os.path.realpath(file_path), expected_parent_dir) | ||
| 534 | if realpath.startswith(".."): | ||
| 535 | return None | ||
| 536 | |||
| 537 | return realpath | ||
| 538 | |||
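A brief, hypothetical use of the helper above (paths are illustrative):

    target = symlink_points_below("example.dtb", "/path/to/deploy")
    if target is not None:
        # "example.dtb" is a symlink whose real target stays inside the
        # deploy directory; target holds the relative path to that target.
        print("alias for", target)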
| 539 | def get_compatible_from_dtb(dtb_path, fdtget_path="fdtget"): | ||
| 540 | compatible = None | ||
| 541 | cmd = [fdtget_path, "-t", "s", dtb_path, "/", "compatible"] | ||
| 542 | try: | ||
| 543 | ret = subprocess.run(cmd, check=True, capture_output=True, text=True) | ||
| 544 | compatible = ret.stdout.strip().split() | ||
| 545 | except subprocess.CalledProcessError: | ||
| 546 | compatible = None | ||
| 547 | return compatible | ||
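Taken together, the classes above can assemble a simple unsigned fitImage. A hypothetical driver, with placeholder paths, load addresses and tool names (not values from the original file):

    fit = ItsNodeRootKernel("Example kernel fitImage", "1",
                            "aarch64-linux-", "arm64", "conf-",
                            mkimage="uboot-mkimage")
    fit.fitimage_emit_section_kernel("kernel-1", "Image", "none",
                                     "0x80080000", "0x80080000", "kernel")
    fit.fitimage_emit_section_dtb("example.dtb", "example.dtb")
    fit.fitimage_emit_section_config()
    fit.write_its_file("fit-image.its")
    fit.run_mkimage_assemble("fit-image.its", "fitImage")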
diff --git a/meta/lib/oe/go.py b/meta/lib/oe/go.py deleted file mode 100644 index 4559dc63b2..0000000000 --- a/meta/lib/oe/go.py +++ /dev/null | |||
| @@ -1,38 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: MIT | ||
| 5 | # | ||
| 6 | |||
| 7 | import re | ||
| 8 | |||
| 9 | def map_arch(a): | ||
| 10 | """ | ||
| 11 | Map our architecture names to Go's GOARCH names. | ||
| 12 | See https://github.com/golang/go/blob/master/src/internal/syslist/syslist.go for the complete list. | ||
| 13 | """ | ||
| 14 | if re.match('i.86', a): | ||
| 15 | return '386' | ||
| 16 | elif a == 'x86_64': | ||
| 17 | return 'amd64' | ||
| 18 | elif re.match('arm.*', a): | ||
| 19 | return 'arm' | ||
| 20 | elif re.match('aarch64.*', a): | ||
| 21 | return 'arm64' | ||
| 22 | elif re.match('mips64el.*', a): | ||
| 23 | return 'mips64le' | ||
| 24 | elif re.match('mips64.*', a): | ||
| 25 | return 'mips64' | ||
| 26 | elif a == 'mips': | ||
| 27 | return 'mips' | ||
| 28 | elif a == 'mipsel': | ||
| 29 | return 'mipsle' | ||
| 30 | elif re.match('p(pc|owerpc)(64le)', a): | ||
| 31 | return 'ppc64le' | ||
| 32 | elif re.match('p(pc|owerpc)(64)', a): | ||
| 33 | return 'ppc64' | ||
| 34 | elif a == 'riscv64': | ||
| 35 | return 'riscv64' | ||
| 36 | elif a == 'loongarch64': | ||
| 37 | return 'loong64' | ||
| 38 | raise KeyError(f"Cannot map architecture {a}") | ||
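A few examples of how the mapping above translates OE architecture names into GOARCH values:

    assert map_arch("x86_64") == "amd64"
    assert map_arch("i686") == "386"
    assert map_arch("aarch64") == "arm64"
    assert map_arch("mipsel") == "mipsle"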
diff --git a/meta/lib/oe/gpg_sign.py b/meta/lib/oe/gpg_sign.py deleted file mode 100644 index ede6186c84..0000000000 --- a/meta/lib/oe/gpg_sign.py +++ /dev/null | |||
| @@ -1,160 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | """Helper module for GPG signing""" | ||
| 8 | |||
| 9 | import bb | ||
| 10 | import os | ||
| 11 | import shlex | ||
| 12 | import subprocess | ||
| 13 | import tempfile | ||
| 14 | |||
| 15 | class LocalSigner(object): | ||
| 16 | """Class for handling local (on the build host) signing""" | ||
| 17 | def __init__(self, d): | ||
| 18 | self.gpg_bin = d.getVar('GPG_BIN') or \ | ||
| 19 | bb.utils.which(os.getenv('PATH'), 'gpg') | ||
| 20 | self.gpg_cmd = [self.gpg_bin] | ||
| 21 | self.gpg_agent_bin = bb.utils.which(os.getenv('PATH'), "gpg-agent") | ||
| 22 | # Without this we see "Cannot allocate memory" errors when running processes in parallel | ||
| 23 | # The parameter needs to be passed with every gpg command, since any agent that | ||
| 24 | # gets launched can stick around in memory and must have been started with it. | ||
| 25 | if self.gpg_agent_bin: | ||
| 26 | self.gpg_cmd += ["--agent-program=%s|--auto-expand-secmem" % (self.gpg_agent_bin)] | ||
| 27 | self.gpg_path = d.getVar('GPG_PATH') | ||
| 28 | self.rpm_bin = bb.utils.which(os.getenv('PATH'), "rpmsign") | ||
| 29 | self.gpg_version = self.get_gpg_version() | ||
| 30 | |||
| 31 | |||
| 32 | def export_pubkey(self, output_file, keyid, armor=True): | ||
| 33 | """Export GPG public key to a file""" | ||
| 34 | cmd = self.gpg_cmd + ["--no-permission-warning", "--batch", "--yes", "--export", "-o", output_file] | ||
| 35 | if self.gpg_path: | ||
| 36 | cmd += ["--homedir", self.gpg_path] | ||
| 37 | if armor: | ||
| 38 | cmd += ["--armor"] | ||
| 39 | cmd += [keyid] | ||
| 40 | subprocess.check_output(cmd, stderr=subprocess.STDOUT) | ||
| 41 | |||
| 42 | def sign_rpms(self, files, keyid, passphrase, digest, sign_chunk, fsk=None, fsk_password=None): | ||
| 43 | """Sign RPM files""" | ||
| 44 | |||
| 45 | cmd = self.rpm_bin + " --addsign --define '_gpg_name %s' " % keyid | ||
| 46 | gpg_args = '--no-permission-warning --batch --passphrase=%s --agent-program=%s|--auto-expand-secmem' % (passphrase, self.gpg_agent_bin) | ||
| 47 | if self.gpg_version > (2,1,): | ||
| 48 | gpg_args += ' --pinentry-mode=loopback' | ||
| 49 | cmd += "--define '_gpg_sign_cmd_extra_args %s' " % gpg_args | ||
| 50 | cmd += "--define '_binary_filedigest_algorithm %s' " % digest | ||
| 51 | if self.gpg_bin: | ||
| 52 | cmd += "--define '__gpg %s' " % self.gpg_bin | ||
| 53 | if self.gpg_path: | ||
| 54 | cmd += "--define '_gpg_path %s' " % self.gpg_path | ||
| 55 | if fsk: | ||
| 56 | cmd += "--signfiles --fskpath %s " % fsk | ||
| 57 | if fsk_password: | ||
| 58 | cmd += "--define '_file_signing_key_password %s' " % fsk_password | ||
| 59 | |||
| 60 | # Sign in chunks | ||
| 61 | for i in range(0, len(files), sign_chunk): | ||
| 62 | subprocess.check_output(shlex.split(cmd + ' '.join(files[i:i+sign_chunk])), stderr=subprocess.STDOUT) | ||
| 63 | |||
| 64 | def detach_sign(self, input_file, keyid, passphrase_file, passphrase=None, armor=True, output_suffix=None, use_sha256=False): | ||
| 65 | """Create a detached signature of a file""" | ||
| 66 | |||
| 67 | if passphrase_file and passphrase: | ||
| 68 | raise Exception("You should use either passphrase_file or passphrase, not both") | ||
| 69 | |||
| 70 | cmd = self.gpg_cmd + ['--detach-sign', '--no-permission-warning', '--batch', | ||
| 71 | '--no-tty', '--yes', '--passphrase-fd', '0', '-u', keyid] | ||
| 72 | |||
| 73 | if self.gpg_path: | ||
| 74 | cmd += ['--homedir', self.gpg_path] | ||
| 75 | if armor: | ||
| 76 | cmd += ['--armor'] | ||
| 77 | if use_sha256: | ||
| 78 | cmd += ['--digest-algo', "SHA256"] | ||
| 79 | |||
| 80 | #gpg > 2.1 supports password pipes only through the loopback interface | ||
| 81 | #gpg < 2.1 errors out if given unknown parameters | ||
| 82 | if self.gpg_version > (2,1,): | ||
| 83 | cmd += ['--pinentry-mode', 'loopback'] | ||
| 84 | |||
| 85 | try: | ||
| 86 | if passphrase_file: | ||
| 87 | with open(passphrase_file) as fobj: | ||
| 88 | passphrase = fobj.readline() | ||
| 89 | |||
| 90 | if not output_suffix: | ||
| 91 | output_suffix = 'asc' if armor else 'sig' | ||
| 92 | output_file = input_file + "." + output_suffix | ||
| 93 | with tempfile.TemporaryDirectory(dir=os.path.dirname(output_file)) as tmp_dir: | ||
| 94 | tmp_file = os.path.join(tmp_dir, os.path.basename(output_file)) | ||
| 95 | cmd += ['-o', tmp_file] | ||
| 96 | |||
| 97 | cmd += [input_file] | ||
| 98 | |||
| 99 | job = subprocess.Popen(cmd, stdin=subprocess.PIPE, stderr=subprocess.PIPE) | ||
| 100 | (_, stderr) = job.communicate(passphrase.encode("utf-8")) | ||
| 101 | |||
| 102 | if job.returncode: | ||
| 103 | bb.fatal("GPG exited with code %d: %s" % (job.returncode, stderr.decode("utf-8"))) | ||
| 104 | |||
| 105 | os.rename(tmp_file, output_file) | ||
| 106 | except IOError as e: | ||
| 107 | bb.error("IO error (%s): %s" % (e.errno, e.strerror)) | ||
| 108 | raise Exception("Failed to sign '%s'" % input_file) | ||
| 109 | |||
| 110 | except OSError as e: | ||
| 111 | bb.error("OS error (%s): %s" % (e.errno, e.strerror)) | ||
| 112 | raise Exception("Failed to sign '%s'" % input_file) | ||
| 113 | |||
| 114 | |||
| 115 | def get_gpg_version(self): | ||
| 116 | """Return the gpg version as a tuple of ints""" | ||
| 117 | try: | ||
| 118 | cmd = self.gpg_cmd + ["--version", "--no-permission-warning"] | ||
| 119 | ver_str = subprocess.check_output(cmd).split()[2].decode("utf-8") | ||
| 120 | return tuple([int(i) for i in ver_str.split("-")[0].split('.')]) | ||
| 121 | except subprocess.CalledProcessError as e: | ||
| 122 | bb.fatal("Could not get gpg version: %s" % e) | ||
| 123 | |||
| 124 | |||
| 125 | def verify(self, sig_file, valid_sigs = ''): | ||
| 126 | """Verify signature""" | ||
| 127 | cmd = self.gpg_cmd + ["--verify", "--no-permission-warning", "--status-fd", "1"] | ||
| 128 | if self.gpg_path: | ||
| 129 | cmd += ["--homedir", self.gpg_path] | ||
| 130 | |||
| 131 | cmd += [sig_file] | ||
| 132 | status = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) | ||
| 133 | # If no valid signatures were specified, any good signature is accepted | ||
| 134 | if not valid_sigs: | ||
| 135 | ret = False if status.returncode else True | ||
| 136 | return ret | ||
| 137 | |||
| 138 | import re | ||
| 139 | goodsigs = [] | ||
| 140 | sigre = re.compile(r'^\[GNUPG:\] GOODSIG (\S+)\s(.*)$') | ||
| 141 | for l in status.stdout.decode("utf-8").splitlines(): | ||
| 142 | s = sigre.match(l) | ||
| 143 | if s: | ||
| 144 | goodsigs += [s.group(1)] | ||
| 145 | |||
| 146 | for sig in valid_sigs.split(): | ||
| 147 | if sig in goodsigs: | ||
| 148 | return True | ||
| 149 | if len(goodsigs): | ||
| 150 | bb.warn('No accepted signatures found. Good signatures found: %s.' % ' '.join(goodsigs)) | ||
| 151 | return False | ||
| 152 | |||
| 153 | |||
| 154 | def get_signer(d, backend): | ||
| 155 | """Get signer object for the specified backend""" | ||
| 156 | # Use local signing by default | ||
| 157 | if backend == 'local': | ||
| 158 | return LocalSigner(d) | ||
| 159 | else: | ||
| 160 | bb.fatal("Unsupported signing backend '%s'" % backend) | ||
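A minimal sketch of how a caller might use this module, assuming a datastore d with the GPG variables configured; the feed-signing variable names and file names below are assumptions, not taken from this file:

    signer = get_signer(d, d.getVar("PACKAGE_FEED_GPG_BACKEND") or "local")
    # export the public key so image users can verify the feed (path is illustrative)
    signer.export_pubkey("feed-key.asc", d.getVar("PACKAGE_FEED_GPG_NAME"))
    # create a detached, armored signature next to the index file
    signer.detach_sign("Packages", d.getVar("PACKAGE_FEED_GPG_NAME"),
                       d.getVar("PACKAGE_FEED_GPG_PASSPHRASE_FILE"))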
diff --git a/meta/lib/oe/license.py b/meta/lib/oe/license.py deleted file mode 100644 index bd28a247c9..0000000000 --- a/meta/lib/oe/license.py +++ /dev/null | |||
| @@ -1,473 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | """Code for parsing OpenEmbedded license strings""" | ||
| 7 | |||
| 8 | import ast | ||
| 9 | import re | ||
| 10 | import oe.qa | ||
| 11 | from fnmatch import fnmatchcase as fnmatch | ||
| 12 | |||
| 13 | def license_ok(license, dont_want_licenses): | ||
| 14 | """ Return False if License exist in dont_want_licenses else True """ | ||
| 15 | for dwl in dont_want_licenses: | ||
| 16 | if fnmatch(license, dwl): | ||
| 17 | return False | ||
| 18 | return True | ||
| 19 | |||
| 20 | def obsolete_license_list(): | ||
| 21 | return ["AGPL-3", "AGPL-3+", "AGPLv3", "AGPLv3+", "AGPLv3.0", "AGPLv3.0+", "AGPL-3.0", "AGPL-3.0+", "BSD-0-Clause", | ||
| 22 | "GPL-1", "GPL-1+", "GPLv1", "GPLv1+", "GPLv1.0", "GPLv1.0+", "GPL-1.0", "GPL-1.0+", "GPL-2", "GPL-2+", "GPLv2", | ||
| 23 | "GPLv2+", "GPLv2.0", "GPLv2.0+", "GPL-2.0", "GPL-2.0+", "GPL-3", "GPL-3+", "GPLv3", "GPLv3+", "GPLv3.0", "GPLv3.0+", | ||
| 24 | "GPL-3.0", "GPL-3.0+", "LGPLv2", "LGPLv2+", "LGPLv2.0", "LGPLv2.0+", "LGPL-2.0", "LGPL-2.0+", "LGPL2.1", "LGPL2.1+", | ||
| 25 | "LGPLv2.1", "LGPLv2.1+", "LGPL-2.1", "LGPL-2.1+", "LGPLv3", "LGPLv3+", "LGPL-3.0", "LGPL-3.0+", "MPL-1", "MPLv1", | ||
| 26 | "MPLv1.1", "MPLv2", "MIT-X", "MIT-style", "openssl", "PSF", "PSFv2", "Python-2", "Apachev2", "Apache-2", "Artisticv1", | ||
| 27 | "Artistic-1", "AFL-2", "AFL-1", "AFLv2", "AFLv1", "CDDLv1", "CDDL-1", "EPLv1.0", "FreeType", "Nauman", | ||
| 28 | "tcl", "vim", "SGIv1"] | ||
| 29 | |||
| 30 | class LicenseError(Exception): | ||
| 31 | pass | ||
| 32 | |||
| 33 | class LicenseSyntaxError(LicenseError): | ||
| 34 | def __init__(self, licensestr, exc): | ||
| 35 | self.licensestr = licensestr | ||
| 36 | self.exc = exc | ||
| 37 | LicenseError.__init__(self) | ||
| 38 | |||
| 39 | def __str__(self): | ||
| 40 | return "error in '%s': %s" % (self.licensestr, self.exc) | ||
| 41 | |||
| 42 | class InvalidLicense(LicenseError): | ||
| 43 | def __init__(self, license): | ||
| 44 | self.license = license | ||
| 45 | LicenseError.__init__(self) | ||
| 46 | |||
| 47 | def __str__(self): | ||
| 48 | return "invalid characters in license '%s'" % self.license | ||
| 49 | |||
| 50 | license_operator_chars = '&|() ' | ||
| 51 | license_operator = re.compile(r'([' + license_operator_chars + '])') | ||
| 52 | license_pattern = re.compile(r'[a-zA-Z0-9.+_\-]+$') | ||
| 53 | |||
| 54 | class LicenseVisitor(ast.NodeVisitor): | ||
| 55 | """Get elements based on OpenEmbedded license strings""" | ||
| 56 | def get_elements(self, licensestr): | ||
| 57 | new_elements = [] | ||
| 58 | elements = list([x for x in license_operator.split(licensestr) if x.strip()]) | ||
| 59 | for pos, element in enumerate(elements): | ||
| 60 | if license_pattern.match(element): | ||
| 61 | if pos > 0 and license_pattern.match(elements[pos-1]): | ||
| 62 | new_elements.append('&') | ||
| 63 | element = '"' + element + '"' | ||
| 64 | elif not license_operator.match(element): | ||
| 65 | raise InvalidLicense(element) | ||
| 66 | new_elements.append(element) | ||
| 67 | |||
| 68 | return new_elements | ||
| 69 | |||
| 70 | """Syntax tree visitor which can accept elements previously generated with | ||
| 71 | OpenEmbedded license string""" | ||
| 72 | def visit_elements(self, elements): | ||
| 73 | self.visit(ast.parse(' '.join(elements))) | ||
| 74 | |||
| 75 | """Syntax tree visitor which can accept OpenEmbedded license strings""" | ||
| 76 | def visit_string(self, licensestr): | ||
| 77 | self.visit_elements(self.get_elements(licensestr)) | ||
| 78 | |||
| 79 | class FlattenVisitor(LicenseVisitor): | ||
| 80 | """Flatten a license tree (parsed from a string) by selecting one of each | ||
| 81 | set of OR options, in the way the user specifies""" | ||
| 82 | def __init__(self, choose_licenses): | ||
| 83 | self.choose_licenses = choose_licenses | ||
| 84 | self.licenses = [] | ||
| 85 | LicenseVisitor.__init__(self) | ||
| 86 | |||
| 87 | def visit_Constant(self, node): | ||
| 88 | self.licenses.append(node.value) | ||
| 89 | |||
| 90 | def visit_BinOp(self, node): | ||
| 91 | if isinstance(node.op, ast.BitOr): | ||
| 92 | left = FlattenVisitor(self.choose_licenses) | ||
| 93 | left.visit(node.left) | ||
| 94 | |||
| 95 | right = FlattenVisitor(self.choose_licenses) | ||
| 96 | right.visit(node.right) | ||
| 97 | |||
| 98 | selected = self.choose_licenses(left.licenses, right.licenses) | ||
| 99 | self.licenses.extend(selected) | ||
| 100 | else: | ||
| 101 | self.generic_visit(node) | ||
| 102 | |||
| 103 | def flattened_licenses(licensestr, choose_licenses): | ||
| 104 | """Given a license string and choose_licenses function, return a flat list of licenses""" | ||
| 105 | flatten = FlattenVisitor(choose_licenses) | ||
| 106 | try: | ||
| 107 | flatten.visit_string(licensestr) | ||
| 108 | except SyntaxError as exc: | ||
| 109 | raise LicenseSyntaxError(licensestr, exc) | ||
| 110 | return flatten.licenses | ||
| 111 | |||
| 112 | def is_included(licensestr, include_licenses=None, exclude_licenses=None): | ||
| 113 | """Given a license string, a list of licenses to include and a list of | ||
| 114 | licenses to exclude, determine if the license string matches the include | ||
| 115 | list and does not match the exclude list. | ||
| 116 | |||
| 117 | Returns a tuple holding the boolean state and a list of the applicable | ||
| 118 | licenses that were excluded if state is False, or the licenses that were | ||
| 119 | included if the state is True.""" | ||
| 120 | |||
| 121 | def include_license(license): | ||
| 122 | return any(fnmatch(license, pattern) for pattern in include_licenses) | ||
| 123 | |||
| 124 | def exclude_license(license): | ||
| 125 | return any(fnmatch(license, pattern) for pattern in exclude_licenses) | ||
| 126 | |||
| 127 | def choose_licenses(alpha, beta): | ||
| 128 | """Select the option in an OR which is the 'best' (has the most | ||
| 129 | included licenses and no excluded licenses).""" | ||
| 130 | # The factor 1000 below is arbitrary, just expected to be much larger | ||
| 131 | # than the number of licenses actually specified. That way the weight | ||
| 132 | # will be negative if the list of licenses contains an excluded license, | ||
| 133 | # but still gives a higher weight to the list with the most included | ||
| 134 | # licenses. | ||
| 135 | alpha_weight = (len(list(filter(include_license, alpha))) - | ||
| 136 | 1000 * (len(list(filter(exclude_license, alpha))) > 0)) | ||
| 137 | beta_weight = (len(list(filter(include_license, beta))) - | ||
| 138 | 1000 * (len(list(filter(exclude_license, beta))) > 0)) | ||
| 139 | if alpha_weight >= beta_weight: | ||
| 140 | return alpha | ||
| 141 | else: | ||
| 142 | return beta | ||
| 143 | |||
| 144 | if not include_licenses: | ||
| 145 | include_licenses = ['*'] | ||
| 146 | |||
| 147 | if not exclude_licenses: | ||
| 148 | exclude_licenses = [] | ||
| 149 | |||
| 150 | licenses = flattened_licenses(licensestr, choose_licenses) | ||
| 151 | excluded = [lic for lic in licenses if exclude_license(lic)] | ||
| 152 | included = [lic for lic in licenses if include_license(lic)] | ||
| 153 | if excluded: | ||
| 154 | return False, excluded | ||
| 155 | else: | ||
| 156 | return True, included | ||
| 157 | |||
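For illustration, how the OR handling above plays out when one branch is excluded (license names are examples):

    ok, lics = is_included("GPL-3.0-only | MIT", exclude_licenses=["GPL-3.0*"])
    # ok == True, lics == ["MIT"]: the OR is resolved to the branch that
    # carries no excluded licenses.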
| 158 | class ManifestVisitor(LicenseVisitor): | ||
| 159 | """Walk license tree (parsed from a string) removing the incompatible | ||
| 160 | licenses specified""" | ||
| 161 | def __init__(self, dont_want_licenses, canonical_license, d): | ||
| 162 | self._dont_want_licenses = dont_want_licenses | ||
| 163 | self._canonical_license = canonical_license | ||
| 164 | self._d = d | ||
| 165 | self._operators = [] | ||
| 166 | |||
| 167 | self.licenses = [] | ||
| 168 | self.licensestr = '' | ||
| 169 | |||
| 170 | LicenseVisitor.__init__(self) | ||
| 171 | |||
| 172 | def visit(self, node): | ||
| 173 | if isinstance(node, ast.Constant): | ||
| 174 | lic = node.value | ||
| 175 | |||
| 176 | if license_ok(self._canonical_license(self._d, lic), | ||
| 177 | self._dont_want_licenses) == True: | ||
| 178 | if self._operators: | ||
| 179 | ops = [] | ||
| 180 | for op in self._operators: | ||
| 181 | if op == '[': | ||
| 182 | ops.append(op) | ||
| 183 | elif op == ']': | ||
| 184 | ops.append(op) | ||
| 185 | else: | ||
| 186 | if not ops: | ||
| 187 | ops.append(op) | ||
| 188 | elif ops[-1] in ['[', ']']: | ||
| 189 | ops.append(op) | ||
| 190 | else: | ||
| 191 | ops[-1] = op | ||
| 192 | |||
| 193 | for op in ops: | ||
| 194 | if op == '[' or op == ']': | ||
| 195 | self.licensestr += op | ||
| 196 | elif self.licenses: | ||
| 197 | self.licensestr += ' ' + op + ' ' | ||
| 198 | |||
| 199 | self._operators = [] | ||
| 200 | |||
| 201 | self.licensestr += lic | ||
| 202 | self.licenses.append(lic) | ||
| 203 | elif isinstance(node, ast.BitAnd): | ||
| 204 | self._operators.append("&") | ||
| 205 | elif isinstance(node, ast.BitOr): | ||
| 206 | self._operators.append("|") | ||
| 207 | elif isinstance(node, ast.List): | ||
| 208 | self._operators.append("[") | ||
| 209 | elif isinstance(node, ast.Load): | ||
| 210 | self.licensestr += "]" | ||
| 211 | |||
| 212 | self.generic_visit(node) | ||
| 213 | |||
| 214 | def manifest_licenses(licensestr, dont_want_licenses, canonical_license, d): | ||
| 215 | """Given a license string and dont_want_licenses list, | ||
| 216 | return license string filtered and a list of licenses""" | ||
| 217 | manifest = ManifestVisitor(dont_want_licenses, canonical_license, d) | ||
| 218 | |||
| 219 | try: | ||
| 220 | elements = manifest.get_elements(licensestr) | ||
| 221 | |||
| 222 | # Replace '(' and ')' with '[' and ']' so ast parses them as List and Load nodes. | ||
| 223 | elements = ['[' if e == '(' else e for e in elements] | ||
| 224 | elements = [']' if e == ')' else e for e in elements] | ||
| 225 | |||
| 226 | manifest.visit_elements(elements) | ||
| 227 | except SyntaxError as exc: | ||
| 228 | raise LicenseSyntaxError(licensestr, exc) | ||
| 229 | |||
| 230 | # Replace '[' and ']' back with '(' and ')' to output the correct license string. | ||
| 231 | manifest.licensestr = manifest.licensestr.replace('[', '(').replace(']', ')') | ||
| 232 | |||
| 233 | return (manifest.licensestr, manifest.licenses) | ||
| 234 | |||
| 235 | class ListVisitor(LicenseVisitor): | ||
| 236 | """Record all different licenses found in the license string""" | ||
| 237 | def __init__(self): | ||
| 238 | self.licenses = set() | ||
| 239 | |||
| 240 | def visit_Constant(self, node): | ||
| 241 | self.licenses.add(node.value) | ||
| 242 | |||
| 243 | def list_licenses(licensestr): | ||
| 244 | """Simply get a list of all licenses mentioned in a license string. | ||
| 245 | Binary operators are not applied or taken into account in any way""" | ||
| 246 | visitor = ListVisitor() | ||
| 247 | try: | ||
| 248 | visitor.visit_string(licensestr) | ||
| 249 | except SyntaxError as exc: | ||
| 250 | raise LicenseSyntaxError(licensestr, exc) | ||
| 251 | return visitor.licenses | ||
| 252 | |||
| 253 | def apply_pkg_license_exception(pkg, bad_licenses, exceptions): | ||
| 254 | """Return remaining bad licenses after removing any package exceptions""" | ||
| 255 | |||
| 256 | return [lic for lic in bad_licenses if pkg + ':' + lic not in exceptions] | ||
| 257 | |||
| 258 | def return_spdx(d, license): | ||
| 259 | """ | ||
| 260 | This function returns the spdx mapping of a license if it exists. | ||
| 261 | """ | ||
| 262 | return d.getVarFlag('SPDXLICENSEMAP', license) | ||
| 263 | |||
| 264 | def canonical_license(d, license): | ||
| 265 | """ | ||
| 266 | Return the canonical (SPDX) form of the license if available (so GPLv3 | ||
| 267 | becomes GPL-3.0-only) or the passed license if there is no canonical form. | ||
| 268 | """ | ||
| 269 | return d.getVarFlag('SPDXLICENSEMAP', license) or license | ||
| 270 | |||
| 271 | def expand_wildcard_licenses(d, wildcard_licenses): | ||
| 272 | """ | ||
| 273 | There are some common wildcard values users may want to use. Support them | ||
| 274 | here. | ||
| 275 | """ | ||
| 276 | licenses = set(wildcard_licenses) | ||
| 277 | mapping = { | ||
| 278 | "AGPL-3.0*" : ["AGPL-3.0-only", "AGPL-3.0-or-later"], | ||
| 279 | "GPL-3.0*" : ["GPL-3.0-only", "GPL-3.0-or-later"], | ||
| 280 | "LGPL-3.0*" : ["LGPL-3.0-only", "LGPL-3.0-or-later"], | ||
| 281 | } | ||
| 282 | for k in mapping: | ||
| 283 | if k in wildcard_licenses: | ||
| 284 | licenses.remove(k) | ||
| 285 | for item in mapping[k]: | ||
| 286 | licenses.add(item) | ||
| 287 | |||
| 288 | for l in licenses: | ||
| 289 | if l in obsolete_license_list(): | ||
| 290 | bb.fatal("Error, %s is an obsolete license, please use an SPDX reference in INCOMPATIBLE_LICENSE" % l) | ||
| 291 | if "*" in l: | ||
| 292 | bb.fatal("Error, %s is an invalid license wildcard entry" % l) | ||
| 293 | |||
| 294 | return list(licenses) | ||
| 295 | |||
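For example (a sketch; note that d is not consulted for the expansion itself):

    # expand_wildcard_licenses(d, ["GPL-3.0*", "MIT"]) returns
    # ["GPL-3.0-only", "GPL-3.0-or-later", "MIT"] (in set order), while an
    # obsolete name such as "GPLv3" or a "*" in any other entry is fatal.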
| 296 | def incompatible_license_contains(license, truevalue, falsevalue, d): | ||
| 297 | license = canonical_license(d, license) | ||
| 298 | bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split() | ||
| 299 | bad_licenses = expand_wildcard_licenses(d, bad_licenses) | ||
| 300 | return truevalue if license in bad_licenses else falsevalue | ||
| 301 | |||
| 302 | def incompatible_pkg_license(d, dont_want_licenses, license): | ||
| 303 | # Handles an "or" or two license sets provided by | ||
| 304 | # flattened_licenses(), pick one that works if possible. | ||
| 305 | def choose_lic_set(a, b): | ||
| 306 | return a if all(license_ok(canonical_license(d, lic), | ||
| 307 | dont_want_licenses) for lic in a) else b | ||
| 308 | |||
| 309 | try: | ||
| 310 | licenses = flattened_licenses(license, choose_lic_set) | ||
| 311 | except LicenseError as exc: | ||
| 312 | bb.fatal('%s: %s' % (d.getVar('P'), exc)) | ||
| 313 | |||
| 314 | incompatible_lic = [] | ||
| 315 | for l in licenses: | ||
| 316 | license = canonical_license(d, l) | ||
| 317 | if not license_ok(license, dont_want_licenses): | ||
| 318 | incompatible_lic.append(license) | ||
| 319 | |||
| 320 | return sorted(incompatible_lic) | ||
| 321 | |||
| 322 | def incompatible_license(d, dont_want_licenses, package=None): | ||
| 323 | """ | ||
| 324 | This function checks if a recipe has only incompatible licenses. It also | ||
| 325 | takes the 'or' operator into consideration. dont_want_licenses should be passed | ||
| 326 | as canonical (SPDX) names. | ||
| 327 | """ | ||
| 328 | license = d.getVar("LICENSE:%s" % package) if package else None | ||
| 329 | if not license: | ||
| 330 | license = d.getVar('LICENSE') | ||
| 331 | |||
| 332 | return incompatible_pkg_license(d, dont_want_licenses, license) | ||
| 333 | |||
| 334 | def check_license_flags(d): | ||
| 335 | """ | ||
| 336 | This function checks if a recipe has any LICENSE_FLAGS that | ||
| 337 | aren't acceptable. | ||
| 338 | |||
| 339 | If it does, it returns all the LICENSE_FLAGS missing from the list | ||
| 340 | of acceptable license flags, or all of the LICENSE_FLAGS if there | ||
| 341 | is no list of acceptable flags. | ||
| 342 | |||
| 343 | If everything is acceptable, it returns None. | ||
| 344 | """ | ||
| 345 | |||
| 346 | def license_flag_matches(flag, acceptlist, pn): | ||
| 347 | """ | ||
| 348 | Return True if flag matches something in acceptlist, None if not. | ||
| 349 | |||
| 350 | Before we test a flag against the acceptlist, we append _${PN} | ||
| 351 | to it. We then try to match that string against the | ||
| 352 | acceptlist. This covers the normal case, where we expect | ||
| 353 | LICENSE_FLAGS to be a simple string like 'commercial', which | ||
| 354 | the user typically matches exactly in the acceptlist by | ||
| 355 | explicitly appending the package name, e.g. 'commercial_foo'. | ||
| 356 | If that match fails, we then split the flag on '_' and test | ||
| 357 | progressively longer prefixes until we either match or run out | ||
| 358 | of fragments. | ||
| 359 | """ | ||
| 360 | flag_pn = ("%s_%s" % (flag, pn)) | ||
| 361 | for candidate in acceptlist: | ||
| 362 | if flag_pn == candidate: | ||
| 363 | return True | ||
| 364 | |||
| 365 | flag_cur = "" | ||
| 366 | flagments = flag_pn.split("_") | ||
| 367 | flagments.pop() # we've already tested the full string | ||
| 368 | for flagment in flagments: | ||
| 369 | if flag_cur: | ||
| 370 | flag_cur += "_" | ||
| 371 | flag_cur += flagment | ||
| 372 | for candidate in acceptlist: | ||
| 373 | if flag_cur == candidate: | ||
| 374 | return True | ||
| 375 | return False | ||
| 376 | |||
| 377 | def all_license_flags_match(license_flags, acceptlist): | ||
| 378 | """ Return all unmatched flags, None if all flags match """ | ||
| 379 | pn = d.getVar('PN') | ||
| 380 | split_acceptlist = acceptlist.split() | ||
| 381 | flags = [] | ||
| 382 | for flag in license_flags.split(): | ||
| 383 | if not license_flag_matches(flag, split_acceptlist, pn): | ||
| 384 | flags.append(flag) | ||
| 385 | return flags if flags else None | ||
| 386 | |||
| 387 | license_flags = d.getVar('LICENSE_FLAGS') | ||
| 388 | if license_flags: | ||
| 389 | acceptlist = d.getVar('LICENSE_FLAGS_ACCEPTED') | ||
| 390 | if not acceptlist: | ||
| 391 | return license_flags.split() | ||
| 392 | unmatched_flags = all_license_flags_match(license_flags, acceptlist) | ||
| 393 | if unmatched_flags: | ||
| 394 | return unmatched_flags | ||
| 395 | return None | ||
| 396 | |||
| 397 | def check_license_format(d): | ||
| 398 | """ | ||
| 399 | This function checks that LICENSE is well formed and validates the | ||
| 400 | operators used between license names. License names must be joined by | ||
| 401 | valid operators; bare spaces between licenses are not allowed. | ||
| 402 | """ | ||
| 403 | pn = d.getVar('PN') | ||
| 404 | licenses = d.getVar('LICENSE') | ||
| 405 | |||
| 406 | elements = list(filter(lambda x: x.strip(), license_operator.split(licenses))) | ||
| 407 | for pos, element in enumerate(elements): | ||
| 408 | if license_pattern.match(element): | ||
| 409 | if pos > 0 and license_pattern.match(elements[pos - 1]): | ||
| 410 | oe.qa.handle_error('license-format', | ||
| 411 | '%s: LICENSE value "%s" has an invalid format - license names ' \ | ||
| 412 | 'must be separated by the following characters to indicate ' \ | ||
| 413 | 'the license selection: %s' % | ||
| 414 | (pn, licenses, license_operator_chars), d) | ||
| 415 | elif not license_operator.match(element): | ||
| 416 | oe.qa.handle_error('license-format', | ||
| 417 | '%s: LICENSE value "%s" has an invalid separator "%s" that is not ' \ | ||
| 418 | 'in the valid list of separators (%s)' % | ||
| 419 | (pn, licenses, element, license_operator_chars), d) | ||
| 420 | |||
| 421 | def skip_incompatible_package_licenses(d, pkgs): | ||
| 422 | if not pkgs: | ||
| 423 | return {} | ||
| 424 | |||
| 425 | pn = d.getVar("PN") | ||
| 426 | |||
| 427 | check_license = not pn.startswith("nativesdk-") | ||
| 428 | for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}", | ||
| 429 | "-crosssdk-${SDK_SYS}", "-crosssdk-initial-${SDK_SYS}", | ||
| 430 | "-cross-canadian-${TRANSLATED_TARGET_ARCH}"]: | ||
| 431 | if pn.endswith(d.expand(t)): | ||
| 432 | check_license = False | ||
| 433 | if pn.startswith("gcc-source-"): | ||
| 434 | check_license = False | ||
| 435 | |||
| 436 | bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split() | ||
| 437 | if not check_license or not bad_licenses: | ||
| 438 | return {} | ||
| 439 | |||
| 440 | bad_licenses = expand_wildcard_licenses(d, bad_licenses) | ||
| 441 | |||
| 442 | exceptions = (d.getVar("INCOMPATIBLE_LICENSE_EXCEPTIONS") or "").split() | ||
| 443 | |||
| 444 | for lic_exception in exceptions: | ||
| 445 | if ":" in lic_exception: | ||
| 446 | lic_exception = lic_exception.split(":")[1] | ||
| 447 | if lic_exception in obsolete_license_list(): | ||
| 448 | bb.fatal("Obsolete license %s used in INCOMPATIBLE_LICENSE_EXCEPTIONS" % lic_exception) | ||
| 449 | |||
| 450 | skipped_pkgs = {} | ||
| 451 | for pkg in pkgs: | ||
| 452 | remaining_bad_licenses = apply_pkg_license_exception(pkg, bad_licenses, exceptions) | ||
| 453 | |||
| 454 | incompatible_lic = incompatible_license(d, remaining_bad_licenses, pkg) | ||
| 455 | if incompatible_lic: | ||
| 456 | skipped_pkgs[pkg] = incompatible_lic | ||
| 457 | |||
| 458 | return skipped_pkgs | ||
| 459 | |||
| 460 | def tidy_licenses(value): | ||
| 461 | """ | ||
| 462 | Flatten, split and sort licenses. | ||
| 463 | """ | ||
| 464 | from oe.license import flattened_licenses | ||
| 465 | |||
| 466 | def _choose(a, b): | ||
| 467 | str_a, str_b = sorted((" & ".join(a), " & ".join(b)), key=str.casefold) | ||
| 468 | return ["(%s | %s)" % (str_a, str_b)] | ||
| 469 | |||
| 470 | if not isinstance(value, str): | ||
| 471 | value = " & ".join(value) | ||
| 472 | |||
| 473 | return sorted(list(set(flattened_licenses(value, _choose))), key=str.casefold) | ||
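For reference, the fragment matching implemented by license_flag_matches() above can be summarised with a small standalone sketch; the recipe name "foo" and the flag values are illustrative assumptions, not taken from the file:

    # Illustrative only: mirrors the splitting behaviour of license_flag_matches().
    def matches(flag, acceptlist, pn):
        candidates = set(acceptlist.split())
        flag_pn = "%s_%s" % (flag, pn)       # e.g. "commercial_foo"
        if flag_pn in candidates:
            return True
        fragments = flag_pn.split("_")[:-1]  # the full string was already tested
        current = ""
        for fragment in fragments:
            current = current + "_" + fragment if current else fragment
            if current in candidates:
                return True
        return False

    print(matches("commercial", "commercial_foo", "foo"))  # True: exact flag_${PN} match
    print(matches("commercial", "commercial", "foo"))      # True: bare flag accepted
    print(matches("commercial", "commercial_bar", "foo"))  # False: accepted for another recipe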
diff --git a/meta/lib/oe/license_finder.py b/meta/lib/oe/license_finder.py deleted file mode 100644 index 4f2bb661fd..0000000000 --- a/meta/lib/oe/license_finder.py +++ /dev/null | |||
| @@ -1,204 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | import fnmatch | ||
| 8 | import hashlib | ||
| 9 | import logging | ||
| 10 | import os | ||
| 11 | import re | ||
| 12 | |||
| 13 | import bb | ||
| 14 | import bb.utils | ||
| 15 | |||
| 16 | logger = logging.getLogger("BitBake.OE.LicenseFinder") | ||
| 17 | |||
| 18 | def _load_hash_csv(d): | ||
| 19 | """ | ||
| 20 | Load a mapping of (checksum: license name) from all files/license-hashes.csv | ||
| 21 | files that can be found in the available layers. | ||
| 22 | """ | ||
| 23 | import csv | ||
| 24 | md5sums = {} | ||
| 25 | |||
| 26 | # Read license md5sums from csv file | ||
| 27 | for path in d.getVar('BBPATH').split(':'): | ||
| 28 | csv_path = os.path.join(path, 'files', 'license-hashes.csv') | ||
| 29 | if os.path.isfile(csv_path): | ||
| 30 | with open(csv_path, newline='') as csv_file: | ||
| 31 | reader = csv.DictReader(csv_file, delimiter=',', fieldnames=['md5sum', 'license']) | ||
| 32 | for row in reader: | ||
| 33 | md5sums[row['md5sum']] = row['license'] | ||
| 34 | |||
| 35 | return md5sums | ||
| 36 | |||
| 37 | |||
| 38 | def _crunch_known_licenses(d): | ||
| 39 | """ | ||
| 40 | Calculate the MD5 checksums for the original and "crunched" versions of all | ||
| 41 | known licenses. | ||
| 42 | """ | ||
| 43 | md5sums = {} | ||
| 44 | |||
| 45 | lic_dirs = [d.getVar('COMMON_LICENSE_DIR')] + (d.getVar('LICENSE_PATH') or "").split() | ||
| 46 | for lic_dir in lic_dirs: | ||
| 47 | for fn in os.listdir(lic_dir): | ||
| 48 | path = os.path.join(lic_dir, fn) | ||
| 49 | # Hash the exact contents | ||
| 50 | md5value = bb.utils.md5_file(path) | ||
| 51 | md5sums[md5value] = fn | ||
| 52 | # Also hash a "crunched" version | ||
| 53 | md5value = _crunch_license(path) | ||
| 54 | md5sums[md5value] = fn | ||
| 55 | |||
| 56 | return md5sums | ||
| 57 | |||
| 58 | |||
| 59 | def _crunch_license(licfile): | ||
| 60 | ''' | ||
| 61 | Remove non-material text from a license file and then calculate its | ||
| 62 | md5sum. This works well for licenses that contain a copyright statement, | ||
| 63 | but is also a useful way to handle people's insistence upon reformatting | ||
| 64 | the license text slightly (with no material difference to the text of the | ||
| 65 | license). | ||
| 66 | ''' | ||
| 67 | |||
| 68 | import oe.utils | ||
| 69 | |||
| 70 | # Note: these are carefully constructed! | ||
| 71 | license_title_re = re.compile(r'^#*\(? *(This is )?([Tt]he )?.{0,15} ?[Ll]icen[sc]e( \(.{1,10}\))?\)?[:\.]? ?#*$') | ||
| 72 | license_statement_re = re.compile(r'^((This (project|software)|.{1,10}) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$') | ||
| 73 | copyright_re = re.compile(r'^ *[#\*]* *(Modified work |MIT LICENSED )?Copyright ?(\([cC]\))? .*$') | ||
| 74 | disclaimer_re = re.compile(r'^ *\*? ?All [Rr]ights [Rr]eserved\.$') | ||
| 75 | email_re = re.compile(r'^.*<[\w\.-]*@[\w\.\-]*>$') | ||
| 76 | header_re = re.compile(r'^(\/\**!?)? ?[\-=\*]* ?(\*\/)?$') | ||
| 77 | tag_re = re.compile(r'^ *@?\(?([Ll]icense|MIT)\)?$') | ||
| 78 | url_re = re.compile(r'^ *[#\*]* *https?:\/\/[\w\.\/\-]+$') | ||
| 79 | |||
| 80 | lictext = [] | ||
| 81 | with open(licfile, 'r', errors='surrogateescape') as f: | ||
| 82 | for line in f: | ||
| 83 | # Drop opening statements | ||
| 84 | if copyright_re.match(line): | ||
| 85 | continue | ||
| 86 | elif disclaimer_re.match(line): | ||
| 87 | continue | ||
| 88 | elif email_re.match(line): | ||
| 89 | continue | ||
| 90 | elif header_re.match(line): | ||
| 91 | continue | ||
| 92 | elif tag_re.match(line): | ||
| 93 | continue | ||
| 94 | elif url_re.match(line): | ||
| 95 | continue | ||
| 96 | elif license_title_re.match(line): | ||
| 97 | continue | ||
| 98 | elif license_statement_re.match(line): | ||
| 99 | continue | ||
| 100 | # Strip comment symbols | ||
| 101 | line = line.replace('*', '') \ | ||
| 102 | .replace('#', '') | ||
| 103 | # Unify spelling | ||
| 104 | line = line.replace('sub-license', 'sublicense') | ||
| 105 | # Squash spaces | ||
| 106 | line = oe.utils.squashspaces(line.strip()) | ||
| 107 | # Replace smart quotes, double quotes and backticks with single quotes | ||
| 108 | line = line.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u201c","'").replace(u"\u201d", "'").replace('"', '\'').replace('`', '\'') | ||
| 109 | # Unify brackets | ||
| 110 | line = line.replace("{", "[").replace("}", "]") | ||
| 111 | if line: | ||
| 112 | lictext.append(line) | ||
| 113 | |||
| 114 | m = hashlib.md5() | ||
| 115 | try: | ||
| 116 | m.update(' '.join(lictext).encode('utf-8')) | ||
| 117 | md5val = m.hexdigest() | ||
| 118 | except UnicodeEncodeError: | ||
| 119 | md5val = None | ||
| 120 | return md5val | ||
| 121 | |||
| 122 | |||
| 123 | def find_license_files(srctree, first_only=False, bottom=""): | ||
| 124 | """ | ||
| 125 | Search srctree for files that look like they could be licenses. | ||
| 126 | If first_only is True, only return the first file found. | ||
| 127 | If bottom is not empty, start at bottom and continue upwards to the top. | ||
| 128 | """ | ||
| 129 | licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10'] | ||
| 130 | skip_extensions = (".html", ".js", ".json", ".svg", ".ts", ".go", ".sh") | ||
| 131 | licfiles = [] | ||
| 132 | if bottom: | ||
| 133 | srcdir = bottom | ||
| 134 | while srcdir.startswith(srctree): | ||
| 135 | files = [] | ||
| 136 | with os.scandir(srcdir) as it: | ||
| 137 | for entry in it: | ||
| 138 | if entry.is_file(): | ||
| 139 | files.append(entry.name) | ||
| 140 | for name in sorted(files): | ||
| 141 | if name.endswith(skip_extensions): | ||
| 142 | continue | ||
| 143 | for spec in licspecs: | ||
| 144 | if fnmatch.fnmatch(name, spec): | ||
| 145 | licfiles.append(os.path.join(srcdir, name)) | ||
| 146 | if first_only: | ||
| 147 | return licfiles | ||
| 148 | srcdir = os.path.dirname(srcdir) | ||
| 149 | return licfiles | ||
| 150 | |||
| 151 | for root, dirs, files in os.walk(srctree): | ||
| 152 | # Sort files so that LICENSE is before LICENSE.subcomponent, which is | ||
| 153 | # meaningful if first_only is set. | ||
| 154 | for fn in sorted(files): | ||
| 155 | if fn.endswith(skip_extensions): | ||
| 156 | continue | ||
| 157 | for spec in licspecs: | ||
| 158 | if fnmatch.fnmatch(fn, spec): | ||
| 159 | fullpath = os.path.join(root, fn) | ||
| 160 | if not fullpath in licfiles: | ||
| 161 | licfiles.append(fullpath) | ||
| 162 | if first_only: | ||
| 163 | return licfiles | ||
| 164 | |||
| 165 | return licfiles | ||
| 166 | |||
| 167 | |||
| 168 | def match_licenses(licfiles, srctree, d, extra_hashes={}): | ||
| 169 | md5sums = {} | ||
| 170 | md5sums.update(_load_hash_csv(d)) | ||
| 171 | md5sums.update(_crunch_known_licenses(d)) | ||
| 172 | md5sums.update(extra_hashes) | ||
| 173 | |||
| 174 | licenses = [] | ||
| 175 | for licfile in sorted(licfiles): | ||
| 176 | resolved_licfile = d.expand(licfile) | ||
| 177 | md5value = bb.utils.md5_file(resolved_licfile) | ||
| 178 | license = md5sums.get(md5value, None) | ||
| 179 | if not license: | ||
| 180 | crunched_md5 = _crunch_license(resolved_licfile) | ||
| 181 | license = md5sums.get(crunched_md5, None) | ||
| 182 | if not license: | ||
| 183 | license = 'Unknown' | ||
| 184 | logger.info("Please add the following line for '%s' to a 'license-hashes.csv' " \ | ||
| 185 | "and replace `Unknown` with the license:\n" \ | ||
| 186 | "%s,Unknown" % (os.path.relpath(licfile, srctree + "/.."), md5value)) | ||
| 187 | |||
| 188 | licenses.append((license, os.path.relpath(licfile, srctree), md5value)) | ||
| 189 | |||
| 190 | return licenses | ||
| 191 | |||
| 192 | |||
| 193 | def find_licenses(srctree, d, first_only=False, extra_hashes={}): | ||
| 194 | licfiles = find_license_files(srctree, first_only) | ||
| 195 | licenses = match_licenses(licfiles, srctree, d, extra_hashes) | ||
| 196 | |||
| 197 | # FIXME should we grab at least one source file with a license header and add that too? | ||
| 198 | |||
| 199 | return licenses | ||
| 200 | |||
| 201 | |||
| 202 | def find_licenses_up(srcdir, topdir, d, first_only=False, extra_hashes={}): | ||
| 203 | licfiles = find_license_files(topdir, first_only, srcdir) | ||
| 204 | return match_licenses(licfiles, topdir, d, extra_hashes) | ||
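A hedged sketch of how these helpers were typically driven from a BitBake python task or a recipetool/devtool plugin; the datastore d and the use of ${S} as the source tree are assumptions, not something license_finder.py defines:

    # Illustrative only: runs inside a BitBake python task where 'd' and 'bb' exist.
    import oe.license_finder

    srctree = d.getVar("S")   # unpacked source tree (assumed)
    for license_name, relpath, md5sum in oe.license_finder.find_licenses(srctree, d):
        bb.note("Detected license %s in %s (md5 %s)" % (license_name, relpath, md5sum))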
diff --git a/meta/lib/oe/lsb.py b/meta/lib/oe/lsb.py deleted file mode 100644 index 3ec03e5042..0000000000 --- a/meta/lib/oe/lsb.py +++ /dev/null | |||
| @@ -1,123 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | def get_os_release(): | ||
| 8 | """Get all key-value pairs from /etc/os-release as a dict""" | ||
| 9 | from collections import OrderedDict | ||
| 10 | |||
| 11 | data = OrderedDict() | ||
| 12 | if os.path.exists('/etc/os-release'): | ||
| 13 | with open('/etc/os-release') as f: | ||
| 14 | for line in f: | ||
| 15 | try: | ||
| 16 | key, val = line.rstrip().split('=', 1) | ||
| 17 | except ValueError: | ||
| 18 | continue | ||
| 19 | data[key.strip()] = val.strip('"') | ||
| 20 | return data | ||
| 21 | |||
| 22 | def release_dict_osr(): | ||
| 23 | """ Populate a dict with pertinent values from /etc/os-release """ | ||
| 24 | data = {} | ||
| 25 | os_release = get_os_release() | ||
| 26 | if 'ID' in os_release: | ||
| 27 | data['DISTRIB_ID'] = os_release['ID'] | ||
| 28 | if 'VERSION_ID' in os_release: | ||
| 29 | data['DISTRIB_RELEASE'] = os_release['VERSION_ID'] | ||
| 30 | |||
| 31 | return data | ||
| 32 | |||
| 33 | def release_dict_lsb(): | ||
| 34 | """ Return the output of lsb_release -ir as a dictionary """ | ||
| 35 | from subprocess import PIPE | ||
| 36 | |||
| 37 | try: | ||
| 38 | output, err = bb.process.run(['lsb_release', '-ir'], stderr=PIPE) | ||
| 39 | except bb.process.CmdError as exc: | ||
| 40 | return {} | ||
| 41 | |||
| 42 | lsb_map = { 'Distributor ID': 'DISTRIB_ID', | ||
| 43 | 'Release': 'DISTRIB_RELEASE'} | ||
| 44 | lsb_keys = lsb_map.keys() | ||
| 45 | |||
| 46 | data = {} | ||
| 47 | for line in output.splitlines(): | ||
| 48 | if line.startswith("-e"): | ||
| 49 | line = line[3:] | ||
| 50 | try: | ||
| 51 | key, value = line.split(":\t", 1) | ||
| 52 | except ValueError: | ||
| 53 | continue | ||
| 54 | if key in lsb_keys: | ||
| 55 | data[lsb_map[key]] = value | ||
| 56 | |||
| 57 | if len(data.keys()) != 2: | ||
| 58 | return None | ||
| 59 | |||
| 60 | return data | ||
| 61 | |||
| 62 | def release_dict_file(): | ||
| 63 | """ Try to gather release information manually when other methods fail """ | ||
| 64 | data = {} | ||
| 65 | try: | ||
| 66 | if os.path.exists('/etc/lsb-release'): | ||
| 67 | data = {} | ||
| 68 | with open('/etc/lsb-release') as f: | ||
| 69 | for line in f: | ||
| 70 | key, value = line.split("=", 1) | ||
| 71 | data[key] = value.strip() | ||
| 72 | elif os.path.exists('/etc/redhat-release'): | ||
| 73 | data = {} | ||
| 74 | with open('/etc/redhat-release') as f: | ||
| 75 | distro = f.readline().strip() | ||
| 76 | import re | ||
| 77 | match = re.match(r'(.*) release (.*) \((.*)\)', distro) | ||
| 78 | if match: | ||
| 79 | data['DISTRIB_ID'] = match.group(1) | ||
| 80 | data['DISTRIB_RELEASE'] = match.group(2) | ||
| 81 | elif os.path.exists('/etc/SuSE-release'): | ||
| 82 | data = {} | ||
| 83 | data['DISTRIB_ID'] = 'SUSE LINUX' | ||
| 84 | with open('/etc/SuSE-release') as f: | ||
| 85 | for line in f: | ||
| 86 | if line.startswith('VERSION = '): | ||
| 87 | data['DISTRIB_RELEASE'] = line[10:].rstrip() | ||
| 88 | break | ||
| 89 | |||
| 90 | except IOError: | ||
| 91 | return {} | ||
| 92 | return data | ||
| 93 | |||
| 94 | def distro_identifier(adjust_hook=None): | ||
| 95 | """Return a distro identifier string based upon lsb_release -ri, | ||
| 96 | with optional adjustment via a hook""" | ||
| 97 | |||
| 98 | import re | ||
| 99 | |||
| 100 | # Try /etc/os-release first, then the output of `lsb_release -ir` and | ||
| 101 | # finally fall back on parsing various release files in order to determine | ||
| 102 | # host distro name and version. | ||
| 103 | distro_data = release_dict_osr() | ||
| 104 | if not distro_data: | ||
| 105 | distro_data = release_dict_lsb() | ||
| 106 | if not distro_data: | ||
| 107 | distro_data = release_dict_file() | ||
| 108 | |||
| 109 | distro_id = distro_data.get('DISTRIB_ID', '') | ||
| 110 | release = distro_data.get('DISTRIB_RELEASE', '') | ||
| 111 | |||
| 112 | if adjust_hook: | ||
| 113 | distro_id, release = adjust_hook(distro_id, release) | ||
| 114 | if not distro_id: | ||
| 115 | return "unknown" | ||
| 116 | # Filter out any non-alphanumerics and convert to lowercase | ||
| 117 | distro_id = re.sub(r'\W', '', distro_id).lower() | ||
| 118 | |||
| 119 | if release: | ||
| 120 | id_str = '{0}-{1}'.format(distro_id, release) | ||
| 121 | else: | ||
| 122 | id_str = distro_id | ||
| 123 | return id_str.replace(' ', '-').replace('/', '-') | ||
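A minimal sketch of calling distro_identifier() with an optional adjustment hook; the hook and the example outputs are illustrative assumptions:

    # Illustrative only: the hook may rewrite or drop the detected id/release.
    import oe.lsb

    def adjust(distro_id, release):
        if distro_id == "ubuntu":   # assumption: treat all Ubuntu releases the same
            release = ""
        return distro_id, release

    print(oe.lsb.distro_identifier())        # e.g. "fedora-39" or "ubuntu-22.04"
    print(oe.lsb.distro_identifier(adjust))  # e.g. "ubuntu" (release dropped by the hook)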
diff --git a/meta/lib/oe/maketype.py b/meta/lib/oe/maketype.py deleted file mode 100644 index 7a83bdf602..0000000000 --- a/meta/lib/oe/maketype.py +++ /dev/null | |||
| @@ -1,107 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | """OpenEmbedded variable typing support | ||
| 7 | |||
| 8 | Types are defined in the metadata by name, using the 'type' flag on a | ||
| 9 | variable. Other flags may be utilized in the construction of the types. See | ||
| 10 | the arguments of the type's factory for details. | ||
| 11 | """ | ||
| 12 | |||
| 13 | import inspect | ||
| 14 | import oe.types as types | ||
| 15 | from collections.abc import Callable | ||
| 16 | |||
| 17 | available_types = {} | ||
| 18 | |||
| 19 | class MissingFlag(TypeError): | ||
| 20 | """A particular flag is required to construct the type, but has not been | ||
| 21 | provided.""" | ||
| 22 | def __init__(self, flag, type): | ||
| 23 | self.flag = flag | ||
| 24 | self.type = type | ||
| 25 | TypeError.__init__(self) | ||
| 26 | |||
| 27 | def __str__(self): | ||
| 28 | return "Type '%s' requires flag '%s'" % (self.type, self.flag) | ||
| 29 | |||
| 30 | def factory(var_type): | ||
| 31 | """Return the factory for a specified type.""" | ||
| 32 | if var_type is None: | ||
| 33 | raise TypeError("No type specified. Valid types: %s" % | ||
| 34 | ', '.join(available_types)) | ||
| 35 | try: | ||
| 36 | return available_types[var_type] | ||
| 37 | except KeyError: | ||
| 38 | raise TypeError("Invalid type '%s':\n Valid types: %s" % | ||
| 39 | (var_type, ', '.join(available_types))) | ||
| 40 | |||
| 41 | def create(value, var_type, **flags): | ||
| 42 | """Create an object of the specified type, given the specified flags and | ||
| 43 | string value.""" | ||
| 44 | obj = factory(var_type) | ||
| 45 | objflags = {} | ||
| 46 | for flag in obj.flags: | ||
| 47 | if flag not in flags: | ||
| 48 | if flag not in obj.optflags: | ||
| 49 | raise MissingFlag(flag, var_type) | ||
| 50 | else: | ||
| 51 | objflags[flag] = flags[flag] | ||
| 52 | |||
| 53 | return obj(value, **objflags) | ||
| 54 | |||
| 55 | def get_callable_args(obj): | ||
| 56 | """Grab all but the first argument of the specified callable, returning | ||
| 57 | the list, as well as a list of which of the arguments have default | ||
| 58 | values.""" | ||
| 59 | if type(obj) is type: | ||
| 60 | obj = obj.__init__ | ||
| 61 | |||
| 62 | sig = inspect.signature(obj) | ||
| 63 | args = list(sig.parameters.keys()) | ||
| 64 | defaults = list(s for s in sig.parameters.keys() if sig.parameters[s].default != inspect.Parameter.empty) | ||
| 65 | flaglist = [] | ||
| 66 | if args: | ||
| 67 | if len(args) > 1 and args[0] == 'self': | ||
| 68 | args = args[1:] | ||
| 69 | flaglist.extend(args) | ||
| 70 | |||
| 71 | optional = set() | ||
| 72 | if defaults: | ||
| 73 | optional |= set(flaglist[-len(defaults):]) | ||
| 74 | return flaglist, optional | ||
| 75 | |||
| 76 | def factory_setup(name, obj): | ||
| 77 | """Prepare a factory for use.""" | ||
| 78 | args, optional = get_callable_args(obj) | ||
| 79 | extra_args = args[1:] | ||
| 80 | if extra_args: | ||
| 81 | obj.flags, optional = extra_args, optional | ||
| 82 | obj.optflags = set(optional) | ||
| 83 | else: | ||
| 84 | obj.flags = obj.optflags = () | ||
| 85 | |||
| 86 | if not hasattr(obj, 'name'): | ||
| 87 | obj.name = name | ||
| 88 | |||
| 89 | def register(name, factory): | ||
| 90 | """Register a type, given its name and a factory callable. | ||
| 91 | |||
| 92 | Determines the required and optional flags from the factory's | ||
| 93 | arguments.""" | ||
| 94 | factory_setup(name, factory) | ||
| 95 | available_types[factory.name] = factory | ||
| 96 | |||
| 97 | |||
| 98 | # Register all our included types | ||
| 99 | for name in dir(types): | ||
| 100 | if name.startswith('_'): | ||
| 101 | continue | ||
| 102 | |||
| 103 | obj = getattr(types, name) | ||
| 104 | if not isinstance(obj, Callable): | ||
| 105 | continue | ||
| 106 | |||
| 107 | register(name, obj) | ||
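A minimal sketch of the factory interface, assuming the 'boolean' and 'list' factories that oe.types provides and that the loop above registers:

    # Illustrative only: extra flags (such as 'separator') map onto the factory's
    # optional keyword arguments.
    import oe.maketype

    print(oe.maketype.create("1", "boolean"))                  # True
    print(oe.maketype.create("a b c", "list"))                 # ['a', 'b', 'c']
    print(oe.maketype.create("a,b,c", "list", separator=","))  # ['a', 'b', 'c']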
diff --git a/meta/lib/oe/manifest.py b/meta/lib/oe/manifest.py deleted file mode 100644 index cf7a13c247..0000000000 --- a/meta/lib/oe/manifest.py +++ /dev/null | |||
| @@ -1,202 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | from abc import ABCMeta, abstractmethod | ||
| 8 | import os | ||
| 9 | import re | ||
| 10 | import bb | ||
| 11 | |||
| 12 | class Manifest(object, metaclass=ABCMeta): | ||
| 13 | """ | ||
| 14 | This is an abstract class. Do not instantiate this directly. | ||
| 15 | """ | ||
| 16 | |||
| 17 | PKG_TYPE_MUST_INSTALL = "mip" | ||
| 18 | PKG_TYPE_MULTILIB = "mlp" | ||
| 19 | PKG_TYPE_LANGUAGE = "lgp" | ||
| 20 | PKG_TYPE_ATTEMPT_ONLY = "aop" | ||
| 21 | |||
| 22 | MANIFEST_TYPE_IMAGE = "image" | ||
| 23 | MANIFEST_TYPE_SDK_HOST = "sdk_host" | ||
| 24 | MANIFEST_TYPE_SDK_TARGET = "sdk_target" | ||
| 25 | |||
| 26 | var_maps = { | ||
| 27 | MANIFEST_TYPE_IMAGE: { | ||
| 28 | "PACKAGE_INSTALL": PKG_TYPE_MUST_INSTALL, | ||
| 29 | "PACKAGE_INSTALL_ATTEMPTONLY": PKG_TYPE_ATTEMPT_ONLY, | ||
| 30 | "LINGUAS_INSTALL": PKG_TYPE_LANGUAGE | ||
| 31 | }, | ||
| 32 | MANIFEST_TYPE_SDK_HOST: { | ||
| 33 | "TOOLCHAIN_HOST_TASK": PKG_TYPE_MUST_INSTALL, | ||
| 34 | "TOOLCHAIN_HOST_TASK_ATTEMPTONLY": PKG_TYPE_ATTEMPT_ONLY | ||
| 35 | }, | ||
| 36 | MANIFEST_TYPE_SDK_TARGET: { | ||
| 37 | "TOOLCHAIN_TARGET_TASK": PKG_TYPE_MUST_INSTALL, | ||
| 38 | "TOOLCHAIN_TARGET_TASK_ATTEMPTONLY": PKG_TYPE_ATTEMPT_ONLY | ||
| 39 | } | ||
| 40 | } | ||
| 41 | |||
| 42 | INSTALL_ORDER = [ | ||
| 43 | PKG_TYPE_LANGUAGE, | ||
| 44 | PKG_TYPE_MUST_INSTALL, | ||
| 45 | PKG_TYPE_ATTEMPT_ONLY, | ||
| 46 | PKG_TYPE_MULTILIB | ||
| 47 | ] | ||
| 48 | |||
| 49 | initial_manifest_file_header = \ | ||
| 50 | "# This file was generated automatically and contains the packages\n" \ | ||
| 51 | "# passed on to the package manager in order to create the rootfs.\n\n" \ | ||
| 52 | "# Format:\n" \ | ||
| 53 | "# <package_type>,<package_name>\n" \ | ||
| 54 | "# where:\n" \ | ||
| 55 | "# <package_type> can be:\n" \ | ||
| 56 | "# 'mip' = must install package\n" \ | ||
| 57 | "# 'aop' = attempt only package\n" \ | ||
| 58 | "# 'mlp' = multilib package\n" \ | ||
| 59 | "# 'lgp' = language package\n\n" | ||
| 60 | |||
| 61 | def __init__(self, d, manifest_dir=None, manifest_type=MANIFEST_TYPE_IMAGE): | ||
| 62 | self.d = d | ||
| 63 | self.manifest_type = manifest_type | ||
| 64 | |||
| 65 | if manifest_dir is None: | ||
| 66 | if manifest_type != self.MANIFEST_TYPE_IMAGE: | ||
| 67 | self.manifest_dir = self.d.getVar('SDK_DIR') | ||
| 68 | else: | ||
| 69 | self.manifest_dir = self.d.getVar('WORKDIR') | ||
| 70 | else: | ||
| 71 | self.manifest_dir = manifest_dir | ||
| 72 | |||
| 73 | bb.utils.mkdirhier(self.manifest_dir) | ||
| 74 | |||
| 75 | self.initial_manifest = os.path.join(self.manifest_dir, "%s_initial_manifest" % manifest_type) | ||
| 76 | self.final_manifest = os.path.join(self.manifest_dir, "%s_final_manifest" % manifest_type) | ||
| 77 | self.full_manifest = os.path.join(self.manifest_dir, "%s_full_manifest" % manifest_type) | ||
| 78 | |||
| 79 | # packages in the following vars will be split in 'must install' and | ||
| 80 | # 'multilib' | ||
| 81 | self.vars_to_split = ["PACKAGE_INSTALL", | ||
| 82 | "TOOLCHAIN_HOST_TASK", | ||
| 83 | "TOOLCHAIN_TARGET_TASK"] | ||
| 84 | |||
| 85 | """ | ||
| 86 | This creates a standard initial manifest for core-image-(minimal|sato|sato-sdk). | ||
| 87 | This will be used for testing until the class is implemented properly! | ||
| 88 | """ | ||
| 89 | def _create_dummy_initial(self): | ||
| 90 | image_rootfs = self.d.getVar('IMAGE_ROOTFS') | ||
| 91 | pkg_list = dict() | ||
| 92 | if image_rootfs.find("core-image-sato-sdk") > 0: | ||
| 93 | pkg_list[self.PKG_TYPE_MUST_INSTALL] = \ | ||
| 94 | "packagegroup-core-x11-sato-games packagegroup-base-extended " \ | ||
| 95 | "packagegroup-core-x11-sato packagegroup-core-x11-base " \ | ||
| 96 | "packagegroup-core-sdk packagegroup-core-tools-debug " \ | ||
| 97 | "packagegroup-core-boot packagegroup-core-tools-testapps " \ | ||
| 98 | "packagegroup-core-eclipse-debug packagegroup-core-qt-demoapps " \ | ||
| 99 | "apt packagegroup-core-tools-profile psplash " \ | ||
| 100 | "packagegroup-core-standalone-sdk-target " \ | ||
| 101 | "packagegroup-core-ssh-openssh dpkg kernel-dev" | ||
| 102 | pkg_list[self.PKG_TYPE_LANGUAGE] = \ | ||
| 103 | "locale-base-en-us locale-base-en-gb" | ||
| 104 | elif image_rootfs.find("core-image-sato") > 0: | ||
| 105 | pkg_list[self.PKG_TYPE_MUST_INSTALL] = \ | ||
| 106 | "packagegroup-core-ssh-dropbear packagegroup-core-x11-sato-games " \ | ||
| 107 | "packagegroup-core-x11-base psplash apt dpkg packagegroup-base-extended " \ | ||
| 108 | "packagegroup-core-x11-sato packagegroup-core-boot" | ||
| 109 | pkg_list['lgp'] = \ | ||
| 110 | "locale-base-en-us locale-base-en-gb" | ||
| 111 | elif image_rootfs.find("core-image-minimal") > 0: | ||
| 112 | pkg_list[self.PKG_TYPE_MUST_INSTALL] = "packagegroup-core-boot" | ||
| 113 | |||
| 114 | with open(self.initial_manifest, "w+") as manifest: | ||
| 115 | manifest.write(self.initial_manifest_file_header) | ||
| 116 | |||
| 117 | for pkg_type in pkg_list: | ||
| 118 | for pkg in pkg_list[pkg_type].split(): | ||
| 119 | manifest.write("%s,%s\n" % (pkg_type, pkg)) | ||
| 120 | |||
| 121 | """ | ||
| 122 | This will create the initial manifest which will be used by the Rootfs class | ||
| 123 | to generate the rootfs. | ||
| 124 | """ | ||
| 125 | @abstractmethod | ||
| 126 | def create_initial(self): | ||
| 127 | pass | ||
| 128 | |||
| 129 | """ | ||
| 130 | This creates the manifest after everything has been installed. | ||
| 131 | """ | ||
| 132 | @abstractmethod | ||
| 133 | def create_final(self): | ||
| 134 | pass | ||
| 135 | |||
| 136 | """ | ||
| 137 | This creates the manifest after the packages in the initial manifest have | ||
| 138 | been dummy installed. It lists all *to be installed* packages. There is no | ||
| 139 | real installation, just a test. | ||
| 140 | """ | ||
| 141 | @abstractmethod | ||
| 142 | def create_full(self, pm): | ||
| 143 | pass | ||
| 144 | |||
| 145 | """ | ||
| 146 | The following function parses an initial manifest and returns a dictionary | ||
| 147 | object with the must install, attempt only, multilib and language packages. | ||
| 148 | """ | ||
| 149 | def parse_initial_manifest(self): | ||
| 150 | pkgs = dict() | ||
| 151 | |||
| 152 | with open(self.initial_manifest) as manifest: | ||
| 153 | for line in manifest.read().split('\n'): | ||
| 154 | comment = re.match("^#.*", line) | ||
| 155 | pattern = "^(%s|%s|%s|%s),(.*)$" % \ | ||
| 156 | (self.PKG_TYPE_MUST_INSTALL, | ||
| 157 | self.PKG_TYPE_ATTEMPT_ONLY, | ||
| 158 | self.PKG_TYPE_MULTILIB, | ||
| 159 | self.PKG_TYPE_LANGUAGE) | ||
| 160 | pkg = re.match(pattern, line) | ||
| 161 | |||
| 162 | if comment is not None: | ||
| 163 | continue | ||
| 164 | |||
| 165 | if pkg is not None: | ||
| 166 | pkg_type = pkg.group(1) | ||
| 167 | pkg_name = pkg.group(2) | ||
| 168 | |||
| 169 | if not pkg_type in pkgs: | ||
| 170 | pkgs[pkg_type] = [pkg_name] | ||
| 171 | else: | ||
| 172 | pkgs[pkg_type].append(pkg_name) | ||
| 173 | |||
| 174 | return pkgs | ||
| 175 | |||
| 176 | ''' | ||
| 177 | The following function parses a full manifest and returns a list | ||
| 178 | of packages. | ||
| 179 | ''' | ||
| 180 | def parse_full_manifest(self): | ||
| 181 | installed_pkgs = list() | ||
| 182 | if not os.path.exists(self.full_manifest): | ||
| 183 | bb.note('full manifest does not exist') | ||
| 184 | return installed_pkgs | ||
| 185 | |||
| 186 | with open(self.full_manifest, 'r') as manifest: | ||
| 187 | for pkg in manifest.read().split('\n'): | ||
| 188 | installed_pkgs.append(pkg.strip()) | ||
| 189 | |||
| 190 | return installed_pkgs | ||
| 191 | |||
| 192 | |||
| 193 | |||
| 194 | def create_manifest(d, final_manifest=False, manifest_dir=None, | ||
| 195 | manifest_type=Manifest.MANIFEST_TYPE_IMAGE): | ||
| 196 | import importlib | ||
| 197 | manifest = importlib.import_module('oe.package_manager.' + d.getVar('IMAGE_PKGTYPE') + '.manifest').PkgManifest(d, manifest_dir, manifest_type) | ||
| 198 | |||
| 199 | if final_manifest: | ||
| 200 | manifest.create_final() | ||
| 201 | else: | ||
| 202 | manifest.create_initial() | ||
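A hedged sketch of driving this from an image recipe's python task; IMAGE_PKGTYPE selects which oe.package_manager backend supplies PkgManifest, and the values shown are assumptions:

    # Illustrative only: 'd' is the BitBake datastore of an image recipe.
    from oe.manifest import Manifest, create_manifest

    # Writes e.g. ${WORKDIR}/image_initial_manifest containing lines such as:
    #   mip,packagegroup-core-boot
    #   lgp,locale-base-en-us
    create_manifest(d, final_manifest=False,
                    manifest_type=Manifest.MANIFEST_TYPE_IMAGE)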
diff --git a/meta/lib/oe/npm_registry.py b/meta/lib/oe/npm_registry.py deleted file mode 100644 index d97ced7cda..0000000000 --- a/meta/lib/oe/npm_registry.py +++ /dev/null | |||
| @@ -1,175 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: MIT | ||
| 5 | # | ||
| 6 | |||
| 7 | import bb | ||
| 8 | import json | ||
| 9 | import subprocess | ||
| 10 | |||
| 11 | _ALWAYS_SAFE = frozenset('ABCDEFGHIJKLMNOPQRSTUVWXYZ' | ||
| 12 | 'abcdefghijklmnopqrstuvwxyz' | ||
| 13 | '0123456789' | ||
| 14 | '_.-~()') | ||
| 15 | |||
| 16 | MISSING_OK = object() | ||
| 17 | |||
| 18 | REGISTRY = "https://registry.npmjs.org" | ||
| 19 | |||
| 20 | # we cannot use urllib.parse here because npm expects lowercase | ||
| 21 | # hex-chars but urllib generates uppercase ones | ||
| 22 | def uri_quote(s, safe = '/'): | ||
| 23 | res = "" | ||
| 24 | safe_set = set(safe) | ||
| 25 | for c in s: | ||
| 26 | if c in _ALWAYS_SAFE or c in safe_set: | ||
| 27 | res += c | ||
| 28 | else: | ||
| 29 | res += '%%%02x' % ord(c) | ||
| 30 | return res | ||
| 31 | |||
| 32 | class PackageJson: | ||
| 33 | def __init__(self, spec): | ||
| 34 | self.__spec = spec | ||
| 35 | |||
| 36 | @property | ||
| 37 | def name(self): | ||
| 38 | return self.__spec['name'] | ||
| 39 | |||
| 40 | @property | ||
| 41 | def version(self): | ||
| 42 | return self.__spec['version'] | ||
| 43 | |||
| 44 | @property | ||
| 45 | def empty_manifest(self): | ||
| 46 | return { | ||
| 47 | 'name': self.name, | ||
| 48 | 'description': self.__spec.get('description', ''), | ||
| 49 | 'versions': {}, | ||
| 50 | } | ||
| 51 | |||
| 52 | def base_filename(self): | ||
| 53 | return uri_quote(self.name, safe = '@') | ||
| 54 | |||
| 55 | def as_manifest_entry(self, tarball_uri): | ||
| 56 | res = {} | ||
| 57 | |||
| 58 | ## NOTE: 'npm install' requires more than basic meta information; | ||
| 59 | ## e.g. it takes 'bin' from this manifest entry but not the actual | ||
| 60 | ## 'package.json' | ||
| 61 | for (idx,dflt) in [('name', None), | ||
| 62 | ('description', ""), | ||
| 63 | ('version', None), | ||
| 64 | ('bin', MISSING_OK), | ||
| 65 | ('man', MISSING_OK), | ||
| 66 | ('scripts', MISSING_OK), | ||
| 67 | ('directories', MISSING_OK), | ||
| 68 | ('dependencies', MISSING_OK), | ||
| 69 | ('devDependencies', MISSING_OK), | ||
| 70 | ('optionalDependencies', MISSING_OK), | ||
| 71 | ('license', "unknown")]: | ||
| 72 | if idx in self.__spec: | ||
| 73 | res[idx] = self.__spec[idx] | ||
| 74 | elif dflt == MISSING_OK: | ||
| 75 | pass | ||
| 76 | elif dflt != None: | ||
| 77 | res[idx] = dflt | ||
| 78 | else: | ||
| 79 | raise Exception("%s-%s: missing key %s" % (self.name, | ||
| 80 | self.version, | ||
| 81 | idx)) | ||
| 82 | |||
| 83 | res['dist'] = { | ||
| 84 | 'tarball': tarball_uri, | ||
| 85 | } | ||
| 86 | |||
| 87 | return res | ||
| 88 | |||
| 89 | class ManifestImpl: | ||
| 90 | def __init__(self, base_fname, spec): | ||
| 91 | self.__base = base_fname | ||
| 92 | self.__spec = spec | ||
| 93 | |||
| 94 | def load(self): | ||
| 95 | try: | ||
| 96 | with open(self.filename, "r") as f: | ||
| 97 | res = json.load(f) | ||
| 98 | except IOError: | ||
| 99 | res = self.__spec.empty_manifest | ||
| 100 | |||
| 101 | return res | ||
| 102 | |||
| 103 | def save(self, meta): | ||
| 104 | with open(self.filename, "w") as f: | ||
| 105 | json.dump(meta, f, indent = 2) | ||
| 106 | |||
| 107 | @property | ||
| 108 | def filename(self): | ||
| 109 | return self.__base + ".meta" | ||
| 110 | |||
| 111 | class Manifest: | ||
| 112 | def __init__(self, base_fname, spec): | ||
| 113 | self.__base = base_fname | ||
| 114 | self.__spec = spec | ||
| 115 | self.__lockf = None | ||
| 116 | self.__impl = None | ||
| 117 | |||
| 118 | def __enter__(self): | ||
| 119 | self.__lockf = bb.utils.lockfile(self.__base + ".lock") | ||
| 120 | self.__impl = ManifestImpl(self.__base, self.__spec) | ||
| 121 | return self.__impl | ||
| 122 | |||
| 123 | def __exit__(self, exc_type, exc_val, exc_tb): | ||
| 124 | bb.utils.unlockfile(self.__lockf) | ||
| 125 | |||
| 126 | class NpmCache: | ||
| 127 | def __init__(self, cache): | ||
| 128 | self.__cache = cache | ||
| 129 | |||
| 130 | @property | ||
| 131 | def path(self): | ||
| 132 | return self.__cache | ||
| 133 | |||
| 134 | def run(self, type, key, fname): | ||
| 135 | subprocess.run(['oe-npm-cache', self.__cache, type, key, fname], | ||
| 136 | check = True) | ||
| 137 | |||
| 138 | class NpmRegistry: | ||
| 139 | def __init__(self, path, cache): | ||
| 140 | self.__path = path | ||
| 141 | self.__cache = NpmCache(cache + '/_cacache') | ||
| 142 | bb.utils.mkdirhier(self.__path) | ||
| 143 | bb.utils.mkdirhier(self.__cache.path) | ||
| 144 | |||
| 145 | @staticmethod | ||
| 146 | ## This function is critical and must match nodejs expectations | ||
| 147 | def _meta_uri(spec): | ||
| 148 | return REGISTRY + '/' + uri_quote(spec.name, safe = '@') | ||
| 149 | |||
| 150 | @staticmethod | ||
| 151 | ## Exact return value does not matter; just make it look like a | ||
| 152 | ## usual registry url | ||
| 153 | def _tarball_uri(spec): | ||
| 154 | return '%s/%s/-/%s-%s.tgz' % (REGISTRY, | ||
| 155 | uri_quote(spec.name, safe = '@'), | ||
| 156 | uri_quote(spec.name, safe = '@/'), | ||
| 157 | spec.version) | ||
| 158 | |||
| 159 | def add_pkg(self, tarball, pkg_json): | ||
| 160 | pkg_json = PackageJson(pkg_json) | ||
| 161 | base = os.path.join(self.__path, pkg_json.base_filename()) | ||
| 162 | |||
| 163 | with Manifest(base, pkg_json) as manifest: | ||
| 164 | meta = manifest.load() | ||
| 165 | tarball_uri = self._tarball_uri(pkg_json) | ||
| 166 | |||
| 167 | meta['versions'][pkg_json.version] = pkg_json.as_manifest_entry(tarball_uri) | ||
| 168 | |||
| 169 | manifest.save(meta) | ||
| 170 | |||
| 171 | ## Cache entries are a little bit dependent on the nodejs | ||
| 172 | ## version; version specific cache implementation must | ||
| 173 | ## mitigate differences | ||
| 174 | self.__cache.run('meta', self._meta_uri(pkg_json), manifest.filename); | ||
| 175 | self.__cache.run('tgz', tarball_uri, tarball); | ||
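For reference, a minimal sketch of the lowercase percent-encoding this module relies on; the scoped package name is an illustrative assumption:

    # Illustrative only: npm expects lowercase hex escapes, unlike urllib.parse.quote().
    from oe.npm_registry import uri_quote

    print(uri_quote("@scope/name", safe="@"))   # "@scope%2fname"

Publishing a package then amounts to NpmRegistry(path, cache).add_pkg(tarball, pkg_json_dict), which records the version in the per-package manifest and primes the oe-npm-cache entries for both the metadata and the tarball.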
diff --git a/meta/lib/oe/overlayfs.py b/meta/lib/oe/overlayfs.py deleted file mode 100644 index 8b88900f71..0000000000 --- a/meta/lib/oe/overlayfs.py +++ /dev/null | |||
| @@ -1,54 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | # This file contains common functions for overlayfs and its QA check | ||
| 7 | |||
| 8 | # this function is based on https://github.com/systemd/systemd/blob/main/src/basic/unit-name.c | ||
| 9 | def escapeSystemdUnitName(path): | ||
| 10 | escapeMap = { | ||
| 11 | '/': '-', | ||
| 12 | '-': "\\x2d", | ||
| 13 | '\\': "\\x5c" | ||
| 14 | } | ||
| 15 | return "".join([escapeMap.get(c, c) for c in path.strip('/')]) | ||
| 16 | |||
| 17 | def strForBash(s): | ||
| 18 | return s.replace('\\', '\\\\') | ||
| 19 | |||
| 20 | def allOverlaysUnitName(d): | ||
| 21 | return d.getVar('PN') + '-overlays.service' | ||
| 22 | |||
| 23 | def mountUnitName(unit): | ||
| 24 | return escapeSystemdUnitName(unit) + '.mount' | ||
| 25 | |||
| 26 | def helperUnitName(unit): | ||
| 27 | return escapeSystemdUnitName(unit) + '-create-upper-dir.service' | ||
| 28 | |||
| 29 | def unitFileList(d): | ||
| 30 | fileList = [] | ||
| 31 | overlayMountPoints = d.getVarFlags("OVERLAYFS_MOUNT_POINT") | ||
| 32 | |||
| 33 | if not overlayMountPoints: | ||
| 34 | bb.fatal("A recipe uses overlayfs class but there is no OVERLAYFS_MOUNT_POINT set in your MACHINE configuration") | ||
| 35 | |||
| 36 | # check that we have required mount points set first | ||
| 37 | requiredMountPoints = d.getVarFlags('OVERLAYFS_WRITABLE_PATHS') | ||
| 38 | for mountPoint in requiredMountPoints: | ||
| 39 | if mountPoint not in overlayMountPoints: | ||
| 40 | bb.fatal("Missing required mount point for OVERLAYFS_MOUNT_POINT[%s] in your MACHINE configuration" % mountPoint) | ||
| 41 | |||
| 42 | for mountPoint in overlayMountPoints: | ||
| 43 | mountPointList = d.getVarFlag('OVERLAYFS_WRITABLE_PATHS', mountPoint) | ||
| 44 | if not mountPointList: | ||
| 45 | bb.debug(1, "No mount points defined for %s flag, don't add to file list", mountPoint) | ||
| 46 | continue | ||
| 47 | for path in mountPointList.split(): | ||
| 48 | fileList.append(mountUnitName(path)) | ||
| 49 | fileList.append(helperUnitName(path)) | ||
| 50 | |||
| 51 | fileList.append(allOverlaysUnitName(d)) | ||
| 52 | |||
| 53 | return fileList | ||
| 54 | |||
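A minimal sketch of the unit-name escaping above; the mount point paths are illustrative assumptions:

    # Illustrative only: systemd-style escaping of overlayfs mount point paths.
    from oe.overlayfs import helperUnitName, mountUnitName

    print(mountUnitName("/usr/share/my-app"))   # usr-share-my\x2dapp.mount
    print(helperUnitName("/data"))              # data-create-upper-dir.service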
diff --git a/meta/lib/oe/package.py b/meta/lib/oe/package.py deleted file mode 100644 index baaa0cba02..0000000000 --- a/meta/lib/oe/package.py +++ /dev/null | |||
| @@ -1,2110 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | import errno | ||
| 8 | import fnmatch | ||
| 9 | import itertools | ||
| 10 | import os | ||
| 11 | import shlex | ||
| 12 | import re | ||
| 13 | import glob | ||
| 14 | import stat | ||
| 15 | import mmap | ||
| 16 | import subprocess | ||
| 17 | import shutil | ||
| 18 | |||
| 19 | import bb.parse | ||
| 20 | import oe.cachedpath | ||
| 21 | |||
| 22 | def runstrip(file, elftype, strip, extra_strip_sections=''): | ||
| 23 | # Function to strip a single file, called from split_and_strip_files below | ||
| 24 | # A working 'file' (one which works on the target architecture) | ||
| 25 | # | ||
| 26 | # The elftype is a bit pattern (explained in is_elf below) to tell | ||
| 27 | # us what type of file we're processing... | ||
| 28 | # 4 - executable | ||
| 29 | # 8 - shared library | ||
| 30 | # 16 - kernel module | ||
| 31 | |||
| 32 | newmode = None | ||
| 33 | if not os.access(file, os.W_OK) or not os.access(file, os.R_OK): | ||
| 34 | origmode = os.stat(file)[stat.ST_MODE] | ||
| 35 | newmode = origmode | stat.S_IWRITE | stat.S_IREAD | ||
| 36 | os.chmod(file, newmode) | ||
| 37 | |||
| 38 | stripcmd = [strip] | ||
| 39 | skip_strip = False | ||
| 40 | # kernel module | ||
| 41 | if elftype & 16: | ||
| 42 | if is_kernel_module_signed(file): | ||
| 43 | bb.debug(1, "Skip strip on signed module %s" % file) | ||
| 44 | skip_strip = True | ||
| 45 | else: | ||
| 46 | stripcmd.extend(["--strip-debug", "--remove-section=.comment", | ||
| 47 | "--remove-section=.note", "--preserve-dates"]) | ||
| 48 | # .so and shared library | ||
| 49 | elif ".so" in file and elftype & 8: | ||
| 50 | stripcmd.extend(["--remove-section=.comment", "--remove-section=.note", "--strip-unneeded"]) | ||
| 51 | # shared or executable: | ||
| 52 | elif elftype & 8 or elftype & 4: | ||
| 53 | stripcmd.extend(["--remove-section=.comment", "--remove-section=.note"]) | ||
| 54 | if extra_strip_sections != '': | ||
| 55 | for section in extra_strip_sections.split(): | ||
| 56 | stripcmd.extend(["--remove-section=" + section]) | ||
| 57 | |||
| 58 | stripcmd.append(file) | ||
| 59 | bb.debug(1, "runstrip: %s" % stripcmd) | ||
| 60 | |||
| 61 | if not skip_strip: | ||
| 62 | output = subprocess.check_output(stripcmd, stderr=subprocess.STDOUT) | ||
| 63 | |||
| 64 | if newmode: | ||
| 65 | os.chmod(file, origmode) | ||
| 66 | |||
| 67 | # Detect .ko module by searching for "vermagic=" string | ||
| 68 | def is_kernel_module(path): | ||
| 69 | with open(path) as f: | ||
| 70 | return mmap.mmap(f.fileno(), 0, prot=mmap.PROT_READ).find(b"vermagic=") >= 0 | ||
| 71 | |||
| 72 | # Detect if .ko module is signed | ||
| 73 | def is_kernel_module_signed(path): | ||
| 74 | with open(path, "rb") as f: | ||
| 75 | f.seek(-28, 2) | ||
| 76 | module_tail = f.read() | ||
| 77 | return "Module signature appended" in "".join(chr(c) for c in bytearray(module_tail)) | ||
| 78 | |||
| 79 | # Return type (bits): | ||
| 80 | # 0 - not elf | ||
| 81 | # 1 - ELF | ||
| 82 | # 2 - stripped | ||
| 83 | # 4 - executable | ||
| 84 | # 8 - shared library | ||
| 85 | # 16 - kernel module | ||
| 86 | def is_elf(path): | ||
| 87 | exec_type = 0 | ||
| 88 | result = subprocess.check_output(["file", "-b", path], stderr=subprocess.STDOUT).decode("utf-8") | ||
| 89 | |||
| 90 | if "ELF" in result: | ||
| 91 | exec_type |= 1 | ||
| 92 | if "not stripped" not in result: | ||
| 93 | exec_type |= 2 | ||
| 94 | if "executable" in result: | ||
| 95 | exec_type |= 4 | ||
| 96 | if "shared" in result: | ||
| 97 | exec_type |= 8 | ||
| 98 | if "relocatable" in result: | ||
| 99 | if path.endswith(".ko") and path.find("/lib/modules/") != -1 and is_kernel_module(path): | ||
| 100 | exec_type |= 16 | ||
| 101 | return (path, exec_type) | ||
| 102 | |||
| 103 | def is_static_lib(path): | ||
| 104 | if path.endswith('.a') and not os.path.islink(path): | ||
| 105 | with open(path, 'rb') as fh: | ||
| 106 | # The magic must include the first slash to avoid | ||
| 107 | # matching golang static libraries | ||
| 108 | magic = b'!<arch>\x0a/' | ||
| 109 | start = fh.read(len(magic)) | ||
| 110 | return start == magic | ||
| 111 | return False | ||
| 112 | |||
| 113 | def strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, max_process, qa_already_stripped=False): | ||
| 114 | """ | ||
| 115 | Strip executable code (like executables, shared libraries) _in_place_ | ||
| 116 | - Based on sysroot_strip in staging.bbclass | ||
| 117 | :param dstdir: directory in which to strip files | ||
| 118 | :param strip_cmd: Strip command (usually ${STRIP}) | ||
| 119 | :param libdir: ${libdir} - strip .so files in this directory | ||
| 120 | :param base_libdir: ${base_libdir} - strip .so files in this directory | ||
| 121 | :param max_process: number of stripping processes started in parallel | ||
| 122 | :param qa_already_stripped: Set to True if 'already-stripped' is in ${INSANE_SKIP} | ||
| 123 | This is for proper logging and messages only. | ||
| 124 | """ | ||
| 125 | import stat, errno, oe.path, oe.utils | ||
| 126 | |||
| 127 | elffiles = {} | ||
| 128 | inodes = {} | ||
| 129 | libdir = os.path.abspath(dstdir + os.sep + libdir) | ||
| 130 | base_libdir = os.path.abspath(dstdir + os.sep + base_libdir) | ||
| 131 | exec_mask = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH | ||
| 132 | # | ||
| 133 | # First lets figure out all of the files we may have to process | ||
| 134 | # | ||
| 135 | checkelf = [] | ||
| 136 | inodecache = {} | ||
| 137 | for root, dirs, files in os.walk(dstdir): | ||
| 138 | for f in files: | ||
| 139 | file = os.path.join(root, f) | ||
| 140 | |||
| 141 | try: | ||
| 142 | ltarget = oe.path.realpath(file, dstdir, False) | ||
| 143 | s = os.lstat(ltarget) | ||
| 144 | except OSError as e: | ||
| 145 | (err, strerror) = e.args | ||
| 146 | if err != errno.ENOENT: | ||
| 147 | raise | ||
| 148 | # Skip broken symlinks | ||
| 149 | continue | ||
| 150 | if not s: | ||
| 151 | continue | ||
| 152 | # Check it's an executable | ||
| 153 | if s[stat.ST_MODE] & exec_mask \ | ||
| 154 | or ((file.startswith(libdir) or file.startswith(base_libdir)) and ".so" in f) \ | ||
| 155 | or file.endswith('.ko'): | ||
| 156 | # If it's a symlink, and points to an ELF file, we capture the readlink target | ||
| 157 | if os.path.islink(file): | ||
| 158 | continue | ||
| 159 | |||
| 160 | # It's a file (or hardlink), not a link | ||
| 161 | # ...but is it ELF, and is it already stripped? | ||
| 162 | checkelf.append(file) | ||
| 163 | inodecache[file] = s.st_ino | ||
| 164 | results = oe.utils.multiprocess_launch_mp(is_elf, checkelf, max_process) | ||
| 165 | for (file, elf_file) in results: | ||
| 166 | #elf_file = is_elf(file) | ||
| 167 | if elf_file & 1: | ||
| 168 | if elf_file & 2: | ||
| 169 | if qa_already_stripped: | ||
| 170 | bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dstdir):], pn)) | ||
| 171 | else: | ||
| 172 | bb.warn("File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dstdir):], pn)) | ||
| 173 | continue | ||
| 174 | |||
| 175 | if inodecache[file] in inodes: | ||
| 176 | os.unlink(file) | ||
| 177 | os.link(inodes[inodecache[file]], file) | ||
| 178 | else: | ||
| 179 | # break hardlinks so that we do not strip the original. | ||
| 180 | inodes[inodecache[file]] = file | ||
| 181 | bb.utils.break_hardlinks(file) | ||
| 182 | elffiles[file] = elf_file | ||
| 183 | |||
| 184 | # | ||
| 185 | # Now strip them (in parallel) | ||
| 186 | # | ||
| 187 | sfiles = [] | ||
| 188 | for file in elffiles: | ||
| 189 | elf_file = int(elffiles[file]) | ||
| 190 | sfiles.append((file, elf_file, strip_cmd)) | ||
| 191 | |||
| 192 | oe.utils.multiprocess_launch_mp(runstrip, sfiles, max_process) | ||
| 193 | |||
| 194 | TRANSLATE = ( | ||
| 195 | ("@", "@at@"), | ||
| 196 | (" ", "@space@"), | ||
| 197 | ("\t", "@tab@"), | ||
| 198 | ("[", "@openbrace@"), | ||
| 199 | ("]", "@closebrace@"), | ||
| 200 | ("_", "@underscore@"), | ||
| 201 | (":", "@colon@"), | ||
| 202 | ) | ||
| 203 | |||
| 204 | def file_translate(file): | ||
| 205 | ft = file | ||
| 206 | for s, replace in TRANSLATE: | ||
| 207 | ft = ft.replace(s, replace) | ||
| 208 | |||
| 209 | return ft | ||
| 210 | |||
| 211 | def file_reverse_translate(file): | ||
| 212 | ft = file | ||
| 213 | for s, replace in reversed(TRANSLATE): | ||
| 214 | ft = ft.replace(replace, s) | ||
| 215 | |||
| 216 | return ft | ||
| 217 | |||
| 218 | def filedeprunner(pkg, pkgfiles, rpmdeps, pkgdest): | ||
| 219 | import re, subprocess, shlex | ||
| 220 | |||
| 221 | provides = {} | ||
| 222 | requires = {} | ||
| 223 | |||
| 224 | file_re = re.compile(r'\s+\d+\s(.*)') | ||
| 225 | dep_re = re.compile(r'\s+(\S)\s+(.*)') | ||
| 226 | r = re.compile(r'[<>=]+\s+\S*') | ||
| 227 | |||
| 228 | def process_deps(pipe, pkg, pkgdest, provides, requires): | ||
| 229 | file = None | ||
| 230 | for line in pipe.split("\n"): | ||
| 231 | |||
| 232 | m = file_re.match(line) | ||
| 233 | if m: | ||
| 234 | file = m.group(1) | ||
| 235 | file = file.replace(pkgdest + "/" + pkg, "") | ||
| 236 | file = file_translate(file) | ||
| 237 | continue | ||
| 238 | |||
| 239 | m = dep_re.match(line) | ||
| 240 | if not m or not file: | ||
| 241 | continue | ||
| 242 | |||
| 243 | type, dep = m.groups() | ||
| 244 | |||
| 245 | if type == 'R': | ||
| 246 | i = requires | ||
| 247 | elif type == 'P': | ||
| 248 | i = provides | ||
| 249 | else: | ||
| 250 | continue | ||
| 251 | |||
| 252 | if dep.startswith("python("): | ||
| 253 | continue | ||
| 254 | |||
| 255 | # Ignore all perl(VMS::...) and perl(Mac::...) dependencies. These | ||
| 256 | # are typically used conditionally from the Perl code, but are | ||
| 257 | # generated as unconditional dependencies. | ||
| 258 | if dep.startswith('perl(VMS::') or dep.startswith('perl(Mac::'): | ||
| 259 | continue | ||
| 260 | |||
| 261 | # Ignore perl dependencies on .pl files. | ||
| 262 | if dep.startswith('perl(') and dep.endswith('.pl)'): | ||
| 263 | continue | ||
| 264 | |||
| 265 | # Remove perl versions and perl module versions since they typically | ||
| 266 | # do not make sense when used as package versions. | ||
| 267 | if dep.startswith('perl') and r.search(dep): | ||
| 268 | dep = dep.split()[0] | ||
| 269 | |||
| 270 | # Put parentheses around any version specifications. | ||
| 271 | dep = r.sub(r'(\g<0>)',dep) | ||
| 272 | |||
| 273 | if file not in i: | ||
| 274 | i[file] = [] | ||
| 275 | i[file].append(dep) | ||
| 276 | |||
| 277 | return provides, requires | ||
| 278 | |||
| 279 | output = subprocess.check_output(shlex.split(rpmdeps) + pkgfiles, stderr=subprocess.STDOUT).decode("utf-8") | ||
| 280 | provides, requires = process_deps(output, pkg, pkgdest, provides, requires) | ||
| 281 | |||
| 282 | return (pkg, provides, requires) | ||
| 283 | |||
| 284 | |||
| 285 | def read_shlib_providers(d): | ||
| 286 | import re | ||
| 287 | |||
| 288 | shlib_provider = {} | ||
| 289 | shlibs_dirs = d.getVar('SHLIBSDIRS').split() | ||
| 290 | list_re = re.compile(r'^(.*)\.list$') | ||
| 291 | # Go from least to most specific since the last one found wins | ||
| 292 | for dir in reversed(shlibs_dirs): | ||
| 293 | bb.debug(2, "Reading shlib providers in %s" % (dir)) | ||
| 294 | if not os.path.exists(dir): | ||
| 295 | continue | ||
| 296 | for file in sorted(os.listdir(dir)): | ||
| 297 | m = list_re.match(file) | ||
| 298 | if m: | ||
| 299 | dep_pkg = m.group(1) | ||
| 300 | try: | ||
| 301 | fd = open(os.path.join(dir, file)) | ||
| 302 | except IOError: | ||
| 303 | # During a build unrelated shlib files may be deleted, so | ||
| 304 | # handle files disappearing between the listdirs and open. | ||
| 305 | continue | ||
| 306 | lines = fd.readlines() | ||
| 307 | fd.close() | ||
| 308 | for l in lines: | ||
| 309 | s = l.strip().split(":") | ||
| 310 | if s[0] not in shlib_provider: | ||
| 311 | shlib_provider[s[0]] = {} | ||
| 312 | shlib_provider[s[0]][s[1]] = (dep_pkg, s[2]) | ||
| 313 | return shlib_provider | ||
| 314 | |||
| 315 | # We generate a master list of directories to process. We start by | ||
| 316 | # seeding this list with reasonable defaults, then load from | ||
| 317 | # the fs-perms.txt files | ||
| 318 | def fixup_perms(d): | ||
| 319 | import pwd, grp | ||
| 320 | |||
| 321 | cpath = oe.cachedpath.CachedPath() | ||
| 322 | dvar = d.getVar('PKGD') | ||
| 323 | |||
| 324 | # init using a string with the same format as a line as documented in | ||
| 325 | # the fs-perms.txt file | ||
| 326 | # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid> | ||
| 327 | # <path> link <link target> | ||
| 328 | # | ||
| 329 | # __str__ can be used to print out an entry in the input format | ||
| 330 | # | ||
| 331 | # if fs_perms_entry.path is None: | ||
| 332 | # an error occurred | ||
| 333 | # if fs_perms_entry.link, you can retrieve: | ||
| 334 | # fs_perms_entry.path = path | ||
| 335 | # fs_perms_entry.link = target of link | ||
| 336 | # if not fs_perms_entry.link, you can retrieve: | ||
| 337 | # fs_perms_entry.path = path | ||
| 338 | # fs_perms_entry.mode = expected dir mode or None | ||
| 339 | # fs_perms_entry.uid = expected uid or -1 | ||
| 340 | # fs_perms_entry.gid = expected gid or -1 | ||
| 341 | # fs_perms_entry.walk = 'true' or something else | ||
| 342 | # fs_perms_entry.fmode = expected file mode or None | ||
| 343 | # fs_perms_entry.fuid = expected file uid or -1 | ||
| 344 | # fs_perms_entry.fgid = expected file gid or -1 | ||
| 345 | class fs_perms_entry(): | ||
| 346 | def __init__(self, line): | ||
| 347 | lsplit = line.split() | ||
| 348 | if len(lsplit) == 3 and lsplit[1].lower() == "link": | ||
| 349 | self._setlink(lsplit[0], lsplit[2]) | ||
| 350 | elif len(lsplit) == 8: | ||
| 351 | self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7]) | ||
| 352 | else: | ||
| 353 | msg = "Fixup Perms: invalid config line %s" % line | ||
| 354 | oe.qa.handle_error("perm-config", msg, d) | ||
| 355 | self.path = None | ||
| 356 | self.link = None | ||
| 357 | |||
| 358 | def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid): | ||
| 359 | self.path = os.path.normpath(path) | ||
| 360 | self.link = None | ||
| 361 | self.mode = self._procmode(mode) | ||
| 362 | self.uid = self._procuid(uid) | ||
| 363 | self.gid = self._procgid(gid) | ||
| 364 | self.walk = walk.lower() | ||
| 365 | self.fmode = self._procmode(fmode) | ||
| 366 | self.fuid = self._procuid(fuid) | ||
| 367 | self.fgid = self._procgid(fgid) | ||
| 368 | |||
| 369 | def _setlink(self, path, link): | ||
| 370 | self.path = os.path.normpath(path) | ||
| 371 | self.link = link | ||
| 372 | |||
| 373 | def _procmode(self, mode): | ||
| 374 | if not mode or (mode and mode == "-"): | ||
| 375 | return None | ||
| 376 | else: | ||
| 377 | return int(mode,8) | ||
| 378 | |||
| 379 | # Note uid/gid -1 has special significance in os.lchown | ||
| 380 | def _procuid(self, uid): | ||
| 381 | if uid is None or uid == "-": | ||
| 382 | return -1 | ||
| 383 | elif uid.isdigit(): | ||
| 384 | return int(uid) | ||
| 385 | else: | ||
| 386 | return pwd.getpwnam(uid).pw_uid | ||
| 387 | |||
| 388 | def _procgid(self, gid): | ||
| 389 | if gid is None or gid == "-": | ||
| 390 | return -1 | ||
| 391 | elif gid.isdigit(): | ||
| 392 | return int(gid) | ||
| 393 | else: | ||
| 394 | return grp.getgrnam(gid).gr_gid | ||
| 395 | |||
| 396 | # Use for debugging the entries | ||
| 397 | def __str__(self): | ||
| 398 | if self.link: | ||
| 399 | return "%s link %s" % (self.path, self.link) | ||
| 400 | else: | ||
| 401 | mode = "-" | ||
| 402 | if self.mode: | ||
| 403 | mode = "0%o" % self.mode | ||
| 404 | fmode = "-" | ||
| 405 | if self.fmode: | ||
| 406 | fmode = "0%o" % self.fmode | ||
| 407 | uid = self._mapugid(self.uid) | ||
| 408 | gid = self._mapugid(self.gid) | ||
| 409 | fuid = self._mapugid(self.fuid) | ||
| 410 | fgid = self._mapugid(self.fgid) | ||
| 411 | return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid) | ||
| 412 | |||
| 413 | def _mapugid(self, id): | ||
| 414 | if id is None or id == -1: | ||
| 415 | return "-" | ||
| 416 | else: | ||
| 417 | return "%d" % id | ||
| 418 | |||
| 419 | # Fix the permission, owner and group of path | ||
| 420 | def fix_perms(path, mode, uid, gid, dir): | ||
| 421 | if mode and not os.path.islink(path): | ||
| 422 | #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir)) | ||
| 423 | os.chmod(path, mode) | ||
| 424 | # -1 is a special value that means don't change the uid/gid | ||
| 425 | # if they are BOTH -1, don't bother to lchown | ||
| 426 | if not (uid == -1 and gid == -1): | ||
| 427 | #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir)) | ||
| 428 | os.lchown(path, uid, gid) | ||
| 429 | |||
| 430 | # Return a list of configuration files based on either the default | ||
| 431 | # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES | ||
| 432 | # paths are resolved via BBPATH | ||
| 433 | def get_fs_perms_list(d): | ||
| 434 | str = "" | ||
| 435 | bbpath = d.getVar('BBPATH') | ||
| 436 | fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES') or "" | ||
| 437 | for conf_file in fs_perms_tables.split(): | ||
| 438 | confpath = bb.utils.which(bbpath, conf_file) | ||
| 439 | if confpath: | ||
| 440 | str += " %s" % bb.utils.which(bbpath, conf_file) | ||
| 441 | else: | ||
| 442 | bb.warn("cannot find %s specified in FILESYSTEM_PERMS_TABLES" % conf_file) | ||
| 443 | return str | ||
| 444 | |||
| 445 | fs_perms_table = {} | ||
| 446 | fs_link_table = {} | ||
| 447 | |||
| 448 | # By default all of the standard directories specified in | ||
| 449 | # bitbake.conf will get 0755 root:root. | ||
| 450 | target_path_vars = [ 'base_prefix', | ||
| 451 | 'prefix', | ||
| 452 | 'exec_prefix', | ||
| 453 | 'base_bindir', | ||
| 454 | 'base_sbindir', | ||
| 455 | 'base_libdir', | ||
| 456 | 'datadir', | ||
| 457 | 'sysconfdir', | ||
| 458 | 'servicedir', | ||
| 459 | 'sharedstatedir', | ||
| 460 | 'localstatedir', | ||
| 461 | 'infodir', | ||
| 462 | 'mandir', | ||
| 463 | 'docdir', | ||
| 464 | 'bindir', | ||
| 465 | 'sbindir', | ||
| 466 | 'libexecdir', | ||
| 467 | 'libdir', | ||
| 468 | 'includedir' ] | ||
| 469 | |||
| 470 | for path in target_path_vars: | ||
| 471 | dir = d.getVar(path) or "" | ||
| 472 | if dir == "": | ||
| 473 | continue | ||
| 474 | fs_perms_table[dir] = fs_perms_entry(d.expand("%s 0755 root root false - - -" % (dir))) | ||
| 475 | |||
| 476 | # Now we actually load from the configuration files | ||
| 477 | for conf in get_fs_perms_list(d).split(): | ||
| 478 | if not os.path.exists(conf): | ||
| 479 | continue | ||
| 480 | with open(conf) as f: | ||
| 481 | for line in f: | ||
| 482 | if line.startswith('#'): | ||
| 483 | continue | ||
| 484 | lsplit = line.split() | ||
| 485 | if len(lsplit) == 0: | ||
| 486 | continue | ||
| 487 | if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"): | ||
| 488 | msg = "Fixup perms: %s invalid line: %s" % (conf, line) | ||
| 489 | oe.qa.handle_error("perm-line", msg, d) | ||
| 490 | continue | ||
| 491 | entry = fs_perms_entry(d.expand(line)) | ||
| 492 | if entry and entry.path: | ||
| 493 | if entry.link: | ||
| 494 | fs_link_table[entry.path] = entry | ||
| 495 | if entry.path in fs_perms_table: | ||
| 496 | fs_perms_table.pop(entry.path) | ||
| 497 | else: | ||
| 498 | fs_perms_table[entry.path] = entry | ||
| 499 | if entry.path in fs_link_table: | ||
| 500 | fs_link_table.pop(entry.path) | ||
| 501 | |||
| 502 | # Debug -- list out in-memory table | ||
| 503 | #for dir in fs_perms_table: | ||
| 504 | # bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir]))) | ||
| 505 | #for link in fs_link_table: | ||
| 506 | # bb.note("Fixup Perms: %s: %s" % (link, str(fs_link_table[link]))) | ||
| 507 | |||
| 508 | # We process links first, so we can go back and fixup directory ownership | ||
| 509 | # for any newly created directories | ||
| 510 | # Process in sorted order so /run gets created before /run/lock, etc. | ||
| 511 | for entry in sorted(fs_link_table.values(), key=lambda x: x.link): | ||
| 512 | link = entry.link | ||
| 513 | dir = entry.path | ||
| 514 | origin = dvar + dir | ||
| 515 | if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)): | ||
| 516 | continue | ||
| 517 | |||
| 518 | if link[0] == "/": | ||
| 519 | target = dvar + link | ||
| 520 | ptarget = link | ||
| 521 | else: | ||
| 522 | target = os.path.join(os.path.dirname(origin), link) | ||
| 523 | ptarget = os.path.join(os.path.dirname(dir), link) | ||
| 524 | if os.path.exists(target): | ||
| 525 | msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget) | ||
| 526 | oe.qa.handle_error("perm-link", msg, d) | ||
| 527 | continue | ||
| 528 | |||
| 529 | # Create the path to move the directory to, move it, and then set up the symlink | ||
| 530 | bb.utils.mkdirhier(os.path.dirname(target)) | ||
| 531 | #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget)) | ||
| 532 | bb.utils.rename(origin, target) | ||
| 533 | #bb.note("Fixup Perms: Link %s -> %s" % (dir, link)) | ||
| 534 | os.symlink(link, origin) | ||
| 535 | |||
| 536 | for dir in fs_perms_table: | ||
| 537 | origin = dvar + dir | ||
| 538 | if not (cpath.exists(origin) and cpath.isdir(origin)): | ||
| 539 | continue | ||
| 540 | |||
| 541 | fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir) | ||
| 542 | |||
| 543 | if fs_perms_table[dir].walk == 'true': | ||
| 544 | for root, dirs, files in os.walk(origin): | ||
| 545 | for dr in dirs: | ||
| 546 | each_dir = os.path.join(root, dr) | ||
| 547 | fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir) | ||
| 548 | for f in files: | ||
| 549 | each_file = os.path.join(root, f) | ||
| 550 | fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir) | ||
| 551 | |||
| 552 | # Get a list of files from file vars by searching for files under the current working directory | ||
| 553 | # The list contains symlinks, directories and normal files. | ||
| 554 | def files_from_filevars(filevars): | ||
| 555 | cpath = oe.cachedpath.CachedPath() | ||
| 556 | files = [] | ||
| 557 | for f in filevars: | ||
| 558 | if os.path.isabs(f): | ||
| 559 | f = '.' + f | ||
| 560 | if not f.startswith("./"): | ||
| 561 | f = './' + f | ||
| 562 | globbed = glob.glob(f, recursive=True) | ||
| 563 | if globbed: | ||
| 564 | if [ f ] != globbed: | ||
| 565 | files += globbed | ||
| 566 | continue | ||
| 567 | files.append(f) | ||
| 568 | |||
| 569 | symlink_paths = [] | ||
| 570 | for ind, f in enumerate(files): | ||
| 571 | # Handle directory symlinks. Truncate path to the lowest level symlink | ||
| 572 | parent = '' | ||
| 573 | for dirname in f.split('/')[:-1]: | ||
| 574 | parent = os.path.join(parent, dirname) | ||
| 575 | if dirname == '.': | ||
| 576 | continue | ||
| 577 | if cpath.islink(parent): | ||
| 578 | bb.warn("FILES contains file '%s' which resides under a " | ||
| 579 | "directory symlink. Please fix the recipe and use the " | ||
| 580 | "real path for the file." % f[1:]) | ||
| 581 | symlink_paths.append(f) | ||
| 582 | files[ind] = parent | ||
| 583 | f = parent | ||
| 584 | break | ||
| 585 | |||
| 586 | if not cpath.islink(f): | ||
| 587 | if cpath.isdir(f): | ||
| 588 | newfiles = [ os.path.join(f,x) for x in os.listdir(f) ] | ||
| 589 | if newfiles: | ||
| 590 | files += newfiles | ||
| 591 | |||
| 592 | return files, symlink_paths | ||
| 593 | |||
| 594 | # Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files | ||
| 595 | def get_conffiles(pkg, d): | ||
| 596 | pkgdest = d.getVar('PKGDEST') | ||
| 597 | root = os.path.join(pkgdest, pkg) | ||
| 598 | cwd = os.getcwd() | ||
| 599 | os.chdir(root) | ||
| 600 | |||
| 601 | conffiles = d.getVar('CONFFILES:%s' % pkg) | ||
| 602 | if conffiles is None: | ||
| 603 | conffiles = d.getVar('CONFFILES') | ||
| 604 | if conffiles is None: | ||
| 605 | conffiles = "" | ||
| 606 | conffiles = conffiles.split() | ||
| 607 | conf_orig_list = files_from_filevars(conffiles)[0] | ||
| 608 | |||
| 609 | # Remove links and directories from conf_orig_list to get conf_list which only contains normal files | ||
| 610 | conf_list = [] | ||
| 611 | for f in conf_orig_list: | ||
| 612 | if os.path.isdir(f): | ||
| 613 | continue | ||
| 614 | if os.path.islink(f): | ||
| 615 | continue | ||
| 616 | if not os.path.exists(f): | ||
| 617 | continue | ||
| 618 | conf_list.append(f) | ||
| 619 | |||
| 620 | # Remove the leading './' | ||
| 621 | for i in range(0, len(conf_list)): | ||
| 622 | conf_list[i] = conf_list[i][1:] | ||
| 623 | |||
| 624 | os.chdir(cwd) | ||
| 625 | return sorted(conf_list) | ||
| 626 | |||
| 627 | def legitimize_package_name(s): | ||
| 628 | """ | ||
| 629 | Make sure package names are legitimate strings | ||
| 630 | """ | ||
| 631 | |||
| 632 | def fixutf(m): | ||
| 633 | cp = m.group(1) | ||
| 634 | if cp: | ||
| 635 | return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape') | ||
| 636 | |||
| 637 | # Handle unicode codepoints encoded as <U0123>, as in glibc locale files. | ||
| 638 | s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', fixutf, s) | ||
| 639 | |||
| 640 | # Remaining package name validity fixes | ||
| 641 | return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-') | ||
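A brief illustration (an editor-added sketch, not from the original tree) of the substitutions performed above, restated inline so it can run outside a bitbake environment:

import re

def _legitimize(s):
    # Minimal restatement of legitimize_package_name() above, for
    # illustration only.
    def fixutf(m):
        cp = m.group(1)
        return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape')
    s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', fixutf, s)
    return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')

print(_legitimize("en_GB.ISO-8859-1"))   # -> en-gb.iso-8859-1
print(_legitimize("aa_DJ@saaho"))        # -> aa-dj+saaho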
| 642 | |||
| 643 | def split_locales(d): | ||
| 644 | cpath = oe.cachedpath.CachedPath() | ||
| 645 | if (d.getVar('PACKAGE_NO_LOCALE') == '1'): | ||
| 646 | bb.debug(1, "package requested not splitting locales") | ||
| 647 | return | ||
| 648 | |||
| 649 | packages = (d.getVar('PACKAGES') or "").split() | ||
| 650 | |||
| 651 | dvar = d.getVar('PKGD') | ||
| 652 | pn = d.getVar('LOCALEBASEPN') | ||
| 653 | |||
| 654 | try: | ||
| 655 | locale_index = packages.index(pn + '-locale') | ||
| 656 | packages.pop(locale_index) | ||
| 657 | except ValueError: | ||
| 658 | locale_index = len(packages) | ||
| 659 | |||
| 660 | lic = d.getVar("LICENSE:" + pn + "-locale") | ||
| 661 | |||
| 662 | localepaths = [] | ||
| 663 | locales = set() | ||
| 664 | for localepath in (d.getVar('LOCALE_PATHS') or "").split(): | ||
| 665 | localedir = dvar + localepath | ||
| 666 | if not cpath.isdir(localedir): | ||
| 667 | bb.debug(1, 'No locale files in %s' % localepath) | ||
| 668 | continue | ||
| 669 | |||
| 670 | localepaths.append(localepath) | ||
| 671 | with os.scandir(localedir) as it: | ||
| 672 | for entry in it: | ||
| 673 | if entry.is_dir(): | ||
| 674 | locales.add(entry.name) | ||
| 675 | |||
| 676 | if len(locales) == 0: | ||
| 677 | bb.debug(1, "No locale files in this package") | ||
| 678 | return | ||
| 679 | |||
| 680 | summary = d.getVar('SUMMARY') or pn | ||
| 681 | description = d.getVar('DESCRIPTION') or "" | ||
| 682 | locale_section = d.getVar('LOCALE_SECTION') | ||
| 683 | mlprefix = d.getVar('MLPREFIX') or "" | ||
| 684 | for l in sorted(locales): | ||
| 685 | ln = legitimize_package_name(l) | ||
| 686 | pkg = pn + '-locale-' + ln | ||
| 687 | packages.insert(locale_index, pkg) | ||
| 688 | locale_index += 1 | ||
| 689 | files = [] | ||
| 690 | for localepath in localepaths: | ||
| 691 | files.append(os.path.join(localepath, l)) | ||
| 692 | d.setVar('FILES:' + pkg, " ".join(files)) | ||
| 693 | d.setVar('RRECOMMENDS:' + pkg, '%svirtual-locale-%s' % (mlprefix, ln)) | ||
| 694 | d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln)) | ||
| 695 | d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l)) | ||
| 696 | d.setVar('DESCRIPTION:' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l)) | ||
| 697 | if lic: | ||
| 698 | d.setVar('LICENSE:' + pkg, lic) | ||
| 699 | if locale_section: | ||
| 700 | d.setVar('SECTION:' + pkg, locale_section) | ||
| 701 | |||
| 702 | d.setVar('PACKAGES', ' '.join(packages)) | ||
| 703 | |||
| 704 | # Disabled by RP 18/06/07 | ||
| 705 | # Wildcards aren't supported in debian | ||
| 706 | # They break with ipkg since glibc-locale* will mean that | ||
| 707 | # glibc-localedata-translit* won't install as a dependency | ||
| 708 | # for some other package which breaks meta-toolchain | ||
| 709 | # Probably breaks since virtual-locale- isn't provided anywhere | ||
| 710 | #rdep = (d.getVar('RDEPENDS:%s' % pn) or "").split() | ||
| 711 | #rdep.append('%s-locale*' % pn) | ||
| 712 | #d.setVar('RDEPENDS:%s' % pn, ' '.join(rdep)) | ||
| 713 | |||
| 714 | def package_debug_vars(d): | ||
| 715 | # We default to '.debug' style | ||
| 716 | if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory': | ||
| 717 | # Single debug-file-directory style debug info | ||
| 718 | debug_vars = { | ||
| 719 | "append": ".debug", | ||
| 720 | "staticappend": "", | ||
| 721 | "dir": "", | ||
| 722 | "staticdir": "", | ||
| 723 | "libdir": "/usr/lib/debug", | ||
| 724 | "staticlibdir": "/usr/lib/debug-static", | ||
| 725 | "srcdir": "/usr/src/debug", | ||
| 726 | } | ||
| 727 | elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-without-src': | ||
| 728 | # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug | ||
| 729 | debug_vars = { | ||
| 730 | "append": "", | ||
| 731 | "staticappend": "", | ||
| 732 | "dir": "/.debug", | ||
| 733 | "staticdir": "/.debug-static", | ||
| 734 | "libdir": "", | ||
| 735 | "staticlibdir": "", | ||
| 736 | "srcdir": "", | ||
| 737 | } | ||
| 738 | elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg': | ||
| 739 | debug_vars = { | ||
| 740 | "append": "", | ||
| 741 | "staticappend": "", | ||
| 742 | "dir": "/.debug", | ||
| 743 | "staticdir": "/.debug-static", | ||
| 744 | "libdir": "", | ||
| 745 | "staticlibdir": "", | ||
| 746 | "srcdir": "/usr/src/debug", | ||
| 747 | } | ||
| 748 | else: | ||
| 749 | # Original OE-core, a.k.a. ".debug", style debug info | ||
| 750 | debug_vars = { | ||
| 751 | "append": "", | ||
| 752 | "staticappend": "", | ||
| 753 | "dir": "/.debug", | ||
| 754 | "staticdir": "/.debug-static", | ||
| 755 | "libdir": "", | ||
| 756 | "staticlibdir": "", | ||
| 757 | "srcdir": "/usr/src/debug", | ||
| 758 | } | ||
| 759 | |||
| 760 | return debug_vars | ||
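To make the effect of these settings concrete, here is a small editor-added sketch (values taken from the dictionaries above; the path arithmetic mirrors splitdebuginfo() further down) showing where the debug data for a hypothetical /usr/bin/foo would land under two of the styles:

import os

def debug_dest(src, dv):
    # Same construction as splitdebuginfo() below:
    # libdir + dirname(src) + dir + "/" + basename(src) + append
    return dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]

default_style = {"append": "", "dir": "/.debug", "libdir": ""}
debug_file_directory = {"append": ".debug", "dir": "", "libdir": "/usr/lib/debug"}

print(debug_dest("/usr/bin/foo", default_style))         # /usr/bin/.debug/foo
print(debug_dest("/usr/bin/foo", debug_file_directory))  # /usr/lib/debug/usr/bin/foo.debug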
| 761 | |||
| 762 | |||
| 763 | def parse_debugsources_from_dwarfsrcfiles_output(dwarfsrcfiles_output): | ||
| 764 | debugfiles = {} | ||
| 765 | |||
| 766 | for line in dwarfsrcfiles_output.splitlines(): | ||
| 767 | if line.startswith("\t"): | ||
| 768 | debugfiles[os.path.normpath(line.split()[0])] = "" | ||
| 769 | |||
| 770 | return debugfiles.keys() | ||
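As an editor-added illustration of the expected input: the parser above keeps the first token of every tab-indented line and normalises the path. The sample output below is invented, not captured from dwarfsrcfiles, and the snippet assumes the function above (and its os import) is in scope:

sample = (
    "/usr/bin/foo\n"
    "\t/usr/src/debug/foo/1.0/foo.c\n"
    "\t/usr/src/debug/foo/1.0/./util.c\n"
)
print(list(parse_debugsources_from_dwarfsrcfiles_output(sample)))
# -> ['/usr/src/debug/foo/1.0/foo.c', '/usr/src/debug/foo/1.0/util.c']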
| 771 | |||
| 772 | def source_info(file, d, fatal=True): | ||
| 773 | cmd = ["dwarfsrcfiles", file] | ||
| 774 | try: | ||
| 775 | output = subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.STDOUT) | ||
| 776 | retval = 0 | ||
| 777 | except subprocess.CalledProcessError as exc: | ||
| 778 | output = exc.output | ||
| 779 | retval = exc.returncode | ||
| 780 | |||
| 781 | # 255 means a specific file wasn't fully parsed to get the debug file list, which is not a fatal failure | ||
| 782 | if retval != 0 and retval != 255: | ||
| 783 | msg = "dwarfsrcfiles failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else "") | ||
| 784 | if fatal: | ||
| 785 | bb.fatal(msg) | ||
| 786 | bb.note(msg) | ||
| 787 | |||
| 788 | debugsources = parse_debugsources_from_dwarfsrcfiles_output(output) | ||
| 789 | |||
| 790 | return list(debugsources) | ||
| 791 | |||
| 792 | def splitdebuginfo(file, dvar, dv, d): | ||
| 793 | # Function to split a single file into two components, one is the stripped | ||
| 794 | # target system binary, the other contains any debugging information. The | ||
| 795 | # two files are linked to reference each other. | ||
| 796 | # | ||
| 797 | # return a tuple of (file, debugsources); callers build the file->sources mapping | ||
| 798 | |||
| 799 | src = file[len(dvar):] | ||
| 800 | dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"] | ||
| 801 | debugfile = dvar + dest | ||
| 802 | sources = [] | ||
| 803 | |||
| 804 | if file.endswith(".ko") and file.find("/lib/modules/") != -1: | ||
| 805 | if oe.package.is_kernel_module_signed(file): | ||
| 806 | bb.debug(1, "Skip strip on signed module %s" % file) | ||
| 807 | return (file, sources) | ||
| 808 | |||
| 809 | # Split the file... | ||
| 810 | bb.utils.mkdirhier(os.path.dirname(debugfile)) | ||
| 811 | #bb.note("Split %s -> %s" % (file, debugfile)) | ||
| 812 | # Only store off the hard link reference if we successfully split! | ||
| 813 | |||
| 814 | dvar = d.getVar('PKGD') | ||
| 815 | objcopy = d.getVar("OBJCOPY") | ||
| 816 | |||
| 817 | newmode = None | ||
| 818 | if not os.access(file, os.W_OK) or not os.access(file, os.R_OK): | ||
| 819 | origmode = os.stat(file)[stat.ST_MODE] | ||
| 820 | newmode = origmode | stat.S_IWRITE | stat.S_IREAD | ||
| 821 | os.chmod(file, newmode) | ||
| 822 | |||
| 823 | # We need to extract the debug src information here... | ||
| 824 | if dv["srcdir"]: | ||
| 825 | sources = source_info(file, d) | ||
| 826 | |||
| 827 | bb.utils.mkdirhier(os.path.dirname(debugfile)) | ||
| 828 | |||
| 829 | subprocess.check_output([objcopy, '--only-keep-debug', file, debugfile], stderr=subprocess.STDOUT) | ||
| 830 | |||
| 831 | # Set the debuglink to have the view of the file path on the target | ||
| 832 | subprocess.check_output([objcopy, '--add-gnu-debuglink', debugfile, file], stderr=subprocess.STDOUT) | ||
| 833 | |||
| 834 | if newmode: | ||
| 835 | os.chmod(file, origmode) | ||
| 836 | |||
| 837 | return (file, sources) | ||
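The core of the split is the two objcopy invocations above. As an editor-added sketch, the same sequence reduced to a standalone helper (the tool name and paths in the usage line are placeholders) would be:

import subprocess

def split_debug(objcopy, binary, debugfile):
    # 1) copy only the debug sections into a separate file
    subprocess.check_output([objcopy, '--only-keep-debug', binary, debugfile],
                            stderr=subprocess.STDOUT)
    # 2) record a .gnu_debuglink in the binary so debuggers can find
    #    the split-out file again
    subprocess.check_output([objcopy, '--add-gnu-debuglink', debugfile, binary],
                            stderr=subprocess.STDOUT)

# Hypothetical usage outside of bitbake:
# split_debug("objcopy", "./foo", "./foo.debug")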
| 838 | |||
| 839 | def splitstaticdebuginfo(file, dvar, dv, d): | ||
| 840 | # Unlike the function above, there is no way to split a static library | ||
| 841 | # into two components. So to get similar results we will copy the unmodified | ||
| 842 | # static library (containing the debug symbols) into a new directory. | ||
| 843 | # We will then strip (preserving symbols) the static library in the | ||
| 844 | # typical location. | ||
| 845 | # | ||
| 846 | # return a tuple of (file, debugsources); callers build the file->sources mapping | ||
| 847 | |||
| 848 | src = file[len(dvar):] | ||
| 849 | dest = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(src) + dv["staticappend"] | ||
| 850 | debugfile = dvar + dest | ||
| 851 | sources = [] | ||
| 852 | |||
| 853 | # Copy the file... | ||
| 854 | bb.utils.mkdirhier(os.path.dirname(debugfile)) | ||
| 855 | #bb.note("Copy %s -> %s" % (file, debugfile)) | ||
| 856 | |||
| 857 | dvar = d.getVar('PKGD') | ||
| 858 | |||
| 859 | newmode = None | ||
| 860 | if not os.access(file, os.W_OK) or not os.access(file, os.R_OK): | ||
| 861 | origmode = os.stat(file)[stat.ST_MODE] | ||
| 862 | newmode = origmode | stat.S_IWRITE | stat.S_IREAD | ||
| 863 | os.chmod(file, newmode) | ||
| 864 | |||
| 865 | # We need to extract the debug src information here... | ||
| 866 | if dv["srcdir"]: | ||
| 867 | sources = source_info(file, d) | ||
| 868 | |||
| 869 | bb.utils.mkdirhier(os.path.dirname(debugfile)) | ||
| 870 | |||
| 871 | # Copy the unmodified item to the debug directory | ||
| 872 | shutil.copy2(file, debugfile) | ||
| 873 | |||
| 874 | if newmode: | ||
| 875 | os.chmod(file, origmode) | ||
| 876 | |||
| 877 | return (file, sources) | ||
| 878 | |||
| 879 | def inject_minidebuginfo(file, dvar, dv, d): | ||
| 880 | # Extract just the symbols from debuginfo into minidebuginfo, | ||
| 881 | # compress it with xz and inject it back into the binary in a .gnu_debugdata section. | ||
| 882 | # https://sourceware.org/gdb/onlinedocs/gdb/MiniDebugInfo.html | ||
| 883 | |||
| 884 | readelf = d.getVar('READELF') | ||
| 885 | nm = d.getVar('NM') | ||
| 886 | objcopy = d.getVar('OBJCOPY') | ||
| 887 | |||
| 888 | minidebuginfodir = d.expand('${WORKDIR}/minidebuginfo') | ||
| 889 | |||
| 890 | src = file[len(dvar):] | ||
| 891 | dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"] | ||
| 892 | debugfile = dvar + dest | ||
| 893 | minidebugfile = minidebuginfodir + src + '.minidebug' | ||
| 894 | bb.utils.mkdirhier(os.path.dirname(minidebugfile)) | ||
| 895 | |||
| 896 | # If we didn't produce debuginfo for any reason, we can't produce minidebuginfo either | ||
| 897 | # so skip it. | ||
| 898 | if not os.path.exists(debugfile): | ||
| 899 | bb.debug(1, 'ELF file {} has no debuginfo, skipping minidebuginfo injection'.format(file)) | ||
| 900 | return | ||
| 901 | |||
| 902 | # minidebuginfo only makes sense for ELF executables and shared | ||
| 903 | # libraries, so skip minidebuginfo generation for other objects | ||
| 904 | # such as kernel modules. | ||
| 905 | for line in subprocess.check_output([readelf, '-h', debugfile], universal_newlines=True).splitlines(): | ||
| 906 | if not line.strip().startswith("Type:"): | ||
| 907 | continue | ||
| 908 | elftype = line.split(":")[1].strip() | ||
| 909 | if not any(elftype.startswith(i) for i in ["EXEC", "DYN"]): | ||
| 910 | bb.debug(1, 'ELF file {} is not executable/shared, skipping minidebuginfo injection'.format(file)) | ||
| 911 | return | ||
| 912 | break | ||
| 913 | |||
| 914 | # Find non-allocated PROGBITS, NOTE, and NOBITS sections in the debuginfo. | ||
| 915 | # We will exclude all of these from minidebuginfo to save space. | ||
| 916 | remove_section_names = [] | ||
| 917 | for line in subprocess.check_output([readelf, '-W', '-S', debugfile], universal_newlines=True).splitlines(): | ||
| 918 | # strip the leading " [ 1]" section index to allow splitting on space | ||
| 919 | if ']' not in line: | ||
| 920 | continue | ||
| 921 | fields = line[line.index(']') + 1:].split() | ||
| 922 | if len(fields) < 7: | ||
| 923 | continue | ||
| 924 | name = fields[0] | ||
| 925 | type = fields[1] | ||
| 926 | flags = fields[6] | ||
| 927 | # .debug_ sections will be removed by objcopy -S so no need to explicitly remove them | ||
| 928 | if name.startswith('.debug_'): | ||
| 929 | continue | ||
| 930 | if 'A' not in flags and type in ['PROGBITS', 'NOTE', 'NOBITS']: | ||
| 931 | remove_section_names.append(name) | ||
| 932 | |||
| 933 | # List dynamic symbols in the binary. We can exclude these from minidebuginfo | ||
| 934 | # because they are always present in the binary. | ||
| 935 | dynsyms = set() | ||
| 936 | for line in subprocess.check_output([nm, '-D', file, '--format=posix', '--defined-only'], universal_newlines=True).splitlines(): | ||
| 937 | dynsyms.add(line.split()[0]) | ||
| 938 | |||
| 939 | # Find all function symbols from debuginfo which aren't in the dynamic symbols table. | ||
| 940 | # These are the ones we want to keep in minidebuginfo. | ||
| 941 | keep_symbols_file = minidebugfile + '.symlist' | ||
| 942 | found_any_symbols = False | ||
| 943 | with open(keep_symbols_file, 'w') as f: | ||
| 944 | for line in subprocess.check_output([nm, debugfile, '--format=sysv', '--defined-only'], universal_newlines=True).splitlines(): | ||
| 945 | fields = line.split('|') | ||
| 946 | if len(fields) < 7: | ||
| 947 | continue | ||
| 948 | name = fields[0].strip() | ||
| 949 | type = fields[3].strip() | ||
| 950 | if type == 'FUNC' and name not in dynsyms: | ||
| 951 | f.write('{}\n'.format(name)) | ||
| 952 | found_any_symbols = True | ||
| 953 | |||
| 954 | if not found_any_symbols: | ||
| 955 | bb.debug(1, 'ELF file {} contains no symbols, skipping minidebuginfo injection'.format(file)) | ||
| 956 | return | ||
| 957 | |||
| 958 | bb.utils.remove(minidebugfile) | ||
| 959 | bb.utils.remove(minidebugfile + '.xz') | ||
| 960 | |||
| 961 | subprocess.check_call([objcopy, '-S'] + | ||
| 962 | ['--remove-section={}'.format(s) for s in remove_section_names] + | ||
| 963 | ['--keep-symbols={}'.format(keep_symbols_file), debugfile, minidebugfile]) | ||
| 964 | |||
| 965 | subprocess.check_call(['xz', '--keep', minidebugfile]) | ||
| 966 | |||
| 967 | subprocess.check_call([objcopy, '--add-section', '.gnu_debugdata={}.xz'.format(minidebugfile), file]) | ||
| 968 | |||
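A quick editor-added way to check that the injection above worked, assuming binutils' readelf is on PATH and /path/to/binary is a placeholder for a stripped binary from the package:

import subprocess

out = subprocess.check_output(['readelf', '-S', '-W', '/path/to/binary'],
                              universal_newlines=True)
print('.gnu_debugdata' in out)  # True if the minidebuginfo section was added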
| 969 | def copydebugsources(debugsrcdir, sources, d): | ||
| 970 | # The debug src information written out to sourcefile is further processed | ||
| 971 | # and copied to the destination here. | ||
| 972 | |||
| 973 | cpath = oe.cachedpath.CachedPath() | ||
| 974 | |||
| 975 | if debugsrcdir and sources: | ||
| 976 | sourcefile = d.expand("${WORKDIR}/debugsources.list") | ||
| 977 | bb.utils.remove(sourcefile) | ||
| 978 | |||
| 979 | # filenames are null-separated - this is an artefact of the previous use | ||
| 980 | # of rpm's debugedit, which was writing them out that way, and the code elsewhere | ||
| 981 | # is still assuming that. | ||
| 982 | debuglistoutput = '\0'.join(sources) + '\0' | ||
| 983 | with open(sourcefile, 'a') as sf: | ||
| 984 | sf.write(debuglistoutput) | ||
| 985 | |||
| 986 | dvar = d.getVar('PKGD') | ||
| 987 | strip = d.getVar("STRIP") | ||
| 988 | objcopy = d.getVar("OBJCOPY") | ||
| 989 | workdir = d.getVar("WORKDIR") | ||
| 990 | sdir = d.getVar("S") | ||
| 991 | cflags = d.expand("${CFLAGS}") | ||
| 992 | |||
| 993 | prefixmap = {} | ||
| 994 | for flag in cflags.split(): | ||
| 995 | if not flag.startswith("-ffile-prefix-map"): | ||
| 996 | continue | ||
| 997 | if "recipe-sysroot" in flag: | ||
| 998 | continue | ||
| 999 | flag = flag.split("=") | ||
| 1000 | prefixmap[flag[1]] = flag[2] | ||
| 1001 | |||
| 1002 | nosuchdir = [] | ||
| 1003 | basepath = dvar | ||
| 1004 | for p in debugsrcdir.split("/"): | ||
| 1005 | basepath = basepath + "/" + p | ||
| 1006 | if not cpath.exists(basepath): | ||
| 1007 | nosuchdir.append(basepath) | ||
| 1008 | bb.utils.mkdirhier(basepath) | ||
| 1009 | cpath.updatecache(basepath) | ||
| 1010 | |||
| 1011 | for pmap in prefixmap: | ||
| 1012 | # Ignore files from the recipe sysroots (target and native) | ||
| 1013 | cmd = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | " % sourcefile | ||
| 1014 | # We need to ignore files that are not actually ours | ||
| 1015 | # we do this by only paying attention to items from this package | ||
| 1016 | cmd += "fgrep -zw '%s' | " % prefixmap[pmap] | ||
| 1017 | # Remove prefix in the source paths | ||
| 1018 | cmd += "sed 's#%s/##g' | " % (prefixmap[pmap]) | ||
| 1019 | cmd += "(cd '%s' ; cpio -pd0mlLu --no-preserve-owner '%s%s' 2>/dev/null)" % (pmap, dvar, prefixmap[pmap]) | ||
| 1020 | |||
| 1021 | try: | ||
| 1022 | subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) | ||
| 1023 | except subprocess.CalledProcessError: | ||
| 1024 | # Can "fail" if internal headers/transient sources are attempted | ||
| 1025 | pass | ||
| 1026 | # cpio seems to have a bug when -l and -L are used together: symbolic links are just copied, not dereferenced. | ||
| 1027 | # Work around this by manually finding and copying any symbolic links that made it through. | ||
| 1028 | cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s')" % \ | ||
| 1029 | (dvar, prefixmap[pmap], dvar, prefixmap[pmap], pmap, dvar, prefixmap[pmap]) | ||
| 1030 | subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) | ||
| 1031 | |||
| 1032 | # debugsources.list may be polluted from the host if we used externalsrc, | ||
| 1033 | # cpio uses copy-pass and may have just created a directory structure | ||
| 1034 | # matching the one from the host; if that's the case, move those files to | ||
| 1035 | # debugsrcdir to avoid host contamination. | ||
| 1036 | # Empty dir structure will be deleted in the next step. | ||
| 1037 | |||
| 1038 | # Same check as above for externalsrc | ||
| 1039 | if workdir not in sdir: | ||
| 1040 | if os.path.exists(dvar + debugsrcdir + sdir): | ||
| 1041 | cmd = "mv %s%s%s/* %s%s" % (dvar, debugsrcdir, sdir, dvar,debugsrcdir) | ||
| 1042 | subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) | ||
| 1043 | |||
| 1044 | # The copy by cpio may have resulted in some empty directories! Remove these | ||
| 1045 | cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir) | ||
| 1046 | subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) | ||
| 1047 | |||
| 1048 | # Also remove debugsrcdir if it's empty | ||
| 1049 | for p in nosuchdir[::-1]: | ||
| 1050 | if os.path.exists(p) and not os.listdir(p): | ||
| 1051 | os.rmdir(p) | ||
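copydebugsources() above builds prefixmap by splitting each -ffile-prefix-map flag from CFLAGS on '='. A tiny editor-added example with a made-up flag value:

flag = "-ffile-prefix-map=/work/foo/1.0/sources=/usr/src/debug/foo/1.0"
_, build_path, target_path = flag.split("=")
print(build_path, "->", target_path)
# /work/foo/1.0/sources -> /usr/src/debug/foo/1.0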
| 1052 | |||
| 1053 | @bb.parse.vardepsexclude("BB_NUMBER_THREADS") | ||
| 1054 | def save_debugsources_info(debugsrcdir, sources_raw, d): | ||
| 1055 | import json | ||
| 1056 | import bb.compress.zstd | ||
| 1057 | if debugsrcdir and sources_raw: | ||
| 1058 | debugsources_file = d.expand("${PKGDESTWORK}/debugsources/${PN}-debugsources.json.zstd") | ||
| 1059 | debugsources_dir = os.path.dirname(debugsources_file) | ||
| 1060 | if not os.path.isdir(debugsources_dir): | ||
| 1061 | bb.utils.mkdirhier(debugsources_dir) | ||
| 1062 | bb.utils.remove(debugsources_file) | ||
| 1063 | |||
| 1064 | workdir = d.getVar("WORKDIR") | ||
| 1065 | pn = d.getVar('PN') | ||
| 1066 | |||
| 1067 | # Kernel sources live in a different directory and are a special case: | ||
| 1068 | # format the sources as expected by SPDX by replacing the /usr/src/kernel/ | ||
| 1069 | # prefix with BP/ | ||
| 1070 | kernel_src = d.getVar('KERNEL_SRC_PATH') | ||
| 1071 | bp = d.getVar('BP') | ||
| 1072 | sources_dict = {} | ||
| 1073 | for file, src_files in sources_raw: | ||
| 1074 | file_clean = file.replace(f"{workdir}/package/","") | ||
| 1075 | sources_clean = [ | ||
| 1076 | src.replace(f"{debugsrcdir}/{pn}/", "") | ||
| 1077 | if not kernel_src else src.replace(f"{kernel_src}/", f"{bp}/") | ||
| 1078 | for src in src_files | ||
| 1079 | if not any(keyword in src for keyword in ("<internal>", "<built-in>")) and not src.endswith("/") | ||
| 1080 | ] | ||
| 1081 | sources_dict[file_clean] = sorted(sources_clean) | ||
| 1082 | num_threads = int(d.getVar("BB_NUMBER_THREADS")) | ||
| 1083 | with bb.compress.zstd.open(debugsources_file, "wt", encoding="utf-8", num_threads=num_threads) as f: | ||
| 1084 | json.dump(sources_dict, f, sort_keys=True) | ||
| 1085 | |||
| 1086 | @bb.parse.vardepsexclude("BB_NUMBER_THREADS") | ||
| 1087 | def read_debugsources_info(d): | ||
| 1088 | import json | ||
| 1089 | import bb.compress.zstd | ||
| 1090 | try: | ||
| 1091 | fn = d.expand("${PKGDESTWORK}/debugsources/${PN}-debugsources.json.zstd") | ||
| 1092 | num_threads = int(d.getVar("BB_NUMBER_THREADS")) | ||
| 1093 | with bb.compress.zstd.open(fn, "rt", encoding="utf-8", num_threads=num_threads) as f: | ||
| 1094 | return json.load(f) | ||
| 1095 | except FileNotFoundError: | ||
| 1096 | bb.debug(1, f"File not found: {fn}") | ||
| 1097 | return None | ||
| 1098 | |||
| 1099 | def process_split_and_strip_files(d): | ||
| 1100 | cpath = oe.cachedpath.CachedPath() | ||
| 1101 | |||
| 1102 | dvar = d.getVar('PKGD') | ||
| 1103 | pn = d.getVar('PN') | ||
| 1104 | hostos = d.getVar('HOST_OS') | ||
| 1105 | |||
| 1106 | oldcwd = os.getcwd() | ||
| 1107 | os.chdir(dvar) | ||
| 1108 | |||
| 1109 | dv = package_debug_vars(d) | ||
| 1110 | |||
| 1111 | # | ||
| 1112 | # First lets figure out all of the files we may have to process ... do this only once! | ||
| 1113 | # | ||
| 1114 | elffiles = {} | ||
| 1115 | symlinks = {} | ||
| 1116 | staticlibs = [] | ||
| 1117 | inodes = {} | ||
| 1118 | libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir")) | ||
| 1119 | baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir")) | ||
| 1120 | skipfiles = (d.getVar("INHIBIT_PACKAGE_STRIP_FILES") or "").split() | ||
| 1121 | if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \ | ||
| 1122 | d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'): | ||
| 1123 | checkelf = {} | ||
| 1124 | checkelflinks = {} | ||
| 1125 | checkstatic = {} | ||
| 1126 | for root, dirs, files in cpath.walk(dvar): | ||
| 1127 | for f in files: | ||
| 1128 | file = os.path.join(root, f) | ||
| 1129 | |||
| 1130 | # Skip debug files | ||
| 1131 | if dv["append"] and file.endswith(dv["append"]): | ||
| 1132 | continue | ||
| 1133 | if dv["dir"] and dv["dir"] in os.path.dirname(file[len(dvar):]): | ||
| 1134 | continue | ||
| 1135 | |||
| 1136 | if file in skipfiles: | ||
| 1137 | continue | ||
| 1138 | |||
| 1139 | try: | ||
| 1140 | ltarget = cpath.realpath(file, dvar, False) | ||
| 1141 | s = cpath.lstat(ltarget) | ||
| 1142 | except OSError as e: | ||
| 1143 | (err, strerror) = e.args | ||
| 1144 | if err != errno.ENOENT: | ||
| 1145 | raise | ||
| 1146 | # Skip broken symlinks | ||
| 1147 | continue | ||
| 1148 | if not s: | ||
| 1149 | continue | ||
| 1150 | |||
| 1151 | if oe.package.is_static_lib(file): | ||
| 1152 | # Use a reference of device ID and inode number to identify files | ||
| 1153 | file_reference = "%d_%d" % (s.st_dev, s.st_ino) | ||
| 1154 | checkstatic[file] = (file, file_reference) | ||
| 1155 | continue | ||
| 1156 | |||
| 1157 | # Check if it's an executable | ||
| 1158 | if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \ | ||
| 1159 | or (s[stat.ST_MODE] & stat.S_IXOTH) \ | ||
| 1160 | or ((file.startswith(libdir) or file.startswith(baselibdir)) \ | ||
| 1161 | and (".so" in f or ".node" in f)) \ | ||
| 1162 | or (f.startswith('vmlinux') or ".ko" in f): | ||
| 1163 | |||
| 1164 | if cpath.islink(file): | ||
| 1165 | checkelflinks[file] = ltarget | ||
| 1166 | continue | ||
| 1167 | # Use a reference of device ID and inode number to identify files | ||
| 1168 | file_reference = "%d_%d" % (s.st_dev, s.st_ino) | ||
| 1169 | checkelf[file] = (file, file_reference) | ||
| 1170 | |||
| 1171 | results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelflinks.values(), d) | ||
| 1172 | results_map = {} | ||
| 1173 | for (ltarget, elf_file) in results: | ||
| 1174 | results_map[ltarget] = elf_file | ||
| 1175 | for file in checkelflinks: | ||
| 1176 | ltarget = checkelflinks[file] | ||
| 1177 | # If it's a symlink, and points to an ELF file, we capture the readlink target | ||
| 1178 | if results_map[ltarget]: | ||
| 1179 | target = os.readlink(file) | ||
| 1180 | #bb.note("Sym: %s (%d)" % (ltarget, results_map[ltarget])) | ||
| 1181 | symlinks[file] = target | ||
| 1182 | |||
| 1183 | results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelf.keys(), d) | ||
| 1184 | |||
| 1185 | # Sort results by file path. This ensures that the files are always | ||
| 1186 | # processed in the same order, which is important to make sure builds | ||
| 1187 | # are reproducible when dealing with hardlinks | ||
| 1188 | results.sort(key=lambda x: x[0]) | ||
| 1189 | |||
| 1190 | for (file, elf_file) in results: | ||
| 1191 | # It's a file (or hardlink), not a link | ||
| 1192 | # ...but is it ELF, and is it already stripped? | ||
| 1193 | if elf_file & 1: | ||
| 1194 | if elf_file & 2: | ||
| 1195 | if 'already-stripped' in (d.getVar('INSANE_SKIP:' + pn) or "").split(): | ||
| 1196 | bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn)) | ||
| 1197 | else: | ||
| 1198 | msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn) | ||
| 1199 | oe.qa.handle_error("already-stripped", msg, d) | ||
| 1200 | continue | ||
| 1201 | |||
| 1202 | # At this point we have an unstripped elf file. We need to: | ||
| 1203 | # a) Make sure any file we strip is not hardlinked to anything else outside this tree | ||
| 1204 | # b) Only strip any hardlinked file once (no races) | ||
| 1205 | # c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks | ||
| 1206 | |||
| 1207 | # Use a reference of device ID and inode number to identify files | ||
| 1208 | file_reference = checkelf[file][1] | ||
| 1209 | if file_reference in inodes: | ||
| 1210 | os.unlink(file) | ||
| 1211 | os.link(inodes[file_reference][0], file) | ||
| 1212 | inodes[file_reference].append(file) | ||
| 1213 | else: | ||
| 1214 | inodes[file_reference] = [file] | ||
| 1215 | # break hardlink | ||
| 1216 | bb.utils.break_hardlinks(file) | ||
| 1217 | elffiles[file] = elf_file | ||
| 1218 | # We modified the file, so clear the cache | ||
| 1219 | cpath.updatecache(file) | ||
| 1220 | |||
| 1221 | # Do the same hardlink processing as above, but for static libraries | ||
| 1222 | results = list(checkstatic.keys()) | ||
| 1223 | |||
| 1224 | # As above, sort the results. | ||
| 1225 | results.sort(key=lambda x: x[0]) | ||
| 1226 | |||
| 1227 | for file in results: | ||
| 1228 | # Use a reference of device ID and inode number to identify files | ||
| 1229 | file_reference = checkstatic[file][1] | ||
| 1230 | if file_reference in inodes: | ||
| 1231 | os.unlink(file) | ||
| 1232 | os.link(inodes[file_reference][0], file) | ||
| 1233 | inodes[file_reference].append(file) | ||
| 1234 | else: | ||
| 1235 | inodes[file_reference] = [file] | ||
| 1236 | # break hardlink | ||
| 1237 | bb.utils.break_hardlinks(file) | ||
| 1238 | staticlibs.append(file) | ||
| 1239 | # We modified the file, so clear the cache | ||
| 1240 | cpath.updatecache(file) | ||
| 1241 | |||
| 1242 | def strip_pkgd_prefix(f): | ||
| 1243 | nonlocal dvar | ||
| 1244 | |||
| 1245 | if f.startswith(dvar): | ||
| 1246 | return f[len(dvar):] | ||
| 1247 | |||
| 1248 | return f | ||
| 1249 | |||
| 1250 | # | ||
| 1251 | # First lets process debug splitting | ||
| 1252 | # | ||
| 1253 | if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'): | ||
| 1254 | results = oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, dv, d)) | ||
| 1255 | |||
| 1256 | if dv["srcdir"] and not hostos.startswith("mingw"): | ||
| 1257 | if (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'): | ||
| 1258 | results = oe.utils.multiprocess_launch(splitstaticdebuginfo, staticlibs, d, extraargs=(dvar, dv, d)) | ||
| 1259 | else: | ||
| 1260 | for file in staticlibs: | ||
| 1261 | results.append( (file,source_info(file, d)) ) | ||
| 1262 | |||
| 1263 | d.setVar("PKGDEBUGSOURCES", {strip_pkgd_prefix(f): sorted(s) for f, s in results}) | ||
| 1264 | |||
| 1265 | sources = set() | ||
| 1266 | for r in results: | ||
| 1267 | sources.update(r[1]) | ||
| 1268 | |||
| 1269 | # Hardlink our debug symbols to the other hardlink copies | ||
| 1270 | for ref in inodes: | ||
| 1271 | if len(inodes[ref]) == 1: | ||
| 1272 | continue | ||
| 1273 | |||
| 1274 | target = inodes[ref][0][len(dvar):] | ||
| 1275 | for file in inodes[ref][1:]: | ||
| 1276 | src = file[len(dvar):] | ||
| 1277 | dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"] | ||
| 1278 | fpath = dvar + dest | ||
| 1279 | ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"] | ||
| 1280 | if os.access(ftarget, os.R_OK): | ||
| 1281 | bb.utils.mkdirhier(os.path.dirname(fpath)) | ||
| 1282 | # Only one hardlink of separated debug info file in each directory | ||
| 1283 | if not os.access(fpath, os.R_OK): | ||
| 1284 | #bb.note("Link %s -> %s" % (fpath, ftarget)) | ||
| 1285 | os.link(ftarget, fpath) | ||
| 1286 | elif (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'): | ||
| 1287 | deststatic = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(file) + dv["staticappend"] | ||
| 1288 | fpath = dvar + deststatic | ||
| 1289 | ftarget = dvar + dv["staticlibdir"] + os.path.dirname(target) + dv["staticdir"] + "/" + os.path.basename(target) + dv["staticappend"] | ||
| 1290 | if os.access(ftarget, os.R_OK): | ||
| 1291 | bb.utils.mkdirhier(os.path.dirname(fpath)) | ||
| 1292 | # Only one hardlink of separated debug info file in each directory | ||
| 1293 | if not os.access(fpath, os.R_OK): | ||
| 1294 | #bb.note("Link %s -> %s" % (fpath, ftarget)) | ||
| 1295 | os.link(ftarget, fpath) | ||
| 1296 | else: | ||
| 1297 | bb.note("Unable to find inode link target %s" % (target)) | ||
| 1298 | |||
| 1299 | # Create symlinks for all cases we were able to split symbols | ||
| 1300 | for file in symlinks: | ||
| 1301 | src = file[len(dvar):] | ||
| 1302 | dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"] | ||
| 1303 | fpath = dvar + dest | ||
| 1304 | # Skip it if the target doesn't exist | ||
| 1305 | try: | ||
| 1306 | s = os.stat(fpath) | ||
| 1307 | except OSError as e: | ||
| 1308 | (err, strerror) = e.args | ||
| 1309 | if err != errno.ENOENT: | ||
| 1310 | raise | ||
| 1311 | continue | ||
| 1312 | |||
| 1313 | ltarget = symlinks[file] | ||
| 1314 | lpath = os.path.dirname(ltarget) | ||
| 1315 | lbase = os.path.basename(ltarget) | ||
| 1316 | ftarget = "" | ||
| 1317 | if lpath and lpath != ".": | ||
| 1318 | ftarget += lpath + dv["dir"] + "/" | ||
| 1319 | ftarget += lbase + dv["append"] | ||
| 1320 | if lpath.startswith(".."): | ||
| 1321 | ftarget = os.path.join("..", ftarget) | ||
| 1322 | bb.utils.mkdirhier(os.path.dirname(fpath)) | ||
| 1323 | #bb.note("Symlink %s -> %s" % (fpath, ftarget)) | ||
| 1324 | os.symlink(ftarget, fpath) | ||
| 1325 | |||
| 1326 | # Process the dv["srcdir"] if requested... | ||
| 1327 | # This copies and places the referenced sources for later debugging... | ||
| 1328 | copydebugsources(dv["srcdir"], sources, d) | ||
| 1329 | |||
| 1330 | # Save source info to be accessible to other tasks | ||
| 1331 | save_debugsources_info(dv["srcdir"], results, d) | ||
| 1332 | # | ||
| 1333 | # End of debug splitting | ||
| 1334 | # | ||
| 1335 | |||
| 1336 | # | ||
| 1337 | # Now lets go back over things and strip them | ||
| 1338 | # | ||
| 1339 | if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'): | ||
| 1340 | strip = d.getVar("STRIP") | ||
| 1341 | sfiles = [] | ||
| 1342 | for file in elffiles: | ||
| 1343 | elf_file = int(elffiles[file]) | ||
| 1344 | #bb.note("Strip %s" % file) | ||
| 1345 | sfiles.append((file, elf_file, strip)) | ||
| 1346 | if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'): | ||
| 1347 | for f in staticlibs: | ||
| 1348 | sfiles.append((f, 16, strip)) | ||
| 1349 | |||
| 1350 | oe.utils.multiprocess_launch(oe.package.runstrip, sfiles, d) | ||
| 1351 | |||
| 1352 | # Build "minidebuginfo" and reinject it back into the stripped binaries | ||
| 1353 | if bb.utils.contains('DISTRO_FEATURES', 'minidebuginfo', True, False, d): | ||
| 1354 | oe.utils.multiprocess_launch(inject_minidebuginfo, list(elffiles), d, | ||
| 1355 | extraargs=(dvar, dv, d)) | ||
| 1356 | |||
| 1357 | # | ||
| 1358 | # End of strip | ||
| 1359 | # | ||
| 1360 | os.chdir(oldcwd) | ||
| 1361 | |||
| 1362 | |||
| 1363 | def populate_packages(d): | ||
| 1364 | cpath = oe.cachedpath.CachedPath() | ||
| 1365 | |||
| 1366 | workdir = d.getVar('WORKDIR') | ||
| 1367 | outdir = d.getVar('DEPLOY_DIR') | ||
| 1368 | dvar = d.getVar('PKGD') | ||
| 1369 | packages = d.getVar('PACKAGES').split() | ||
| 1370 | pn = d.getVar('PN') | ||
| 1371 | |||
| 1372 | bb.utils.mkdirhier(outdir) | ||
| 1373 | os.chdir(dvar) | ||
| 1374 | |||
| 1375 | autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False) | ||
| 1376 | |||
| 1377 | split_source_package = (d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg') | ||
| 1378 | |||
| 1379 | # If debug-with-srcpkg mode is enabled then add the source package if it | ||
| 1380 | # doesn't exist and add the source file contents to the source package. | ||
| 1381 | if split_source_package: | ||
| 1382 | src_package_name = ('%s-src' % d.getVar('PN')) | ||
| 1383 | if src_package_name not in packages: | ||
| 1384 | packages.append(src_package_name) | ||
| 1385 | d.setVar('FILES:%s' % src_package_name, '/usr/src/debug') | ||
| 1386 | |||
| 1387 | # Sanity check PACKAGES for duplicates | ||
| 1388 | # Sanity should be moved to sanity.bbclass once we have the infrastructure | ||
| 1389 | package_dict = {} | ||
| 1390 | |||
| 1391 | for i, pkg in enumerate(packages): | ||
| 1392 | if pkg in package_dict: | ||
| 1393 | msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg | ||
| 1394 | oe.qa.handle_error("packages-list", msg, d) | ||
| 1395 | # Ensure the source package gets the chance to pick up the source files | ||
| 1396 | # before the debug package by ordering it first in PACKAGES. Whether it | ||
| 1397 | # actually picks up any source files is controlled by | ||
| 1398 | # PACKAGE_DEBUG_SPLIT_STYLE. | ||
| 1399 | elif pkg.endswith("-src"): | ||
| 1400 | package_dict[pkg] = (10, i) | ||
| 1401 | elif autodebug and pkg.endswith("-dbg"): | ||
| 1402 | package_dict[pkg] = (30, i) | ||
| 1403 | else: | ||
| 1404 | package_dict[pkg] = (50, i) | ||
| 1405 | packages = sorted(package_dict.keys(), key=package_dict.get) | ||
| 1406 | d.setVar('PACKAGES', ' '.join(packages)) | ||
| 1407 | pkgdest = d.getVar('PKGDEST') | ||
| 1408 | |||
| 1409 | seen = [] | ||
| 1410 | |||
| 1411 | # os.mkdir masks the permissions with umask so we have to unset it first | ||
| 1412 | oldumask = os.umask(0) | ||
| 1413 | |||
| 1414 | debug = [] | ||
| 1415 | for root, dirs, files in cpath.walk(dvar): | ||
| 1416 | dir = root[len(dvar):] | ||
| 1417 | if not dir: | ||
| 1418 | dir = os.sep | ||
| 1419 | for f in (files + dirs): | ||
| 1420 | path = "." + os.path.join(dir, f) | ||
| 1421 | if "/.debug/" in path or "/.debug-static/" in path or path.endswith("/.debug"): | ||
| 1422 | debug.append(path) | ||
| 1423 | |||
| 1424 | for pkg in packages: | ||
| 1425 | root = os.path.join(pkgdest, pkg) | ||
| 1426 | bb.utils.mkdirhier(root) | ||
| 1427 | |||
| 1428 | filesvar = d.getVar('FILES:%s' % pkg) or "" | ||
| 1429 | if "//" in filesvar: | ||
| 1430 | msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg | ||
| 1431 | oe.qa.handle_error("files-invalid", msg, d) | ||
| 1432 | filesvar = filesvar.replace("//", "/") | ||
| 1433 | |||
| 1434 | origfiles = filesvar.split() | ||
| 1435 | files, symlink_paths = oe.package.files_from_filevars(origfiles) | ||
| 1436 | |||
| 1437 | if autodebug and pkg.endswith("-dbg"): | ||
| 1438 | files.extend(debug) | ||
| 1439 | |||
| 1440 | for file in files: | ||
| 1441 | if (not cpath.islink(file)) and (not cpath.exists(file)): | ||
| 1442 | continue | ||
| 1443 | if file in seen: | ||
| 1444 | continue | ||
| 1445 | seen.append(file) | ||
| 1446 | |||
| 1447 | def mkdir(src, dest, p): | ||
| 1448 | src = os.path.join(src, p) | ||
| 1449 | dest = os.path.join(dest, p) | ||
| 1450 | fstat = cpath.stat(src) | ||
| 1451 | os.mkdir(dest) | ||
| 1452 | os.chmod(dest, fstat.st_mode) | ||
| 1453 | os.chown(dest, fstat.st_uid, fstat.st_gid) | ||
| 1454 | if p not in seen: | ||
| 1455 | seen.append(p) | ||
| 1456 | cpath.updatecache(dest) | ||
| 1457 | |||
| 1458 | def mkdir_recurse(src, dest, paths): | ||
| 1459 | if cpath.exists(dest + '/' + paths): | ||
| 1460 | return | ||
| 1461 | while paths.startswith("./"): | ||
| 1462 | paths = paths[2:] | ||
| 1463 | p = "." | ||
| 1464 | for c in paths.split("/"): | ||
| 1465 | p = os.path.join(p, c) | ||
| 1466 | if not cpath.exists(os.path.join(dest, p)): | ||
| 1467 | mkdir(src, dest, p) | ||
| 1468 | |||
| 1469 | if cpath.isdir(file) and not cpath.islink(file): | ||
| 1470 | mkdir_recurse(dvar, root, file) | ||
| 1471 | continue | ||
| 1472 | |||
| 1473 | mkdir_recurse(dvar, root, os.path.dirname(file)) | ||
| 1474 | fpath = os.path.join(root,file) | ||
| 1475 | if not cpath.islink(file): | ||
| 1476 | os.link(file, fpath) | ||
| 1477 | continue | ||
| 1478 | ret = bb.utils.copyfile(file, fpath) | ||
| 1479 | if ret is False or ret == 0: | ||
| 1480 | bb.fatal("File population failed") | ||
| 1481 | |||
| 1482 | # Check if symlink paths exist | ||
| 1483 | for file in symlink_paths: | ||
| 1484 | if not os.path.exists(os.path.join(root,file)): | ||
| 1485 | bb.fatal("File '%s' cannot be packaged into '%s' because its " | ||
| 1486 | "parent directory structure does not exist. One of " | ||
| 1487 | "its parent directories is a symlink whose target " | ||
| 1488 | "directory is not included in the package." % | ||
| 1489 | (file, pkg)) | ||
| 1490 | |||
| 1491 | os.umask(oldumask) | ||
| 1492 | os.chdir(workdir) | ||
| 1493 | |||
| 1494 | # Handle excluding packages with incompatible licenses | ||
| 1495 | package_list = [] | ||
| 1496 | skipped_pkgs = oe.license.skip_incompatible_package_licenses(d, packages) | ||
| 1497 | for pkg in packages: | ||
| 1498 | if pkg in skipped_pkgs: | ||
| 1499 | msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, skipped_pkgs[pkg]) | ||
| 1500 | oe.qa.handle_error("incompatible-license", msg, d) | ||
| 1501 | else: | ||
| 1502 | package_list.append(pkg) | ||
| 1503 | d.setVar('PACKAGES', ' '.join(package_list)) | ||
| 1504 | |||
| 1505 | unshipped = [] | ||
| 1506 | for root, dirs, files in cpath.walk(dvar): | ||
| 1507 | dir = root[len(dvar):] | ||
| 1508 | if not dir: | ||
| 1509 | dir = os.sep | ||
| 1510 | for f in (files + dirs): | ||
| 1511 | path = os.path.join(dir, f) | ||
| 1512 | if ('.' + path) not in seen: | ||
| 1513 | unshipped.append(path) | ||
| 1514 | |||
| 1515 | if unshipped != []: | ||
| 1516 | msg = pn + ": Files/directories were installed but not shipped in any package:" | ||
| 1517 | if "installed-vs-shipped" in (d.getVar('INSANE_SKIP:' + pn) or "").split(): | ||
| 1518 | bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn) | ||
| 1519 | else: | ||
| 1520 | for f in unshipped: | ||
| 1521 | msg = msg + "\n " + f | ||
| 1522 | msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n" | ||
| 1523 | msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped)) | ||
| 1524 | oe.qa.handle_error("installed-vs-shipped", msg, d) | ||
| 1525 | |||
| 1526 | def process_fixsymlinks(pkgfiles, d): | ||
| 1527 | cpath = oe.cachedpath.CachedPath() | ||
| 1528 | pkgdest = d.getVar('PKGDEST') | ||
| 1529 | packages = d.getVar("PACKAGES", False).split() | ||
| 1530 | |||
| 1531 | dangling_links = {} | ||
| 1532 | pkg_files = {} | ||
| 1533 | for pkg in packages: | ||
| 1534 | dangling_links[pkg] = [] | ||
| 1535 | pkg_files[pkg] = [] | ||
| 1536 | inst_root = os.path.join(pkgdest, pkg) | ||
| 1537 | for path in pkgfiles[pkg]: | ||
| 1538 | rpath = path[len(inst_root):] | ||
| 1539 | pkg_files[pkg].append(rpath) | ||
| 1540 | rtarget = cpath.realpath(path, inst_root, True, assume_dir = True) | ||
| 1541 | if not cpath.lexists(rtarget): | ||
| 1542 | dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):])) | ||
| 1543 | |||
| 1544 | newrdepends = {} | ||
| 1545 | for pkg in dangling_links: | ||
| 1546 | for l in dangling_links[pkg]: | ||
| 1547 | found = False | ||
| 1548 | bb.debug(1, "%s contains dangling link %s" % (pkg, l)) | ||
| 1549 | for p in packages: | ||
| 1550 | if l in pkg_files[p]: | ||
| 1551 | found = True | ||
| 1552 | bb.debug(1, "target found in %s" % p) | ||
| 1553 | if p == pkg: | ||
| 1554 | break | ||
| 1555 | if pkg not in newrdepends: | ||
| 1556 | newrdepends[pkg] = [] | ||
| 1557 | newrdepends[pkg].append(p) | ||
| 1558 | break | ||
| 1559 | if not found: | ||
| 1560 | bb.note("%s contains dangling symlink to %s" % (pkg, l)) | ||
| 1561 | |||
| 1562 | for pkg in newrdepends: | ||
| 1563 | rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "") | ||
| 1564 | for p in newrdepends[pkg]: | ||
| 1565 | if p not in rdepends: | ||
| 1566 | rdepends[p] = [] | ||
| 1567 | d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False)) | ||
| 1568 | |||
| 1569 | def process_filedeps(pkgfiles, d): | ||
| 1570 | """ | ||
| 1571 | Collect perfile run-time dependency metadata | ||
| 1572 | Output: | ||
| 1573 | FILERPROVIDESFLIST:pkg - list of all files w/ deps | ||
| 1574 | FILERPROVIDES:filepath:pkg - per file dep | ||
| 1575 | |||
| 1576 | FILERDEPENDSFLIST:pkg - list of all files w/ deps | ||
| 1577 | FILERDEPENDS:filepath:pkg - per file dep | ||
| 1578 | """ | ||
| 1579 | if d.getVar('SKIP_FILEDEPS') == '1': | ||
| 1580 | return | ||
| 1581 | |||
| 1582 | pkgdest = d.getVar('PKGDEST') | ||
| 1583 | packages = d.getVar('PACKAGES') | ||
| 1584 | rpmdeps = d.getVar('RPMDEPS') | ||
| 1585 | |||
| 1586 | def chunks(files, n): | ||
| 1587 | return [files[i:i+n] for i in range(0, len(files), n)] | ||
| 1588 | |||
| 1589 | pkglist = [] | ||
| 1590 | for pkg in packages.split(): | ||
| 1591 | if d.getVar('SKIP_FILEDEPS:' + pkg) == '1': | ||
| 1592 | continue | ||
| 1593 | if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-') or pkg.endswith('-src'): | ||
| 1594 | continue | ||
| 1595 | for files in chunks(pkgfiles[pkg], 100): | ||
| 1596 | pkglist.append((pkg, files, rpmdeps, pkgdest)) | ||
| 1597 | |||
| 1598 | processed = oe.utils.multiprocess_launch(oe.package.filedeprunner, pkglist, d) | ||
| 1599 | |||
| 1600 | provides_files = {} | ||
| 1601 | requires_files = {} | ||
| 1602 | |||
| 1603 | for result in processed: | ||
| 1604 | (pkg, provides, requires) = result | ||
| 1605 | |||
| 1606 | if pkg not in provides_files: | ||
| 1607 | provides_files[pkg] = [] | ||
| 1608 | if pkg not in requires_files: | ||
| 1609 | requires_files[pkg] = [] | ||
| 1610 | |||
| 1611 | for file in sorted(provides): | ||
| 1612 | provides_files[pkg].append(file) | ||
| 1613 | key = "FILERPROVIDES:" + file + ":" + pkg | ||
| 1614 | d.appendVar(key, " " + " ".join(provides[file])) | ||
| 1615 | |||
| 1616 | for file in sorted(requires): | ||
| 1617 | requires_files[pkg].append(file) | ||
| 1618 | key = "FILERDEPENDS:" + file + ":" + pkg | ||
| 1619 | d.appendVar(key, " " + " ".join(requires[file])) | ||
| 1620 | |||
| 1621 | for pkg in requires_files: | ||
| 1622 | d.setVar("FILERDEPENDSFLIST:" + pkg, " ".join(sorted(requires_files[pkg]))) | ||
| 1623 | for pkg in provides_files: | ||
| 1624 | d.setVar("FILERPROVIDESFLIST:" + pkg, " ".join(sorted(provides_files[pkg]))) | ||
| 1625 | |||
| 1626 | def process_shlibs(pkgfiles, d): | ||
| 1627 | cpath = oe.cachedpath.CachedPath() | ||
| 1628 | |||
| 1629 | exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', False) | ||
| 1630 | if exclude_shlibs: | ||
| 1631 | bb.note("not generating shlibs") | ||
| 1632 | return | ||
| 1633 | |||
| 1634 | lib_re = re.compile(r"^.*\.so") | ||
| 1635 | libdir_re = re.compile(r".*/%s$" % d.getVar('baselib')) | ||
| 1636 | |||
| 1637 | packages = d.getVar('PACKAGES') | ||
| 1638 | |||
| 1639 | shlib_pkgs = [] | ||
| 1640 | exclusion_list = d.getVar("EXCLUDE_PACKAGES_FROM_SHLIBS") | ||
| 1641 | if exclusion_list: | ||
| 1642 | for pkg in packages.split(): | ||
| 1643 | if pkg not in exclusion_list.split(): | ||
| 1644 | shlib_pkgs.append(pkg) | ||
| 1645 | else: | ||
| 1646 | bb.note("not generating shlibs for %s" % pkg) | ||
| 1647 | else: | ||
| 1648 | shlib_pkgs = packages.split() | ||
| 1649 | |||
| 1650 | hostos = d.getVar('HOST_OS') | ||
| 1651 | |||
| 1652 | workdir = d.getVar('WORKDIR') | ||
| 1653 | |||
| 1654 | ver = d.getVar('PKGV') | ||
| 1655 | if not ver: | ||
| 1656 | msg = "PKGV not defined" | ||
| 1657 | oe.qa.handle_error("pkgv-undefined", msg, d) | ||
| 1658 | return | ||
| 1659 | |||
| 1660 | pkgdest = d.getVar('PKGDEST') | ||
| 1661 | |||
| 1662 | shlibswork_dir = d.getVar('SHLIBSWORKDIR') | ||
| 1663 | |||
| 1664 | def linux_so(file, pkg, pkgver, d): | ||
| 1665 | needs_ldconfig = False | ||
| 1666 | needed = set() | ||
| 1667 | sonames = set() | ||
| 1668 | ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '') | ||
| 1669 | cmd = d.getVar('OBJDUMP') + " -p " + shlex.quote(file) + " 2>/dev/null" | ||
| 1670 | fd = os.popen(cmd) | ||
| 1671 | lines = fd.readlines() | ||
| 1672 | fd.close() | ||
| 1673 | rpath = tuple() | ||
| 1674 | for l in lines: | ||
| 1675 | m = re.match(r"\s+RPATH\s+([^\s]*)", l) | ||
| 1676 | if m: | ||
| 1677 | rpaths = m.group(1).replace("$ORIGIN", ldir).split(":") | ||
| 1678 | rpath = tuple(map(os.path.normpath, rpaths)) | ||
| 1679 | for l in lines: | ||
| 1680 | m = re.match(r"\s+NEEDED\s+([^\s]*)", l) | ||
| 1681 | if m: | ||
| 1682 | dep = m.group(1) | ||
| 1683 | if dep not in needed: | ||
| 1684 | needed.add((dep, file, rpath)) | ||
| 1685 | m = re.match(r"\s+SONAME\s+([^\s]*)", l) | ||
| 1686 | if m: | ||
| 1687 | this_soname = m.group(1) | ||
| 1688 | prov = (this_soname, ldir, pkgver) | ||
| 1689 | if prov not in sonames: | ||
| 1690 | # if library is private (only used by package) then do not build shlib for it | ||
| 1691 | if not private_libs or len([i for i in private_libs if fnmatch.fnmatch(this_soname, i)]) == 0: | ||
| 1692 | sonames.add(prov) | ||
| 1693 | if libdir_re.match(os.path.dirname(file)): | ||
| 1694 | needs_ldconfig = True | ||
| 1695 | return (needs_ldconfig, needed, sonames) | ||
| 1696 | |||
| 1697 | def darwin_so(file, needed, sonames, pkgver): | ||
| 1698 | if not os.path.exists(file): | ||
| 1699 | return | ||
| 1700 | ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '') | ||
| 1701 | |||
| 1702 | def get_combinations(base): | ||
| 1703 | # | ||
| 1704 | # Given a base library name, find all combinations of this split by "." and "-" | ||
| 1705 | # | ||
| 1706 | combos = [] | ||
| 1707 | options = base.split(".") | ||
| 1708 | for i in range(1, len(options) + 1): | ||
| 1709 | combos.append(".".join(options[0:i])) | ||
| 1710 | options = base.split("-") | ||
| 1711 | for i in range(1, len(options) + 1): | ||
| 1712 | combos.append("-".join(options[0:i])) | ||
| 1713 | return combos | ||
| 1714 | |||
| 1715 | if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.endswith('-src'): | ||
| 1716 | # Drop suffix | ||
| 1717 | name = os.path.basename(file).rsplit(".",1)[0] | ||
| 1718 | # Find all combinations | ||
| 1719 | combos = get_combinations(name) | ||
| 1720 | for combo in combos: | ||
| 1721 | if combo not in sonames: | ||
| 1722 | prov = (combo, ldir, pkgver) | ||
| 1723 | sonames.add(prov) | ||
| 1724 | if file.endswith('.dylib') or file.endswith('.so'): | ||
| 1725 | rpath = [] | ||
| 1726 | p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) | ||
| 1727 | out, err = p.communicate() | ||
| 1728 | # If returned successfully, process stdout for results | ||
| 1729 | if p.returncode == 0: | ||
| 1730 | for l in out.split("\n"): | ||
| 1731 | l = l.strip() | ||
| 1732 | if l.startswith('path '): | ||
| 1733 | rpath.append(l.split()[1]) | ||
| 1734 | |||
| 1735 | p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) | ||
| 1736 | out, err = p.communicate() | ||
| 1737 | # If returned successfully, process stdout for results | ||
| 1738 | if p.returncode == 0: | ||
| 1739 | for l in out.split("\n"): | ||
| 1740 | l = l.strip() | ||
| 1741 | if not l or l.endswith(":"): | ||
| 1742 | continue | ||
| 1743 | if "is not an object file" in l: | ||
| 1744 | continue | ||
| 1745 | name = os.path.basename(l.split()[0]).rsplit(".", 1)[0] | ||
| 1746 | if name and name not in needed[pkg]: | ||
| 1747 | needed[pkg].add((name, file, tuple())) | ||
| 1748 | |||
| 1749 | def mingw_dll(file, needed, sonames, pkgver): | ||
| 1750 | if not os.path.exists(file): | ||
| 1751 | return | ||
| 1752 | |||
| 1753 | if file.endswith(".dll"): | ||
| 1754 | # assume all dlls are shared objects provided by the package | ||
| 1755 | sonames.add((os.path.basename(file), os.path.dirname(file).replace(pkgdest + "/" + pkg, ''), pkgver)) | ||
| 1756 | |||
| 1757 | if (file.endswith(".dll") or file.endswith(".exe")): | ||
| 1758 | # use objdump to search for "DLL Name: .*\.dll" | ||
| 1759 | p = subprocess.Popen([d.expand("${OBJDUMP}"), "-p", file], stdout=subprocess.PIPE, stderr=subprocess.PIPE) | ||
| 1760 | out, err = p.communicate() | ||
| 1761 | # process the output, grabbing all .dll names | ||
| 1762 | if p.returncode == 0: | ||
| 1763 | for m in re.finditer(r"DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE): | ||
| 1764 | dllname = m.group(1) | ||
| 1765 | if dllname: | ||
| 1766 | needed[pkg].add((dllname, file, tuple())) | ||
| 1767 | |||
| 1768 | needed = {} | ||
| 1769 | |||
| 1770 | shlib_provider = oe.package.read_shlib_providers(d) | ||
| 1771 | |||
| 1772 | for pkg in shlib_pkgs: | ||
| 1773 | private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or "" | ||
| 1774 | private_libs = private_libs.split() | ||
| 1775 | needs_ldconfig = False | ||
| 1776 | bb.debug(2, "calculating shlib provides for %s" % pkg) | ||
| 1777 | |||
| 1778 | pkgver = d.getVar('PKGV:' + pkg) | ||
| 1779 | if not pkgver: | ||
| 1780 | pkgver = d.getVar('PV_' + pkg) | ||
| 1781 | if not pkgver: | ||
| 1782 | pkgver = ver | ||
| 1783 | |||
| 1784 | needed[pkg] = set() | ||
| 1785 | sonames = set() | ||
| 1786 | linuxlist = [] | ||
| 1787 | for file in pkgfiles[pkg]: | ||
| 1788 | soname = None | ||
| 1789 | if cpath.islink(file): | ||
| 1790 | continue | ||
| 1791 | if hostos.startswith("darwin"): | ||
| 1792 | darwin_so(file, needed, sonames, pkgver) | ||
| 1793 | elif hostos.startswith("mingw"): | ||
| 1794 | mingw_dll(file, needed, sonames, pkgver) | ||
| 1795 | elif os.access(file, os.X_OK) or lib_re.match(file): | ||
| 1796 | linuxlist.append(file) | ||
| 1797 | |||
| 1798 | if linuxlist: | ||
| 1799 | results = oe.utils.multiprocess_launch(linux_so, linuxlist, d, extraargs=(pkg, pkgver, d)) | ||
| 1800 | for r in results: | ||
| 1801 | ldconfig = r[0] | ||
| 1802 | needed[pkg] |= r[1] | ||
| 1803 | sonames |= r[2] | ||
| 1804 | needs_ldconfig = needs_ldconfig or ldconfig | ||
| 1805 | |||
| 1806 | shlibs_file = os.path.join(shlibswork_dir, pkg + ".list") | ||
| 1807 | if len(sonames): | ||
| 1808 | with open(shlibs_file, 'w') as fd: | ||
| 1809 | for s in sorted(sonames): | ||
| 1810 | if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]: | ||
| 1811 | (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]] | ||
| 1812 | if old_pkg != pkg: | ||
| 1813 | bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver)) | ||
| 1814 | bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0])) | ||
| 1815 | fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n') | ||
| 1816 | if s[0] not in shlib_provider: | ||
| 1817 | shlib_provider[s[0]] = {} | ||
| 1818 | shlib_provider[s[0]][s[1]] = (pkg, pkgver) | ||
| 1819 | if needs_ldconfig: | ||
| 1820 | bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg) | ||
| 1821 | postinst = d.getVar('pkg_postinst:%s' % pkg) | ||
| 1822 | if not postinst: | ||
| 1823 | postinst = '#!/bin/sh\n' | ||
| 1824 | postinst += d.getVar('ldconfig_postinst_fragment') | ||
| 1825 | d.setVar('pkg_postinst:%s' % pkg, postinst) | ||
| 1826 | bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames)) | ||
| 1827 | |||
| 1828 | assumed_libs = d.getVar('ASSUME_SHLIBS') | ||
| 1829 | if assumed_libs: | ||
| 1830 | libdir = d.getVar("libdir") | ||
| 1831 | for e in assumed_libs.split(): | ||
| 1832 | l, dep_pkg = e.split(":") | ||
| 1833 | lib_ver = None | ||
| 1834 | dep_pkg = dep_pkg.rsplit("_", 1) | ||
| 1835 | if len(dep_pkg) == 2: | ||
| 1836 | lib_ver = dep_pkg[1] | ||
| 1837 | dep_pkg = dep_pkg[0] | ||
| 1838 | if l not in shlib_provider: | ||
| 1839 | shlib_provider[l] = {} | ||
| 1840 | shlib_provider[l][libdir] = (dep_pkg, lib_ver) | ||
| 1841 | |||
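A minimal standalone sketch of the ASSUME_SHLIBS parsing above, assuming an invented ASSUME_SHLIBS value in the documented 'library:package[_version]' form:

    assumed_libs = "libEGL.so.1:libegl-implementation libfoo.so.2:foo_1.2"
    libdir = "/usr/lib"
    shlib_provider = {}

    for e in assumed_libs.split():
        l, dep_pkg = e.split(":")
        lib_ver = None
        dep_pkg = dep_pkg.rsplit("_", 1)
        if len(dep_pkg) == 2:
            lib_ver = dep_pkg[1]
        dep_pkg = dep_pkg[0]
        shlib_provider.setdefault(l, {})[libdir] = (dep_pkg, lib_ver)

    # shlib_provider == {'libEGL.so.1': {'/usr/lib': ('libegl-implementation', None)},
    #                    'libfoo.so.2': {'/usr/lib': ('foo', '1.2')}}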
| 1842 | libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')] | ||
| 1843 | |||
| 1844 | for pkg in shlib_pkgs: | ||
| 1845 | bb.debug(2, "calculating shlib requirements for %s" % pkg) | ||
| 1846 | |||
| 1847 | private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or "" | ||
| 1848 | private_libs = private_libs.split() | ||
| 1849 | |||
| 1850 | deps = list() | ||
| 1851 | for n in needed[pkg]: | ||
| 1852 | # if n is in private libraries, don't try to search provider for it | ||
| 1853 | # this could cause a problem if some abc.bb provides a private | ||
| 1854 | # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on the system library libfoo.so.1, | ||
| 1855 | # but skipping it is still a better alternative than providing our own | ||
| 1856 | # version and then adding a runtime dependency on the same system library | ||
| 1857 | if private_libs and len([i for i in private_libs if fnmatch.fnmatch(n[0], i)]) > 0: | ||
| 1858 | bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0])) | ||
| 1859 | continue | ||
| 1860 | if n[0] in shlib_provider.keys(): | ||
| 1861 | shlib_provider_map = shlib_provider[n[0]] | ||
| 1862 | matches = set() | ||
| 1863 | for p in itertools.chain(list(n[2]), sorted(shlib_provider_map.keys()), libsearchpath): | ||
| 1864 | if p in shlib_provider_map: | ||
| 1865 | matches.add(p) | ||
| 1866 | if len(matches) > 1: | ||
| 1867 | matchpkgs = ', '.join([shlib_provider_map[match][0] for match in matches]) | ||
| 1868 | bb.error("%s: Multiple shlib providers for %s: %s (used by files: %s)" % (pkg, n[0], matchpkgs, n[1])) | ||
| 1869 | elif len(matches) == 1: | ||
| 1870 | (dep_pkg, ver_needed) = shlib_provider_map[matches.pop()] | ||
| 1871 | |||
| 1872 | bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1])) | ||
| 1873 | |||
| 1874 | if dep_pkg == pkg: | ||
| 1875 | continue | ||
| 1876 | |||
| 1877 | if ver_needed: | ||
| 1878 | dep = "%s (>= %s)" % (dep_pkg, ver_needed) | ||
| 1879 | else: | ||
| 1880 | dep = dep_pkg | ||
| 1881 | if not dep in deps: | ||
| 1882 | deps.append(dep) | ||
| 1883 | continue | ||
| 1884 | bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1])) | ||
| 1885 | |||
| 1886 | deps_file = os.path.join(pkgdest, pkg + ".shlibdeps") | ||
| 1887 | if os.path.exists(deps_file): | ||
| 1888 | os.remove(deps_file) | ||
| 1889 | if deps: | ||
| 1890 | with open(deps_file, 'w') as fd: | ||
| 1891 | for dep in sorted(deps): | ||
| 1892 | fd.write(dep + '\n') | ||
| 1893 | |||
| 1894 | def process_pkgconfig(pkgfiles, d): | ||
| 1895 | packages = d.getVar('PACKAGES') | ||
| 1896 | workdir = d.getVar('WORKDIR') | ||
| 1897 | pkgdest = d.getVar('PKGDEST') | ||
| 1898 | |||
| 1899 | shlibs_dirs = d.getVar('SHLIBSDIRS').split() | ||
| 1900 | shlibswork_dir = d.getVar('SHLIBSWORKDIR') | ||
| 1901 | |||
| 1902 | pc_re = re.compile(r'(.*)\.pc$') | ||
| 1903 | var_re = re.compile(r'(.*)=(.*)') | ||
| 1904 | field_re = re.compile(r'(.*): (.*)') | ||
| 1905 | |||
| 1906 | pkgconfig_provided = {} | ||
| 1907 | pkgconfig_needed = {} | ||
| 1908 | for pkg in packages.split(): | ||
| 1909 | pkgconfig_provided[pkg] = [] | ||
| 1910 | pkgconfig_needed[pkg] = [] | ||
| 1911 | for file in sorted(pkgfiles[pkg]): | ||
| 1912 | m = pc_re.match(file) | ||
| 1913 | if m: | ||
| 1914 | pd = bb.data.init() | ||
| 1915 | name = m.group(1) | ||
| 1916 | pkgconfig_provided[pkg].append(os.path.basename(name)) | ||
| 1917 | if not os.access(file, os.R_OK): | ||
| 1918 | continue | ||
| 1919 | with open(file, 'r') as f: | ||
| 1920 | lines = f.readlines() | ||
| 1921 | for l in lines: | ||
| 1922 | m = field_re.match(l) | ||
| 1923 | if m: | ||
| 1924 | hdr = m.group(1) | ||
| 1925 | exp = pd.expand(m.group(2)) | ||
| 1926 | if hdr == 'Requires' or hdr == 'Requires.private': | ||
| 1927 | pkgconfig_needed[pkg] += exp.replace(',', ' ').split() | ||
| 1928 | continue | ||
| 1929 | m = var_re.match(l) | ||
| 1930 | if m: | ||
| 1931 | name = m.group(1) | ||
| 1932 | val = m.group(2) | ||
| 1933 | pd.setVar(name, pd.expand(val)) | ||
| 1934 | |||
| 1935 | for pkg in packages.split(): | ||
| 1936 | pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist") | ||
| 1937 | if pkgconfig_provided[pkg] != []: | ||
| 1938 | with open(pkgs_file, 'w') as f: | ||
| 1939 | for p in sorted(pkgconfig_provided[pkg]): | ||
| 1940 | f.write('%s\n' % p) | ||
| 1941 | |||
| 1942 | # Go from least to most specific since the last one found wins | ||
| 1943 | for dir in reversed(shlibs_dirs): | ||
| 1944 | if not os.path.exists(dir): | ||
| 1945 | continue | ||
| 1946 | for file in sorted(os.listdir(dir)): | ||
| 1947 | m = re.match(r'^(.*)\.pclist$', file) | ||
| 1948 | if m: | ||
| 1949 | pkg = m.group(1) | ||
| 1950 | with open(os.path.join(dir, file)) as fd: | ||
| 1951 | lines = fd.readlines() | ||
| 1952 | pkgconfig_provided[pkg] = [] | ||
| 1953 | for l in lines: | ||
| 1954 | pkgconfig_provided[pkg].append(l.rstrip()) | ||
| 1955 | |||
| 1956 | for pkg in packages.split(): | ||
| 1957 | deps = [] | ||
| 1958 | for n in pkgconfig_needed[pkg]: | ||
| 1959 | found = False | ||
| 1960 | for k in pkgconfig_provided.keys(): | ||
| 1961 | if n in pkgconfig_provided[k]: | ||
| 1962 | if k != pkg and not (k in deps): | ||
| 1963 | deps.append(k) | ||
| 1964 | found = True | ||
| 1965 | if not found: | ||
| 1966 | bb.note("couldn't find pkgconfig module '%s' in any package" % n) | ||
| 1967 | deps_file = os.path.join(pkgdest, pkg + ".pcdeps") | ||
| 1968 | if len(deps): | ||
| 1969 | with open(deps_file, 'w') as fd: | ||
| 1970 | for dep in deps: | ||
| 1971 | fd.write(dep + '\n') | ||
| 1972 | |||
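To illustrate the .pc scanning in process_pkgconfig(), here is a standalone sketch that uses a plain dict in place of the bitbake datastore; the file content is invented:

    import re

    pc_text = """\
    prefix=/usr
    libdir=${prefix}/lib

    Name: foo
    Requires: glib-2.0, zlib
    Libs: -L${libdir} -lfoo
    """

    var_re = re.compile(r'(.*)=(.*)')
    field_re = re.compile(r'(.*): (.*)')
    variables = {}
    needed = []

    def expand(val):
        # Expand ${name} references using previously seen variable lines.
        return re.sub(r'\$\{(\w+)\}', lambda mm: variables.get(mm.group(1), ''), val)

    for line in pc_text.splitlines():
        m = field_re.match(line)
        if m:
            if m.group(1) in ('Requires', 'Requires.private'):
                needed += expand(m.group(2)).replace(',', ' ').split()
            continue
        m = var_re.match(line)
        if m:
            variables[m.group(1)] = expand(m.group(2))

    # needed == ['glib-2.0', 'zlib']; for a file named foo.pc, the owning package
    # would also be recorded as providing the pkg-config module "foo".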
| 1973 | def read_libdep_files(d): | ||
| 1974 | pkglibdeps = {} | ||
| 1975 | packages = d.getVar('PACKAGES').split() | ||
| 1976 | for pkg in packages: | ||
| 1977 | pkglibdeps[pkg] = {} | ||
| 1978 | for extension in ".shlibdeps", ".pcdeps", ".clilibdeps": | ||
| 1979 | depsfile = d.expand("${PKGDEST}/" + pkg + extension) | ||
| 1980 | if os.access(depsfile, os.R_OK): | ||
| 1981 | with open(depsfile) as fd: | ||
| 1982 | lines = fd.readlines() | ||
| 1983 | for l in lines: | ||
| 1984 | l.rstrip() | ||
| 1985 | deps = bb.utils.explode_dep_versions2(l) | ||
| 1986 | for dep in deps: | ||
| 1987 | if not dep in pkglibdeps[pkg]: | ||
| 1988 | pkglibdeps[pkg][dep] = deps[dep] | ||
| 1989 | return pkglibdeps | ||
| 1990 | |||
| 1991 | def process_depchains(pkgfiles, d): | ||
| 1992 | """ | ||
| 1993 | For a given set of prefix and postfix modifiers, make those packages | ||
| 1994 | RRECOMMENDS on the corresponding packages for its RDEPENDS. | ||
| 1995 | |||
| 1996 | Example: If package A depends upon package B, and A's .bb emits an | ||
| 1997 | A-dev package, this would make A-dev Recommends: B-dev. | ||
| 1998 | |||
| 1999 | If only one of a given suffix is specified, it will take the RRECOMMENDS | ||
| 2000 | based on the RDEPENDS of *all* other packages. If more than one of a given | ||
| 2001 | suffix is specified, it will only use the RDEPENDS of the single parent | ||
| 2002 | package. | ||
| 2003 | """ | ||
| 2004 | |||
| 2005 | packages = d.getVar('PACKAGES') | ||
| 2006 | postfixes = (d.getVar('DEPCHAIN_POST') or '').split() | ||
| 2007 | prefixes = (d.getVar('DEPCHAIN_PRE') or '').split() | ||
| 2008 | |||
| 2009 | def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d): | ||
| 2010 | |||
| 2011 | #bb.note('depends for %s is %s' % (base, depends)) | ||
| 2012 | rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "") | ||
| 2013 | |||
| 2014 | for depend in sorted(depends): | ||
| 2015 | if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'): | ||
| 2016 | #bb.note("Skipping %s" % depend) | ||
| 2017 | continue | ||
| 2018 | if depend.endswith('-dev'): | ||
| 2019 | depend = depend[:-4] | ||
| 2020 | if depend.endswith('-dbg'): | ||
| 2021 | depend = depend[:-4] | ||
| 2022 | pkgname = getname(depend, suffix) | ||
| 2023 | #bb.note("Adding %s for %s" % (pkgname, depend)) | ||
| 2024 | if pkgname not in rreclist and pkgname != pkg: | ||
| 2025 | rreclist[pkgname] = [] | ||
| 2026 | |||
| 2027 | #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist))) | ||
| 2028 | d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False)) | ||
| 2029 | |||
| 2030 | def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d): | ||
| 2031 | |||
| 2032 | #bb.note('rdepends for %s is %s' % (base, rdepends)) | ||
| 2033 | rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "") | ||
| 2034 | |||
| 2035 | for depend in sorted(rdepends): | ||
| 2036 | if depend.find('virtual-locale-') != -1: | ||
| 2037 | #bb.note("Skipping %s" % depend) | ||
| 2038 | continue | ||
| 2039 | if depend.endswith('-dev'): | ||
| 2040 | depend = depend[:-4] | ||
| 2041 | if depend.endswith('-dbg'): | ||
| 2042 | depend = depend[:-4] | ||
| 2043 | pkgname = getname(depend, suffix) | ||
| 2044 | #bb.note("Adding %s for %s" % (pkgname, depend)) | ||
| 2045 | if pkgname not in rreclist and pkgname != pkg: | ||
| 2046 | rreclist[pkgname] = [] | ||
| 2047 | |||
| 2048 | #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist))) | ||
| 2049 | d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False)) | ||
| 2050 | |||
| 2051 | def add_dep(list, dep): | ||
| 2052 | if dep not in list: | ||
| 2053 | list.append(dep) | ||
| 2054 | |||
| 2055 | depends = [] | ||
| 2056 | for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""): | ||
| 2057 | add_dep(depends, dep) | ||
| 2058 | |||
| 2059 | rdepends = [] | ||
| 2060 | for pkg in packages.split(): | ||
| 2061 | for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + pkg) or ""): | ||
| 2062 | add_dep(rdepends, dep) | ||
| 2063 | |||
| 2064 | #bb.note('rdepends is %s' % rdepends) | ||
| 2065 | |||
| 2066 | def post_getname(name, suffix): | ||
| 2067 | return '%s%s' % (name, suffix) | ||
| 2068 | def pre_getname(name, suffix): | ||
| 2069 | return '%s%s' % (suffix, name) | ||
| 2070 | |||
| 2071 | pkgs = {} | ||
| 2072 | for pkg in packages.split(): | ||
| 2073 | for postfix in postfixes: | ||
| 2074 | if pkg.endswith(postfix): | ||
| 2075 | if not postfix in pkgs: | ||
| 2076 | pkgs[postfix] = {} | ||
| 2077 | pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname) | ||
| 2078 | |||
| 2079 | for prefix in prefixes: | ||
| 2080 | if pkg.startswith(prefix): | ||
| 2081 | if not prefix in pkgs: | ||
| 2082 | pkgs[prefix] = {} | ||
| 2083 | pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname) | ||
| 2084 | |||
| 2085 | if "-dbg" in pkgs: | ||
| 2086 | pkglibdeps = read_libdep_files(d) | ||
| 2087 | pkglibdeplist = [] | ||
| 2088 | for pkg in pkglibdeps: | ||
| 2089 | for k in pkglibdeps[pkg]: | ||
| 2090 | add_dep(pkglibdeplist, k) | ||
| 2091 | dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d))) | ||
| 2092 | |||
| 2093 | for suffix in pkgs: | ||
| 2094 | for pkg in pkgs[suffix]: | ||
| 2095 | if d.getVarFlag('RRECOMMENDS:' + pkg, 'nodeprrecs'): | ||
| 2096 | continue | ||
| 2097 | (base, func) = pkgs[suffix][pkg] | ||
| 2098 | if suffix == "-dev": | ||
| 2099 | pkg_adddeprrecs(pkg, base, suffix, func, depends, d) | ||
| 2100 | elif suffix == "-dbg": | ||
| 2101 | if not dbgdefaultdeps: | ||
| 2102 | pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d) | ||
| 2103 | continue | ||
| 2104 | if len(pkgs[suffix]) == 1: | ||
| 2105 | pkg_addrrecs(pkg, base, suffix, func, rdepends, d) | ||
| 2106 | else: | ||
| 2107 | rdeps = [] | ||
| 2108 | for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + base) or ""): | ||
| 2109 | add_dep(rdeps, dep) | ||
| 2110 | pkg_addrrecs(pkg, base, suffix, func, rdeps, d) | ||
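To make the depchain naming in the docstring concrete, here is a small standalone sketch, assuming DEPCHAIN_POST contains "-dev" and an invented package "app" with invented runtime dependencies:

    def post_getname(name, suffix):
        return '%s%s' % (name, suffix)

    rdepends = ["libfoo", "libbar-dev"]   # invented RDEPENDS of package "app"
    suffix = "-dev"

    companions = []
    for depend in rdepends:
        if depend.endswith('-dev'):
            depend = depend[:-4]          # strip an existing -dev suffix first
        if depend.endswith('-dbg'):
            depend = depend[:-4]
        companions.append(post_getname(depend, suffix))

    # companions == ['libfoo-dev', 'libbar-dev']; these names are what the
    # "app-dev" package would gain as RRECOMMENDS.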
diff --git a/meta/lib/oe/package_manager/__init__.py b/meta/lib/oe/package_manager/__init__.py deleted file mode 100644 index 88bc5ab195..0000000000 --- a/meta/lib/oe/package_manager/__init__.py +++ /dev/null | |||
| @@ -1,582 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | from abc import ABCMeta, abstractmethod | ||
| 8 | import os | ||
| 9 | import glob | ||
| 10 | import subprocess | ||
| 11 | import shutil | ||
| 12 | import re | ||
| 13 | import collections | ||
| 14 | import bb | ||
| 15 | import tempfile | ||
| 16 | import oe.utils | ||
| 17 | import oe.path | ||
| 18 | import string | ||
| 19 | from oe.gpg_sign import get_signer | ||
| 20 | import oe.packagedata | ||
| 21 | import hashlib | ||
| 22 | import fnmatch | ||
| 23 | |||
| 24 | # this can be used by all PM backends to create the index files in parallel | ||
| 25 | def create_index(arg): | ||
| 26 | index_cmd = arg | ||
| 27 | |||
| 28 | bb.note("Executing '%s' ..." % index_cmd) | ||
| 29 | result = subprocess.check_output(index_cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8") | ||
| 30 | if result: | ||
| 31 | bb.note(result) | ||
| 32 | |||
| 33 | def opkg_query(cmd_output): | ||
| 34 | """ | ||
| 35 | This method parses the output from the package manager and returns | ||
| 36 | a dictionary with the information of the packages. This is used | ||
| 37 | when the packages are in deb or ipk format. | ||
| 38 | """ | ||
| 39 | verregex = re.compile(r' \([=<>]* [^ )]*\)') | ||
| 40 | output = dict() | ||
| 41 | pkg = "" | ||
| 42 | arch = "" | ||
| 43 | ver = "" | ||
| 44 | filename = "" | ||
| 45 | dep = [] | ||
| 46 | prov = [] | ||
| 47 | pkgarch = "" | ||
| 48 | for line in cmd_output.splitlines()+['']: | ||
| 49 | line = line.rstrip() | ||
| 50 | if ':' in line: | ||
| 51 | if line.startswith("Package: "): | ||
| 52 | pkg = line.split(": ")[1] | ||
| 53 | elif line.startswith("Architecture: "): | ||
| 54 | arch = line.split(": ")[1] | ||
| 55 | elif line.startswith("Version: "): | ||
| 56 | ver = line.split(": ")[1] | ||
| 57 | elif line.startswith("File: ") or line.startswith("Filename:"): | ||
| 58 | filename = line.split(": ")[1] | ||
| 59 | if "/" in filename: | ||
| 60 | filename = os.path.basename(filename) | ||
| 61 | elif line.startswith("Depends: "): | ||
| 62 | depends = verregex.sub('', line.split(": ")[1]) | ||
| 63 | for depend in depends.split(", "): | ||
| 64 | dep.append(depend) | ||
| 65 | elif line.startswith("Recommends: "): | ||
| 66 | recommends = verregex.sub('', line.split(": ")[1]) | ||
| 67 | for recommend in recommends.split(", "): | ||
| 68 | dep.append("%s [REC]" % recommend) | ||
| 69 | elif line.startswith("PackageArch: "): | ||
| 70 | pkgarch = line.split(": ")[1] | ||
| 71 | elif line.startswith("Provides: "): | ||
| 72 | provides = verregex.sub('', line.split(": ")[1]) | ||
| 73 | for provide in provides.split(", "): | ||
| 74 | prov.append(provide) | ||
| 75 | |||
| 76 | # When there is a blank line, save the package information | ||
| 77 | elif not line: | ||
| 78 | # IPK doesn't include the filename | ||
| 79 | if not filename: | ||
| 80 | filename = "%s_%s_%s.ipk" % (pkg, ver, arch) | ||
| 81 | if pkg: | ||
| 82 | output[pkg] = {"arch":arch, "ver":ver, | ||
| 83 | "filename":filename, "deps": dep, "pkgarch":pkgarch, "provs": prov} | ||
| 84 | pkg = "" | ||
| 85 | arch = "" | ||
| 86 | ver = "" | ||
| 87 | filename = "" | ||
| 88 | dep = [] | ||
| 89 | prov = [] | ||
| 90 | pkgarch = "" | ||
| 91 | |||
| 92 | return output | ||
| 93 | |||
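A hedged usage sketch of opkg_query(): the stanza below is invented, but follows the dpkg-query/opkg output layout the parser expects.

    sample = """\
    Package: busybox
    Architecture: cortexa57
    Version: 1.36.1-r0
    Depends: libc6 (>= 2.38), busybox-udhcpc
    Recommends: busybox-syslog
    """

    pkgs = opkg_query(sample)
    # pkgs["busybox"] == {
    #     "arch": "cortexa57",
    #     "ver": "1.36.1-r0",
    #     "filename": "busybox_1.36.1-r0_cortexa57.ipk",   # synthesised, no File: field
    #     "deps": ["libc6", "busybox-udhcpc", "busybox-syslog [REC]"],
    #     "pkgarch": "",
    #     "provs": [],
    # }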
| 94 | def failed_postinsts_abort(pkgs, log_path): | ||
| 95 | bb.fatal("""Postinstall scriptlets of %s have failed. If the intention is to defer them to first boot, | ||
| 96 | then please place them into pkg_postinst_ontarget:${PN} (). | ||
| 97 | Deferring to first boot via 'exit 1' is no longer supported. | ||
| 98 | Details of the failure are in %s.""" %(pkgs, log_path)) | ||
| 99 | |||
| 100 | def generate_locale_archive(d, rootfs, target_arch, localedir): | ||
| 101 | # Pretty sure we don't need this for locale archive generation but | ||
| 102 | # keeping it to be safe... | ||
| 103 | locale_arch_options = { \ | ||
| 104 | "arc": ["--uint32-align=4", "--little-endian"], | ||
| 105 | "arceb": ["--uint32-align=4", "--big-endian"], | ||
| 106 | "arm": ["--uint32-align=4", "--little-endian"], | ||
| 107 | "armeb": ["--uint32-align=4", "--big-endian"], | ||
| 108 | "aarch64": ["--uint32-align=4", "--little-endian"], | ||
| 109 | "aarch64_be": ["--uint32-align=4", "--big-endian"], | ||
| 110 | "sh4": ["--uint32-align=4", "--big-endian"], | ||
| 111 | "powerpc": ["--uint32-align=4", "--big-endian"], | ||
| 112 | "powerpc64": ["--uint32-align=4", "--big-endian"], | ||
| 113 | "powerpc64le": ["--uint32-align=4", "--little-endian"], | ||
| 114 | "mips": ["--uint32-align=4", "--big-endian"], | ||
| 115 | "mipsisa32r6": ["--uint32-align=4", "--big-endian"], | ||
| 116 | "mips64": ["--uint32-align=4", "--big-endian"], | ||
| 117 | "mipsisa64r6": ["--uint32-align=4", "--big-endian"], | ||
| 118 | "mipsel": ["--uint32-align=4", "--little-endian"], | ||
| 119 | "mipsisa32r6el": ["--uint32-align=4", "--little-endian"], | ||
| 120 | "mips64el": ["--uint32-align=4", "--little-endian"], | ||
| 121 | "mipsisa64r6el": ["--uint32-align=4", "--little-endian"], | ||
| 122 | "riscv64": ["--uint32-align=4", "--little-endian"], | ||
| 123 | "riscv32": ["--uint32-align=4", "--little-endian"], | ||
| 124 | "i586": ["--uint32-align=4", "--little-endian"], | ||
| 125 | "i686": ["--uint32-align=4", "--little-endian"], | ||
| 126 | "x86_64": ["--uint32-align=4", "--little-endian"], | ||
| 127 | "loongarch64": ["--uint32-align=4", "--little-endian"] | ||
| 128 | } | ||
| 129 | if target_arch in locale_arch_options: | ||
| 130 | arch_options = locale_arch_options[target_arch] | ||
| 131 | else: | ||
| 132 | bb.error("locale_arch_options not found for target_arch=" + target_arch) | ||
| 133 | bb.fatal("unknown arch:" + target_arch + " for locale_arch_options") | ||
| 134 | |||
| 135 | # Need to set this so cross-localedef knows where the archive is | ||
| 136 | env = dict(os.environ) | ||
| 137 | env["LOCALEARCHIVE"] = oe.path.join(localedir, "locale-archive") | ||
| 138 | |||
| 139 | for name in sorted(os.listdir(localedir)): | ||
| 140 | path = os.path.join(localedir, name) | ||
| 141 | if os.path.isdir(path): | ||
| 142 | cmd = ["cross-localedef", "--verbose"] | ||
| 143 | cmd += arch_options | ||
| 144 | cmd += ["--add-to-archive", path] | ||
| 145 | subprocess.check_output(cmd, env=env, stderr=subprocess.STDOUT) | ||
| 146 | |||
| 147 | class Indexer(object, metaclass=ABCMeta): | ||
| 148 | def __init__(self, d, deploy_dir): | ||
| 149 | self.d = d | ||
| 150 | self.deploy_dir = deploy_dir | ||
| 151 | |||
| 152 | @abstractmethod | ||
| 153 | def write_index(self): | ||
| 154 | pass | ||
| 155 | |||
| 156 | class PkgsList(object, metaclass=ABCMeta): | ||
| 157 | def __init__(self, d, rootfs_dir): | ||
| 158 | self.d = d | ||
| 159 | self.rootfs_dir = rootfs_dir | ||
| 160 | |||
| 161 | @abstractmethod | ||
| 162 | def list_pkgs(self): | ||
| 163 | pass | ||
| 164 | |||
| 165 | class PackageManager(object, metaclass=ABCMeta): | ||
| 166 | """ | ||
| 167 | This is an abstract class. Do not instantiate this directly. | ||
| 168 | """ | ||
| 169 | |||
| 170 | def __init__(self, d, target_rootfs): | ||
| 171 | self.d = d | ||
| 172 | self.target_rootfs = target_rootfs | ||
| 173 | self.deploy_dir = None | ||
| 174 | self.deploy_lock = None | ||
| 175 | self._initialize_intercepts() | ||
| 176 | |||
| 177 | def _initialize_intercepts(self): | ||
| 178 | bb.note("Initializing intercept dir for %s" % self.target_rootfs) | ||
| 179 | # As there might be more than one instance of PackageManager operating at the same time | ||
| 180 | # we need to isolate the intercept_scripts directories from each other, | ||
| 181 | # hence the ugly hash digest in dir name. | ||
| 182 | self.intercepts_dir = os.path.join(self.d.getVar('WORKDIR'), "intercept_scripts-%s" % | ||
| 183 | (hashlib.sha256(self.target_rootfs.encode()).hexdigest())) | ||
| 184 | |||
| 185 | postinst_intercepts = (self.d.getVar("POSTINST_INTERCEPTS") or "").split() | ||
| 186 | if not postinst_intercepts: | ||
| 187 | postinst_intercepts_path = self.d.getVar("POSTINST_INTERCEPTS_PATH") | ||
| 188 | if not postinst_intercepts_path: | ||
| 189 | postinst_intercepts_path = self.d.getVar("POSTINST_INTERCEPTS_DIR") or self.d.expand("${COREBASE}/scripts/postinst-intercepts") | ||
| 190 | postinst_intercepts = oe.path.which_wild('*', postinst_intercepts_path) | ||
| 191 | |||
| 192 | bb.debug(1, 'Collected intercepts:\n%s' % ''.join(' %s\n' % i for i in postinst_intercepts)) | ||
| 193 | bb.utils.remove(self.intercepts_dir, True) | ||
| 194 | bb.utils.mkdirhier(self.intercepts_dir) | ||
| 195 | for intercept in postinst_intercepts: | ||
| 196 | shutil.copy(intercept, os.path.join(self.intercepts_dir, os.path.basename(intercept))) | ||
| 197 | |||
| 198 | @abstractmethod | ||
| 199 | def _handle_intercept_failure(self, failed_script): | ||
| 200 | pass | ||
| 201 | |||
| 202 | def _postpone_to_first_boot(self, postinst_intercept_hook): | ||
| 203 | with open(postinst_intercept_hook) as intercept: | ||
| 204 | registered_pkgs = None | ||
| 205 | for line in intercept.read().split("\n"): | ||
| 206 | m = re.match(r"^##PKGS:(.*)", line) | ||
| 207 | if m is not None: | ||
| 208 | registered_pkgs = m.group(1).strip() | ||
| 209 | break | ||
| 210 | |||
| 211 | if registered_pkgs is not None: | ||
| 212 | bb.note("If an image is being built, the postinstalls for the following packages " | ||
| 213 | "will be postponed for first boot: %s" % | ||
| 214 | registered_pkgs) | ||
| 215 | |||
| 216 | # call the backend dependent handler | ||
| 217 | self._handle_intercept_failure(registered_pkgs) | ||
| 218 | |||
| 219 | |||
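The marker that _postpone_to_first_boot() scans for is a single "##PKGS:" comment line in the generated intercept. A small illustration with invented script contents:

    import re

    script_text = """\
    #!/bin/sh
    ##PKGS: pkg-a pkg-b
    """

    registered_pkgs = None
    for line in script_text.split("\n"):
        m = re.match(r"^##PKGS:(.*)", line)
        if m is not None:
            registered_pkgs = m.group(1).strip()
            break

    # registered_pkgs == 'pkg-a pkg-b'; these names are then handed to the
    # backend's _handle_intercept_failure() so their postinstalls run on first boot.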
| 220 | def run_intercepts(self, populate_sdk=None): | ||
| 221 | intercepts_dir = self.intercepts_dir | ||
| 222 | |||
| 223 | bb.note("Running intercept scripts:") | ||
| 224 | os.environ['D'] = self.target_rootfs | ||
| 225 | os.environ['STAGING_DIR_NATIVE'] = self.d.getVar('STAGING_DIR_NATIVE') | ||
| 226 | for script in os.listdir(intercepts_dir): | ||
| 227 | script_full = os.path.join(intercepts_dir, script) | ||
| 228 | |||
| 229 | if script == "postinst_intercept" or not os.access(script_full, os.X_OK): | ||
| 230 | continue | ||
| 231 | |||
| 232 | # we do not want to run any multilib variant of this | ||
| 233 | if script.startswith("delay_to_first_boot"): | ||
| 234 | self._postpone_to_first_boot(script_full) | ||
| 235 | continue | ||
| 236 | |||
| 237 | if populate_sdk == 'host' and self.d.getVar('SDK_OS') == 'mingw32': | ||
| 238 | bb.note("The postinstall intercept hook '%s' could not be executed due to missing wine support, details in %s/log.do_%s" | ||
| 239 | % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK'))) | ||
| 240 | continue | ||
| 241 | |||
| 242 | bb.note("> Executing %s intercept ..." % script) | ||
| 243 | |||
| 244 | try: | ||
| 245 | output = subprocess.check_output(script_full, stderr=subprocess.STDOUT) | ||
| 246 | if output: bb.note(output.decode("utf-8")) | ||
| 247 | except subprocess.CalledProcessError as e: | ||
| 248 | bb.note("Exit code %d. Output:\n%s" % (e.returncode, e.output.decode("utf-8"))) | ||
| 249 | if populate_sdk == 'host': | ||
| 250 | bb.fatal("The postinstall intercept hook '%s' failed, details in %s/log.do_%s" % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK'))) | ||
| 251 | elif populate_sdk == 'target': | ||
| 252 | if "qemuwrapper: qemu usermode is not supported" in e.output.decode("utf-8"): | ||
| 253 | bb.note("The postinstall intercept hook '%s' could not be executed due to missing qemu usermode support, details in %s/log.do_%s" | ||
| 254 | % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK'))) | ||
| 255 | else: | ||
| 256 | bb.fatal("The postinstall intercept hook '%s' failed, details in %s/log.do_%s" % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK'))) | ||
| 257 | else: | ||
| 258 | if "qemuwrapper: qemu usermode is not supported" in e.output.decode("utf-8"): | ||
| 259 | bb.note("The postinstall intercept hook '%s' could not be executed due to missing qemu usermode support, details in %s/log.do_%s" | ||
| 260 | % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK'))) | ||
| 261 | self._postpone_to_first_boot(script_full) | ||
| 262 | else: | ||
| 263 | bb.fatal("The postinstall intercept hook '%s' failed, details in %s/log.do_%s" % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK'))) | ||
| 264 | |||
| 265 | @abstractmethod | ||
| 266 | def update(self): | ||
| 267 | """ | ||
| 268 | Update the package manager package database. | ||
| 269 | """ | ||
| 270 | pass | ||
| 271 | |||
| 272 | @abstractmethod | ||
| 273 | def install(self, pkgs, attempt_only=False, hard_depends_only=False): | ||
| 274 | """ | ||
| 275 | Install a list of packages. 'pkgs' is a list object. If 'attempt_only' is | ||
| 276 | True, installation failures are ignored. | ||
| 277 | """ | ||
| 278 | pass | ||
| 279 | |||
| 280 | @abstractmethod | ||
| 281 | def remove(self, pkgs, with_dependencies=True): | ||
| 282 | """ | ||
| 283 | Remove a list of packages. 'pkgs' is a list object. If 'with_dependencies' | ||
| 284 | is False, then any dependencies are left in place. | ||
| 285 | """ | ||
| 286 | pass | ||
| 287 | |||
| 288 | @abstractmethod | ||
| 289 | def write_index(self): | ||
| 290 | """ | ||
| 291 | This function creates the index files | ||
| 292 | """ | ||
| 293 | pass | ||
| 294 | |||
| 295 | @abstractmethod | ||
| 296 | def remove_packaging_data(self): | ||
| 297 | pass | ||
| 298 | |||
| 299 | @abstractmethod | ||
| 300 | def list_installed(self): | ||
| 301 | pass | ||
| 302 | |||
| 303 | @abstractmethod | ||
| 304 | def extract(self, pkg): | ||
| 305 | """ | ||
| 306 | Returns the path to a tmpdir where resides the contents of a package. | ||
| 307 | Deleting the tmpdir is responsability of the caller. | ||
| 308 | """ | ||
| 309 | pass | ||
| 310 | |||
| 311 | @abstractmethod | ||
| 312 | def insert_feeds_uris(self, feed_uris, feed_base_paths, feed_archs): | ||
| 313 | """ | ||
| 314 | Add remote package feeds into repository manager configuration. The parameters | ||
| 315 | for the feeds are set by feed_uris, feed_base_paths and feed_archs. | ||
| 316 | See http://www.yoctoproject.org/docs/current/ref-manual/ref-manual.html#var-PACKAGE_FEED_URIS | ||
| 317 | for their description. | ||
| 318 | """ | ||
| 319 | pass | ||
| 320 | |||
| 321 | def install_glob(self, globs, sdk=False): | ||
| 322 | """ | ||
| 323 | Install all packages that match a glob. | ||
| 324 | """ | ||
| 325 | # TODO don't have sdk here but have a property on the superclass | ||
| 326 | # (and respect in install_complementary) | ||
| 327 | if sdk: | ||
| 328 | pkgdatadir = self.d.getVar("PKGDATA_DIR_SDK") | ||
| 329 | else: | ||
| 330 | pkgdatadir = self.d.getVar("PKGDATA_DIR") | ||
| 331 | |||
| 332 | try: | ||
| 333 | bb.note("Installing globbed packages...") | ||
| 334 | cmd = ["oe-pkgdata-util", "-p", pkgdatadir, "list-pkgs", globs] | ||
| 335 | bb.note('Running %s' % cmd) | ||
| 336 | proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) | ||
| 337 | stdout, stderr = proc.communicate() | ||
| 338 | if stderr: bb.note(stderr.decode("utf-8")) | ||
| 339 | pkgs = stdout.decode("utf-8") | ||
| 340 | self.install(pkgs.split(), attempt_only=True) | ||
| 341 | except subprocess.CalledProcessError as e: | ||
| 342 | # Return code 1 means no packages matched | ||
| 343 | if e.returncode != 1: | ||
| 344 | bb.fatal("Could not compute globbed packages list. Command " | ||
| 345 | "'%s' returned %d:\n%s" % | ||
| 346 | (' '.join(cmd), e.returncode, e.output.decode("utf-8"))) | ||
| 347 | |||
| 348 | def install_complementary(self, globs=None): | ||
| 349 | """ | ||
| 350 | Install complementary packages based upon the list of currently installed | ||
| 351 | packages e.g. locales, *-dev, *-dbg, etc. Note: every backend needs to | ||
| 352 | call this function explicitly after the normal package installation. | ||
| 353 | """ | ||
| 354 | if globs is None: | ||
| 355 | globs = self.d.getVar('IMAGE_INSTALL_COMPLEMENTARY') | ||
| 356 | split_linguas = set() | ||
| 357 | |||
| 358 | for translation in self.d.getVar('IMAGE_LINGUAS').split(): | ||
| 359 | split_linguas.add(translation) | ||
| 360 | split_linguas.add(translation.split('-')[0]) | ||
| 361 | |||
| 362 | split_linguas = sorted(split_linguas) | ||
| 363 | |||
| 364 | for lang in split_linguas: | ||
| 365 | globs += " *-locale-%s" % lang | ||
| 366 | for complementary_linguas in (self.d.getVar('IMAGE_LINGUAS_COMPLEMENTARY') or "").split(): | ||
| 367 | globs += (" " + complementary_linguas) % lang | ||
| 368 | |||
| 369 | if globs: | ||
| 370 | # we need to write the list of installed packages to a file because | ||
| 371 | # oe-pkgdata-util reads it from a file | ||
| 372 | with tempfile.NamedTemporaryFile(mode="w+", prefix="installed-pkgs") as installed_pkgs: | ||
| 373 | pkgs = self.list_installed() | ||
| 374 | |||
| 375 | provided_pkgs = set() | ||
| 376 | for pkg in pkgs.values(): | ||
| 377 | provided_pkgs |= set(pkg.get('provs', [])) | ||
| 378 | |||
| 379 | output = oe.utils.format_pkg_list(pkgs, "arch") | ||
| 380 | installed_pkgs.write(output) | ||
| 381 | installed_pkgs.flush() | ||
| 382 | |||
| 383 | cmd = ["oe-pkgdata-util", | ||
| 384 | "-p", self.d.getVar('PKGDATA_DIR'), "glob", installed_pkgs.name, | ||
| 385 | globs] | ||
| 386 | exclude = self.d.getVar('PACKAGE_EXCLUDE_COMPLEMENTARY') | ||
| 387 | if exclude: | ||
| 388 | cmd.extend(['--exclude=' + '|'.join(exclude.split())]) | ||
| 389 | try: | ||
| 390 | bb.note('Running %s' % cmd) | ||
| 391 | proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) | ||
| 392 | stdout, stderr = proc.communicate() | ||
| 393 | if stderr: bb.note(stderr.decode("utf-8")) | ||
| 394 | complementary_pkgs = stdout.decode("utf-8") | ||
| 395 | complementary_pkgs = set(complementary_pkgs.split()) | ||
| 396 | skip_pkgs = sorted(complementary_pkgs & provided_pkgs) | ||
| 397 | install_pkgs = sorted(complementary_pkgs - provided_pkgs) | ||
| 398 | bb.note("Installing complementary packages ... %s (skipped already provided packages %s)" % ( | ||
| 399 | ' '.join(install_pkgs), | ||
| 400 | ' '.join(skip_pkgs))) | ||
| 401 | self.install(install_pkgs, hard_depends_only=True) | ||
| 402 | except subprocess.CalledProcessError as e: | ||
| 403 | bb.fatal("Could not compute complementary packages list. Command " | ||
| 404 | "'%s' returned %d:\n%s" % | ||
| 405 | (' '.join(cmd), e.returncode, e.output.decode("utf-8"))) | ||
| 406 | |||
| 407 | if self.d.getVar('IMAGE_LOCALES_ARCHIVE') == '1': | ||
| 408 | target_arch = self.d.getVar('TARGET_ARCH') | ||
| 409 | localedir = oe.path.join(self.target_rootfs, self.d.getVar("libdir"), "locale") | ||
| 410 | if os.path.exists(localedir) and os.listdir(localedir): | ||
| 411 | generate_locale_archive(self.d, self.target_rootfs, target_arch, localedir) | ||
| 412 | # And now delete the binary locales | ||
| 413 | self.remove(fnmatch.filter(self.list_installed(), "glibc-binary-localedata-*"), False) | ||
| 414 | |||
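A small sketch of the locale glob expansion performed above, assuming an invented IMAGE_LINGUAS value of "en-us pt-br" and an invented starting glob list:

    globs = "*-dev *-dbg"
    split_linguas = set()
    for translation in "en-us pt-br".split():
        split_linguas.add(translation)
        split_linguas.add(translation.split('-')[0])

    for lang in sorted(split_linguas):
        globs += " *-locale-%s" % lang

    # globs == '*-dev *-dbg *-locale-en *-locale-en-us *-locale-pt *-locale-pt-br'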
| 415 | def deploy_dir_lock(self): | ||
| 416 | if self.deploy_dir is None: | ||
| 417 | raise RuntimeError("deploy_dir is not set!") | ||
| 418 | |||
| 419 | lock_file_name = os.path.join(self.deploy_dir, "deploy.lock") | ||
| 420 | |||
| 421 | self.deploy_lock = bb.utils.lockfile(lock_file_name) | ||
| 422 | |||
| 423 | def deploy_dir_unlock(self): | ||
| 424 | if self.deploy_lock is None: | ||
| 425 | return | ||
| 426 | |||
| 427 | bb.utils.unlockfile(self.deploy_lock) | ||
| 428 | |||
| 429 | self.deploy_lock = None | ||
| 430 | |||
| 431 | def construct_uris(self, uris, base_paths): | ||
| 432 | """ | ||
| 433 | Construct URIs based on the following pattern: uri/base_path where 'uri' | ||
| 434 | and 'base_path' are taken from each element of the corresponding list | ||
| 435 | argument, giving len(uris) x len(base_paths) elements in the returned | ||
| 436 | list. | ||
| 437 | """ | ||
| 438 | def _append(arr1, arr2, sep='/'): | ||
| 439 | res = [] | ||
| 440 | narr1 = [a.rstrip(sep) for a in arr1] | ||
| 441 | narr2 = [a.rstrip(sep).lstrip(sep) for a in arr2] | ||
| 442 | for a1 in narr1: | ||
| 443 | if arr2: | ||
| 444 | for a2 in narr2: | ||
| 445 | res.append("%s%s%s" % (a1, sep, a2)) | ||
| 446 | else: | ||
| 447 | res.append(a1) | ||
| 448 | return res | ||
| 449 | return _append(uris, base_paths) | ||
| 450 | |||
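A hedged usage example of construct_uris() on any PackageManager instance; the feed URIs and base paths are invented:

    uris = ["http://example.com/feed", "http://mirror.example.org/feed/"]
    base_paths = ["ipk/all", "/ipk/cortexa57"]

    # pm.construct_uris(uris, base_paths) would return:
    # ['http://example.com/feed/ipk/all',
    #  'http://example.com/feed/ipk/cortexa57',
    #  'http://mirror.example.org/feed/ipk/all',
    #  'http://mirror.example.org/feed/ipk/cortexa57']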
| 451 | def get_missing_pkg_reason(self, pkg): | ||
| 452 | """ | ||
| 453 | Return a string describing the possible reason of a missing package. | ||
| 454 | """ | ||
| 455 | reason = "" | ||
| 456 | if not oe.packagedata.packaged(pkg, self.d): | ||
| 457 | if oe.packagedata.has_pkgdata(pkg, self.d): | ||
| 458 | packaged_pkgs = [] | ||
| 459 | recipe_data = oe.packagedata.read_pkgdata(pkg, self.d) | ||
| 460 | for subpkg in recipe_data.get("PACKAGES", "").split(): | ||
| 461 | if oe.packagedata.packaged(subpkg, self.d): | ||
| 462 | packaged_pkgs.append(subpkg) | ||
| 463 | reason = "%s is a recipe. Its generated packages are: %s\n" % (pkg, packaged_pkgs) | ||
| 464 | reason += "Either specify a generated package or set ALLOW_EMPTY:${PN} = \"1\" in %s recipe\n" % pkg | ||
| 465 | else: | ||
| 466 | reason = "%s is neither a recipe nor a generated package.\n" % pkg | ||
| 467 | else: | ||
| 468 | reason = "%s is a generated package.\n" % pkg | ||
| 469 | reason += "The reason it's not found might be that it's not in %s\n" % oe.path.join(self.d.getVar('WORKDIR'), "oe-rootfs-repo") | ||
| 470 | return reason | ||
| 471 | |||
| 472 | def create_packages_dir(d, subrepo_dir, deploydir, taskname, filterbydependencies, include_self=False): | ||
| 473 | """ | ||
| 474 | Go through our do_package_write_X dependencies and hardlink the packages we depend | ||
| 475 | upon into the repo directory. This prevents us from seeing other packages that may | ||
| 476 | have been built but that we don't depend upon, as well as packages for architectures we don't | ||
| 477 | support. | ||
| 478 | """ | ||
| 479 | import errno | ||
| 480 | |||
| 481 | taskdepdata = d.getVar("BB_TASKDEPDATA", False) | ||
| 482 | mytaskname = d.getVar("BB_RUNTASK") | ||
| 483 | pn = d.getVar("PN") | ||
| 484 | seendirs = set() | ||
| 485 | multilibs = {} | ||
| 486 | |||
| 487 | bb.utils.remove(subrepo_dir, recurse=True) | ||
| 488 | bb.utils.mkdirhier(subrepo_dir) | ||
| 489 | |||
| 490 | # Detect bitbake -b usage | ||
| 491 | nodeps = d.getVar("BB_LIMITEDDEPS") or False | ||
| 492 | if nodeps or not filterbydependencies: | ||
| 493 | for arch in d.getVar("ALL_MULTILIB_PACKAGE_ARCHS").split() + d.getVar("ALL_MULTILIB_PACKAGE_ARCHS").replace("-", "_").split(): | ||
| 494 | target = os.path.join(deploydir + "/" + arch) | ||
| 495 | if os.path.exists(target): | ||
| 496 | oe.path.symlink(target, subrepo_dir + "/" + arch, True) | ||
| 497 | return | ||
| 498 | |||
| 499 | start = None | ||
| 500 | for dep in taskdepdata: | ||
| 501 | data = taskdepdata[dep] | ||
| 502 | if data[1] == mytaskname and data[0] == pn: | ||
| 503 | start = dep | ||
| 504 | break | ||
| 505 | if start is None: | ||
| 506 | bb.fatal("Couldn't find ourself in BB_TASKDEPDATA?") | ||
| 507 | pkgdeps = set() | ||
| 508 | start = [start] | ||
| 509 | if include_self: | ||
| 510 | seen = set() | ||
| 511 | else: | ||
| 512 | seen = set(start) | ||
| 513 | # Support direct dependencies (do_rootfs -> do_package_write_X) | ||
| 514 | # or indirect dependencies within PN (do_populate_sdk_ext -> do_rootfs -> do_package_write_X) | ||
| 515 | while start: | ||
| 516 | next = [] | ||
| 517 | for dep2 in start: | ||
| 518 | for dep in taskdepdata[dep2][3]: | ||
| 519 | if include_self or taskdepdata[dep][0] != pn: | ||
| 520 | if "do_" + taskname in dep: | ||
| 521 | pkgdeps.add(dep) | ||
| 522 | elif dep not in seen: | ||
| 523 | next.append(dep) | ||
| 524 | seen.add(dep) | ||
| 525 | start = next | ||
| 526 | |||
| 527 | for dep in pkgdeps: | ||
| 528 | c = taskdepdata[dep][0] | ||
| 529 | manifest, d2 = oe.sstatesig.find_sstate_manifest(c, taskdepdata[dep][2], taskname, d, multilibs) | ||
| 530 | if not manifest: | ||
| 531 | bb.fatal("No manifest generated from: %s in %s" % (c, taskdepdata[dep][2])) | ||
| 532 | if not os.path.exists(manifest): | ||
| 533 | continue | ||
| 534 | with open(manifest, "r") as f: | ||
| 535 | for l in f: | ||
| 536 | l = l.strip() | ||
| 537 | deploydir = os.path.normpath(deploydir) | ||
| 538 | dest = l.replace(deploydir, "") | ||
| 539 | dest = subrepo_dir + dest | ||
| 540 | if l.endswith("/"): | ||
| 541 | if dest not in seendirs: | ||
| 542 | bb.utils.mkdirhier(dest) | ||
| 543 | seendirs.add(dest) | ||
| 544 | continue | ||
| 545 | # Try to hardlink the file, copy if that fails | ||
| 546 | destdir = os.path.dirname(dest) | ||
| 547 | if destdir not in seendirs: | ||
| 548 | bb.utils.mkdirhier(destdir) | ||
| 549 | seendirs.add(destdir) | ||
| 550 | try: | ||
| 551 | os.link(l, dest) | ||
| 552 | except OSError as err: | ||
| 553 | if err.errno == errno.EXDEV: | ||
| 554 | bb.utils.copyfile(l, dest) | ||
| 555 | else: | ||
| 556 | raise | ||
| 557 | |||
| 558 | |||
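The manifest-walking loop above relies on a hardlink-with-copy-fallback pattern. A minimal standalone sketch of that fallback (paths are invented):

    import errno, os, shutil

    def link_or_copy(src, dest):
        os.makedirs(os.path.dirname(dest), exist_ok=True)
        try:
            os.link(src, dest)              # cheap hardlink when on the same filesystem
        except OSError as err:
            if err.errno == errno.EXDEV:    # cross-device link: fall back to a copy
                shutil.copyfile(src, dest)
            else:
                raise

    # link_or_copy("/path/to/deploy/ipk/arch/foo.ipk", "/path/to/subrepo/arch/foo.ipk")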
| 559 | def generate_index_files(d): | ||
| 560 | from oe.package_manager.rpm import RpmSubdirIndexer | ||
| 561 | from oe.package_manager.ipk import OpkgIndexer | ||
| 562 | from oe.package_manager.deb import DpkgIndexer | ||
| 563 | |||
| 564 | classes = d.getVar('PACKAGE_CLASSES').replace("package_", "").split() | ||
| 565 | |||
| 566 | indexer_map = { | ||
| 567 | "rpm": (RpmSubdirIndexer, d.getVar('DEPLOY_DIR_RPM')), | ||
| 568 | "ipk": (OpkgIndexer, d.getVar('DEPLOY_DIR_IPK')), | ||
| 569 | "deb": (DpkgIndexer, d.getVar('DEPLOY_DIR_DEB')) | ||
| 570 | } | ||
| 571 | |||
| 572 | result = None | ||
| 573 | |||
| 574 | for pkg_class in classes: | ||
| 575 | if not pkg_class in indexer_map: | ||
| 576 | continue | ||
| 577 | |||
| 578 | if os.path.exists(indexer_map[pkg_class][1]): | ||
| 579 | result = indexer_map[pkg_class][0](d, indexer_map[pkg_class][1]).write_index() | ||
| 580 | |||
| 581 | if result is not None: | ||
| 582 | bb.fatal(result) | ||
diff --git a/meta/lib/oe/package_manager/common_deb_ipk.py b/meta/lib/oe/package_manager/common_deb_ipk.py deleted file mode 100644 index 6a1e28ee6f..0000000000 --- a/meta/lib/oe/package_manager/common_deb_ipk.py +++ /dev/null | |||
| @@ -1,97 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | import glob | ||
| 8 | import os | ||
| 9 | import subprocess | ||
| 10 | import tempfile | ||
| 11 | |||
| 12 | import bb | ||
| 13 | |||
| 14 | from oe.package_manager import opkg_query, PackageManager | ||
| 15 | |||
| 16 | class OpkgDpkgPM(PackageManager): | ||
| 17 | def __init__(self, d, target_rootfs): | ||
| 18 | """ | ||
| 19 | This is an abstract class. Do not instantiate this directly. | ||
| 20 | """ | ||
| 21 | super(OpkgDpkgPM, self).__init__(d, target_rootfs) | ||
| 22 | |||
| 23 | def package_info(self, pkg): | ||
| 24 | """ | ||
| 25 | Returns a dictionary with the package info. | ||
| 26 | """ | ||
| 27 | raise NotImplementedError | ||
| 28 | |||
| 29 | def _common_package_info(self, cmd): | ||
| 30 | """ | ||
| 31 | "Returns a dictionary with the package info. | ||
| 32 | |||
| 33 | This method extracts the common parts for Opkg and Dpkg | ||
| 34 | """ | ||
| 35 | |||
| 36 | proc = subprocess.run(cmd, capture_output=True, encoding="utf-8", shell=True) | ||
| 37 | if proc.returncode: | ||
| 38 | bb.fatal("Unable to list available packages. Command '%s' " | ||
| 39 | "returned %d:\n%s" % (cmd, proc.returncode, proc.stderr)) | ||
| 40 | elif proc.stderr: | ||
| 41 | bb.note("Command '%s' returned stderr: %s" % (cmd, proc.stderr)) | ||
| 42 | |||
| 43 | return opkg_query(proc.stdout) | ||
| 44 | |||
| 45 | def extract(self, pkg): | ||
| 46 | """ | ||
| 47 | Returns the path to a tmpdir containing the extracted contents of a package. | ||
| 48 | |||
| 49 | Deleting the tmpdir is the responsibility of the caller. | ||
| 50 | """ | ||
| 51 | pkg_info = self.package_info(pkg) | ||
| 52 | if not pkg_info: | ||
| 53 | bb.fatal("Unable to get information for package '%s' while " | ||
| 54 | "trying to extract the package." % pkg) | ||
| 55 | |||
| 56 | ar_cmd = bb.utils.which(os.getenv("PATH"), "ar") | ||
| 57 | tar_cmd = bb.utils.which(os.getenv("PATH"), "tar") | ||
| 58 | pkg_path = pkg_info[pkg]["filepath"] | ||
| 59 | |||
| 60 | if not os.path.isfile(pkg_path): | ||
| 61 | bb.fatal("Unable to extract package for '%s'." | ||
| 62 | "File %s doesn't exists" % (pkg, pkg_path)) | ||
| 63 | |||
| 64 | tmp_dir = tempfile.mkdtemp() | ||
| 65 | current_dir = os.getcwd() | ||
| 66 | os.chdir(tmp_dir) | ||
| 67 | |||
| 68 | try: | ||
| 69 | cmd = [ar_cmd, 'x', pkg_path] | ||
| 70 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) | ||
| 71 | data_tar = glob.glob("data.tar.*") | ||
| 72 | if len(data_tar) != 1: | ||
| 73 | bb.fatal("Unable to extract %s package. Failed to identify " | ||
| 74 | "data tarball (found tarballs '%s').", | ||
| 75 | pkg_path, data_tar) | ||
| 76 | data_tar = data_tar[0] | ||
| 77 | cmd = [tar_cmd, 'xf', data_tar] | ||
| 78 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) | ||
| 79 | except subprocess.CalledProcessError as e: | ||
| 80 | bb.utils.remove(tmp_dir, recurse=True) | ||
| 81 | bb.fatal("Unable to extract %s package. Command '%s' " | ||
| 82 | "returned %d:\n%s" % (pkg_path, ' '.join(cmd), e.returncode, e.output.decode("utf-8"))) | ||
| 83 | except OSError as e: | ||
| 84 | bb.utils.remove(tmp_dir, recurse=True) | ||
| 85 | bb.fatal("Unable to extract %s package. Command '%s' " | ||
| 86 | "returned %d:\n%s at %s" % (pkg_path, ' '.join(cmd), e.errno, e.strerror, e.filename)) | ||
| 87 | |||
| 88 | bb.note("Extracted %s to %s" % (pkg_path, tmp_dir)) | ||
| 89 | bb.utils.remove(os.path.join(tmp_dir, "debian-binary")) | ||
| 90 | bb.utils.remove(os.path.join(tmp_dir, "control.tar.gz")) | ||
| 91 | bb.utils.remove(os.path.join(tmp_dir, data_tar)) | ||
| 92 | os.chdir(current_dir) | ||
| 93 | |||
| 94 | return tmp_dir | ||
| 95 | |||
| 96 | def _handle_intercept_failure(self, registered_pkgs): | ||
| 97 | self.mark_packages("unpacked", registered_pkgs.split()) | ||
diff --git a/meta/lib/oe/package_manager/deb/__init__.py b/meta/lib/oe/package_manager/deb/__init__.py deleted file mode 100644 index eb48f3f982..0000000000 --- a/meta/lib/oe/package_manager/deb/__init__.py +++ /dev/null | |||
| @@ -1,451 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | import re | ||
| 8 | import subprocess | ||
| 9 | from oe.package_manager import * | ||
| 10 | from oe.package_manager.common_deb_ipk import OpkgDpkgPM | ||
| 11 | |||
| 12 | class DpkgIndexer(Indexer): | ||
| 13 | def _create_configs(self): | ||
| 14 | bb.utils.mkdirhier(self.apt_conf_dir) | ||
| 15 | bb.utils.mkdirhier(os.path.join(self.apt_conf_dir, "lists", "partial")) | ||
| 16 | bb.utils.mkdirhier(os.path.join(self.apt_conf_dir, "apt.conf.d")) | ||
| 17 | bb.utils.mkdirhier(os.path.join(self.apt_conf_dir, "preferences.d")) | ||
| 18 | |||
| 19 | with open(os.path.join(self.apt_conf_dir, "preferences"), | ||
| 20 | "w") as prefs_file: | ||
| 21 | pass | ||
| 22 | with open(os.path.join(self.apt_conf_dir, "sources.list"), | ||
| 23 | "w+") as sources_file: | ||
| 24 | pass | ||
| 25 | |||
| 26 | with open(self.apt_conf_file, "w") as apt_conf: | ||
| 27 | with open(os.path.join(self.d.expand("${STAGING_ETCDIR_NATIVE}"), | ||
| 28 | "apt", "apt.conf.sample")) as apt_conf_sample: | ||
| 29 | for line in apt_conf_sample.read().split("\n"): | ||
| 30 | line = re.sub(r"#ROOTFS#", "/dev/null", line) | ||
| 31 | line = re.sub(r"#APTCONF#", self.apt_conf_dir, line) | ||
| 32 | apt_conf.write(line + "\n") | ||
| 33 | |||
| 34 | def write_index(self): | ||
| 35 | self.apt_conf_dir = os.path.join(self.d.expand("${APTCONF_TARGET}"), | ||
| 36 | "apt-ftparchive") | ||
| 37 | self.apt_conf_file = os.path.join(self.apt_conf_dir, "apt.conf") | ||
| 38 | self._create_configs() | ||
| 39 | |||
| 40 | os.environ['APT_CONFIG'] = self.apt_conf_file | ||
| 41 | |||
| 42 | pkg_archs = self.d.getVar('PACKAGE_ARCHS') | ||
| 43 | if pkg_archs is not None: | ||
| 44 | arch_list = pkg_archs.split() | ||
| 45 | sdk_pkg_archs = self.d.getVar('SDK_PACKAGE_ARCHS') | ||
| 46 | if sdk_pkg_archs is not None: | ||
| 47 | for a in sdk_pkg_archs.split(): | ||
| 48 | if a not in pkg_archs: | ||
| 49 | arch_list.append(a) | ||
| 50 | |||
| 51 | all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS') or "").split() | ||
| 52 | arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in arch_list) | ||
| 53 | |||
| 54 | apt_ftparchive = bb.utils.which(os.getenv('PATH'), "apt-ftparchive") | ||
| 55 | gzip = bb.utils.which(os.getenv('PATH'), "gzip") | ||
| 56 | |||
| 57 | index_cmds = [] | ||
| 58 | deb_dirs_found = False | ||
| 59 | index_sign_files = set() | ||
| 60 | for arch in arch_list: | ||
| 61 | arch_dir = os.path.join(self.deploy_dir, arch) | ||
| 62 | if not os.path.isdir(arch_dir): | ||
| 63 | continue | ||
| 64 | |||
| 65 | cmd = "cd %s; PSEUDO_UNLOAD=1 %s packages . > Packages;" % (arch_dir, apt_ftparchive) | ||
| 66 | |||
| 67 | cmd += "%s -fcn Packages > Packages.gz;" % gzip | ||
| 68 | |||
| 69 | release_file = os.path.join(arch_dir, "Release") | ||
| 70 | index_sign_files.add(release_file) | ||
| 71 | |||
| 72 | with open(release_file, "w+") as release: | ||
| 73 | release.write("Label: %s\n" % arch) | ||
| 74 | |||
| 75 | cmd += "PSEUDO_UNLOAD=1 %s release . >> Release" % apt_ftparchive | ||
| 76 | |||
| 77 | index_cmds.append(cmd) | ||
| 78 | |||
| 79 | deb_dirs_found = True | ||
| 80 | |||
| 81 | if not deb_dirs_found: | ||
| 82 | bb.note("There are no packages in %s" % self.deploy_dir) | ||
| 83 | return | ||
| 84 | |||
| 85 | oe.utils.multiprocess_launch(create_index, index_cmds, self.d) | ||
| 86 | if self.d.getVar('PACKAGE_FEED_SIGN') == '1': | ||
| 87 | signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND')) | ||
| 88 | else: | ||
| 89 | signer = None | ||
| 90 | if signer: | ||
| 91 | for f in index_sign_files: | ||
| 92 | signer.detach_sign(f, | ||
| 93 | self.d.getVar('PACKAGE_FEED_GPG_NAME'), | ||
| 94 | self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE'), | ||
| 95 | output_suffix="gpg", | ||
| 96 | use_sha256=True) | ||
| 97 | |||
| 98 | class PMPkgsList(PkgsList): | ||
| 99 | |||
| 100 | def list_pkgs(self): | ||
| 101 | cmd = [bb.utils.which(os.getenv('PATH'), "dpkg-query"), | ||
| 102 | "--admindir=%s/var/lib/dpkg" % self.rootfs_dir, | ||
| 103 | "-W"] | ||
| 104 | |||
| 105 | cmd.append("-f=Package: ${Package}\nArchitecture: ${PackageArch}\nVersion: ${Version}\nFile: ${Package}_${Version}_${Architecture}.deb\nDepends: ${Depends}\nRecommends: ${Recommends}\nProvides: ${Provides}\n\n") | ||
| 106 | |||
| 107 | try: | ||
| 108 | cmd_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).strip().decode("utf-8") | ||
| 109 | except subprocess.CalledProcessError as e: | ||
| 110 | bb.fatal("Cannot get the installed packages list. Command '%s' " | ||
| 111 | "returned %d:\n%s" % (' '.join(cmd), e.returncode, e.output.decode("utf-8"))) | ||
| 112 | |||
| 113 | return opkg_query(cmd_output) | ||
| 114 | |||
| 115 | |||
| 116 | class DpkgPM(OpkgDpkgPM): | ||
| 117 | def __init__(self, d, target_rootfs, archs, base_archs, apt_conf_dir=None, deb_repo_workdir="oe-rootfs-repo", filterbydependencies=True): | ||
| 118 | super(DpkgPM, self).__init__(d, target_rootfs) | ||
| 119 | self.deploy_dir = oe.path.join(self.d.getVar('WORKDIR'), deb_repo_workdir) | ||
| 120 | |||
| 121 | create_packages_dir(self.d, self.deploy_dir, d.getVar("DEPLOY_DIR_DEB"), "package_write_deb", filterbydependencies) | ||
| 122 | |||
| 123 | if apt_conf_dir is None: | ||
| 124 | self.apt_conf_dir = self.d.expand("${APTCONF_TARGET}/apt") | ||
| 125 | else: | ||
| 126 | self.apt_conf_dir = apt_conf_dir | ||
| 127 | self.apt_conf_file = os.path.join(self.apt_conf_dir, "apt.conf") | ||
| 128 | self.apt_get_cmd = bb.utils.which(os.getenv('PATH'), "apt-get") | ||
| 129 | self.apt_cache_cmd = bb.utils.which(os.getenv('PATH'), "apt-cache") | ||
| 130 | |||
| 131 | self.apt_args = d.getVar("APT_ARGS") | ||
| 132 | |||
| 133 | self.all_arch_list = archs.split() | ||
| 134 | all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS') or "").split() | ||
| 135 | self.all_arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in self.all_arch_list) | ||
| 136 | |||
| 137 | self._create_configs(archs, base_archs) | ||
| 138 | |||
| 139 | self.indexer = DpkgIndexer(self.d, self.deploy_dir) | ||
| 140 | |||
| 141 | def mark_packages(self, status_tag, packages=None): | ||
| 142 | """ | ||
| 143 | This function will change a package's status in the /var/lib/dpkg/status file. | ||
| 144 | If 'packages' is None then the given 'status_tag' will be applied to all | ||
| 145 | packages. | ||
| 146 | """ | ||
| 147 | status_file = self.target_rootfs + "/var/lib/dpkg/status" | ||
| 148 | |||
| 149 | with open(status_file, "r") as sf: | ||
| 150 | with open(status_file + ".tmp", "w+") as tmp_sf: | ||
| 151 | if packages is None: | ||
| 152 | tmp_sf.write(re.sub(r"Package: (.*?)\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)", | ||
| 153 | r"Package: \1\n\2Status: \3%s" % status_tag, | ||
| 154 | sf.read())) | ||
| 155 | else: | ||
| 156 | if type(packages).__name__ != "list": | ||
| 157 | raise TypeError("'packages' should be a list object") | ||
| 158 | |||
| 159 | status = sf.read() | ||
| 160 | for pkg in packages: | ||
| 161 | status = re.sub(r"Package: %s\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)" % pkg, | ||
| 162 | r"Package: %s\n\1Status: \2%s" % (pkg, status_tag), | ||
| 163 | status) | ||
| 164 | |||
| 165 | tmp_sf.write(status) | ||
| 166 | |||
| 167 | bb.utils.rename(status_file + ".tmp", status_file) | ||
| 168 | |||
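A hedged illustration of the status rewriting mark_packages() performs, using the per-package regex variant above on an invented dpkg status stanza:

    import re

    status = """\
    Package: busybox
    Status: install ok unpacked
    Architecture: cortexa57
    """

    pkg, status_tag = "busybox", "installed"
    status = re.sub(r"Package: %s\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)" % pkg,
                    r"Package: %s\n\1Status: \2%s" % (pkg, status_tag),
                    status)

    # The busybox stanza now reads "Status: install ok installed".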
| 169 | def run_pre_post_installs(self, package_name=None): | ||
| 170 | """ | ||
| 171 | Run the pre/post installs for package "package_name". If package_name is | ||
| 172 | None, then run all pre/post install scriptlets. | ||
| 173 | """ | ||
| 174 | info_dir = self.target_rootfs + "/var/lib/dpkg/info" | ||
| 175 | ControlScript = collections.namedtuple("ControlScript", ["suffix", "name", "argument"]) | ||
| 176 | control_scripts = [ | ||
| 177 | ControlScript(".preinst", "Preinstall", "install"), | ||
| 178 | ControlScript(".postinst", "Postinstall", "configure")] | ||
| 179 | status_file = self.target_rootfs + "/var/lib/dpkg/status" | ||
| 180 | installed_pkgs = [] | ||
| 181 | |||
| 182 | with open(status_file, "r") as status: | ||
| 183 | for line in status.read().split('\n'): | ||
| 184 | m = re.match(r"^Package: (.*)", line) | ||
| 185 | if m is not None: | ||
| 186 | installed_pkgs.append(m.group(1)) | ||
| 187 | |||
| 188 | if package_name is not None and not package_name in installed_pkgs: | ||
| 189 | return | ||
| 190 | |||
| 191 | os.environ['D'] = self.target_rootfs | ||
| 192 | os.environ['OFFLINE_ROOT'] = self.target_rootfs | ||
| 193 | os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs | ||
| 194 | os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs | ||
| 195 | os.environ['INTERCEPT_DIR'] = self.intercepts_dir | ||
| 196 | os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE') | ||
| 197 | |||
| 198 | for pkg_name in installed_pkgs: | ||
| 199 | for control_script in control_scripts: | ||
| 200 | p_full = os.path.join(info_dir, pkg_name + control_script.suffix) | ||
| 201 | if os.path.exists(p_full): | ||
| 202 | try: | ||
| 203 | bb.note("Executing %s for package: %s ..." % | ||
| 204 | (control_script.name.lower(), pkg_name)) | ||
| 205 | output = subprocess.check_output([p_full, control_script.argument], | ||
| 206 | stderr=subprocess.STDOUT).decode("utf-8") | ||
| 207 | bb.note(output) | ||
| 208 | except subprocess.CalledProcessError as e: | ||
| 209 | bb.warn("%s for package %s failed with %d:\n%s" % | ||
| 210 | (control_script.name, pkg_name, e.returncode, | ||
| 211 | e.output.decode("utf-8"))) | ||
| 212 | failed_postinsts_abort([pkg_name], self.d.expand("${T}/log.do_${BB_CURRENTTASK}")) | ||
| 213 | |||
| 214 | def update(self): | ||
| 215 | os.environ['APT_CONFIG'] = self.apt_conf_file | ||
| 216 | |||
| 217 | self.deploy_dir_lock() | ||
| 218 | |||
| 219 | cmd = "%s update" % self.apt_get_cmd | ||
| 220 | |||
| 221 | try: | ||
| 222 | subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT) | ||
| 223 | except subprocess.CalledProcessError as e: | ||
| 224 | bb.fatal("Unable to update the package index files. Command '%s' " | ||
| 225 | "returned %d:\n%s" % (e.cmd, e.returncode, e.output.decode("utf-8"))) | ||
| 226 | |||
| 227 | self.deploy_dir_unlock() | ||
| 228 | |||
| 229 | def install(self, pkgs, attempt_only=False, hard_depends_only=False): | ||
| 230 | if attempt_only and len(pkgs) == 0: | ||
| 231 | return | ||
| 232 | |||
| 233 | os.environ['APT_CONFIG'] = self.apt_conf_file | ||
| 234 | |||
| 235 | extra_args = "" | ||
| 236 | if hard_depends_only: | ||
| 237 | extra_args = "--no-install-recommends" | ||
| 238 | |||
| 239 | cmd = "%s %s install --allow-downgrades --allow-remove-essential --allow-change-held-packages --allow-unauthenticated --no-remove %s %s" % \ | ||
| 240 | (self.apt_get_cmd, self.apt_args, extra_args, ' '.join(pkgs)) | ||
| 241 | |||
| 242 | try: | ||
| 243 | bb.note("Installing the following packages: %s" % ' '.join(pkgs)) | ||
| 244 | output = subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT) | ||
| 245 | bb.note(output.decode("utf-8")) | ||
| 246 | except subprocess.CalledProcessError as e: | ||
| 247 | e_output = e.output.decode("utf-8") | ||
| 248 | extra_info = "" | ||
| 249 | for e_line in e_output.split('\n'): | ||
| 250 | if 'has no installation candidate' in e_line or 'Unable to locate package' in e_line: | ||
| 251 | match = re.search(r"E: Package '([a-z0-9+\-\._]+)' has no installation candidate", e_line) | ||
| 252 | if match: | ||
| 253 | pkg = match.group(1) | ||
| 254 | else: | ||
| 255 | pkg = re.search(r"E: Unable to locate package ([a-z0-9+\-\._]+)", e_line).group(1) | ||
| 256 | extra_info += self.get_missing_pkg_reason(pkg) | ||
| 257 | (bb.fatal, bb.warn)[attempt_only]("Unable to install packages. " | ||
| 258 | "Command '%s' returned %d:\n%s%s" % | ||
| 259 | (cmd, e.returncode, e_output, extra_info)) | ||
| 260 | |||
| 261 | # rename *.dpkg-new files/dirs | ||
| 262 | for root, dirs, files in os.walk(self.target_rootfs): | ||
| 263 | for dir in dirs: | ||
| 264 | new_dir = re.sub(r"\.dpkg-new", "", dir) | ||
| 265 | if dir != new_dir: | ||
| 266 | bb.utils.rename(os.path.join(root, dir), | ||
| 267 | os.path.join(root, new_dir)) | ||
| 268 | |||
| 269 | for file in files: | ||
| 270 | new_file = re.sub(r"\.dpkg-new", "", file) | ||
| 271 | if file != new_file: | ||
| 272 | bb.utils.rename(os.path.join(root, file), | ||
| 273 | os.path.join(root, new_file)) | ||
| 274 | |||
| 275 | |||
| 276 | def remove(self, pkgs, with_dependencies=True): | ||
| 277 | if not pkgs: | ||
| 278 | return | ||
| 279 | |||
| 280 | os.environ['D'] = self.target_rootfs | ||
| 281 | os.environ['OFFLINE_ROOT'] = self.target_rootfs | ||
| 282 | os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs | ||
| 283 | os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs | ||
| 284 | os.environ['INTERCEPT_DIR'] = self.intercepts_dir | ||
| 285 | |||
| 286 | if with_dependencies: | ||
| 287 | os.environ['APT_CONFIG'] = self.apt_conf_file | ||
| 288 | cmd = "%s purge %s" % (self.apt_get_cmd, ' '.join(pkgs)) | ||
| 289 | else: | ||
| 290 | cmd = "%s --admindir=%s/var/lib/dpkg --instdir=%s" \ | ||
| 291 | " -P --force-depends %s" % \ | ||
| 292 | (bb.utils.which(os.getenv('PATH'), "dpkg"), | ||
| 293 | self.target_rootfs, self.target_rootfs, ' '.join(pkgs)) | ||
| 294 | |||
| 295 | try: | ||
| 296 | subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT) | ||
| 297 | except subprocess.CalledProcessError as e: | ||
| 298 | bb.fatal("Unable to remove packages. Command '%s' " | ||
| 299 | "returned %d:\n%s" % (e.cmd, e.returncode, e.output.decode("utf-8"))) | ||
| 300 | |||
| 301 | def write_index(self): | ||
| 302 | self.deploy_dir_lock() | ||
| 303 | |||
| 304 | result = self.indexer.write_index() | ||
| 305 | |||
| 306 | self.deploy_dir_unlock() | ||
| 307 | |||
| 308 | if result is not None: | ||
| 309 | bb.fatal(result) | ||
| 310 | |||
| 311 | def insert_feeds_uris(self, feed_uris, feed_base_paths, feed_archs): | ||
| 312 | if feed_uris == "": | ||
| 313 | return | ||
| 314 | |||
| 315 | |||
| 316 | sources_conf = os.path.join("%s/etc/apt/sources.list" | ||
| 317 | % self.target_rootfs) | ||
| 318 | if not os.path.exists(os.path.dirname(sources_conf)): | ||
| 319 | return | ||
| 320 | |||
| 321 | arch_list = [] | ||
| 322 | |||
| 323 | if feed_archs is None: | ||
| 324 | for arch in self.all_arch_list: | ||
| 325 | if not os.path.exists(os.path.join(self.deploy_dir, arch)): | ||
| 326 | continue | ||
| 327 | arch_list.append(arch) | ||
| 328 | else: | ||
| 329 | arch_list = feed_archs.split() | ||
| 330 | |||
| 331 | feed_uris = self.construct_uris(feed_uris.split(), feed_base_paths.split()) | ||
| 332 | |||
| 333 | with open(sources_conf, "w+") as sources_file: | ||
| 334 | for uri in feed_uris: | ||
| 335 | if arch_list: | ||
| 336 | for arch in arch_list: | ||
| 337 | bb.note('Adding dpkg channel at (%s)' % uri) | ||
| 338 | sources_file.write("deb [trusted=yes] %s/%s ./\n" % | ||
| 339 | (uri, arch)) | ||
| 340 | else: | ||
| 341 | bb.note('Adding dpkg channel at (%s)' % uri) | ||
| 342 | sources_file.write("deb [trusted=yes] %s ./\n" % uri) | ||
| 343 | |||
| 344 | def _create_configs(self, archs, base_archs): | ||
| 345 | base_archs = re.sub(r"_", r"-", base_archs) | ||
| 346 | |||
| 347 | if os.path.exists(self.apt_conf_dir): | ||
| 348 | bb.utils.remove(self.apt_conf_dir, True) | ||
| 349 | |||
| 350 | bb.utils.mkdirhier(self.apt_conf_dir) | ||
| 351 | bb.utils.mkdirhier(self.apt_conf_dir + "/lists/partial/") | ||
| 352 | bb.utils.mkdirhier(self.apt_conf_dir + "/apt.conf.d/") | ||
| 353 | bb.utils.mkdirhier(self.apt_conf_dir + "/preferences.d/") | ||
| 354 | |||
| 355 | arch_list = [] | ||
| 356 | for arch in self.all_arch_list: | ||
| 357 | if not os.path.exists(os.path.join(self.deploy_dir, arch)): | ||
| 358 | continue | ||
| 359 | arch_list.append(arch) | ||
| 360 | |||
| 361 | with open(os.path.join(self.apt_conf_dir, "preferences"), "w+") as prefs_file: | ||
| 362 | priority = 801 | ||
| 363 | for arch in arch_list: | ||
| 364 | prefs_file.write( | ||
| 365 | "Package: *\n" | ||
| 366 | "Pin: release l=%s\n" | ||
| 367 | "Pin-Priority: %d\n\n" % (arch, priority)) | ||
| 368 | |||
| 369 | priority += 5 | ||
| 370 | |||
| 371 | pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE') or "" | ||
| 372 | for pkg in pkg_exclude.split(): | ||
| 373 | prefs_file.write( | ||
| 374 | "Package: %s\n" | ||
| 375 | "Pin: release *\n" | ||
| 376 | "Pin-Priority: -1\n\n" % pkg) | ||
| 377 | |||
| 378 | arch_list.reverse() | ||
| 379 | |||
| 380 | with open(os.path.join(self.apt_conf_dir, "sources.list"), "w+") as sources_file: | ||
| 381 | for arch in arch_list: | ||
| 382 | sources_file.write("deb [trusted=yes] file:%s/ ./\n" % | ||
| 383 | os.path.join(self.deploy_dir, arch)) | ||
| 384 | |||
| 385 | base_arch_list = base_archs.split() | ||
| 386 | multilib_variants = self.d.getVar("MULTILIB_VARIANTS"); | ||
| 387 | for variant in multilib_variants.split(): | ||
| 388 | localdata = bb.data.createCopy(self.d) | ||
| 389 | variant_tune = localdata.getVar("DEFAULTTUNE:virtclass-multilib-" + variant, False) | ||
| 390 | orig_arch = localdata.getVar("DPKG_ARCH") | ||
| 391 | localdata.setVar("DEFAULTTUNE", variant_tune) | ||
| 392 | variant_arch = localdata.getVar("DPKG_ARCH") | ||
| 393 | if variant_arch not in base_arch_list: | ||
| 394 | base_arch_list.append(variant_arch) | ||
| 395 | |||
| 396 | with open(self.apt_conf_file, "w+") as apt_conf: | ||
| 397 | with open(self.d.expand("${STAGING_ETCDIR_NATIVE}/apt/apt.conf.sample")) as apt_conf_sample: | ||
| 398 | for line in apt_conf_sample.read().split("\n"): | ||
| 399 | match_arch = re.match(r" Architecture \".*\";$", line) | ||
| 400 | architectures = "" | ||
| 401 | if match_arch: | ||
| 402 | for base_arch in base_arch_list: | ||
| 403 | architectures += "\"%s\";" % base_arch | ||
| 404 | apt_conf.write(" Architectures {%s};\n" % architectures); | ||
| 405 | apt_conf.write(" Architecture \"%s\";\n" % base_archs) | ||
| 406 | else: | ||
| 407 | line = re.sub(r"#ROOTFS#", self.target_rootfs, line) | ||
| 408 | line = re.sub(r"#APTCONF#", self.apt_conf_dir, line) | ||
| 409 | apt_conf.write(line + "\n") | ||
| 410 | |||
| 411 | target_dpkg_dir = "%s/var/lib/dpkg" % self.target_rootfs | ||
| 412 | bb.utils.mkdirhier(os.path.join(target_dpkg_dir, "info")) | ||
| 413 | |||
| 414 | bb.utils.mkdirhier(os.path.join(target_dpkg_dir, "updates")) | ||
| 415 | |||
| 416 | if not os.path.exists(os.path.join(target_dpkg_dir, "status")): | ||
| 417 | open(os.path.join(target_dpkg_dir, "status"), "w+").close() | ||
| 418 | if not os.path.exists(os.path.join(target_dpkg_dir, "available")): | ||
| 419 | open(os.path.join(target_dpkg_dir, "available"), "w+").close() | ||
| 420 | |||
| 421 | def remove_packaging_data(self): | ||
| 422 | bb.utils.remove(self.target_rootfs + self.d.getVar('opkglibdir'), True) | ||
| 423 | bb.utils.remove(self.target_rootfs + "/var/lib/dpkg/", True) | ||
| 424 | |||
| 425 | def fix_broken_dependencies(self): | ||
| 426 | os.environ['APT_CONFIG'] = self.apt_conf_file | ||
| 427 | |||
| 428 | cmd = "%s %s --allow-unauthenticated -f install" % (self.apt_get_cmd, self.apt_args) | ||
| 429 | |||
| 430 | try: | ||
| 431 | subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT) | ||
| 432 | except subprocess.CalledProcessError as e: | ||
| 433 | bb.fatal("Cannot fix broken dependencies. Command '%s' " | ||
| 434 | "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8"))) | ||
| 435 | |||
| 436 | def list_installed(self): | ||
| 437 | return PMPkgsList(self.d, self.target_rootfs).list_pkgs() | ||
| 438 | |||
| 439 | def package_info(self, pkg): | ||
| 440 | """ | ||
| 441 | Returns a dictionary with the package info. | ||
| 442 | """ | ||
| 443 | cmd = "%s show %s" % (self.apt_cache_cmd, pkg) | ||
| 444 | pkg_info = self._common_package_info(cmd) | ||
| 445 | |||
| 446 | pkg_arch = pkg_info[pkg]["pkgarch"] | ||
| 447 | pkg_filename = pkg_info[pkg]["filename"] | ||
| 448 | pkg_info[pkg]["filepath"] = \ | ||
| 449 | os.path.join(self.deploy_dir, pkg_arch, pkg_filename) | ||
| 450 | |||
| 451 | return pkg_info | ||
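For reference, a minimal standalone sketch of the status rewrite that DpkgPM.mark_packages() performs on /var/lib/dpkg/status; the sample stanza and status tag below are invented for illustration and are not part of the deleted module.

    import re

    # Sample dpkg status stanza (illustrative only)
    stanza = (
        "Package: busybox\n"
        "Architecture: armhf\n"
        "Status: install ok unpacked\n"
    )
    status_tag = "installed"

    # The same substitution mark_packages() applies when 'packages' is None
    rewritten = re.sub(
        r"Package: (.*?)\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)",
        r"Package: \1\n\2Status: \3%s" % status_tag,
        stanza)
    print(rewritten)  # "Status: install ok unpacked" becomes "Status: install ok installed"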
diff --git a/meta/lib/oe/package_manager/deb/manifest.py b/meta/lib/oe/package_manager/deb/manifest.py deleted file mode 100644 index 72983bae98..0000000000 --- a/meta/lib/oe/package_manager/deb/manifest.py +++ /dev/null | |||
| @@ -1,28 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | from oe.manifest import Manifest | ||
| 8 | |||
| 9 | class PkgManifest(Manifest): | ||
| 10 | def create_initial(self): | ||
| 11 | with open(self.initial_manifest, "w+") as manifest: | ||
| 12 | manifest.write(self.initial_manifest_file_header) | ||
| 13 | |||
| 14 | for var in self.var_maps[self.manifest_type]: | ||
| 15 | pkg_list = self.d.getVar(var) | ||
| 16 | |||
| 17 | if pkg_list is None: | ||
| 18 | continue | ||
| 19 | |||
| 20 | for pkg in pkg_list.split(): | ||
| 21 | manifest.write("%s,%s\n" % | ||
| 22 | (self.var_maps[self.manifest_type][var], pkg)) | ||
| 23 | |||
| 24 | def create_final(self): | ||
| 25 | pass | ||
| 26 | |||
| 27 | def create_full(self, pm): | ||
| 28 | pass | ||
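As a rough illustration of the "type,pkg" lines create_initial() writes to the initial manifest, here is a small sketch; the variable-to-type mapping and package names are hypothetical and do not reflect the actual var_maps of the deleted class.

    # Hypothetical mapping and package lists (illustration only)
    var_maps = {"PACKAGE_INSTALL": "mip",
                "PACKAGE_INSTALL_ATTEMPTONLY": "aop"}
    pkg_lists = {"PACKAGE_INSTALL": "base-files busybox",
                 "PACKAGE_INSTALL_ATTEMPTONLY": "strace"}

    lines = []
    for var, pkg_type in var_maps.items():
        for pkg in pkg_lists.get(var, "").split():
            lines.append("%s,%s" % (pkg_type, pkg))
    print("\n".join(lines))
    # mip,base-files
    # mip,busybox
    # aop,strace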
diff --git a/meta/lib/oe/package_manager/deb/rootfs.py b/meta/lib/oe/package_manager/deb/rootfs.py deleted file mode 100644 index 1e25b64ed9..0000000000 --- a/meta/lib/oe/package_manager/deb/rootfs.py +++ /dev/null | |||
| @@ -1,212 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | import re | ||
| 8 | import shutil | ||
| 9 | from oe.rootfs import Rootfs | ||
| 10 | from oe.manifest import Manifest | ||
| 11 | from oe.utils import execute_pre_post_process | ||
| 12 | from oe.package_manager.deb.manifest import PkgManifest | ||
| 13 | from oe.package_manager.deb import DpkgPM | ||
| 14 | |||
| 15 | class DpkgOpkgRootfs(Rootfs): | ||
| 16 | def __init__(self, d, progress_reporter=None, logcatcher=None): | ||
| 17 | super(DpkgOpkgRootfs, self).__init__(d, progress_reporter, logcatcher) | ||
| 18 | |||
| 19 | def _get_pkgs_postinsts(self, status_file): | ||
| 20 | def _get_pkg_depends_list(pkg_depends): | ||
| 21 | pkg_depends_list = [] | ||
| 22 | # filter version requirements like libc (>= 1.1) | ||
| 23 | for dep in pkg_depends.split(', '): | ||
| 24 | m_dep = re.match(r"^(.*) \(.*\)$", dep) | ||
| 25 | if m_dep: | ||
| 26 | dep = m_dep.group(1) | ||
| 27 | pkg_depends_list.append(dep) | ||
| 28 | |||
| 29 | return pkg_depends_list | ||
| 30 | |||
| 31 | pkgs = {} | ||
| 32 | pkg_name = "" | ||
| 33 | pkg_status_match = False | ||
| 34 | pkg_depends = "" | ||
| 35 | |||
| 36 | with open(status_file) as status: | ||
| 37 | data = status.read() | ||
| 38 | status.close() | ||
| 39 | for line in data.split('\n'): | ||
| 40 | m_pkg = re.match(r"^Package: (.*)", line) | ||
| 41 | m_status = re.match(r"^Status:.*unpacked", line) | ||
| 42 | m_depends = re.match(r"^Depends: (.*)", line) | ||
| 43 | |||
| 44 | #Only one of m_pkg, m_status or m_depends is not None at a time | ||
| 45 | #If m_pkg is not None, we started a new package | ||
| 46 | if m_pkg is not None: | ||
| 47 | #Get Package name | ||
| 48 | pkg_name = m_pkg.group(1) | ||
| 49 | #Make sure we reset other variables | ||
| 50 | pkg_status_match = False | ||
| 51 | pkg_depends = "" | ||
| 52 | elif m_status is not None: | ||
| 53 | #New status matched | ||
| 54 | pkg_status_match = True | ||
| 55 | elif m_depends is not None: | ||
| 56 | #New depends matched | ||
| 57 | pkg_depends = m_depends.group(1) | ||
| 58 | else: | ||
| 59 | pass | ||
| 60 | |||
| 61 | #Now check if we can process package depends and postinst | ||
| 62 | if "" != pkg_name and pkg_status_match: | ||
| 63 | pkgs[pkg_name] = _get_pkg_depends_list(pkg_depends) | ||
| 64 | else: | ||
| 65 | #Not enough information | ||
| 66 | pass | ||
| 67 | |||
| 68 | # remove package dependencies not in postinsts | ||
| 69 | pkg_names = list(pkgs.keys()) | ||
| 70 | for pkg_name in pkg_names: | ||
| 71 | deps = pkgs[pkg_name][:] | ||
| 72 | |||
| 73 | for d in deps: | ||
| 74 | if d not in pkg_names: | ||
| 75 | pkgs[pkg_name].remove(d) | ||
| 76 | |||
| 77 | return pkgs | ||
| 78 | |||
| 79 | def _get_delayed_postinsts_common(self, status_file): | ||
| 80 | def _dep_resolve(graph, node, resolved, seen): | ||
| 81 | seen.append(node) | ||
| 82 | |||
| 83 | for edge in graph[node]: | ||
| 84 | if edge not in resolved: | ||
| 85 | if edge in seen: | ||
| 86 | raise RuntimeError("Packages %s and %s have " \ | ||
| 87 | "a circular dependency in postinsts scripts." \ | ||
| 88 | % (node, edge)) | ||
| 89 | _dep_resolve(graph, edge, resolved, seen) | ||
| 90 | |||
| 91 | resolved.append(node) | ||
| 92 | |||
| 93 | pkg_list = [] | ||
| 94 | |||
| 95 | pkgs = None | ||
| 96 | if not self.d.getVar('PACKAGE_INSTALL').strip(): | ||
| 97 | bb.note("Building empty image") | ||
| 98 | else: | ||
| 99 | pkgs = self._get_pkgs_postinsts(status_file) | ||
| 100 | if pkgs: | ||
| 101 | root = "__packagegroup_postinst__" | ||
| 102 | pkgs[root] = list(pkgs.keys()) | ||
| 103 | _dep_resolve(pkgs, root, pkg_list, []) | ||
| 104 | pkg_list.remove(root) | ||
| 105 | |||
| 106 | if len(pkg_list) == 0: | ||
| 107 | return None | ||
| 108 | |||
| 109 | return pkg_list | ||
| 110 | |||
| 111 | def _save_postinsts_common(self, dst_postinst_dir, src_postinst_dir): | ||
| 112 | if bb.utils.contains("IMAGE_FEATURES", "package-management", | ||
| 113 | True, False, self.d): | ||
| 114 | return | ||
| 115 | num = 0 | ||
| 116 | for p in self._get_delayed_postinsts(): | ||
| 117 | bb.utils.mkdirhier(dst_postinst_dir) | ||
| 118 | |||
| 119 | if os.path.exists(os.path.join(src_postinst_dir, p + ".postinst")): | ||
| 120 | shutil.copy(os.path.join(src_postinst_dir, p + ".postinst"), | ||
| 121 | os.path.join(dst_postinst_dir, "%03d-%s" % (num, p))) | ||
| 122 | |||
| 123 | num += 1 | ||
| 124 | |||
| 125 | class PkgRootfs(DpkgOpkgRootfs): | ||
| 126 | def __init__(self, d, manifest_dir, progress_reporter=None, logcatcher=None): | ||
| 127 | super(PkgRootfs, self).__init__(d, progress_reporter, logcatcher) | ||
| 128 | self.log_check_regex = '^E:' | ||
| 129 | self.log_check_expected_regexes = \ | ||
| 130 | [ | ||
| 131 | "^E: Unmet dependencies." | ||
| 132 | ] | ||
| 133 | |||
| 134 | bb.utils.remove(self.image_rootfs, True) | ||
| 135 | bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS'), True) | ||
| 136 | self.manifest = PkgManifest(d, manifest_dir) | ||
| 137 | self.pm = DpkgPM(d, d.getVar('IMAGE_ROOTFS'), | ||
| 138 | d.getVar('PACKAGE_ARCHS'), | ||
| 139 | d.getVar('DPKG_ARCH')) | ||
| 140 | |||
| 141 | |||
| 142 | def _create(self): | ||
| 143 | pkgs_to_install = self.manifest.parse_initial_manifest() | ||
| 144 | deb_pre_process_cmds = self.d.getVar('DEB_PREPROCESS_COMMANDS') | ||
| 145 | deb_post_process_cmds = self.d.getVar('DEB_POSTPROCESS_COMMANDS') | ||
| 146 | |||
| 147 | alt_dir = self.d.expand("${IMAGE_ROOTFS}/var/lib/dpkg/alternatives") | ||
| 148 | bb.utils.mkdirhier(alt_dir) | ||
| 149 | |||
| 150 | # update PM index files | ||
| 151 | self.pm.write_index() | ||
| 152 | |||
| 153 | execute_pre_post_process(self.d, deb_pre_process_cmds) | ||
| 154 | |||
| 155 | if self.progress_reporter: | ||
| 156 | self.progress_reporter.next_stage() | ||
| 157 | # Don't support incremental, so skip that | ||
| 158 | self.progress_reporter.next_stage() | ||
| 159 | |||
| 160 | self.pm.update() | ||
| 161 | |||
| 162 | if self.progress_reporter: | ||
| 163 | self.progress_reporter.next_stage() | ||
| 164 | |||
| 165 | for pkg_type in self.install_order: | ||
| 166 | if pkg_type in pkgs_to_install: | ||
| 167 | self.pm.install(pkgs_to_install[pkg_type], | ||
| 168 | [False, True][pkg_type == Manifest.PKG_TYPE_ATTEMPT_ONLY]) | ||
| 169 | self.pm.fix_broken_dependencies() | ||
| 170 | |||
| 171 | if self.progress_reporter: | ||
| 172 | # Don't support attemptonly, so skip that | ||
| 173 | self.progress_reporter.next_stage() | ||
| 174 | self.progress_reporter.next_stage() | ||
| 175 | |||
| 176 | self.pm.install_complementary() | ||
| 177 | |||
| 178 | if self.progress_reporter: | ||
| 179 | self.progress_reporter.next_stage() | ||
| 180 | |||
| 181 | self._setup_dbg_rootfs(['/var/lib/dpkg']) | ||
| 182 | |||
| 183 | self.pm.fix_broken_dependencies() | ||
| 184 | |||
| 185 | self.pm.mark_packages("installed") | ||
| 186 | |||
| 187 | self.pm.run_pre_post_installs() | ||
| 188 | |||
| 189 | execute_pre_post_process(self.d, deb_post_process_cmds) | ||
| 190 | |||
| 191 | if self.progress_reporter: | ||
| 192 | self.progress_reporter.next_stage() | ||
| 193 | |||
| 194 | @staticmethod | ||
| 195 | def _depends_list(): | ||
| 196 | return ['DEPLOY_DIR_DEB', 'DEB_SDK_ARCH', 'APTCONF_TARGET', 'APT_ARGS', 'DPKG_ARCH', 'DEB_PREPROCESS_COMMANDS', 'DEB_POSTPROCESS_COMMANDS'] | ||
| 197 | |||
| 198 | def _get_delayed_postinsts(self): | ||
| 199 | status_file = self.image_rootfs + "/var/lib/dpkg/status" | ||
| 200 | return self._get_delayed_postinsts_common(status_file) | ||
| 201 | |||
| 202 | def _save_postinsts(self): | ||
| 203 | dst_postinst_dir = self.d.expand("${IMAGE_ROOTFS}${sysconfdir}/deb-postinsts") | ||
| 204 | src_postinst_dir = self.d.expand("${IMAGE_ROOTFS}/var/lib/dpkg/info") | ||
| 205 | return self._save_postinsts_common(dst_postinst_dir, src_postinst_dir) | ||
| 206 | |||
| 207 | def _log_check(self): | ||
| 208 | self._log_check_warn() | ||
| 209 | self._log_check_error() | ||
| 210 | |||
| 211 | def _cleanup(self): | ||
| 212 | pass | ||
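The postinst ordering in _get_delayed_postinsts_common() above is a depth-first dependency resolution; a self-contained sketch of the same recursion with an invented package graph:

    # Standalone version of the _dep_resolve helper, with a made-up graph
    def dep_resolve(graph, node, resolved, seen):
        seen.append(node)
        for edge in graph[node]:
            if edge not in resolved:
                if edge in seen:
                    raise RuntimeError("circular dependency between %s and %s" % (node, edge))
                dep_resolve(graph, edge, resolved, seen)
        resolved.append(node)

    graph = {"__root__": ["a", "b"], "a": ["b"], "b": []}
    order = []
    dep_resolve(graph, "__root__", order, [])
    order.remove("__root__")
    print(order)  # ['b', 'a'] -- dependencies are ordered before their dependents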
diff --git a/meta/lib/oe/package_manager/deb/sdk.py b/meta/lib/oe/package_manager/deb/sdk.py deleted file mode 100644 index 6f3005053e..0000000000 --- a/meta/lib/oe/package_manager/deb/sdk.py +++ /dev/null | |||
| @@ -1,107 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | import glob | ||
| 8 | import shutil | ||
| 9 | from oe.utils import execute_pre_post_process | ||
| 10 | from oe.sdk import Sdk | ||
| 11 | from oe.manifest import Manifest | ||
| 12 | from oe.package_manager.deb import DpkgPM | ||
| 13 | from oe.package_manager.deb.manifest import PkgManifest | ||
| 14 | |||
| 15 | class PkgSdk(Sdk): | ||
| 16 | def __init__(self, d, manifest_dir=None): | ||
| 17 | super(PkgSdk, self).__init__(d, manifest_dir) | ||
| 18 | |||
| 19 | self.target_conf_dir = os.path.join(self.d.getVar("APTCONF_TARGET"), "apt") | ||
| 20 | self.host_conf_dir = os.path.join(self.d.getVar("APTCONF_TARGET"), "apt-sdk") | ||
| 21 | |||
| 22 | |||
| 23 | self.target_manifest = PkgManifest(d, self.manifest_dir, | ||
| 24 | Manifest.MANIFEST_TYPE_SDK_TARGET) | ||
| 25 | self.host_manifest = PkgManifest(d, self.manifest_dir, | ||
| 26 | Manifest.MANIFEST_TYPE_SDK_HOST) | ||
| 27 | |||
| 28 | deb_repo_workdir = "oe-sdk-repo" | ||
| 29 | if "sdk_ext" in d.getVar("BB_RUNTASK"): | ||
| 30 | deb_repo_workdir = "oe-sdk-ext-repo" | ||
| 31 | |||
| 32 | self.target_pm = DpkgPM(d, self.sdk_target_sysroot, | ||
| 33 | self.d.getVar("PACKAGE_ARCHS"), | ||
| 34 | self.d.getVar("DPKG_ARCH"), | ||
| 35 | self.target_conf_dir, | ||
| 36 | deb_repo_workdir=deb_repo_workdir) | ||
| 37 | |||
| 38 | self.host_pm = DpkgPM(d, self.sdk_host_sysroot, | ||
| 39 | self.d.getVar("SDK_PACKAGE_ARCHS"), | ||
| 40 | self.d.getVar("DEB_SDK_ARCH"), | ||
| 41 | self.host_conf_dir, | ||
| 42 | deb_repo_workdir=deb_repo_workdir) | ||
| 43 | |||
| 44 | def _copy_apt_dir_to(self, dst_dir): | ||
| 45 | staging_etcdir_native = self.d.getVar("STAGING_ETCDIR_NATIVE") | ||
| 46 | |||
| 47 | self.remove(dst_dir, True) | ||
| 48 | |||
| 49 | shutil.copytree(os.path.join(staging_etcdir_native, "apt"), dst_dir) | ||
| 50 | |||
| 51 | def _populate_sysroot(self, pm, manifest): | ||
| 52 | pkgs_to_install = manifest.parse_initial_manifest() | ||
| 53 | |||
| 54 | pm.write_index() | ||
| 55 | pm.update() | ||
| 56 | |||
| 57 | for pkg_type in self.install_order: | ||
| 58 | if pkg_type in pkgs_to_install: | ||
| 59 | pm.install(pkgs_to_install[pkg_type], | ||
| 60 | [False, True][pkg_type == Manifest.PKG_TYPE_ATTEMPT_ONLY]) | ||
| 61 | |||
| 62 | def _populate(self): | ||
| 63 | execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_PRE_TARGET_COMMAND")) | ||
| 64 | |||
| 65 | bb.note("Installing TARGET packages") | ||
| 66 | self._populate_sysroot(self.target_pm, self.target_manifest) | ||
| 67 | |||
| 68 | self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY')) | ||
| 69 | |||
| 70 | self.target_pm.run_pre_post_installs() | ||
| 71 | |||
| 72 | env_bkp = os.environ.copy() | ||
| 73 | os.environ['PATH'] = self.d.expand("${COREBASE}/scripts/nativesdk-intercept") + \ | ||
| 74 | os.pathsep + os.environ["PATH"] | ||
| 75 | |||
| 76 | self.target_pm.run_intercepts(populate_sdk='target') | ||
| 77 | os.environ.update(env_bkp) | ||
| 78 | |||
| 79 | execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND")) | ||
| 80 | |||
| 81 | self._copy_apt_dir_to(os.path.join(self.sdk_target_sysroot, "etc", "apt")) | ||
| 82 | |||
| 83 | if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): | ||
| 84 | self.target_pm.remove_packaging_data() | ||
| 85 | |||
| 86 | bb.note("Installing NATIVESDK packages") | ||
| 87 | self._populate_sysroot(self.host_pm, self.host_manifest) | ||
| 88 | self.install_locales(self.host_pm) | ||
| 89 | |||
| 90 | self.host_pm.run_pre_post_installs() | ||
| 91 | |||
| 92 | self.host_pm.run_intercepts(populate_sdk='host') | ||
| 93 | |||
| 94 | execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND")) | ||
| 95 | |||
| 96 | self._copy_apt_dir_to(os.path.join(self.sdk_output, self.sdk_native_path, | ||
| 97 | "etc", "apt")) | ||
| 98 | |||
| 99 | if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): | ||
| 100 | self.host_pm.remove_packaging_data() | ||
| 101 | |||
| 102 | native_dpkg_state_dir = os.path.join(self.sdk_output, self.sdk_native_path, | ||
| 103 | "var", "lib", "dpkg") | ||
| 104 | self.mkdirhier(native_dpkg_state_dir) | ||
| 105 | for f in glob.glob(os.path.join(self.sdk_output, "var", "lib", "dpkg", "*")): | ||
| 106 | self.movefile(f, native_dpkg_state_dir) | ||
| 107 | self.remove(os.path.join(self.sdk_output, "var"), True) | ||
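The install loop in _populate_sysroot() selects attempt_only with a list-indexing idiom; a small sketch of what it evaluates to (the package type tags and names below are placeholders, not the real Manifest constants):

    # Placeholder stand-in for Manifest.PKG_TYPE_ATTEMPT_ONLY
    PKG_TYPE_ATTEMPT_ONLY = "aop"

    for pkg_type, pkgs in [("mip", ["base-files"]), ("aop", ["strace"])]:
        # The deleted code's idiom ...
        attempt_only = [False, True][pkg_type == PKG_TYPE_ATTEMPT_ONLY]
        # ... is equivalent to the plain boolean comparison:
        assert attempt_only == (pkg_type == PKG_TYPE_ATTEMPT_ONLY)
        print(pkg_type, attempt_only)  # mip False / aop True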
diff --git a/meta/lib/oe/package_manager/ipk/__init__.py b/meta/lib/oe/package_manager/ipk/__init__.py deleted file mode 100644 index 4794f31f88..0000000000 --- a/meta/lib/oe/package_manager/ipk/__init__.py +++ /dev/null | |||
| @@ -1,438 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | import re | ||
| 8 | import shutil | ||
| 9 | import subprocess | ||
| 10 | from oe.package_manager import * | ||
| 11 | from oe.package_manager.common_deb_ipk import OpkgDpkgPM | ||
| 12 | |||
| 13 | class OpkgIndexer(Indexer): | ||
| 14 | def write_index(self): | ||
| 15 | arch_vars = ["ALL_MULTILIB_PACKAGE_ARCHS", | ||
| 16 | "SDK_PACKAGE_ARCHS", | ||
| 17 | ] | ||
| 18 | |||
| 19 | opkg_index_cmd = bb.utils.which(os.getenv('PATH'), "opkg-make-index") | ||
| 20 | opkg_index_cmd_extra_params = self.d.getVar('OPKG_MAKE_INDEX_EXTRA_PARAMS') or "" | ||
| 21 | if self.d.getVar('PACKAGE_FEED_SIGN') == '1': | ||
| 22 | signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND')) | ||
| 23 | else: | ||
| 24 | signer = None | ||
| 25 | |||
| 26 | if not os.path.exists(os.path.join(self.deploy_dir, "Packages")): | ||
| 27 | open(os.path.join(self.deploy_dir, "Packages"), "w").close() | ||
| 28 | |||
| 29 | index_cmds = set() | ||
| 30 | index_sign_files = set() | ||
| 31 | for arch_var in arch_vars: | ||
| 32 | archs = self.d.getVar(arch_var) | ||
| 33 | if archs is None: | ||
| 34 | continue | ||
| 35 | |||
| 36 | for arch in archs.split(): | ||
| 37 | pkgs_dir = os.path.join(self.deploy_dir, arch) | ||
| 38 | pkgs_file = os.path.join(pkgs_dir, "Packages") | ||
| 39 | |||
| 40 | if not os.path.isdir(pkgs_dir): | ||
| 41 | continue | ||
| 42 | |||
| 43 | if not os.path.exists(pkgs_file): | ||
| 44 | open(pkgs_file, "w").close() | ||
| 45 | |||
| 46 | index_cmds.add('%s --checksum md5 --checksum sha256 -r %s -p %s -m %s %s' % | ||
| 47 | (opkg_index_cmd, pkgs_file, pkgs_file, pkgs_dir, opkg_index_cmd_extra_params)) | ||
| 48 | |||
| 49 | index_sign_files.add(pkgs_file) | ||
| 50 | |||
| 51 | if len(index_cmds) == 0: | ||
| 52 | bb.note("There are no packages in %s!" % self.deploy_dir) | ||
| 53 | return | ||
| 54 | |||
| 55 | oe.utils.multiprocess_launch(create_index, index_cmds, self.d) | ||
| 56 | |||
| 57 | if signer: | ||
| 58 | feed_sig_type = self.d.getVar('PACKAGE_FEED_GPG_SIGNATURE_TYPE') | ||
| 59 | is_ascii_sig = (feed_sig_type.upper() != "BIN") | ||
| 60 | for f in index_sign_files: | ||
| 61 | signer.detach_sign(f, | ||
| 62 | self.d.getVar('PACKAGE_FEED_GPG_NAME'), | ||
| 63 | self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE'), | ||
| 64 | armor=is_ascii_sig) | ||
| 65 | |||
| 66 | class PMPkgsList(PkgsList): | ||
| 67 | def __init__(self, d, rootfs_dir): | ||
| 68 | super(PMPkgsList, self).__init__(d, rootfs_dir) | ||
| 69 | config_file = d.getVar("IPKGCONF_TARGET") | ||
| 70 | |||
| 71 | self.opkg_cmd = bb.utils.which(os.getenv('PATH'), "opkg") | ||
| 72 | self.opkg_args = "-f %s -o %s " % (config_file, rootfs_dir) | ||
| 73 | self.opkg_args += self.d.getVar("OPKG_ARGS") | ||
| 74 | |||
| 75 | def list_pkgs(self, format=None): | ||
| 76 | cmd = "%s %s status" % (self.opkg_cmd, self.opkg_args) | ||
| 77 | |||
| 78 | # opkg returns success even when it printed some | ||
| 79 | # "Collected errors:" report to stderr. Mixing stderr into | ||
| 80 | # stdout then leads to random failures later on when | ||
| 81 | # parsing the output. To avoid this we need to collect both | ||
| 82 | # output streams separately and check for empty stderr. | ||
| 83 | p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) | ||
| 84 | cmd_output, cmd_stderr = p.communicate() | ||
| 85 | cmd_output = cmd_output.decode("utf-8") | ||
| 86 | cmd_stderr = cmd_stderr.decode("utf-8") | ||
| 87 | if p.returncode or cmd_stderr: | ||
| 88 | bb.fatal("Cannot get the installed packages list. Command '%s' " | ||
| 89 | "returned %d and stderr:\n%s" % (cmd, p.returncode, cmd_stderr)) | ||
| 90 | |||
| 91 | return opkg_query(cmd_output) | ||
| 92 | |||
| 93 | |||
| 94 | class OpkgPM(OpkgDpkgPM): | ||
| 95 | def __init__(self, d, target_rootfs, config_file, archs, task_name='target', ipk_repo_workdir="oe-rootfs-repo", filterbydependencies=True, prepare_index=True): | ||
| 96 | super(OpkgPM, self).__init__(d, target_rootfs) | ||
| 97 | |||
| 98 | self.config_file = config_file | ||
| 99 | self.pkg_archs = archs | ||
| 100 | self.task_name = task_name | ||
| 101 | |||
| 102 | self.deploy_dir = oe.path.join(self.d.getVar('WORKDIR'), ipk_repo_workdir) | ||
| 103 | self.deploy_lock_file = os.path.join(self.deploy_dir, "deploy.lock") | ||
| 104 | self.opkg_cmd = bb.utils.which(os.getenv('PATH'), "opkg") | ||
| 105 | self.opkg_args = "--volatile-cache -f %s -t %s -o %s " % (self.config_file, self.d.expand('${T}/ipktemp/') ,target_rootfs) | ||
| 106 | self.opkg_args += self.d.getVar("OPKG_ARGS") | ||
| 107 | |||
| 108 | if prepare_index: | ||
| 109 | create_packages_dir(self.d, self.deploy_dir, d.getVar("DEPLOY_DIR_IPK"), "package_write_ipk", filterbydependencies) | ||
| 110 | |||
| 111 | self.opkg_dir = oe.path.join(target_rootfs, self.d.getVar('OPKGLIBDIR'), "opkg") | ||
| 112 | bb.utils.mkdirhier(self.opkg_dir) | ||
| 113 | |||
| 114 | self.saved_opkg_dir = self.d.expand('${T}/saved/%s' % self.task_name) | ||
| 115 | if not os.path.exists(self.d.expand('${T}/saved')): | ||
| 116 | bb.utils.mkdirhier(self.d.expand('${T}/saved')) | ||
| 117 | |||
| 118 | self.from_feeds = (self.d.getVar('BUILD_IMAGES_FROM_FEEDS') or "") == "1" | ||
| 119 | if self.from_feeds: | ||
| 120 | self._create_custom_config() | ||
| 121 | else: | ||
| 122 | self._create_config() | ||
| 123 | |||
| 124 | self.indexer = OpkgIndexer(self.d, self.deploy_dir) | ||
| 125 | |||
| 126 | def mark_packages(self, status_tag, packages=None): | ||
| 127 | """ | ||
| 128 | This function will change a package's status in /var/lib/opkg/status file. | ||
| 129 | If 'packages' is None then the new_status will be applied to all | ||
| 130 | packages | ||
| 131 | """ | ||
| 132 | status_file = os.path.join(self.opkg_dir, "status") | ||
| 133 | |||
| 134 | with open(status_file, "r") as sf: | ||
| 135 | with open(status_file + ".tmp", "w+") as tmp_sf: | ||
| 136 | if packages is None: | ||
| 137 | tmp_sf.write(re.sub(r"Package: (.*?)\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)", | ||
| 138 | r"Package: \1\n\2Status: \3%s" % status_tag, | ||
| 139 | sf.read())) | ||
| 140 | else: | ||
| 141 | if type(packages).__name__ != "list": | ||
| 142 | raise TypeError("'packages' should be a list object") | ||
| 143 | |||
| 144 | status = sf.read() | ||
| 145 | for pkg in packages: | ||
| 146 | status = re.sub(r"Package: %s\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)" % pkg, | ||
| 147 | r"Package: %s\n\1Status: \2%s" % (pkg, status_tag), | ||
| 148 | status) | ||
| 149 | |||
| 150 | tmp_sf.write(status) | ||
| 151 | |||
| 152 | bb.utils.rename(status_file + ".tmp", status_file) | ||
| 153 | |||
| 154 | def _create_custom_config(self): | ||
| 155 | bb.note("Building from feeds activated!") | ||
| 156 | |||
| 157 | with open(self.config_file, "w+") as config_file: | ||
| 158 | priority = 1 | ||
| 159 | for arch in self.pkg_archs.split(): | ||
| 160 | config_file.write("arch %s %d\n" % (arch, priority)) | ||
| 161 | priority += 5 | ||
| 162 | |||
| 163 | for line in (self.d.getVar('IPK_FEED_URIS') or "").split(): | ||
| 164 | feed_match = re.match(r"^[ \t]*(.*)##([^ \t]*)[ \t]*$", line) | ||
| 165 | |||
| 166 | if feed_match is not None: | ||
| 167 | feed_name = feed_match.group(1) | ||
| 168 | feed_uri = feed_match.group(2) | ||
| 169 | |||
| 170 | bb.note("Add %s feed with URL %s" % (feed_name, feed_uri)) | ||
| 171 | |||
| 172 | config_file.write("src/gz %s %s\n" % (feed_name, feed_uri)) | ||
| 173 | |||
| 174 | """ | ||
| 175 | Allow using the package deploy directory contents as a quick devel-testing | ||
| 176 | feed. This creates individual feed configs for each arch subdir of those | ||
| 177 | specified as compatible for the current machine. | ||
| 178 | NOTE: Development-helper feature, NOT a full-fledged feed. | ||
| 179 | """ | ||
| 180 | if (self.d.getVar('FEED_DEPLOYDIR_BASE_URI') or "") != "": | ||
| 181 | for arch in self.pkg_archs.split(): | ||
| 182 | cfg_file_name = oe.path.join(self.target_rootfs, | ||
| 183 | self.d.getVar("sysconfdir"), | ||
| 184 | "opkg", | ||
| 185 | "local-%s-feed.conf" % arch) | ||
| 186 | |||
| 187 | with open(cfg_file_name, "w+") as cfg_file: | ||
| 188 | cfg_file.write("src/gz local-%s %s/%s" % | ||
| 189 | (arch, | ||
| 190 | self.d.getVar('FEED_DEPLOYDIR_BASE_URI'), | ||
| 191 | arch)) | ||
| 192 | |||
| 193 | if self.d.getVar('OPKGLIBDIR') != '/var/lib': | ||
| 194 | # There is no command line option for this anymore, we need to add | ||
| 195 | # info_dir and status_file to config file, if OPKGLIBDIR doesn't have | ||
| 196 | # the default value of "/var/lib" as defined in opkg: | ||
| 197 | # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_LISTS_DIR VARDIR "/lib/opkg/lists" | ||
| 198 | # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_INFO_DIR VARDIR "/lib/opkg/info" | ||
| 199 | # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_STATUS_FILE VARDIR "/lib/opkg/status" | ||
| 200 | cfg_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'info')) | ||
| 201 | cfg_file.write("option lists_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'lists')) | ||
| 202 | cfg_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'status')) | ||
| 203 | |||
| 204 | |||
| 205 | def _create_config(self): | ||
| 206 | with open(self.config_file, "w+") as config_file: | ||
| 207 | priority = 1 | ||
| 208 | for arch in self.pkg_archs.split(): | ||
| 209 | config_file.write("arch %s %d\n" % (arch, priority)) | ||
| 210 | priority += 5 | ||
| 211 | |||
| 212 | config_file.write("src oe file:%s\n" % self.deploy_dir) | ||
| 213 | |||
| 214 | for arch in self.pkg_archs.split(): | ||
| 215 | pkgs_dir = os.path.join(self.deploy_dir, arch) | ||
| 216 | if os.path.isdir(pkgs_dir): | ||
| 217 | config_file.write("src oe-%s file:%s\n" % | ||
| 218 | (arch, pkgs_dir)) | ||
| 219 | |||
| 220 | if self.d.getVar('OPKGLIBDIR') != '/var/lib': | ||
| 221 | # There is no command line option for this anymore, we need to add | ||
| 222 | # info_dir and status_file to config file, if OPKGLIBDIR doesn't have | ||
| 223 | # the default value of "/var/lib" as defined in opkg: | ||
| 224 | # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_LISTS_DIR VARDIR "/lib/opkg/lists" | ||
| 225 | # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_INFO_DIR VARDIR "/lib/opkg/info" | ||
| 226 | # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_STATUS_FILE VARDIR "/lib/opkg/status" | ||
| 227 | config_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'info')) | ||
| 228 | config_file.write("option lists_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'lists')) | ||
| 229 | config_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'status')) | ||
| 230 | |||
| 231 | def insert_feeds_uris(self, feed_uris, feed_base_paths, feed_archs): | ||
| 232 | if feed_uris == "": | ||
| 233 | return | ||
| 234 | |||
| 235 | rootfs_config = os.path.join('%s/etc/opkg/base-feeds.conf' | ||
| 236 | % self.target_rootfs) | ||
| 237 | |||
| 238 | os.makedirs('%s/etc/opkg' % self.target_rootfs, exist_ok=True) | ||
| 239 | |||
| 240 | feed_uris = self.construct_uris(feed_uris.split(), feed_base_paths.split()) | ||
| 241 | archs = self.pkg_archs.split() if feed_archs is None else feed_archs.split() | ||
| 242 | |||
| 243 | with open(rootfs_config, "w+") as config_file: | ||
| 244 | uri_iterator = 0 | ||
| 245 | for uri in feed_uris: | ||
| 246 | if archs: | ||
| 247 | for arch in archs: | ||
| 248 | if (feed_archs is None) and (not os.path.exists(oe.path.join(self.deploy_dir, arch))): | ||
| 249 | continue | ||
| 250 | bb.note('Adding opkg feed url-%s-%d (%s)' % | ||
| 251 | (arch, uri_iterator, uri)) | ||
| 252 | config_file.write("src/gz uri-%s-%d %s/%s\n" % | ||
| 253 | (arch, uri_iterator, uri, arch)) | ||
| 254 | else: | ||
| 255 | bb.note('Adding opkg feed url-%d (%s)' % | ||
| 256 | (uri_iterator, uri)) | ||
| 257 | config_file.write("src/gz uri-%d %s\n" % | ||
| 258 | (uri_iterator, uri)) | ||
| 259 | |||
| 260 | uri_iterator += 1 | ||
| 261 | |||
| 262 | def update(self): | ||
| 263 | self.deploy_dir_lock() | ||
| 264 | |||
| 265 | cmd = "%s %s update" % (self.opkg_cmd, self.opkg_args) | ||
| 266 | |||
| 267 | try: | ||
| 268 | subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT) | ||
| 269 | except subprocess.CalledProcessError as e: | ||
| 270 | self.deploy_dir_unlock() | ||
| 271 | bb.fatal("Unable to update the package index files. Command '%s' " | ||
| 272 | "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8"))) | ||
| 273 | |||
| 274 | self.deploy_dir_unlock() | ||
| 275 | |||
| 276 | def install(self, pkgs, attempt_only=False, hard_depends_only=False): | ||
| 277 | if not pkgs: | ||
| 278 | return | ||
| 279 | |||
| 280 | cmd = "%s %s" % (self.opkg_cmd, self.opkg_args) | ||
| 281 | for exclude in (self.d.getVar("PACKAGE_EXCLUDE") or "").split(): | ||
| 282 | cmd += " --add-exclude %s" % exclude | ||
| 283 | for bad_recommendation in (self.d.getVar("BAD_RECOMMENDATIONS") or "").split(): | ||
| 284 | cmd += " --add-ignore-recommends %s" % bad_recommendation | ||
| 285 | if hard_depends_only: | ||
| 286 | cmd += " --no-install-recommends" | ||
| 287 | cmd += " install " | ||
| 288 | cmd += " ".join(pkgs) | ||
| 289 | |||
| 290 | os.environ['D'] = self.target_rootfs | ||
| 291 | os.environ['OFFLINE_ROOT'] = self.target_rootfs | ||
| 292 | os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs | ||
| 293 | os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs | ||
| 294 | os.environ['INTERCEPT_DIR'] = self.intercepts_dir | ||
| 295 | os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE') | ||
| 296 | |||
| 297 | try: | ||
| 298 | bb.note("Installing the following packages: %s" % ' '.join(pkgs)) | ||
| 299 | bb.note(cmd) | ||
| 300 | output = subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT).decode("utf-8") | ||
| 301 | bb.note(output) | ||
| 302 | failed_pkgs = [] | ||
| 303 | for line in output.split('\n'): | ||
| 304 | if line.endswith("configuration required on target."): | ||
| 305 | bb.warn(line) | ||
| 306 | failed_pkgs.append(line.split(".")[0]) | ||
| 307 | if failed_pkgs: | ||
| 308 | failed_postinsts_abort(failed_pkgs, self.d.expand("${T}/log.do_${BB_CURRENTTASK}")) | ||
| 309 | except subprocess.CalledProcessError as e: | ||
| 310 | e_output = e.output.decode("utf-8") | ||
| 311 | extra_info = "" | ||
| 312 | unmatched_pkgs = [] | ||
| 313 | for e_line in e_output.split('\n'): | ||
| 314 | if "error: opkg_solver_install: No candidates to install" in e_line: | ||
| 315 | unmatched_pkg = re.search(r"error: opkg_solver_install: No candidates to install ([a-z0-9+\-\._]+)", e_line).group(1) | ||
| 316 | unmatched_pkgs.append(unmatched_pkg) | ||
| 317 | elif "error: opkg_prepare_url_for_install: Couldn't find anything to satisfy" in e_line: | ||
| 318 | unmatched_pkg = re.search(r"error: opkg_prepare_url_for_install: Couldn't find anything to satisfy '([a-z0-9+\-\._]+)'", e_line).group(1) | ||
| 319 | unmatched_pkgs.append(unmatched_pkg) | ||
| 320 | for pkg in unmatched_pkgs: | ||
| 321 | extra_info += self.get_missing_pkg_reason(pkg) | ||
| 322 | (bb.fatal, bb.warn)[attempt_only]("Unable to install packages. " | ||
| 323 | "Command '%s' returned %d:\n%s%s" % | ||
| 324 | (cmd, e.returncode, e_output, extra_info)) | ||
| 325 | |||
| 326 | def remove(self, pkgs, with_dependencies=True): | ||
| 327 | if not pkgs: | ||
| 328 | return | ||
| 329 | |||
| 330 | if with_dependencies: | ||
| 331 | cmd = "%s %s --force-remove --force-removal-of-dependent-packages remove %s" % \ | ||
| 332 | (self.opkg_cmd, self.opkg_args, ' '.join(pkgs)) | ||
| 333 | else: | ||
| 334 | cmd = "%s %s --force-depends remove %s" % \ | ||
| 335 | (self.opkg_cmd, self.opkg_args, ' '.join(pkgs)) | ||
| 336 | |||
| 337 | try: | ||
| 338 | bb.note(cmd) | ||
| 339 | output = subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT).decode("utf-8") | ||
| 340 | bb.note(output) | ||
| 341 | except subprocess.CalledProcessError as e: | ||
| 342 | bb.fatal("Unable to remove packages. Command '%s' " | ||
| 343 | "returned %d:\n%s" % (e.cmd, e.returncode, e.output.decode("utf-8"))) | ||
| 344 | |||
| 345 | def write_index(self): | ||
| 346 | self.deploy_dir_lock() | ||
| 347 | |||
| 348 | result = self.indexer.write_index() | ||
| 349 | |||
| 350 | self.deploy_dir_unlock() | ||
| 351 | |||
| 352 | if result is not None: | ||
| 353 | bb.fatal(result) | ||
| 354 | |||
| 355 | def remove_packaging_data(self): | ||
| 356 | cachedir = oe.path.join(self.target_rootfs, self.d.getVar("localstatedir"), "cache", "opkg") | ||
| 357 | bb.utils.remove(self.opkg_dir, True) | ||
| 358 | bb.utils.remove(cachedir, True) | ||
| 359 | |||
| 360 | def remove_lists(self): | ||
| 361 | if not self.from_feeds: | ||
| 362 | bb.utils.remove(os.path.join(self.opkg_dir, "lists"), True) | ||
| 363 | |||
| 364 | def list_installed(self): | ||
| 365 | return PMPkgsList(self.d, self.target_rootfs).list_pkgs() | ||
| 366 | |||
| 367 | def dummy_install(self, pkgs): | ||
| 368 | """ | ||
| 369 | Dummy-install the given pkgs and return the output log. | ||
| 370 | """ | ||
| 371 | if len(pkgs) == 0: | ||
| 372 | return | ||
| 373 | |||
| 374 | # Create a temp dir as opkg root for the dummy installation | ||
| 375 | temp_rootfs = self.d.expand('${T}/opkg') | ||
| 376 | opkg_lib_dir = self.d.getVar('OPKGLIBDIR') | ||
| 377 | if opkg_lib_dir[0] == "/": | ||
| 378 | opkg_lib_dir = opkg_lib_dir[1:] | ||
| 379 | temp_opkg_dir = os.path.join(temp_rootfs, opkg_lib_dir, 'opkg') | ||
| 380 | bb.utils.mkdirhier(temp_opkg_dir) | ||
| 381 | |||
| 382 | opkg_args = "-f %s -o %s " % (self.config_file, temp_rootfs) | ||
| 383 | opkg_args += self.d.getVar("OPKG_ARGS") | ||
| 384 | |||
| 385 | cmd = "%s %s update" % (self.opkg_cmd, opkg_args) | ||
| 386 | try: | ||
| 387 | subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) | ||
| 388 | except subprocess.CalledProcessError as e: | ||
| 389 | bb.fatal("Unable to update. Command '%s' " | ||
| 390 | "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8"))) | ||
| 391 | |||
| 392 | # Dummy installation | ||
| 393 | cmd = "%s %s --noaction install %s " % (self.opkg_cmd, | ||
| 394 | opkg_args, | ||
| 395 | ' '.join(pkgs)) | ||
| 396 | proc = subprocess.run(cmd, capture_output=True, encoding="utf-8", shell=True) | ||
| 397 | if proc.returncode: | ||
| 398 | bb.fatal("Unable to dummy install packages. Command '%s' " | ||
| 399 | "returned %d:\n%s" % (cmd, proc.returncode, proc.stderr)) | ||
| 400 | elif proc.stderr: | ||
| 401 | bb.note("Command '%s' returned stderr: %s" % (cmd, proc.stderr)) | ||
| 402 | |||
| 403 | bb.utils.remove(temp_rootfs, True) | ||
| 404 | |||
| 405 | return proc.stdout | ||
| 406 | |||
| 407 | def backup_packaging_data(self): | ||
| 408 | # Save the opkglib for incremental ipk image generation | ||
| 409 | if os.path.exists(self.saved_opkg_dir): | ||
| 410 | bb.utils.remove(self.saved_opkg_dir, True) | ||
| 411 | shutil.copytree(self.opkg_dir, | ||
| 412 | self.saved_opkg_dir, | ||
| 413 | symlinks=True) | ||
| 414 | |||
| 415 | def recover_packaging_data(self): | ||
| 416 | # Move the opkglib back | ||
| 417 | if os.path.exists(self.saved_opkg_dir): | ||
| 418 | if os.path.exists(self.opkg_dir): | ||
| 419 | bb.utils.remove(self.opkg_dir, True) | ||
| 420 | |||
| 421 | bb.note('Recover packaging data') | ||
| 422 | shutil.copytree(self.saved_opkg_dir, | ||
| 423 | self.opkg_dir, | ||
| 424 | symlinks=True) | ||
| 425 | |||
| 426 | def package_info(self, pkg): | ||
| 427 | """ | ||
| 428 | Returns a dictionary with the package info. | ||
| 429 | """ | ||
| 430 | cmd = "%s %s info %s" % (self.opkg_cmd, self.opkg_args, pkg) | ||
| 431 | pkg_info = self._common_package_info(cmd) | ||
| 432 | |||
| 433 | pkg_arch = pkg_info[pkg]["arch"] | ||
| 434 | pkg_filename = pkg_info[pkg]["filename"] | ||
| 435 | pkg_info[pkg]["filepath"] = \ | ||
| 436 | os.path.join(self.deploy_dir, pkg_arch, pkg_filename) | ||
| 437 | |||
| 438 | return pkg_info | ||
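For reference, a sketch of the kind of opkg configuration OpkgPM._create_config() emits: one prioritized "arch" line per architecture followed by "src" feed lines. The architecture list and deploy path below are placeholders.

    # Placeholder inputs (illustrative only)
    pkg_archs = "all armv7a cortexa9hf-neon"
    deploy_dir = "/path/to/deploy/ipk"

    lines = []
    priority = 1
    for arch in pkg_archs.split():
        lines.append("arch %s %d" % (arch, priority))
        priority += 5
    lines.append("src oe file:%s" % deploy_dir)
    print("\n".join(lines))
    # arch all 1
    # arch armv7a 6
    # arch cortexa9hf-neon 11
    # src oe file:/path/to/deploy/ipk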
diff --git a/meta/lib/oe/package_manager/ipk/manifest.py b/meta/lib/oe/package_manager/ipk/manifest.py deleted file mode 100644 index 3549d7428d..0000000000 --- a/meta/lib/oe/package_manager/ipk/manifest.py +++ /dev/null | |||
| @@ -1,76 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | from oe.manifest import Manifest | ||
| 8 | import re | ||
| 9 | |||
| 10 | class PkgManifest(Manifest): | ||
| 11 | """ | ||
| 12 | Returns a dictionary object with mip and mlp packages. | ||
| 13 | """ | ||
| 14 | def _split_multilib(self, pkg_list): | ||
| 15 | pkgs = dict() | ||
| 16 | |||
| 17 | for pkg in pkg_list.split(): | ||
| 18 | pkg_type = self.PKG_TYPE_MUST_INSTALL | ||
| 19 | |||
| 20 | ml_variants = self.d.getVar('MULTILIB_VARIANTS').split() | ||
| 21 | |||
| 22 | for ml_variant in ml_variants: | ||
| 23 | if pkg.startswith(ml_variant + '-'): | ||
| 24 | pkg_type = self.PKG_TYPE_MULTILIB | ||
| 25 | |||
| 26 | if not pkg_type in pkgs: | ||
| 27 | pkgs[pkg_type] = pkg | ||
| 28 | else: | ||
| 29 | pkgs[pkg_type] += " " + pkg | ||
| 30 | |||
| 31 | return pkgs | ||
| 32 | |||
| 33 | def create_initial(self): | ||
| 34 | pkgs = dict() | ||
| 35 | |||
| 36 | with open(self.initial_manifest, "w+") as manifest: | ||
| 37 | manifest.write(self.initial_manifest_file_header) | ||
| 38 | |||
| 39 | for var in self.var_maps[self.manifest_type]: | ||
| 40 | if var in self.vars_to_split: | ||
| 41 | split_pkgs = self._split_multilib(self.d.getVar(var)) | ||
| 42 | if split_pkgs is not None: | ||
| 43 | pkgs = dict(list(pkgs.items()) + list(split_pkgs.items())) | ||
| 44 | else: | ||
| 45 | pkg_list = self.d.getVar(var) | ||
| 46 | if pkg_list is not None: | ||
| 47 | pkgs[self.var_maps[self.manifest_type][var]] = self.d.getVar(var) | ||
| 48 | |||
| 49 | for pkg_type in sorted(pkgs): | ||
| 50 | for pkg in sorted(pkgs[pkg_type].split()): | ||
| 51 | manifest.write("%s,%s\n" % (pkg_type, pkg)) | ||
| 52 | |||
| 53 | def create_final(self): | ||
| 54 | pass | ||
| 55 | |||
| 56 | def create_full(self, pm): | ||
| 57 | if not os.path.exists(self.initial_manifest): | ||
| 58 | self.create_initial() | ||
| 59 | |||
| 60 | initial_manifest = self.parse_initial_manifest() | ||
| 61 | pkgs_to_install = list() | ||
| 62 | for pkg_type in initial_manifest: | ||
| 63 | pkgs_to_install += initial_manifest[pkg_type] | ||
| 64 | if len(pkgs_to_install) == 0: | ||
| 65 | return | ||
| 66 | |||
| 67 | output = pm.dummy_install(pkgs_to_install) | ||
| 68 | |||
| 69 | with open(self.full_manifest, 'w+') as manifest: | ||
| 70 | pkg_re = re.compile('^Installing ([^ ]+) [^ ].*') | ||
| 71 | for line in set(output.split('\n')): | ||
| 72 | m = pkg_re.match(line) | ||
| 73 | if m: | ||
| 74 | manifest.write(m.group(1) + '\n') | ||
| 75 | |||
| 76 | return | ||
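create_full() extracts package names from opkg's "--noaction install" output with a simple regex; a standalone sketch using invented output lines:

    import re

    # Invented opkg "--noaction install" output (illustrative only)
    output = (
        "Installing busybox (1.36.1) on root\n"
        "Installing base-files (3.0.14) on root\n"
        "Configuring busybox.\n"
    )

    pkg_re = re.compile('^Installing ([^ ]+) [^ ].*')
    pkgs = sorted({m.group(1)
                   for m in (pkg_re.match(line) for line in output.split('\n'))
                   if m})
    print(pkgs)  # ['base-files', 'busybox']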
diff --git a/meta/lib/oe/package_manager/ipk/rootfs.py b/meta/lib/oe/package_manager/ipk/rootfs.py deleted file mode 100644 index ba93eb62ea..0000000000 --- a/meta/lib/oe/package_manager/ipk/rootfs.py +++ /dev/null | |||
| @@ -1,352 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | import re | ||
| 8 | import filecmp | ||
| 9 | import shutil | ||
| 10 | from oe.rootfs import Rootfs | ||
| 11 | from oe.manifest import Manifest | ||
| 12 | from oe.utils import execute_pre_post_process | ||
| 13 | from oe.package_manager.ipk.manifest import PkgManifest | ||
| 14 | from oe.package_manager.ipk import OpkgPM | ||
| 15 | |||
| 16 | class DpkgOpkgRootfs(Rootfs): | ||
| 17 | def __init__(self, d, progress_reporter=None, logcatcher=None): | ||
| 18 | super(DpkgOpkgRootfs, self).__init__(d, progress_reporter, logcatcher) | ||
| 19 | |||
| 20 | def _get_pkgs_postinsts(self, status_file): | ||
| 21 | def _get_pkg_depends_list(pkg_depends): | ||
| 22 | pkg_depends_list = [] | ||
| 23 | # filter version requirements like libc (>= 1.1) | ||
| 24 | for dep in pkg_depends.split(', '): | ||
| 25 | m_dep = re.match(r"^(.*) \(.*\)$", dep) | ||
| 26 | if m_dep: | ||
| 27 | dep = m_dep.group(1) | ||
| 28 | pkg_depends_list.append(dep) | ||
| 29 | |||
| 30 | return pkg_depends_list | ||
| 31 | |||
| 32 | pkgs = {} | ||
| 33 | pkg_name = "" | ||
| 34 | pkg_status_match = False | ||
| 35 | pkg_depends = "" | ||
| 36 | |||
| 37 | with open(status_file) as status: | ||
| 38 | data = status.read() | ||
| 39 | status.close() | ||
| 40 | for line in data.split('\n'): | ||
| 41 | m_pkg = re.match(r"^Package: (.*)", line) | ||
| 42 | m_status = re.match(r"^Status:.*unpacked", line) | ||
| 43 | m_depends = re.match(r"^Depends: (.*)", line) | ||
| 44 | |||
| 45 | #Only one of m_pkg, m_status or m_depends is not None at a time | ||
| 46 | #If m_pkg is not None, we started a new package | ||
| 47 | if m_pkg is not None: | ||
| 48 | #Get Package name | ||
| 49 | pkg_name = m_pkg.group(1) | ||
| 50 | #Make sure we reset other variables | ||
| 51 | pkg_status_match = False | ||
| 52 | pkg_depends = "" | ||
| 53 | elif m_status is not None: | ||
| 54 | #New status matched | ||
| 55 | pkg_status_match = True | ||
| 56 | elif m_depends is not None: | ||
| 57 | #New depends matched | ||
| 58 | pkg_depends = m_depends.group(1) | ||
| 59 | else: | ||
| 60 | pass | ||
| 61 | |||
| 62 | #Now check if we can process package depends and postinst | ||
| 63 | if "" != pkg_name and pkg_status_match: | ||
| 64 | pkgs[pkg_name] = _get_pkg_depends_list(pkg_depends) | ||
| 65 | else: | ||
| 66 | #Not enough information | ||
| 67 | pass | ||
| 68 | |||
| 69 | # remove package dependencies not in postinsts | ||
| 70 | pkg_names = list(pkgs.keys()) | ||
| 71 | for pkg_name in pkg_names: | ||
| 72 | deps = pkgs[pkg_name][:] | ||
| 73 | |||
| 74 | for d in deps: | ||
| 75 | if d not in pkg_names: | ||
| 76 | pkgs[pkg_name].remove(d) | ||
| 77 | |||
| 78 | return pkgs | ||
| 79 | |||
| 80 | def _get_delayed_postinsts_common(self, status_file): | ||
| 81 | def _dep_resolve(graph, node, resolved, seen): | ||
| 82 | seen.append(node) | ||
| 83 | |||
| 84 | for edge in graph[node]: | ||
| 85 | if edge not in resolved: | ||
| 86 | if edge in seen: | ||
| 87 | raise RuntimeError("Packages %s and %s have " \ | ||
| 88 | "a circular dependency in postinsts scripts." \ | ||
| 89 | % (node, edge)) | ||
| 90 | _dep_resolve(graph, edge, resolved, seen) | ||
| 91 | |||
| 92 | resolved.append(node) | ||
| 93 | |||
| 94 | pkg_list = [] | ||
| 95 | |||
| 96 | pkgs = None | ||
| 97 | if not self.d.getVar('PACKAGE_INSTALL').strip(): | ||
| 98 | bb.note("Building empty image") | ||
| 99 | else: | ||
| 100 | pkgs = self._get_pkgs_postinsts(status_file) | ||
| 101 | if pkgs: | ||
| 102 | root = "__packagegroup_postinst__" | ||
| 103 | pkgs[root] = list(pkgs.keys()) | ||
| 104 | _dep_resolve(pkgs, root, pkg_list, []) | ||
| 105 | pkg_list.remove(root) | ||
| 106 | |||
| 107 | if len(pkg_list) == 0: | ||
| 108 | return None | ||
| 109 | |||
| 110 | return pkg_list | ||
| 111 | |||
| 112 | def _save_postinsts_common(self, dst_postinst_dir, src_postinst_dir): | ||
| 113 | if bb.utils.contains("IMAGE_FEATURES", "package-management", | ||
| 114 | True, False, self.d): | ||
| 115 | return | ||
| 116 | num = 0 | ||
| 117 | for p in self._get_delayed_postinsts(): | ||
| 118 | bb.utils.mkdirhier(dst_postinst_dir) | ||
| 119 | |||
| 120 | if os.path.exists(os.path.join(src_postinst_dir, p + ".postinst")): | ||
| 121 | shutil.copy(os.path.join(src_postinst_dir, p + ".postinst"), | ||
| 122 | os.path.join(dst_postinst_dir, "%03d-%s" % (num, p))) | ||
| 123 | |||
| 124 | num += 1 | ||
| 125 | |||
| 126 | class PkgRootfs(DpkgOpkgRootfs): | ||
| 127 | def __init__(self, d, manifest_dir, progress_reporter=None, logcatcher=None): | ||
| 128 | super(PkgRootfs, self).__init__(d, progress_reporter, logcatcher) | ||
| 129 | self.log_check_regex = '(exit 1|Collected errors)' | ||
| 130 | |||
| 131 | self.manifest = PkgManifest(d, manifest_dir) | ||
| 132 | self.opkg_conf = self.d.getVar("IPKGCONF_TARGET") | ||
| 133 | self.pkg_archs = self.d.getVar("ALL_MULTILIB_PACKAGE_ARCHS") | ||
| 134 | |||
| 135 | self.inc_opkg_image_gen = self.d.getVar('INC_IPK_IMAGE_GEN') or "" | ||
| 136 | if self._remove_old_rootfs(): | ||
| 137 | bb.utils.remove(self.image_rootfs, True) | ||
| 138 | self.pm = OpkgPM(d, | ||
| 139 | self.image_rootfs, | ||
| 140 | self.opkg_conf, | ||
| 141 | self.pkg_archs) | ||
| 142 | else: | ||
| 143 | self.pm = OpkgPM(d, | ||
| 144 | self.image_rootfs, | ||
| 145 | self.opkg_conf, | ||
| 146 | self.pkg_archs) | ||
| 147 | self.pm.recover_packaging_data() | ||
| 148 | |||
| 149 | bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS'), True) | ||
| 150 | ''' | ||
| 151 | Compare two files that share the same key to see if they are equal. | ||
| 152 | If they are not equal, they are duplicates that come from | ||
| 153 | different packages. | ||
| 154 | ''' | ||
| 155 | def _file_equal(self, key, f1, f2): | ||
| 156 | if filecmp.cmp(f1, f2): | ||
| 157 | return True | ||
| 158 | # Not equal | ||
| 159 | return False | ||
| 160 | |||
| 161 | """ | ||
| 162 | This function was reused from the old implementation. | ||
| 163 | See commit: "image.bbclass: Added variables for multilib support." by | ||
| 164 | Lianhao Lu. | ||
| 165 | """ | ||
| 166 | def _multilib_sanity_test(self, dirs): | ||
| 167 | |||
| 168 | allow_replace = "|".join((self.d.getVar("MULTILIBRE_ALLOW_REP") or "").split()) | ||
| 169 | if allow_replace is None: | ||
| 170 | allow_replace = "" | ||
| 171 | |||
| 172 | allow_rep = re.compile(re.sub(r"\|$", r"", allow_replace)) | ||
| 173 | error_prompt = "Multilib check error:" | ||
| 174 | |||
| 175 | files = {} | ||
| 176 | for dir in dirs: | ||
| 177 | for root, subfolders, subfiles in os.walk(dir): | ||
| 178 | for file in subfiles: | ||
| 179 | item = os.path.join(root, file) | ||
| 180 | key = str(os.path.join("/", os.path.relpath(item, dir))) | ||
| 181 | |||
| 182 | valid = True | ||
| 183 | if key in files: | ||
| 184 | #check whether the file is allow to replace | ||
| 185 | if allow_rep.match(key): | ||
| 186 | valid = True | ||
| 187 | else: | ||
| 188 | if os.path.exists(files[key]) and \ | ||
| 189 | os.path.exists(item) and \ | ||
| 190 | not self._file_equal(key, files[key], item): | ||
| 191 | valid = False | ||
| 192 | bb.fatal("%s duplicate files %s %s is not the same\n" % | ||
| 193 | (error_prompt, item, files[key])) | ||
| 194 | |||
| 195 | #pass the check, add to list | ||
| 196 | if valid: | ||
| 197 | files[key] = item | ||
| 198 | |||
| 199 | def _multilib_test_install(self, pkgs): | ||
| 200 | ml_temp = self.d.getVar("MULTILIB_TEMP_ROOTFS") | ||
| 201 | bb.utils.mkdirhier(ml_temp) | ||
| 202 | |||
| 203 | dirs = [self.image_rootfs] | ||
| 204 | |||
| 205 | for variant in self.d.getVar("MULTILIB_VARIANTS").split(): | ||
| 206 | ml_target_rootfs = os.path.join(ml_temp, variant) | ||
| 207 | |||
| 208 | bb.utils.remove(ml_target_rootfs, True) | ||
| 209 | |||
| 210 | ml_opkg_conf = os.path.join(ml_temp, | ||
| 211 | variant + "-" + os.path.basename(self.opkg_conf)) | ||
| 212 | |||
| 213 | ml_pm = OpkgPM(self.d, ml_target_rootfs, ml_opkg_conf, self.pkg_archs, prepare_index=False) | ||
| 214 | |||
| 215 | ml_pm.update() | ||
| 216 | ml_pm.install(pkgs) | ||
| 217 | |||
| 218 | dirs.append(ml_target_rootfs) | ||
| 219 | |||
| 220 | self._multilib_sanity_test(dirs) | ||
| 221 | |||
| 222 | ''' | ||
| 223 | When ipk incremental image generation is enabled, remove packages that | ||
| 224 | are no longer needed by comparing the full manifest of the previously | ||
| 225 | built image with the full manifest of the current image. | ||
| 226 | ''' | ||
| 227 | def _remove_extra_packages(self, pkgs_initial_install): | ||
| 228 | if self.inc_opkg_image_gen == "1": | ||
| 229 | # Parse full manifest in previous existing image creation session | ||
| 230 | old_full_manifest = self.manifest.parse_full_manifest() | ||
| 231 | |||
| 232 | # Create full manifest for the current image session, the old one | ||
| 233 | # will be replaced by the new one. | ||
| 234 | self.manifest.create_full(self.pm) | ||
| 235 | |||
| 236 | # Parse full manifest in current image creation session | ||
| 237 | new_full_manifest = self.manifest.parse_full_manifest() | ||
| 238 | |||
| 239 | pkg_to_remove = list() | ||
| 240 | for pkg in old_full_manifest: | ||
| 241 | if pkg not in new_full_manifest: | ||
| 242 | pkg_to_remove.append(pkg) | ||
| 243 | |||
| 244 | if pkg_to_remove != []: | ||
| 245 | bb.note('decremental removed: %s' % ' '.join(pkg_to_remove)) | ||
| 246 | self.pm.remove(pkg_to_remove) | ||
| 247 | |||
| 248 | ''' | ||
| 249 | Compare against the previous image creation session and decide whether | ||
| 250 | the old rootfs has to be removed. It must be removed if any of | ||
| 251 | PACKAGE_EXCLUDE, NO_RECOMMENDATIONS or BAD_RECOMMENDATIONS has | ||
| 252 | changed since the last build. | ||
| 253 | ''' | ||
| 254 | def _remove_old_rootfs(self): | ||
| 255 | if self.inc_opkg_image_gen != "1": | ||
| 256 | return True | ||
| 257 | |||
| 258 | vars_list_file = self.d.expand('${T}/vars_list') | ||
| 259 | |||
| 260 | old_vars_list = "" | ||
| 261 | if os.path.exists(vars_list_file): | ||
| 262 | old_vars_list = open(vars_list_file, 'r+').read() | ||
| 263 | |||
| 264 | new_vars_list = '%s:%s:%s\n' % \ | ||
| 265 | ((self.d.getVar('BAD_RECOMMENDATIONS') or '').strip(), | ||
| 266 | (self.d.getVar('NO_RECOMMENDATIONS') or '').strip(), | ||
| 267 | (self.d.getVar('PACKAGE_EXCLUDE') or '').strip()) | ||
| 268 | open(vars_list_file, 'w+').write(new_vars_list) | ||
| 269 | |||
| 270 | if old_vars_list != new_vars_list: | ||
| 271 | return True | ||
| 272 | |||
| 273 | return False | ||
| 274 | |||
| 275 | def _create(self): | ||
| 276 | pkgs_to_install = self.manifest.parse_initial_manifest() | ||
| 277 | opkg_pre_process_cmds = self.d.getVar('OPKG_PREPROCESS_COMMANDS') | ||
| 278 | opkg_post_process_cmds = self.d.getVar('OPKG_POSTPROCESS_COMMANDS') | ||
| 279 | |||
| 280 | # update PM index files | ||
| 281 | self.pm.write_index() | ||
| 282 | |||
| 283 | execute_pre_post_process(self.d, opkg_pre_process_cmds) | ||
| 284 | |||
| 285 | if self.progress_reporter: | ||
| 286 | self.progress_reporter.next_stage() | ||
| 287 | # The stages here run in a slightly different order, so skip the next one | ||
| 288 | self.progress_reporter.next_stage() | ||
| 289 | |||
| 290 | self.pm.update() | ||
| 291 | |||
| 292 | if self.progress_reporter: | ||
| 293 | self.progress_reporter.next_stage() | ||
| 294 | |||
| 295 | if self.inc_opkg_image_gen == "1": | ||
| 296 | self._remove_extra_packages(pkgs_to_install) | ||
| 297 | |||
| 298 | if self.progress_reporter: | ||
| 299 | self.progress_reporter.next_stage() | ||
| 300 | |||
| 301 | for pkg_type in self.install_order: | ||
| 302 | if pkg_type in pkgs_to_install: | ||
| 303 | # For multilib, we perform a sanity test before final install | ||
| 304 | # If sanity test fails, it will automatically do a bb.fatal() | ||
| 305 | # and the installation will stop | ||
| 306 | if pkg_type == Manifest.PKG_TYPE_MULTILIB: | ||
| 307 | self._multilib_test_install(pkgs_to_install[pkg_type]) | ||
| 308 | |||
| 309 | self.pm.install(pkgs_to_install[pkg_type], | ||
| 310 | [False, True][pkg_type == Manifest.PKG_TYPE_ATTEMPT_ONLY]) | ||
| 311 | |||
| 312 | if self.progress_reporter: | ||
| 313 | self.progress_reporter.next_stage() | ||
| 314 | |||
| 315 | self.pm.install_complementary() | ||
| 316 | |||
| 317 | if self.progress_reporter: | ||
| 318 | self.progress_reporter.next_stage() | ||
| 319 | |||
| 320 | opkg_lib_dir = self.d.getVar('OPKGLIBDIR') | ||
| 321 | opkg_dir = os.path.join(opkg_lib_dir, 'opkg') | ||
| 322 | self._setup_dbg_rootfs([opkg_dir]) | ||
| 323 | |||
| 324 | execute_pre_post_process(self.d, opkg_post_process_cmds) | ||
| 325 | |||
| 326 | if self.inc_opkg_image_gen == "1": | ||
| 327 | self.pm.backup_packaging_data() | ||
| 328 | |||
| 329 | if self.progress_reporter: | ||
| 330 | self.progress_reporter.next_stage() | ||
| 331 | |||
| 332 | @staticmethod | ||
| 333 | def _depends_list(): | ||
| 334 | return ['IPKGCONF_SDK', 'IPK_FEED_URIS', 'DEPLOY_DIR_IPK', 'IPKGCONF_TARGET', 'INC_IPK_IMAGE_GEN', 'OPKG_ARGS', 'OPKGLIBDIR', 'OPKG_PREPROCESS_COMMANDS', 'OPKG_POSTPROCESS_COMMANDS'] | ||
| 335 | |||
| 336 | def _get_delayed_postinsts(self): | ||
| 337 | status_file = os.path.join(self.image_rootfs, | ||
| 338 | self.d.getVar('OPKGLIBDIR').strip('/'), | ||
| 339 | "opkg", "status") | ||
| 340 | return self._get_delayed_postinsts_common(status_file) | ||
| 341 | |||
| 342 | def _save_postinsts(self): | ||
| 343 | dst_postinst_dir = self.d.expand("${IMAGE_ROOTFS}${sysconfdir}/ipk-postinsts") | ||
| 344 | src_postinst_dir = self.d.expand("${IMAGE_ROOTFS}${OPKGLIBDIR}/opkg/info") | ||
| 345 | return self._save_postinsts_common(dst_postinst_dir, src_postinst_dir) | ||
| 346 | |||
| 347 | def _log_check(self): | ||
| 348 | self._log_check_warn() | ||
| 349 | self._log_check_error() | ||
| 350 | |||
| 351 | def _cleanup(self): | ||
| 352 | self.pm.remove_lists() | ||
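The incremental path in the ipk rootfs code above boils down to a set difference between the previous image's full manifest and the current one: anything that was installed before but is absent from the new manifest gets removed. A minimal, standalone sketch of that comparison (helper name and package names are illustrative, not taken from the deleted module):

    # Sketch of the removal computation used by _remove_extra_packages();
    # the real code reads both lists from manifest files on disk.
    def packages_to_remove(old_manifest_pkgs, new_manifest_pkgs):
        """Return packages present in the old image manifest but absent from the new one."""
        new_set = set(new_manifest_pkgs)
        return [pkg for pkg in old_manifest_pkgs if pkg not in new_set]

    if __name__ == "__main__":
        old = ["busybox", "dropbear", "debug-tweaks-helper"]
        new = ["busybox", "dropbear"]
        print(packages_to_remove(old, new))   # ['debug-tweaks-helper']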
diff --git a/meta/lib/oe/package_manager/ipk/sdk.py b/meta/lib/oe/package_manager/ipk/sdk.py deleted file mode 100644 index 3acd55f548..0000000000 --- a/meta/lib/oe/package_manager/ipk/sdk.py +++ /dev/null | |||
| @@ -1,113 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | import glob | ||
| 8 | import shutil | ||
| 9 | from oe.utils import execute_pre_post_process | ||
| 10 | from oe.sdk import Sdk | ||
| 11 | from oe.package_manager.ipk.manifest import PkgManifest | ||
| 12 | from oe.manifest import Manifest | ||
| 13 | from oe.package_manager.ipk import OpkgPM | ||
| 14 | |||
| 15 | class PkgSdk(Sdk): | ||
| 16 | def __init__(self, d, manifest_dir=None): | ||
| 17 | super(PkgSdk, self).__init__(d, manifest_dir) | ||
| 18 | |||
| 19 | # In sdk_list_installed_packages the call to opkg is hardcoded to | ||
| 20 | # always use IPKGCONF_TARGET and there's no exposed API to change this | ||
| 21 | # so simply override IPKGCONF_TARGET to use this separate config file. | ||
| 22 | ipkgconf_sdk_target = d.getVar("IPKGCONF_SDK_TARGET") | ||
| 23 | d.setVar("IPKGCONF_TARGET", ipkgconf_sdk_target) | ||
| 24 | |||
| 25 | self.target_conf = self.d.getVar("IPKGCONF_TARGET") | ||
| 26 | self.host_conf = self.d.getVar("IPKGCONF_SDK") | ||
| 27 | |||
| 28 | self.target_manifest = PkgManifest(d, self.manifest_dir, | ||
| 29 | Manifest.MANIFEST_TYPE_SDK_TARGET) | ||
| 30 | self.host_manifest = PkgManifest(d, self.manifest_dir, | ||
| 31 | Manifest.MANIFEST_TYPE_SDK_HOST) | ||
| 32 | |||
| 33 | ipk_repo_workdir = "oe-sdk-repo" | ||
| 34 | if "sdk_ext" in d.getVar("BB_RUNTASK"): | ||
| 35 | ipk_repo_workdir = "oe-sdk-ext-repo" | ||
| 36 | |||
| 37 | self.target_pm = OpkgPM(d, self.sdk_target_sysroot, self.target_conf, | ||
| 38 | self.d.getVar("ALL_MULTILIB_PACKAGE_ARCHS"), | ||
| 39 | ipk_repo_workdir=ipk_repo_workdir) | ||
| 40 | |||
| 41 | self.host_pm = OpkgPM(d, self.sdk_host_sysroot, self.host_conf, | ||
| 42 | self.d.getVar("SDK_PACKAGE_ARCHS"), | ||
| 43 | ipk_repo_workdir=ipk_repo_workdir) | ||
| 44 | |||
| 45 | def _populate_sysroot(self, pm, manifest): | ||
| 46 | pkgs_to_install = manifest.parse_initial_manifest() | ||
| 47 | |||
| 48 | if (self.d.getVar('BUILD_IMAGES_FROM_FEEDS') or "") != "1": | ||
| 49 | pm.write_index() | ||
| 50 | |||
| 51 | pm.update() | ||
| 52 | |||
| 53 | for pkg_type in self.install_order: | ||
| 54 | if pkg_type in pkgs_to_install: | ||
| 55 | pm.install(pkgs_to_install[pkg_type], | ||
| 56 | [False, True][pkg_type == Manifest.PKG_TYPE_ATTEMPT_ONLY]) | ||
| 57 | |||
| 58 | def _populate(self): | ||
| 59 | execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_PRE_TARGET_COMMAND")) | ||
| 60 | |||
| 61 | bb.note("Installing TARGET packages") | ||
| 62 | self._populate_sysroot(self.target_pm, self.target_manifest) | ||
| 63 | |||
| 64 | self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY')) | ||
| 65 | |||
| 66 | env_bkp = os.environ.copy() | ||
| 67 | os.environ['PATH'] = self.d.expand("${COREBASE}/scripts/nativesdk-intercept") + \ | ||
| 68 | os.pathsep + os.environ["PATH"] | ||
| 69 | |||
| 70 | self.target_pm.run_intercepts(populate_sdk='target') | ||
| 71 | os.environ.update(env_bkp) | ||
| 72 | |||
| 73 | execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND")) | ||
| 74 | |||
| 75 | if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): | ||
| 76 | self.target_pm.remove_packaging_data() | ||
| 77 | else: | ||
| 78 | self.target_pm.remove_lists() | ||
| 79 | |||
| 80 | bb.note("Installing NATIVESDK packages") | ||
| 81 | self._populate_sysroot(self.host_pm, self.host_manifest) | ||
| 82 | self.install_locales(self.host_pm) | ||
| 83 | |||
| 84 | self.host_pm.run_intercepts(populate_sdk='host') | ||
| 85 | |||
| 86 | execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND")) | ||
| 87 | |||
| 88 | if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): | ||
| 89 | self.host_pm.remove_packaging_data() | ||
| 90 | else: | ||
| 91 | self.host_pm.remove_lists() | ||
| 92 | |||
| 93 | target_sysconfdir = os.path.join(self.sdk_target_sysroot, self.sysconfdir) | ||
| 94 | host_sysconfdir = os.path.join(self.sdk_host_sysroot, self.sysconfdir) | ||
| 95 | |||
| 96 | self.mkdirhier(target_sysconfdir) | ||
| 97 | shutil.copy(self.target_conf, target_sysconfdir) | ||
| 98 | os.chmod(os.path.join(target_sysconfdir, | ||
| 99 | os.path.basename(self.target_conf)), 0o644) | ||
| 100 | |||
| 101 | self.mkdirhier(host_sysconfdir) | ||
| 102 | shutil.copy(self.host_conf, host_sysconfdir) | ||
| 103 | os.chmod(os.path.join(host_sysconfdir, | ||
| 104 | os.path.basename(self.host_conf)), 0o644) | ||
| 105 | |||
| 106 | native_opkg_state_dir = os.path.join(self.sdk_output, self.sdk_native_path, | ||
| 107 | self.d.getVar('localstatedir_nativesdk').strip('/'), | ||
| 108 | "lib", "opkg") | ||
| 109 | self.mkdirhier(native_opkg_state_dir) | ||
| 110 | for f in glob.glob(os.path.join(self.sdk_output, "var", "lib", "opkg", "*")): | ||
| 111 | self.movefile(f, native_opkg_state_dir) | ||
| 112 | |||
| 113 | self.remove(os.path.join(self.sdk_output, "var"), True) | ||
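The `[False, True][pkg_type == Manifest.PKG_TYPE_ATTEMPT_ONLY]` indexing used in `_populate_sysroot()` above is just a terse way of passing `attempt_only=True` only for attempt-only manifest entries. A small sketch of the equivalent, more explicit dispatch; the package-manager class below is a stub, not the real OpkgPM API, and the "mip"/"aop" type strings are restated here for illustration:

    # Sketch of the attempt-only install dispatch with a stub package manager.
    PKG_TYPE_MUST_INSTALL = "mip"
    PKG_TYPE_ATTEMPT_ONLY = "aop"

    class StubPM:
        def install(self, pkgs, attempt_only=False):
            mode = "attempting" if attempt_only else "installing"
            print("%s: %s" % (mode, " ".join(pkgs)))

    def populate(pm, pkgs_by_type, install_order):
        for pkg_type in install_order:
            if pkg_type in pkgs_by_type:
                # equivalent to: [False, True][pkg_type == PKG_TYPE_ATTEMPT_ONLY]
                pm.install(pkgs_by_type[pkg_type],
                           attempt_only=(pkg_type == PKG_TYPE_ATTEMPT_ONLY))

    populate(StubPM(),
             {PKG_TYPE_MUST_INSTALL: ["packagegroup-core-sdk"],
              PKG_TYPE_ATTEMPT_ONLY: ["optional-docs"]},
             [PKG_TYPE_MUST_INSTALL, PKG_TYPE_ATTEMPT_ONLY])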
diff --git a/meta/lib/oe/package_manager/rpm/__init__.py b/meta/lib/oe/package_manager/rpm/__init__.py deleted file mode 100644 index 20e6cb8744..0000000000 --- a/meta/lib/oe/package_manager/rpm/__init__.py +++ /dev/null | |||
| @@ -1,429 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | import shutil | ||
| 8 | import subprocess | ||
| 9 | from oe.package_manager import * | ||
| 10 | |||
| 11 | class RpmIndexer(Indexer): | ||
| 12 | def write_index(self): | ||
| 13 | self.do_write_index(self.deploy_dir) | ||
| 14 | |||
| 15 | def do_write_index(self, deploy_dir): | ||
| 16 | if self.d.getVar('PACKAGE_FEED_SIGN') == '1': | ||
| 17 | signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND')) | ||
| 18 | else: | ||
| 19 | signer = None | ||
| 20 | |||
| 21 | createrepo_c = bb.utils.which(os.environ['PATH'], "createrepo_c") | ||
| 22 | result = create_index("%s --update -q %s" % (createrepo_c, deploy_dir)) | ||
| 23 | if result: | ||
| 24 | bb.fatal(result) | ||
| 25 | |||
| 26 | # Sign repomd | ||
| 27 | if signer: | ||
| 28 | sig_type = self.d.getVar('PACKAGE_FEED_GPG_SIGNATURE_TYPE') | ||
| 29 | is_ascii_sig = (sig_type.upper() != "BIN") | ||
| 30 | signer.detach_sign(os.path.join(deploy_dir, 'repodata', 'repomd.xml'), | ||
| 31 | self.d.getVar('PACKAGE_FEED_GPG_NAME'), | ||
| 32 | self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE'), | ||
| 33 | armor=is_ascii_sig) | ||
| 34 | |||
| 35 | class RpmSubdirIndexer(RpmIndexer): | ||
| 36 | def write_index(self): | ||
| 37 | bb.note("Generating package index for %s" %(self.deploy_dir)) | ||
| 38 | # Remove the existing repodata to ensure that we re-generate it no matter what | ||
| 39 | bb.utils.remove(os.path.join(self.deploy_dir, "repodata"), recurse=True) | ||
| 40 | |||
| 41 | self.do_write_index(self.deploy_dir) | ||
| 42 | for entry in os.walk(self.deploy_dir): | ||
| 43 | if os.path.samefile(self.deploy_dir, entry[0]): | ||
| 44 | for dir in entry[1]: | ||
| 45 | if dir != 'repodata': | ||
| 46 | dir_path = oe.path.join(self.deploy_dir, dir) | ||
| 47 | bb.note("Generating package index for %s" %(dir_path)) | ||
| 48 | self.do_write_index(dir_path) | ||
| 49 | |||
| 50 | |||
| 51 | class PMPkgsList(PkgsList): | ||
| 52 | def list_pkgs(self): | ||
| 53 | return RpmPM(self.d, self.rootfs_dir, self.d.getVar('TARGET_VENDOR'), needfeed=False).list_installed() | ||
| 54 | |||
| 55 | class RpmPM(PackageManager): | ||
| 56 | def __init__(self, | ||
| 57 | d, | ||
| 58 | target_rootfs, | ||
| 59 | target_vendor, | ||
| 60 | task_name='target', | ||
| 61 | arch_var=None, | ||
| 62 | os_var=None, | ||
| 63 | rpm_repo_workdir="oe-rootfs-repo", | ||
| 64 | filterbydependencies=True, | ||
| 65 | needfeed=True): | ||
| 66 | super(RpmPM, self).__init__(d, target_rootfs) | ||
| 67 | self.target_vendor = target_vendor | ||
| 68 | self.task_name = task_name | ||
| 69 | if arch_var == None: | ||
| 70 | self.archs = self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS').replace("-","_") | ||
| 71 | else: | ||
| 72 | self.archs = self.d.getVar(arch_var).replace("-","_") | ||
| 73 | if task_name == "host": | ||
| 74 | self.primary_arch = self.d.getVar('SDK_ARCH') | ||
| 75 | else: | ||
| 76 | self.primary_arch = self.d.getVar('MACHINE_ARCH') | ||
| 77 | |||
| 78 | if needfeed: | ||
| 79 | self.rpm_repo_dir = oe.path.join(self.d.getVar('WORKDIR'), rpm_repo_workdir) | ||
| 80 | create_packages_dir(self.d, oe.path.join(self.rpm_repo_dir, "rpm"), d.getVar("DEPLOY_DIR_RPM"), "package_write_rpm", filterbydependencies) | ||
| 81 | |||
| 82 | self.saved_packaging_data = self.d.expand('${T}/saved_packaging_data/%s' % self.task_name) | ||
| 83 | if not os.path.exists(self.d.expand('${T}/saved_packaging_data')): | ||
| 84 | bb.utils.mkdirhier(self.d.expand('${T}/saved_packaging_data')) | ||
| 85 | self.packaging_data_dirs = ['etc/rpm', 'etc/rpmrc', 'etc/dnf', 'var/lib/rpm', 'var/lib/dnf', 'var/cache/dnf'] | ||
| 86 | self.solution_manifest = self.d.expand('${T}/saved/%s_solution' % | ||
| 87 | self.task_name) | ||
| 88 | if not os.path.exists(self.d.expand('${T}/saved')): | ||
| 89 | bb.utils.mkdirhier(self.d.expand('${T}/saved')) | ||
| 90 | |||
| 91 | def _configure_dnf(self): | ||
| 92 | # libsolv handles 'noarch' internally, so we don't need to specify it explicitly | ||
| 93 | archs = [i for i in reversed(self.archs.split()) if i not in ["any", "all", "noarch"]] | ||
| 94 | # This prevents accidental matching against libsolv's built-in policies | ||
| 95 | if len(archs) <= 1: | ||
| 96 | archs = archs + ["bogusarch"] | ||
| 97 | # This architecture needs to be upfront so that packages using it are properly prioritized | ||
| 98 | archs = ["sdk_provides_dummy_target"] + archs | ||
| 99 | confdir = "%s/%s" %(self.target_rootfs, "etc/dnf/vars/") | ||
| 100 | bb.utils.mkdirhier(confdir) | ||
| 101 | with open(confdir + "arch", 'w') as f: | ||
| 102 | f.write(":".join(archs)) | ||
| 103 | |||
| 104 | distro_codename = self.d.getVar('DISTRO_CODENAME') | ||
| 105 | with open(confdir + "releasever", 'w') as f: | ||
| 106 | f.write(distro_codename if distro_codename is not None else '') | ||
| 107 | |||
| 108 | with open(oe.path.join(self.target_rootfs, "etc/dnf/dnf.conf"), 'w') as f: | ||
| 109 | f.write("") | ||
| 110 | |||
| 111 | |||
| 112 | def _configure_rpm(self): | ||
| 113 | # We need to configure rpm to use our primary package architecture as the installation architecture, | ||
| 114 | # and to make it compatible with other package architectures that we use. | ||
| 115 | # Otherwise it will refuse to proceed with package installation. | ||
| 116 | platformconfdir = "%s/%s" %(self.target_rootfs, "etc/rpm/") | ||
| 117 | rpmrcconfdir = "%s/%s" %(self.target_rootfs, "etc/") | ||
| 118 | bb.utils.mkdirhier(platformconfdir) | ||
| 119 | with open(platformconfdir + "platform", 'w') as f: | ||
| 120 | f.write("%s-pc-linux" % self.primary_arch) | ||
| 121 | with open(rpmrcconfdir + "rpmrc", 'w') as f: | ||
| 122 | f.write("arch_compat: %s: %s\n" % (self.primary_arch, self.archs if len(self.archs) > 0 else self.primary_arch)) | ||
| 123 | f.write("buildarch_compat: %s: noarch\n" % self.primary_arch) | ||
| 124 | |||
| 125 | with open(platformconfdir + "macros", 'w') as f: | ||
| 126 | f.write("%_transaction_color 7\n") | ||
| 127 | if self.d.getVar('RPM_PREFER_ELF_ARCH'): | ||
| 128 | with open(platformconfdir + "macros", 'a') as f: | ||
| 129 | f.write("%%_prefer_color %s" % (self.d.getVar('RPM_PREFER_ELF_ARCH'))) | ||
| 130 | |||
| 131 | if self.d.getVar('RPM_SIGN_PACKAGES') == '1': | ||
| 132 | signer = get_signer(self.d, self.d.getVar('RPM_GPG_BACKEND')) | ||
| 133 | pubkey_path = oe.path.join(self.d.getVar('B'), 'rpm-key') | ||
| 134 | signer.export_pubkey(pubkey_path, self.d.getVar('RPM_GPG_NAME')) | ||
| 135 | rpm_bin = bb.utils.which(os.getenv('PATH'), "rpmkeys") | ||
| 136 | cmd = [rpm_bin, '--root=%s' % self.target_rootfs, '--import', pubkey_path] | ||
| 137 | try: | ||
| 138 | subprocess.check_output(cmd, stderr=subprocess.STDOUT) | ||
| 139 | except subprocess.CalledProcessError as e: | ||
| 140 | bb.fatal("Importing GPG key failed. Command '%s' " | ||
| 141 | "returned %d:\n%s" % (' '.join(cmd), e.returncode, e.output.decode("utf-8"))) | ||
| 142 | |||
| 143 | def create_configs(self): | ||
| 144 | self._configure_dnf() | ||
| 145 | self._configure_rpm() | ||
| 146 | |||
| 147 | def write_index(self): | ||
| 148 | lockfilename = self.d.getVar('DEPLOY_DIR_RPM') + "/rpm.lock" | ||
| 149 | lf = bb.utils.lockfile(lockfilename, False) | ||
| 150 | RpmIndexer(self.d, self.rpm_repo_dir).write_index() | ||
| 151 | bb.utils.unlockfile(lf) | ||
| 152 | |||
| 153 | def insert_feeds_uris(self, feed_uris, feed_base_paths, feed_archs): | ||
| 154 | from urllib.parse import urlparse | ||
| 155 | |||
| 156 | if feed_uris == "": | ||
| 157 | return | ||
| 158 | |||
| 159 | gpg_opts = '' | ||
| 160 | if self.d.getVar('PACKAGE_FEED_SIGN') == '1': | ||
| 161 | gpg_opts += 'repo_gpgcheck=1\n' | ||
| 162 | gpg_opts += 'gpgkey=file://%s/pki/packagefeed-gpg/PACKAGEFEED-GPG-KEY-%s-%s\n' % (self.d.getVar('sysconfdir'), self.d.getVar('DISTRO'), self.d.getVar('DISTRO_CODENAME')) | ||
| 163 | |||
| 164 | if self.d.getVar('RPM_SIGN_PACKAGES') != '1': | ||
| 165 | gpg_opts += 'gpgcheck=0\n' | ||
| 166 | |||
| 167 | bb.utils.mkdirhier(oe.path.join(self.target_rootfs, "etc", "yum.repos.d")) | ||
| 168 | remote_uris = self.construct_uris(feed_uris.split(), feed_base_paths.split()) | ||
| 169 | for uri in remote_uris: | ||
| 170 | repo_base = "oe-remote-repo" + "-".join(urlparse(uri).path.split("/")) | ||
| 171 | if feed_archs is not None: | ||
| 172 | for arch in feed_archs.split(): | ||
| 173 | repo_uri = uri + "/" + arch | ||
| 174 | repo_id = "oe-remote-repo" + "-".join(urlparse(repo_uri).path.split("/")) | ||
| 175 | repo_name = "OE Remote Repo:" + " ".join(urlparse(repo_uri).path.split("/")) | ||
| 176 | with open(oe.path.join(self.target_rootfs, "etc", "yum.repos.d", repo_base + ".repo"), 'a') as f: | ||
| 177 | f.write("[%s]\nname=%s\nbaseurl=%s\n%s\n" % (repo_id, repo_name, repo_uri, gpg_opts)) | ||
| 178 | else: | ||
| 179 | repo_name = "OE Remote Repo:" + " ".join(urlparse(uri).path.split("/")) | ||
| 180 | repo_uri = uri | ||
| 181 | with open(oe.path.join(self.target_rootfs, "etc", "yum.repos.d", repo_base + ".repo"), 'w') as f: | ||
| 182 | f.write("[%s]\nname=%s\nbaseurl=%s\n%s" % (repo_base, repo_name, repo_uri, gpg_opts)) | ||
| 183 | |||
| 184 | def _prepare_pkg_transaction(self): | ||
| 185 | os.environ['D'] = self.target_rootfs | ||
| 186 | os.environ['OFFLINE_ROOT'] = self.target_rootfs | ||
| 187 | os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs | ||
| 188 | os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs | ||
| 189 | os.environ['INTERCEPT_DIR'] = self.intercepts_dir | ||
| 190 | os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE') | ||
| 191 | |||
| 192 | |||
| 193 | def install(self, pkgs, attempt_only=False, hard_depends_only=False): | ||
| 194 | if len(pkgs) == 0: | ||
| 195 | return | ||
| 196 | self._prepare_pkg_transaction() | ||
| 197 | |||
| 198 | bad_recommendations = self.d.getVar('BAD_RECOMMENDATIONS') | ||
| 199 | package_exclude = self.d.getVar('PACKAGE_EXCLUDE') | ||
| 200 | exclude_pkgs = (bad_recommendations.split() if bad_recommendations else []) + (package_exclude.split() if package_exclude else []) | ||
| 201 | |||
| 202 | output = self._invoke_dnf((["--skip-broken"] if attempt_only else []) + | ||
| 203 | (["-x", ",".join(exclude_pkgs)] if len(exclude_pkgs) > 0 else []) + | ||
| 204 | (["--setopt=install_weak_deps=False"] if (hard_depends_only or self.d.getVar('NO_RECOMMENDATIONS') == "1") else []) + | ||
| 205 | (["--nogpgcheck"] if self.d.getVar('RPM_SIGN_PACKAGES') != '1' else ["--setopt=gpgcheck=True"]) + | ||
| 206 | ["install"] + | ||
| 207 | pkgs) | ||
| 208 | |||
| 209 | failed_scriptlets_pkgnames = collections.OrderedDict() | ||
| 210 | for line in output.splitlines(): | ||
| 211 | if line.startswith("Error: Systemctl"): | ||
| 212 | bb.error(line) | ||
| 213 | |||
| 214 | if line.startswith("Error in POSTIN scriptlet in rpm package"): | ||
| 215 | failed_scriptlets_pkgnames[line.split()[-1]] = True | ||
| 216 | |||
| 217 | if len(failed_scriptlets_pkgnames) > 0: | ||
| 218 | failed_postinsts_abort(list(failed_scriptlets_pkgnames.keys()), self.d.expand("${T}/log.do_${BB_CURRENTTASK}")) | ||
| 219 | |||
| 220 | def remove(self, pkgs, with_dependencies = True): | ||
| 221 | if not pkgs: | ||
| 222 | return | ||
| 223 | |||
| 224 | self._prepare_pkg_transaction() | ||
| 225 | |||
| 226 | if with_dependencies: | ||
| 227 | self._invoke_dnf(["remove"] + pkgs) | ||
| 228 | else: | ||
| 229 | cmd = bb.utils.which(os.getenv('PATH'), "rpm") | ||
| 230 | args = ["-e", "-v", "--nodeps", "--root=%s" %self.target_rootfs] | ||
| 231 | |||
| 232 | try: | ||
| 233 | bb.note("Running %s" % ' '.join([cmd] + args + pkgs)) | ||
| 234 | output = subprocess.check_output([cmd] + args + pkgs, stderr=subprocess.STDOUT).decode("utf-8") | ||
| 235 | bb.note(output) | ||
| 236 | except subprocess.CalledProcessError as e: | ||
| 237 | bb.fatal("Could not invoke rpm. Command " | ||
| 238 | "'%s' returned %d:\n%s" % (' '.join([cmd] + args + pkgs), e.returncode, e.output.decode("utf-8"))) | ||
| 239 | |||
| 240 | def upgrade(self): | ||
| 241 | self._prepare_pkg_transaction() | ||
| 242 | self._invoke_dnf(["upgrade"]) | ||
| 243 | |||
| 244 | def autoremove(self): | ||
| 245 | self._prepare_pkg_transaction() | ||
| 246 | self._invoke_dnf(["autoremove"]) | ||
| 247 | |||
| 248 | def remove_packaging_data(self): | ||
| 249 | self._invoke_dnf(["clean", "all"]) | ||
| 250 | for dir in self.packaging_data_dirs: | ||
| 251 | bb.utils.remove(oe.path.join(self.target_rootfs, dir), True) | ||
| 252 | |||
| 253 | def backup_packaging_data(self): | ||
| 254 | # Save the packaging dirs for incremental rpm image generation | ||
| 255 | if os.path.exists(self.saved_packaging_data): | ||
| 256 | bb.utils.remove(self.saved_packaging_data, True) | ||
| 257 | for i in self.packaging_data_dirs: | ||
| 258 | source_dir = oe.path.join(self.target_rootfs, i) | ||
| 259 | target_dir = oe.path.join(self.saved_packaging_data, i) | ||
| 260 | if os.path.isdir(source_dir): | ||
| 261 | shutil.copytree(source_dir, target_dir, symlinks=True) | ||
| 262 | elif os.path.isfile(source_dir): | ||
| 263 | shutil.copy2(source_dir, target_dir) | ||
| 264 | |||
| 265 | def recovery_packaging_data(self): | ||
| 266 | # Move the rpmlib back | ||
| 267 | if os.path.exists(self.saved_packaging_data): | ||
| 268 | for i in self.packaging_data_dirs: | ||
| 269 | target_dir = oe.path.join(self.target_rootfs, i) | ||
| 270 | if os.path.exists(target_dir): | ||
| 271 | bb.utils.remove(target_dir, True) | ||
| 272 | source_dir = oe.path.join(self.saved_packaging_data, i) | ||
| 273 | if os.path.isdir(source_dir): | ||
| 274 | shutil.copytree(source_dir, target_dir, symlinks=True) | ||
| 275 | elif os.path.isfile(source_dir): | ||
| 276 | shutil.copy2(source_dir, target_dir) | ||
| 277 | |||
| 278 | def list_installed(self): | ||
| 279 | output = self._invoke_dnf(["repoquery", "--installed", "--queryformat", "Package: %{name} %{arch} %{version} %{name}-%{version}-%{release}.%{arch}.rpm\nDependencies:\n%{requires}\nRecommendations:\n%{recommends}\nDependenciesEndHere:\n"], | ||
| 280 | print_output = False) | ||
| 281 | packages = {} | ||
| 282 | current_package = None | ||
| 283 | current_deps = None | ||
| 284 | current_state = "initial" | ||
| 285 | for line in output.splitlines(): | ||
| 286 | if line.startswith("Package:"): | ||
| 287 | package_info = line.split(" ")[1:] | ||
| 288 | current_package = package_info[0] | ||
| 289 | package_arch = package_info[1] | ||
| 290 | package_version = package_info[2] | ||
| 291 | package_rpm = package_info[3] | ||
| 292 | packages[current_package] = {"arch":package_arch, "ver":package_version, "filename":package_rpm} | ||
| 293 | current_deps = [] | ||
| 294 | elif line.startswith("Dependencies:"): | ||
| 295 | current_state = "dependencies" | ||
| 296 | elif line.startswith("Recommendations"): | ||
| 297 | current_state = "recommendations" | ||
| 298 | elif line.startswith("DependenciesEndHere:"): | ||
| 299 | current_state = "initial" | ||
| 300 | packages[current_package]["deps"] = current_deps | ||
| 301 | elif len(line) > 0: | ||
| 302 | if current_state == "dependencies": | ||
| 303 | current_deps.append(line) | ||
| 304 | elif current_state == "recommendations": | ||
| 305 | current_deps.append("%s [REC]" % line) | ||
| 306 | |||
| 307 | return packages | ||
| 308 | |||
| 309 | def update(self): | ||
| 310 | self._invoke_dnf(["makecache", "--refresh"]) | ||
| 311 | |||
| 312 | def _invoke_dnf(self, dnf_args, fatal = True, print_output = True ): | ||
| 313 | os.environ['RPM_ETCCONFIGDIR'] = self.target_rootfs | ||
| 314 | |||
| 315 | dnf_cmd = bb.utils.which(os.getenv('PATH'), "dnf") | ||
| 316 | standard_dnf_args = ["-v", "--rpmverbosity=info", "-y", | ||
| 317 | "-c", oe.path.join(self.target_rootfs, "etc/dnf/dnf.conf"), | ||
| 318 | "--setopt=reposdir=%s" %(oe.path.join(self.target_rootfs, "etc/yum.repos.d")), | ||
| 319 | "--installroot=%s" % (self.target_rootfs), | ||
| 320 | "--setopt=logdir=%s" % (self.d.getVar('T')) | ||
| 321 | ] | ||
| 322 | if hasattr(self, "rpm_repo_dir"): | ||
| 323 | standard_dnf_args.append("--repofrompath=oe-repo,%s" % (self.rpm_repo_dir)) | ||
| 324 | cmd = [dnf_cmd] + standard_dnf_args + dnf_args | ||
| 325 | bb.note('Running %s' % ' '.join(cmd)) | ||
| 326 | try: | ||
| 327 | output = subprocess.check_output(cmd,stderr=subprocess.STDOUT).decode("utf-8") | ||
| 328 | if print_output: | ||
| 329 | bb.debug(1, output) | ||
| 330 | return output | ||
| 331 | except subprocess.CalledProcessError as e: | ||
| 332 | if print_output: | ||
| 333 | e_output = e.output.decode("utf-8") | ||
| 334 | extra_info = "" | ||
| 335 | if "install" in dnf_args: | ||
| 336 | if "Error: Unable to find a match:" in e_output: | ||
| 337 | no_match_pkgs = re.search(r'Error: Unable to find a match: ([a-z0-9+\-\._\s]+)', e_output).group(1).split() | ||
| 338 | for pkg in no_match_pkgs: | ||
| 339 | extra_info += self.get_missing_pkg_reason(pkg) | ||
| 340 | (bb.note, bb.fatal)[fatal]("Could not invoke dnf. Command " | ||
| 341 | "'%s' returned %d:\n%s%s" % (' '.join(cmd), e.returncode, e_output, extra_info)) | ||
| 342 | else: | ||
| 343 | (bb.note, bb.fatal)[fatal]("Could not invoke dnf. Command " | ||
| 344 | "'%s' returned %d:" % (' '.join(cmd), e.returncode)) | ||
| 345 | return e.output.decode("utf-8") | ||
| 346 | |||
| 347 | def dump_install_solution(self, pkgs): | ||
| 348 | with open(self.solution_manifest, 'w') as f: | ||
| 349 | f.write(" ".join(pkgs)) | ||
| 350 | return pkgs | ||
| 351 | |||
| 352 | def load_old_install_solution(self): | ||
| 353 | if not os.path.exists(self.solution_manifest): | ||
| 354 | return [] | ||
| 355 | with open(self.solution_manifest, 'r') as fd: | ||
| 356 | return fd.read().split() | ||
| 357 | |||
| 358 | def _script_num_prefix(self, path): | ||
| 359 | files = os.listdir(path) | ||
| 360 | numbers = set() | ||
| 361 | numbers.add(99) | ||
| 362 | for f in files: | ||
| 363 | numbers.add(int(f.split("-")[0])) | ||
| 364 | return max(numbers) + 1 | ||
| 365 | |||
| 366 | def save_rpmpostinst(self, pkg): | ||
| 367 | bb.note("Saving postinstall script of %s" % (pkg)) | ||
| 368 | cmd = bb.utils.which(os.getenv('PATH'), "rpm") | ||
| 369 | args = ["-q", "--root=%s" % self.target_rootfs, "--queryformat", "%{postin}", pkg] | ||
| 370 | |||
| 371 | try: | ||
| 372 | output = subprocess.check_output([cmd] + args,stderr=subprocess.STDOUT).decode("utf-8") | ||
| 373 | except subprocess.CalledProcessError as e: | ||
| 374 | bb.fatal("Could not invoke rpm. Command " | ||
| 375 | "'%s' returned %d:\n%s" % (' '.join([cmd] + args), e.returncode, e.output.decode("utf-8"))) | ||
| 376 | |||
| 377 | # may need to prepend #!/bin/sh to output | ||
| 378 | |||
| 379 | target_path = oe.path.join(self.target_rootfs, self.d.expand('${sysconfdir}/rpm-postinsts/')) | ||
| 380 | bb.utils.mkdirhier(target_path) | ||
| 381 | num = self._script_num_prefix(target_path) | ||
| 382 | saved_script_name = oe.path.join(target_path, "%d-%s" % (num, pkg)) | ||
| 383 | with open(saved_script_name, 'w') as f: | ||
| 384 | f.write(output) | ||
| 385 | os.chmod(saved_script_name, 0o755) | ||
| 386 | |||
| 387 | def _handle_intercept_failure(self, registered_pkgs): | ||
| 388 | rpm_postinsts_dir = self.target_rootfs + self.d.expand('${sysconfdir}/rpm-postinsts/') | ||
| 389 | bb.utils.mkdirhier(rpm_postinsts_dir) | ||
| 390 | |||
| 391 | # Save the package postinstalls in /etc/rpm-postinsts | ||
| 392 | for pkg in registered_pkgs.split(): | ||
| 393 | self.save_rpmpostinst(pkg) | ||
| 394 | |||
| 395 | def extract(self, pkg): | ||
| 396 | output = self._invoke_dnf(["repoquery", "--location", pkg]) | ||
| 397 | pkg_name = output.splitlines()[-1] | ||
| 398 | if not pkg_name.endswith(".rpm"): | ||
| 399 | bb.fatal("dnf could not find package %s in repository: %s" %(pkg, output)) | ||
| 400 | # Strip file: prefix | ||
| 401 | pkg_path = pkg_name[5:] | ||
| 402 | |||
| 403 | tar_cmd = bb.utils.which(os.getenv("PATH"), "tar") | ||
| 404 | rpm2archive_cmd = bb.utils.which(os.getenv("PATH"), "rpm2archive") | ||
| 405 | |||
| 406 | if not os.path.isfile(pkg_path): | ||
| 407 | bb.fatal("Unable to extract package for '%s'. " | ||
| 408 | "File %s doesn't exist" % (pkg, pkg_path)) | ||
| 409 | |||
| 410 | tmp_dir = tempfile.mkdtemp() | ||
| 411 | current_dir = os.getcwd() | ||
| 412 | os.chdir(tmp_dir) | ||
| 413 | |||
| 414 | try: | ||
| 415 | cmd = "%s -n %s | %s xv" % (rpm2archive_cmd, pkg_path, tar_cmd) | ||
| 416 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) | ||
| 417 | except subprocess.CalledProcessError as e: | ||
| 418 | bb.utils.remove(tmp_dir, recurse=True) | ||
| 419 | bb.fatal("Unable to extract %s package. Command '%s' " | ||
| 420 | "returned %d:\n%s" % (pkg_path, cmd, e.returncode, e.output.decode("utf-8"))) | ||
| 421 | except OSError as e: | ||
| 422 | bb.utils.remove(tmp_dir, recurse=True) | ||
| 423 | bb.fatal("Unable to extract %s package. Command '%s' " | ||
| 424 | "returned %d:\n%s at %s" % (pkg_path, cmd, e.errno, e.strerror, e.filename)) | ||
| 425 | |||
| 426 | bb.note("Extracted %s to %s" % (pkg_path, tmp_dir)) | ||
| 427 | os.chdir(current_dir) | ||
| 428 | |||
| 429 | return tmp_dir | ||
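`list_installed()` above parses the custom `--queryformat` output with a small state machine keyed on the "Package:", "Dependencies:", "Recommendations:" and "DependenciesEndHere:" markers. A self-contained sketch of just that parsing step, fed a fabricated sample string rather than real dnf output:

    # Sketch of the list_installed() parser; the real method obtains this
    # text from 'dnf repoquery --installed' with the queryformat shown above.
    def parse_repoquery(output):
        packages, current, deps, state = {}, None, None, "initial"
        for line in output.splitlines():
            if line.startswith("Package:"):
                name, arch, ver, rpm = line.split(" ")[1:5]
                packages[name] = {"arch": arch, "ver": ver, "filename": rpm}
                current, deps = name, []
            elif line.startswith("Dependencies:"):
                state = "dependencies"
            elif line.startswith("Recommendations"):
                state = "recommendations"
            elif line.startswith("DependenciesEndHere:"):
                state = "initial"
                packages[current]["deps"] = deps
            elif line and state == "dependencies":
                deps.append(line)
            elif line and state == "recommendations":
                deps.append("%s [REC]" % line)
        return packages

    sample = ("Package: busybox core2_64 1.36.1 busybox-1.36.1-r0.core2_64.rpm\n"
              "Dependencies:\nlibc6\nRecommendations:\nbusybox-syslog\n"
              "DependenciesEndHere:\n")
    print(parse_repoquery(sample))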
diff --git a/meta/lib/oe/package_manager/rpm/manifest.py b/meta/lib/oe/package_manager/rpm/manifest.py deleted file mode 100644 index 6ee7c329f0..0000000000 --- a/meta/lib/oe/package_manager/rpm/manifest.py +++ /dev/null | |||
| @@ -1,56 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | from oe.manifest import Manifest | ||
| 8 | |||
| 9 | class PkgManifest(Manifest): | ||
| 10 | """ | ||
| 11 | Returns a dictionary object with must-install (mip) and multilib (mlp) packages. | ||
| 12 | """ | ||
| 13 | def _split_multilib(self, pkg_list): | ||
| 14 | pkgs = dict() | ||
| 15 | |||
| 16 | for pkg in pkg_list.split(): | ||
| 17 | pkg_type = self.PKG_TYPE_MUST_INSTALL | ||
| 18 | |||
| 19 | ml_variants = self.d.getVar('MULTILIB_VARIANTS').split() | ||
| 20 | |||
| 21 | for ml_variant in ml_variants: | ||
| 22 | if pkg.startswith(ml_variant + '-'): | ||
| 23 | pkg_type = self.PKG_TYPE_MULTILIB | ||
| 24 | |||
| 25 | if not pkg_type in pkgs: | ||
| 26 | pkgs[pkg_type] = pkg | ||
| 27 | else: | ||
| 28 | pkgs[pkg_type] += " " + pkg | ||
| 29 | |||
| 30 | return pkgs | ||
| 31 | |||
| 32 | def create_initial(self): | ||
| 33 | pkgs = dict() | ||
| 34 | |||
| 35 | with open(self.initial_manifest, "w+") as manifest: | ||
| 36 | manifest.write(self.initial_manifest_file_header) | ||
| 37 | |||
| 38 | for var in self.var_maps[self.manifest_type]: | ||
| 39 | if var in self.vars_to_split: | ||
| 40 | split_pkgs = self._split_multilib(self.d.getVar(var)) | ||
| 41 | if split_pkgs is not None: | ||
| 42 | pkgs = dict(list(pkgs.items()) + list(split_pkgs.items())) | ||
| 43 | else: | ||
| 44 | pkg_list = self.d.getVar(var) | ||
| 45 | if pkg_list is not None: | ||
| 46 | pkgs[self.var_maps[self.manifest_type][var]] = self.d.getVar(var) | ||
| 47 | |||
| 48 | for pkg_type in pkgs: | ||
| 49 | for pkg in pkgs[pkg_type].split(): | ||
| 50 | manifest.write("%s,%s\n" % (pkg_type, pkg)) | ||
| 51 | |||
| 52 | def create_final(self): | ||
| 53 | pass | ||
| 54 | |||
| 55 | def create_full(self, pm): | ||
| 56 | pass | ||
diff --git a/meta/lib/oe/package_manager/rpm/rootfs.py b/meta/lib/oe/package_manager/rpm/rootfs.py deleted file mode 100644 index 3ba5396320..0000000000 --- a/meta/lib/oe/package_manager/rpm/rootfs.py +++ /dev/null | |||
| @@ -1,150 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | from oe.rootfs import Rootfs | ||
| 8 | from oe.manifest import Manifest | ||
| 9 | from oe.utils import execute_pre_post_process | ||
| 10 | from oe.package_manager.rpm.manifest import PkgManifest | ||
| 11 | from oe.package_manager.rpm import RpmPM | ||
| 12 | |||
| 13 | class PkgRootfs(Rootfs): | ||
| 14 | def __init__(self, d, manifest_dir, progress_reporter=None, logcatcher=None): | ||
| 15 | super(PkgRootfs, self).__init__(d, progress_reporter, logcatcher) | ||
| 16 | self.log_check_regex = r'(unpacking of archive failed|Cannot find package'\ | ||
| 17 | r'|exit 1|ERROR: |Error: |Error |ERROR '\ | ||
| 18 | r'|Failed |Failed: |Failed$|Failed\(\d+\):)' | ||
| 19 | |||
| 20 | self.manifest = PkgManifest(d, manifest_dir) | ||
| 21 | |||
| 22 | self.pm = RpmPM(d, | ||
| 23 | d.getVar('IMAGE_ROOTFS'), | ||
| 24 | self.d.getVar('TARGET_VENDOR') | ||
| 25 | ) | ||
| 26 | |||
| 27 | self.inc_rpm_image_gen = self.d.getVar('INC_RPM_IMAGE_GEN') | ||
| 28 | if self.inc_rpm_image_gen != "1": | ||
| 29 | bb.utils.remove(self.image_rootfs, True) | ||
| 30 | else: | ||
| 31 | self.pm.recovery_packaging_data() | ||
| 32 | bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS'), True) | ||
| 33 | |||
| 34 | self.pm.create_configs() | ||
| 35 | |||
| 36 | ''' | ||
| 37 | When rpm incremental image generation is enabled, remove packages that | ||
| 38 | are no longer needed by comparing the new install solution manifest | ||
| 39 | with the manifest of what is already installed. | ||
| 40 | ''' | ||
| 41 | def _create_incremental(self, pkgs_initial_install): | ||
| 42 | if self.inc_rpm_image_gen == "1": | ||
| 43 | |||
| 44 | pkgs_to_install = list() | ||
| 45 | for pkg_type in pkgs_initial_install: | ||
| 46 | pkgs_to_install += pkgs_initial_install[pkg_type] | ||
| 47 | |||
| 48 | installed_manifest = self.pm.load_old_install_solution() | ||
| 49 | solution_manifest = self.pm.dump_install_solution(pkgs_to_install) | ||
| 50 | |||
| 51 | pkg_to_remove = list() | ||
| 52 | for pkg in installed_manifest: | ||
| 53 | if pkg not in solution_manifest: | ||
| 54 | pkg_to_remove.append(pkg) | ||
| 55 | |||
| 56 | self.pm.update() | ||
| 57 | |||
| 58 | bb.note('incremental update -- upgrade packages in place ') | ||
| 59 | self.pm.upgrade() | ||
| 60 | if pkg_to_remove != []: | ||
| 61 | bb.note('incremental removed: %s' % ' '.join(pkg_to_remove)) | ||
| 62 | self.pm.remove(pkg_to_remove) | ||
| 63 | |||
| 64 | self.pm.autoremove() | ||
| 65 | |||
| 66 | def _create(self): | ||
| 67 | pkgs_to_install = self.manifest.parse_initial_manifest() | ||
| 68 | rpm_pre_process_cmds = self.d.getVar('RPM_PREPROCESS_COMMANDS') | ||
| 69 | rpm_post_process_cmds = self.d.getVar('RPM_POSTPROCESS_COMMANDS') | ||
| 70 | |||
| 71 | # update PM index files | ||
| 72 | self.pm.write_index() | ||
| 73 | |||
| 74 | execute_pre_post_process(self.d, rpm_pre_process_cmds) | ||
| 75 | |||
| 76 | if self.progress_reporter: | ||
| 77 | self.progress_reporter.next_stage() | ||
| 78 | |||
| 79 | if self.inc_rpm_image_gen == "1": | ||
| 80 | self._create_incremental(pkgs_to_install) | ||
| 81 | |||
| 82 | if self.progress_reporter: | ||
| 83 | self.progress_reporter.next_stage() | ||
| 84 | |||
| 85 | self.pm.update() | ||
| 86 | |||
| 87 | pkgs = [] | ||
| 88 | pkgs_attempt = [] | ||
| 89 | for pkg_type in pkgs_to_install: | ||
| 90 | if pkg_type == Manifest.PKG_TYPE_ATTEMPT_ONLY: | ||
| 91 | pkgs_attempt += pkgs_to_install[pkg_type] | ||
| 92 | else: | ||
| 93 | pkgs += pkgs_to_install[pkg_type] | ||
| 94 | |||
| 95 | if self.progress_reporter: | ||
| 96 | self.progress_reporter.next_stage() | ||
| 97 | |||
| 98 | self.pm.install(pkgs) | ||
| 99 | |||
| 100 | if self.progress_reporter: | ||
| 101 | self.progress_reporter.next_stage() | ||
| 102 | |||
| 103 | self.pm.install(pkgs_attempt, True) | ||
| 104 | |||
| 105 | if self.progress_reporter: | ||
| 106 | self.progress_reporter.next_stage() | ||
| 107 | |||
| 108 | self.pm.install_complementary() | ||
| 109 | |||
| 110 | if self.progress_reporter: | ||
| 111 | self.progress_reporter.next_stage() | ||
| 112 | |||
| 113 | self._setup_dbg_rootfs(['/etc/rpm', '/etc/rpmrc', '/etc/dnf', '/var/lib/rpm', '/var/cache/dnf', '/var/lib/dnf']) | ||
| 114 | |||
| 115 | execute_pre_post_process(self.d, rpm_post_process_cmds) | ||
| 116 | |||
| 117 | if self.inc_rpm_image_gen == "1": | ||
| 118 | self.pm.backup_packaging_data() | ||
| 119 | |||
| 120 | if self.progress_reporter: | ||
| 121 | self.progress_reporter.next_stage() | ||
| 122 | |||
| 123 | |||
| 124 | @staticmethod | ||
| 125 | def _depends_list(): | ||
| 126 | return ['DEPLOY_DIR_RPM', 'INC_RPM_IMAGE_GEN', 'RPM_PREPROCESS_COMMANDS', | ||
| 127 | 'RPM_POSTPROCESS_COMMANDS', 'RPM_PREFER_ELF_ARCH'] | ||
| 128 | |||
| 129 | def _get_delayed_postinsts(self): | ||
| 130 | postinst_dir = self.d.expand("${IMAGE_ROOTFS}${sysconfdir}/rpm-postinsts") | ||
| 131 | if os.path.isdir(postinst_dir): | ||
| 132 | files = os.listdir(postinst_dir) | ||
| 133 | for f in files: | ||
| 134 | bb.note('Delayed package scriptlet: %s' % f) | ||
| 135 | return files | ||
| 136 | |||
| 137 | return None | ||
| 138 | |||
| 139 | def _save_postinsts(self): | ||
| 140 | # this is just a stub. For RPM, the failed postinstalls are | ||
| 141 | # already saved in /etc/rpm-postinsts | ||
| 142 | pass | ||
| 143 | |||
| 144 | def _log_check(self): | ||
| 145 | self._log_check_warn() | ||
| 146 | self._log_check_error() | ||
| 147 | |||
| 148 | def _cleanup(self): | ||
| 149 | if bb.utils.contains("IMAGE_FEATURES", "package-management", True, False, self.d): | ||
| 150 | self.pm._invoke_dnf(["clean", "all"]) | ||
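The `log_check_regex` assigned in the constructor above is what the base Rootfs log scanning uses to flag rpm/dnf failures in the do_rootfs log. A small sketch showing how that pattern flags matching lines (the sample log lines are fabricated):

    import re

    # Same pattern as in the deleted PkgRootfs constructor; sample lines are fabricated.
    log_check_regex = re.compile(r'(unpacking of archive failed|Cannot find package'
                                 r'|exit 1|ERROR: |Error: |Error |ERROR '
                                 r'|Failed |Failed: |Failed$|Failed\(\d+\):)')

    sample_log = [
        "Installing : busybox-1.36.1-r0.core2_64",
        "Error: Unable to find a match: nonexistent-package",
        "Complete!",
    ]
    for line in sample_log:
        if log_check_regex.search(line):
            print("log check hit: %s" % line)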
diff --git a/meta/lib/oe/package_manager/rpm/sdk.py b/meta/lib/oe/package_manager/rpm/sdk.py deleted file mode 100644 index ea79fe050b..0000000000 --- a/meta/lib/oe/package_manager/rpm/sdk.py +++ /dev/null | |||
| @@ -1,122 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | import glob | ||
| 8 | from oe.utils import execute_pre_post_process | ||
| 9 | from oe.sdk import Sdk | ||
| 10 | from oe.manifest import Manifest | ||
| 11 | from oe.package_manager.rpm.manifest import PkgManifest | ||
| 12 | from oe.package_manager.rpm import RpmPM | ||
| 13 | |||
| 14 | class PkgSdk(Sdk): | ||
| 15 | def __init__(self, d, manifest_dir=None, rpm_workdir="oe-sdk-repo"): | ||
| 16 | super(PkgSdk, self).__init__(d, manifest_dir) | ||
| 17 | |||
| 18 | self.target_manifest = PkgManifest(d, self.manifest_dir, | ||
| 19 | Manifest.MANIFEST_TYPE_SDK_TARGET) | ||
| 20 | self.host_manifest = PkgManifest(d, self.manifest_dir, | ||
| 21 | Manifest.MANIFEST_TYPE_SDK_HOST) | ||
| 22 | |||
| 23 | rpm_repo_workdir = "oe-sdk-repo" | ||
| 24 | if "sdk_ext" in d.getVar("BB_RUNTASK"): | ||
| 25 | rpm_repo_workdir = "oe-sdk-ext-repo" | ||
| 26 | |||
| 27 | self.target_pm = RpmPM(d, | ||
| 28 | self.sdk_target_sysroot, | ||
| 29 | self.d.getVar('TARGET_VENDOR'), | ||
| 30 | 'target', | ||
| 31 | rpm_repo_workdir=rpm_repo_workdir | ||
| 32 | ) | ||
| 33 | |||
| 34 | self.host_pm = RpmPM(d, | ||
| 35 | self.sdk_host_sysroot, | ||
| 36 | self.d.getVar('SDK_VENDOR'), | ||
| 37 | 'host', | ||
| 38 | "SDK_PACKAGE_ARCHS", | ||
| 39 | "SDK_OS", | ||
| 40 | rpm_repo_workdir=rpm_repo_workdir | ||
| 41 | ) | ||
| 42 | |||
| 43 | def _populate_sysroot(self, pm, manifest): | ||
| 44 | pkgs_to_install = manifest.parse_initial_manifest() | ||
| 45 | |||
| 46 | pm.create_configs() | ||
| 47 | pm.write_index() | ||
| 48 | pm.update() | ||
| 49 | |||
| 50 | pkgs = [] | ||
| 51 | pkgs_attempt = [] | ||
| 52 | for pkg_type in pkgs_to_install: | ||
| 53 | if pkg_type == Manifest.PKG_TYPE_ATTEMPT_ONLY: | ||
| 54 | pkgs_attempt += pkgs_to_install[pkg_type] | ||
| 55 | else: | ||
| 56 | pkgs += pkgs_to_install[pkg_type] | ||
| 57 | |||
| 58 | pm.install(pkgs) | ||
| 59 | |||
| 60 | pm.install(pkgs_attempt, True) | ||
| 61 | |||
| 62 | def _populate(self): | ||
| 63 | execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_PRE_TARGET_COMMAND")) | ||
| 64 | |||
| 65 | bb.note("Installing TARGET packages") | ||
| 66 | self._populate_sysroot(self.target_pm, self.target_manifest) | ||
| 67 | |||
| 68 | self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY')) | ||
| 69 | |||
| 70 | env_bkp = os.environ.copy() | ||
| 71 | os.environ['PATH'] = self.d.expand("${COREBASE}/scripts/nativesdk-intercept") + \ | ||
| 72 | os.pathsep + os.environ["PATH"] | ||
| 73 | |||
| 74 | self.target_pm.run_intercepts(populate_sdk='target') | ||
| 75 | os.environ.update(env_bkp) | ||
| 76 | |||
| 77 | execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND")) | ||
| 78 | |||
| 79 | if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): | ||
| 80 | self.target_pm.remove_packaging_data() | ||
| 81 | |||
| 82 | bb.note("Installing NATIVESDK packages") | ||
| 83 | self._populate_sysroot(self.host_pm, self.host_manifest) | ||
| 84 | self.install_locales(self.host_pm) | ||
| 85 | |||
| 86 | self.host_pm.run_intercepts(populate_sdk='host') | ||
| 87 | |||
| 88 | execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND")) | ||
| 89 | |||
| 90 | if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): | ||
| 91 | self.host_pm.remove_packaging_data() | ||
| 92 | |||
| 93 | # Move host RPM library data | ||
| 94 | native_rpm_state_dir = os.path.join(self.sdk_output, | ||
| 95 | self.sdk_native_path, | ||
| 96 | self.d.getVar('localstatedir_nativesdk').strip('/'), | ||
| 97 | "lib", | ||
| 98 | "rpm" | ||
| 99 | ) | ||
| 100 | self.mkdirhier(native_rpm_state_dir) | ||
| 101 | for f in glob.glob(os.path.join(self.sdk_output, | ||
| 102 | "var", | ||
| 103 | "lib", | ||
| 104 | "rpm", | ||
| 105 | "*")): | ||
| 106 | self.movefile(f, native_rpm_state_dir) | ||
| 107 | |||
| 108 | self.remove(os.path.join(self.sdk_output, "var"), True) | ||
| 109 | |||
| 110 | # Move host sysconfig data | ||
| 111 | native_sysconf_dir = os.path.join(self.sdk_output, | ||
| 112 | self.sdk_native_path, | ||
| 113 | self.d.getVar('sysconfdir', | ||
| 114 | True).strip('/'), | ||
| 115 | ) | ||
| 116 | self.mkdirhier(native_sysconf_dir) | ||
| 117 | for f in glob.glob(os.path.join(self.sdk_output, "etc", "rpm*")): | ||
| 118 | self.movefile(f, native_sysconf_dir) | ||
| 119 | for f in glob.glob(os.path.join(self.sdk_output, "etc", "dnf", "*")): | ||
| 120 | self.mkdirhier(native_sysconf_dir + "/dnf") | ||
| 121 | self.movefile(f, native_sysconf_dir + "/dnf") | ||
| 122 | self.remove(os.path.join(self.sdk_output, "etc"), True) | ||
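The tail of `_populate()` above relocates the rpm and dnf state written under `var/` and `etc/` of the SDK output into the nativesdk prefix. A minimal sketch of the same relocation pattern using plain shutil; the paths below are hypothetical, whereas the real code derives them from SDK_OUTPUT, the SDK native path and localstatedir_nativesdk:

    import glob
    import os
    import shutil

    # Hypothetical layout for illustration only.
    sdk_output = "/tmp/sdk-output"
    native_rpm_state_dir = os.path.join(
        sdk_output, "opt/poky/sysroots/x86_64-pokysdk-linux/var/lib/rpm")

    os.makedirs(native_rpm_state_dir, exist_ok=True)
    for f in glob.glob(os.path.join(sdk_output, "var", "lib", "rpm", "*")):
        # Move each rpmdb file under the nativesdk prefix, mirroring self.movefile()
        shutil.move(f, os.path.join(native_rpm_state_dir, os.path.basename(f)))

    # Drop the now-empty staging location, mirroring self.remove(..., True)
    shutil.rmtree(os.path.join(sdk_output, "var"), ignore_errors=True)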
diff --git a/meta/lib/oe/packagedata.py b/meta/lib/oe/packagedata.py deleted file mode 100644 index b6a10a930a..0000000000 --- a/meta/lib/oe/packagedata.py +++ /dev/null | |||
| @@ -1,369 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | import codecs | ||
| 8 | import os | ||
| 9 | import json | ||
| 10 | import bb.parse | ||
| 11 | import bb.compress.zstd | ||
| 12 | import oe.path | ||
| 13 | |||
| 14 | from glob import glob | ||
| 15 | |||
| 16 | def packaged(pkg, d): | ||
| 17 | return os.access(get_subpkgedata_fn(pkg, d) + '.packaged', os.R_OK) | ||
| 18 | |||
| 19 | def read_pkgdatafile(fn): | ||
| 20 | pkgdata = {} | ||
| 21 | |||
| 22 | def decode(str): | ||
| 23 | c = codecs.getdecoder("unicode_escape") | ||
| 24 | return c(str)[0] | ||
| 25 | |||
| 26 | if os.access(fn, os.R_OK): | ||
| 27 | import re | ||
| 28 | with open(fn, 'r') as f: | ||
| 29 | lines = f.readlines() | ||
| 30 | r = re.compile(r"(^.+?):\s+(.*)") | ||
| 31 | for l in lines: | ||
| 32 | m = r.match(l) | ||
| 33 | if m: | ||
| 34 | pkgdata[m.group(1)] = decode(m.group(2)) | ||
| 35 | |||
| 36 | return pkgdata | ||
| 37 | |||
| 38 | def get_subpkgedata_fn(pkg, d): | ||
| 39 | return d.expand('${PKGDATA_DIR}/runtime/%s' % pkg) | ||
| 40 | |||
| 41 | def has_subpkgdata(pkg, d): | ||
| 42 | return os.access(get_subpkgedata_fn(pkg, d), os.R_OK) | ||
| 43 | |||
| 44 | def read_subpkgdata(pkg, d): | ||
| 45 | return read_pkgdatafile(get_subpkgedata_fn(pkg, d)) | ||
| 46 | |||
| 47 | def has_pkgdata(pn, d): | ||
| 48 | fn = d.expand('${PKGDATA_DIR}/%s' % pn) | ||
| 49 | return os.access(fn, os.R_OK) | ||
| 50 | |||
| 51 | def read_pkgdata(pn, d): | ||
| 52 | fn = d.expand('${PKGDATA_DIR}/%s' % pn) | ||
| 53 | return read_pkgdatafile(fn) | ||
| 54 | |||
| 55 | # | ||
| 56 | # Collapse FOO:pkg variables into FOO | ||
| 57 | # | ||
| 58 | def read_subpkgdata_dict(pkg, d): | ||
| 59 | ret = {} | ||
| 60 | subd = read_pkgdatafile(get_subpkgedata_fn(pkg, d)) | ||
| 61 | for var in subd: | ||
| 62 | newvar = var.replace(":" + pkg, "") | ||
| 63 | if newvar == var and var + ":" + pkg in subd: | ||
| 64 | continue | ||
| 65 | ret[newvar] = subd[var] | ||
| 66 | return ret | ||
| 67 | |||
| 68 | @bb.parse.vardepsexclude("BB_NUMBER_THREADS") | ||
| 69 | def read_subpkgdata_extended(pkg, d): | ||
| 70 | import json | ||
| 71 | import bb.compress.zstd | ||
| 72 | |||
| 73 | fn = d.expand("${PKGDATA_DIR}/extended/%s.json.zstd" % pkg) | ||
| 74 | try: | ||
| 75 | num_threads = int(d.getVar("BB_NUMBER_THREADS")) | ||
| 76 | with bb.compress.zstd.open(fn, "rt", encoding="utf-8", num_threads=num_threads) as f: | ||
| 77 | return json.load(f) | ||
| 78 | except FileNotFoundError: | ||
| 79 | return None | ||
| 80 | |||
| 81 | def _pkgmap(d): | ||
| 82 | """Return a dictionary mapping package to recipe name.""" | ||
| 83 | |||
| 84 | pkgdatadir = d.getVar("PKGDATA_DIR") | ||
| 85 | |||
| 86 | pkgmap = {} | ||
| 87 | try: | ||
| 88 | files = os.listdir(pkgdatadir) | ||
| 89 | except OSError: | ||
| 90 | bb.warn("No files in %s?" % pkgdatadir) | ||
| 91 | files = [] | ||
| 92 | |||
| 93 | for pn in [f for f in files if not os.path.isdir(os.path.join(pkgdatadir, f))]: | ||
| 94 | try: | ||
| 95 | pkgdata = read_pkgdatafile(os.path.join(pkgdatadir, pn)) | ||
| 96 | except OSError: | ||
| 97 | continue | ||
| 98 | |||
| 99 | packages = pkgdata.get("PACKAGES") or "" | ||
| 100 | for pkg in packages.split(): | ||
| 101 | pkgmap[pkg] = pn | ||
| 102 | |||
| 103 | return pkgmap | ||
| 104 | |||
| 105 | def pkgmap(d): | ||
| 106 | """Return a dictionary mapping package to recipe name. | ||
| 107 | Cache the mapping in the metadata""" | ||
| 108 | |||
| 109 | pkgmap_data = d.getVar("__pkgmap_data", False) | ||
| 110 | if pkgmap_data is None: | ||
| 111 | pkgmap_data = _pkgmap(d) | ||
| 112 | d.setVar("__pkgmap_data", pkgmap_data) | ||
| 113 | |||
| 114 | return pkgmap_data | ||
| 115 | |||
| 116 | def recipename(pkg, d): | ||
| 117 | """Return the recipe name for the given binary package name.""" | ||
| 118 | |||
| 119 | return pkgmap(d).get(pkg) | ||
| 120 | |||
| 121 | def foreach_runtime_provider_pkgdata(d, rdep, include_rdep=False): | ||
| 122 | pkgdata_dir = d.getVar("PKGDATA_DIR") | ||
| 123 | possibles = set() | ||
| 124 | try: | ||
| 125 | possibles |= set(os.listdir("%s/runtime-rprovides/%s/" % (pkgdata_dir, rdep))) | ||
| 126 | except OSError: | ||
| 127 | pass | ||
| 128 | |||
| 129 | if include_rdep: | ||
| 130 | possibles.add(rdep) | ||
| 131 | |||
| 132 | for p in sorted(list(possibles)): | ||
| 133 | rdep_data = read_subpkgdata(p, d) | ||
| 134 | yield p, rdep_data | ||
| 135 | |||
| 136 | def get_package_mapping(pkg, basepkg, d, depversions=None): | ||
| 137 | import oe.packagedata | ||
| 138 | |||
| 139 | data = oe.packagedata.read_subpkgdata(pkg, d) | ||
| 140 | key = "PKG:%s" % pkg | ||
| 141 | |||
| 142 | if key in data: | ||
| 143 | if bb.data.inherits_class('allarch', d) and bb.data.inherits_class('packagegroup', d) and pkg != data[key]: | ||
| 144 | bb.error("An allarch packagegroup shouldn't depend on packages which are dynamically renamed (%s to %s)" % (pkg, data[key])) | ||
| 145 | # Have to avoid undoing the write_extra_pkgs(global_variants...) | ||
| 146 | if bb.data.inherits_class('allarch', d) and not d.getVar('MULTILIB_VARIANTS') \ | ||
| 147 | and data[key] == basepkg: | ||
| 148 | return pkg | ||
| 149 | if depversions == []: | ||
| 150 | # Avoid returning a mapping if the renamed package rprovides its original name | ||
| 151 | rprovkey = "RPROVIDES:%s" % pkg | ||
| 152 | if rprovkey in data: | ||
| 153 | if pkg in bb.utils.explode_dep_versions2(data[rprovkey]): | ||
| 154 | bb.note("%s rprovides %s, not replacing the latter" % (data[key], pkg)) | ||
| 155 | return pkg | ||
| 156 | # Do map to rewritten package name | ||
| 157 | return data[key] | ||
| 158 | |||
| 159 | return pkg | ||
| 160 | |||
| 161 | def get_package_additional_metadata(pkg_type, d): | ||
| 162 | base_key = "PACKAGE_ADD_METADATA" | ||
| 163 | for key in ("%s_%s" % (base_key, pkg_type.upper()), base_key): | ||
| 164 | if d.getVar(key, False) is None: | ||
| 165 | continue | ||
| 166 | d.setVarFlag(key, "type", "list") | ||
| 167 | if d.getVarFlag(key, "separator") is None: | ||
| 168 | d.setVarFlag(key, "separator", "\\n") | ||
| 169 | metadata_fields = [field.strip() for field in oe.data.typed_value(key, d)] | ||
| 170 | return "\n".join(metadata_fields).strip() | ||
| 171 | |||
| 172 | def runtime_mapping_rename(varname, pkg, d): | ||
| 173 | #bb.note("%s before: %s" % (varname, d.getVar(varname))) | ||
| 174 | |||
| 175 | new_depends = {} | ||
| 176 | deps = bb.utils.explode_dep_versions2(d.getVar(varname) or "") | ||
| 177 | for depend, depversions in deps.items(): | ||
| 178 | new_depend = get_package_mapping(depend, pkg, d, depversions) | ||
| 179 | if depend != new_depend: | ||
| 180 | bb.note("package name mapping done: %s -> %s" % (depend, new_depend)) | ||
| 181 | new_depends[new_depend] = deps[depend] | ||
| 182 | |||
| 183 | d.setVar(varname, bb.utils.join_deps(new_depends, commasep=False)) | ||
| 184 | |||
| 185 | #bb.note("%s after: %s" % (varname, d.getVar(varname))) | ||
| 186 | |||
| 187 | @bb.parse.vardepsexclude("BB_NUMBER_THREADS") | ||
| 188 | def emit_pkgdata(pkgfiles, d): | ||
| 189 | def process_postinst_on_target(pkg, mlprefix): | ||
| 190 | pkgval = d.getVar('PKG:%s' % pkg) | ||
| 191 | if pkgval is None: | ||
| 192 | pkgval = pkg | ||
| 193 | |||
| 194 | defer_fragment = """ | ||
| 195 | if [ -n "$D" ]; then | ||
| 196 | $INTERCEPT_DIR/postinst_intercept delay_to_first_boot %s mlprefix=%s | ||
| 197 | exit 0 | ||
| 198 | fi | ||
| 199 | """ % (pkgval, mlprefix) | ||
| 200 | |||
| 201 | postinst = d.getVar('pkg_postinst:%s' % pkg) | ||
| 202 | postinst_ontarget = d.getVar('pkg_postinst_ontarget:%s' % pkg) | ||
| 203 | |||
| 204 | if postinst_ontarget: | ||
| 205 | bb.debug(1, 'adding deferred pkg_postinst_ontarget() to pkg_postinst() for %s' % pkg) | ||
| 206 | if not postinst: | ||
| 207 | postinst = '#!/bin/sh\n' | ||
| 208 | postinst += defer_fragment | ||
| 209 | postinst += postinst_ontarget | ||
| 210 | d.setVar('pkg_postinst:%s' % pkg, postinst) | ||
| 211 | |||
| 212 | def add_set_e_to_scriptlets(pkg): | ||
| 213 | for scriptlet_name in ('pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm'): | ||
| 214 | scriptlet = d.getVar('%s:%s' % (scriptlet_name, pkg)) | ||
| 215 | if scriptlet: | ||
| 216 | scriptlet_split = scriptlet.split('\n') | ||
| 217 | if scriptlet_split[0].startswith("#!"): | ||
| 218 | scriptlet = scriptlet_split[0] + "\nset -e\n" + "\n".join(scriptlet_split[1:]) | ||
| 219 | else: | ||
| 220 | scriptlet = "set -e\n" + "\n".join(scriptlet_split[0:]) | ||
| 221 | d.setVar('%s:%s' % (scriptlet_name, pkg), scriptlet) | ||
| 222 | |||
| 223 | def write_if_exists(f, pkg, var): | ||
| 224 | def encode(str): | ||
| 225 | import codecs | ||
| 226 | c = codecs.getencoder("unicode_escape") | ||
| 227 | return c(str)[0].decode("latin1") | ||
| 228 | |||
| 229 | val = d.getVar('%s:%s' % (var, pkg)) | ||
| 230 | if val: | ||
| 231 | f.write('%s:%s: %s\n' % (var, pkg, encode(val))) | ||
| 232 | return val | ||
| 233 | val = d.getVar('%s' % (var)) | ||
| 234 | if val: | ||
| 235 | f.write('%s: %s\n' % (var, encode(val))) | ||
| 236 | return val | ||
| 237 | |||
| 238 | def write_extra_pkgs(variants, pn, packages, pkgdatadir): | ||
| 239 | for variant in variants: | ||
| 240 | with open("%s/%s-%s" % (pkgdatadir, variant, pn), 'w') as fd: | ||
| 241 | fd.write("PACKAGES: %s\n" % ' '.join( | ||
| 242 | map(lambda pkg: '%s-%s' % (variant, pkg), packages.split()))) | ||
| 243 | |||
| 244 | def write_extra_runtime_pkgs(variants, packages, pkgdatadir): | ||
| 245 | for variant in variants: | ||
| 246 | for pkg in packages.split(): | ||
| 247 | ml_pkg = "%s-%s" % (variant, pkg) | ||
| 248 | subdata_file = "%s/runtime/%s" % (pkgdatadir, ml_pkg) | ||
| 249 | with open(subdata_file, 'w') as fd: | ||
| 250 | fd.write("PKG:%s: %s" % (ml_pkg, pkg)) | ||
| 251 | |||
| 252 | packages = d.getVar('PACKAGES') | ||
| 253 | pkgdest = d.getVar('PKGDEST') | ||
| 254 | pkgdatadir = d.getVar('PKGDESTWORK') | ||
| 255 | |||
| 256 | data_file = pkgdatadir + d.expand("/${PN}") | ||
| 257 | with open(data_file, 'w') as fd: | ||
| 258 | fd.write("PACKAGES: %s\n" % packages) | ||
| 259 | |||
| 260 | pkgdebugsource = d.getVar("PKGDEBUGSOURCES") or [] | ||
| 261 | |||
| 262 | pn = d.getVar('PN') | ||
| 263 | global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS') or "").split() | ||
| 264 | variants = (d.getVar('MULTILIB_VARIANTS') or "").split() | ||
| 265 | |||
| 266 | if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d): | ||
| 267 | write_extra_pkgs(variants, pn, packages, pkgdatadir) | ||
| 268 | |||
| 269 | if bb.data.inherits_class('allarch', d) and not variants \ | ||
| 270 | and not bb.data.inherits_class('packagegroup', d): | ||
| 271 | write_extra_pkgs(global_variants, pn, packages, pkgdatadir) | ||
| 272 | |||
| 273 | workdir = d.getVar('WORKDIR') | ||
| 274 | |||
| 275 | for pkg in packages.split(): | ||
| 276 | pkgval = d.getVar('PKG:%s' % pkg) | ||
| 277 | if pkgval is None: | ||
| 278 | pkgval = pkg | ||
| 279 | d.setVar('PKG:%s' % pkg, pkg) | ||
| 280 | |||
| 281 | extended_data = { | ||
| 282 | "files_info": {} | ||
| 283 | } | ||
| 284 | |||
| 285 | pkgdestpkg = os.path.join(pkgdest, pkg) | ||
| 286 | files = {} | ||
| 287 | files_extra = {} | ||
| 288 | total_size = 0 | ||
| 289 | seen = set() | ||
| 290 | for f in pkgfiles[pkg]: | ||
| 291 | fpath = os.sep + os.path.relpath(f, pkgdestpkg) | ||
| 292 | |||
| 293 | fstat = os.lstat(f) | ||
| 294 | files[fpath] = fstat.st_size | ||
| 295 | |||
| 296 | extended_data["files_info"].setdefault(fpath, {}) | ||
| 297 | extended_data["files_info"][fpath]['size'] = fstat.st_size | ||
| 298 | |||
| 299 | if fstat.st_ino not in seen: | ||
| 300 | seen.add(fstat.st_ino) | ||
| 301 | total_size += fstat.st_size | ||
| 302 | |||
| 303 | if fpath in pkgdebugsource: | ||
| 304 | extended_data["files_info"][fpath]['debugsrc'] = pkgdebugsource[fpath] | ||
| 305 | del pkgdebugsource[fpath] | ||
| 306 | |||
| 307 | d.setVar('FILES_INFO:' + pkg, json.dumps(files, sort_keys=True)) | ||
| 308 | |||
| 309 | process_postinst_on_target(pkg, d.getVar("MLPREFIX")) | ||
| 310 | add_set_e_to_scriptlets(pkg) | ||
| 311 | |||
| 312 | subdata_file = pkgdatadir + "/runtime/%s" % pkg | ||
| 313 | with open(subdata_file, 'w') as sf: | ||
| 314 | for var in (d.getVar('PKGDATA_VARS') or "").split(): | ||
| 315 | val = write_if_exists(sf, pkg, var) | ||
| 316 | |||
| 317 | write_if_exists(sf, pkg, 'FILERPROVIDESFLIST') | ||
| 318 | for dfile in sorted((d.getVar('FILERPROVIDESFLIST:' + pkg) or "").split()): | ||
| 319 | write_if_exists(sf, pkg, 'FILERPROVIDES:' + dfile) | ||
| 320 | |||
| 321 | write_if_exists(sf, pkg, 'FILERDEPENDSFLIST') | ||
| 322 | for dfile in sorted((d.getVar('FILERDEPENDSFLIST:' + pkg) or "").split()): | ||
| 323 | write_if_exists(sf, pkg, 'FILERDEPENDS:' + dfile) | ||
| 324 | |||
| 325 | sf.write('%s:%s: %d\n' % ('PKGSIZE', pkg, total_size)) | ||
| 326 | |||
| 327 | subdata_extended_file = pkgdatadir + "/extended/%s.json.zstd" % pkg | ||
| 328 | num_threads = int(d.getVar("BB_NUMBER_THREADS")) | ||
| 329 | with bb.compress.zstd.open(subdata_extended_file, "wt", encoding="utf-8", num_threads=num_threads) as f: | ||
| 330 | json.dump(extended_data, f, sort_keys=True, separators=(",", ":")) | ||
| 331 | |||
| 332 | # Symlinks needed for rprovides lookup | ||
| 333 | rprov = d.getVar('RPROVIDES:%s' % pkg) or d.getVar('RPROVIDES') | ||
| 334 | if rprov: | ||
| 335 | for p in bb.utils.explode_deps(rprov): | ||
| 336 | subdata_sym = pkgdatadir + "/runtime-rprovides/%s/%s" % (p, pkg) | ||
| 337 | bb.utils.mkdirhier(os.path.dirname(subdata_sym)) | ||
| 338 | oe.path.relsymlink(subdata_file, subdata_sym, True) | ||
| 339 | |||
| 340 | allow_empty = d.getVar('ALLOW_EMPTY:%s' % pkg) | ||
| 341 | if not allow_empty: | ||
| 342 | allow_empty = d.getVar('ALLOW_EMPTY') | ||
| 343 | root = "%s/%s" % (pkgdest, pkg) | ||
| 344 | os.chdir(root) | ||
| 345 | g = glob('*') | ||
| 346 | if g or allow_empty == "1": | ||
| 347 | # Symlinks needed for reverse lookups (from the final package name) | ||
| 348 | subdata_sym = pkgdatadir + "/runtime-reverse/%s" % pkgval | ||
| 349 | oe.path.relsymlink(subdata_file, subdata_sym, True) | ||
| 350 | |||
| 351 | packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg | ||
| 352 | open(packagedfile, 'w').close() | ||
| 353 | |||
| 354 | if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d): | ||
| 355 | write_extra_runtime_pkgs(variants, packages, pkgdatadir) | ||
| 356 | |||
| 357 | if bb.data.inherits_class('allarch', d) and not variants \ | ||
| 358 | and not bb.data.inherits_class('packagegroup', d): | ||
| 359 | write_extra_runtime_pkgs(global_variants, packages, pkgdatadir) | ||
| 360 | |||
| 361 | def mapping_rename_hook(d): | ||
| 362 | """ | ||
| 363 | Rewrite variables to account for package renaming in things | ||
| 364 | like debian.bbclass or manual PKG variable name changes | ||
| 365 | """ | ||
| 366 | pkg = d.getVar("PKG") | ||
| 367 | oe.packagedata.runtime_mapping_rename("RDEPENDS", pkg, d) | ||
| 368 | oe.packagedata.runtime_mapping_rename("RRECOMMENDS", pkg, d) | ||
| 369 | oe.packagedata.runtime_mapping_rename("RSUGGESTS", pkg, d) | ||
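The runtime pkgdata files written by emit_pkgdata() above are plain text, one "VAR:pkg: value" (or "VAR: value") entry per line, with values escaped via unicode_escape. As a minimal illustrative sketch (not part of the module; the pkgdata path below is hypothetical), such a file could be read back like this:

    # Minimal sketch of reading a runtime pkgdata file back in; the path is
    # a hypothetical example, not taken from the original code.
    def read_runtime_pkgdata(path):
        values = {}
        with open(path) as f:
            for line in f:
                key, _, value = line.partition(': ')
                values[key] = value.rstrip('\n')
        return values

    data = read_runtime_pkgdata('tmp/pkgdata/core2-64/runtime/busybox')
    print(data.get('PKGSIZE:busybox'))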
diff --git a/meta/lib/oe/packagegroup.py b/meta/lib/oe/packagegroup.py deleted file mode 100644 index 7b7594751a..0000000000 --- a/meta/lib/oe/packagegroup.py +++ /dev/null | |||
| @@ -1,36 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | import itertools | ||
| 8 | |||
| 9 | def is_optional(feature, d): | ||
| 10 | return bool(d.getVarFlag("FEATURE_PACKAGES_%s" % feature, "optional")) | ||
| 11 | |||
| 12 | def packages(features, d): | ||
| 13 | for feature in features: | ||
| 14 | packages = d.getVar("FEATURE_PACKAGES_%s" % feature) | ||
| 15 | for pkg in (packages or "").split(): | ||
| 16 | yield pkg | ||
| 17 | |||
| 18 | def required_packages(features, d): | ||
| 19 | req = [feature for feature in features if not is_optional(feature, d)] | ||
| 20 | return packages(req, d) | ||
| 21 | |||
| 22 | def optional_packages(features, d): | ||
| 23 | opt = [feature for feature in features if is_optional(feature, d)] | ||
| 24 | return packages(opt, d) | ||
| 25 | |||
| 26 | def active_packages(features, d): | ||
| 27 | return itertools.chain(required_packages(features, d), | ||
| 28 | optional_packages(features, d)) | ||
| 29 | |||
| 30 | def active_recipes(features, d): | ||
| 31 | import oe.packagedata | ||
| 32 | |||
| 33 | for pkg in active_packages(features, d): | ||
| 34 | recipe = oe.packagedata.recipename(pkg, d) | ||
| 35 | if recipe: | ||
| 36 | yield recipe | ||
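FEATURE_PACKAGES_<feature> holds the packages a feature pulls in, and the "optional" varflag decides whether they land in the required or optional set. A minimal sketch of how these helpers combine, using a mock datastore (FakeData and the feature names are assumptions for illustration):

    # Mock datastore for illustration only; real callers pass the BitBake datastore.
    class FakeData:
        def __init__(self, variables, flags):
            self._vars, self._flags = variables, flags
        def getVar(self, name):
            return self._vars.get(name)
        def getVarFlag(self, name, flag):
            return self._flags.get((name, flag))

    d = FakeData(
        {"FEATURE_PACKAGES_ssh": "openssh", "FEATURE_PACKAGES_debug": "gdb strace"},
        {("FEATURE_PACKAGES_debug", "optional"): "1"},
    )
    print(list(required_packages(["ssh", "debug"], d)))  # ['openssh']
    print(list(optional_packages(["ssh", "debug"], d)))  # ['gdb', 'strace']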
diff --git a/meta/lib/oe/patch.py b/meta/lib/oe/patch.py deleted file mode 100644 index edd77196ee..0000000000 --- a/meta/lib/oe/patch.py +++ /dev/null | |||
| @@ -1,1003 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | import os | ||
| 8 | import shlex | ||
| 9 | import subprocess | ||
| 10 | import oe.path | ||
| 11 | import oe.types | ||
| 12 | |||
| 13 | class NotFoundError(bb.BBHandledException): | ||
| 14 | def __init__(self, path): | ||
| 15 | self.path = path | ||
| 16 | |||
| 17 | def __str__(self): | ||
| 18 | return "Error: %s not found." % self.path | ||
| 19 | |||
| 20 | class CmdError(bb.BBHandledException): | ||
| 21 | def __init__(self, command, exitstatus, output): | ||
| 22 | self.command = command | ||
| 23 | self.status = exitstatus | ||
| 24 | self.output = output | ||
| 25 | |||
| 26 | def __str__(self): | ||
| 27 | return "Command Error: '%s' exited with %d Output:\n%s" % \ | ||
| 28 | (self.command, self.status, self.output) | ||
| 29 | |||
| 30 | |||
| 31 | def runcmd(args, dir = None): | ||
| 32 | if dir: | ||
| 33 | olddir = os.path.abspath(os.curdir) | ||
| 34 | if not os.path.exists(dir): | ||
| 35 | raise NotFoundError(dir) | ||
| 36 | os.chdir(dir) | ||
| 37 | # print("cwd: %s -> %s" % (olddir, dir)) | ||
| 38 | |||
| 39 | try: | ||
| 40 | args = [ shlex.quote(str(arg)) for arg in args ] | ||
| 41 | cmd = " ".join(args) | ||
| 42 | # print("cmd: %s" % cmd) | ||
| 43 | proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) | ||
| 44 | stdout, stderr = proc.communicate() | ||
| 45 | stdout = stdout.decode('utf-8') | ||
| 46 | stderr = stderr.decode('utf-8') | ||
| 47 | exitstatus = proc.returncode | ||
| 48 | if exitstatus != 0: | ||
| 49 | raise CmdError(cmd, exitstatus >> 8, "stdout: %s\nstderr: %s" % (stdout, stderr)) | ||
| 50 | if " fuzz " in stdout and "Hunk " in stdout: | ||
| 51 | # Write the patch fuzz info, wrapped in a header and footer, to the log so | ||
| 52 | # that insane.bbclass can detect it and raise an error or warning | ||
| 53 | bb.note("--- Patch fuzz start ---\n%s\n--- Patch fuzz end ---" % format(stdout)) | ||
| 54 | |||
| 55 | return stdout | ||
| 56 | |||
| 57 | finally: | ||
| 58 | if dir: | ||
| 59 | os.chdir(olddir) | ||
| 60 | |||
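runcmd() quotes each argument, runs the joined command through a shell (optionally in another directory), and raises CmdError on a non-zero exit. A brief usage sketch; the directory path is a hypothetical example:

    # Hypothetical usage of runcmd(); the path is illustrative only.
    try:
        out = runcmd(["git", "status", "--short"], "/path/to/workdir")
    except CmdError as e:
        bb.warn("command failed with status %d:\n%s" % (e.status, e.output))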
| 61 | |||
| 62 | class PatchError(Exception): | ||
| 63 | def __init__(self, msg): | ||
| 64 | self.msg = msg | ||
| 65 | |||
| 66 | def __str__(self): | ||
| 67 | return "Patch Error: %s" % self.msg | ||
| 68 | |||
| 69 | class PatchSet(object): | ||
| 70 | defaults = { | ||
| 71 | "strippath": 1 | ||
| 72 | } | ||
| 73 | |||
| 74 | def __init__(self, dir, d): | ||
| 75 | self.dir = dir | ||
| 76 | self.d = d | ||
| 77 | self.patches = [] | ||
| 78 | self._current = None | ||
| 79 | |||
| 80 | def current(self): | ||
| 81 | return self._current | ||
| 82 | |||
| 83 | def Clean(self): | ||
| 84 | """ | ||
| 85 | Clean out the patch set. Generally includes unapplying all | ||
| 86 | patches and wiping out all associated metadata. | ||
| 87 | """ | ||
| 88 | raise NotImplementedError() | ||
| 89 | |||
| 90 | def Import(self, patch, force): | ||
| 91 | if not patch.get("file"): | ||
| 92 | if not patch.get("remote"): | ||
| 93 | raise PatchError("Patch file must be specified in patch import.") | ||
| 94 | else: | ||
| 95 | patch["file"] = bb.fetch2.localpath(patch["remote"], self.d) | ||
| 96 | |||
| 97 | for param in PatchSet.defaults: | ||
| 98 | if not patch.get(param): | ||
| 99 | patch[param] = PatchSet.defaults[param] | ||
| 100 | |||
| 101 | if patch.get("remote"): | ||
| 102 | patch["file"] = self.d.expand(bb.fetch2.localpath(patch["remote"], self.d)) | ||
| 103 | |||
| 104 | patch["filemd5"] = bb.utils.md5_file(patch["file"]) | ||
| 105 | |||
| 106 | def Push(self, force): | ||
| 107 | raise NotImplementedError() | ||
| 108 | |||
| 109 | def Pop(self, force): | ||
| 110 | raise NotImplementedError() | ||
| 111 | |||
| 112 | def Refresh(self, remote = None, all = None): | ||
| 113 | raise NotImplementedError() | ||
| 114 | |||
| 115 | @staticmethod | ||
| 116 | def getPatchedFiles(patchfile, striplevel, srcdir=None): | ||
| 117 | """ | ||
| 118 | Read a patch file and determine which files it will modify. | ||
| 119 | Params: | ||
| 120 | patchfile: the patch file to read | ||
| 121 | striplevel: the strip level at which the patch is going to be applied | ||
| 122 | srcdir: optional path to join onto the patched file paths | ||
| 123 | Returns: | ||
| 124 | A list of tuples of file path and change mode ('A' for add, | ||
| 125 | 'D' for delete or 'M' for modify) | ||
| 126 | """ | ||
| 127 | |||
| 128 | def patchedpath(patchline): | ||
| 129 | filepth = patchline.split()[1] | ||
| 130 | if filepth.endswith('/dev/null'): | ||
| 131 | return '/dev/null' | ||
| 132 | filesplit = filepth.split(os.sep) | ||
| 133 | if striplevel > len(filesplit): | ||
| 134 | bb.error('Patch %s has invalid strip level %d' % (patchfile, striplevel)) | ||
| 135 | return None | ||
| 136 | return os.sep.join(filesplit[striplevel:]) | ||
| 137 | |||
| 138 | for encoding in ['utf-8', 'latin-1']: | ||
| 139 | try: | ||
| 140 | copiedmode = False | ||
| 141 | filelist = [] | ||
| 142 | with open(patchfile) as f: | ||
| 143 | for line in f: | ||
| 144 | if line.startswith('--- '): | ||
| 145 | patchpth = patchedpath(line) | ||
| 146 | if not patchpth: | ||
| 147 | break | ||
| 148 | if copiedmode: | ||
| 149 | addedfile = patchpth | ||
| 150 | else: | ||
| 151 | removedfile = patchpth | ||
| 152 | elif line.startswith('+++ '): | ||
| 153 | addedfile = patchedpath(line) | ||
| 154 | if not addedfile: | ||
| 155 | break | ||
| 156 | elif line.startswith('*** '): | ||
| 157 | copiedmode = True | ||
| 158 | removedfile = patchedpath(line) | ||
| 159 | if not removedfile: | ||
| 160 | break | ||
| 161 | else: | ||
| 162 | removedfile = None | ||
| 163 | addedfile = None | ||
| 164 | |||
| 165 | if addedfile and removedfile: | ||
| 166 | if removedfile == '/dev/null': | ||
| 167 | mode = 'A' | ||
| 168 | elif addedfile == '/dev/null': | ||
| 169 | mode = 'D' | ||
| 170 | else: | ||
| 171 | mode = 'M' | ||
| 172 | if srcdir: | ||
| 173 | fullpath = os.path.abspath(os.path.join(srcdir, addedfile)) | ||
| 174 | else: | ||
| 175 | fullpath = addedfile | ||
| 176 | filelist.append((fullpath, mode)) | ||
| 177 | except UnicodeDecodeError: | ||
| 178 | continue | ||
| 179 | break | ||
| 180 | else: | ||
| 181 | raise PatchError('Unable to decode %s' % patchfile) | ||
| 182 | |||
| 183 | return filelist | ||
| 184 | |||
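For example, getPatchedFiles() can be used on its own to preview what a patch touches before applying it (the patch path below is hypothetical):

    # Illustrative only: list the files a patch would add ('A'), delete ('D')
    # or modify ('M') when applied with -p1.
    for path, mode in PatchSet.getPatchedFiles("/path/to/fix-build.patch", 1):
        print(mode, path)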
| 185 | |||
| 186 | class PatchTree(PatchSet): | ||
| 187 | def __init__(self, dir, d): | ||
| 188 | PatchSet.__init__(self, dir, d) | ||
| 189 | self.patchdir = os.path.join(self.dir, 'patches') | ||
| 190 | self.seriespath = os.path.join(self.dir, 'patches', 'series') | ||
| 191 | bb.utils.mkdirhier(self.patchdir) | ||
| 192 | |||
| 193 | def _appendPatchFile(self, patch, strippath): | ||
| 194 | with open(self.seriespath, 'a') as f: | ||
| 195 | f.write(os.path.basename(patch) + "," + strippath + "\n") | ||
| 196 | shellcmd = ["cat", patch, ">" , self.patchdir + "/" + os.path.basename(patch)] | ||
| 197 | runcmd(["sh", "-c", " ".join(shellcmd)], self.dir) | ||
| 198 | |||
| 199 | def _removePatch(self, p): | ||
| 200 | patch = {} | ||
| 201 | patch['file'] = p.split(",")[0] | ||
| 202 | patch['strippath'] = p.split(",")[1] | ||
| 203 | self._applypatch(patch, False, True) | ||
| 204 | |||
| 205 | def _removePatchFile(self, all = False): | ||
| 206 | if not os.path.exists(self.seriespath): | ||
| 207 | return | ||
| 208 | with open(self.seriespath, 'r+') as f: | ||
| 209 | patches = f.readlines() | ||
| 210 | if all: | ||
| 211 | for p in reversed(patches): | ||
| 212 | self._removePatch(os.path.join(self.patchdir, p.strip())) | ||
| 213 | patches = [] | ||
| 214 | else: | ||
| 215 | self._removePatch(os.path.join(self.patchdir, patches[-1].strip())) | ||
| 216 | patches.pop() | ||
| 217 | with open(self.seriespath, 'w') as f: | ||
| 218 | for p in patches: | ||
| 219 | f.write(p) | ||
| 220 | |||
| 221 | def Import(self, patch, force = None): | ||
| 222 | """""" | ||
| 223 | PatchSet.Import(self, patch, force) | ||
| 224 | |||
| 225 | if self._current is not None: | ||
| 226 | i = self._current + 1 | ||
| 227 | else: | ||
| 228 | i = 0 | ||
| 229 | self.patches.insert(i, patch) | ||
| 230 | |||
| 231 | def _applypatch(self, patch, force = False, reverse = False, run = True): | ||
| 232 | shellcmd = ["cat", patch['file'], "|", "patch", "--no-backup-if-mismatch", "-p", patch['strippath']] | ||
| 233 | if reverse: | ||
| 234 | shellcmd.append('-R') | ||
| 235 | |||
| 236 | if not run: | ||
| 237 | return "sh" + "-c" + " ".join(shellcmd) | ||
| 238 | |||
| 239 | if not force: | ||
| 240 | shellcmd.append('--dry-run') | ||
| 241 | |||
| 242 | try: | ||
| 243 | output = runcmd(["sh", "-c", " ".join(shellcmd)], self.dir) | ||
| 244 | |||
| 245 | if force: | ||
| 246 | return | ||
| 247 | |||
| 248 | shellcmd.pop(len(shellcmd) - 1) | ||
| 249 | output = runcmd(["sh", "-c", " ".join(shellcmd)], self.dir) | ||
| 250 | except CmdError as err: | ||
| 251 | raise bb.BBHandledException("Applying '%s' failed:\n%s" % | ||
| 252 | (os.path.basename(patch['file']), err.output)) | ||
| 253 | |||
| 254 | if not reverse: | ||
| 255 | self._appendPatchFile(patch['file'], patch['strippath']) | ||
| 256 | |||
| 257 | return output | ||
| 258 | |||
| 259 | def Push(self, force = False, all = False, run = True): | ||
| 260 | bb.note("self._current is %s" % self._current) | ||
| 261 | bb.note("patches is %s" % self.patches) | ||
| 262 | if all: | ||
| 263 | for i in self.patches: | ||
| 264 | bb.note("applying patch %s" % i) | ||
| 265 | self._applypatch(i, force) | ||
| 266 | self._current = i | ||
| 267 | else: | ||
| 268 | if self._current is not None: | ||
| 269 | next = self._current + 1 | ||
| 270 | else: | ||
| 271 | next = 0 | ||
| 272 | |||
| 273 | bb.note("applying patch %s" % self.patches[next]) | ||
| 274 | ret = self._applypatch(self.patches[next], force) | ||
| 275 | |||
| 276 | self._current = next | ||
| 277 | return ret | ||
| 278 | |||
| 279 | def Pop(self, force = None, all = None): | ||
| 280 | if all: | ||
| 281 | self._removePatchFile(True) | ||
| 282 | self._current = None | ||
| 283 | else: | ||
| 284 | self._removePatchFile(False) | ||
| 285 | |||
| 286 | if self._current == 0: | ||
| 287 | self._current = None | ||
| 288 | |||
| 289 | if self._current is not None: | ||
| 290 | self._current = self._current - 1 | ||
| 291 | |||
| 292 | def Clean(self): | ||
| 293 | """""" | ||
| 294 | self.Pop(all=True) | ||
| 295 | |||
| 296 | class GitApplyTree(PatchTree): | ||
| 297 | notes_ref = "refs/notes/devtool" | ||
| 298 | original_patch = 'original patch' | ||
| 299 | ignore_commit = 'ignore' | ||
| 300 | |||
| 301 | def __init__(self, dir, d): | ||
| 302 | PatchTree.__init__(self, dir, d) | ||
| 303 | self.commituser = d.getVar('PATCH_GIT_USER_NAME') | ||
| 304 | self.commitemail = d.getVar('PATCH_GIT_USER_EMAIL') | ||
| 305 | if not self._isInitialized(d): | ||
| 306 | self._initRepo() | ||
| 307 | |||
| 308 | def _isInitialized(self, d): | ||
| 309 | cmd = "git rev-parse --show-toplevel" | ||
| 310 | try: | ||
| 311 | output = runcmd(cmd.split(), self.dir).strip() | ||
| 312 | except CmdError as err: | ||
| 313 | ## runcmd returned non-zero which most likely means 128 | ||
| 314 | ## Not a git directory | ||
| 315 | return False | ||
| 316 | ## Make sure repo is in builddir to not break top-level git repos, or under workdir | ||
| 317 | return os.path.samefile(output, self.dir) or oe.path.is_path_parent(d.getVar('WORKDIR'), output) | ||
| 318 | |||
| 319 | def _initRepo(self): | ||
| 320 | runcmd("git init".split(), self.dir) | ||
| 321 | runcmd("git add .".split(), self.dir) | ||
| 322 | runcmd("git commit -a --allow-empty -m bitbake_patching_started".split(), self.dir) | ||
| 323 | |||
| 324 | @staticmethod | ||
| 325 | def extractPatchHeader(patchfile): | ||
| 326 | """ | ||
| 327 | Extract just the header lines from the top of a patch file | ||
| 328 | """ | ||
| 329 | for encoding in ['utf-8', 'latin-1']: | ||
| 330 | lines = [] | ||
| 331 | try: | ||
| 332 | with open(patchfile, 'r', encoding=encoding) as f: | ||
| 333 | for line in f: | ||
| 334 | if line.startswith('Index: ') or line.startswith('diff -') or line.startswith('---'): | ||
| 335 | break | ||
| 336 | lines.append(line) | ||
| 337 | except UnicodeDecodeError: | ||
| 338 | continue | ||
| 339 | break | ||
| 340 | else: | ||
| 341 | raise PatchError('Unable to find a character encoding to decode %s' % patchfile) | ||
| 342 | return lines | ||
| 343 | |||
| 344 | @staticmethod | ||
| 345 | def decodeAuthor(line): | ||
| 346 | from email.header import decode_header | ||
| 347 | authorval = line.split(':', 1)[1].strip().replace('"', '') | ||
| 348 | result = decode_header(authorval)[0][0] | ||
| 349 | if hasattr(result, 'decode'): | ||
| 350 | result = result.decode('utf-8') | ||
| 351 | return result | ||
| 352 | |||
| 353 | @staticmethod | ||
| 354 | def interpretPatchHeader(headerlines): | ||
| 355 | import re | ||
| 356 | author_re = re.compile(r'[\S ]+ <\S+@\S+\.\S+>') | ||
| 357 | from_commit_re = re.compile(r'^From [a-z0-9]{40} .*') | ||
| 358 | outlines = [] | ||
| 359 | author = None | ||
| 360 | date = None | ||
| 361 | subject = None | ||
| 362 | for line in headerlines: | ||
| 363 | if line.startswith('Subject: '): | ||
| 364 | subject = line.split(':', 1)[1] | ||
| 365 | # Remove any [PATCH][oe-core] etc. | ||
| 366 | subject = re.sub(r'\[.+?\]\s*', '', subject) | ||
| 367 | continue | ||
| 368 | elif line.startswith('From: ') or line.startswith('Author: '): | ||
| 369 | authorval = GitApplyTree.decodeAuthor(line) | ||
| 370 | # git is fussy about author formatting i.e. it must be Name <email@domain> | ||
| 371 | if author_re.match(authorval): | ||
| 372 | author = authorval | ||
| 373 | continue | ||
| 374 | elif line.startswith('Date: '): | ||
| 375 | if date is None: | ||
| 376 | dateval = line.split(':', 1)[1].strip() | ||
| 377 | # Very crude check for date format, since git will blow up if it's not in the right | ||
| 378 | # format. Without e.g. a python-dateutils dependency we can't do a whole lot more | ||
| 379 | if len(dateval) > 12: | ||
| 380 | date = dateval | ||
| 381 | continue | ||
| 382 | elif not author and line.lower().startswith('signed-off-by: '): | ||
| 383 | authorval = GitApplyTree.decodeAuthor(line) | ||
| 384 | # git is fussy about author formatting i.e. it must be Name <email@domain> | ||
| 385 | if author_re.match(authorval): | ||
| 386 | author = authorval | ||
| 387 | elif from_commit_re.match(line): | ||
| 388 | # We don't want the From <commit> line - if it's present it will break rebasing | ||
| 389 | continue | ||
| 390 | outlines.append(line) | ||
| 391 | |||
| 392 | if not subject: | ||
| 393 | firstline = None | ||
| 394 | for line in headerlines: | ||
| 395 | line = line.strip() | ||
| 396 | if firstline: | ||
| 397 | if line: | ||
| 398 | # Second line is not blank, the first line probably isn't usable | ||
| 399 | firstline = None | ||
| 400 | break | ||
| 401 | elif line: | ||
| 402 | firstline = line | ||
| 403 | if firstline and not firstline.startswith(('#', 'Index:', 'Upstream-Status:')) and len(firstline) < 100: | ||
| 404 | subject = firstline | ||
| 405 | |||
| 406 | return outlines, author, date, subject | ||
| 407 | |||
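Together, extractPatchHeader() and interpretPatchHeader() recover the author, date and subject a patch carries in its leading lines. A hedged sketch (the patch path is hypothetical):

    # Illustrative only; prepareCommit() below performs these same steps internally.
    lines = GitApplyTree.extractPatchHeader("/path/to/0001-fix.patch")
    body, author, date, subject = GitApplyTree.interpretPatchHeader(lines)
    print(author, date, subject)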
| 408 | @staticmethod | ||
| 409 | def gitCommandUserOptions(cmd, commituser=None, commitemail=None, d=None): | ||
| 410 | if d: | ||
| 411 | commituser = d.getVar('PATCH_GIT_USER_NAME') | ||
| 412 | commitemail = d.getVar('PATCH_GIT_USER_EMAIL') | ||
| 413 | if commituser: | ||
| 414 | cmd += ['-c', 'user.name="%s"' % commituser] | ||
| 415 | if commitemail: | ||
| 416 | cmd += ['-c', 'user.email="%s"' % commitemail] | ||
| 417 | |||
| 418 | @staticmethod | ||
| 419 | def prepareCommit(patchfile, commituser=None, commitemail=None): | ||
| 420 | """ | ||
| 421 | Prepare a git commit command line based on the header from a patch file | ||
| 422 | (typically this is useful for patches that cannot be applied with "git am" due to formatting) | ||
| 423 | """ | ||
| 424 | import tempfile | ||
| 425 | # Process patch header and extract useful information | ||
| 426 | lines = GitApplyTree.extractPatchHeader(patchfile) | ||
| 427 | outlines, author, date, subject = GitApplyTree.interpretPatchHeader(lines) | ||
| 428 | if not author or not subject or not date: | ||
| 429 | try: | ||
| 430 | shellcmd = ["git", "log", "--format=email", "--follow", "--diff-filter=A", "--", patchfile] | ||
| 431 | out = runcmd(["sh", "-c", " ".join(shellcmd)], os.path.dirname(patchfile)) | ||
| 432 | except CmdError: | ||
| 433 | out = None | ||
| 434 | if out: | ||
| 435 | _, newauthor, newdate, newsubject = GitApplyTree.interpretPatchHeader(out.splitlines()) | ||
| 436 | if not author: | ||
| 437 | # If we're setting the author then the date should be set as well | ||
| 438 | author = newauthor | ||
| 439 | date = newdate | ||
| 440 | elif not date: | ||
| 441 | # If we don't do this we'll get the current date, at least this will be closer | ||
| 442 | date = newdate | ||
| 443 | if not subject: | ||
| 444 | subject = newsubject | ||
| 445 | if subject and not (outlines and outlines[0].strip() == subject): | ||
| 446 | outlines.insert(0, '%s\n\n' % subject.strip()) | ||
| 447 | |||
| 448 | # Write out commit message to a file | ||
| 449 | with tempfile.NamedTemporaryFile('w', delete=False) as tf: | ||
| 450 | tmpfile = tf.name | ||
| 451 | for line in outlines: | ||
| 452 | tf.write(line) | ||
| 453 | # Prepare git command | ||
| 454 | cmd = ["git"] | ||
| 455 | GitApplyTree.gitCommandUserOptions(cmd, commituser, commitemail) | ||
| 456 | cmd += ["commit", "-F", tmpfile, "--no-verify"] | ||
| 457 | # git doesn't like plain email addresses as authors | ||
| 458 | if author and '<' in author: | ||
| 459 | cmd.append('--author="%s"' % author) | ||
| 460 | if date: | ||
| 461 | cmd.append('--date="%s"' % date) | ||
| 462 | return (tmpfile, cmd) | ||
| 463 | |||
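prepareCommit() hands back a temporary commit-message file together with the git command to run; the caller must remove the file afterwards, as _commitpatch() further below does. A hedged usage sketch with hypothetical paths and identity:

    # Illustrative only; paths, user name and email are assumptions.
    tmpfile, cmd = GitApplyTree.prepareCommit("/path/to/0001-fix.patch",
                                              commituser="Jane Doe",
                                              commitemail="jane@example.com")
    try:
        runcmd(cmd, "/path/to/source/tree")
    finally:
        os.remove(tmpfile)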
| 464 | @staticmethod | ||
| 465 | def addNote(repo, ref, key, value=None, commituser=None, commitemail=None): | ||
| 466 | note = key + (": %s" % value if value else "") | ||
| 467 | notes_ref = GitApplyTree.notes_ref | ||
| 468 | runcmd(["git", "config", "notes.rewriteMode", "ignore"], repo) | ||
| 469 | runcmd(["git", "config", "notes.displayRef", notes_ref, notes_ref], repo) | ||
| 470 | runcmd(["git", "config", "notes.rewriteRef", notes_ref, notes_ref], repo) | ||
| 471 | cmd = ["git"] | ||
| 472 | GitApplyTree.gitCommandUserOptions(cmd, commituser, commitemail) | ||
| 473 | runcmd(cmd + ["notes", "--ref", notes_ref, "append", "-m", note, ref], repo) | ||
| 474 | |||
| 475 | @staticmethod | ||
| 476 | def removeNote(repo, ref, key, commituser=None, commitemail=None): | ||
| 477 | notes = GitApplyTree.getNotes(repo, ref) | ||
| 478 | notes = {k: v for k, v in notes.items() if k != key and not k.startswith(key + ":")} | ||
| 479 | runcmd(["git", "notes", "--ref", GitApplyTree.notes_ref, "remove", "--ignore-missing", ref], repo) | ||
| 480 | for note, value in notes.items(): | ||
| 481 | GitApplyTree.addNote(repo, ref, note, value, commituser, commitemail) | ||
| 482 | |||
| 483 | @staticmethod | ||
| 484 | def getNotes(repo, ref): | ||
| 485 | import re | ||
| 486 | |||
| 487 | note = None | ||
| 488 | try: | ||
| 489 | note = runcmd(["git", "notes", "--ref", GitApplyTree.notes_ref, "show", ref], repo) | ||
| 490 | prefix = "" | ||
| 491 | except CmdError: | ||
| 492 | note = runcmd(['git', 'show', '-s', '--format=%B', ref], repo) | ||
| 493 | prefix = "%% " | ||
| 494 | |||
| 495 | note_re = re.compile(r'^%s(.*?)(?::\s*(.*))?$' % prefix) | ||
| 496 | notes = dict() | ||
| 497 | for line in note.splitlines(): | ||
| 498 | m = note_re.match(line) | ||
| 499 | if m: | ||
| 500 | notes[m.group(1)] = m.group(2) | ||
| 501 | |||
| 502 | return notes | ||
| 503 | |||
| 504 | @staticmethod | ||
| 505 | def commitIgnored(subject, dir=None, files=None, d=None): | ||
| 506 | if files: | ||
| 507 | runcmd(['git', 'add'] + files, dir) | ||
| 508 | cmd = ["git"] | ||
| 509 | GitApplyTree.gitCommandUserOptions(cmd, d=d) | ||
| 510 | cmd += ["commit", "-m", subject, "--no-verify"] | ||
| 511 | runcmd(cmd, dir) | ||
| 512 | GitApplyTree.addNote(dir, "HEAD", GitApplyTree.ignore_commit, d.getVar('PATCH_GIT_USER_NAME'), d.getVar('PATCH_GIT_USER_EMAIL')) | ||
| 513 | |||
| 514 | @staticmethod | ||
| 515 | def extractPatches(tree, startcommits, outdir, paths=None): | ||
| 516 | import tempfile | ||
| 517 | import shutil | ||
| 518 | tempdir = tempfile.mkdtemp(prefix='oepatch') | ||
| 519 | try: | ||
| 520 | for name, rev in startcommits.items(): | ||
| 521 | shellcmd = ["git", "format-patch", "--no-signature", "--no-numbered", rev, "-o", tempdir] | ||
| 522 | if paths: | ||
| 523 | shellcmd.append('--') | ||
| 524 | shellcmd.extend(paths) | ||
| 525 | out = runcmd(["sh", "-c", " ".join(shellcmd)], os.path.join(tree, name)) | ||
| 526 | if out: | ||
| 527 | for srcfile in out.split(): | ||
| 528 | # This loop, which is used to remove any line that | ||
| 529 | # starts with "%% original patch", is kept for backwards | ||
| 530 | # compatibility. If/when that compatibility is dropped, | ||
| 531 | # it can be replaced with code to just read the first | ||
| 532 | # line of the patch file to get the SHA-1, and the code | ||
| 533 | # below that writes the modified patch file can be | ||
| 534 | # replaced with a simple file move. | ||
| 535 | for encoding in ['utf-8', 'latin-1']: | ||
| 536 | patchlines = [] | ||
| 537 | try: | ||
| 538 | with open(srcfile, 'r', encoding=encoding, newline='') as f: | ||
| 539 | for line in f: | ||
| 540 | if line.startswith("%% " + GitApplyTree.original_patch): | ||
| 541 | continue | ||
| 542 | patchlines.append(line) | ||
| 543 | except UnicodeDecodeError: | ||
| 544 | continue | ||
| 545 | break | ||
| 546 | else: | ||
| 547 | raise PatchError('Unable to find a character encoding to decode %s' % srcfile) | ||
| 548 | |||
| 549 | sha1 = patchlines[0].split()[1] | ||
| 550 | notes = GitApplyTree.getNotes(os.path.join(tree, name), sha1) | ||
| 551 | if GitApplyTree.ignore_commit in notes: | ||
| 552 | continue | ||
| 553 | outfile = notes.get(GitApplyTree.original_patch, os.path.basename(srcfile)) | ||
| 554 | |||
| 555 | bb.utils.mkdirhier(os.path.join(outdir, name)) | ||
| 556 | with open(os.path.join(outdir, name, outfile), 'w') as of: | ||
| 557 | for line in patchlines: | ||
| 558 | of.write(line) | ||
| 559 | finally: | ||
| 560 | shutil.rmtree(tempdir) | ||
| 561 | |||
| 562 | def _need_dirty_check(self): | ||
| 563 | fetch = bb.fetch2.Fetch([], self.d) | ||
| 564 | check_dirtyness = False | ||
| 565 | for url in fetch.urls: | ||
| 566 | url_data = fetch.ud[url] | ||
| 567 | parm = url_data.parm | ||
| 568 | # a git url with a subpath param will surely be dirty, since the | ||
| 569 | # git tree from which we clone is stripped of all files that are | ||
| 570 | # not under the subpath | ||
| 571 | if url_data.type == 'git' and parm.get('subpath'): | ||
| 572 | check_dirtyness = True | ||
| 573 | return check_dirtyness | ||
| 574 | |||
| 575 | def _commitpatch(self, patch, patchfilevar): | ||
| 576 | output = "" | ||
| 577 | # Add all files | ||
| 578 | shellcmd = ["git", "add", "-f", "-A", "."] | ||
| 579 | output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir) | ||
| 580 | # Exclude the patches directory | ||
| 581 | shellcmd = ["git", "reset", "HEAD", self.patchdir] | ||
| 582 | output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir) | ||
| 583 | # Commit the result | ||
| 584 | (tmpfile, shellcmd) = self.prepareCommit(patch['file'], self.commituser, self.commitemail) | ||
| 585 | try: | ||
| 586 | shellcmd.insert(0, patchfilevar) | ||
| 587 | output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir) | ||
| 588 | finally: | ||
| 589 | os.remove(tmpfile) | ||
| 590 | return output | ||
| 591 | |||
| 592 | def _applypatch(self, patch, force = False, reverse = False, run = True): | ||
| 593 | import shutil | ||
| 594 | |||
| 595 | def _applypatchhelper(shellcmd, patch, force = False, reverse = False, run = True): | ||
| 596 | if reverse: | ||
| 597 | shellcmd.append('-R') | ||
| 598 | |||
| 599 | shellcmd.append(patch['file']) | ||
| 600 | |||
| 601 | if not run: | ||
| 602 | return "sh" + "-c" + " ".join(shellcmd) | ||
| 603 | |||
| 604 | return runcmd(["sh", "-c", " ".join(shellcmd)], self.dir) | ||
| 605 | |||
| 606 | reporoot = (runcmd("git rev-parse --show-toplevel".split(), self.dir) or '').strip() | ||
| 607 | if not reporoot: | ||
| 608 | raise Exception("Cannot get repository root for directory %s" % self.dir) | ||
| 609 | |||
| 610 | patch_applied = True | ||
| 611 | try: | ||
| 612 | patchfilevar = 'PATCHFILE="%s"' % os.path.basename(patch['file']) | ||
| 613 | if self._need_dirty_check(): | ||
| 614 | # Check dirtyness of the tree | ||
| 615 | try: | ||
| 616 | output = runcmd(["git", "--work-tree=%s" % reporoot, "status", "--short"]) | ||
| 617 | except CmdError: | ||
| 618 | pass | ||
| 619 | else: | ||
| 620 | if output: | ||
| 621 | # The tree is dirty, so there is no point trying to apply patches with | ||
| 622 | # git any more since that would fail; fall back directly to patch | ||
| 623 | output = PatchTree._applypatch(self, patch, force, reverse, run) | ||
| 624 | output += self._commitpatch(patch, patchfilevar) | ||
| 625 | return output | ||
| 626 | try: | ||
| 627 | shellcmd = [patchfilevar, "git", "--work-tree=%s" % reporoot] | ||
| 628 | self.gitCommandUserOptions(shellcmd, self.commituser, self.commitemail) | ||
| 629 | shellcmd += ["am", "-3", "--keep-cr", "--no-scissors", "-p%s" % patch['strippath']] | ||
| 630 | return _applypatchhelper(shellcmd, patch, force, reverse, run) | ||
| 631 | except CmdError: | ||
| 632 | # Need to abort the git am, or we'll still be within it at the end | ||
| 633 | try: | ||
| 634 | shellcmd = ["git", "--work-tree=%s" % reporoot, "am", "--abort"] | ||
| 635 | runcmd(["sh", "-c", " ".join(shellcmd)], self.dir) | ||
| 636 | except CmdError: | ||
| 637 | pass | ||
| 638 | # git am won't always clean up after itself, sadly, so... | ||
| 639 | shellcmd = ["git", "--work-tree=%s" % reporoot, "reset", "--hard", "HEAD"] | ||
| 640 | runcmd(["sh", "-c", " ".join(shellcmd)], self.dir) | ||
| 641 | # Also need to take care of any stray untracked files | ||
| 642 | shellcmd = ["git", "--work-tree=%s" % reporoot, "clean", "-f"] | ||
| 643 | runcmd(["sh", "-c", " ".join(shellcmd)], self.dir) | ||
| 644 | |||
| 645 | # Fall back to git apply | ||
| 646 | shellcmd = ["git", "--git-dir=%s" % reporoot, "apply", "-p%s" % patch['strippath']] | ||
| 647 | try: | ||
| 648 | output = _applypatchhelper(shellcmd, patch, force, reverse, run) | ||
| 649 | except CmdError: | ||
| 650 | # Fall back to patch | ||
| 651 | output = PatchTree._applypatch(self, patch, force, reverse, run) | ||
| 652 | output += self._commitpatch(patch, patchfilevar) | ||
| 653 | return output | ||
| 654 | except: | ||
| 655 | patch_applied = False | ||
| 656 | raise | ||
| 657 | finally: | ||
| 658 | if patch_applied: | ||
| 659 | GitApplyTree.addNote(self.dir, "HEAD", GitApplyTree.original_patch, os.path.basename(patch['file']), self.commituser, self.commitemail) | ||
| 660 | |||
| 661 | |||
| 662 | class QuiltTree(PatchSet): | ||
| 663 | def _runcmd(self, args, run = True): | ||
| 664 | quiltrc = self.d.getVar('QUILTRCFILE') | ||
| 665 | if not run: | ||
| 666 | return ["quilt"] + ["--quiltrc"] + [quiltrc] + args | ||
| 667 | runcmd(["quilt"] + ["--quiltrc"] + [quiltrc] + args, self.dir) | ||
| 668 | |||
| 669 | def _quiltpatchpath(self, file): | ||
| 670 | return os.path.join(self.dir, "patches", os.path.basename(file)) | ||
| 671 | |||
| 672 | |||
| 673 | def __init__(self, dir, d): | ||
| 674 | PatchSet.__init__(self, dir, d) | ||
| 675 | self.initialized = False | ||
| 676 | p = os.path.join(self.dir, 'patches') | ||
| 677 | if not os.path.exists(p): | ||
| 678 | os.makedirs(p) | ||
| 679 | |||
| 680 | def Clean(self): | ||
| 681 | try: | ||
| 682 | # make sure that patches/series file exists before quilt pop to keep quilt-0.67 happy | ||
| 683 | open(os.path.join(self.dir, "patches","series"), 'a').close() | ||
| 684 | self._runcmd(["pop", "-a", "-f"]) | ||
| 685 | oe.path.remove(os.path.join(self.dir, "patches","series")) | ||
| 686 | except Exception: | ||
| 687 | pass | ||
| 688 | self.initialized = True | ||
| 689 | |||
| 690 | def InitFromDir(self): | ||
| 691 | # read series -> self.patches | ||
| 692 | seriespath = os.path.join(self.dir, 'patches', 'series') | ||
| 693 | if not os.path.exists(self.dir): | ||
| 694 | raise NotFoundError(self.dir) | ||
| 695 | if os.path.exists(seriespath): | ||
| 696 | with open(seriespath, 'r') as f: | ||
| 697 | for line in f.readlines(): | ||
| 698 | patch = {} | ||
| 699 | parts = line.strip().split() | ||
| 700 | patch["quiltfile"] = self._quiltpatchpath(parts[0]) | ||
| 701 | patch["quiltfilemd5"] = bb.utils.md5_file(patch["quiltfile"]) | ||
| 702 | if len(parts) > 1: | ||
| 703 | patch["strippath"] = parts[1][2:] | ||
| 704 | self.patches.append(patch) | ||
| 705 | |||
| 706 | # determine which patches are applied -> self._current | ||
| 707 | try: | ||
| 708 | output = runcmd(["quilt", "applied"], self.dir) | ||
| 709 | except CmdError as err: | ||
| 710 | # "No patches applied" from quilt is not an error here | ||
| 711 | if err.output.strip() == "No patches applied": | ||
| 712 | return | ||
| 713 | else: | ||
| 714 | raise | ||
| 715 | output = [val for val in output.split('\n') if not val.startswith('#')] | ||
| 716 | for patch in self.patches: | ||
| 717 | if os.path.basename(patch["quiltfile"]) == output[-1]: | ||
| 718 | self._current = self.patches.index(patch) | ||
| 719 | self.initialized = True | ||
| 720 | |||
| 721 | def Import(self, patch, force = None): | ||
| 722 | if not self.initialized: | ||
| 723 | self.InitFromDir() | ||
| 724 | PatchSet.Import(self, patch, force) | ||
| 725 | oe.path.symlink(patch["file"], self._quiltpatchpath(patch["file"]), force=True) | ||
| 726 | with open(os.path.join(self.dir, "patches", "series"), "a") as f: | ||
| 727 | f.write(os.path.basename(patch["file"]) + " -p" + patch["strippath"] + "\n") | ||
| 728 | patch["quiltfile"] = self._quiltpatchpath(patch["file"]) | ||
| 729 | patch["quiltfilemd5"] = bb.utils.md5_file(patch["quiltfile"]) | ||
| 730 | |||
| 731 | # TODO: determine if the file being imported: | ||
| 732 | # 1) is already imported, and is the same | ||
| 733 | # 2) is already imported, but differs | ||
| 734 | |||
| 735 | self.patches.insert(self._current or 0, patch) | ||
| 736 | |||
| 737 | |||
| 738 | def Push(self, force = False, all = False, run = True): | ||
| 739 | # quilt push [-f] | ||
| 740 | |||
| 741 | args = ["push"] | ||
| 742 | if force: | ||
| 743 | args.append("-f") | ||
| 744 | if all: | ||
| 745 | args.append("-a") | ||
| 746 | if not run: | ||
| 747 | return self._runcmd(args, run) | ||
| 748 | |||
| 749 | self._runcmd(args) | ||
| 750 | |||
| 751 | if self._current is not None: | ||
| 752 | self._current = self._current + 1 | ||
| 753 | else: | ||
| 754 | self._current = 0 | ||
| 755 | |||
| 756 | def Pop(self, force = None, all = None): | ||
| 757 | # quilt pop [-f] | ||
| 758 | args = ["pop"] | ||
| 759 | if force: | ||
| 760 | args.append("-f") | ||
| 761 | if all: | ||
| 762 | args.append("-a") | ||
| 763 | |||
| 764 | self._runcmd(args) | ||
| 765 | |||
| 766 | if self._current == 0: | ||
| 767 | self._current = None | ||
| 768 | |||
| 769 | if self._current is not None: | ||
| 770 | self._current = self._current - 1 | ||
| 771 | |||
| 772 | def Refresh(self, **kwargs): | ||
| 773 | if kwargs.get("remote"): | ||
| 774 | patch = self.patches[kwargs["patch"]] | ||
| 775 | if not patch: | ||
| 776 | raise PatchError("No patch found at index %s in patchset." % kwargs["patch"]) | ||
| 777 | (type, host, path, user, pswd, parm) = bb.fetch.decodeurl(patch["remote"]) | ||
| 778 | if type == "file": | ||
| 779 | import shutil | ||
| 780 | if not patch.get("file") and patch.get("remote"): | ||
| 781 | patch["file"] = bb.fetch2.localpath(patch["remote"], self.d) | ||
| 782 | |||
| 783 | shutil.copyfile(patch["quiltfile"], patch["file"]) | ||
| 784 | else: | ||
| 785 | raise PatchError("Unable to do a remote refresh of %s, unsupported remote url scheme %s." % (os.path.basename(patch["quiltfile"]), type)) | ||
| 786 | else: | ||
| 787 | # quilt refresh | ||
| 788 | args = ["refresh"] | ||
| 789 | if kwargs.get("quiltfile"): | ||
| 790 | args.append(os.path.basename(kwargs["quiltfile"])) | ||
| 791 | elif kwargs.get("patch"): | ||
| 792 | args.append(os.path.basename(self.patches[kwargs["patch"]]["quiltfile"])) | ||
| 793 | self._runcmd(args) | ||
| 794 | |||
| 795 | class Resolver(object): | ||
| 796 | def __init__(self, patchset, terminal): | ||
| 797 | raise NotImplementedError() | ||
| 798 | |||
| 799 | def Resolve(self): | ||
| 800 | raise NotImplementedError() | ||
| 801 | |||
| 802 | def Revert(self): | ||
| 803 | raise NotImplementedError() | ||
| 804 | |||
| 805 | def Finalize(self): | ||
| 806 | raise NotImplementedError() | ||
| 807 | |||
| 808 | class NOOPResolver(Resolver): | ||
| 809 | def __init__(self, patchset, terminal): | ||
| 810 | self.patchset = patchset | ||
| 811 | self.terminal = terminal | ||
| 812 | |||
| 813 | def Resolve(self): | ||
| 814 | olddir = os.path.abspath(os.curdir) | ||
| 815 | os.chdir(self.patchset.dir) | ||
| 816 | try: | ||
| 817 | self.patchset.Push() | ||
| 818 | except Exception: | ||
| 819 | import sys | ||
| 820 | raise | ||
| 821 | finally: | ||
| 822 | os.chdir(olddir) | ||
| 823 | |||
| 824 | # Patch resolver which relies on the user doing all the work involved in the | ||
| 825 | # resolution, with the exception of refreshing the remote copy of the patch | ||
| 826 | # files (the urls). | ||
| 827 | class UserResolver(Resolver): | ||
| 828 | def __init__(self, patchset, terminal): | ||
| 829 | self.patchset = patchset | ||
| 830 | self.terminal = terminal | ||
| 831 | |||
| 832 | # Force a push in the patchset, then drop to a shell for the user to | ||
| 833 | # resolve any rejected hunks | ||
| 834 | def Resolve(self): | ||
| 835 | olddir = os.path.abspath(os.curdir) | ||
| 836 | os.chdir(self.patchset.dir) | ||
| 837 | try: | ||
| 838 | self.patchset.Push(False) | ||
| 839 | except CmdError as v: | ||
| 840 | # Patch application failed | ||
| 841 | patchcmd = self.patchset.Push(True, False, False) | ||
| 842 | |||
| 843 | t = self.patchset.d.getVar('T') | ||
| 844 | if not t: | ||
| 845 | bb.msg.fatal("Build", "T not set") | ||
| 846 | bb.utils.mkdirhier(t) | ||
| 847 | import random | ||
| 848 | rcfile = "%s/bashrc.%s.%s" % (t, str(os.getpid()), random.random()) | ||
| 849 | with open(rcfile, "w") as f: | ||
| 850 | f.write("echo '*** Manual patch resolution mode ***'\n") | ||
| 851 | f.write("echo 'Dropping to a shell, so patch rejects can be fixed manually.'\n") | ||
| 852 | f.write("echo 'Run \"quilt refresh\" when patch is corrected, press CTRL+D to exit.'\n") | ||
| 853 | f.write("echo ''\n") | ||
| 854 | f.write(" ".join(patchcmd) + "\n") | ||
| 855 | os.chmod(rcfile, 0o775) | ||
| 856 | |||
| 857 | self.terminal("bash --rcfile " + rcfile, 'Patch Rejects: Please fix patch rejects manually', self.patchset.d) | ||
| 858 | |||
| 859 | # Construct a new PatchSet after the user's changes, compare the | ||
| 860 | # sets, checking patches for modifications, and doing a remote | ||
| 861 | # refresh on each. | ||
| 862 | oldpatchset = self.patchset | ||
| 863 | self.patchset = oldpatchset.__class__(self.patchset.dir, self.patchset.d) | ||
| 864 | |||
| 865 | for patch in self.patchset.patches: | ||
| 866 | oldpatch = None | ||
| 867 | for opatch in oldpatchset.patches: | ||
| 868 | if opatch["quiltfile"] == patch["quiltfile"]: | ||
| 869 | oldpatch = opatch | ||
| 870 | |||
| 871 | if oldpatch: | ||
| 872 | patch["remote"] = oldpatch["remote"] | ||
| 873 | if patch["quiltfile"] == oldpatch["quiltfile"]: | ||
| 874 | if patch["quiltfilemd5"] != oldpatch["quiltfilemd5"]: | ||
| 875 | bb.note("Patch %s has changed, updating remote url %s" % (os.path.basename(patch["quiltfile"]), patch["remote"])) | ||
| 876 | # user change? remote refresh | ||
| 877 | self.patchset.Refresh(remote=True, patch=self.patchset.patches.index(patch)) | ||
| 878 | else: | ||
| 879 | # User did not fix the problem. Abort. | ||
| 880 | raise PatchError("Patch application failed, and user did not fix and refresh the patch.") | ||
| 881 | except Exception: | ||
| 882 | raise | ||
| 883 | finally: | ||
| 884 | os.chdir(olddir) | ||
| 885 | |||
| 886 | |||
| 887 | def patch_path(url, fetch, unpackdir, expand=True): | ||
| 888 | """Return the local path of a patch, or return nothing if this isn't a patch""" | ||
| 889 | |||
| 890 | local = fetch.localpath(url) | ||
| 891 | if os.path.isdir(local): | ||
| 892 | return | ||
| 893 | base, ext = os.path.splitext(os.path.basename(local)) | ||
| 894 | if ext in ('.gz', '.bz2', '.xz', '.Z'): | ||
| 895 | if expand: | ||
| 896 | local = os.path.join(unpackdir, base) | ||
| 897 | ext = os.path.splitext(base)[1] | ||
| 898 | |||
| 899 | urldata = fetch.ud[url] | ||
| 900 | if "apply" in urldata.parm: | ||
| 901 | apply = oe.types.boolean(urldata.parm["apply"]) | ||
| 902 | if not apply: | ||
| 903 | return | ||
| 904 | elif ext not in (".diff", ".patch"): | ||
| 905 | return | ||
| 906 | |||
| 907 | return local | ||
| 908 | |||
| 909 | def src_patches(d, all=False, expand=True): | ||
| 910 | unpackdir = d.getVar('UNPACKDIR') | ||
| 911 | fetch = bb.fetch2.Fetch([], d) | ||
| 912 | patches = [] | ||
| 913 | sources = [] | ||
| 914 | for url in fetch.urls: | ||
| 915 | local = patch_path(url, fetch, unpackdir, expand) | ||
| 916 | if not local: | ||
| 917 | if all: | ||
| 918 | local = fetch.localpath(url) | ||
| 919 | sources.append(local) | ||
| 920 | continue | ||
| 921 | |||
| 922 | urldata = fetch.ud[url] | ||
| 923 | parm = urldata.parm | ||
| 924 | patchname = parm.get('pname') or os.path.basename(local) | ||
| 925 | |||
| 926 | apply, reason = should_apply(parm, d) | ||
| 927 | if not apply: | ||
| 928 | if reason: | ||
| 929 | bb.note("Patch %s %s" % (patchname, reason)) | ||
| 930 | continue | ||
| 931 | |||
| 932 | patchparm = {'patchname': patchname} | ||
| 933 | if "striplevel" in parm: | ||
| 934 | striplevel = parm["striplevel"] | ||
| 935 | elif "pnum" in parm: | ||
| 936 | #bb.msg.warn(None, "Deprecated usage of 'pnum' url parameter in '%s', please use 'striplevel'" % url) | ||
| 937 | striplevel = parm["pnum"] | ||
| 938 | else: | ||
| 939 | striplevel = '1' | ||
| 940 | patchparm['striplevel'] = striplevel | ||
| 941 | |||
| 942 | patchdir = parm.get('patchdir') | ||
| 943 | if patchdir: | ||
| 944 | patchparm['patchdir'] = patchdir | ||
| 945 | |||
| 946 | localurl = bb.fetch.encodeurl(('file', '', local, '', '', patchparm)) | ||
| 947 | patches.append(localurl) | ||
| 948 | |||
| 949 | if all: | ||
| 950 | return sources | ||
| 951 | |||
| 952 | return patches | ||
| 953 | |||
| 954 | |||
| 955 | def should_apply(parm, d): | ||
| 956 | import bb.utils | ||
| 957 | if "mindate" in parm or "maxdate" in parm: | ||
| 958 | pn = d.getVar('PN') | ||
| 959 | srcdate = d.getVar('SRCDATE_%s' % pn) | ||
| 960 | if not srcdate: | ||
| 961 | srcdate = d.getVar('SRCDATE') | ||
| 962 | |||
| 963 | if srcdate == "now": | ||
| 964 | srcdate = d.getVar('DATE') | ||
| 965 | |||
| 966 | if "maxdate" in parm and parm["maxdate"] < srcdate: | ||
| 967 | return False, 'is outdated' | ||
| 968 | |||
| 969 | if "mindate" in parm and parm["mindate"] > srcdate: | ||
| 970 | return False, 'is predated' | ||
| 971 | |||
| 972 | |||
| 973 | if "minrev" in parm: | ||
| 974 | srcrev = d.getVar('SRCREV') | ||
| 975 | if srcrev and srcrev < parm["minrev"]: | ||
| 976 | return False, 'applies to later revisions' | ||
| 977 | |||
| 978 | if "maxrev" in parm: | ||
| 979 | srcrev = d.getVar('SRCREV') | ||
| 980 | if srcrev and srcrev > parm["maxrev"]: | ||
| 981 | return False, 'applies to earlier revisions' | ||
| 982 | |||
| 983 | if "rev" in parm: | ||
| 984 | srcrev = d.getVar('SRCREV') | ||
| 985 | if srcrev and parm["rev"] not in srcrev: | ||
| 986 | return False, "doesn't apply to revision" | ||
| 987 | |||
| 988 | if "notrev" in parm: | ||
| 989 | srcrev = d.getVar('SRCREV') | ||
| 990 | if srcrev and parm["notrev"] in srcrev: | ||
| 991 | return False, "doesn't apply to revision" | ||
| 992 | |||
| 993 | if "maxver" in parm: | ||
| 994 | pv = d.getVar('PV') | ||
| 995 | if bb.utils.vercmp_string_op(pv, parm["maxver"], ">"): | ||
| 996 | return False, "applies to earlier version" | ||
| 997 | |||
| 998 | if "minver" in parm: | ||
| 999 | pv = d.getVar('PV') | ||
| 1000 | if bb.utils.vercmp_string_op(pv, parm["minver"], "<"): | ||
| 1001 | return False, "applies to later version" | ||
| 1002 | |||
| 1003 | return True, None | ||
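should_apply() gates a patch on SRC_URI parameters such as mindate/maxdate, minrev/maxrev/rev/notrev and minver/maxver, compared against the recipe's SRCDATE, SRCREV and PV. A minimal sketch with a mock datastore (the _D class and values are assumptions for illustration):

    # Mock datastore for illustration only.
    class _D:
        def getVar(self, name):
            return {"PV": "1.8"}.get(name)

    # A patch restricted to minver=2.0 is skipped when the recipe is at PV 1.8.
    print(should_apply({"minver": "2.0"}, _D()))  # (False, 'applies to later version')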
diff --git a/meta/lib/oe/path.py b/meta/lib/oe/path.py deleted file mode 100644 index a1efe97d88..0000000000 --- a/meta/lib/oe/path.py +++ /dev/null | |||
| @@ -1,352 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | import errno | ||
| 8 | import glob | ||
| 9 | import shutil | ||
| 10 | import subprocess | ||
| 11 | import os.path | ||
| 12 | |||
| 13 | import bb.parse | ||
| 14 | |||
| 15 | def join(*paths): | ||
| 16 | """Like os.path.join but doesn't treat absolute RHS specially""" | ||
| 17 | return os.path.normpath("/".join(paths)) | ||
| 18 | |||
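Unlike os.path.join(), an absolute right-hand component does not discard what comes before it; a quick illustration:

    assert join("/usr", "/bin") == "/usr/bin"      # keeps the left-hand side
    assert os.path.join("/usr", "/bin") == "/bin"  # standard join drops it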
| 19 | def relative(src, dest): | ||
| 20 | """ Return a relative path from src to dest. | ||
| 21 | |||
| 22 | >>> relative("/usr/bin", "/tmp/foo/bar") | ||
| 23 | ../../tmp/foo/bar | ||
| 24 | |||
| 25 | >>> relative("/usr/bin", "/usr/lib") | ||
| 26 | ../lib | ||
| 27 | |||
| 28 | >>> relative("/tmp", "/tmp/foo/bar") | ||
| 29 | foo/bar | ||
| 30 | """ | ||
| 31 | |||
| 32 | return os.path.relpath(dest, src) | ||
| 33 | |||
| 34 | def make_relative_symlink(path): | ||
| 35 | """ Convert an absolute symlink to a relative one """ | ||
| 36 | if not os.path.islink(path): | ||
| 37 | return | ||
| 38 | link = os.readlink(path) | ||
| 39 | if not os.path.isabs(link): | ||
| 40 | return | ||
| 41 | |||
| 42 | # find the common ancestor directory | ||
| 43 | ancestor = path | ||
| 44 | depth = 0 | ||
| 45 | while ancestor and not link.startswith(ancestor): | ||
| 46 | ancestor = ancestor.rpartition('/')[0] | ||
| 47 | depth += 1 | ||
| 48 | |||
| 49 | if not ancestor: | ||
| 50 | print("make_relative_symlink() Error: unable to find the common ancestor of %s and its target" % path) | ||
| 51 | return | ||
| 52 | |||
| 53 | base = link.partition(ancestor)[2].strip('/') | ||
| 54 | while depth > 1: | ||
| 55 | base = "../" + base | ||
| 56 | depth -= 1 | ||
| 57 | |||
| 58 | os.remove(path) | ||
| 59 | os.symlink(base, path) | ||
| 60 | |||
| 61 | def replace_absolute_symlinks(basedir, d): | ||
| 62 | """ | ||
| 63 | Walk basedir looking for absolute symlinks and replacing them with relative ones. | ||
| 64 | The absolute links are assumed to be relative to basedir | ||
| 65 | (compared to make_relative_symlink above which tries to compute common ancestors | ||
| 66 | using pattern matching instead) | ||
| 67 | """ | ||
| 68 | for walkroot, dirs, files in os.walk(basedir): | ||
| 69 | for file in files + dirs: | ||
| 70 | path = os.path.join(walkroot, file) | ||
| 71 | if not os.path.islink(path): | ||
| 72 | continue | ||
| 73 | link = os.readlink(path) | ||
| 74 | if not os.path.isabs(link): | ||
| 75 | continue | ||
| 76 | walkdir = os.path.dirname(path.rpartition(basedir)[2]) | ||
| 77 | base = os.path.relpath(link, walkdir) | ||
| 78 | bb.debug(2, "Replacing absolute path %s with relative path %s" % (link, base)) | ||
| 79 | os.remove(path) | ||
| 80 | os.symlink(base, path) | ||
| 81 | |||
| 82 | @bb.parse.vardepsexclude("TOPDIR") | ||
| 83 | def format_display(path, metadata): | ||
| 84 | """ Prepare a path for display to the user. """ | ||
| 85 | rel = relative(metadata.getVar("TOPDIR"), path) | ||
| 86 | if len(rel) > len(path): | ||
| 87 | return path | ||
| 88 | else: | ||
| 89 | return rel | ||
| 90 | |||
| 91 | def copytree(src, dst): | ||
| 92 | # We could use something like shutil.copytree here, but it turns out | ||
| 93 | # to be slow: it takes twice as long copying to an empty directory, | ||
| 94 | # and if dst already has contents it can be 15 times slower. | ||
| 95 | # This way we also preserve hardlinks between files in the tree. | ||
| 96 | |||
| 97 | bb.utils.mkdirhier(dst) | ||
| 98 | cmd = "tar --xattrs --xattrs-include='*' -cf - -S -C %s -p . | tar --xattrs --xattrs-include='*' -xf - -C %s" % (src, dst) | ||
| 99 | subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) | ||
| 100 | |||
| 101 | def copyhardlinktree(src, dst): | ||
| 102 | """Make a tree of hard links when possible, otherwise copy.""" | ||
| 103 | bb.utils.mkdirhier(dst) | ||
| 104 | if os.path.isdir(src) and not len(os.listdir(src)): | ||
| 105 | return | ||
| 106 | |||
| 107 | canhard = False | ||
| 108 | testfile = None | ||
| 109 | for root, dirs, files in os.walk(src): | ||
| 110 | if len(files): | ||
| 111 | testfile = os.path.join(root, files[0]) | ||
| 112 | break | ||
| 113 | |||
| 114 | if testfile is not None: | ||
| 115 | try: | ||
| 116 | os.link(testfile, os.path.join(dst, 'testfile')) | ||
| 117 | os.unlink(os.path.join(dst, 'testfile')) | ||
| 118 | canhard = True | ||
| 119 | except Exception as e: | ||
| 120 | bb.debug(2, "Hardlink test failed with " + str(e)) | ||
| 121 | |||
| 122 | if (canhard): | ||
| 123 | # Need to copy directories only with tar first since cp will error if two | ||
| 124 | # writers try and create a directory at the same time | ||
| 125 | cmd = "cd %s; find . -type d -print | tar --xattrs --xattrs-include='*' -cf - -S -C %s -p --no-recursion --files-from - | tar --xattrs --xattrs-include='*' -xhf - -C %s" % (src, src, dst) | ||
| 126 | subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) | ||
| 127 | source = '' | ||
| 128 | if os.path.isdir(src): | ||
| 129 | if len(glob.glob('%s/.??*' % src)) > 0: | ||
| 130 | source = './.??* ' | ||
| 131 | if len(glob.glob('%s/**' % src)) > 0: | ||
| 132 | source += './*' | ||
| 133 | s_dir = src | ||
| 134 | else: | ||
| 135 | source = src | ||
| 136 | s_dir = os.getcwd() | ||
| 137 | cmd = 'cp -afl --preserve=xattr %s %s' % (source, os.path.realpath(dst)) | ||
| 138 | subprocess.check_output(cmd, shell=True, cwd=s_dir, stderr=subprocess.STDOUT) | ||
| 139 | else: | ||
| 140 | copytree(src, dst) | ||
| 141 | |||
| 142 | def copyhardlink(src, dst): | ||
| 143 | """Make a hard link when possible, otherwise copy.""" | ||
| 144 | |||
| 145 | try: | ||
| 146 | os.link(src, dst) | ||
| 147 | except OSError: | ||
| 148 | shutil.copy(src, dst) | ||
| 149 | |||
| 150 | def remove(path, recurse=True): | ||
| 151 | """ | ||
| 152 | Equivalent to rm -f or rm -rf | ||
| 153 | NOTE: be careful about passing paths that may contain filenames with | ||
| 154 | wildcards in them (as opposed to passing an actual wildcarded path) - | ||
| 155 | since we use glob.glob() to expand the path. Filenames containing | ||
| 156 | square brackets are particularly problematic since they may not | ||
| 157 | actually expand to match the original filename. | ||
| 158 | """ | ||
| 159 | for name in glob.glob(path): | ||
| 160 | try: | ||
| 161 | os.unlink(name) | ||
| 162 | except OSError as exc: | ||
| 163 | if recurse and exc.errno == errno.EISDIR: | ||
| 164 | shutil.rmtree(name) | ||
| 165 | elif exc.errno != errno.ENOENT: | ||
| 166 | raise | ||
| 167 | |||
| 168 | def symlink(source, destination, force=False): | ||
| 169 | """Create a symbolic link""" | ||
| 170 | try: | ||
| 171 | if force: | ||
| 172 | remove(destination) | ||
| 173 | os.symlink(source, destination) | ||
| 174 | except OSError as e: | ||
| 175 | if e.errno != errno.EEXIST or os.readlink(destination) != source: | ||
| 176 | raise | ||
| 177 | |||
| 178 | def relsymlink(target, name, force=False): | ||
| 179 | symlink(os.path.relpath(target, os.path.dirname(name)), name, force=force) | ||
| 180 | |||
| 181 | def find(dir, **walkoptions): | ||
| 182 | """ Given a directory, recurses into that directory, | ||
| 183 | returning all files as absolute paths. """ | ||
| 184 | |||
| 185 | for root, dirs, files in os.walk(dir, **walkoptions): | ||
| 186 | for file in files: | ||
| 187 | yield os.path.join(root, file) | ||
| 188 | |||
| 189 | |||
| 190 | ## realpath() related functions | ||
| 191 | def __is_path_below(file, root): | ||
| 192 | return (file + os.path.sep).startswith(root) | ||
| 193 | |||
| 194 | def __realpath_rel(start, rel_path, root, loop_cnt, assume_dir): | ||
| 195 | """Calculates real path of symlink 'start' + 'rel_path' below | ||
| 196 | 'root'; no part of 'start' below 'root' must contain symlinks. """ | ||
| 197 | have_dir = True | ||
| 198 | |||
| 199 | for d in rel_path.split(os.path.sep): | ||
| 200 | if not have_dir and not assume_dir: | ||
| 201 | raise OSError(errno.ENOENT, "no such directory %s" % start) | ||
| 202 | |||
| 203 | if d == os.path.pardir: # '..' | ||
| 204 | if len(start) >= len(root): | ||
| 205 | # do not follow '..' before root | ||
| 206 | start = os.path.dirname(start) | ||
| 207 | else: | ||
| 208 | # emit warning? | ||
| 209 | pass | ||
| 210 | else: | ||
| 211 | (start, have_dir) = __realpath(os.path.join(start, d), | ||
| 212 | root, loop_cnt, assume_dir) | ||
| 213 | |||
| 214 | assert(__is_path_below(start, root)) | ||
| 215 | |||
| 216 | return start | ||
| 217 | |||
| 218 | def __realpath(file, root, loop_cnt, assume_dir): | ||
| 219 | while os.path.islink(file) and len(file) >= len(root): | ||
| 220 | if loop_cnt == 0: | ||
| 221 | raise OSError(errno.ELOOP, file) | ||
| 222 | |||
| 223 | loop_cnt -= 1 | ||
| 224 | target = os.path.normpath(os.readlink(file)) | ||
| 225 | |||
| 226 | if not os.path.isabs(target): | ||
| 227 | tdir = os.path.dirname(file) | ||
| 228 | assert(__is_path_below(tdir, root)) | ||
| 229 | else: | ||
| 230 | tdir = root | ||
| 231 | |||
| 232 | file = __realpath_rel(tdir, target, root, loop_cnt, assume_dir) | ||
| 233 | |||
| 234 | try: | ||
| 235 | is_dir = os.path.isdir(file) | ||
| 236 | except: | ||
| 237 | is_dir = False | ||
| 238 | |||
| 239 | return (file, is_dir) | ||
| 240 | |||
| 241 | def realpath(file, root, use_physdir = True, loop_cnt = 100, assume_dir = False): | ||
| 242 | """ Returns the canonical path of 'file' with assuming a | ||
| 243 | toplevel 'root' directory. When 'use_physdir' is set, all | ||
| 244 | preceding path components of 'file' will be resolved first; | ||
| 245 | this flag should be set unless it is guaranteed that there is | ||
| 246 | no symlink in the path. When 'assume_dir' is not set, missing | ||
| 247 | path components will raise an ENOENT error""" | ||
| 248 | |||
| 249 | root = os.path.normpath(root) | ||
| 250 | file = os.path.normpath(file) | ||
| 251 | |||
| 252 | if not root.endswith(os.path.sep): | ||
| 253 | # letting root end with '/' makes some things easier | ||
| 254 | root = root + os.path.sep | ||
| 255 | |||
| 256 | if not __is_path_below(file, root): | ||
| 257 | raise OSError(errno.EINVAL, "file '%s' is not below root" % file) | ||
| 258 | |||
| 259 | try: | ||
| 260 | if use_physdir: | ||
| 261 | file = __realpath_rel(root, file[(len(root) - 1):], root, loop_cnt, assume_dir) | ||
| 262 | else: | ||
| 263 | file = __realpath(file, root, loop_cnt, assume_dir)[0] | ||
| 264 | except OSError as e: | ||
| 265 | if e.errno == errno.ELOOP: | ||
| 266 | # make ELOOP more readable; without catching it here, a | ||
| 267 | # backtrace with hundreds of OSError exceptions would be | ||
| 268 | # printed instead | ||
| 269 | raise OSError(errno.ELOOP, | ||
| 270 | "too much recursions while resolving '%s'; loop in '%s'" % | ||
| 271 | (file, e.strerror)) | ||
| 272 | |||
| 273 | raise | ||
| 274 | |||
| 275 | return file | ||
| 276 | |||
| 277 | def is_path_parent(possible_parent, *paths): | ||
| 278 | """ | ||
| 279 | Return True if a path is the parent of another, False otherwise. | ||
| 280 | Multiple paths to test can be specified in which case all | ||
| 281 | specified test paths must be under the parent in order to | ||
| 282 | return True. | ||
| 283 | """ | ||
| 284 | def abs_path_trailing(pth): | ||
| 285 | pth_abs = os.path.abspath(pth) | ||
| 286 | if not pth_abs.endswith(os.sep): | ||
| 287 | pth_abs += os.sep | ||
| 288 | return pth_abs | ||
| 289 | |||
| 290 | possible_parent_abs = abs_path_trailing(possible_parent) | ||
| 291 | if not paths: | ||
| 292 | return False | ||
| 293 | for path in paths: | ||
| 294 | path_abs = abs_path_trailing(path) | ||
| 295 | if not path_abs.startswith(possible_parent_abs): | ||
| 296 | return False | ||
| 297 | return True | ||
| 298 | |||
| 299 | def which_wild(pathname, path=None, mode=os.F_OK, *, reverse=False, candidates=False): | ||
| 300 | """Search a search path for pathname, supporting wildcards. | ||
| 301 | |||
| 302 | Return all paths in the specified search path matching the wildcard pattern | ||
| 303 | in pathname, returning only the first encountered for each file. If | ||
| 304 | candidates is True, information on all potential candidate paths is | ||
| 305 | included. | ||
| 306 | """ | ||
| 307 | paths = (path or os.environ.get('PATH', os.defpath)).split(':') | ||
| 308 | if reverse: | ||
| 309 | paths.reverse() | ||
| 310 | |||
| 311 | seen, files = set(), [] | ||
| 312 | for index, element in enumerate(paths): | ||
| 313 | if not os.path.isabs(element): | ||
| 314 | element = os.path.abspath(element) | ||
| 315 | |||
| 316 | candidate = os.path.join(element, pathname) | ||
| 317 | globbed = glob.glob(candidate) | ||
| 318 | if globbed: | ||
| 319 | for found_path in sorted(globbed): | ||
| 320 | if not os.access(found_path, mode): | ||
| 321 | continue | ||
| 322 | rel = os.path.relpath(found_path, element) | ||
| 323 | if rel not in seen: | ||
| 324 | seen.add(rel) | ||
| 325 | if candidates: | ||
| 326 | files.append((found_path, [os.path.join(p, rel) for p in paths[:index+1]])) | ||
| 327 | else: | ||
| 328 | files.append(found_path) | ||
| 329 | |||
| 330 | return files | ||
| 331 | |||
| 332 | def canonicalize(paths, sep=','): | ||
| 333 | """Given a string with paths (separated by commas by default), expand | ||
| 334 | each path using os.path.realpath() and return the resulting paths as a | ||
| 335 | string (separated using the same separator as the original string). | ||
| 336 | """ | ||
| 337 | # Ignore paths containing "$" as they are assumed to be unexpanded bitbake | ||
| 338 | # variables. Normally they would be ignored, e.g., when passing the paths | ||
| 339 | # through the shell they would expand to empty strings. However, when they | ||
| 340 | # are passed through os.path.realpath(), it will cause them to be prefixed | ||
| 341 | # with the absolute path to the current directory and thus not be empty | ||
| 342 | # anymore. | ||
| 343 | # | ||
| 344 | # Also maintain trailing slashes, as the paths may actually be used as | ||
| 345 | prefixes in string comparisons later on, where the slashes are important. | ||
| 346 | canonical_paths = [] | ||
| 347 | for path in (paths or '').split(sep): | ||
| 348 | if '$' not in path: | ||
| 349 | trailing_slash = path.endswith('/') and '/' or '' | ||
| 350 | canonical_paths.append(os.path.realpath(path) + trailing_slash) | ||
| 351 | |||
| 352 | return sep.join(canonical_paths) | ||
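A minimal sketch of how the which_wild() and canonicalize() helpers above are typically invoked; the search path, pattern and prefix list here are illustrative only, not taken from the deleted code:

    import oe.path

    # First match per relative name for a wildcard pattern, searched along
    # a colon-separated path (later entries lose to earlier ones).
    hits = oe.path.which_wild('libfoo*.so*', path='/usr/lib:/usr/local/lib')

    # Expand symlinks in a comma-separated prefix list, keeping trailing
    # slashes and skipping unexpanded ${...} bitbake variables.
    prefixes = oe.path.canonicalize('/usr/,${libdir_unset}/,/opt/tools/')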
diff --git a/meta/lib/oe/prservice.py b/meta/lib/oe/prservice.py deleted file mode 100644 index c41242c878..0000000000 --- a/meta/lib/oe/prservice.py +++ /dev/null | |||
| @@ -1,127 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | def prserv_make_conn(d, check = False): | ||
| 8 | import prserv.serv | ||
| 9 | host_params = list([_f for _f in (d.getVar("PRSERV_HOST") or '').split(':') if _f]) | ||
| 10 | try: | ||
| 11 | conn = None | ||
| 12 | conn = prserv.serv.connect(host_params[0], int(host_params[1])) | ||
| 13 | if check: | ||
| 14 | if not conn.ping(): | ||
| 15 | raise Exception('service not available') | ||
| 16 | except Exception as exc: | ||
| 17 | bb.fatal("Connecting to PR service %s:%s failed: %s" % (host_params[0], host_params[1], str(exc))) | ||
| 18 | |||
| 19 | return conn | ||
| 20 | |||
| 21 | def prserv_dump_db(d): | ||
| 22 | if not d.getVar('PRSERV_HOST'): | ||
| 23 | bb.error("Not using network based PR service") | ||
| 24 | return None | ||
| 25 | |||
| 26 | conn = prserv_make_conn(d) | ||
| 27 | if conn is None: | ||
| 28 | bb.error("Making connection failed to remote PR service") | ||
| 29 | return None | ||
| 30 | |||
| 31 | #dump db | ||
| 32 | opt_version = d.getVar('PRSERV_DUMPOPT_VERSION') | ||
| 33 | opt_pkgarch = d.getVar('PRSERV_DUMPOPT_PKGARCH') | ||
| 34 | opt_checksum = d.getVar('PRSERV_DUMPOPT_CHECKSUM') | ||
| 35 | opt_col = ("1" == d.getVar('PRSERV_DUMPOPT_COL')) | ||
| 36 | d = conn.export(opt_version, opt_pkgarch, opt_checksum, opt_col) | ||
| 37 | conn.close() | ||
| 38 | return d | ||
| 39 | |||
| 40 | def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksum=None): | ||
| 41 | if not d.getVar('PRSERV_HOST'): | ||
| 42 | bb.error("Not using network based PR service") | ||
| 43 | return None | ||
| 44 | |||
| 45 | conn = prserv_make_conn(d) | ||
| 46 | if conn is None: | ||
| 47 | bb.error("Making connection failed to remote PR service") | ||
| 48 | return None | ||
| 49 | #get the entry values | ||
| 50 | imported = [] | ||
| 51 | prefix = "PRAUTO$" | ||
| 52 | for v in d.keys(): | ||
| 53 | if v.startswith(prefix): | ||
| 54 | (remain, sep, checksum) = v.rpartition('$') | ||
| 55 | (remain, sep, pkgarch) = remain.rpartition('$') | ||
| 56 | (remain, sep, version) = remain.rpartition('$') | ||
| 57 | if (remain + '$' != prefix) or \ | ||
| 58 | (filter_version and filter_version != version) or \ | ||
| 59 | (filter_pkgarch and filter_pkgarch != pkgarch) or \ | ||
| 60 | (filter_checksum and filter_checksum != checksum): | ||
| 61 | continue | ||
| 62 | try: | ||
| 63 | value = int(d.getVar(remain + '$' + version + '$' + pkgarch + '$' + checksum)) | ||
| 64 | except BaseException as exc: | ||
| 65 | bb.debug("Not valid value of %s:%s" % (v,str(exc))) | ||
| 66 | continue | ||
| 67 | ret = conn.importone(version,pkgarch,checksum,value) | ||
| 68 | if ret != value: | ||
| 69 | bb.error("importing(%s,%s,%s,%d) failed. DB may have larger value %d" % (version,pkgarch,checksum,value,ret)) | ||
| 70 | else: | ||
| 71 | imported.append((version,pkgarch,checksum,value)) | ||
| 72 | conn.close() | ||
| 73 | return imported | ||
| 74 | |||
| 75 | def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False): | ||
| 76 | import bb.utils | ||
| 77 | # initialize the output file | ||
| 78 | bb.utils.mkdirhier(d.getVar('PRSERV_DUMPDIR')) | ||
| 79 | df = d.getVar('PRSERV_DUMPFILE') | ||
| 80 | #write data | ||
| 81 | with open(df, "a") as f, bb.utils.fileslocked(["%s.lock" % df]) as locks: | ||
| 82 | if metainfo: | ||
| 83 | #dump column info | ||
| 84 | f.write("#PR_core_ver = \"%s\"\n\n" % metainfo['core_ver']); | ||
| 85 | f.write("#Table: %s\n" % metainfo['tbl_name']) | ||
| 86 | f.write("#Columns:\n") | ||
| 87 | f.write("#name \t type \t notn \t dflt \t pk\n") | ||
| 88 | f.write("#----------\t --------\t --------\t --------\t ----\n") | ||
| 89 | for i in range(len(metainfo['col_info'])): | ||
| 90 | f.write("#%10s\t %8s\t %8s\t %8s\t %4s\n" % | ||
| 91 | (metainfo['col_info'][i]['name'], | ||
| 92 | metainfo['col_info'][i]['type'], | ||
| 93 | metainfo['col_info'][i]['notnull'], | ||
| 94 | metainfo['col_info'][i]['dflt_value'], | ||
| 95 | metainfo['col_info'][i]['pk'])) | ||
| 96 | f.write("\n") | ||
| 97 | |||
| 98 | if lockdown: | ||
| 99 | f.write("PRSERV_LOCKDOWN = \"1\"\n\n") | ||
| 100 | |||
| 101 | if datainfo: | ||
| 102 | idx = {} | ||
| 103 | for i in range(len(datainfo)): | ||
| 104 | pkgarch = datainfo[i]['pkgarch'] | ||
| 105 | value = datainfo[i]['value'] | ||
| 106 | if pkgarch not in idx: | ||
| 107 | idx[pkgarch] = i | ||
| 108 | elif value > datainfo[idx[pkgarch]]['value']: | ||
| 109 | idx[pkgarch] = i | ||
| 110 | f.write("PRAUTO$%s$%s$%s = \"%s\"\n" % | ||
| 111 | (str(datainfo[i]['version']), pkgarch, str(datainfo[i]['checksum']), str(value))) | ||
| 112 | if not nomax: | ||
| 113 | for i in idx: | ||
| 114 | f.write("PRAUTO_%s_%s = \"%s\"\n" % (str(datainfo[idx[i]]['version']),str(datainfo[idx[i]]['pkgarch']),str(datainfo[idx[i]]['value']))) | ||
| 115 | |||
| 116 | def prserv_check_avail(d): | ||
| 117 | host_params = list([_f for _f in (d.getVar("PRSERV_HOST") or '').split(':') if _f]) | ||
| 118 | try: | ||
| 119 | if len(host_params) != 2: | ||
| 120 | raise TypeError | ||
| 121 | else: | ||
| 122 | int(host_params[1]) | ||
| 123 | except (TypeError, ValueError): | ||
| 124 | bb.fatal('Undefined/incorrect PRSERV_HOST value. Format: "host:port"') | ||
| 125 | else: | ||
| 126 | conn = prserv_make_conn(d, True) | ||
| 127 | conn.close() | ||
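The PRAUTO key layout written by prserv_export_tofile() and parsed back by prserv_import_db() can be sketched in plain Python; the version, arch and checksum values below are invented for illustration:

    # Keys have the form "PRAUTO$<version>$<pkgarch>$<checksum>" and are
    # split from the right with rpartition('$'), as in the import loop above.
    key = "PRAUTO$1.2.3-r0$core2-64$0123456789abcdef"

    remain, _, checksum = key.rpartition('$')
    remain, _, pkgarch = remain.rpartition('$')
    remain, _, version = remain.rpartition('$')

    assert remain + '$' == "PRAUTO$"
    print(version, pkgarch, checksum)   # 1.2.3-r0 core2-64 0123456789abcdef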
diff --git a/meta/lib/oe/qa.py b/meta/lib/oe/qa.py deleted file mode 100644 index cd36cb5070..0000000000 --- a/meta/lib/oe/qa.py +++ /dev/null | |||
| @@ -1,248 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | import ast | ||
| 8 | import os, struct, mmap | ||
| 9 | |||
| 10 | class NotELFFileError(Exception): | ||
| 11 | pass | ||
| 12 | |||
| 13 | class ELFFile: | ||
| 14 | EI_NIDENT = 16 | ||
| 15 | |||
| 16 | EI_CLASS = 4 | ||
| 17 | EI_DATA = 5 | ||
| 18 | EI_VERSION = 6 | ||
| 19 | EI_OSABI = 7 | ||
| 20 | EI_ABIVERSION = 8 | ||
| 21 | |||
| 22 | E_MACHINE = 0x12 | ||
| 23 | |||
| 24 | # possible values for EI_CLASS | ||
| 25 | ELFCLASSNONE = 0 | ||
| 26 | ELFCLASS32 = 1 | ||
| 27 | ELFCLASS64 = 2 | ||
| 28 | |||
| 29 | # possible value for EI_VERSION | ||
| 30 | EV_CURRENT = 1 | ||
| 31 | |||
| 32 | # possible values for EI_DATA | ||
| 33 | EI_DATA_NONE = 0 | ||
| 34 | EI_DATA_LSB = 1 | ||
| 35 | EI_DATA_MSB = 2 | ||
| 36 | |||
| 37 | PT_INTERP = 3 | ||
| 38 | |||
| 39 | def my_assert(self, expectation, result): | ||
| 40 | if not expectation == result: | ||
| 41 | #print "'%x','%x' %s" % (ord(expectation), ord(result), self.name) | ||
| 42 | raise NotELFFileError("%s is not an ELF" % self.name) | ||
| 43 | |||
| 44 | def __init__(self, name): | ||
| 45 | self.name = name | ||
| 46 | self.objdump_output = {} | ||
| 47 | self.data = None | ||
| 48 | |||
| 49 | # Context Manager functions to close the mmap explicitly | ||
| 50 | def __enter__(self): | ||
| 51 | return self | ||
| 52 | |||
| 53 | def __exit__(self, exc_type, exc_value, traceback): | ||
| 54 | self.close() | ||
| 55 | |||
| 56 | def close(self): | ||
| 57 | if self.data: | ||
| 58 | self.data.close() | ||
| 59 | |||
| 60 | def open(self): | ||
| 61 | with open(self.name, "rb") as f: | ||
| 62 | try: | ||
| 63 | self.data = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) | ||
| 64 | except ValueError: | ||
| 65 | # This means the file is empty | ||
| 66 | raise NotELFFileError("%s is empty" % self.name) | ||
| 67 | |||
| 68 | # Check the file has the minimum number of ELF table entries | ||
| 69 | if len(self.data) < ELFFile.EI_NIDENT + 4: | ||
| 70 | raise NotELFFileError("%s is not an ELF" % self.name) | ||
| 71 | |||
| 72 | # ELF header | ||
| 73 | self.my_assert(self.data[0], 0x7f) | ||
| 74 | self.my_assert(self.data[1], ord('E')) | ||
| 75 | self.my_assert(self.data[2], ord('L')) | ||
| 76 | self.my_assert(self.data[3], ord('F')) | ||
| 77 | if self.data[ELFFile.EI_CLASS] == ELFFile.ELFCLASS32: | ||
| 78 | self.bits = 32 | ||
| 79 | elif self.data[ELFFile.EI_CLASS] == ELFFile.ELFCLASS64: | ||
| 80 | self.bits = 64 | ||
| 81 | else: | ||
| 82 | # Neither 32-bit nor 64-bit; raise an error | ||
| 83 | raise NotELFFileError("ELF but not 32 or 64 bit.") | ||
| 84 | self.my_assert(self.data[ELFFile.EI_VERSION], ELFFile.EV_CURRENT) | ||
| 85 | |||
| 86 | self.endian = self.data[ELFFile.EI_DATA] | ||
| 87 | if self.endian not in (ELFFile.EI_DATA_LSB, ELFFile.EI_DATA_MSB): | ||
| 88 | raise NotELFFileError("Unexpected EI_DATA %x" % self.endian) | ||
| 89 | |||
| 90 | def osAbi(self): | ||
| 91 | return self.data[ELFFile.EI_OSABI] | ||
| 92 | |||
| 93 | def abiVersion(self): | ||
| 94 | return self.data[ELFFile.EI_ABIVERSION] | ||
| 95 | |||
| 96 | def abiSize(self): | ||
| 97 | return self.bits | ||
| 98 | |||
| 99 | def isLittleEndian(self): | ||
| 100 | return self.endian == ELFFile.EI_DATA_LSB | ||
| 101 | |||
| 102 | def isBigEndian(self): | ||
| 103 | return self.endian == ELFFile.EI_DATA_MSB | ||
| 104 | |||
| 105 | def getStructEndian(self): | ||
| 106 | return {ELFFile.EI_DATA_LSB: "<", | ||
| 107 | ELFFile.EI_DATA_MSB: ">"}[self.endian] | ||
| 108 | |||
| 109 | def getShort(self, offset): | ||
| 110 | return struct.unpack_from(self.getStructEndian() + "H", self.data, offset)[0] | ||
| 111 | |||
| 112 | def getWord(self, offset): | ||
| 113 | return struct.unpack_from(self.getStructEndian() + "i", self.data, offset)[0] | ||
| 114 | |||
| 115 | def isDynamic(self): | ||
| 116 | """ | ||
| 117 | Return True if there is a .interp segment (therefore dynamically | ||
| 118 | linked), otherwise False (statically linked). | ||
| 119 | """ | ||
| 120 | offset = self.getWord(self.bits == 32 and 0x1C or 0x20) | ||
| 121 | size = self.getShort(self.bits == 32 and 0x2A or 0x36) | ||
| 122 | count = self.getShort(self.bits == 32 and 0x2C or 0x38) | ||
| 123 | |||
| 124 | for i in range(0, count): | ||
| 125 | p_type = self.getWord(offset + i * size) | ||
| 126 | if p_type == ELFFile.PT_INTERP: | ||
| 127 | return True | ||
| 128 | return False | ||
| 129 | |||
| 130 | def machine(self): | ||
| 131 | """ | ||
| 132 | We already know the endianness from self.endian and | ||
| 133 | the position of the e_machine field | ||
| 134 | """ | ||
| 135 | return self.getShort(ELFFile.E_MACHINE) | ||
| 136 | |||
| 137 | def set_objdump(self, cmd, output): | ||
| 138 | self.objdump_output[cmd] = output | ||
| 139 | |||
| 140 | def run_objdump(self, cmd, d): | ||
| 141 | import bb.process | ||
| 142 | import sys | ||
| 143 | |||
| 144 | if cmd in self.objdump_output: | ||
| 145 | return self.objdump_output[cmd] | ||
| 146 | |||
| 147 | objdump = d.getVar('OBJDUMP') | ||
| 148 | |||
| 149 | env = os.environ.copy() | ||
| 150 | env["LC_ALL"] = "C" | ||
| 151 | env["PATH"] = d.getVar('PATH') | ||
| 152 | |||
| 153 | try: | ||
| 154 | bb.note("%s %s %s" % (objdump, cmd, self.name)) | ||
| 155 | self.objdump_output[cmd] = bb.process.run([objdump, cmd, self.name], env=env, shell=False)[0] | ||
| 156 | return self.objdump_output[cmd] | ||
| 157 | except Exception as e: | ||
| 158 | bb.note("%s %s %s failed: %s" % (objdump, cmd, self.name, e)) | ||
| 159 | return "" | ||
| 160 | |||
| 161 | def elf_machine_to_string(machine): | ||
| 162 | """ | ||
| 163 | Return the name of a given ELF e_machine field or the hex value as a string | ||
| 164 | if it isn't recognised. | ||
| 165 | """ | ||
| 166 | try: | ||
| 167 | return { | ||
| 168 | 0x00: "Unset", | ||
| 169 | 0x02: "SPARC", | ||
| 170 | 0x03: "x86", | ||
| 171 | 0x08: "MIPS", | ||
| 172 | 0x14: "PowerPC", | ||
| 173 | 0x28: "ARM", | ||
| 174 | 0x2A: "SuperH", | ||
| 175 | 0x32: "IA-64", | ||
| 176 | 0x3E: "x86-64", | ||
| 177 | 0xB7: "AArch64", | ||
| 178 | 0xF7: "BPF" | ||
| 179 | }[machine] | ||
| 180 | except: | ||
| 181 | return "Unknown (%s)" % repr(machine) | ||
| 182 | |||
| 183 | def write_error(type, error, d): | ||
| 184 | logfile = d.getVar('QA_LOGFILE') | ||
| 185 | if logfile: | ||
| 186 | p = d.getVar('P') | ||
| 187 | with open(logfile, "a+") as f: | ||
| 188 | f.write("%s: %s [%s]\n" % (p, error, type)) | ||
| 189 | |||
| 190 | def handle_error_visitorcode(name, args): | ||
| 191 | execs = set() | ||
| 192 | contains = {} | ||
| 193 | warn = None | ||
| 194 | if isinstance(args[0], ast.Constant) and isinstance(args[0].value, str): | ||
| 195 | for i in ["ERROR_QA", "WARN_QA"]: | ||
| 196 | if i not in contains: | ||
| 197 | contains[i] = set() | ||
| 198 | contains[i].add(args[0].value) | ||
| 199 | else: | ||
| 200 | warn = args[0] | ||
| 201 | execs.add(name) | ||
| 202 | return contains, execs, warn | ||
| 203 | |||
| 204 | def handle_error(error_class, error_msg, d): | ||
| 205 | if error_class in (d.getVar("ERROR_QA") or "").split(): | ||
| 206 | write_error(error_class, error_msg, d) | ||
| 207 | bb.error("QA Issue: %s [%s]" % (error_msg, error_class)) | ||
| 208 | d.setVar("QA_ERRORS_FOUND", "True") | ||
| 209 | return False | ||
| 210 | elif error_class in (d.getVar("WARN_QA") or "").split(): | ||
| 211 | write_error(error_class, error_msg, d) | ||
| 212 | bb.warn("QA Issue: %s [%s]" % (error_msg, error_class)) | ||
| 213 | else: | ||
| 214 | bb.note("QA Issue: %s [%s]" % (error_msg, error_class)) | ||
| 215 | return True | ||
| 216 | handle_error.visitorcode = handle_error_visitorcode | ||
| 217 | |||
| 218 | def exit_with_message_if_errors(message, d): | ||
| 219 | qa_fatal_errors = bb.utils.to_boolean(d.getVar("QA_ERRORS_FOUND"), False) | ||
| 220 | if qa_fatal_errors: | ||
| 221 | bb.fatal(message) | ||
| 222 | |||
| 223 | def exit_if_errors(d): | ||
| 224 | exit_with_message_if_errors("Fatal QA errors were found, failing task.", d) | ||
| 225 | |||
| 226 | def check_upstream_status(fullpath): | ||
| 227 | import re | ||
| 228 | kinda_status_re = re.compile(r"^.*upstream.*status.*$", re.IGNORECASE | re.MULTILINE) | ||
| 229 | strict_status_re = re.compile(r"^Upstream-Status: (Pending|Submitted|Denied|Inappropriate|Backport|Inactive-Upstream)( .+)?$", re.MULTILINE) | ||
| 230 | guidelines = "https://docs.yoctoproject.org/contributor-guide/recipe-style-guide.html#patch-upstream-status" | ||
| 231 | |||
| 232 | with open(fullpath, encoding='utf-8', errors='ignore') as f: | ||
| 233 | file_content = f.read() | ||
| 234 | match_kinda = kinda_status_re.search(file_content) | ||
| 235 | match_strict = strict_status_re.search(file_content) | ||
| 236 | |||
| 237 | if not match_strict: | ||
| 238 | if match_kinda: | ||
| 239 | return "Malformed Upstream-Status in patch\n%s\nPlease correct according to %s :\n%s" % (fullpath, guidelines, match_kinda.group(0)) | ||
| 240 | else: | ||
| 241 | return "Missing Upstream-Status in patch\n%s\nPlease add according to %s ." % (fullpath, guidelines) | ||
| 242 | |||
| 243 | if __name__ == "__main__": | ||
| 244 | import sys | ||
| 245 | |||
| 246 | with ELFFile(sys.argv[1]) as elf: | ||
| 247 | elf.open() | ||
| 248 | print(elf.isDynamic()) | ||
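A slightly fuller variant of the __main__ block above, showing how ELFFile and elf_machine_to_string() are typically combined; the binary path is only an example:

    import oe.qa

    with oe.qa.ELFFile("/bin/true") as elf:
        elf.open()
        print("machine:", oe.qa.elf_machine_to_string(elf.machine()))
        print("bits:", elf.abiSize())
        print("little endian:", elf.isLittleEndian())
        print("dynamic:", elf.isDynamic())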
diff --git a/meta/lib/oe/qemu.py b/meta/lib/oe/qemu.py deleted file mode 100644 index 769865036c..0000000000 --- a/meta/lib/oe/qemu.py +++ /dev/null | |||
| @@ -1,54 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | def qemu_target_binary(d): | ||
| 8 | package_arch = d.getVar("PACKAGE_ARCH") | ||
| 9 | qemu_target_binary = (d.getVar("QEMU_TARGET_BINARY_%s" % package_arch) or "") | ||
| 10 | if qemu_target_binary: | ||
| 11 | return qemu_target_binary | ||
| 12 | |||
| 13 | target_arch = d.getVar("TARGET_ARCH") | ||
| 14 | if target_arch in ("i486", "i586", "i686"): | ||
| 15 | target_arch = "i386" | ||
| 16 | elif target_arch == "powerpc": | ||
| 17 | target_arch = "ppc" | ||
| 18 | elif target_arch == "powerpc64": | ||
| 19 | target_arch = "ppc64" | ||
| 20 | elif target_arch == "powerpc64le": | ||
| 21 | target_arch = "ppc64le" | ||
| 22 | |||
| 23 | return "qemu-" + target_arch | ||
| 24 | |||
| 25 | def qemu_wrapper_cmdline(d, rootfs_path, library_paths, qemu_options=None): | ||
| 26 | import string | ||
| 27 | |||
| 28 | package_arch = d.getVar("PACKAGE_ARCH") | ||
| 29 | if package_arch == "all": | ||
| 30 | return "false" | ||
| 31 | |||
| 32 | qemu_binary = qemu_target_binary(d) | ||
| 33 | if qemu_binary == "qemu-allarch": | ||
| 34 | qemu_binary = "qemuwrapper" | ||
| 35 | |||
| 36 | if qemu_options is None: | ||
| 37 | qemu_options = d.getVar("QEMU_OPTIONS") or "" | ||
| 38 | |||
| 39 | return "PSEUDO_UNLOAD=1 " + qemu_binary + " " + qemu_options + " -L " + rootfs_path\ | ||
| 40 | + " -E LD_LIBRARY_PATH=" + ":".join(library_paths) + " " | ||
| 41 | |||
| 42 | # The next function returns a string containing the command needed to | ||
| 43 | # run a certain binary through qemu. For example, to make an | ||
| 44 | # architecture-dependent postinstall scriptlet run at do_rootfs time, | ||
| 45 | # we can run it through qemu. In the postinstall scriptlet, we could | ||
| 46 | # use the following: | ||
| 47 | # | ||
| 48 | # ${@qemu_run_binary(d, '$D', '/usr/bin/test_app')} [test_app arguments] | ||
| 49 | # | ||
| 50 | def qemu_run_binary(d, rootfs_path, binary): | ||
| 51 | libdir = rootfs_path + d.getVar("libdir", False) | ||
| 52 | base_libdir = rootfs_path + d.getVar("base_libdir", False) | ||
| 53 | |||
| 54 | return qemu_wrapper_cmdline(d, rootfs_path, [libdir, base_libdir]) + rootfs_path + binary | ||
diff --git a/meta/lib/oe/recipeutils.py b/meta/lib/oe/recipeutils.py deleted file mode 100644 index 102789ce73..0000000000 --- a/meta/lib/oe/recipeutils.py +++ /dev/null | |||
| @@ -1,1248 +0,0 @@ | |||
| 1 | # Utility functions for reading and modifying recipes | ||
| 2 | # | ||
| 3 | # Some code borrowed from the OE layer index | ||
| 4 | # | ||
| 5 | # Copyright (C) 2013-2017 Intel Corporation | ||
| 6 | # | ||
| 7 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 8 | # | ||
| 9 | |||
| 10 | import sys | ||
| 11 | import os | ||
| 12 | import os.path | ||
| 13 | import tempfile | ||
| 14 | import textwrap | ||
| 15 | import difflib | ||
| 16 | from . import utils | ||
| 17 | import shutil | ||
| 18 | import re | ||
| 19 | import fnmatch | ||
| 20 | import glob | ||
| 21 | import bb.tinfoil | ||
| 22 | |||
| 23 | from collections import OrderedDict, defaultdict | ||
| 24 | from bb.utils import vercmp_string | ||
| 25 | |||
| 26 | # Help us to find places to insert values | ||
| 27 | recipe_progression = ['SUMMARY', 'DESCRIPTION', 'HOMEPAGE', 'BUGTRACKER', 'SECTION', 'LICENSE', 'LICENSE_FLAGS', 'LIC_FILES_CHKSUM', 'PROVIDES', 'DEPENDS', 'PR', 'PV', 'SRCREV', 'SRC_URI', 'S', 'do_fetch()', 'do_unpack()', 'do_patch()', 'EXTRA_OECONF', 'EXTRA_OECMAKE', 'EXTRA_OESCONS', 'do_configure()', 'EXTRA_OEMAKE', 'do_compile()', 'do_install()', 'do_populate_sysroot()', 'INITSCRIPT', 'USERADD', 'GROUPADD', 'PACKAGES', 'FILES', 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RPROVIDES', 'RREPLACES', 'RCONFLICTS', 'ALLOW_EMPTY', 'populate_packages()', 'do_package()', 'do_deploy()', 'BBCLASSEXTEND'] | ||
| 28 | # Variables that sometimes are a bit long but shouldn't be wrapped | ||
| 29 | nowrap_vars = ['SUMMARY', 'HOMEPAGE', 'BUGTRACKER', r'SRC_URI\[(.+\.)?md5sum\]', r'SRC_URI\[(.+\.)?sha[0-9]+sum\]'] | ||
| 30 | list_vars = ['SRC_URI', 'LIC_FILES_CHKSUM'] | ||
| 31 | meta_vars = ['SUMMARY', 'DESCRIPTION', 'HOMEPAGE', 'BUGTRACKER', 'SECTION'] | ||
| 32 | |||
| 33 | |||
| 34 | def simplify_history(history, d): | ||
| 35 | """ | ||
| 36 | Eliminate any irrelevant events from a variable history | ||
| 37 | """ | ||
| 38 | ret_history = [] | ||
| 39 | has_set = False | ||
| 40 | # Go backwards through the history and remove any immediate operations | ||
| 41 | # before the most recent set | ||
| 42 | for event in reversed(history): | ||
| 43 | if 'flag' in event or not 'file' in event: | ||
| 44 | continue | ||
| 45 | if event['op'] == 'set': | ||
| 46 | if has_set: | ||
| 47 | continue | ||
| 48 | has_set = True | ||
| 49 | elif event['op'] in ('append', 'prepend', 'postdot', 'predot'): | ||
| 50 | # Reminder: "append" and "prepend" mean += and =+ respectively, NOT :append / :prepend | ||
| 51 | if has_set: | ||
| 52 | continue | ||
| 53 | ret_history.insert(0, event) | ||
| 54 | return ret_history | ||
| 55 | |||
| 56 | |||
| 57 | def get_var_files(fn, varlist, d): | ||
| 58 | """Find the file in which each of a list of variables is set. | ||
| 59 | Note: requires variable history to be enabled when parsing. | ||
| 60 | """ | ||
| 61 | varfiles = {} | ||
| 62 | for v in varlist: | ||
| 63 | files = [] | ||
| 64 | if '[' in v: | ||
| 65 | varsplit = v.split('[') | ||
| 66 | varflag = varsplit[1].split(']')[0] | ||
| 67 | history = d.varhistory.variable(varsplit[0]) | ||
| 68 | for event in history: | ||
| 69 | if 'file' in event and event.get('flag', '') == varflag: | ||
| 70 | files.append(event['file']) | ||
| 71 | else: | ||
| 72 | history = d.varhistory.variable(v) | ||
| 73 | for event in history: | ||
| 74 | if 'file' in event and not 'flag' in event: | ||
| 75 | files.append(event['file']) | ||
| 76 | if files: | ||
| 77 | actualfile = files[-1] | ||
| 78 | else: | ||
| 79 | actualfile = None | ||
| 80 | varfiles[v] = actualfile | ||
| 81 | |||
| 82 | return varfiles | ||
| 83 | |||
| 84 | |||
| 85 | def split_var_value(value, assignment=True): | ||
| 86 | """ | ||
| 87 | Split a space-separated variable's value into a list of items, | ||
| 88 | taking into account that some of the items might be made up of | ||
| 89 | expressions containing spaces that should not be split. | ||
| 90 | Parameters: | ||
| 91 | value: | ||
| 92 | The string value to split | ||
| 93 | assignment: | ||
| 94 | True to assume that the value represents an assignment | ||
| 95 | statement, False otherwise. If True, and an assignment | ||
| 96 | statement is passed in, the first item in | ||
| 97 | the returned list will be the part of the assignment | ||
| 98 | statement up to and including the opening quote character, | ||
| 99 | and the last item will be the closing quote. | ||
| 100 | """ | ||
| 101 | inexpr = 0 | ||
| 102 | lastchar = None | ||
| 103 | out = [] | ||
| 104 | buf = '' | ||
| 105 | for char in value: | ||
| 106 | if char == '{': | ||
| 107 | if lastchar == '$': | ||
| 108 | inexpr += 1 | ||
| 109 | elif char == '}': | ||
| 110 | inexpr -= 1 | ||
| 111 | elif assignment and char in '"\'' and inexpr == 0: | ||
| 112 | if buf: | ||
| 113 | out.append(buf) | ||
| 114 | out.append(char) | ||
| 115 | char = '' | ||
| 116 | buf = '' | ||
| 117 | elif char.isspace() and inexpr == 0: | ||
| 118 | char = '' | ||
| 119 | if buf: | ||
| 120 | out.append(buf) | ||
| 121 | buf = '' | ||
| 122 | buf += char | ||
| 123 | lastchar = char | ||
| 124 | if buf: | ||
| 125 | out.append(buf) | ||
| 126 | |||
| 127 | # Join together assignment statement and opening quote | ||
| 128 | outlist = out | ||
| 129 | if assignment: | ||
| 130 | assigfound = False | ||
| 131 | for idx, item in enumerate(out): | ||
| 132 | if '=' in item: | ||
| 133 | assigfound = True | ||
| 134 | if assigfound: | ||
| 135 | if '"' in item or "'" in item: | ||
| 136 | outlist = [' '.join(out[:idx+1])] | ||
| 137 | outlist.extend(out[idx+1:]) | ||
| 138 | break | ||
| 139 | return outlist | ||
| 140 | |||
| 141 | |||
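To make the quoting behaviour of split_var_value() concrete, a hedged example with a made-up SRC_URI assignment; note that the ${@...} expression stays a single item even though it contains spaces:

    from oe.recipeutils import split_var_value

    line = 'SRC_URI = "file://a.patch ${@bb.utils.contains("FOO", "bar", "b.patch", "", d)} file://c.patch"'
    print(split_var_value(line))
    # ['SRC_URI = "',
    #  'file://a.patch',
    #  '${@bb.utils.contains("FOO", "bar", "b.patch", "", d)}',
    #  'file://c.patch',
    #  '"']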
| 142 | def patch_recipe_lines(fromlines, values, trailing_newline=True): | ||
| 143 | """Update or insert variable values into lines from a recipe. | ||
| 144 | Note that some manual inspection/intervention may be required | ||
| 145 | since this cannot handle all situations. | ||
| 146 | """ | ||
| 147 | |||
| 148 | import bb.utils | ||
| 149 | |||
| 150 | if trailing_newline: | ||
| 151 | newline = '\n' | ||
| 152 | else: | ||
| 153 | newline = '' | ||
| 154 | |||
| 155 | nowrap_vars_res = [] | ||
| 156 | for item in nowrap_vars: | ||
| 157 | nowrap_vars_res.append(re.compile('^%s$' % item)) | ||
| 158 | |||
| 159 | recipe_progression_res = [] | ||
| 160 | recipe_progression_restrs = [] | ||
| 161 | for item in recipe_progression: | ||
| 162 | if item.endswith('()'): | ||
| 163 | key = item[:-2] | ||
| 164 | else: | ||
| 165 | key = item | ||
| 166 | restr = r'%s(_[a-zA-Z0-9-_$(){}]+|\[[^\]]*\])?' % key | ||
| 167 | if item.endswith('()'): | ||
| 168 | recipe_progression_restrs.append(restr + '()') | ||
| 169 | else: | ||
| 170 | recipe_progression_restrs.append(restr) | ||
| 171 | recipe_progression_res.append(re.compile('^%s$' % restr)) | ||
| 172 | |||
| 173 | def get_recipe_pos(variable): | ||
| 174 | for i, p in enumerate(recipe_progression_res): | ||
| 175 | if p.match(variable): | ||
| 176 | return i | ||
| 177 | return -1 | ||
| 178 | |||
| 179 | remainingnames = {} | ||
| 180 | for k in values.keys(): | ||
| 181 | remainingnames[k] = get_recipe_pos(k) | ||
| 182 | remainingnames = OrderedDict(sorted(remainingnames.items(), key=lambda x: x[1])) | ||
| 183 | |||
| 184 | modifying = False | ||
| 185 | |||
| 186 | def outputvalue(name, lines, rewindcomments=False): | ||
| 187 | if values[name] is None: | ||
| 188 | return | ||
| 189 | if isinstance(values[name], tuple): | ||
| 190 | op, value = values[name] | ||
| 191 | if op == '+=' and value.strip() == '': | ||
| 192 | return | ||
| 193 | else: | ||
| 194 | value = values[name] | ||
| 195 | op = '=' | ||
| 196 | rawtext = '%s %s "%s"%s' % (name, op, value, newline) | ||
| 197 | addlines = [] | ||
| 198 | nowrap = False | ||
| 199 | for nowrap_re in nowrap_vars_res: | ||
| 200 | if nowrap_re.match(name): | ||
| 201 | nowrap = True | ||
| 202 | break | ||
| 203 | if nowrap: | ||
| 204 | addlines.append(rawtext) | ||
| 205 | elif name in list_vars: | ||
| 206 | splitvalue = split_var_value(value, assignment=False) | ||
| 207 | if len(splitvalue) > 1: | ||
| 208 | linesplit = ' \\\n' + (' ' * (len(name) + 4)) | ||
| 209 | addlines.append('%s %s "%s%s"%s' % (name, op, linesplit.join(splitvalue), linesplit, newline)) | ||
| 210 | else: | ||
| 211 | addlines.append(rawtext) | ||
| 212 | else: | ||
| 213 | wrapped = textwrap.wrap(rawtext) | ||
| 214 | for wrapline in wrapped[:-1]: | ||
| 215 | addlines.append('%s \\%s' % (wrapline, newline)) | ||
| 216 | addlines.append('%s%s' % (wrapped[-1], newline)) | ||
| 217 | |||
| 218 | # Split on newlines - this isn't strictly necessary if you are only | ||
| 219 | # going to write the output to disk, but if you want to compare it | ||
| 220 | # (as patch_recipe_file() will do if patch=True) then it's important. | ||
| 221 | addlines = [line for l in addlines for line in l.splitlines(True)] | ||
| 222 | if rewindcomments: | ||
| 223 | # Ensure we insert the lines before any leading comments | ||
| 224 | # (that we'd want to ensure remain leading the next value) | ||
| 225 | for i, ln in reversed(list(enumerate(lines))): | ||
| 226 | if not ln.startswith('#'): | ||
| 227 | lines[i+1:i+1] = addlines | ||
| 228 | break | ||
| 229 | else: | ||
| 230 | lines.extend(addlines) | ||
| 231 | else: | ||
| 232 | lines.extend(addlines) | ||
| 233 | |||
| 234 | existingnames = [] | ||
| 235 | def patch_recipe_varfunc(varname, origvalue, op, newlines): | ||
| 236 | if modifying: | ||
| 237 | # Insert anything that should come before this variable | ||
| 238 | pos = get_recipe_pos(varname) | ||
| 239 | for k in list(remainingnames): | ||
| 240 | if remainingnames[k] > -1 and pos >= remainingnames[k] and not k in existingnames: | ||
| 241 | outputvalue(k, newlines, rewindcomments=True) | ||
| 242 | del remainingnames[k] | ||
| 243 | # Now change this variable, if it needs to be changed | ||
| 244 | if varname in existingnames and op in ['+=', '=', '=+']: | ||
| 245 | if varname in remainingnames: | ||
| 246 | outputvalue(varname, newlines) | ||
| 247 | del remainingnames[varname] | ||
| 248 | return None, None, 0, True | ||
| 249 | else: | ||
| 250 | if varname in values: | ||
| 251 | existingnames.append(varname) | ||
| 252 | return origvalue, None, 0, True | ||
| 253 | |||
| 254 | # First run - establish which values we want to set are already in the file | ||
| 255 | varlist = [re.escape(item) for item in values.keys()] | ||
| 256 | bb.utils.edit_metadata(fromlines, varlist, patch_recipe_varfunc) | ||
| 257 | # Second run - actually set everything | ||
| 258 | modifying = True | ||
| 259 | varlist.extend(recipe_progression_restrs) | ||
| 260 | changed, tolines = bb.utils.edit_metadata(fromlines, varlist, patch_recipe_varfunc, match_overrides=True) | ||
| 261 | |||
| 262 | if remainingnames: | ||
| 263 | if tolines and tolines[-1].strip() != '': | ||
| 264 | tolines.append('\n') | ||
| 265 | for k in remainingnames.keys(): | ||
| 266 | outputvalue(k, tolines) | ||
| 267 | |||
| 268 | return changed, tolines | ||
| 269 | |||
| 270 | |||
| 271 | def patch_recipe_file(fn, values, patch=False, relpath='', redirect_output=None): | ||
| 272 | """Update or insert variable values into a recipe file (assuming you | ||
| 273 | have already identified the exact file you want to update.) | ||
| 274 | Note that some manual inspection/intervention may be required | ||
| 275 | since this cannot handle all situations. | ||
| 276 | """ | ||
| 277 | |||
| 278 | with open(fn, 'r') as f: | ||
| 279 | fromlines = f.readlines() | ||
| 280 | |||
| 281 | _, tolines = patch_recipe_lines(fromlines, values) | ||
| 282 | |||
| 283 | if redirect_output: | ||
| 284 | with open(os.path.join(redirect_output, os.path.basename(fn)), 'w') as f: | ||
| 285 | f.writelines(tolines) | ||
| 286 | return None | ||
| 287 | elif patch: | ||
| 288 | relfn = os.path.relpath(fn, relpath) | ||
| 289 | diff = difflib.unified_diff(fromlines, tolines, 'a/%s' % relfn, 'b/%s' % relfn) | ||
| 290 | return diff | ||
| 291 | else: | ||
| 292 | with open(fn, 'w') as f: | ||
| 293 | f.writelines(tolines) | ||
| 294 | return None | ||
| 295 | |||
| 296 | |||
| 297 | def localise_file_vars(fn, varfiles, varlist): | ||
| 298 | """Given a list of variables and variable history (fetched with get_var_files()) | ||
| 299 | find where each variable should be set/changed. This handles for example where a | ||
| 300 | recipe includes an inc file where variables might be changed - in most cases | ||
| 301 | we want to update the inc file when changing the variable value rather than adding | ||
| 302 | it to the recipe itself. | ||
| 303 | """ | ||
| 304 | fndir = os.path.dirname(fn) + os.sep | ||
| 305 | |||
| 306 | first_meta_file = None | ||
| 307 | for v in meta_vars: | ||
| 308 | f = varfiles.get(v, None) | ||
| 309 | if f: | ||
| 310 | actualdir = os.path.dirname(f) + os.sep | ||
| 311 | if actualdir.startswith(fndir): | ||
| 312 | first_meta_file = f | ||
| 313 | break | ||
| 314 | |||
| 315 | filevars = defaultdict(list) | ||
| 316 | for v in varlist: | ||
| 317 | f = varfiles[v] | ||
| 318 | # Only return files that are in the same directory as the recipe or in some directory below there | ||
| 319 | # (this excludes bbclass files and common inc files that wouldn't be appropriate to set the variable | ||
| 320 | # in if we were going to set a value specific to this recipe) | ||
| 321 | if f: | ||
| 322 | actualfile = f | ||
| 323 | else: | ||
| 324 | # Variable isn't in a file, if it's one of the "meta" vars, use the first file with a meta var in it | ||
| 325 | if first_meta_file: | ||
| 326 | actualfile = first_meta_file | ||
| 327 | else: | ||
| 328 | actualfile = fn | ||
| 329 | |||
| 330 | actualdir = os.path.dirname(actualfile) + os.sep | ||
| 331 | if not actualdir.startswith(fndir): | ||
| 332 | actualfile = fn | ||
| 333 | filevars[actualfile].append(v) | ||
| 334 | |||
| 335 | return filevars | ||
| 336 | |||
| 337 | def patch_recipe(d, fn, varvalues, patch=False, relpath='', redirect_output=None): | ||
| 338 | """Modify a list of variable values in the specified recipe. Handles inc files if | ||
| 339 | used by the recipe. | ||
| 340 | """ | ||
| 341 | overrides = d.getVar('OVERRIDES').split(':') | ||
| 342 | def override_applicable(hevent): | ||
| 343 | op = hevent['op'] | ||
| 344 | if '[' in op: | ||
| 345 | opoverrides = op.split('[')[1].split(']')[0].split(':') | ||
| 346 | for opoverride in opoverrides: | ||
| 347 | if not opoverride in overrides: | ||
| 348 | return False | ||
| 349 | return True | ||
| 350 | |||
| 351 | varlist = varvalues.keys() | ||
| 352 | fn = os.path.abspath(fn) | ||
| 353 | varfiles = get_var_files(fn, varlist, d) | ||
| 354 | locs = localise_file_vars(fn, varfiles, varlist) | ||
| 355 | patches = [] | ||
| 356 | for f,v in locs.items(): | ||
| 357 | vals = {k: varvalues[k] for k in v} | ||
| 358 | f = os.path.abspath(f) | ||
| 359 | if f == fn: | ||
| 360 | extravals = {} | ||
| 361 | for var, value in vals.items(): | ||
| 362 | if var in list_vars: | ||
| 363 | history = simplify_history(d.varhistory.variable(var), d) | ||
| 364 | recipe_set = False | ||
| 365 | for event in history: | ||
| 366 | if os.path.abspath(event['file']) == fn: | ||
| 367 | if event['op'] == 'set': | ||
| 368 | recipe_set = True | ||
| 369 | if not recipe_set: | ||
| 370 | for event in history: | ||
| 371 | if event['op'].startswith(':remove'): | ||
| 372 | continue | ||
| 373 | if not override_applicable(event): | ||
| 374 | continue | ||
| 375 | newvalue = value.replace(event['detail'], '') | ||
| 376 | if newvalue == value and os.path.abspath(event['file']) == fn and event['op'].startswith(':'): | ||
| 377 | op = event['op'].replace('[', ':').replace(']', '') | ||
| 378 | extravals[var + op] = None | ||
| 379 | value = newvalue | ||
| 380 | vals[var] = ('+=', value) | ||
| 381 | vals.update(extravals) | ||
| 382 | patchdata = patch_recipe_file(f, vals, patch, relpath, redirect_output) | ||
| 383 | if patch: | ||
| 384 | patches.append(patchdata) | ||
| 385 | |||
| 386 | if patch: | ||
| 387 | return patches | ||
| 388 | else: | ||
| 389 | return None | ||
| 390 | |||
| 391 | |||
| 392 | |||
| 393 | def copy_recipe_files(d, tgt_dir, whole_dir=False, download=True, all_variants=False): | ||
| 394 | """Copy (local) recipe files, including both files included via include/require, | ||
| 395 | and files referred to in the SRC_URI variable.""" | ||
| 396 | import bb.fetch2 | ||
| 397 | import oe.path | ||
| 398 | |||
| 399 | # FIXME need a warning if the unexpanded SRC_URI value contains variable references | ||
| 400 | |||
| 401 | uri_values = [] | ||
| 402 | localpaths = [] | ||
| 403 | def fetch_urls(rdata): | ||
| 404 | # Collect the local paths from SRC_URI | ||
| 405 | srcuri = rdata.getVar('SRC_URI') or "" | ||
| 406 | if srcuri not in uri_values: | ||
| 407 | fetch = bb.fetch2.Fetch(srcuri.split(), rdata) | ||
| 408 | if download: | ||
| 409 | fetch.download() | ||
| 410 | for pth in fetch.localpaths(): | ||
| 411 | if pth not in localpaths: | ||
| 412 | localpaths.append(os.path.abspath(pth)) | ||
| 413 | uri_values.append(srcuri) | ||
| 414 | |||
| 415 | fetch_urls(d) | ||
| 416 | if all_variants: | ||
| 417 | # Get files for other variants e.g. in the case of a SRC_URI:append | ||
| 418 | localdata = bb.data.createCopy(d) | ||
| 419 | variants = (localdata.getVar('BBCLASSEXTEND') or '').split() | ||
| 420 | if variants: | ||
| 421 | # Ensure we handle class-target if we're dealing with one of the variants | ||
| 422 | variants.append('target') | ||
| 423 | for variant in variants: | ||
| 424 | localdata.setVar('CLASSOVERRIDE', 'class-%s' % variant) | ||
| 425 | fetch_urls(localdata) | ||
| 426 | |||
| 427 | # Copy local files to target directory and gather any remote files | ||
| 428 | bb_dir = os.path.abspath(os.path.dirname(d.getVar('FILE'))) + os.sep | ||
| 429 | remotes = [] | ||
| 430 | copied = [] | ||
| 431 | # Need to do this in two steps since we want to check against the absolute path | ||
| 432 | includes = [os.path.abspath(path) for path in d.getVar('BBINCLUDED').split() if os.path.exists(path)] | ||
| 433 | # We also check this below, but we don't want any items in this list being considered remotes | ||
| 434 | includes = [path for path in includes if path.startswith(bb_dir)] | ||
| 435 | for path in localpaths + includes: | ||
| 436 | # Only import files that are under the meta directory | ||
| 437 | if path.startswith(bb_dir): | ||
| 438 | if not whole_dir: | ||
| 439 | relpath = os.path.relpath(path, bb_dir) | ||
| 440 | subdir = os.path.join(tgt_dir, os.path.dirname(relpath)) | ||
| 441 | if not os.path.exists(subdir): | ||
| 442 | os.makedirs(subdir) | ||
| 443 | shutil.copy2(path, os.path.join(tgt_dir, relpath)) | ||
| 444 | copied.append(relpath) | ||
| 445 | else: | ||
| 446 | remotes.append(path) | ||
| 447 | # Simply copy whole meta dir, if requested | ||
| 448 | if whole_dir: | ||
| 449 | shutil.copytree(bb_dir, tgt_dir) | ||
| 450 | |||
| 451 | return copied, remotes | ||
| 452 | |||
| 453 | |||
| 454 | def get_recipe_local_files(d, patches=False, archives=False): | ||
| 455 | """Get a list of local files in SRC_URI within a recipe.""" | ||
| 456 | import oe.patch | ||
| 457 | uris = (d.getVar('SRC_URI') or "").split() | ||
| 458 | fetch = bb.fetch2.Fetch(uris, d) | ||
| 459 | # FIXME this list should be factored out somewhere else (such as the | ||
| 460 | # fetcher) though note that this only encompasses actual container formats | ||
| 461 | # i.e. that can contain multiple files as opposed to those that only | ||
| 462 | # contain a compressed stream (i.e. .tar.gz as opposed to just .gz) | ||
| 463 | archive_exts = ['.tar', '.tgz', '.tar.gz', '.tar.Z', '.tbz', '.tbz2', '.tar.bz2', '.txz', '.tar.xz', '.tar.lz', '.zip', '.jar', '.rpm', '.srpm', '.deb', '.ipk', '.tar.7z', '.7z'] | ||
| 464 | ret = {} | ||
| 465 | for uri in uris: | ||
| 466 | if fetch.ud[uri].type == 'file': | ||
| 467 | if (not patches and | ||
| 468 | oe.patch.patch_path(uri, fetch, '', expand=False)): | ||
| 469 | continue | ||
| 470 | # Skip files that are referenced by absolute path | ||
| 471 | fname = fetch.ud[uri].basepath | ||
| 472 | if os.path.isabs(fname): | ||
| 473 | continue | ||
| 474 | # Handle subdir= | ||
| 475 | subdir = fetch.ud[uri].parm.get('subdir', '') | ||
| 476 | if subdir: | ||
| 477 | if os.path.isabs(subdir): | ||
| 478 | continue | ||
| 479 | fname = os.path.join(subdir, fname) | ||
| 480 | localpath = fetch.localpath(uri) | ||
| 481 | if not archives: | ||
| 482 | # Ignore archives that will be unpacked | ||
| 483 | if localpath.endswith(tuple(archive_exts)): | ||
| 484 | unpack = fetch.ud[uri].parm.get('unpack', True) | ||
| 485 | if unpack: | ||
| 486 | continue | ||
| 487 | if os.path.isdir(localpath): | ||
| 488 | for root, dirs, files in os.walk(localpath): | ||
| 489 | for fname in files: | ||
| 490 | fileabspath = os.path.join(root,fname) | ||
| 491 | srcdir = os.path.dirname(localpath) | ||
| 492 | ret[os.path.relpath(fileabspath,srcdir)] = fileabspath | ||
| 493 | else: | ||
| 494 | ret[fname] = localpath | ||
| 495 | return ret | ||
| 496 | |||
| 497 | |||
| 498 | def get_recipe_patches(d): | ||
| 499 | """Get a list of the patches included in SRC_URI within a recipe.""" | ||
| 500 | import oe.patch | ||
| 501 | patches = oe.patch.src_patches(d, expand=False) | ||
| 502 | patchfiles = [] | ||
| 503 | for patch in patches: | ||
| 504 | _, _, local, _, _, parm = bb.fetch.decodeurl(patch) | ||
| 505 | patchfiles.append(local) | ||
| 506 | return patchfiles | ||
| 507 | |||
| 508 | |||
| 509 | def get_recipe_patched_files(d): | ||
| 510 | """ | ||
| 511 | Get the list of patches for a recipe along with the files each patch modifies. | ||
| 512 | Params: | ||
| 513 | d: the datastore for the recipe | ||
| 514 | Returns: | ||
| 515 | a dict mapping patch file path to a list of tuples of changed files and | ||
| 516 | change mode ('A' for add, 'D' for delete or 'M' for modify) | ||
| 517 | """ | ||
| 518 | import oe.patch | ||
| 519 | patches = oe.patch.src_patches(d, expand=False) | ||
| 520 | patchedfiles = {} | ||
| 521 | for patch in patches: | ||
| 522 | _, _, patchfile, _, _, parm = bb.fetch.decodeurl(patch) | ||
| 523 | striplevel = int(parm['striplevel']) | ||
| 524 | patchedfiles[patchfile] = oe.patch.PatchSet.getPatchedFiles(patchfile, striplevel, os.path.join(d.getVar('S'), parm.get('patchdir', ''))) | ||
| 525 | return patchedfiles | ||
| 526 | |||
| 527 | |||
| 528 | def validate_pn(pn): | ||
| 529 | """Perform validation on a recipe name (PN) for a new recipe.""" | ||
| 530 | reserved_names = ['forcevariable', 'append', 'prepend', 'remove'] | ||
| 531 | if not re.match('^[0-9a-z-.+]+$', pn): | ||
| 532 | return 'Recipe name "%s" is invalid: only characters 0-9, a-z, -, + and . are allowed' % pn | ||
| 533 | elif pn in reserved_names: | ||
| 534 | return 'Recipe name "%s" is invalid: is a reserved keyword' % pn | ||
| 535 | elif pn.startswith('pn-'): | ||
| 536 | return 'Recipe name "%s" is invalid: names starting with "pn-" are reserved' % pn | ||
| 537 | elif pn.endswith(('.bb', '.bbappend', '.bbclass', '.inc', '.conf')): | ||
| 538 | return 'Recipe name "%s" is invalid: should be just a name, not a file name' % pn | ||
| 539 | return '' | ||
| 540 | |||
| 541 | |||
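A quick illustration of the names validate_pn() accepts and rejects; the recipe names are examples only and the comments paraphrase the messages returned above:

    from oe.recipeutils import validate_pn

    print(validate_pn("libfoo"))      # '' - valid
    print(validate_pn("LibFoo"))      # invalid characters (uppercase)
    print(validate_pn("append"))      # reserved keyword
    print(validate_pn("libfoo.bb"))   # should be a name, not a file name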
| 542 | def get_bbfile_path(d, destdir, extrapathhint=None): | ||
| 543 | """ | ||
| 544 | Determine the correct path for a recipe within a layer | ||
| 545 | Parameters: | ||
| 546 | d: Recipe-specific datastore | ||
| 547 | destdir: destination directory. Can be the path to the base of the layer or a | ||
| 548 | partial path somewhere within the layer. | ||
| 549 | extrapathhint: a path relative to the base of the layer to try | ||
| 550 | """ | ||
| 551 | import bb.cookerdata | ||
| 552 | |||
| 553 | destdir = os.path.abspath(destdir) | ||
| 554 | destlayerdir = find_layerdir(destdir) | ||
| 555 | |||
| 556 | # Parse the specified layer's layer.conf file directly, in case the layer isn't in bblayers.conf | ||
| 557 | confdata = d.createCopy() | ||
| 558 | confdata.setVar('BBFILES', '') | ||
| 559 | confdata.setVar('LAYERDIR', destlayerdir) | ||
| 560 | destlayerconf = os.path.join(destlayerdir, "conf", "layer.conf") | ||
| 561 | confdata = bb.cookerdata.parse_config_file(destlayerconf, confdata) | ||
| 562 | pn = d.getVar('PN') | ||
| 563 | |||
| 564 | # Parse BBFILES_DYNAMIC and append to BBFILES | ||
| 565 | bbfiles_dynamic = (confdata.getVar('BBFILES_DYNAMIC') or "").split() | ||
| 566 | collections = (confdata.getVar('BBFILE_COLLECTIONS') or "").split() | ||
| 567 | invalid = [] | ||
| 568 | for entry in bbfiles_dynamic: | ||
| 569 | parts = entry.split(":", 1) | ||
| 570 | if len(parts) != 2: | ||
| 571 | invalid.append(entry) | ||
| 572 | continue | ||
| 573 | l, f = parts | ||
| 574 | invert = l[0] == "!" | ||
| 575 | if invert: | ||
| 576 | l = l[1:] | ||
| 577 | if (l in collections and not invert) or (l not in collections and invert): | ||
| 578 | confdata.appendVar("BBFILES", " " + f) | ||
| 579 | if invalid: | ||
| 580 | return None | ||
| 581 | bbfilespecs = (confdata.getVar('BBFILES') or '').split() | ||
| 582 | if destdir == destlayerdir: | ||
| 583 | for bbfilespec in bbfilespecs: | ||
| 584 | if not bbfilespec.endswith('.bbappend'): | ||
| 585 | for match in glob.glob(bbfilespec): | ||
| 586 | splitext = os.path.splitext(os.path.basename(match)) | ||
| 587 | if splitext[1] == '.bb': | ||
| 588 | mpn = splitext[0].split('_')[0] | ||
| 589 | if mpn == pn: | ||
| 590 | return os.path.dirname(match) | ||
| 591 | |||
| 592 | # Try to make up a path that matches BBFILES | ||
| 593 | # this is a little crude, but better than nothing | ||
| 594 | bpn = d.getVar('BPN') | ||
| 595 | recipefn = os.path.basename(d.getVar('FILE')) | ||
| 596 | pathoptions = [destdir] | ||
| 597 | if extrapathhint: | ||
| 598 | pathoptions.append(os.path.join(destdir, extrapathhint)) | ||
| 599 | if destdir == destlayerdir: | ||
| 600 | pathoptions.append(os.path.join(destdir, 'recipes-%s' % bpn, bpn)) | ||
| 601 | pathoptions.append(os.path.join(destdir, 'recipes', bpn)) | ||
| 602 | pathoptions.append(os.path.join(destdir, bpn)) | ||
| 603 | elif not destdir.endswith(('/' + pn, '/' + bpn)): | ||
| 604 | pathoptions.append(os.path.join(destdir, bpn)) | ||
| 605 | closepath = '' | ||
| 606 | for pathoption in pathoptions: | ||
| 607 | bbfilepath = os.path.join(pathoption, 'test.bb') | ||
| 608 | for bbfilespec in bbfilespecs: | ||
| 609 | if fnmatch.fnmatchcase(bbfilepath, bbfilespec): | ||
| 610 | return pathoption | ||
| 611 | return None | ||
| 612 | |||
| 613 | def get_bbappend_path(d, destlayerdir, wildcardver=False): | ||
| 614 | """Determine how a bbappend for a recipe should be named and located within another layer""" | ||
| 615 | |||
| 616 | import bb.cookerdata | ||
| 617 | |||
| 618 | destlayerdir = os.path.abspath(destlayerdir) | ||
| 619 | recipefile = d.getVar('FILE') | ||
| 620 | recipefn = os.path.splitext(os.path.basename(recipefile))[0] | ||
| 621 | if wildcardver and '_' in recipefn: | ||
| 622 | recipefn = recipefn.split('_', 1)[0] + '_%' | ||
| 623 | appendfn = recipefn + '.bbappend' | ||
| 624 | |||
| 625 | # Parse the specified layer's layer.conf file directly, in case the layer isn't in bblayers.conf | ||
| 626 | confdata = d.createCopy() | ||
| 627 | confdata.setVar('BBFILES', '') | ||
| 628 | confdata.setVar('LAYERDIR', destlayerdir) | ||
| 629 | destlayerconf = os.path.join(destlayerdir, "conf", "layer.conf") | ||
| 630 | confdata = bb.cookerdata.parse_config_file(destlayerconf, confdata) | ||
| 631 | |||
| 632 | origlayerdir = find_layerdir(recipefile) | ||
| 633 | if not origlayerdir: | ||
| 634 | return (None, False) | ||
| 635 | # Now join this to the path where the bbappend is going and check if it is covered by BBFILES | ||
| 636 | appendpath = os.path.join(destlayerdir, os.path.relpath(os.path.dirname(recipefile), origlayerdir), appendfn) | ||
| 637 | closepath = '' | ||
| 638 | pathok = True | ||
| 639 | for bbfilespec in confdata.getVar('BBFILES').split(): | ||
| 640 | if fnmatch.fnmatchcase(appendpath, bbfilespec): | ||
| 641 | # Our append path works, we're done | ||
| 642 | break | ||
| 643 | elif bbfilespec.startswith(destlayerdir) and fnmatch.fnmatchcase('test.bbappend', os.path.basename(bbfilespec)): | ||
| 644 | # Try to find the longest matching path | ||
| 645 | if len(bbfilespec) > len(closepath): | ||
| 646 | closepath = bbfilespec | ||
| 647 | else: | ||
| 648 | # Unfortunately the bbappend layer and the original recipe's layer don't have the same structure | ||
| 649 | if closepath: | ||
| 650 | # bbappend layer's layer.conf at least has a spec that picks up .bbappend files | ||
| 651 | # Now we just need to substitute out any wildcards | ||
| 652 | appendsubdir = os.path.relpath(os.path.dirname(closepath), destlayerdir) | ||
| 653 | if 'recipes-*' in appendsubdir: | ||
| 654 | # Try to copy this part from the original recipe path | ||
| 655 | res = re.search('/recipes-[^/]+/', recipefile) | ||
| 656 | if res: | ||
| 657 | appendsubdir = appendsubdir.replace('/recipes-*/', res.group(0)) | ||
| 658 | # This is crude, but we have to do something | ||
| 659 | appendsubdir = appendsubdir.replace('*', recipefn.split('_')[0]) | ||
| 660 | appendsubdir = appendsubdir.replace('?', 'a') | ||
| 661 | appendpath = os.path.join(destlayerdir, appendsubdir, appendfn) | ||
| 662 | else: | ||
| 663 | pathok = False | ||
| 664 | return (appendpath, pathok) | ||
| 665 | |||
| 666 | |||
| 667 | def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, machine=None, extralines=None, removevalues=None, redirect_output=None, params=None, update_original_recipe=False): | ||
| 668 | """ | ||
| 669 | Writes a bbappend file for a recipe | ||
| 670 | Parameters: | ||
| 671 | rd: data dictionary for the recipe | ||
| 672 | destlayerdir: base directory of the layer to place the bbappend in | ||
| 673 | (subdirectory path from there will be determined automatically) | ||
| 674 | srcfiles: dict of source files to add to SRC_URI, where the key | ||
| 675 | is the full path to the file to be added, and the value is a | ||
| 676 | dict with following optional keys: | ||
| 677 | path: the original filename as it would appear in SRC_URI | ||
| 678 | or None if it isn't already present. | ||
| 679 | patchdir: the patchdir parameter | ||
| 680 | newname: the name to give to the new added file. None to use | ||
| 681 | the default value: basename(path) | ||
| 682 | You may pass None for this parameter if you simply want to specify | ||
| 683 | your own content via the extralines parameter. | ||
| 684 | install: dict mapping entries in srcfiles to a tuple of two elements: | ||
| 685 | install path (*without* ${D} prefix) and permission value (as a | ||
| 686 | string, e.g. '0644'). | ||
| 687 | wildcardver: True to use a % wildcard in the bbappend filename, or | ||
| 688 | False to make the bbappend specific to the recipe version. | ||
| 689 | machine: | ||
| 690 | If specified, make the changes in the bbappend specific to this | ||
| 691 | machine. This will also cause PACKAGE_ARCH = "${MACHINE_ARCH}" | ||
| 692 | to be added to the bbappend. | ||
| 693 | extralines: | ||
| 694 | Extra lines to add to the bbappend. This may be a dict of name | ||
| 695 | value pairs, or simply a list of the lines. | ||
| 696 | removevalues: | ||
| 697 | Variable values to remove - a dict of names/values. | ||
| 698 | redirect_output: | ||
| 699 | If specified, redirects writing the output file to the | ||
| 700 | specified directory (for dry-run purposes) | ||
| 701 | params: | ||
| 702 | Parameters to use when adding entries to SRC_URI. If specified, | ||
| 703 | should be a list of dicts with the same length as srcfiles. | ||
| 704 | update_original_recipe: | ||
| 705 | Force to update the original recipe instead of creating/updating | ||
| 706 | a bbappend. destlayerdir must contain the original recipe | ||
| 707 | """ | ||
| 708 | |||
| 709 | if not removevalues: | ||
| 710 | removevalues = {} | ||
| 711 | |||
| 712 | recipefile = rd.getVar('FILE') | ||
| 713 | if update_original_recipe: | ||
| 714 | if destlayerdir not in recipefile: | ||
| 715 | bb.error("destlayerdir %s doesn't contain the original recipe (%s), cannot update it" % (destlayerdir, recipefile)) | ||
| 716 | return (None, None) | ||
| 717 | |||
| 718 | appendpath = recipefile | ||
| 719 | else: | ||
| 720 | # Determine how the bbappend should be named | ||
| 721 | appendpath, pathok = get_bbappend_path(rd, destlayerdir, wildcardver) | ||
| 722 | if not appendpath: | ||
| 723 | bb.error('Unable to determine layer directory containing %s' % recipefile) | ||
| 724 | return (None, None) | ||
| 725 | if not pathok: | ||
| 726 | bb.warn('Unable to determine correct subdirectory path for bbappend file - check that what %s adds to BBFILES also matches .bbappend files. Using %s for now, but until you fix this the bbappend will not be applied.' % (os.path.join(destlayerdir, 'conf', 'layer.conf'), os.path.dirname(appendpath))) | ||
| 727 | |||
| 728 | appenddir = os.path.dirname(appendpath) | ||
| 729 | if not redirect_output: | ||
| 730 | bb.utils.mkdirhier(appenddir) | ||
| 731 | |||
| 732 | # FIXME check if the bbappend doesn't get overridden by a higher priority layer? | ||
| 733 | |||
| 734 | layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS').split()] | ||
| 735 | if not os.path.abspath(destlayerdir) in layerdirs: | ||
| 736 | bb.warn('Specified layer is not currently enabled in bblayers.conf, you will need to add it before this bbappend will be active') | ||
| 737 | |||
| 738 | bbappendlines = [] | ||
| 739 | if extralines: | ||
| 740 | if isinstance(extralines, dict): | ||
| 741 | for name, value in extralines.items(): | ||
| 742 | bbappendlines.append((name, '=', value)) | ||
| 743 | else: | ||
| 744 | # Do our best to split it | ||
| 745 | for line in extralines: | ||
| 746 | if line[-1] == '\n': | ||
| 747 | line = line[:-1] | ||
| 748 | splitline = line.split(None, 2) | ||
| 749 | if len(splitline) == 3: | ||
| 750 | bbappendlines.append(tuple(splitline)) | ||
| 751 | else: | ||
| 752 | raise Exception('Invalid extralines value passed') | ||
| 753 | |||
| 754 | def popline(varname): | ||
| 755 | for i in range(0, len(bbappendlines)): | ||
| 756 | if bbappendlines[i][0] == varname: | ||
| 757 | line = bbappendlines.pop(i) | ||
| 758 | return line | ||
| 759 | return None | ||
| 760 | |||
| 761 | def appendline(varname, op, value): | ||
| 762 | for i in range(0, len(bbappendlines)): | ||
| 763 | item = bbappendlines[i] | ||
| 764 | if item[0] == varname: | ||
| 765 | bbappendlines[i] = (item[0], item[1], item[2] + ' ' + value) | ||
| 766 | break | ||
| 767 | else: | ||
| 768 | bbappendlines.append((varname, op, value)) | ||
| 769 | |||
| 770 | destsubdir = rd.getVar('PN') | ||
| 771 | if not update_original_recipe and srcfiles: | ||
| 772 | bbappendlines.append(('FILESEXTRAPATHS:prepend', ':=', '${THISDIR}/${PN}:')) | ||
| 773 | |||
| 774 | appendoverride = '' | ||
| 775 | if machine: | ||
| 776 | bbappendlines.append(('PACKAGE_ARCH', '=', '${MACHINE_ARCH}')) | ||
| 777 | appendoverride = ':%s' % machine | ||
| 778 | copyfiles = {} | ||
| 779 | if srcfiles: | ||
| 780 | instfunclines = [] | ||
| 781 | for i, (newfile, param) in enumerate(srcfiles.items()): | ||
| 782 | srcurientry = None | ||
| 783 | if not 'path' in param or not param['path']: | ||
| 784 | if 'newname' in param and param['newname']: | ||
| 785 | srcfile = param['newname'] | ||
| 786 | else: | ||
| 787 | srcfile = os.path.basename(newfile) | ||
| 788 | srcurientry = 'file://%s' % srcfile | ||
| 789 | oldentry = None | ||
| 790 | for uri in rd.getVar('SRC_URI').split(): | ||
| 791 | if srcurientry in uri: | ||
| 792 | oldentry = uri | ||
| 793 | if params and params[i]: | ||
| 794 | srcurientry = '%s;%s' % (srcurientry, ';'.join('%s=%s' % (k,v) for k,v in params[i].items())) | ||
| 795 | # Double-check it's not there already | ||
| 796 | # FIXME do we care if the entry is added by another bbappend that might go away? | ||
| 797 | if not srcurientry in rd.getVar('SRC_URI').split(): | ||
| 798 | if machine: | ||
| 799 | if oldentry: | ||
| 800 | appendline('SRC_URI:remove%s' % appendoverride, '=', ' ' + oldentry) | ||
| 801 | appendline('SRC_URI:append%s' % appendoverride, '=', ' ' + srcurientry) | ||
| 802 | else: | ||
| 803 | if oldentry: | ||
| 804 | if update_original_recipe: | ||
| 805 | removevalues['SRC_URI'] = oldentry | ||
| 806 | else: | ||
| 807 | appendline('SRC_URI:remove', '=', oldentry) | ||
| 808 | appendline('SRC_URI', '+=', srcurientry) | ||
| 809 | param['path'] = srcfile | ||
| 810 | else: | ||
| 811 | srcfile = param['path'] | ||
| 812 | copyfiles[newfile] = param | ||
| 813 | if install: | ||
| 814 | institem = install.pop(newfile, None) | ||
| 815 | if institem: | ||
| 816 | (destpath, perms) = institem | ||
| 817 | instdestpath = replace_dir_vars(destpath, rd) | ||
| 818 | instdirline = 'install -d ${D}%s' % os.path.dirname(instdestpath) | ||
| 819 | if not instdirline in instfunclines: | ||
| 820 | instfunclines.append(instdirline) | ||
| 821 | instfunclines.append('install -m %s ${UNPACKDIR}/%s ${D}%s' % (perms, os.path.basename(srcfile), instdestpath)) | ||
| 822 | if instfunclines: | ||
| 823 | bbappendlines.append(('do_install:append%s()' % appendoverride, '', instfunclines)) | ||
| 824 | |||
| 825 | if redirect_output: | ||
| 826 | bb.note('Writing append file %s (dry-run)' % appendpath) | ||
| 827 | outfile = os.path.join(redirect_output, os.path.basename(appendpath)) | ||
| 828 | # Only take a copy if the file isn't already there (this function may be called | ||
| 829 | # multiple times per operation when we're handling overrides) | ||
| 830 | if os.path.exists(appendpath) and not os.path.exists(outfile): | ||
| 831 | shutil.copy2(appendpath, outfile) | ||
| 832 | elif update_original_recipe: | ||
| 833 | outfile = recipefile | ||
| 834 | else: | ||
| 835 | bb.note('Writing append file %s' % appendpath) | ||
| 836 | outfile = appendpath | ||
| 837 | |||
| 838 | if os.path.exists(outfile): | ||
| 839 | # Work around lack of nonlocal in python 2 | ||
| 840 | extvars = {'destsubdir': destsubdir} | ||
| 841 | |||
| 842 | def appendfile_varfunc(varname, origvalue, op, newlines): | ||
| 843 | if varname == 'FILESEXTRAPATHS:prepend': | ||
| 844 | if origvalue.startswith('${THISDIR}/'): | ||
| 845 | popline('FILESEXTRAPATHS:prepend') | ||
| 846 | extvars['destsubdir'] = rd.expand(origvalue.split('${THISDIR}/', 1)[1].rstrip(':')) | ||
| 847 | elif varname == 'PACKAGE_ARCH': | ||
| 848 | if machine: | ||
| 849 | popline('PACKAGE_ARCH') | ||
| 850 | return (machine, None, 4, False) | ||
| 851 | elif varname.startswith('do_install:append'): | ||
| 852 | func = popline(varname) | ||
| 853 | if func: | ||
| 854 | instfunclines = [line.strip() for line in origvalue.strip('\n').splitlines()] | ||
| 855 | for line in func[2]: | ||
| 856 | if not line in instfunclines: | ||
| 857 | instfunclines.append(line) | ||
| 858 | return (instfunclines, None, 4, False) | ||
| 859 | else: | ||
| 860 | splitval = split_var_value(origvalue, assignment=False) | ||
| 861 | changed = False | ||
| 862 | removevar = varname | ||
| 863 | if varname in ['SRC_URI', 'SRC_URI:append%s' % appendoverride]: | ||
| 864 | removevar = 'SRC_URI' | ||
| 865 | line = popline(varname) | ||
| 866 | if line: | ||
| 867 | if line[2] not in splitval: | ||
| 868 | splitval.append(line[2]) | ||
| 869 | changed = True | ||
| 870 | else: | ||
| 871 | line = popline(varname) | ||
| 872 | if line: | ||
| 873 | splitval = [line[2]] | ||
| 874 | changed = True | ||
| 875 | |||
| 876 | if removevar in removevalues: | ||
| 877 | remove = removevalues[removevar] | ||
| 878 | if isinstance(remove, str): | ||
| 879 | if remove in splitval: | ||
| 880 | splitval.remove(remove) | ||
| 881 | changed = True | ||
| 882 | else: | ||
| 883 | for removeitem in remove: | ||
| 884 | if removeitem in splitval: | ||
| 885 | splitval.remove(removeitem) | ||
| 886 | changed = True | ||
| 887 | |||
| 888 | if changed: | ||
| 889 | newvalue = splitval | ||
| 890 | if len(newvalue) == 1: | ||
| 891 | # Ensure it's written out as one line | ||
| 892 | if ':append' in varname: | ||
| 893 | newvalue = ' ' + newvalue[0] | ||
| 894 | else: | ||
| 895 | newvalue = newvalue[0] | ||
| 896 | if not newvalue and (op in ['+=', '.='] or ':append' in varname): | ||
| 897 | # There's no point appending nothing | ||
| 898 | newvalue = None | ||
| 899 | if varname.endswith('()'): | ||
| 900 | indent = 4 | ||
| 901 | else: | ||
| 902 | indent = -1 | ||
| 903 | return (newvalue, None, indent, True) | ||
| 904 | return (origvalue, None, 4, False) | ||
| 905 | |||
| 906 | varnames = [item[0] for item in bbappendlines] | ||
| 907 | if removevalues: | ||
| 908 | varnames.extend(list(removevalues.keys())) | ||
| 909 | |||
| 910 | with open(outfile, 'r') as f: | ||
| 911 | (updated, newlines) = bb.utils.edit_metadata(f, varnames, appendfile_varfunc) | ||
| 912 | |||
| 913 | destsubdir = extvars['destsubdir'] | ||
| 914 | else: | ||
| 915 | updated = False | ||
| 916 | newlines = [] | ||
| 917 | |||
| 918 | if bbappendlines: | ||
| 919 | for line in bbappendlines: | ||
| 920 | if line[0].endswith('()'): | ||
| 921 | newlines.append('%s {\n %s\n}\n' % (line[0], '\n '.join(line[2]))) | ||
| 922 | else: | ||
| 923 | newlines.append('%s %s "%s"\n\n' % line) | ||
| 924 | updated = True | ||
| 925 | |||
| 926 | if updated: | ||
| 927 | with open(outfile, 'w') as f: | ||
| 928 | f.writelines(newlines) | ||
| 929 | |||
| 930 | if copyfiles: | ||
| 931 | if machine: | ||
| 932 | destsubdir = os.path.join(destsubdir, machine) | ||
| 933 | if redirect_output: | ||
| 934 | outdir = redirect_output | ||
| 935 | else: | ||
| 936 | outdir = appenddir | ||
| 937 | for newfile, param in copyfiles.items(): | ||
| 938 | srcfile = param['path'] | ||
| 939 | patchdir = param.get('patchdir', ".") | ||
| 940 | |||
| 941 | if patchdir != ".": | ||
| 942 | newfile = os.path.join(os.path.split(newfile)[0], patchdir, os.path.split(newfile)[1]) | ||
| 943 | filedest = os.path.join(outdir, destsubdir, os.path.basename(srcfile)) | ||
| 944 | if os.path.abspath(newfile) != os.path.abspath(filedest): | ||
| 945 | if newfile.startswith(tempfile.gettempdir()): | ||
| 946 | newfiledisp = os.path.basename(newfile) | ||
| 947 | else: | ||
| 948 | newfiledisp = newfile | ||
| 949 | if redirect_output: | ||
| 950 | bb.note('Copying %s to %s (dry-run)' % (newfiledisp, os.path.join(appenddir, destsubdir, os.path.basename(srcfile)))) | ||
| 951 | else: | ||
| 952 | bb.note('Copying %s to %s' % (newfiledisp, filedest)) | ||
| 953 | bb.utils.mkdirhier(os.path.dirname(filedest)) | ||
| 954 | shutil.copyfile(newfile, filedest) | ||
| 955 | |||
| 956 | return (appendpath, os.path.join(appenddir, destsubdir)) | ||
| 957 | |||
| 958 | |||
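For orientation, here is a minimal usage sketch of the append-writing function whose parameters are documented above (assumed here to be oe.recipeutils.bbappend_recipe, called with a tinfoil-parsed recipe datastore `rd`); the file paths, layer path and variable values are hypothetical:

    import oe.recipeutils

    # srcfiles maps local files to per-file parameter dicts ('path', 'newname',
    # 'patchdir'); an empty dict means "add under the default name".
    srcfiles = {'/tmp/devtool-work/my.conf': {}}

    appendpath, filesdir = oe.recipeutils.bbappend_recipe(
        rd, '/path/to/meta-custom', srcfiles,
        # install paths are given without the ${D} prefix; common prefixes such
        # as /etc are rewritten to ${sysconfdir} by replace_dir_vars() below
        install={'/tmp/devtool-work/my.conf': ('/etc/my.conf', '0644')},
        wildcardver=True,
        extralines={'PR': 'r1'})

The returned tuple gives the bbappend that was written and the files directory the sources were copied into.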
| 959 | def find_layerdir(fn): | ||
| 960 | """ Figure out the path to the base of the layer containing a file (e.g. a recipe)""" | ||
| 961 | pth = os.path.abspath(fn) | ||
| 962 | layerdir = '' | ||
| 963 | while pth: | ||
| 964 | if os.path.exists(os.path.join(pth, 'conf', 'layer.conf')): | ||
| 965 | layerdir = pth | ||
| 966 | break | ||
| 967 | pth = os.path.dirname(pth) | ||
| 968 | if pth == '/': | ||
| 969 | return None | ||
| 970 | return layerdir | ||
| 971 | |||
| 972 | |||
| 973 | def replace_dir_vars(path, d): | ||
| 974 | """Replace common directory paths with appropriate variable references (e.g. /etc becomes ${sysconfdir})""" | ||
| 975 | dirvars = {} | ||
| 976 | # Sort by length so we get the variables we're interested in first | ||
| 977 | for var in sorted(list(d.keys()), key=len): | ||
| 978 | if var.endswith('dir') and var.lower() == var: | ||
| 979 | value = d.getVar(var) | ||
| 980 | if value.startswith('/') and not '\n' in value and value not in dirvars: | ||
| 981 | dirvars[value] = var | ||
| 982 | for dirpath in sorted(list(dirvars.keys()), reverse=True): | ||
| 983 | path = path.replace(dirpath, '${%s}' % dirvars[dirpath]) | ||
| 984 | return path | ||
| 985 | |||
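As an illustration (with default OE settings, where ${bindir} is /usr/bin and ${sysconfdir} is /etc), replace_dir_vars() rewrites literal paths back to their variable form:

    # d is a recipe datastore
    replace_dir_vars('/usr/bin/foo --config /etc/foo.conf', d)
    # -> '${bindir}/foo --config ${sysconfdir}/foo.conf'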
| 986 | def get_recipe_pv_with_pfx_sfx(pv, uri_type): | ||
| 987 | """ | ||
| 988 | Get PV separating prefix and suffix components. | ||
| 989 | |||
| 990 | Returns tuple with pv, prefix and suffix. | ||
| 991 | """ | ||
| 992 | pfx = '' | ||
| 993 | sfx = '' | ||
| 994 | |||
| 995 | if uri_type == 'git': | ||
| 996 | git_regex = re.compile(r"(?P<pfx>v?)(?P<ver>.*?)(?P<sfx>\+[^\+]*(git)?r?(AUTOINC\+)?)(?P<rev>.*)") | ||
| 997 | m = git_regex.match(pv) | ||
| 998 | |||
| 999 | if m: | ||
| 1000 | pv = m.group('ver') | ||
| 1001 | pfx = m.group('pfx') | ||
| 1002 | sfx = m.group('sfx') | ||
| 1003 | else: | ||
| 1004 | regex = re.compile(r"(?P<pfx>(v|r)?)(?P<ver>.*)") | ||
| 1005 | m = regex.match(pv) | ||
| 1006 | if m: | ||
| 1007 | pv = m.group('ver') | ||
| 1008 | pfx = m.group('pfx') | ||
| 1009 | |||
| 1010 | return (pv, pfx, sfx) | ||
| 1011 | |||
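A couple of indicative examples of the splitting behaviour, showing what the regexes above produce for common PV shapes:

    get_recipe_pv_with_pfx_sfx('v2.10', 'https')
    # -> ('2.10', 'v', '')

    get_recipe_pv_with_pfx_sfx('1.2+gitAUTOINC+deadbeef12', 'git')
    # -> ('1.2', '', '+gitAUTOINC')   # the trailing revision part is dropped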
| 1012 | def get_recipe_upstream_version(rd): | ||
| 1013 | """ | ||
| 1014 | Get upstream version of recipe using bb.fetch2 methods with support for | ||
| 1015 | http, https, ftp and git. | ||
| 1016 | |||
| 1017 | bb.fetch2 exceptions can be raised, | ||
| 1018 | FetchError when there is no network access or the upstream site doesn't respond. | ||
| 1019 | NoMethodError when the URI's latest_versionstring method isn't implemented. | ||
| 1020 | |||
| 1021 | Returns a dictionary with version, repository revision, current_version, type and datetime. | ||
| 1022 | Type can be A for Automatic, M for Manual and U for Unknown. | ||
| 1023 | """ | ||
| 1024 | from bb.fetch2 import decodeurl | ||
| 1025 | from datetime import datetime | ||
| 1026 | |||
| 1027 | ru = {} | ||
| 1028 | ru['current_version'] = rd.getVar('PV') | ||
| 1029 | ru['version'] = '' | ||
| 1030 | ru['type'] = 'U' | ||
| 1031 | ru['datetime'] = '' | ||
| 1032 | ru['revision'] = '' | ||
| 1033 | |||
| 1034 | # XXX: If there is no SRC_URI there are no upstream sources, so return | ||
| 1035 | # the current recipe version so that the upstream version check | ||
| 1036 | # declares a match. | ||
| 1037 | src_uris = rd.getVar('SRC_URI') | ||
| 1038 | if not src_uris: | ||
| 1039 | ru['version'] = ru['current_version'] | ||
| 1040 | ru['type'] = 'M' | ||
| 1041 | ru['datetime'] = datetime.now() | ||
| 1042 | return ru | ||
| 1043 | |||
| 1044 | # XXX: we assume that the first entry points to the upstream sources | ||
| 1045 | src_uri = src_uris.split()[0] | ||
| 1046 | uri_type, _, _, _, _, _ = decodeurl(src_uri) | ||
| 1047 | |||
| 1048 | (pv, pfx, sfx) = get_recipe_pv_with_pfx_sfx(rd.getVar('PV'), uri_type) | ||
| 1049 | ru['current_version'] = pv | ||
| 1050 | |||
| 1051 | manual_upstream_version = rd.getVar("RECIPE_UPSTREAM_VERSION") | ||
| 1052 | if manual_upstream_version: | ||
| 1053 | # manual tracking of upstream version. | ||
| 1054 | ru['version'] = manual_upstream_version | ||
| 1055 | ru['type'] = 'M' | ||
| 1056 | |||
| 1057 | manual_upstream_date = rd.getVar("CHECK_DATE") | ||
| 1058 | if manual_upstream_date: | ||
| 1059 | date = datetime.strptime(manual_upstream_date, "%b %d, %Y") | ||
| 1060 | else: | ||
| 1061 | date = datetime.now() | ||
| 1062 | ru['datetime'] = date | ||
| 1063 | |||
| 1064 | elif uri_type == "file": | ||
| 1065 | # files are always up-to-date | ||
| 1066 | ru['version'] = pv | ||
| 1067 | ru['type'] = 'A' | ||
| 1068 | ru['datetime'] = datetime.now() | ||
| 1069 | else: | ||
| 1070 | ud = bb.fetch2.FetchData(src_uri, rd) | ||
| 1071 | if rd.getVar("UPSTREAM_CHECK_COMMITS") == "1": | ||
| 1072 | bb.fetch2.get_srcrev(rd) | ||
| 1073 | upversion = None | ||
| 1074 | revision = None | ||
| 1075 | try: | ||
| 1076 | revision = ud.method.latest_revision(ud, rd, 'default') | ||
| 1077 | upversion = pv | ||
| 1078 | if revision != ud.revision: | ||
| 1079 | upversion = upversion + "-new-commits-available" | ||
| 1080 | except bb.fetch2.FetchError as e: | ||
| 1081 | bb.warn("Unable to obtain latest revision: {}".format(e)) | ||
| 1082 | else: | ||
| 1083 | pupver = ud.method.latest_versionstring(ud, rd) | ||
| 1084 | (upversion, revision) = pupver | ||
| 1085 | |||
| 1086 | if upversion: | ||
| 1087 | ru['version'] = upversion | ||
| 1088 | ru['type'] = 'A' | ||
| 1089 | |||
| 1090 | if revision: | ||
| 1091 | ru['revision'] = revision | ||
| 1092 | |||
| 1093 | ru['datetime'] = datetime.now() | ||
| 1094 | |||
| 1095 | return ru | ||
| 1096 | |||
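A minimal sketch of how the result might be consumed (assuming `rd` is a tinfoil-parsed recipe datastore and network access is available):

    ru = get_recipe_upstream_version(rd)
    if ru['version'] and ru['version'] != ru['current_version']:
        print('%s: %s -> %s (%s)' % (rd.getVar('PN'),
                                     ru['current_version'], ru['version'], ru['type']))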
| 1097 | def _get_recipe_upgrade_status(data): | ||
| 1098 | uv = get_recipe_upstream_version(data) | ||
| 1099 | |||
| 1100 | pn = data.getVar('PN') | ||
| 1101 | cur_ver = uv['current_version'] | ||
| 1102 | |||
| 1103 | upstream_version_unknown = data.getVar('UPSTREAM_VERSION_UNKNOWN') | ||
| 1104 | if not uv['version']: | ||
| 1105 | status = "UNKNOWN" if upstream_version_unknown else "UNKNOWN_BROKEN" | ||
| 1106 | else: | ||
| 1107 | cmp = vercmp_string(uv['current_version'], uv['version']) | ||
| 1108 | if cmp == -1: | ||
| 1109 | status = "UPDATE" if not upstream_version_unknown else "KNOWN_BROKEN" | ||
| 1110 | elif cmp == 0: | ||
| 1111 | status = "MATCH" if not upstream_version_unknown else "KNOWN_BROKEN" | ||
| 1112 | else: | ||
| 1113 | status = "UNKNOWN" if upstream_version_unknown else "UNKNOWN_BROKEN" | ||
| 1114 | |||
| 1115 | next_ver = uv['version'] if uv['version'] else "N/A" | ||
| 1116 | revision = uv['revision'] if uv['revision'] else "N/A" | ||
| 1117 | maintainer = data.getVar('RECIPE_MAINTAINER') | ||
| 1118 | no_upgrade_reason = data.getVar('RECIPE_NO_UPDATE_REASON') | ||
| 1119 | |||
| 1120 | return {'pn':pn, 'status':status, 'cur_ver':cur_ver, 'next_ver':next_ver, 'maintainer':maintainer, 'revision':revision, 'no_upgrade_reason':no_upgrade_reason} | ||
| 1121 | |||
| 1122 | def get_recipe_upgrade_status(recipes=None): | ||
| 1123 | pkgs_list = [] | ||
| 1124 | data_copy_list = [] | ||
| 1125 | copy_vars = ('SRC_URI', | ||
| 1126 | 'PV', | ||
| 1127 | 'DL_DIR', | ||
| 1128 | 'PN', | ||
| 1129 | 'CACHE', | ||
| 1130 | 'PERSISTENT_DIR', | ||
| 1131 | 'BB_URI_HEADREVS', | ||
| 1132 | 'UPSTREAM_CHECK_COMMITS', | ||
| 1133 | 'UPSTREAM_CHECK_GITTAGREGEX', | ||
| 1134 | 'UPSTREAM_CHECK_REGEX', | ||
| 1135 | 'UPSTREAM_CHECK_URI', | ||
| 1136 | 'UPSTREAM_VERSION_UNKNOWN', | ||
| 1137 | 'RECIPE_MAINTAINER', | ||
| 1138 | 'RECIPE_NO_UPDATE_REASON', | ||
| 1139 | 'RECIPE_UPSTREAM_VERSION', | ||
| 1140 | 'RECIPE_UPSTREAM_DATE', | ||
| 1141 | 'CHECK_DATE', | ||
| 1142 | 'FETCHCMD_bzr', | ||
| 1143 | 'FETCHCMD_ccrc', | ||
| 1144 | 'FETCHCMD_cvs', | ||
| 1145 | 'FETCHCMD_git', | ||
| 1146 | 'FETCHCMD_hg', | ||
| 1147 | 'FETCHCMD_npm', | ||
| 1148 | 'FETCHCMD_osc', | ||
| 1149 | 'FETCHCMD_p4', | ||
| 1150 | 'FETCHCMD_repo', | ||
| 1151 | 'FETCHCMD_s3', | ||
| 1152 | 'FETCHCMD_svn', | ||
| 1153 | 'FETCHCMD_wget', | ||
| 1154 | ) | ||
| 1155 | |||
| 1156 | with bb.tinfoil.Tinfoil() as tinfoil: | ||
| 1157 | tinfoil.prepare(config_only=False) | ||
| 1158 | |||
| 1159 | if not recipes: | ||
| 1160 | recipes = tinfoil.all_recipe_files(variants=False) | ||
| 1161 | |||
| 1162 | recipeincludes = {} | ||
| 1163 | for fn in recipes: | ||
| 1164 | try: | ||
| 1165 | if fn.startswith("/"): | ||
| 1166 | data = tinfoil.parse_recipe_file(fn) | ||
| 1167 | else: | ||
| 1168 | data = tinfoil.parse_recipe(fn) | ||
| 1169 | except bb.providers.NoProvider: | ||
| 1170 | bb.note(" No provider for %s" % fn) | ||
| 1171 | continue | ||
| 1172 | |||
| 1173 | unreliable = data.getVar('UPSTREAM_CHECK_UNRELIABLE') | ||
| 1174 | if unreliable == "1": | ||
| 1175 | bb.note(" Skipping package %s as its upstream check is unreliable" % data.getVar('PN')) | ||
| 1176 | continue | ||
| 1177 | |||
| 1178 | data_copy = bb.data.init() | ||
| 1179 | for var in copy_vars: | ||
| 1180 | data_copy.setVar(var, data.getVar(var)) | ||
| 1181 | for k in data: | ||
| 1182 | if k.startswith('SRCREV'): | ||
| 1183 | data_copy.setVar(k, data.getVar(k)) | ||
| 1184 | |||
| 1185 | data_copy_list.append(data_copy) | ||
| 1186 | |||
| 1187 | recipeincludes[data.getVar('FILE')] = {'bbincluded':data.getVar('BBINCLUDED').split(),'pn':data.getVar('PN')} | ||
| 1188 | |||
| 1189 | from concurrent.futures import ProcessPoolExecutor | ||
| 1190 | with ProcessPoolExecutor(max_workers=utils.cpu_count()) as executor: | ||
| 1191 | pkgs_list = executor.map(_get_recipe_upgrade_status, data_copy_list) | ||
| 1192 | |||
| 1193 | return _group_recipes(pkgs_list, _get_common_include_recipes(recipeincludes)) | ||
| 1194 | |||
| 1195 | def get_common_include_recipes(): | ||
| 1196 | with bb.tinfoil.Tinfoil() as tinfoil: | ||
| 1197 | tinfoil.prepare(config_only=False) | ||
| 1198 | |||
| 1199 | recipes = tinfoil.all_recipe_files(variants=False) | ||
| 1200 | |||
| 1201 | recipeincludes = {} | ||
| 1202 | for fn in recipes: | ||
| 1203 | data = tinfoil.parse_recipe_file(fn) | ||
| 1204 | recipeincludes[fn] = {'bbincluded':data.getVar('BBINCLUDED').split(),'pn':data.getVar('PN')} | ||
| 1205 | return _get_common_include_recipes(recipeincludes) | ||
| 1206 | |||
| 1207 | def _get_common_include_recipes(recipeincludes_all): | ||
| 1208 | recipeincludes = {} | ||
| 1209 | for fn,data in recipeincludes_all.items(): | ||
| 1210 | bbincluded_filtered = [i for i in data['bbincluded'] if os.path.dirname(i) == os.path.dirname(fn) and i != fn] | ||
| 1211 | if bbincluded_filtered: | ||
| 1212 | recipeincludes[data['pn']] = bbincluded_filtered | ||
| 1213 | |||
| 1214 | recipeincludes_inverted = {} | ||
| 1215 | for k,v in recipeincludes.items(): | ||
| 1216 | for i in v: | ||
| 1217 | recipeincludes_inverted.setdefault(i,set()).add(k) | ||
| 1218 | |||
| 1219 | recipeincludes_inverted_filtered = {k:v for k,v in recipeincludes_inverted.items() if len(v) > 1} | ||
| 1220 | |||
| 1221 | recipes_with_shared_includes = list() | ||
| 1222 | for v in recipeincludes_inverted_filtered.values(): | ||
| 1223 | recipeset = v | ||
| 1224 | for v1 in recipeincludes_inverted_filtered.values(): | ||
| 1225 | if recipeset.intersection(v1): | ||
| 1226 | recipeset.update(v1) | ||
| 1227 | if recipeset not in recipes_with_shared_includes: | ||
| 1228 | recipes_with_shared_includes.append(recipeset) | ||
| 1229 | |||
| 1230 | return recipes_with_shared_includes | ||
| 1231 | |||
| 1232 | def _group_recipes(recipes, groups): | ||
| 1233 | recipedict = {} | ||
| 1234 | for r in recipes: | ||
| 1235 | recipedict[r['pn']] = r | ||
| 1236 | |||
| 1237 | recipegroups = [] | ||
| 1238 | for g in groups: | ||
| 1239 | recipeset = [] | ||
| 1240 | for r in g: | ||
| 1241 | if r in recipedict.keys(): | ||
| 1242 | recipeset.append(recipedict[r]) | ||
| 1243 | del recipedict[r] | ||
| 1244 | recipegroups.append(recipeset) | ||
| 1245 | |||
| 1246 | for r in recipedict.values(): | ||
| 1247 | recipegroups.append([r]) | ||
| 1248 | return recipegroups | ||
diff --git a/meta/lib/oe/reproducible.py b/meta/lib/oe/reproducible.py deleted file mode 100644 index 0270024a83..0000000000 --- a/meta/lib/oe/reproducible.py +++ /dev/null | |||
| @@ -1,199 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | import os | ||
| 7 | import subprocess | ||
| 8 | import bb | ||
| 9 | |||
| 10 | # For reproducible builds, this code sets the default SOURCE_DATE_EPOCH in each | ||
| 11 | # component's build environment. The format is number of seconds since the | ||
| 12 | # system epoch. | ||
| 13 | # | ||
| 14 | # Upstream components (generally) respect this environment variable, | ||
| 15 | # using it in place of the "current" date and time. | ||
| 16 | # See https://reproducible-builds.org/specs/source-date-epoch/ | ||
| 17 | # | ||
| 18 | # The default value of SOURCE_DATE_EPOCH comes from the function | ||
| 19 | # get_source_date_epoch_value which reads from the SDE_FILE, or if the file | ||
| 20 | # is not available will use the fallback of SOURCE_DATE_EPOCH_FALLBACK. | ||
| 21 | # | ||
| 22 | # The SDE_FILE is normally constructed from the function | ||
| 23 | # create_source_date_epoch_stamp which is typically added as a postfuncs to | ||
| 24 | # the do_unpack task. If a recipe does NOT have do_unpack, it should be added | ||
| 25 | # to a task that runs after the source is available and before the | ||
| 26 | # do_deploy_source_date_epoch task is executed. | ||
| 27 | # | ||
| 28 | # If a recipe wishes to override the default behavior it should set its own | ||
| 29 | # SOURCE_DATE_EPOCH or override the do_deploy_source_date_epoch_stamp task | ||
| 30 | # with recipe-specific functionality to write the appropriate | ||
| 31 | # SOURCE_DATE_EPOCH into the SDE_FILE. | ||
| 32 | # | ||
| 33 | # SOURCE_DATE_EPOCH is intended to be a reproducible value. This value should | ||
| 34 | # be reproducible for anyone who builds the same revision from the same | ||
| 35 | # sources. | ||
| 36 | # | ||
| 37 | # There are 5 ways the create_source_date_epoch_stamp function determines what | ||
| 38 | # becomes SOURCE_DATE_EPOCH: | ||
| 39 | # | ||
| 40 | # 1. Use the value from __source_date_epoch.txt file if this file exists. | ||
| 41 | # This file was most likely created in the previous build by one of the | ||
| 42 | # following methods 2,3,4. | ||
| 43 | # Alternatively, it can be provided by a recipe via SRC_URI. | ||
| 44 | # | ||
| 45 | # If the file does not exist: | ||
| 46 | # | ||
| 47 | # 2. If there is a git checkout, use the last git commit timestamp. | ||
| 48 | # Git does not preserve file timestamps on checkout. | ||
| 49 | # | ||
| 50 | # 3. Use the mtime of "known" files such as NEWS, CHANGELOG, ... | ||
| 51 | # This works for well-kept repositories distributed via tarball. | ||
| 52 | # | ||
| 53 | # 4. Use the modification time of the youngest file in the source tree, if | ||
| 54 | # there is one. | ||
| 55 | # This will be the newest file from the distribution tarball, if any. | ||
| 56 | # | ||
| 57 | # 5. Fall back to a fixed timestamp (SOURCE_DATE_EPOCH_FALLBACK). | ||
| 58 | # | ||
| 59 | # Once the value is determined, it is stored in the recipe's SDE_FILE. | ||
| 60 | |||
| 61 | def get_source_date_epoch_from_known_files(d, sourcedir): | ||
| 62 | source_date_epoch = None | ||
| 63 | newest_file = None | ||
| 64 | known_files = set(["NEWS", "ChangeLog", "Changelog", "CHANGES"]) | ||
| 65 | for file in known_files: | ||
| 66 | filepath = os.path.join(sourcedir, file) | ||
| 67 | if os.path.isfile(filepath): | ||
| 68 | mtime = int(os.lstat(filepath).st_mtime) | ||
| 69 | # There may be more than one "known_file" present; if so, use the youngest one | ||
| 70 | if not source_date_epoch or mtime > source_date_epoch: | ||
| 71 | source_date_epoch = mtime | ||
| 72 | newest_file = filepath | ||
| 73 | if newest_file: | ||
| 74 | bb.debug(1, "SOURCE_DATE_EPOCH taken from: %s" % newest_file) | ||
| 75 | return source_date_epoch | ||
| 76 | |||
| 77 | def find_git_folder(d, sourcedir): | ||
| 78 | # First guess: UNPACKDIR/BB_GIT_DEFAULT_DESTSUFFIX | ||
| 79 | # This is the default git fetcher unpack path | ||
| 80 | unpackdir = d.getVar('UNPACKDIR') | ||
| 81 | default_destsuffix = d.getVar('BB_GIT_DEFAULT_DESTSUFFIX') | ||
| 82 | gitpath = os.path.join(unpackdir, default_destsuffix, ".git") | ||
| 83 | if os.path.isdir(gitpath): | ||
| 84 | return gitpath | ||
| 85 | |||
| 86 | # Second guess: ${S} | ||
| 87 | gitpath = os.path.join(sourcedir, ".git") | ||
| 88 | if os.path.isdir(gitpath): | ||
| 89 | return gitpath | ||
| 90 | |||
| 91 | # Perhaps there was a subpath or destsuffix specified. | ||
| 92 | # Go looking in the UNPACKDIR | ||
| 93 | for root, dirs, files in os.walk(unpackdir, topdown=True): | ||
| 94 | if '.git' in dirs: | ||
| 95 | return os.path.join(root, ".git") | ||
| 96 | |||
| 97 | for root, dirs, files in os.walk(sourcedir, topdown=True): | ||
| 98 | if '.git' in dirs: | ||
| 99 | return os.path.join(root, ".git") | ||
| 100 | |||
| 101 | bb.warn("Failed to find a git repository in UNPACKDIR: %s" % unpackdir) | ||
| 102 | return None | ||
| 103 | |||
| 104 | def get_source_date_epoch_from_git(d, sourcedir): | ||
| 105 | if not "git://" in d.getVar('SRC_URI') and not "gitsm://" in d.getVar('SRC_URI'): | ||
| 106 | return None | ||
| 107 | |||
| 108 | gitpath = find_git_folder(d, sourcedir) | ||
| 109 | if not gitpath: | ||
| 110 | return None | ||
| 111 | |||
| 112 | # Check that the repository has a valid HEAD; it may not if subdir is used | ||
| 113 | # in SRC_URI | ||
| 114 | p = subprocess.run(['git', '--git-dir', gitpath, 'rev-parse', 'HEAD'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) | ||
| 115 | if p.returncode != 0: | ||
| 116 | bb.debug(1, "%s does not have a valid HEAD: %s" % (gitpath, p.stdout.decode('utf-8'))) | ||
| 117 | return None | ||
| 118 | |||
| 119 | bb.debug(1, "git repository: %s" % gitpath) | ||
| 120 | p = subprocess.run(['git', '-c', 'log.showSignature=false', '--git-dir', gitpath, 'log', '-1', '--pretty=%ct'], | ||
| 121 | check=True, stdout=subprocess.PIPE) | ||
| 122 | return int(p.stdout.decode('utf-8')) | ||
| 123 | |||
| 124 | def get_source_date_epoch_from_youngest_file(d, sourcedir): | ||
| 125 | if sourcedir == d.getVar('UNPACKDIR'): | ||
| 126 | # These sources are almost certainly not from a tarball | ||
| 127 | return None | ||
| 128 | |||
| 129 | # Do it the hard way: check all files and find the youngest one... | ||
| 130 | source_date_epoch = None | ||
| 131 | newest_file = None | ||
| 132 | for root, dirs, files in os.walk(sourcedir, topdown=True): | ||
| 133 | files = [f for f in files if not f[0] == '.'] | ||
| 134 | |||
| 135 | for fname in files: | ||
| 136 | if fname == "singletask.lock": | ||
| 137 | # Ignore externalsrc/devtool lockfile [YOCTO #14921] | ||
| 138 | continue | ||
| 139 | filename = os.path.join(root, fname) | ||
| 140 | try: | ||
| 141 | mtime = int(os.lstat(filename).st_mtime) | ||
| 142 | except ValueError: | ||
| 143 | mtime = 0 | ||
| 144 | if not source_date_epoch or mtime > source_date_epoch: | ||
| 145 | source_date_epoch = mtime | ||
| 146 | newest_file = filename | ||
| 147 | |||
| 148 | if newest_file: | ||
| 149 | bb.debug(1, "Newest file found: %s" % newest_file) | ||
| 150 | return source_date_epoch | ||
| 151 | |||
| 152 | def fixed_source_date_epoch(d): | ||
| 153 | bb.debug(1, "No tarball or git repo found to determine SOURCE_DATE_EPOCH") | ||
| 154 | source_date_epoch = d.getVar('SOURCE_DATE_EPOCH_FALLBACK') | ||
| 155 | if source_date_epoch: | ||
| 156 | bb.debug(1, "Using SOURCE_DATE_EPOCH_FALLBACK") | ||
| 157 | return int(source_date_epoch) | ||
| 158 | return 0 | ||
| 159 | |||
| 160 | def get_source_date_epoch(d, sourcedir): | ||
| 161 | return ( | ||
| 162 | get_source_date_epoch_from_git(d, sourcedir) or | ||
| 163 | get_source_date_epoch_from_youngest_file(d, sourcedir) or | ||
| 164 | fixed_source_date_epoch(d) # Last resort | ||
| 165 | ) | ||
| 166 | |||
| 167 | def epochfile_read(epochfile, d): | ||
| 168 | cached, efile = d.getVar('__CACHED_SOURCE_DATE_EPOCH') or (None, None) | ||
| 169 | if cached and efile == epochfile: | ||
| 170 | return cached | ||
| 171 | |||
| 172 | if cached and epochfile != efile: | ||
| 173 | bb.debug(1, "Epoch file changed from %s to %s" % (efile, epochfile)) | ||
| 174 | |||
| 175 | source_date_epoch = int(d.getVar('SOURCE_DATE_EPOCH_FALLBACK')) | ||
| 176 | try: | ||
| 177 | with open(epochfile, 'r') as f: | ||
| 178 | s = f.read() | ||
| 179 | try: | ||
| 180 | source_date_epoch = int(s) | ||
| 181 | except ValueError: | ||
| 182 | bb.warn("SOURCE_DATE_EPOCH value '%s' is invalid. Reverting to SOURCE_DATE_EPOCH_FALLBACK" % s) | ||
| 183 | source_date_epoch = int(d.getVar('SOURCE_DATE_EPOCH_FALLBACK')) | ||
| 184 | bb.debug(1, "SOURCE_DATE_EPOCH: %d" % source_date_epoch) | ||
| 185 | except FileNotFoundError: | ||
| 186 | bb.debug(1, "Cannot find %s. SOURCE_DATE_EPOCH will default to %d" % (epochfile, source_date_epoch)) | ||
| 187 | |||
| 188 | d.setVar('__CACHED_SOURCE_DATE_EPOCH', (str(source_date_epoch), epochfile)) | ||
| 189 | return str(source_date_epoch) | ||
| 190 | |||
| 191 | def epochfile_write(source_date_epoch, epochfile, d): | ||
| 192 | |||
| 193 | bb.debug(1, "SOURCE_DATE_EPOCH: %d" % source_date_epoch) | ||
| 194 | bb.utils.mkdirhier(os.path.dirname(epochfile)) | ||
| 195 | |||
| 196 | tmp_file = "%s.new" % epochfile | ||
| 197 | with open(tmp_file, 'w') as f: | ||
| 198 | f.write(str(source_date_epoch)) | ||
| 199 | os.rename(tmp_file, epochfile) | ||
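A short sketch tying the helpers above together; the deploy path is hypothetical, and in a real build the epoch file location comes from SDE_FILE/SDE_DEPLOYDIR set by the metadata:

    # Determine the epoch from the unpacked sources, persist it, and read it back.
    sde = get_source_date_epoch(d, d.getVar('S'))
    epochfile_write(sde, '/tmp/deploy-sde/__source_date_epoch.txt', d)
    assert epochfile_read('/tmp/deploy-sde/__source_date_epoch.txt', d) == str(sde)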
diff --git a/meta/lib/oe/rootfs.py b/meta/lib/oe/rootfs.py deleted file mode 100644 index 14befac8fa..0000000000 --- a/meta/lib/oe/rootfs.py +++ /dev/null | |||
| @@ -1,430 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | from abc import ABCMeta, abstractmethod | ||
| 7 | from oe.utils import execute_pre_post_process | ||
| 8 | from oe.package_manager import * | ||
| 9 | from oe.manifest import * | ||
| 10 | import oe.path | ||
| 11 | import shutil | ||
| 12 | import os | ||
| 13 | import subprocess | ||
| 14 | import re | ||
| 15 | |||
| 16 | class Rootfs(object, metaclass=ABCMeta): | ||
| 17 | """ | ||
| 18 | This is an abstract class. Do not instantiate this directly. | ||
| 19 | """ | ||
| 20 | |||
| 21 | def __init__(self, d, progress_reporter=None, logcatcher=None): | ||
| 22 | self.d = d | ||
| 23 | self.pm = None | ||
| 24 | self.image_rootfs = self.d.getVar('IMAGE_ROOTFS') | ||
| 25 | self.deploydir = self.d.getVar('IMGDEPLOYDIR') | ||
| 26 | self.progress_reporter = progress_reporter | ||
| 27 | self.logcatcher = logcatcher | ||
| 28 | |||
| 29 | self.install_order = Manifest.INSTALL_ORDER | ||
| 30 | |||
| 31 | @abstractmethod | ||
| 32 | def _create(self): | ||
| 33 | pass | ||
| 34 | |||
| 35 | @abstractmethod | ||
| 36 | def _get_delayed_postinsts(self): | ||
| 37 | pass | ||
| 38 | |||
| 39 | @abstractmethod | ||
| 40 | def _save_postinsts(self): | ||
| 41 | pass | ||
| 42 | |||
| 43 | @abstractmethod | ||
| 44 | def _log_check(self): | ||
| 45 | pass | ||
| 46 | |||
| 47 | def _log_check_common(self, type, match): | ||
| 48 | # Ignore any lines containing log_check to avoid recursion, and ignore | ||
| 49 | # lines beginning with a + since sh -x may emit code which isn't | ||
| 50 | # actually executed, but may contain error messages | ||
| 51 | excludes = [ 'log_check', r'^\+' ] | ||
| 52 | if hasattr(self, 'log_check_expected_regexes'): | ||
| 53 | excludes.extend(self.log_check_expected_regexes) | ||
| 54 | # Insert custom log_check excludes | ||
| 55 | excludes += [x for x in (self.d.getVar("IMAGE_LOG_CHECK_EXCLUDES") or "").split(" ") if x] | ||
| 56 | excludes = [re.compile(x) for x in excludes] | ||
| 57 | r = re.compile(match) | ||
| 58 | log_path = self.d.expand("${T}/log.do_rootfs") | ||
| 59 | messages = [] | ||
| 60 | with open(log_path, 'r') as log: | ||
| 61 | for line in log: | ||
| 62 | if self.logcatcher and self.logcatcher.contains(line.rstrip()): | ||
| 63 | continue | ||
| 64 | for ee in excludes: | ||
| 65 | m = ee.search(line) | ||
| 66 | if m: | ||
| 67 | break | ||
| 68 | if m: | ||
| 69 | continue | ||
| 70 | |||
| 71 | m = r.search(line) | ||
| 72 | if m: | ||
| 73 | messages.append('[log_check] %s' % line) | ||
| 74 | if messages: | ||
| 75 | if len(messages) == 1: | ||
| 76 | msg = '1 %s message' % type | ||
| 77 | else: | ||
| 78 | msg = '%d %s messages' % (len(messages), type) | ||
| 79 | msg = '[log_check] %s: found %s in the logfile:\n%s' % \ | ||
| 80 | (self.d.getVar('PN'), msg, ''.join(messages)) | ||
| 81 | if type == 'error': | ||
| 82 | bb.fatal(msg) | ||
| 83 | else: | ||
| 84 | bb.warn(msg) | ||
| 85 | |||
| 86 | def _log_check_warn(self): | ||
| 87 | self._log_check_common('warning', '^(warn|Warn|WARNING:)') | ||
| 88 | |||
| 89 | def _log_check_error(self): | ||
| 90 | self._log_check_common('error', self.log_check_regex) | ||
| 91 | |||
| 92 | def _insert_feed_uris(self): | ||
| 93 | if bb.utils.contains("IMAGE_FEATURES", "package-management", | ||
| 94 | True, False, self.d): | ||
| 95 | self.pm.insert_feeds_uris(self.d.getVar('PACKAGE_FEED_URIS') or "", | ||
| 96 | self.d.getVar('PACKAGE_FEED_BASE_PATHS') or "", | ||
| 97 | self.d.getVar('PACKAGE_FEED_ARCHS')) | ||
| 98 | |||
| 99 | |||
| 100 | """ | ||
| 101 | The _cleanup() method should be used to clean up anything that we don't really | ||
| 102 | want to end up on the target. For example, in the case of RPM, the DB locks. | ||
| 103 | The method is called once, at the end of the create() method. | ||
| 104 | """ | ||
| 105 | @abstractmethod | ||
| 106 | def _cleanup(self): | ||
| 107 | pass | ||
| 108 | |||
| 109 | def _setup_dbg_rootfs(self, package_paths): | ||
| 110 | gen_debugfs = self.d.getVar('IMAGE_GEN_DEBUGFS') or '0' | ||
| 111 | if gen_debugfs != '1': | ||
| 112 | return | ||
| 113 | |||
| 114 | bb.note(" Renaming the original rootfs...") | ||
| 115 | try: | ||
| 116 | shutil.rmtree(self.image_rootfs + '-orig') | ||
| 117 | except: | ||
| 118 | pass | ||
| 119 | bb.utils.rename(self.image_rootfs, self.image_rootfs + '-orig') | ||
| 120 | |||
| 121 | bb.note(" Creating debug rootfs...") | ||
| 122 | bb.utils.mkdirhier(self.image_rootfs) | ||
| 123 | |||
| 124 | bb.note(" Copying back package database...") | ||
| 125 | for path in package_paths: | ||
| 126 | bb.utils.mkdirhier(self.image_rootfs + os.path.dirname(path)) | ||
| 127 | if os.path.isdir(self.image_rootfs + '-orig' + path): | ||
| 128 | shutil.copytree(self.image_rootfs + '-orig' + path, self.image_rootfs + path, symlinks=True) | ||
| 129 | elif os.path.isfile(self.image_rootfs + '-orig' + path): | ||
| 130 | shutil.copyfile(self.image_rootfs + '-orig' + path, self.image_rootfs + path) | ||
| 131 | |||
| 132 | # Copy files located in /usr/lib/debug or /usr/src/debug | ||
| 133 | for dir in ["/usr/lib/debug", "/usr/src/debug"]: | ||
| 134 | src = self.image_rootfs + '-orig' + dir | ||
| 135 | if os.path.exists(src): | ||
| 136 | dst = self.image_rootfs + dir | ||
| 137 | bb.utils.mkdirhier(os.path.dirname(dst)) | ||
| 138 | shutil.copytree(src, dst) | ||
| 139 | |||
| 140 | # Copy files with suffix '.debug' or located in '.debug' dir. | ||
| 141 | for root, dirs, files in os.walk(self.image_rootfs + '-orig'): | ||
| 142 | relative_dir = root[len(self.image_rootfs + '-orig'):] | ||
| 143 | for f in files: | ||
| 144 | if f.endswith('.debug') or '/.debug' in relative_dir: | ||
| 145 | bb.utils.mkdirhier(self.image_rootfs + relative_dir) | ||
| 146 | shutil.copy(os.path.join(root, f), | ||
| 147 | self.image_rootfs + relative_dir) | ||
| 148 | |||
| 149 | bb.note(" Install complementary '*-dbg' packages...") | ||
| 150 | self.pm.install_complementary('*-dbg') | ||
| 151 | |||
| 152 | if self.d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg': | ||
| 153 | bb.note(" Install complementary '*-src' packages...") | ||
| 154 | self.pm.install_complementary('*-src') | ||
| 155 | |||
| 156 | """ | ||
| 157 | Install additional debug packages: packages which are not automatically | ||
| 158 | installed as the complementary package of a standard one, e.g. the debug | ||
| 159 | package of a static library. | ||
| 160 | """ | ||
| 161 | extra_debug_pkgs = self.d.getVar('IMAGE_INSTALL_DEBUGFS') | ||
| 162 | if extra_debug_pkgs: | ||
| 163 | bb.note(" Install extra debug packages...") | ||
| 164 | self.pm.install(extra_debug_pkgs.split(), True) | ||
| 165 | |||
| 166 | bb.note(" Removing package database...") | ||
| 167 | for path in package_paths: | ||
| 168 | if os.path.isdir(self.image_rootfs + path): | ||
| 169 | shutil.rmtree(self.image_rootfs + path) | ||
| 170 | elif os.path.isfile(self.image_rootfs + path): | ||
| 171 | os.remove(self.image_rootfs + path) | ||
| 172 | |||
| 173 | bb.note(" Rename debug rootfs...") | ||
| 174 | try: | ||
| 175 | shutil.rmtree(self.image_rootfs + '-dbg') | ||
| 176 | except: | ||
| 177 | pass | ||
| 178 | bb.utils.rename(self.image_rootfs, self.image_rootfs + '-dbg') | ||
| 179 | |||
| 180 | bb.note(" Restoring original rootfs...") | ||
| 181 | bb.utils.rename(self.image_rootfs + '-orig', self.image_rootfs) | ||
| 182 | |||
| 183 | def _exec_shell_cmd(self, cmd): | ||
| 184 | try: | ||
| 185 | subprocess.check_output(cmd, stderr=subprocess.STDOUT) | ||
| 186 | except subprocess.CalledProcessError as e: | ||
| 187 | return("Command '%s' returned %d:\n%s" % (e.cmd, e.returncode, e.output)) | ||
| 188 | |||
| 189 | return None | ||
| 190 | |||
| 191 | def create(self): | ||
| 192 | bb.note("###### Generate rootfs #######") | ||
| 193 | pre_process_cmds = self.d.getVar("ROOTFS_PREPROCESS_COMMAND") | ||
| 194 | post_process_cmds = self.d.getVar("ROOTFS_POSTPROCESS_COMMAND") | ||
| 195 | rootfs_post_install_cmds = self.d.getVar('ROOTFS_POSTINSTALL_COMMAND') | ||
| 196 | |||
| 197 | def make_last(command, commands): | ||
| 198 | commands = commands.split() | ||
| 199 | if command in commands: | ||
| 200 | commands.remove(command) | ||
| 201 | commands.append(command) | ||
| 202 | return " ".join(commands) | ||
| 203 | |||
| 204 | # We want this to run as late as possible, in particular after | ||
| 205 | # systemd_sysusers_create and set_user_group. Using :append is not enough | ||
| 206 | post_process_cmds = make_last("tidy_shadowutils_files", post_process_cmds) | ||
| 207 | post_process_cmds = make_last("rootfs_reproducible", post_process_cmds) | ||
| 208 | |||
| 209 | execute_pre_post_process(self.d, pre_process_cmds) | ||
| 210 | |||
| 211 | if self.progress_reporter: | ||
| 212 | self.progress_reporter.next_stage() | ||
| 213 | |||
| 214 | # call the package manager dependent create method | ||
| 215 | self._create() | ||
| 216 | |||
| 217 | sysconfdir = self.image_rootfs + self.d.getVar('sysconfdir') | ||
| 218 | bb.utils.mkdirhier(sysconfdir) | ||
| 219 | with open(sysconfdir + "/version", "w+") as ver: | ||
| 220 | ver.write(self.d.getVar('BUILDNAME') + "\n") | ||
| 221 | |||
| 222 | execute_pre_post_process(self.d, rootfs_post_install_cmds) | ||
| 223 | |||
| 224 | self.pm.run_intercepts() | ||
| 225 | |||
| 226 | execute_pre_post_process(self.d, post_process_cmds) | ||
| 227 | |||
| 228 | if self.progress_reporter: | ||
| 229 | self.progress_reporter.next_stage() | ||
| 230 | |||
| 231 | if bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs", | ||
| 232 | True, False, self.d) and \ | ||
| 233 | not bb.utils.contains("IMAGE_FEATURES", | ||
| 234 | "read-only-rootfs-delayed-postinsts", | ||
| 235 | True, False, self.d): | ||
| 236 | delayed_postinsts = self._get_delayed_postinsts() | ||
| 237 | if delayed_postinsts is not None: | ||
| 238 | bb.fatal("The following packages could not be configured " | ||
| 239 | "offline and rootfs is read-only: %s" % | ||
| 240 | delayed_postinsts) | ||
| 241 | |||
| 242 | if self.d.getVar('USE_DEVFS') != "1": | ||
| 243 | self._create_devfs() | ||
| 244 | |||
| 245 | self._uninstall_unneeded() | ||
| 246 | |||
| 247 | if self.progress_reporter: | ||
| 248 | self.progress_reporter.next_stage() | ||
| 249 | |||
| 250 | self._insert_feed_uris() | ||
| 251 | |||
| 252 | self._run_ldconfig() | ||
| 253 | |||
| 254 | if self.d.getVar('USE_DEPMOD') != "0": | ||
| 255 | self._generate_kernel_module_deps() | ||
| 256 | |||
| 257 | self._cleanup() | ||
| 258 | self._log_check() | ||
| 259 | |||
| 260 | if self.progress_reporter: | ||
| 261 | self.progress_reporter.next_stage() | ||
| 262 | |||
| 263 | |||
| 264 | def _uninstall_unneeded(self): | ||
| 265 | # Remove the run-postinsts package if no delayed postinsts are found | ||
| 266 | delayed_postinsts = self._get_delayed_postinsts() | ||
| 267 | if delayed_postinsts is None: | ||
| 268 | if os.path.exists(self.d.expand("${IMAGE_ROOTFS}${sysconfdir}/init.d/run-postinsts")) or os.path.exists(self.d.expand("${IMAGE_ROOTFS}${systemd_system_unitdir}/run-postinsts.service")): | ||
| 269 | self.pm.remove(["run-postinsts"]) | ||
| 270 | |||
| 271 | image_rorfs = bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs", | ||
| 272 | True, False, self.d) and \ | ||
| 273 | not bb.utils.contains("IMAGE_FEATURES", | ||
| 274 | "read-only-rootfs-delayed-postinsts", | ||
| 275 | True, False, self.d) | ||
| 276 | |||
| 277 | image_rorfs_force = self.d.getVar('FORCE_RO_REMOVE') | ||
| 278 | |||
| 279 | if image_rorfs or image_rorfs_force == "1": | ||
| 280 | # Remove components that we don't need if it's a read-only rootfs | ||
| 281 | unneeded_pkgs = self.d.getVar("ROOTFS_RO_UNNEEDED").split() | ||
| 282 | pkgs_installed = image_list_installed_packages(self.d) | ||
| 283 | # Make sure update-alternatives is removed last. This is | ||
| 284 | # because its database has to be available while uninstalling | ||
| 285 | # other packages, so that alternative symlinks of packages being | ||
| 286 | # uninstalled can be removed or otherwise managed correctly. | ||
| 287 | provider = self.d.getVar("VIRTUAL-RUNTIME_update-alternatives") | ||
| 288 | pkgs_to_remove = sorted([pkg for pkg in pkgs_installed if pkg in unneeded_pkgs], key=lambda x: x == provider) | ||
| 289 | |||
| 290 | # The update-alternatives provider is removed in its own remove() | ||
| 291 | # call because not all package managers guarantee that packages | ||
| 292 | # are removed in the order they are given in the list (which is | ||
| 293 | # passed to the command line). The sorting done earlier is | ||
| 294 | # used to implement the two-stage removal. | ||
| 295 | if len(pkgs_to_remove) > 1: | ||
| 296 | self.pm.remove(pkgs_to_remove[:-1], False) | ||
| 297 | if len(pkgs_to_remove) > 0: | ||
| 298 | self.pm.remove([pkgs_to_remove[-1]], False) | ||
| 299 | |||
| 300 | if delayed_postinsts: | ||
| 301 | self._save_postinsts() | ||
| 302 | if image_rorfs: | ||
| 303 | bb.warn("There are post install scripts " | ||
| 304 | "in a read-only rootfs") | ||
| 305 | |||
| 306 | post_uninstall_cmds = self.d.getVar("ROOTFS_POSTUNINSTALL_COMMAND") | ||
| 307 | execute_pre_post_process(self.d, post_uninstall_cmds) | ||
| 308 | |||
| 309 | runtime_pkgmanage = bb.utils.contains("IMAGE_FEATURES", "package-management", | ||
| 310 | True, False, self.d) | ||
| 311 | if not runtime_pkgmanage: | ||
| 312 | # Remove the package manager data files | ||
| 313 | self.pm.remove_packaging_data() | ||
| 314 | |||
| 315 | def _run_ldconfig(self): | ||
| 316 | if self.d.getVar('LDCONFIGDEPEND'): | ||
| 317 | bb.note("Executing: ldconfig -r " + self.image_rootfs + " -c new -v -X") | ||
| 318 | self._exec_shell_cmd(['ldconfig', '-r', self.image_rootfs, '-c', | ||
| 319 | 'new', '-v', '-X']) | ||
| 320 | |||
| 321 | image_rorfs = bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs", | ||
| 322 | True, False, self.d) | ||
| 323 | ldconfig_in_features = bb.utils.contains("DISTRO_FEATURES", "ldconfig", | ||
| 324 | True, False, self.d) | ||
| 325 | if image_rorfs or not ldconfig_in_features: | ||
| 326 | ldconfig_cache_dir = os.path.join(self.image_rootfs, "var/cache/ldconfig") | ||
| 327 | if os.path.exists(ldconfig_cache_dir): | ||
| 328 | bb.note("Removing ldconfig auxiliary cache...") | ||
| 329 | shutil.rmtree(ldconfig_cache_dir) | ||
| 330 | |||
| 331 | def _check_for_kernel_modules(self, modules_dir): | ||
| 332 | for root, dirs, files in os.walk(modules_dir, topdown=True): | ||
| 333 | for name in files: | ||
| 334 | found_ko = name.endswith((".ko", ".ko.gz", ".ko.xz", ".ko.zst")) | ||
| 335 | if found_ko: | ||
| 336 | return found_ko | ||
| 337 | return False | ||
| 338 | |||
| 339 | def _generate_kernel_module_deps(self): | ||
| 340 | modules_dir = os.path.join(self.image_rootfs, 'lib', 'modules') | ||
| 341 | # if we don't have any modules don't bother to do the depmod | ||
| 342 | if not self._check_for_kernel_modules(modules_dir): | ||
| 343 | bb.note("No Kernel Modules found, not running depmod") | ||
| 344 | return | ||
| 345 | |||
| 346 | pkgdatadir = self.d.getVar('PKGDATA_DIR') | ||
| 347 | |||
| 348 | # PKGDATA_DIR can include multiple kernels so we run depmod for each | ||
| 349 | # one of them. | ||
| 350 | for direntry in os.listdir(pkgdatadir): | ||
| 351 | match = re.match('(.*)-depmod', direntry) | ||
| 352 | if not match: | ||
| 353 | continue | ||
| 354 | kernel_package_name = match.group(1) | ||
| 355 | |||
| 356 | kernel_abi_ver_file = oe.path.join(pkgdatadir, direntry, kernel_package_name + '-abiversion') | ||
| 357 | if not os.path.exists(kernel_abi_ver_file): | ||
| 358 | bb.fatal("No kernel-abiversion file found (%s), cannot run depmod, aborting" % kernel_abi_ver_file) | ||
| 359 | |||
| 360 | with open(kernel_abi_ver_file) as f: | ||
| 361 | kernel_ver = f.read().strip(' \n') | ||
| 362 | |||
| 363 | versioned_modules_dir = os.path.join(self.image_rootfs, modules_dir, kernel_ver) | ||
| 364 | |||
| 365 | if os.path.exists(versioned_modules_dir): | ||
| 366 | bb.note("Running depmodwrapper for %s ..." % versioned_modules_dir) | ||
| 367 | if self._exec_shell_cmd(['depmodwrapper', '-a', '-b', self.image_rootfs, kernel_ver, kernel_package_name]): | ||
| 368 | bb.fatal("Kernel modules dependency generation failed") | ||
| 369 | else: | ||
| 370 | bb.note("Not running depmodwrapper for %s since directory does not exist" % versioned_modules_dir) | ||
| 371 | |||
| 372 | """ | ||
| 373 | Create devfs: | ||
| 374 | * IMAGE_DEVICE_TABLE is the old name for an absolute path to a device table file | ||
| 375 | * IMAGE_DEVICE_TABLES is the new name for a file, or list of files, searched | ||
| 376 | for in the BBPATH | ||
| 377 | If neither is specified then the default name of files/device_table-minimal.txt | ||
| 378 | is searched for in the BBPATH (same as the old version). | ||
| 379 | """ | ||
| 380 | def _create_devfs(self): | ||
| 381 | devtable_list = [] | ||
| 382 | devtable = self.d.getVar('IMAGE_DEVICE_TABLE') | ||
| 383 | if devtable is not None: | ||
| 384 | devtable_list.append(devtable) | ||
| 385 | else: | ||
| 386 | devtables = self.d.getVar('IMAGE_DEVICE_TABLES') | ||
| 387 | if devtables is None: | ||
| 388 | devtables = 'files/device_table-minimal.txt' | ||
| 389 | for devtable in devtables.split(): | ||
| 390 | devtable_list.append("%s" % bb.utils.which(self.d.getVar('BBPATH'), devtable)) | ||
| 391 | |||
| 392 | for devtable in devtable_list: | ||
| 393 | self._exec_shell_cmd(["makedevs", "-r", | ||
| 394 | self.image_rootfs, "-D", devtable]) | ||
| 395 | |||
| 396 | |||
| 397 | def get_class_for_type(imgtype): | ||
| 398 | import importlib | ||
| 399 | mod = importlib.import_module('oe.package_manager.' + imgtype + '.rootfs') | ||
| 400 | return mod.PkgRootfs | ||
| 401 | |||
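For example, with an RPM-based image the helper above resolves to the RPM-specific rootfs implementation:

    # Equivalent to: from oe.package_manager.rpm.rootfs import PkgRootfs
    PkgRootfs = get_class_for_type('rpm')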
| 402 | def variable_depends(d, manifest_dir=None): | ||
| 403 | img_type = d.getVar('IMAGE_PKGTYPE') | ||
| 404 | cls = get_class_for_type(img_type) | ||
| 405 | return cls._depends_list() | ||
| 406 | |||
| 407 | def create_rootfs(d, manifest_dir=None, progress_reporter=None, logcatcher=None): | ||
| 408 | env_bkp = os.environ.copy() | ||
| 409 | |||
| 410 | img_type = d.getVar('IMAGE_PKGTYPE') | ||
| 411 | |||
| 412 | cls = get_class_for_type(img_type) | ||
| 413 | cls(d, manifest_dir, progress_reporter, logcatcher).create() | ||
| 414 | os.environ.clear() | ||
| 415 | os.environ.update(env_bkp) | ||
| 416 | |||
| 417 | |||
| 418 | def image_list_installed_packages(d, rootfs_dir=None): | ||
| 419 | # There's no rootfs for baremetal images | ||
| 420 | if bb.data.inherits_class('baremetal-image', d): | ||
| 421 | return "" | ||
| 422 | |||
| 423 | if not rootfs_dir: | ||
| 424 | rootfs_dir = d.getVar('IMAGE_ROOTFS') | ||
| 425 | |||
| 426 | img_type = d.getVar('IMAGE_PKGTYPE') | ||
| 427 | |||
| 428 | import importlib | ||
| 429 | cls = importlib.import_module('oe.package_manager.' + img_type) | ||
| 430 | return cls.PMPkgsList(d, rootfs_dir).list_pkgs() | ||
diff --git a/meta/lib/oe/rootfspostcommands.py b/meta/lib/oe/rootfspostcommands.py deleted file mode 100644 index 5386eea409..0000000000 --- a/meta/lib/oe/rootfspostcommands.py +++ /dev/null | |||
| @@ -1,90 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | import os | ||
| 8 | |||
| 9 | def sort_shadowutils_file(filename, mapping): | ||
| 10 | """ | ||
| 11 | Sorts a passwd or group file based on the numeric ID in the third column. | ||
| 12 | If a mapping is given, the name from the first column is mapped via that | ||
| 13 | dictionary instead (necessary for /etc/shadow and /etc/gshadow). If not, | ||
| 14 | a new mapping is created on the fly and returned. | ||
| 15 | """ | ||
| 16 | |||
| 17 | new_mapping = {} | ||
| 18 | with open(filename, 'rb+') as f: | ||
| 19 | lines = f.readlines() | ||
| 20 | # No explicit error checking for the sake of simplicity. /etc | ||
| 21 | # files are assumed to be well-formed, causing exceptions if | ||
| 22 | # not. | ||
| 23 | for line in lines: | ||
| 24 | entries = line.split(b':') | ||
| 25 | name = entries[0] | ||
| 26 | if mapping is None: | ||
| 27 | id = int(entries[2]) | ||
| 28 | else: | ||
| 29 | id = mapping[name] | ||
| 30 | new_mapping[name] = id | ||
| 31 | # Sort by numeric id first, with entire line as secondary key | ||
| 32 | # (just in case that there is more than one entry for the same id). | ||
| 33 | lines.sort(key=lambda line: (new_mapping[line.split(b':')[0]], line)) | ||
| 34 | # We overwrite the entire file, i.e. no truncate() necessary. | ||
| 35 | f.seek(0) | ||
| 36 | f.write(b''.join(lines)) | ||
| 37 | |||
| 38 | return new_mapping | ||
| 39 | |||
| 40 | def sort_shadowutils_files(sysconfdir): | ||
| 41 | """ | ||
| 42 | Sorts shadow-utils 'passwd' and 'group' files in a rootfs' /etc directory | ||
| 43 | by ID. | ||
| 44 | """ | ||
| 45 | |||
| 46 | for main, shadow in (('passwd', 'shadow'), | ||
| 47 | ('group', 'gshadow')): | ||
| 48 | filename = os.path.join(sysconfdir, main) | ||
| 49 | if os.path.exists(filename): | ||
| 50 | mapping = sort_shadowutils_file(filename, None) | ||
| 51 | filename = os.path.join(sysconfdir, shadow) | ||
| 52 | if os.path.exists(filename): | ||
| 53 | sort_shadowutils_file(filename, mapping) | ||
| 54 | |||
| 55 | def remove_shadowutils_backup_file(filename): | ||
| 56 | """ | ||
| 57 | Remove shadow-utils backup file for files like /etc/passwd. | ||
| 58 | """ | ||
| 59 | |||
| 60 | backup_filename = filename + '-' | ||
| 61 | if os.path.exists(backup_filename): | ||
| 62 | os.unlink(backup_filename) | ||
| 63 | |||
| 64 | def remove_shadowutils_backup_files(sysconfdir): | ||
| 65 | """ | ||
| 66 | Remove shadow-utils backup files in a rootfs /etc directory. They are not | ||
| 67 | needed in the initial root filesystem and sorting them can be inconsistent | ||
| 68 | (YOCTO #11043). | ||
| 69 | """ | ||
| 70 | |||
| 71 | for filename in ( | ||
| 72 | 'group', | ||
| 73 | 'gshadow', | ||
| 74 | 'passwd', | ||
| 75 | 'shadow', | ||
| 76 | 'subgid', | ||
| 77 | 'subuid', | ||
| 78 | ): | ||
| 79 | filepath = os.path.join(sysconfdir, filename) | ||
| 80 | remove_shadowutils_backup_file(filepath) | ||
| 81 | |||
| 82 | def tidy_shadowutils_files(sysconfdir): | ||
| 83 | """ | ||
| 84 | Tidy up shadow-utils files. | ||
| 85 | """ | ||
| 86 | |||
| 87 | remove_shadowutils_backup_files(sysconfdir) | ||
| 88 | sort_shadowutils_files(sysconfdir) | ||
| 89 | |||
| 90 | return True | ||
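Typical use is on a rootfs' /etc directory after packages have been installed, e.g. (with a normal image datastore `d`):

    sysconfdir = d.getVar('IMAGE_ROOTFS') + d.getVar('sysconfdir')
    tidy_shadowutils_files(sysconfdir)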
diff --git a/meta/lib/oe/rust.py b/meta/lib/oe/rust.py deleted file mode 100644 index 1dc9cf150d..0000000000 --- a/meta/lib/oe/rust.py +++ /dev/null | |||
| @@ -1,11 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: MIT | ||
| 5 | # | ||
| 6 | |||
| 7 | # Handle mismatches between `uname -m`-style output and Rust's arch names | ||
| 8 | def arch_to_rust_arch(arch): | ||
| 9 | if arch == "ppc64le": | ||
| 10 | return "powerpc64le" | ||
| 11 | return arch | ||
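For instance:

    arch_to_rust_arch('ppc64le')  # -> 'powerpc64le'
    arch_to_rust_arch('x86_64')   # -> 'x86_64' (unchanged)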
diff --git a/meta/lib/oe/sbom.py b/meta/lib/oe/sbom.py deleted file mode 100644 index fd4b6895d8..0000000000 --- a/meta/lib/oe/sbom.py +++ /dev/null | |||
| @@ -1,120 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | import collections | ||
| 8 | import os | ||
| 9 | DepRecipe = collections.namedtuple("DepRecipe", ("doc", "doc_sha1", "recipe")) | ||
| 10 | DepSource = collections.namedtuple("DepSource", ("doc", "doc_sha1", "recipe", "file")) | ||
| 11 | |||
| 12 | |||
| 13 | def get_recipe_spdxid(d): | ||
| 14 | return "SPDXRef-%s-%s" % ("Recipe", d.getVar("PN")) | ||
| 15 | |||
| 16 | |||
| 17 | def get_download_spdxid(d, idx): | ||
| 18 | return "SPDXRef-Download-%s-%d" % (d.getVar("PN"), idx) | ||
| 19 | |||
| 20 | |||
| 21 | def get_package_spdxid(pkg): | ||
| 22 | return "SPDXRef-Package-%s" % pkg | ||
| 23 | |||
| 24 | |||
| 25 | def get_source_file_spdxid(d, idx): | ||
| 26 | return "SPDXRef-SourceFile-%s-%d" % (d.getVar("PN"), idx) | ||
| 27 | |||
| 28 | |||
| 29 | def get_packaged_file_spdxid(pkg, idx): | ||
| 30 | return "SPDXRef-PackagedFile-%s-%d" % (pkg, idx) | ||
| 31 | |||
| 32 | |||
| 33 | def get_image_spdxid(img): | ||
| 34 | return "SPDXRef-Image-%s" % img | ||
| 35 | |||
| 36 | |||
| 37 | def get_sdk_spdxid(sdk): | ||
| 38 | return "SPDXRef-SDK-%s" % sdk | ||
| 39 | |||
| 40 | |||
| 41 | def _doc_path_by_namespace(spdx_deploy, arch, doc_namespace): | ||
| 42 | return spdx_deploy / "by-namespace" / arch / doc_namespace.replace("/", "_") | ||
| 43 | |||
| 44 | |||
| 45 | def doc_find_by_namespace(spdx_deploy, search_arches, doc_namespace): | ||
| 46 | for pkgarch in search_arches: | ||
| 47 | p = _doc_path_by_namespace(spdx_deploy, pkgarch, doc_namespace) | ||
| 48 | if os.path.exists(p): | ||
| 49 | return p | ||
| 50 | return None | ||
| 51 | |||
| 52 | |||
| 53 | def _doc_path_by_hashfn(spdx_deploy, arch, doc_name, hashfn): | ||
| 54 | return ( | ||
| 55 | spdx_deploy / "by-hash" / arch / hashfn.split()[1] / (doc_name + ".spdx.json") | ||
| 56 | ) | ||
| 57 | |||
| 58 | |||
| 59 | def doc_find_by_hashfn(spdx_deploy, search_arches, doc_name, hashfn): | ||
| 60 | for pkgarch in search_arches: | ||
| 61 | p = _doc_path_by_hashfn(spdx_deploy, pkgarch, doc_name, hashfn) | ||
| 62 | if os.path.exists(p): | ||
| 63 | return p | ||
| 64 | return None | ||
| 65 | |||
| 66 | |||
| 67 | def doc_path(spdx_deploy, doc_name, arch, subdir): | ||
| 68 | return spdx_deploy / arch / subdir / (doc_name + ".spdx.json") | ||
| 69 | |||
| 70 | |||
| 71 | def write_doc(d, spdx_doc, arch, subdir, spdx_deploy=None, indent=None): | ||
| 72 | from pathlib import Path | ||
| 73 | |||
| 74 | if spdx_deploy is None: | ||
| 75 | spdx_deploy = Path(d.getVar("SPDXDEPLOY")) | ||
| 76 | |||
| 77 | dest = doc_path(spdx_deploy, spdx_doc.name, arch, subdir) | ||
| 78 | dest.parent.mkdir(exist_ok=True, parents=True) | ||
| 79 | with dest.open("wb") as f: | ||
| 80 | doc_sha1 = spdx_doc.to_json(f, sort_keys=True, indent=indent) | ||
| 81 | |||
| 82 | l = _doc_path_by_namespace(spdx_deploy, arch, spdx_doc.documentNamespace) | ||
| 83 | l.parent.mkdir(exist_ok=True, parents=True) | ||
| 84 | l.symlink_to(os.path.relpath(dest, l.parent)) | ||
| 85 | |||
| 86 | l = _doc_path_by_hashfn( | ||
| 87 | spdx_deploy, arch, spdx_doc.name, d.getVar("BB_HASHFILENAME") | ||
| 88 | ) | ||
| 89 | l.parent.mkdir(exist_ok=True, parents=True) | ||
| 90 | l.symlink_to(os.path.relpath(dest, l.parent)) | ||
| 91 | |||
| 92 | return doc_sha1 | ||
| 93 | |||
| 94 | |||
| 95 | def read_doc(fn): | ||
| 96 | import hashlib | ||
| 97 | import oe.spdx | ||
| 98 | import io | ||
| 99 | import contextlib | ||
| 100 | |||
| 101 | @contextlib.contextmanager | ||
| 102 | def get_file(): | ||
| 103 | if isinstance(fn, io.IOBase): | ||
| 104 | yield fn | ||
| 105 | else: | ||
| 106 | with fn.open("rb") as f: | ||
| 107 | yield f | ||
| 108 | |||
| 109 | with get_file() as f: | ||
| 110 | sha1 = hashlib.sha1() | ||
| 111 | while True: | ||
| 112 | chunk = f.read(4096) | ||
| 113 | if not chunk: | ||
| 114 | break | ||
| 115 | sha1.update(chunk) | ||
| 116 | |||
| 117 | f.seek(0) | ||
| 118 | doc = oe.spdx.SPDXDocument.from_json(f) | ||
| 119 | |||
| 120 | return (doc, sha1.hexdigest()) | ||
diff --git a/meta/lib/oe/sbom30.py b/meta/lib/oe/sbom30.py deleted file mode 100644 index 227ac51877..0000000000 --- a/meta/lib/oe/sbom30.py +++ /dev/null | |||
| @@ -1,1096 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | from pathlib import Path | ||
| 8 | |||
| 9 | import oe.spdx30 | ||
| 10 | import bb | ||
| 11 | import re | ||
| 12 | import hashlib | ||
| 13 | import uuid | ||
| 14 | import os | ||
| 15 | import oe.spdx_common | ||
| 16 | from datetime import datetime, timezone | ||
| 17 | |||
| 18 | OE_SPDX_BASE = "https://rdf.openembedded.org/spdx/3.0/" | ||
| 19 | |||
| 20 | VEX_VERSION = "1.0.0" | ||
| 21 | |||
| 22 | SPDX_BUILD_TYPE = "http://openembedded.org/bitbake" | ||
| 23 | |||
| 24 | OE_ALIAS_PREFIX = "http://spdxdocs.org/openembedded-alias/by-doc-hash/" | ||
| 25 | OE_DOC_ALIAS_PREFIX = "http://spdxdocs.org/openembedded-alias/doc/" | ||
| 26 | |||
| 27 | |||
| 28 | @oe.spdx30.register(OE_SPDX_BASE + "id-alias") | ||
| 29 | class OEIdAliasExtension(oe.spdx30.extension_Extension): | ||
| 30 | """ | ||
| 31 | This extension allows an Element to provide an internal alias for the SPDX | ||
| 32 | ID. Since SPDX requires unique URIs for each SPDX ID, most of the objects | ||
| 33 | created have a unique UUID namespace and the unihash of the task encoded in | ||
| 34 | their SPDX ID. However, this causes a problem for referencing documents | ||
| 35 | across recipes, since the taskhash of a dependency may not factor into the | ||
| 36 | taskhash of the current task and thus the current task won't rebuild and | ||
| 37 | see the new SPDX ID when the dependency changes (e.g. ABI safe recipes and | ||
| 38 | tasks). | ||
| 39 | |||
| 40 | To help work around this, this extension provides a non-unique alias for an | ||
| 41 | Element by which it can be referenced from other tasks/recipes. When a | ||
| 42 | final SBoM is created, references to these aliases will be replaced with | ||
| 43 | the actual unique SPDX ID. | ||
| 44 | |||
| 45 | Most Elements will automatically get an alias created when they are written | ||
| 46 | out if they do not already have one. To suppress the creation of an alias, | ||
| 47 | add an extension with a blank `alias` property. | ||
| 48 | |||
| 49 | |||
| 50 | It is an internal extension that should be removed when writing out a final | ||
| 51 | SBoM (a brief usage sketch follows this file's diff). | ||
| 52 | """ | ||
| 53 | |||
| 54 | CLOSED = True | ||
| 55 | INTERNAL = True | ||
| 56 | |||
| 57 | @classmethod | ||
| 58 | def _register_props(cls): | ||
| 59 | super()._register_props() | ||
| 60 | cls._add_property( | ||
| 61 | "alias", | ||
| 62 | oe.spdx30.StringProp(), | ||
| 63 | OE_SPDX_BASE + "alias", | ||
| 64 | max_count=1, | ||
| 65 | ) | ||
| 66 | |||
| 67 | cls._add_property( | ||
| 68 | "link_name", | ||
| 69 | oe.spdx30.StringProp(), | ||
| 70 | OE_SPDX_BASE + "link-name", | ||
| 71 | max_count=1, | ||
| 72 | ) | ||
| 73 | |||
| 74 | |||
| 75 | @oe.spdx30.register(OE_SPDX_BASE + "file-name-alias") | ||
| 76 | class OEFileNameAliasExtension(oe.spdx30.extension_Extension): | ||
| 77 | CLOSED = True | ||
| 78 | INTERNAL = True | ||
| 79 | |||
| 80 | @classmethod | ||
| 81 | def _register_props(cls): | ||
| 82 | super()._register_props() | ||
| 83 | cls._add_property( | ||
| 84 | "aliases", | ||
| 85 | oe.spdx30.ListProp(oe.spdx30.StringProp()), | ||
| 86 | OE_SPDX_BASE + "filename-alias", | ||
| 87 | ) | ||
| 88 | |||
| 89 | |||
| 90 | @oe.spdx30.register(OE_SPDX_BASE + "license-scanned") | ||
| 91 | class OELicenseScannedExtension(oe.spdx30.extension_Extension): | ||
| 92 | """ | ||
| 93 | The presence of this extension means the file has already been scanned for | ||
| 94 | license information | ||
| 95 | """ | ||
| 96 | |||
| 97 | CLOSED = True | ||
| 98 | INTERNAL = True | ||
| 99 | |||
| 100 | |||
| 101 | @oe.spdx30.register(OE_SPDX_BASE + "document-extension") | ||
| 102 | class OEDocumentExtension(oe.spdx30.extension_Extension): | ||
| 103 | """ | ||
| 104 | This extension is added to a SpdxDocument to indicate various useful bits | ||
| 105 | of information about its contents | ||
| 106 | """ | ||
| 107 | |||
| 108 | CLOSED = True | ||
| 109 | |||
| 110 | @classmethod | ||
| 111 | def _register_props(cls): | ||
| 112 | super()._register_props() | ||
| 113 | cls._add_property( | ||
| 114 | "is_native", | ||
| 115 | oe.spdx30.BooleanProp(), | ||
| 116 | OE_SPDX_BASE + "is-native", | ||
| 117 | max_count=1, | ||
| 118 | ) | ||
| 119 | |||
| 120 | |||
| 121 | def spdxid_hash(*items): | ||
| 122 | h = hashlib.md5() | ||
| 123 | for i in items: | ||
| 124 | if isinstance(i, oe.spdx30.Element): | ||
| 125 | h.update(i._id.encode("utf-8")) | ||
| 126 | else: | ||
| 127 | h.update(i.encode("utf-8")) | ||
| 128 | return h.hexdigest() | ||
| 129 | |||
| 130 | |||
| 131 | def spdx_sde(d): | ||
| 132 | sde = d.getVar("SOURCE_DATE_EPOCH") | ||
| 133 | if not sde: | ||
| 134 | return datetime.now(timezone.utc) | ||
| 135 | |||
| 136 | return datetime.fromtimestamp(int(sde), timezone.utc) | ||
| 137 | |||
| 138 | |||
| 139 | def get_element_link_id(e): | ||
| 140 | """ | ||
| 141 | Get the string ID which should be used to link to an Element. If the | ||
| 142 | element has an alias, that will be preferred, otherwise its SPDX ID will be | ||
| 143 | used. | ||
| 144 | """ | ||
| 145 | ext = get_alias(e) | ||
| 146 | if ext is not None and ext.alias: | ||
| 147 | return ext.alias | ||
| 148 | return e._id | ||
| 149 | |||
| 150 | |||
| 151 | def get_alias(obj): | ||
| 152 | for ext in obj.extension: | ||
| 153 | if not isinstance(ext, OEIdAliasExtension): | ||
| 154 | continue | ||
| 155 | return ext | ||
| 156 | |||
| 157 | return None | ||
| 158 | |||
| 159 | |||
| 160 | def hash_id(_id): | ||
| 161 | return hashlib.sha256(_id.encode("utf-8")).hexdigest() | ||
| 162 | |||
| 163 | |||
| 164 | def to_list(l): | ||
| 165 | if isinstance(l, set): | ||
| 166 | l = sorted(list(l)) | ||
| 167 | |||
| 168 | if not isinstance(l, (list, tuple)): | ||
| 169 | raise TypeError("Must be a list or tuple. Got %s" % type(l)) | ||
| 170 | |||
| 171 | return l | ||
| 172 | |||
| 173 | |||
| 174 | class ObjectSet(oe.spdx30.SHACLObjectSet): | ||
| 175 | def __init__(self, d): | ||
| 176 | super().__init__() | ||
| 177 | self.d = d | ||
| 178 | self.alias_prefix = None | ||
| 179 | |||
| 180 | def create_index(self): | ||
| 181 | self.by_sha256_hash = {} | ||
| 182 | super().create_index() | ||
| 183 | |||
| 184 | def add_index(self, obj): | ||
| 185 | # Check that all elements are given an ID before being inserted | ||
| 186 | if isinstance(obj, oe.spdx30.Element): | ||
| 187 | if not obj._id: | ||
| 188 | raise ValueError("Element missing ID") | ||
| 189 | |||
| 190 | alias_ext = get_alias(obj) | ||
| 191 | if alias_ext is not None and alias_ext.alias: | ||
| 192 | self.obj_by_id[alias_ext.alias] = obj | ||
| 193 | |||
| 194 | for v in obj.verifiedUsing: | ||
| 195 | if not isinstance(v, oe.spdx30.Hash): | ||
| 196 | continue | ||
| 197 | |||
| 198 | if v.algorithm != oe.spdx30.HashAlgorithm.sha256: | ||
| 199 | continue | ||
| 200 | |||
| 201 | self.by_sha256_hash.setdefault(v.hashValue, set()).add(obj) | ||
| 202 | |||
| 203 | super().add_index(obj) | ||
| 204 | if isinstance(obj, oe.spdx30.SpdxDocument): | ||
| 205 | self.doc = obj | ||
| 206 | alias_ext = get_alias(obj) | ||
| 207 | if alias_ext is not None and alias_ext.alias: | ||
| 208 | self.alias_prefix = OE_ALIAS_PREFIX + hash_id(alias_ext.alias) + "/" | ||
| 209 | |||
| 210 | def __filter_obj(self, obj, attr_filter): | ||
| 211 | return all(getattr(obj, k) == v for k, v in attr_filter.items()) | ||
| 212 | |||
| 213 | def foreach_filter(self, typ, *, match_subclass=True, **attr_filter): | ||
| 214 | for obj in self.foreach_type(typ, match_subclass=match_subclass): | ||
| 215 | if self.__filter_obj(obj, attr_filter): | ||
| 216 | yield obj | ||
| 217 | |||
| 218 | def find_filter(self, typ, *, match_subclass=True, **attr_filter): | ||
| 219 | for obj in self.foreach_filter( | ||
| 220 | typ, match_subclass=match_subclass, **attr_filter | ||
| 221 | ): | ||
| 222 | return obj | ||
| 223 | return None | ||
| 224 | |||
| 225 | def foreach_root(self, typ, **attr_filter): | ||
| 226 | for obj in self.doc.rootElement: | ||
| 227 | if not isinstance(obj, typ): | ||
| 228 | continue | ||
| 229 | |||
| 230 | if self.__filter_obj(obj, attr_filter): | ||
| 231 | yield obj | ||
| 232 | |||
| 233 | def find_root(self, typ, **attr_filter): | ||
| 234 | for obj in self.foreach_root(typ, **attr_filter): | ||
| 235 | return obj | ||
| 236 | return None | ||
| 237 | |||
| 238 | def add_root(self, obj): | ||
| 239 | self.add(obj) | ||
| 240 | self.doc.rootElement.append(obj) | ||
| 241 | return obj | ||
| 242 | |||
| 243 | def is_native(self): | ||
| 244 | for e in self.doc.extension: | ||
| 245 | if not isinstance(e, oe.sbom30.OEDocumentExtension): | ||
| 246 | continue | ||
| 247 | |||
| 248 | if e.is_native is not None: | ||
| 249 | return e.is_native | ||
| 250 | |||
| 251 | return False | ||
| 252 | |||
| 253 | def set_is_native(self, is_native): | ||
| 254 | for e in self.doc.extension: | ||
| 255 | if not isinstance(e, oe.sbom30.OEDocumentExtension): | ||
| 256 | continue | ||
| 257 | |||
| 258 | e.is_native = is_native | ||
| 259 | return | ||
| 260 | |||
| 261 | if is_native: | ||
| 262 | self.doc.extension.append(oe.sbom30.OEDocumentExtension(is_native=True)) | ||
| 263 | |||
| 264 | def add_aliases(self): | ||
| 265 | for o in self.foreach_type(oe.spdx30.Element): | ||
| 266 | self.set_element_alias(o) | ||
| 267 | |||
| 268 | def new_alias_id(self, obj, replace): | ||
| 269 | unihash = self.d.getVar("BB_UNIHASH") | ||
| 270 | namespace = self.get_namespace() | ||
| 271 | if unihash not in obj._id: | ||
| 272 | bb.warn(f"Unihash {unihash} not found in {obj._id}") | ||
| 273 | return None | ||
| 274 | |||
| 275 | if namespace not in obj._id: | ||
| 276 | bb.warn(f"Namespace {namespace} not found in {obj._id}") | ||
| 277 | return None | ||
| 278 | |||
| 279 | return obj._id.replace(unihash, "UNIHASH").replace( | ||
| 280 | namespace, replace + self.d.getVar("PN") | ||
| 281 | ) | ||
| 282 | |||
| 283 | def remove_internal_extensions(self): | ||
| 284 | def remove(o): | ||
| 285 | o.extension = [e for e in o.extension if not getattr(e, "INTERNAL", False)] | ||
| 286 | |||
| 287 | for o in self.foreach_type(oe.spdx30.Element): | ||
| 288 | remove(o) | ||
| 289 | |||
| 290 | if self.doc: | ||
| 291 | remove(self.doc) | ||
| 292 | |||
| 293 | def get_namespace(self): | ||
| 294 | namespace_uuid = uuid.uuid5( | ||
| 295 | uuid.NAMESPACE_DNS, self.d.getVar("SPDX_UUID_NAMESPACE") | ||
| 296 | ) | ||
| 297 | pn = self.d.getVar("PN") | ||
| 298 | return "%s/%s-%s" % ( | ||
| 299 | self.d.getVar("SPDX_NAMESPACE_PREFIX"), | ||
| 300 | pn, | ||
| 301 | str(uuid.uuid5(namespace_uuid, pn)), | ||
| 302 | ) | ||
| 303 | |||
| 304 | def set_element_alias(self, e): | ||
| 305 | if not e._id or e._id.startswith("_:"): | ||
| 306 | return | ||
| 307 | |||
| 308 | alias_ext = get_alias(e) | ||
| 309 | if alias_ext is None: | ||
| 310 | alias_id = self.new_alias_id(e, self.alias_prefix) | ||
| 311 | if alias_id is not None: | ||
| 312 | e.extension.append(OEIdAliasExtension(alias=alias_id)) | ||
| 313 | elif ( | ||
| 314 | alias_ext.alias | ||
| 315 | and not isinstance(e, oe.spdx30.SpdxDocument) | ||
| 316 | and not alias_ext.alias.startswith(self.alias_prefix) | ||
| 317 | ): | ||
| 318 | bb.warn( | ||
| 319 | f"Element {e._id} has alias {alias_ext.alias}, but it should have prefix {self.alias_prefix}" | ||
| 320 | ) | ||
| 321 | |||
| 322 | def new_spdxid(self, *suffix, include_unihash=True): | ||
| 323 | items = [self.get_namespace()] | ||
| 324 | if include_unihash: | ||
| 325 | unihash = self.d.getVar("BB_UNIHASH") | ||
| 326 | items.append(unihash) | ||
| 327 | items.extend(re.sub(r"[^a-zA-Z0-9_-]", "_", s) for s in suffix) | ||
| 328 | return "/".join(items) | ||
| 329 | |||
| 330 | def new_import(self, key): | ||
| 331 | base = f"SPDX_IMPORTS_{key}" | ||
| 332 | spdxid = self.d.getVar(f"{base}_spdxid") | ||
| 333 | if not spdxid: | ||
| 334 | bb.fatal(f"{key} is not a valid SPDX_IMPORTS key") | ||
| 335 | |||
| 336 | for i in self.doc.import_: | ||
| 337 | if i.externalSpdxId == spdxid: | ||
| 338 | # Already imported | ||
| 339 | return spdxid | ||
| 340 | |||
| 341 | m = oe.spdx30.ExternalMap(externalSpdxId=spdxid) | ||
| 342 | |||
| 343 | uri = self.d.getVar(f"{base}_uri") | ||
| 344 | if uri: | ||
| 345 | m.locationHint = uri | ||
| 346 | |||
| 347 | for pyname, algorithm in oe.spdx30.HashAlgorithm.NAMED_INDIVIDUALS.items(): | ||
| 348 | value = self.d.getVar(f"{base}_hash_{pyname}") | ||
| 349 | if value: | ||
| 350 | m.verifiedUsing.append( | ||
| 351 | oe.spdx30.Hash( | ||
| 352 | algorithm=algorithm, | ||
| 353 | hashValue=value, | ||
| 354 | ) | ||
| 355 | ) | ||
| 356 | |||
| 357 | self.doc.import_.append(m) | ||
| 358 | return spdxid | ||
| 359 | |||
| 360 | def new_agent(self, varname, *, creation_info=None, add=True): | ||
| 361 | ref_varname = self.d.getVar(f"{varname}_ref") | ||
| 362 | if ref_varname: | ||
| 363 | if ref_varname == varname: | ||
| 364 | bb.fatal(f"{varname} cannot reference itself") | ||
| 365 | return self.new_agent(ref_varname, creation_info=creation_info) | ||
| 366 | |||
| 367 | import_key = self.d.getVar(f"{varname}_import") | ||
| 368 | if import_key: | ||
| 369 | return self.new_import(import_key) | ||
| 370 | |||
| 371 | name = self.d.getVar(f"{varname}_name") | ||
| 372 | if not name: | ||
| 373 | return None | ||
| 374 | |||
| 375 | spdxid = self.new_spdxid("agent", name) | ||
| 376 | agent = self.find_by_id(spdxid) | ||
| 377 | if agent is not None: | ||
| 378 | return agent | ||
| 379 | |||
| 380 | agent_type = self.d.getVar("%s_type" % varname) | ||
| 381 | if agent_type == "person": | ||
| 382 | agent = oe.spdx30.Person() | ||
| 383 | elif agent_type == "software": | ||
| 384 | agent = oe.spdx30.SoftwareAgent() | ||
| 385 | elif agent_type == "organization": | ||
| 386 | agent = oe.spdx30.Organization() | ||
| 387 | elif not agent_type or agent_type == "agent": | ||
| 388 | agent = oe.spdx30.Agent() | ||
| 389 | else: | ||
| 390 | bb.fatal("Unknown agent type '%s' in %s_type" % (agent_type, varname)) | ||
| 391 | |||
| 392 | agent._id = spdxid | ||
| 393 | agent.creationInfo = creation_info or self.doc.creationInfo | ||
| 394 | agent.name = name | ||
| 395 | |||
| 396 | comment = self.d.getVar("%s_comment" % varname) | ||
| 397 | if comment: | ||
| 398 | agent.comment = comment | ||
| 399 | |||
| 400 | for ( | ||
| 401 | pyname, | ||
| 402 | idtype, | ||
| 403 | ) in oe.spdx30.ExternalIdentifierType.NAMED_INDIVIDUALS.items(): | ||
| 404 | value = self.d.getVar("%s_id_%s" % (varname, pyname)) | ||
| 405 | if value: | ||
| 406 | agent.externalIdentifier.append( | ||
| 407 | oe.spdx30.ExternalIdentifier( | ||
| 408 | externalIdentifierType=idtype, | ||
| 409 | identifier=value, | ||
| 410 | ) | ||
| 411 | ) | ||
| 412 | |||
| 413 | if add: | ||
| 414 | self.add(agent) | ||
| 415 | |||
| 416 | return agent | ||
| 417 | |||
| 418 | def new_creation_info(self): | ||
| 419 | creation_info = oe.spdx30.CreationInfo() | ||
| 420 | |||
| 421 | name = "%s %s" % ( | ||
| 422 | self.d.getVar("SPDX_TOOL_NAME"), | ||
| 423 | self.d.getVar("SPDX_TOOL_VERSION"), | ||
| 424 | ) | ||
| 425 | tool = self.add( | ||
| 426 | oe.spdx30.Tool( | ||
| 427 | _id=self.new_spdxid("tool", name), | ||
| 428 | creationInfo=creation_info, | ||
| 429 | name=name, | ||
| 430 | ) | ||
| 431 | ) | ||
| 432 | |||
| 433 | authors = [] | ||
| 434 | for a in self.d.getVar("SPDX_AUTHORS").split(): | ||
| 435 | varname = "SPDX_AUTHORS_%s" % a | ||
| 436 | author = self.new_agent(varname, creation_info=creation_info) | ||
| 437 | |||
| 438 | if not author: | ||
| 439 | bb.fatal("Unable to find or create author %s" % a) | ||
| 440 | |||
| 441 | authors.append(author) | ||
| 442 | |||
| 443 | creation_info.created = spdx_sde(self.d) | ||
| 444 | creation_info.specVersion = self.d.getVar("SPDX_VERSION") | ||
| 445 | creation_info.createdBy = authors | ||
| 446 | creation_info.createdUsing = [tool] | ||
| 447 | |||
| 448 | return creation_info | ||
| 449 | |||
| 450 | def copy_creation_info(self, copy): | ||
| 451 | c = oe.spdx30.CreationInfo( | ||
| 452 | created=spdx_sde(self.d), | ||
| 453 | specVersion=self.d.getVar("SPDX_VERSION"), | ||
| 454 | ) | ||
| 455 | |||
| 456 | for author in copy.createdBy: | ||
| 457 | if isinstance(author, str): | ||
| 458 | c.createdBy.append(author) | ||
| 459 | else: | ||
| 460 | c.createdBy.append(author._id) | ||
| 461 | |||
| 462 | for tool in copy.createdUsing: | ||
| 463 | if isinstance(tool, str): | ||
| 464 | c.createdUsing.append(tool) | ||
| 465 | else: | ||
| 466 | c.createdUsing.append(tool._id) | ||
| 467 | |||
| 468 | return c | ||
| 469 | |||
| 470 | def new_annotation(self, subject, comment, typ): | ||
| 471 | return self.add( | ||
| 472 | oe.spdx30.Annotation( | ||
| 473 | _id=self.new_spdxid("annotation", spdxid_hash(comment, typ)), | ||
| 474 | creationInfo=self.doc.creationInfo, | ||
| 475 | annotationType=typ, | ||
| 476 | subject=subject, | ||
| 477 | statement=comment, | ||
| 478 | ) | ||
| 479 | ) | ||
| 480 | |||
| 481 | def _new_relationship( | ||
| 482 | self, | ||
| 483 | cls, | ||
| 484 | from_, | ||
| 485 | typ, | ||
| 486 | to, | ||
| 487 | *, | ||
| 488 | spdxid_name="relationship", | ||
| 489 | **props, | ||
| 490 | ): | ||
| 491 | from_ = to_list(from_) | ||
| 492 | to = to_list(to) | ||
| 493 | |||
| 494 | if not from_: | ||
| 495 | return [] | ||
| 496 | |||
| 497 | if not to: | ||
| 498 | to = [oe.spdx30.IndividualElement.NoneElement] | ||
| 499 | |||
| 500 | ret = [] | ||
| 501 | |||
| 502 | for f in from_: | ||
| 503 | hash_args = [typ, f] | ||
| 504 | for k in sorted(props.keys()): | ||
| 505 | hash_args.append(props[k]) | ||
| 506 | hash_args.extend(to) | ||
| 507 | |||
| 508 | relationship = self.add( | ||
| 509 | cls( | ||
| 510 | _id=self.new_spdxid(spdxid_name, spdxid_hash(*hash_args)), | ||
| 511 | creationInfo=self.doc.creationInfo, | ||
| 512 | from_=f, | ||
| 513 | relationshipType=typ, | ||
| 514 | to=to, | ||
| 515 | **props, | ||
| 516 | ) | ||
| 517 | ) | ||
| 518 | ret.append(relationship) | ||
| 519 | |||
| 520 | return ret | ||
| 521 | |||
| 522 | def new_relationship(self, from_, typ, to): | ||
| 523 | return self._new_relationship(oe.spdx30.Relationship, from_, typ, to) | ||
| 524 | |||
| 525 | def new_scoped_relationship(self, from_, typ, scope, to): | ||
| 526 | return self._new_relationship( | ||
| 527 | oe.spdx30.LifecycleScopedRelationship, | ||
| 528 | from_, | ||
| 529 | typ, | ||
| 530 | to, | ||
| 531 | scope=scope, | ||
| 532 | ) | ||
| 533 | |||
| 534 | def new_license_expression( | ||
| 535 | self, license_expression, license_data, license_text_map={} | ||
| 536 | ): | ||
| 537 | license_list_version = license_data["licenseListVersion"] | ||
| 538 | # SPDX 3 requires that the license list version be a semver | ||
| 539 | # MAJOR.MINOR.MICRO, but the actual license version might be | ||
| 540 | # MAJOR.MINOR on some older versions. As such, manually append a .0 | ||
| 541 | # micro version if it's missing, to keep SPDX happy | ||
| 542 | if license_list_version.count(".") < 2: | ||
| 543 | license_list_version += ".0" | ||
| 544 | |||
| 545 | spdxid = [ | ||
| 546 | "license", | ||
| 547 | license_list_version, | ||
| 548 | re.sub(r"[^a-zA-Z0-9_-]", "_", license_expression), | ||
| 549 | ] | ||
| 550 | |||
| 551 | license_text = [ | ||
| 552 | (k, license_text_map[k]) for k in sorted(license_text_map.keys()) | ||
| 553 | ] | ||
| 554 | |||
| 555 | if not license_text: | ||
| 556 | lic = self.find_filter( | ||
| 557 | oe.spdx30.simplelicensing_LicenseExpression, | ||
| 558 | simplelicensing_licenseExpression=license_expression, | ||
| 559 | simplelicensing_licenseListVersion=license_list_version, | ||
| 560 | ) | ||
| 561 | if lic is not None: | ||
| 562 | return lic | ||
| 563 | else: | ||
| 564 | spdxid.append(spdxid_hash(*(v for _, v in license_text))) | ||
| 565 | lic = self.find_by_id(self.new_spdxid(*spdxid)) | ||
| 566 | if lic is not None: | ||
| 567 | return lic | ||
| 568 | |||
| 569 | lic = self.add( | ||
| 570 | oe.spdx30.simplelicensing_LicenseExpression( | ||
| 571 | _id=self.new_spdxid(*spdxid), | ||
| 572 | creationInfo=self.doc.creationInfo, | ||
| 573 | simplelicensing_licenseExpression=license_expression, | ||
| 574 | simplelicensing_licenseListVersion=license_list_version, | ||
| 575 | ) | ||
| 576 | ) | ||
| 577 | |||
| 578 | for key, value in license_text: | ||
| 579 | lic.simplelicensing_customIdToUri.append( | ||
| 580 | oe.spdx30.DictionaryEntry(key=key, value=value) | ||
| 581 | ) | ||
| 582 | |||
| 583 | return lic | ||
| 584 | |||
| 585 | def scan_declared_licenses(self, spdx_file, filepath, license_data): | ||
| 586 | for e in spdx_file.extension: | ||
| 587 | if isinstance(e, OELicenseScannedExtension): | ||
| 588 | return | ||
| 589 | |||
| 590 | file_licenses = set() | ||
| 591 | for extracted_lic in oe.spdx_common.extract_licenses(filepath): | ||
| 592 | lic = self.new_license_expression(extracted_lic, license_data) | ||
| 593 | self.set_element_alias(lic) | ||
| 594 | file_licenses.add(lic) | ||
| 595 | |||
| 596 | self.new_relationship( | ||
| 597 | [spdx_file], | ||
| 598 | oe.spdx30.RelationshipType.hasDeclaredLicense, | ||
| 599 | [oe.sbom30.get_element_link_id(lic_alias) for lic_alias in file_licenses], | ||
| 600 | ) | ||
| 601 | spdx_file.extension.append(OELicenseScannedExtension()) | ||
| 602 | |||
| 603 | def new_file(self, _id, name, path, *, purposes=[]): | ||
| 604 | sha256_hash = bb.utils.sha256_file(path) | ||
| 605 | |||
| 606 | for f in self.by_sha256_hash.get(sha256_hash, []): | ||
| 607 | if not isinstance(f, oe.spdx30.software_File): | ||
| 608 | continue | ||
| 609 | |||
| 610 | if purposes: | ||
| 611 | new_primary = purposes[0] | ||
| 612 | new_additional = [] | ||
| 613 | |||
| 614 | if f.software_primaryPurpose: | ||
| 615 | new_additional.append(f.software_primaryPurpose) | ||
| 616 | new_additional.extend(f.software_additionalPurpose) | ||
| 617 | |||
| 618 | new_additional = sorted( | ||
| 619 | list(set(p for p in new_additional if p != new_primary)) | ||
| 620 | ) | ||
| 621 | |||
| 622 | f.software_primaryPurpose = new_primary | ||
| 623 | f.software_additionalPurpose = new_additional | ||
| 624 | |||
| 625 | if f.name != name: | ||
| 626 | for e in f.extension: | ||
| 627 | if isinstance(e, OEFileNameAliasExtension): | ||
| 628 | e.aliases.append(name) | ||
| 629 | break | ||
| 630 | else: | ||
| 631 | f.extension.append(OEFileNameAliasExtension(aliases=[name])) | ||
| 632 | |||
| 633 | return f | ||
| 634 | |||
| 635 | spdx_file = oe.spdx30.software_File( | ||
| 636 | _id=_id, | ||
| 637 | creationInfo=self.doc.creationInfo, | ||
| 638 | name=name, | ||
| 639 | ) | ||
| 640 | if purposes: | ||
| 641 | spdx_file.software_primaryPurpose = purposes[0] | ||
| 642 | spdx_file.software_additionalPurpose = purposes[1:] | ||
| 643 | |||
| 644 | spdx_file.verifiedUsing.append( | ||
| 645 | oe.spdx30.Hash( | ||
| 646 | algorithm=oe.spdx30.HashAlgorithm.sha256, | ||
| 647 | hashValue=sha256_hash, | ||
| 648 | ) | ||
| 649 | ) | ||
| 650 | |||
| 651 | return self.add(spdx_file) | ||
| 652 | |||
| 653 | def new_cve_vuln(self, cve): | ||
| 654 | v = oe.spdx30.security_Vulnerability() | ||
| 655 | v._id = self.new_spdxid("vulnerability", cve) | ||
| 656 | v.creationInfo = self.doc.creationInfo | ||
| 657 | |||
| 658 | v.externalIdentifier.append( | ||
| 659 | oe.spdx30.ExternalIdentifier( | ||
| 660 | externalIdentifierType=oe.spdx30.ExternalIdentifierType.cve, | ||
| 661 | identifier=cve, | ||
| 662 | identifierLocator=[ | ||
| 663 | f"https://cveawg.mitre.org/api/cve/{cve}", | ||
| 664 | f"https://www.cve.org/CVERecord?id={cve}", | ||
| 665 | ], | ||
| 666 | ) | ||
| 667 | ) | ||
| 668 | return self.add(v) | ||
| 669 | |||
| 670 | def new_vex_patched_relationship(self, from_, to): | ||
| 671 | return self._new_relationship( | ||
| 672 | oe.spdx30.security_VexFixedVulnAssessmentRelationship, | ||
| 673 | from_, | ||
| 674 | oe.spdx30.RelationshipType.fixedIn, | ||
| 675 | to, | ||
| 676 | spdxid_name="vex-fixed", | ||
| 677 | security_vexVersion=VEX_VERSION, | ||
| 678 | ) | ||
| 679 | |||
| 680 | def new_vex_unpatched_relationship(self, from_, to): | ||
| 681 | return self._new_relationship( | ||
| 682 | oe.spdx30.security_VexAffectedVulnAssessmentRelationship, | ||
| 683 | from_, | ||
| 684 | oe.spdx30.RelationshipType.affects, | ||
| 685 | to, | ||
| 686 | spdxid_name="vex-affected", | ||
| 687 | security_vexVersion=VEX_VERSION, | ||
| 688 | security_actionStatement="Mitigation action unknown", | ||
| 689 | ) | ||
| 690 | |||
| 691 | def new_vex_ignored_relationship(self, from_, to, *, impact_statement): | ||
| 692 | return self._new_relationship( | ||
| 693 | oe.spdx30.security_VexNotAffectedVulnAssessmentRelationship, | ||
| 694 | from_, | ||
| 695 | oe.spdx30.RelationshipType.doesNotAffect, | ||
| 696 | to, | ||
| 697 | spdxid_name="vex-not-affected", | ||
| 698 | security_vexVersion=VEX_VERSION, | ||
| 699 | security_impactStatement=impact_statement, | ||
| 700 | ) | ||
| 701 | |||
| 702 | def import_bitbake_build_objset(self): | ||
| 703 | deploy_dir_spdx = Path(self.d.getVar("DEPLOY_DIR_SPDX")) | ||
| 704 | bb_objset = load_jsonld( | ||
| 705 | self.d, deploy_dir_spdx / "bitbake.spdx.json", required=True | ||
| 706 | ) | ||
| 707 | self.doc.import_.extend(bb_objset.doc.import_) | ||
| 708 | self.update(bb_objset.objects) | ||
| 709 | |||
| 710 | return bb_objset | ||
| 711 | |||
| 712 | def import_bitbake_build(self): | ||
| 713 | def find_bitbake_build(objset): | ||
| 714 | return objset.find_filter( | ||
| 715 | oe.spdx30.build_Build, | ||
| 716 | build_buildType=SPDX_BUILD_TYPE, | ||
| 717 | ) | ||
| 718 | |||
| 719 | build = find_bitbake_build(self) | ||
| 720 | if build: | ||
| 721 | return build | ||
| 722 | |||
| 723 | bb_objset = self.import_bitbake_build_objset() | ||
| 724 | build = find_bitbake_build(bb_objset) | ||
| 725 | if build is None: | ||
| 726 | bb.fatal(f"No build found in {deploy_dir_spdx}") | ||
| 727 | |||
| 728 | return build | ||
| 729 | |||
| 730 | def new_task_build(self, name, typ): | ||
| 731 | current_task = self.d.getVar("BB_CURRENTTASK") | ||
| 732 | pn = self.d.getVar("PN") | ||
| 733 | |||
| 734 | build = self.add( | ||
| 735 | oe.spdx30.build_Build( | ||
| 736 | _id=self.new_spdxid("build", name), | ||
| 737 | creationInfo=self.doc.creationInfo, | ||
| 738 | name=f"{pn}:do_{current_task}:{name}", | ||
| 739 | build_buildType=f"{SPDX_BUILD_TYPE}/do_{current_task}/{typ}", | ||
| 740 | ) | ||
| 741 | ) | ||
| 742 | |||
| 743 | if self.d.getVar("SPDX_INCLUDE_BITBAKE_PARENT_BUILD") == "1": | ||
| 744 | bitbake_build = self.import_bitbake_build() | ||
| 745 | |||
| 746 | self.new_relationship( | ||
| 747 | [bitbake_build], | ||
| 748 | oe.spdx30.RelationshipType.ancestorOf, | ||
| 749 | [build], | ||
| 750 | ) | ||
| 751 | |||
| 752 | if self.d.getVar("SPDX_INCLUDE_BUILD_VARIABLES") == "1": | ||
| 753 | for varname in sorted(self.d.keys()): | ||
| 754 | if varname.startswith("__"): | ||
| 755 | continue | ||
| 756 | |||
| 757 | value = self.d.getVar(varname, expand=False) | ||
| 758 | |||
| 759 | # TODO: Deal with non-string values | ||
| 760 | if not isinstance(value, str): | ||
| 761 | continue | ||
| 762 | |||
| 763 | build.build_parameter.append( | ||
| 764 | oe.spdx30.DictionaryEntry(key=varname, value=value) | ||
| 765 | ) | ||
| 766 | |||
| 767 | return build | ||
| 768 | |||
| 769 | def new_archive(self, archive_name): | ||
| 770 | return self.add( | ||
| 771 | oe.spdx30.software_File( | ||
| 772 | _id=self.new_spdxid("archive", str(archive_name)), | ||
| 773 | creationInfo=self.doc.creationInfo, | ||
| 774 | name=str(archive_name), | ||
| 775 | software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.archive, | ||
| 776 | ) | ||
| 777 | ) | ||
| 778 | |||
| 779 | @classmethod | ||
| 780 | def new_objset(cls, d, name, copy_from_bitbake_doc=True): | ||
| 781 | objset = cls(d) | ||
| 782 | |||
| 783 | document = oe.spdx30.SpdxDocument( | ||
| 784 | _id=objset.new_spdxid("document", name), | ||
| 785 | name=name, | ||
| 786 | ) | ||
| 787 | |||
| 788 | document.extension.append( | ||
| 789 | OEIdAliasExtension( | ||
| 790 | alias=objset.new_alias_id( | ||
| 791 | document, | ||
| 792 | OE_DOC_ALIAS_PREFIX + d.getVar("PN") + "/" + name + "/", | ||
| 793 | ), | ||
| 794 | ) | ||
| 795 | ) | ||
| 796 | objset.doc = document | ||
| 797 | objset.add_index(document) | ||
| 798 | |||
| 799 | if copy_from_bitbake_doc: | ||
| 800 | bb_objset = objset.import_bitbake_build_objset() | ||
| 801 | document.creationInfo = objset.copy_creation_info( | ||
| 802 | bb_objset.doc.creationInfo | ||
| 803 | ) | ||
| 804 | else: | ||
| 805 | document.creationInfo = objset.new_creation_info() | ||
| 806 | |||
| 807 | return objset | ||
| 808 | |||
| 809 | def expand_collection(self, *, add_objectsets=[]): | ||
| 810 | """ | ||
| 811 | Expands a collection to pull in all missing elements | ||
| 812 | |||
| 813 | Returns the set of ids that could not be found to link into the document | ||
| 814 | """ | ||
| 815 | missing_spdxids = set() | ||
| 816 | imports = {e.externalSpdxId: e for e in self.doc.import_} | ||
| 817 | |||
| 818 | def merge_doc(other): | ||
| 819 | nonlocal imports | ||
| 820 | |||
| 821 | for e in other.doc.import_: | ||
| 822 | if not e.externalSpdxId in imports: | ||
| 823 | imports[e.externalSpdxId] = e | ||
| 824 | |||
| 825 | self.objects |= other.objects | ||
| 826 | |||
| 827 | for o in add_objectsets: | ||
| 828 | merge_doc(o) | ||
| 829 | |||
| 830 | needed_spdxids = self.link() | ||
| 831 | provided_spdxids = set(self.obj_by_id.keys()) | ||
| 832 | |||
| 833 | while True: | ||
| 834 | import_spdxids = set(imports.keys()) | ||
| 835 | searching_spdxids = ( | ||
| 836 | needed_spdxids - provided_spdxids - missing_spdxids - import_spdxids | ||
| 837 | ) | ||
| 838 | if not searching_spdxids: | ||
| 839 | break | ||
| 840 | |||
| 841 | spdxid = searching_spdxids.pop() | ||
| 842 | bb.debug( | ||
| 843 | 1, | ||
| 844 | f"Searching for {spdxid}. Remaining: {len(searching_spdxids)}, Total: {len(provided_spdxids)}, Missing: {len(missing_spdxids)}, Imports: {len(import_spdxids)}", | ||
| 845 | ) | ||
| 846 | dep_objset, dep_path = find_by_spdxid(self.d, spdxid) | ||
| 847 | |||
| 848 | if dep_objset: | ||
| 849 | dep_provided = set(dep_objset.obj_by_id.keys()) | ||
| 850 | if spdxid not in dep_provided: | ||
| 851 | bb.fatal(f"{spdxid} not found in {dep_path}") | ||
| 852 | provided_spdxids |= dep_provided | ||
| 853 | needed_spdxids |= dep_objset.missing_ids | ||
| 854 | merge_doc(dep_objset) | ||
| 855 | else: | ||
| 856 | missing_spdxids.add(spdxid) | ||
| 857 | |||
| 858 | self.doc.import_ = sorted(imports.values(), key=lambda e: e.externalSpdxId) | ||
| 859 | bb.debug(1, "Linking...") | ||
| 860 | self.link() | ||
| 861 | |||
| 862 | # Manually go through all of the simplelicensing_customIdToUri DictionaryEntry | ||
| 863 | # items and resolve any aliases to actual objects. | ||
| 864 | for lic in self.foreach_type(oe.spdx30.simplelicensing_LicenseExpression): | ||
| 865 | for d in lic.simplelicensing_customIdToUri: | ||
| 866 | if d.value.startswith(OE_ALIAS_PREFIX): | ||
| 867 | obj = self.find_by_id(d.value) | ||
| 868 | if obj is not None: | ||
| 869 | d.value = obj._id | ||
| 870 | else: | ||
| 871 | self.missing_ids.add(d.value) | ||
| 872 | |||
| 873 | self.missing_ids -= set(imports.keys()) | ||
| 874 | return self.missing_ids | ||
| 875 | |||
| 876 | |||
| 877 | def load_jsonld(d, path, required=False): | ||
| 878 | deserializer = oe.spdx30.JSONLDDeserializer() | ||
| 879 | objset = ObjectSet(d) | ||
| 880 | try: | ||
| 881 | with path.open("rb") as f: | ||
| 882 | deserializer.read(f, objset) | ||
| 883 | except FileNotFoundError: | ||
| 884 | if required: | ||
| 885 | bb.fatal("No SPDX document named %s found" % path) | ||
| 886 | return None | ||
| 887 | |||
| 888 | if not objset.doc: | ||
| 889 | bb.fatal("SPDX Document %s has no SPDXDocument element" % path) | ||
| 890 | return None | ||
| 891 | |||
| 892 | objset.objects.remove(objset.doc) | ||
| 893 | return objset | ||
| 894 | |||
| 895 | |||
| 896 | def jsonld_arch_path(d, arch, subdir, name, deploydir=None): | ||
| 897 | if deploydir is None: | ||
| 898 | deploydir = Path(d.getVar("DEPLOY_DIR_SPDX")) | ||
| 899 | return deploydir / arch / subdir / (name + ".spdx.json") | ||
| 900 | |||
| 901 | |||
| 902 | def jsonld_hash_path(h): | ||
| 903 | return Path("by-spdxid-hash") / h[:2], h | ||
| 904 | |||
| 905 | |||
| 906 | def load_jsonld_by_arch(d, arch, subdir, name, *, required=False): | ||
| 907 | path = jsonld_arch_path(d, arch, subdir, name) | ||
| 908 | objset = load_jsonld(d, path, required=required) | ||
| 909 | if objset is not None: | ||
| 910 | return (objset, path) | ||
| 911 | return (None, None) | ||
| 912 | |||
| 913 | |||
| 914 | def find_jsonld(d, subdir, name, *, required=False): | ||
| 915 | package_archs = d.getVar("SPDX_MULTILIB_SSTATE_ARCHS").split() | ||
| 916 | package_archs.reverse() | ||
| 917 | |||
| 918 | for arch in package_archs: | ||
| 919 | objset, path = load_jsonld_by_arch(d, arch, subdir, name) | ||
| 920 | if objset is not None: | ||
| 921 | return (objset, path) | ||
| 922 | |||
| 923 | if required: | ||
| 924 | bb.fatal("Could not find a %s SPDX document named %s" % (subdir, name)) | ||
| 925 | |||
| 926 | return (None, None) | ||
| 927 | |||
| 928 | |||
| 929 | def write_jsonld_doc(d, objset, dest): | ||
| 930 | if not isinstance(objset, ObjectSet): | ||
| 931 | bb.fatal("Only an ObjsetSet can be serialized") | ||
| 932 | return | ||
| 933 | |||
| 934 | if not objset.doc: | ||
| 935 | bb.fatal("ObjectSet is missing a SpdxDocument") | ||
| 936 | return | ||
| 937 | |||
| 938 | objset.doc.rootElement = sorted(list(set(objset.doc.rootElement))) | ||
| 939 | objset.doc.profileConformance = sorted( | ||
| 940 | list( | ||
| 941 | getattr(oe.spdx30.ProfileIdentifierType, p) | ||
| 942 | for p in d.getVar("SPDX_PROFILES").split() | ||
| 943 | ) | ||
| 944 | ) | ||
| 945 | |||
| 946 | dest.parent.mkdir(exist_ok=True, parents=True) | ||
| 947 | |||
| 948 | if d.getVar("SPDX_PRETTY") == "1": | ||
| 949 | serializer = oe.spdx30.JSONLDSerializer( | ||
| 950 | indent=2, | ||
| 951 | ) | ||
| 952 | else: | ||
| 953 | serializer = oe.spdx30.JSONLDInlineSerializer() | ||
| 954 | |||
| 955 | objset.objects.add(objset.doc) | ||
| 956 | with dest.open("wb") as f: | ||
| 957 | serializer.write(objset, f, force_at_graph=True) | ||
| 958 | objset.objects.remove(objset.doc) | ||
| 959 | |||
| 960 | |||
| 961 | def write_recipe_jsonld_doc( | ||
| 962 | d, | ||
| 963 | objset, | ||
| 964 | subdir, | ||
| 965 | deploydir, | ||
| 966 | *, | ||
| 967 | create_spdx_id_links=True, | ||
| 968 | ): | ||
| 969 | pkg_arch = d.getVar("SSTATE_PKGARCH") | ||
| 970 | |||
| 971 | dest = jsonld_arch_path(d, pkg_arch, subdir, objset.doc.name, deploydir=deploydir) | ||
| 972 | |||
| 973 | def link_id(_id): | ||
| 974 | hash_path = jsonld_hash_path(hash_id(_id)) | ||
| 975 | |||
| 976 | link_name = jsonld_arch_path( | ||
| 977 | d, | ||
| 978 | pkg_arch, | ||
| 979 | *hash_path, | ||
| 980 | deploydir=deploydir, | ||
| 981 | ) | ||
| 982 | try: | ||
| 983 | link_name.parent.mkdir(exist_ok=True, parents=True) | ||
| 984 | link_name.symlink_to(os.path.relpath(dest, link_name.parent)) | ||
| 985 | except: | ||
| 986 | target = link_name.readlink() | ||
| 987 | bb.warn( | ||
| 988 | f"Unable to link {_id} in {dest} as {link_name}. Already points to {target}" | ||
| 989 | ) | ||
| 990 | raise | ||
| 991 | |||
| 992 | return hash_path[-1] | ||
| 993 | |||
| 994 | objset.add_aliases() | ||
| 995 | |||
| 996 | try: | ||
| 997 | if create_spdx_id_links: | ||
| 998 | alias_ext = get_alias(objset.doc) | ||
| 999 | if alias_ext is not None and alias_ext.alias: | ||
| 1000 | alias_ext.link_name = link_id(alias_ext.alias) | ||
| 1001 | |||
| 1002 | finally: | ||
| 1003 | # It is really helpful for debugging if the JSON document is written | ||
| 1004 | # out, so always do that even if there is an error making the links | ||
| 1005 | write_jsonld_doc(d, objset, dest) | ||
| 1006 | |||
| 1007 | |||
| 1008 | def find_root_obj_in_jsonld(d, subdir, fn_name, obj_type, **attr_filter): | ||
| 1009 | objset, fn = find_jsonld(d, subdir, fn_name, required=True) | ||
| 1010 | |||
| 1011 | spdx_obj = objset.find_root(obj_type, **attr_filter) | ||
| 1012 | if not spdx_obj: | ||
| 1013 | bb.fatal("No root %s found in %s" % (obj_type.__name__, fn)) | ||
| 1014 | |||
| 1015 | return spdx_obj, objset | ||
| 1016 | |||
| 1017 | |||
| 1018 | def load_obj_in_jsonld(d, arch, subdir, fn_name, obj_type, **attr_filter): | ||
| 1019 | objset, fn = load_jsonld_by_arch(d, arch, subdir, fn_name, required=True) | ||
| 1020 | |||
| 1021 | spdx_obj = objset.find_filter(obj_type, **attr_filter) | ||
| 1022 | if not spdx_obj: | ||
| 1023 | bb.fatal("No %s found in %s" % (obj_type.__name__, fn)) | ||
| 1024 | |||
| 1025 | return spdx_obj, objset | ||
| 1026 | |||
| 1027 | |||
| 1028 | def find_by_spdxid(d, spdxid, *, required=False): | ||
| 1029 | if spdxid.startswith(OE_ALIAS_PREFIX): | ||
| 1030 | h = spdxid[len(OE_ALIAS_PREFIX) :].split("/", 1)[0] | ||
| 1031 | return find_jsonld(d, *jsonld_hash_path(h), required=required) | ||
| 1032 | return find_jsonld(d, *jsonld_hash_path(hash_id(spdxid)), required=required) | ||
| 1033 | |||
| 1034 | |||
| 1035 | def create_sbom(d, name, root_elements, add_objectsets=[]): | ||
| 1036 | objset = ObjectSet.new_objset(d, name) | ||
| 1037 | |||
| 1038 | sbom = objset.add( | ||
| 1039 | oe.spdx30.software_Sbom( | ||
| 1040 | _id=objset.new_spdxid("sbom", name), | ||
| 1041 | name=name, | ||
| 1042 | creationInfo=objset.doc.creationInfo, | ||
| 1043 | software_sbomType=[oe.spdx30.software_SbomType.build], | ||
| 1044 | rootElement=root_elements, | ||
| 1045 | ) | ||
| 1046 | ) | ||
| 1047 | |||
| 1048 | missing_spdxids = objset.expand_collection(add_objectsets=add_objectsets) | ||
| 1049 | if missing_spdxids: | ||
| 1050 | bb.warn( | ||
| 1051 | "The following SPDX IDs were unable to be resolved:\n " | ||
| 1052 | + "\n ".join(sorted(list(missing_spdxids))) | ||
| 1053 | ) | ||
| 1054 | |||
| 1055 | # Filter out internal extensions from final SBoMs | ||
| 1056 | objset.remove_internal_extensions() | ||
| 1057 | |||
| 1058 | # SBoM should be the only root element of the document | ||
| 1059 | objset.doc.rootElement = [sbom] | ||
| 1060 | |||
| 1061 | # De-duplicate licenses | ||
| 1062 | unique = set() | ||
| 1063 | dedup = {} | ||
| 1064 | for lic in objset.foreach_type(oe.spdx30.simplelicensing_LicenseExpression): | ||
| 1065 | for u in unique: | ||
| 1066 | if ( | ||
| 1067 | u.simplelicensing_licenseExpression | ||
| 1068 | == lic.simplelicensing_licenseExpression | ||
| 1069 | and u.simplelicensing_licenseListVersion | ||
| 1070 | == lic.simplelicensing_licenseListVersion | ||
| 1071 | ): | ||
| 1072 | dedup[lic] = u | ||
| 1073 | break | ||
| 1074 | else: | ||
| 1075 | unique.add(lic) | ||
| 1076 | |||
| 1077 | if dedup: | ||
| 1078 | for rel in objset.foreach_filter( | ||
| 1079 | oe.spdx30.Relationship, | ||
| 1080 | relationshipType=oe.spdx30.RelationshipType.hasDeclaredLicense, | ||
| 1081 | ): | ||
| 1082 | rel.to = [dedup.get(to, to) for to in rel.to] | ||
| 1083 | |||
| 1084 | for rel in objset.foreach_filter( | ||
| 1085 | oe.spdx30.Relationship, | ||
| 1086 | relationshipType=oe.spdx30.RelationshipType.hasConcludedLicense, | ||
| 1087 | ): | ||
| 1088 | rel.to = [dedup.get(to, to) for to in rel.to] | ||
| 1089 | |||
| 1090 | for k, v in dedup.items(): | ||
| 1091 | bb.debug(1, f"Removing duplicate License {k._id} -> {v._id}") | ||
| 1092 | objset.objects.remove(k) | ||
| 1093 | |||
| 1094 | objset.create_index() | ||
| 1095 | |||
| 1096 | return objset, sbom | ||
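As a hedged illustration of the alias mechanism documented in OEIdAliasExtension above (all IDs below are made up), get_element_link_id() prefers an element's alias and falls back to its real SPDX ID:

    # Illustrative sketch only; the element type and URIs are hypothetical examples
    import oe.spdx30
    from oe.sbom30 import OEIdAliasExtension, get_element_link_id

    f = oe.spdx30.software_File(_id="https://example.com/doc/<unihash>/file/0")

    # Without an alias extension, the unique (unihash-bearing) SPDX ID is used
    assert get_element_link_id(f) == f._id

    # With an alias, other recipes can refer to the stable alias instead; the
    # final SBoM generation later rewrites it back to the real SPDX ID
    alias = "http://spdxdocs.org/openembedded-alias/by-doc-hash/0a1b2c/file/0"
    f.extension.append(OEIdAliasExtension(alias=alias))
    assert get_element_link_id(f) == alias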
diff --git a/meta/lib/oe/sdk.py b/meta/lib/oe/sdk.py deleted file mode 100644 index 9fe0fbb752..0000000000 --- a/meta/lib/oe/sdk.py +++ /dev/null | |||
| @@ -1,158 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | from abc import ABCMeta, abstractmethod | ||
| 8 | from oe.utils import execute_pre_post_process | ||
| 9 | from oe.manifest import * | ||
| 10 | from oe.package_manager import * | ||
| 11 | import os | ||
| 12 | import traceback | ||
| 13 | |||
| 14 | class Sdk(object, metaclass=ABCMeta): | ||
| 15 | def __init__(self, d, manifest_dir): | ||
| 16 | self.d = d | ||
| 17 | self.sdk_output = self.d.getVar('SDK_OUTPUT') | ||
| 18 | self.sdk_native_path = self.d.getVar('SDKPATHNATIVE').strip('/') | ||
| 19 | self.target_path = self.d.getVar('SDKTARGETSYSROOT').strip('/') | ||
| 20 | self.sysconfdir = self.d.getVar('sysconfdir').strip('/') | ||
| 21 | |||
| 22 | self.sdk_target_sysroot = os.path.join(self.sdk_output, self.target_path) | ||
| 23 | self.sdk_host_sysroot = self.sdk_output | ||
| 24 | |||
| 25 | if manifest_dir is None: | ||
| 26 | self.manifest_dir = self.d.getVar("SDK_DIR") | ||
| 27 | else: | ||
| 28 | self.manifest_dir = manifest_dir | ||
| 29 | |||
| 30 | self.remove(self.sdk_output, True) | ||
| 31 | |||
| 32 | self.install_order = Manifest.INSTALL_ORDER | ||
| 33 | |||
| 34 | @abstractmethod | ||
| 35 | def _populate(self): | ||
| 36 | pass | ||
| 37 | |||
| 38 | def populate(self): | ||
| 39 | self.mkdirhier(self.sdk_output) | ||
| 40 | |||
| 41 | # call backend dependent implementation | ||
| 42 | self._populate() | ||
| 43 | |||
| 44 | # Don't ship any libGL in the SDK | ||
| 45 | self.remove(os.path.join(self.sdk_output, self.sdk_native_path, | ||
| 46 | self.d.getVar('libdir_nativesdk').strip('/'), | ||
| 47 | "libGL*")) | ||
| 48 | |||
| 49 | # Fix or remove broken .la files | ||
| 50 | self.remove(os.path.join(self.sdk_output, self.sdk_native_path, | ||
| 51 | self.d.getVar('libdir_nativesdk').strip('/'), | ||
| 52 | "*.la")) | ||
| 53 | |||
| 54 | # Link the ld.so.cache file into the host's filesystem | ||
| 55 | link_name = os.path.join(self.sdk_output, self.sdk_native_path, | ||
| 56 | self.sysconfdir, "ld.so.cache") | ||
| 57 | self.mkdirhier(os.path.dirname(link_name)) | ||
| 58 | os.symlink("/etc/ld.so.cache", link_name) | ||
| 59 | |||
| 60 | execute_pre_post_process(self.d, self.d.getVar('SDK_POSTPROCESS_COMMAND')) | ||
| 61 | |||
| 62 | def movefile(self, sourcefile, destdir): | ||
| 63 | try: | ||
| 64 | # FIXME: this check of movefile's return code to None should be | ||
| 65 | # fixed within the function to use only exceptions to signal when | ||
| 66 | # something goes wrong | ||
| 67 | if (bb.utils.movefile(sourcefile, destdir) == None): | ||
| 68 | raise OSError("moving %s to %s failed" | ||
| 69 | %(sourcefile, destdir)) | ||
| 70 | #FIXME: using umbrella exc catching because bb.utils method raises it | ||
| 71 | except Exception as e: | ||
| 72 | bb.debug(1, "printing the stack trace\n %s" %traceback.format_exc()) | ||
| 73 | bb.fatal("unable to place %s in final SDK location" % sourcefile) | ||
| 74 | |||
| 75 | def mkdirhier(self, dirpath): | ||
| 76 | try: | ||
| 77 | bb.utils.mkdirhier(dirpath) | ||
| 78 | except OSError as e: | ||
| 79 | bb.debug(1, "printing the stack trace\n %s" %traceback.format_exc()) | ||
| 80 | bb.fatal("cannot make dir for SDK: %s" % dirpath) | ||
| 81 | |||
| 82 | def remove(self, path, recurse=False): | ||
| 83 | try: | ||
| 84 | bb.utils.remove(path, recurse) | ||
| 85 | #FIXME: using umbrella exc catching because bb.utils method raises it | ||
| 86 | except Exception as e: | ||
| 87 | bb.debug(1, "printing the stack trace\n %s" %traceback.format_exc()) | ||
| 88 | bb.warn("cannot remove SDK dir: %s" % path) | ||
| 89 | |||
| 90 | def install_locales(self, pm): | ||
| 91 | linguas = self.d.getVar("SDKIMAGE_LINGUAS") | ||
| 92 | if linguas: | ||
| 93 | import fnmatch | ||
| 94 | # Install the binary locales | ||
| 95 | if linguas == "all": | ||
| 96 | pm.install_glob("nativesdk-glibc-binary-localedata-*.utf-8", sdk=True) | ||
| 97 | else: | ||
| 98 | pm.install(["nativesdk-glibc-binary-localedata-%s.utf-8" % \ | ||
| 99 | lang for lang in linguas.split()]) | ||
| 100 | # Generate a locale archive of them | ||
| 101 | target_arch = self.d.getVar('SDK_ARCH') | ||
| 102 | rootfs = oe.path.join(self.sdk_host_sysroot, self.sdk_native_path) | ||
| 103 | localedir = oe.path.join(rootfs, self.d.getVar("libdir_nativesdk"), "locale") | ||
| 104 | generate_locale_archive(self.d, rootfs, target_arch, localedir) | ||
| 105 | # And now delete the binary locales | ||
| 106 | pkgs = fnmatch.filter(pm.list_installed(), "nativesdk-glibc-binary-localedata-*.utf-8") | ||
| 107 | pm.remove(pkgs) | ||
| 108 | else: | ||
| 109 | # No linguas so do nothing | ||
| 110 | pass | ||
| 111 | |||
| 112 | |||
| 113 | def sdk_list_installed_packages(d, target, rootfs_dir=None): | ||
| 114 | if rootfs_dir is None: | ||
| 115 | sdk_output = d.getVar('SDK_OUTPUT') | ||
| 116 | target_path = d.getVar('SDKTARGETSYSROOT').strip('/') | ||
| 117 | |||
| 118 | rootfs_dir = [sdk_output, os.path.join(sdk_output, target_path)][target is True] | ||
| 119 | |||
| 120 | if target is False: | ||
| 121 | ipkgconf_sdk_target = d.getVar("IPKGCONF_SDK") | ||
| 122 | d.setVar("IPKGCONF_TARGET", ipkgconf_sdk_target) | ||
| 123 | |||
| 124 | img_type = d.getVar('IMAGE_PKGTYPE') | ||
| 125 | import importlib | ||
| 126 | cls = importlib.import_module('oe.package_manager.' + img_type) | ||
| 127 | return cls.PMPkgsList(d, rootfs_dir).list_pkgs() | ||
| 128 | |||
| 129 | def populate_sdk(d, manifest_dir=None): | ||
| 130 | env_bkp = os.environ.copy() | ||
| 131 | |||
| 132 | img_type = d.getVar('IMAGE_PKGTYPE') | ||
| 133 | import importlib | ||
| 134 | cls = importlib.import_module('oe.package_manager.' + img_type + '.sdk') | ||
| 135 | cls.PkgSdk(d, manifest_dir).populate() | ||
| 136 | |||
| 137 | os.environ.clear() | ||
| 138 | os.environ.update(env_bkp) | ||
| 139 | |||
| 140 | def get_extra_sdkinfo(sstate_dir): | ||
| 141 | """ | ||
| 142 | This function is used to generate the target and host manifest file packages of the eSDK. | ||
| 143 | """ | ||
| 144 | import math | ||
| 145 | |||
| 146 | extra_info = {} | ||
| 147 | extra_info['tasksizes'] = {} | ||
| 148 | extra_info['filesizes'] = {} | ||
| 149 | for root, _, files in os.walk(sstate_dir): | ||
| 150 | for fn in files: | ||
| 151 | # Note that this makes an assumption about the sstate filenames | ||
| 152 | if '.tar.' in fn and not fn.endswith('.siginfo'): | ||
| 153 | fsize = int(math.ceil(float(os.path.getsize(os.path.join(root, fn))) / 1024)) | ||
| 154 | task = fn.rsplit(':',1)[1].split('_',1)[1].split(',')[0] | ||
| 155 | origtotal = extra_info['tasksizes'].get(task, 0) | ||
| 156 | extra_info['tasksizes'][task] = origtotal + fsize | ||
| 157 | extra_info['filesizes'][fn] = fsize | ||
| 158 | return extra_info | ||
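For clarity on what the eSDK manifest generation consumes, this is a minimal sketch of the return shape of get_extra_sdkinfo() (the sstate-cache path is hypothetical; sizes are rounded-up KiB as computed above):

    from oe.sdk import get_extra_sdkinfo

    info = get_extra_sdkinfo("/path/to/build/sstate-cache")
    # info looks like:
    # {
    #     'tasksizes': {<task suffix parsed from the archive name>: <total KiB>, ...},
    #     'filesizes': {<sstate archive filename>: <KiB>, ...},
    # }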
diff --git a/meta/lib/oe/spdx.py b/meta/lib/oe/spdx.py deleted file mode 100644 index 7aaf2af5ed..0000000000 --- a/meta/lib/oe/spdx.py +++ /dev/null | |||
| @@ -1,357 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | # | ||
| 8 | # This library is intended to capture the JSON SPDX specification in a | ||
| 9 | # type-safe manner. It is not intended to encode any OE-specific behaviors; | ||
| 10 | # see sbom.py for that. | ||
| 11 | # | ||
| 12 | # The official SPDX spec document doesn't cover the JSON syntax for | ||
| 13 | # particular configurations, which can make it hard to determine what the JSON | ||
| 14 | # syntax should be. I've found it is actually much simpler to read the official | ||
| 15 | # SPDX JSON schema which can be found here: https://github.com/spdx/spdx-spec | ||
| 16 | # in schemas/spdx-schema.json | ||
| 17 | # | ||
| 18 | |||
| 19 | import hashlib | ||
| 20 | import itertools | ||
| 21 | import json | ||
| 22 | |||
| 23 | SPDX_VERSION = "2.2" | ||
| 24 | |||
| 25 | |||
| 26 | # | ||
| 27 | # The following are the support classes that are used to implement SPDX object | ||
| 28 | # | ||
| 29 | |||
| 30 | class _Property(object): | ||
| 31 | """ | ||
| 32 | A generic SPDX object property. The different types will derive from this | ||
| 33 | class | ||
| 34 | """ | ||
| 35 | |||
| 36 | def __init__(self, *, default=None): | ||
| 37 | self.default = default | ||
| 38 | |||
| 39 | def setdefault(self, dest, name): | ||
| 40 | if self.default is not None: | ||
| 41 | dest.setdefault(name, self.default) | ||
| 42 | |||
| 43 | |||
| 44 | class _String(_Property): | ||
| 45 | """ | ||
| 46 | A scalar string property for an SPDX object | ||
| 47 | """ | ||
| 48 | |||
| 49 | def __init__(self, **kwargs): | ||
| 50 | super().__init__(**kwargs) | ||
| 51 | |||
| 52 | def set_property(self, attrs, name): | ||
| 53 | def get_helper(obj): | ||
| 54 | return obj._spdx[name] | ||
| 55 | |||
| 56 | def set_helper(obj, value): | ||
| 57 | obj._spdx[name] = value | ||
| 58 | |||
| 59 | def del_helper(obj): | ||
| 60 | del obj._spdx[name] | ||
| 61 | |||
| 62 | attrs[name] = property(get_helper, set_helper, del_helper) | ||
| 63 | |||
| 64 | def init(self, source): | ||
| 65 | return source | ||
| 66 | |||
| 67 | |||
| 68 | class _Object(_Property): | ||
| 69 | """ | ||
| 70 | A scalar SPDX object property of a SPDX object | ||
| 71 | """ | ||
| 72 | |||
| 73 | def __init__(self, cls, **kwargs): | ||
| 74 | super().__init__(**kwargs) | ||
| 75 | self.cls = cls | ||
| 76 | |||
| 77 | def set_property(self, attrs, name): | ||
| 78 | def get_helper(obj): | ||
| 79 | if not name in obj._spdx: | ||
| 80 | obj._spdx[name] = self.cls() | ||
| 81 | return obj._spdx[name] | ||
| 82 | |||
| 83 | def set_helper(obj, value): | ||
| 84 | obj._spdx[name] = value | ||
| 85 | |||
| 86 | def del_helper(obj): | ||
| 87 | del obj._spdx[name] | ||
| 88 | |||
| 89 | attrs[name] = property(get_helper, set_helper) | ||
| 90 | |||
| 91 | def init(self, source): | ||
| 92 | return self.cls(**source) | ||
| 93 | |||
| 94 | |||
| 95 | class _ListProperty(_Property): | ||
| 96 | """ | ||
| 97 | A list of SPDX properties | ||
| 98 | """ | ||
| 99 | |||
| 100 | def __init__(self, prop, **kwargs): | ||
| 101 | super().__init__(**kwargs) | ||
| 102 | self.prop = prop | ||
| 103 | |||
| 104 | def set_property(self, attrs, name): | ||
| 105 | def get_helper(obj): | ||
| 106 | if not name in obj._spdx: | ||
| 107 | obj._spdx[name] = [] | ||
| 108 | return obj._spdx[name] | ||
| 109 | |||
| 110 | def set_helper(obj, value): | ||
| 111 | obj._spdx[name] = list(value) | ||
| 112 | |||
| 113 | def del_helper(obj): | ||
| 114 | del obj._spdx[name] | ||
| 115 | |||
| 116 | attrs[name] = property(get_helper, set_helper, del_helper) | ||
| 117 | |||
| 118 | def init(self, source): | ||
| 119 | return [self.prop.init(o) for o in source] | ||
| 120 | |||
| 121 | |||
| 122 | class _StringList(_ListProperty): | ||
| 123 | """ | ||
| 124 | A list of strings as a property for an SPDX object | ||
| 125 | """ | ||
| 126 | |||
| 127 | def __init__(self, **kwargs): | ||
| 128 | super().__init__(_String(), **kwargs) | ||
| 129 | |||
| 130 | |||
| 131 | class _ObjectList(_ListProperty): | ||
| 132 | """ | ||
| 133 | A list of SPDX objects as a property for an SPDX object | ||
| 134 | """ | ||
| 135 | |||
| 136 | def __init__(self, cls, **kwargs): | ||
| 137 | super().__init__(_Object(cls), **kwargs) | ||
| 138 | |||
| 139 | |||
| 140 | class MetaSPDXObject(type): | ||
| 141 | """ | ||
| 142 | A metaclass that allows properties (anything derived from a _Property | ||
| 143 | class) to be defined for a SPDX object | ||
| 144 | """ | ||
| 145 | def __new__(mcls, name, bases, attrs): | ||
| 146 | attrs["_properties"] = {} | ||
| 147 | |||
| 148 | for key in attrs.keys(): | ||
| 149 | if isinstance(attrs[key], _Property): | ||
| 150 | prop = attrs[key] | ||
| 151 | attrs["_properties"][key] = prop | ||
| 152 | prop.set_property(attrs, key) | ||
| 153 | |||
| 154 | return super().__new__(mcls, name, bases, attrs) | ||
| 155 | |||
| 156 | |||
| 157 | class SPDXObject(metaclass=MetaSPDXObject): | ||
| 158 | """ | ||
| 159 | The base SPDX object; all SPDX spec classes must derive from this class | ||
| 160 | """ | ||
| 161 | def __init__(self, **d): | ||
| 162 | self._spdx = {} | ||
| 163 | |||
| 164 | for name, prop in self._properties.items(): | ||
| 165 | prop.setdefault(self._spdx, name) | ||
| 166 | if name in d: | ||
| 167 | self._spdx[name] = prop.init(d[name]) | ||
| 168 | |||
| 169 | def serializer(self): | ||
| 170 | return self._spdx | ||
| 171 | |||
| 172 | def __setattr__(self, name, value): | ||
| 173 | if name in self._properties or name == "_spdx": | ||
| 174 | super().__setattr__(name, value) | ||
| 175 | return | ||
| 176 | raise KeyError("%r is not a valid SPDX property" % name) | ||
| 177 | |||
| 178 | # | ||
| 179 | # These are the SPDX objects implemented from the spec. The *only* properties | ||
| 180 | # that can be added to these objects are ones directly specified in the SPDX | ||
| 181 | # spec, however you may add helper functions to make operations easier. | ||
| 182 | # | ||
| 183 | # Defaults should *only* be specified if the SPDX spec says there is a certain | ||
| 184 | # required value for a field (e.g. dataLicense), or if the field is mandatory | ||
| 185 | # and has some sane "this field is unknown" value (e.g. "NOASSERTION"). | ||
| 186 | # | ||
| 187 | |||
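A minimal, hypothetical sketch of how the declarative property classes above are used (the values are invented; the SPDXPackage and SPDXChecksum classes it relies on are defined just below):

    # Defaults declared on the class (e.g. downloadLocation) are applied at
    # construction time, and attributes not declared in the spec classes are
    # rejected by SPDXObject.__setattr__.
    pkg = SPDXPackage(name="example-pkg", versionInfo="1.0")
    pkg.checksums.append(
        SPDXChecksum(algorithm="SHA256", checksumValue="<sha256 hex digest>")
    )

    assert pkg.downloadLocation == "NOASSERTION"        # spec-mandated default
    assert pkg.licenseInfoFromFiles == ["NOASSERTION"]
    pkg.homepage = "https://example.com"
    # pkg.not_in_spec = "x"   would raise KeyError via SPDXObject.__setattr__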
| 188 | class SPDXAnnotation(SPDXObject): | ||
| 189 | annotationDate = _String() | ||
| 190 | annotationType = _String() | ||
| 191 | annotator = _String() | ||
| 192 | comment = _String() | ||
| 193 | |||
| 194 | class SPDXChecksum(SPDXObject): | ||
| 195 | algorithm = _String() | ||
| 196 | checksumValue = _String() | ||
| 197 | |||
| 198 | |||
| 199 | class SPDXRelationship(SPDXObject): | ||
| 200 | spdxElementId = _String() | ||
| 201 | relatedSpdxElement = _String() | ||
| 202 | relationshipType = _String() | ||
| 203 | comment = _String() | ||
| 204 | annotations = _ObjectList(SPDXAnnotation) | ||
| 205 | |||
| 206 | |||
| 207 | class SPDXExternalReference(SPDXObject): | ||
| 208 | referenceCategory = _String() | ||
| 209 | referenceType = _String() | ||
| 210 | referenceLocator = _String() | ||
| 211 | |||
| 212 | |||
| 213 | class SPDXPackageVerificationCode(SPDXObject): | ||
| 214 | packageVerificationCodeValue = _String() | ||
| 215 | packageVerificationCodeExcludedFiles = _StringList() | ||
| 216 | |||
| 217 | |||
| 218 | class SPDXPackage(SPDXObject): | ||
| 219 | ALLOWED_CHECKSUMS = [ | ||
| 220 | "SHA1", | ||
| 221 | "SHA224", | ||
| 222 | "SHA256", | ||
| 223 | "SHA384", | ||
| 224 | "SHA512", | ||
| 225 | "MD2", | ||
| 226 | "MD4", | ||
| 227 | "MD5", | ||
| 228 | "MD6", | ||
| 229 | ] | ||
| 230 | |||
| 231 | name = _String() | ||
| 232 | SPDXID = _String() | ||
| 233 | versionInfo = _String() | ||
| 234 | downloadLocation = _String(default="NOASSERTION") | ||
| 235 | supplier = _String(default="NOASSERTION") | ||
| 236 | homepage = _String() | ||
| 237 | licenseConcluded = _String(default="NOASSERTION") | ||
| 238 | licenseDeclared = _String(default="NOASSERTION") | ||
| 239 | summary = _String() | ||
| 240 | description = _String() | ||
| 241 | sourceInfo = _String() | ||
| 242 | copyrightText = _String(default="NOASSERTION") | ||
| 243 | licenseInfoFromFiles = _StringList(default=["NOASSERTION"]) | ||
| 244 | externalRefs = _ObjectList(SPDXExternalReference) | ||
| 245 | packageVerificationCode = _Object(SPDXPackageVerificationCode) | ||
| 246 | hasFiles = _StringList() | ||
| 247 | packageFileName = _String() | ||
| 248 | annotations = _ObjectList(SPDXAnnotation) | ||
| 249 | checksums = _ObjectList(SPDXChecksum) | ||
| 250 | |||
| 251 | |||
| 252 | class SPDXFile(SPDXObject): | ||
| 253 | SPDXID = _String() | ||
| 254 | fileName = _String() | ||
| 255 | licenseConcluded = _String(default="NOASSERTION") | ||
| 256 | copyrightText = _String(default="NOASSERTION") | ||
| 257 | licenseInfoInFiles = _StringList(default=["NOASSERTION"]) | ||
| 258 | checksums = _ObjectList(SPDXChecksum) | ||
| 259 | fileTypes = _StringList() | ||
| 260 | |||
| 261 | |||
| 262 | class SPDXCreationInfo(SPDXObject): | ||
| 263 | created = _String() | ||
| 264 | licenseListVersion = _String() | ||
| 265 | comment = _String() | ||
| 266 | creators = _StringList() | ||
| 267 | |||
| 268 | |||
| 269 | class SPDXExternalDocumentRef(SPDXObject): | ||
| 270 | externalDocumentId = _String() | ||
| 271 | spdxDocument = _String() | ||
| 272 | checksum = _Object(SPDXChecksum) | ||
| 273 | |||
| 274 | |||
| 275 | class SPDXExtractedLicensingInfo(SPDXObject): | ||
| 276 | name = _String() | ||
| 277 | comment = _String() | ||
| 278 | licenseId = _String() | ||
| 279 | extractedText = _String() | ||
| 280 | |||
| 281 | |||
| 282 | class SPDXDocument(SPDXObject): | ||
| 283 | spdxVersion = _String(default="SPDX-" + SPDX_VERSION) | ||
| 284 | dataLicense = _String(default="CC0-1.0") | ||
| 285 | SPDXID = _String(default="SPDXRef-DOCUMENT") | ||
| 286 | name = _String() | ||
| 287 | documentNamespace = _String() | ||
| 288 | creationInfo = _Object(SPDXCreationInfo) | ||
| 289 | packages = _ObjectList(SPDXPackage) | ||
| 290 | files = _ObjectList(SPDXFile) | ||
| 291 | relationships = _ObjectList(SPDXRelationship) | ||
| 292 | externalDocumentRefs = _ObjectList(SPDXExternalDocumentRef) | ||
| 293 | hasExtractedLicensingInfos = _ObjectList(SPDXExtractedLicensingInfo) | ||
| 294 | |||
| 295 | def __init__(self, **d): | ||
| 296 | super().__init__(**d) | ||
| 297 | |||
| 298 | def to_json(self, f, *, sort_keys=False, indent=None, separators=None): | ||
| 299 | class Encoder(json.JSONEncoder): | ||
| 300 | def default(self, o): | ||
| 301 | if isinstance(o, SPDXObject): | ||
| 302 | return o.serializer() | ||
| 303 | |||
| 304 | return super().default(o) | ||
| 305 | |||
| 306 | sha1 = hashlib.sha1() | ||
| 307 | for chunk in Encoder( | ||
| 308 | sort_keys=sort_keys, | ||
| 309 | indent=indent, | ||
| 310 | separators=separators, | ||
| 311 | ).iterencode(self): | ||
| 312 | chunk = chunk.encode("utf-8") | ||
| 313 | f.write(chunk) | ||
| 314 | sha1.update(chunk) | ||
| 315 | |||
| 316 | return sha1.hexdigest() | ||
| 317 | |||
| 318 | @classmethod | ||
| 319 | def from_json(cls, f): | ||
| 320 | return cls(**json.load(f)) | ||
| 321 | |||
| 322 | def add_relationship(self, _from, relationship, _to, *, comment=None, annotation=None): | ||
| 323 | if isinstance(_from, SPDXObject): | ||
| 324 | from_spdxid = _from.SPDXID | ||
| 325 | else: | ||
| 326 | from_spdxid = _from | ||
| 327 | |||
| 328 | if isinstance(_to, SPDXObject): | ||
| 329 | to_spdxid = _to.SPDXID | ||
| 330 | else: | ||
| 331 | to_spdxid = _to | ||
| 332 | |||
| 333 | r = SPDXRelationship( | ||
| 334 | spdxElementId=from_spdxid, | ||
| 335 | relatedSpdxElement=to_spdxid, | ||
| 336 | relationshipType=relationship, | ||
| 337 | ) | ||
| 338 | |||
| 339 | if comment is not None: | ||
| 340 | r.comment = comment | ||
| 341 | |||
| 342 | if annotation is not None: | ||
| 343 | r.annotations.append(annotation) | ||
| 344 | |||
| 345 | self.relationships.append(r) | ||
| 346 | |||
| 347 | def find_by_spdxid(self, spdxid): | ||
| 348 | for o in itertools.chain(self.packages, self.files): | ||
| 349 | if o.SPDXID == spdxid: | ||
| 350 | return o | ||
| 351 | return None | ||
| 352 | |||
| 353 | def find_external_document_ref(self, namespace): | ||
| 354 | for r in self.externalDocumentRefs: | ||
| 355 | if r.spdxDocument == namespace: | ||
| 356 | return r | ||
| 357 | return None | ||
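# A minimal usage sketch, assuming the classes above are imported from oe.spdx
# (the module this file provides): build a document, attach a package, relate
# the two, and serialize. All identifiers and URLs below are illustrative.
import io

from oe.spdx import SPDXDocument, SPDXPackage

doc = SPDXDocument(name="example", documentNamespace="https://example.com/spdxdocs/example")
pkg = SPDXPackage(name="example-pkg", SPDXID="SPDXRef-example-pkg", versionInfo="1.0")
doc.packages.append(pkg)
doc.add_relationship(doc, "DESCRIBES", pkg)

# to_json() streams UTF-8 encoded chunks, so it expects a binary file object;
# it returns the SHA-1 of the serialized document.
with io.BytesIO() as buf:
    digest = doc.to_json(buf, sort_keys=True, indent=2)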
diff --git a/meta/lib/oe/spdx30.py b/meta/lib/oe/spdx30.py deleted file mode 100644 index cd97eebd18..0000000000 --- a/meta/lib/oe/spdx30.py +++ /dev/null | |||
| @@ -1,5593 +0,0 @@ | |||
| 1 | #! /usr/bin/env python3 | ||
| 2 | # | ||
| 3 | # Generated Python bindings from a SHACL model | ||
| 4 | # | ||
| 5 | # This file was automatically generated by shacl2code. DO NOT MANUALLY MODIFY IT | ||
| 6 | # | ||
| 7 | # SPDX-License-Identifier: MIT | ||
| 8 | |||
| 9 | import functools | ||
| 10 | import hashlib | ||
| 11 | import json | ||
| 12 | import re | ||
| 13 | import sys | ||
| 14 | import threading | ||
| 15 | import time | ||
| 16 | from contextlib import contextmanager | ||
| 17 | from datetime import datetime, timezone, timedelta | ||
| 18 | from enum import Enum | ||
| 19 | from abc import ABC, abstractmethod | ||
| 20 | |||
| 21 | |||
| 22 | def check_type(obj, types): | ||
| 23 | if not isinstance(obj, types): | ||
| 24 | if isinstance(types, (list, tuple)): | ||
| 25 | raise TypeError( | ||
| 26 | f"Value must be one of type: {', '.join(t.__name__ for t in types)}. Got {type(obj)}" | ||
| 27 | ) | ||
| 28 | raise TypeError(f"Value must be of type {types.__name__}. Got {type(obj)}") | ||
| 29 | |||
| 30 | |||
| 31 | class Property(ABC): | ||
| 32 | """ | ||
| 33 | A generic SHACL object property. The different types will derive from this | ||
| 34 | class | ||
| 35 | """ | ||
| 36 | |||
| 37 | def __init__(self, *, pattern=None): | ||
| 38 | self.pattern = pattern | ||
| 39 | |||
| 40 | def init(self): | ||
| 41 | return None | ||
| 42 | |||
| 43 | def validate(self, value): | ||
| 44 | check_type(value, self.VALID_TYPES) | ||
| 45 | if self.pattern is not None and not re.search( | ||
| 46 | self.pattern, self.to_string(value) | ||
| 47 | ): | ||
| 48 | raise ValueError( | ||
| 49 | f"Value is not correctly formatted. Got '{self.to_string(value)}'" | ||
| 50 | ) | ||
| 51 | |||
| 52 | def set(self, value): | ||
| 53 | return value | ||
| 54 | |||
| 55 | def check_min_count(self, value, min_count): | ||
| 56 | return min_count == 1 | ||
| 57 | |||
| 58 | def check_max_count(self, value, max_count): | ||
| 59 | return max_count == 1 | ||
| 60 | |||
| 61 | def elide(self, value): | ||
| 62 | return value is None | ||
| 63 | |||
| 64 | def walk(self, value, callback, path): | ||
| 65 | callback(value, path) | ||
| 66 | |||
| 67 | def iter_objects(self, value, recursive, visited): | ||
| 68 | return [] | ||
| 69 | |||
| 70 | def link_prop(self, value, objectset, missing, visited): | ||
| 71 | return value | ||
| 72 | |||
| 73 | def to_string(self, value): | ||
| 74 | return str(value) | ||
| 75 | |||
| 76 | @abstractmethod | ||
| 77 | def encode(self, encoder, value, state): | ||
| 78 | pass | ||
| 79 | |||
| 80 | @abstractmethod | ||
| 81 | def decode(self, decoder, *, objectset=None): | ||
| 82 | pass | ||
| 83 | |||
| 84 | |||
| 85 | class StringProp(Property): | ||
| 86 | """ | ||
| 87 | A scalar string property for a SHACL object | ||
| 88 | """ | ||
| 89 | |||
| 90 | VALID_TYPES = str | ||
| 91 | |||
| 92 | def set(self, value): | ||
| 93 | return str(value) | ||
| 94 | |||
| 95 | def encode(self, encoder, value, state): | ||
| 96 | encoder.write_string(value) | ||
| 97 | |||
| 98 | def decode(self, decoder, *, objectset=None): | ||
| 99 | return decoder.read_string() | ||
| 100 | |||
| 101 | |||
| 102 | class AnyURIProp(StringProp): | ||
| 103 | def encode(self, encoder, value, state): | ||
| 104 | encoder.write_iri(value) | ||
| 105 | |||
| 106 | def decode(self, decoder, *, objectset=None): | ||
| 107 | return decoder.read_iri() | ||
| 108 | |||
| 109 | |||
| 110 | class DateTimeProp(Property): | ||
| 111 | """ | ||
| 112 | A Date/Time Object with optional timezone | ||
| 113 | """ | ||
| 114 | |||
| 115 | VALID_TYPES = datetime | ||
| 116 | UTC_FORMAT_STR = "%Y-%m-%dT%H:%M:%SZ" | ||
| 117 | REGEX = r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(Z|[+-]\d{2}:\d{2})?$" | ||
| 118 | |||
| 119 | def set(self, value): | ||
| 120 | return self._normalize(value) | ||
| 121 | |||
| 122 | def encode(self, encoder, value, state): | ||
| 123 | encoder.write_datetime(self.to_string(value)) | ||
| 124 | |||
| 125 | def decode(self, decoder, *, objectset=None): | ||
| 126 | s = decoder.read_datetime() | ||
| 127 | if s is None: | ||
| 128 | return None | ||
| 129 | v = self.from_string(s) | ||
| 130 | return self._normalize(v) | ||
| 131 | |||
| 132 | def _normalize(self, value): | ||
| 133 | if value.utcoffset() is None: | ||
| 134 | value = value.astimezone() | ||
| 135 | offset = value.utcoffset() | ||
| 136 | seconds = offset % timedelta(minutes=-1 if offset.total_seconds() < 0 else 1) | ||
| 137 | if seconds: | ||
| 138 | offset = offset - seconds | ||
| 139 | value = value.replace(tzinfo=timezone(offset)) | ||
| 140 | value = value.replace(microsecond=0) | ||
| 141 | return value | ||
| 142 | |||
| 143 | def to_string(self, value): | ||
| 144 | value = self._normalize(value) | ||
| 145 | if value.tzinfo == timezone.utc: | ||
| 146 | return value.strftime(self.UTC_FORMAT_STR) | ||
| 147 | return value.isoformat() | ||
| 148 | |||
| 149 | def from_string(self, value): | ||
| 150 | if not re.match(self.REGEX, value): | ||
| 151 | raise ValueError(f"'{value}' is not a correctly formatted datetime") | ||
| 152 | if "Z" in value: | ||
| 153 | d = datetime( | ||
| 154 | *(time.strptime(value, self.UTC_FORMAT_STR)[0:6]), | ||
| 155 | tzinfo=timezone.utc, | ||
| 156 | ) | ||
| 157 | else: | ||
| 158 | d = datetime.fromisoformat(value) | ||
| 159 | |||
| 160 | return self._normalize(d) | ||
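# A small sketch of the normalization above (module assumed importable as
# oe.spdx30): aware datetimes have sub-minute offsets rounded away and
# microseconds dropped, naive ones pick up the local zone, and UTC values
# serialize with a trailing 'Z'.
from datetime import datetime, timezone

from oe.spdx30 import DateTimeProp

p = DateTimeProp()
utc = p.set(datetime(2024, 1, 2, 3, 4, 5, 678901, tzinfo=timezone.utc))
assert p.to_string(utc) == "2024-01-02T03:04:05Z"

with_offset = p.from_string("2024-01-02T03:04:05+02:00")
assert with_offset.utcoffset() is not None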
| 161 | |||
| 162 | |||
| 163 | class DateTimeStampProp(DateTimeProp): | ||
| 164 | """ | ||
| 165 | A Date/Time Object with a required timezone | ||
| 166 | """ | ||
| 167 | |||
| 168 | REGEX = r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(Z|[+-]\d{2}:\d{2})$" | ||
| 169 | |||
| 170 | |||
| 171 | class IntegerProp(Property): | ||
| 172 | VALID_TYPES = int | ||
| 173 | |||
| 174 | def set(self, value): | ||
| 175 | return int(value) | ||
| 176 | |||
| 177 | def encode(self, encoder, value, state): | ||
| 178 | encoder.write_integer(value) | ||
| 179 | |||
| 180 | def decode(self, decoder, *, objectset=None): | ||
| 181 | return decoder.read_integer() | ||
| 182 | |||
| 183 | |||
| 184 | class PositiveIntegerProp(IntegerProp): | ||
| 185 | def validate(self, value): | ||
| 186 | super().validate(value) | ||
| 187 | if value < 1: | ||
| 188 | raise ValueError(f"Value must be >=1. Got {value}") | ||
| 189 | |||
| 190 | |||
| 191 | class NonNegativeIntegerProp(IntegerProp): | ||
| 192 | def validate(self, value): | ||
| 193 | super().validate(value) | ||
| 194 | if value < 0: | ||
| 195 | raise ValueError(f"Value must be >= 0. Got {value}") | ||
| 196 | |||
| 197 | |||
| 198 | class BooleanProp(Property): | ||
| 199 | VALID_TYPES = bool | ||
| 200 | |||
| 201 | def set(self, value): | ||
| 202 | return bool(value) | ||
| 203 | |||
| 204 | def encode(self, encoder, value, state): | ||
| 205 | encoder.write_bool(value) | ||
| 206 | |||
| 207 | def decode(self, decoder, *, objectset=None): | ||
| 208 | return decoder.read_bool() | ||
| 209 | |||
| 210 | |||
| 211 | class FloatProp(Property): | ||
| 212 | VALID_TYPES = (float, int) | ||
| 213 | |||
| 214 | def set(self, value): | ||
| 215 | return float(value) | ||
| 216 | |||
| 217 | def encode(self, encoder, value, state): | ||
| 218 | encoder.write_float(value) | ||
| 219 | |||
| 220 | def decode(self, decoder, *, objectset=None): | ||
| 221 | return decoder.read_float() | ||
| 222 | |||
| 223 | |||
| 224 | class IRIProp(Property): | ||
| 225 | def __init__(self, context=[], *, pattern=None): | ||
| 226 | super().__init__(pattern=pattern) | ||
| 227 | self.context = context | ||
| 228 | |||
| 229 | def compact(self, value): | ||
| 230 | for iri, compact in self.context: | ||
| 231 | if value == iri: | ||
| 232 | return compact | ||
| 233 | return None | ||
| 234 | |||
| 235 | def expand(self, value): | ||
| 236 | for iri, compact in self.context: | ||
| 237 | if value == compact: | ||
| 238 | return iri | ||
| 239 | return None | ||
| 240 | |||
| 241 | def iri_values(self): | ||
| 242 | return (iri for iri, _ in self.context) | ||
| 243 | |||
| 244 | |||
| 245 | class ObjectProp(IRIProp): | ||
| 246 | """ | ||
| 247 | A scalar SHACL object property of a SHACL object | ||
| 248 | """ | ||
| 249 | |||
| 250 | def __init__(self, cls, required, context=[]): | ||
| 251 | super().__init__(context) | ||
| 252 | self.cls = cls | ||
| 253 | self.required = required | ||
| 254 | |||
| 255 | def init(self): | ||
| 256 | if self.required and not self.cls.IS_ABSTRACT: | ||
| 257 | return self.cls() | ||
| 258 | return None | ||
| 259 | |||
| 260 | def validate(self, value): | ||
| 261 | check_type(value, (self.cls, str)) | ||
| 262 | |||
| 263 | def walk(self, value, callback, path): | ||
| 264 | if value is None: | ||
| 265 | return | ||
| 266 | |||
| 267 | if not isinstance(value, str): | ||
| 268 | value.walk(callback, path) | ||
| 269 | else: | ||
| 270 | callback(value, path) | ||
| 271 | |||
| 272 | def iter_objects(self, value, recursive, visited): | ||
| 273 | if value is None or isinstance(value, str): | ||
| 274 | return | ||
| 275 | |||
| 276 | if value not in visited: | ||
| 277 | visited.add(value) | ||
| 278 | yield value | ||
| 279 | |||
| 280 | if recursive: | ||
| 281 | for c in value.iter_objects(recursive=True, visited=visited): | ||
| 282 | yield c | ||
| 283 | |||
| 284 | def encode(self, encoder, value, state): | ||
| 285 | if value is None: | ||
| 286 | raise ValueError("Object cannot be None") | ||
| 287 | |||
| 288 | if isinstance(value, str): | ||
| 289 | encoder.write_iri(value, self.compact(value)) | ||
| 290 | return | ||
| 291 | |||
| 292 | return value.encode(encoder, state) | ||
| 293 | |||
| 294 | def decode(self, decoder, *, objectset=None): | ||
| 295 | iri = decoder.read_iri() | ||
| 296 | if iri is None: | ||
| 297 | return self.cls.decode(decoder, objectset=objectset) | ||
| 298 | |||
| 299 | iri = self.expand(iri) or iri | ||
| 300 | |||
| 301 | if objectset is None: | ||
| 302 | return iri | ||
| 303 | |||
| 304 | obj = objectset.find_by_id(iri) | ||
| 305 | if obj is None: | ||
| 306 | return iri | ||
| 307 | |||
| 308 | self.validate(obj) | ||
| 309 | return obj | ||
| 310 | |||
| 311 | def link_prop(self, value, objectset, missing, visited): | ||
| 312 | if value is None: | ||
| 313 | return value | ||
| 314 | |||
| 315 | if isinstance(value, str): | ||
| 316 | o = objectset.find_by_id(value) | ||
| 317 | if o is not None: | ||
| 318 | self.validate(o) | ||
| 319 | return o | ||
| 320 | |||
| 321 | if missing is not None: | ||
| 322 | missing.add(value) | ||
| 323 | |||
| 324 | return value | ||
| 325 | |||
| 326 | # De-duplicate IDs | ||
| 327 | if value._id: | ||
| 328 | value = objectset.find_by_id(value._id, value) | ||
| 329 | self.validate(value) | ||
| 330 | |||
| 331 | value.link_helper(objectset, missing, visited) | ||
| 332 | return value | ||
| 333 | |||
| 334 | |||
| 335 | class ListProxy(object): | ||
| 336 | def __init__(self, prop, data=None): | ||
| 337 | if data is None: | ||
| 338 | self.__data = [] | ||
| 339 | else: | ||
| 340 | self.__data = data | ||
| 341 | self.__prop = prop | ||
| 342 | |||
| 343 | def append(self, value): | ||
| 344 | self.__prop.validate(value) | ||
| 345 | self.__data.append(self.__prop.set(value)) | ||
| 346 | |||
| 347 | def insert(self, idx, value): | ||
| 348 | self.__prop.validate(value) | ||
| 349 | self.__data.insert(idx, self.__prop.set(value)) | ||
| 350 | |||
| 351 | def extend(self, items): | ||
| 352 | for i in items: | ||
| 353 | self.append(i) | ||
| 354 | |||
| 355 | def sort(self, *args, **kwargs): | ||
| 356 | self.__data.sort(*args, **kwargs) | ||
| 357 | |||
| 358 | def __getitem__(self, key): | ||
| 359 | return self.__data[key] | ||
| 360 | |||
| 361 | def __setitem__(self, key, value): | ||
| 362 | if isinstance(key, slice): | ||
| 363 | for v in value: | ||
| 364 | self.__prop.validate(v) | ||
| 365 | self.__data[key] = [self.__prop.set(v) for v in value] | ||
| 366 | else: | ||
| 367 | self.__prop.validate(value) | ||
| 368 | self.__data[key] = self.__prop.set(value) | ||
| 369 | |||
| 370 | def __delitem__(self, key): | ||
| 371 | del self.__data[key] | ||
| 372 | |||
| 373 | def __contains__(self, item): | ||
| 374 | return item in self.__data | ||
| 375 | |||
| 376 | def __iter__(self): | ||
| 377 | return iter(self.__data) | ||
| 378 | |||
| 379 | def __len__(self): | ||
| 380 | return len(self.__data) | ||
| 381 | |||
| 382 | def __str__(self): | ||
| 383 | return str(self.__data) | ||
| 384 | |||
| 385 | def __repr__(self): | ||
| 386 | return repr(self.__data) | ||
| 387 | |||
| 388 | def __eq__(self, other): | ||
| 389 | if isinstance(other, ListProxy): | ||
| 390 | return self.__data == other.__data | ||
| 391 | |||
| 392 | return self.__data == other | ||
| 393 | |||
| 394 | |||
| 395 | class ListProp(Property): | ||
| 396 | """ | ||
| 397 | A list of SHACL properties | ||
| 398 | """ | ||
| 399 | |||
| 400 | VALID_TYPES = (list, ListProxy) | ||
| 401 | |||
| 402 | def __init__(self, prop): | ||
| 403 | super().__init__() | ||
| 404 | self.prop = prop | ||
| 405 | |||
| 406 | def init(self): | ||
| 407 | return ListProxy(self.prop) | ||
| 408 | |||
| 409 | def validate(self, value): | ||
| 410 | super().validate(value) | ||
| 411 | |||
| 412 | for i in value: | ||
| 413 | self.prop.validate(i) | ||
| 414 | |||
| 415 | def set(self, value): | ||
| 416 | if isinstance(value, ListProxy): | ||
| 417 | return value | ||
| 418 | |||
| 419 | return ListProxy(self.prop, [self.prop.set(d) for d in value]) | ||
| 420 | |||
| 421 | def check_min_count(self, value, min_count): | ||
| 422 | check_type(value, ListProxy) | ||
| 423 | return len(value) >= min_count | ||
| 424 | |||
| 425 | def check_max_count(self, value, max_count): | ||
| 426 | check_type(value, ListProxy) | ||
| 427 | return len(value) <= max_count | ||
| 428 | |||
| 429 | def elide(self, value): | ||
| 430 | check_type(value, ListProxy) | ||
| 431 | return len(value) == 0 | ||
| 432 | |||
| 433 | def walk(self, value, callback, path): | ||
| 434 | callback(value, path) | ||
| 435 | for idx, v in enumerate(value): | ||
| 436 | self.prop.walk(v, callback, path + [f"[{idx}]"]) | ||
| 437 | |||
| 438 | def iter_objects(self, value, recursive, visited): | ||
| 439 | for v in value: | ||
| 440 | for c in self.prop.iter_objects(v, recursive, visited): | ||
| 441 | yield c | ||
| 442 | |||
| 443 | def link_prop(self, value, objectset, missing, visited): | ||
| 444 | # ListProxy and plain list inputs link identically, item by item | ||
| 445 | data = [ | ||
| 446 | self.prop.link_prop(v, objectset, missing, visited) for v in value | ||
| 447 | ] | ||
| 448 | |||
| 449 | return ListProxy(self.prop, data=data) | ||
| 450 | |||
| 451 | def encode(self, encoder, value, state): | ||
| 452 | check_type(value, ListProxy) | ||
| 453 | |||
| 454 | with encoder.write_list() as list_s: | ||
| 455 | for v in value: | ||
| 456 | with list_s.write_list_item() as item_s: | ||
| 457 | self.prop.encode(item_s, v, state) | ||
| 458 | |||
| 459 | def decode(self, decoder, *, objectset=None): | ||
| 460 | data = [] | ||
| 461 | for val_d in decoder.read_list(): | ||
| 462 | v = self.prop.decode(val_d, objectset=objectset) | ||
| 463 | self.prop.validate(v) | ||
| 464 | data.append(v) | ||
| 465 | |||
| 466 | return ListProxy(self.prop, data=data) | ||
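# Sketch of the list machinery above (module assumed importable as oe.spdx30):
# ListProp.init() hands out a ListProxy that validates every element on
# append/assignment, so type and range errors surface at mutation time.
from oe.spdx30 import ListProp, PositiveIntegerProp

counts = ListProp(PositiveIntegerProp()).init()
counts.append(3)
counts.extend([4, 5])
try:
    counts.append(0)        # rejected: PositiveIntegerProp requires values >= 1
except ValueError:
    pass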
| 467 | |||
| 468 | |||
| 469 | class EnumProp(IRIProp): | ||
| 470 | VALID_TYPES = str | ||
| 471 | |||
| 472 | def __init__(self, values, *, pattern=None): | ||
| 473 | super().__init__(values, pattern=pattern) | ||
| 474 | |||
| 475 | def validate(self, value): | ||
| 476 | super().validate(value) | ||
| 477 | |||
| 478 | valid_values = list(self.iri_values()) | ||
| 479 | if value not in valid_values: | ||
| 480 | raise ValueError( | ||
| 481 | f"'{value}' is not a valid value. Choose one of {' '.join(valid_values)}" | ||
| 482 | ) | ||
| 483 | |||
| 484 | def encode(self, encoder, value, state): | ||
| 485 | encoder.write_enum(value, self, self.compact(value)) | ||
| 486 | |||
| 487 | def decode(self, decoder, *, objectset=None): | ||
| 488 | v = decoder.read_enum(self) | ||
| 489 | return self.expand(v) or v | ||
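# Sketch: an EnumProp is built from (IRI, compact) pairs; only the listed IRIs
# validate, and compact names expand back to full IRIs on decode (illustrative
# IRIs, module assumed importable as oe.spdx30).
from oe.spdx30 import EnumProp

colour = EnumProp([
    ("https://example.com/colour/red", "red"),
    ("https://example.com/colour/blue", "blue"),
])
colour.validate("https://example.com/colour/red")          # accepted
assert colour.expand("blue") == "https://example.com/colour/blue"
assert colour.compact("https://example.com/colour/red") == "red"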
| 490 | |||
| 491 | |||
| 492 | class NodeKind(Enum): | ||
| 493 | BlankNode = 1 | ||
| 494 | IRI = 2 | ||
| 495 | BlankNodeOrIRI = 3 | ||
| 496 | |||
| 497 | |||
| 498 | def is_IRI(s): | ||
| 499 | if not isinstance(s, str): | ||
| 500 | return False | ||
| 501 | if s.startswith("_:"): | ||
| 502 | return False | ||
| 503 | if ":" not in s: | ||
| 504 | return False | ||
| 505 | return True | ||
| 506 | |||
| 507 | |||
| 508 | def is_blank_node(s): | ||
| 509 | if not isinstance(s, str): | ||
| 510 | return False | ||
| 511 | if not s.startswith("_:"): | ||
| 512 | return False | ||
| 513 | return True | ||
| 514 | |||
| 515 | |||
| 516 | def register(type_iri, *, compact_type=None, abstract=False): | ||
| 517 | def add_class(key, c): | ||
| 518 | assert ( | ||
| 519 | key not in SHACLObject.CLASSES | ||
| 520 | ), f"{key} already registered to {SHACLObject.CLASSES[key].__name__}" | ||
| 521 | SHACLObject.CLASSES[key] = c | ||
| 522 | |||
| 523 | def decorator(c): | ||
| 524 | global NAMED_INDIVIDUALS | ||
| 525 | |||
| 526 | assert issubclass( | ||
| 527 | c, SHACLObject | ||
| 528 | ), f"{c.__name__} is not derived from SHACLObject" | ||
| 529 | |||
| 530 | c._OBJ_TYPE = type_iri | ||
| 531 | c.IS_ABSTRACT = abstract | ||
| 532 | add_class(type_iri, c) | ||
| 533 | |||
| 534 | c._OBJ_COMPACT_TYPE = compact_type | ||
| 535 | if compact_type: | ||
| 536 | add_class(compact_type, c) | ||
| 537 | |||
| 538 | NAMED_INDIVIDUALS |= set(c.NAMED_INDIVIDUALS.values()) | ||
| 539 | |||
| 540 | # Registration is deferred until the first instance of the class is created | ||
| 541 | # so that it has access to any other defined classes | ||
| 542 | c._NEEDS_REG = True | ||
| 543 | return c | ||
| 544 | |||
| 545 | return decorator | ||
| 546 | |||
| 547 | |||
| 548 | register_lock = threading.Lock() | ||
| 549 | NAMED_INDIVIDUALS = set() | ||
| 550 | |||
| 551 | |||
| 552 | @functools.total_ordering | ||
| 553 | class SHACLObject(object): | ||
| 554 | CLASSES = {} | ||
| 555 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 556 | ID_ALIAS = None | ||
| 557 | IS_ABSTRACT = True | ||
| 558 | |||
| 559 | def __init__(self, **kwargs): | ||
| 560 | if self._is_abstract(): | ||
| 561 | raise NotImplementedError( | ||
| 562 | f"{self.__class__.__name__} is abstract and cannot be implemented" | ||
| 563 | ) | ||
| 564 | |||
| 565 | with register_lock: | ||
| 566 | cls = self.__class__ | ||
| 567 | if cls._NEEDS_REG: | ||
| 568 | cls._OBJ_PROPERTIES = {} | ||
| 569 | cls._OBJ_IRIS = {} | ||
| 570 | cls._register_props() | ||
| 571 | cls._NEEDS_REG = False | ||
| 572 | |||
| 573 | self.__dict__["_obj_data"] = {} | ||
| 574 | self.__dict__["_obj_metadata"] = {} | ||
| 575 | |||
| 576 | for iri, prop, _, _, _, _ in self.__iter_props(): | ||
| 577 | self.__dict__["_obj_data"][iri] = prop.init() | ||
| 578 | |||
| 579 | for k, v in kwargs.items(): | ||
| 580 | setattr(self, k, v) | ||
| 581 | |||
| 582 | def _is_abstract(self): | ||
| 583 | return self.__class__.IS_ABSTRACT | ||
| 584 | |||
| 585 | @classmethod | ||
| 586 | def _register_props(cls): | ||
| 587 | cls._add_property("_id", StringProp(), iri="@id") | ||
| 588 | |||
| 589 | @classmethod | ||
| 590 | def _add_property( | ||
| 591 | cls, | ||
| 592 | pyname, | ||
| 593 | prop, | ||
| 594 | iri, | ||
| 595 | min_count=None, | ||
| 596 | max_count=None, | ||
| 597 | compact=None, | ||
| 598 | ): | ||
| 599 | if pyname in cls._OBJ_IRIS: | ||
| 600 | raise KeyError(f"'{pyname}' is already defined for '{cls.__name__}'") | ||
| 601 | if iri in cls._OBJ_PROPERTIES: | ||
| 602 | raise KeyError(f"'{iri}' is already defined for '{cls.__name__}'") | ||
| 603 | |||
| 604 | while hasattr(cls, pyname): | ||
| 605 | pyname = pyname + "_" | ||
| 606 | |||
| 607 | pyname = sys.intern(pyname) | ||
| 608 | iri = sys.intern(iri) | ||
| 609 | |||
| 610 | cls._OBJ_IRIS[pyname] = iri | ||
| 611 | cls._OBJ_PROPERTIES[iri] = (prop, min_count, max_count, pyname, compact) | ||
| 612 | |||
| 613 | def __setattr__(self, name, value): | ||
| 614 | if name == self.ID_ALIAS: | ||
| 615 | self["@id"] = value | ||
| 616 | return | ||
| 617 | |||
| 618 | try: | ||
| 619 | iri = self._OBJ_IRIS[name] | ||
| 620 | self[iri] = value | ||
| 621 | except KeyError: | ||
| 622 | raise AttributeError( | ||
| 623 | f"'{name}' is not a valid property of {self.__class__.__name__}" | ||
| 624 | ) | ||
| 625 | |||
| 626 | def __getattr__(self, name): | ||
| 627 | if name in self._OBJ_IRIS: | ||
| 628 | return self.__dict__["_obj_data"][self._OBJ_IRIS[name]] | ||
| 629 | |||
| 630 | if name == self.ID_ALIAS: | ||
| 631 | return self.__dict__["_obj_data"]["@id"] | ||
| 632 | |||
| 633 | if name == "_metadata": | ||
| 634 | return self.__dict__["_obj_metadata"] | ||
| 635 | |||
| 636 | if name == "_IRI": | ||
| 637 | return self._OBJ_IRIS | ||
| 638 | |||
| 639 | if name == "TYPE": | ||
| 640 | return self.__class__._OBJ_TYPE | ||
| 641 | |||
| 642 | if name == "COMPACT_TYPE": | ||
| 643 | return self.__class__._OBJ_COMPACT_TYPE | ||
| 644 | |||
| 645 | raise AttributeError( | ||
| 646 | f"'{name}' is not a valid property of {self.__class__.__name__}" | ||
| 647 | ) | ||
| 648 | |||
| 649 | def __delattr__(self, name): | ||
| 650 | if name == self.ID_ALIAS: | ||
| 651 | del self["@id"] | ||
| 652 | return | ||
| 653 | |||
| 654 | try: | ||
| 655 | iri = self._OBJ_IRIS[name] | ||
| 656 | del self[iri] | ||
| 657 | except KeyError: | ||
| 658 | raise AttributeError( | ||
| 659 | f"'{name}' is not a valid property of {self.__class__.__name__}" | ||
| 660 | ) | ||
| 661 | |||
| 662 | def __get_prop(self, iri): | ||
| 663 | if iri not in self._OBJ_PROPERTIES: | ||
| 664 | raise KeyError( | ||
| 665 | f"'{iri}' is not a valid property of {self.__class__.__name__}" | ||
| 666 | ) | ||
| 667 | |||
| 668 | return self._OBJ_PROPERTIES[iri] | ||
| 669 | |||
| 670 | def __iter_props(self): | ||
| 671 | for iri, v in self._OBJ_PROPERTIES.items(): | ||
| 672 | yield iri, *v | ||
| 673 | |||
| 674 | def __getitem__(self, iri): | ||
| 675 | return self.__dict__["_obj_data"][iri] | ||
| 676 | |||
| 677 | def __setitem__(self, iri, value): | ||
| 678 | if iri == "@id": | ||
| 679 | if self.NODE_KIND == NodeKind.BlankNode: | ||
| 680 | if not is_blank_node(value): | ||
| 681 | raise ValueError( | ||
| 682 | f"{self.__class__.__name__} ({id(self)}) can only have local reference. Property '{iri}' cannot be set to '{value}' and must start with '_:'" | ||
| 683 | ) | ||
| 684 | elif self.NODE_KIND == NodeKind.IRI: | ||
| 685 | if not is_IRI(value): | ||
| 686 | raise ValueError( | ||
| 687 | f"{self.__class__.__name__} ({id(self)}) can only have an IRI value. Property '{iri}' cannot be set to '{value}'" | ||
| 688 | ) | ||
| 689 | else: | ||
| 690 | if not is_blank_node(value) and not is_IRI(value): | ||
| 691 | raise ValueError( | ||
| 692 | f"{self.__class__.__name__} ({id(self)}) Has invalid Property '{iri}' '{value}'. Must be a blank node or IRI" | ||
| 693 | ) | ||
| 694 | |||
| 695 | prop, _, _, _, _ = self.__get_prop(iri) | ||
| 696 | prop.validate(value) | ||
| 697 | self.__dict__["_obj_data"][iri] = prop.set(value) | ||
| 698 | |||
| 699 | def __delitem__(self, iri): | ||
| 700 | prop, _, _, _, _ = self.__get_prop(iri) | ||
| 701 | self.__dict__["_obj_data"][iri] = prop.init() | ||
| 702 | |||
| 703 | def __iter__(self): | ||
| 704 | return self._OBJ_PROPERTIES.keys() | ||
| 705 | |||
| 706 | def walk(self, callback, path=None): | ||
| 707 | """ | ||
| 708 | Walk object tree, invoking the callback for each item | ||
| 709 | |||
| 710 | Callback has the form: | ||
| 711 | |||
| 712 | def callback(object, path): | ||
| 713 | """ | ||
| 714 | if path is None: | ||
| 715 | path = ["."] | ||
| 716 | |||
| 717 | if callback(self, path): | ||
| 718 | for iri, prop, _, _, _, _ in self.__iter_props(): | ||
| 719 | prop.walk(self.__dict__["_obj_data"][iri], callback, path + [f".{iri}"]) | ||
| 720 | |||
| 721 | def property_keys(self): | ||
| 722 | for iri, _, _, _, pyname, compact in self.__iter_props(): | ||
| 723 | if iri == "@id": | ||
| 724 | compact = self.ID_ALIAS | ||
| 725 | yield pyname, iri, compact | ||
| 726 | |||
| 727 | def iter_objects(self, *, recursive=False, visited=None): | ||
| 728 | """ | ||
| 729 | Iterate over all objects that are children of this one | ||
| 730 | """ | ||
| 731 | if visited is None: | ||
| 732 | visited = set() | ||
| 733 | |||
| 734 | for iri, prop, _, _, _, _ in self.__iter_props(): | ||
| 735 | for c in prop.iter_objects( | ||
| 736 | self.__dict__["_obj_data"][iri], recursive=recursive, visited=visited | ||
| 737 | ): | ||
| 738 | yield c | ||
| 739 | |||
| 740 | def encode(self, encoder, state): | ||
| 741 | idname = self.ID_ALIAS or self._OBJ_IRIS["_id"] | ||
| 742 | if not self._id and self.NODE_KIND == NodeKind.IRI: | ||
| 743 | raise ValueError( | ||
| 744 | f"{self.__class__.__name__} ({id(self)}) must have a IRI for property '{idname}'" | ||
| 745 | ) | ||
| 746 | |||
| 747 | if state.is_written(self): | ||
| 748 | encoder.write_iri(state.get_object_id(self)) | ||
| 749 | return | ||
| 750 | |||
| 751 | state.add_written(self) | ||
| 752 | |||
| 753 | with encoder.write_object( | ||
| 754 | self, | ||
| 755 | state.get_object_id(self), | ||
| 756 | bool(self._id) or state.is_refed(self), | ||
| 757 | ) as obj_s: | ||
| 758 | self._encode_properties(obj_s, state) | ||
| 759 | |||
| 760 | def _encode_properties(self, encoder, state): | ||
| 761 | for iri, prop, min_count, max_count, pyname, compact in self.__iter_props(): | ||
| 762 | value = self.__dict__["_obj_data"][iri] | ||
| 763 | if prop.elide(value): | ||
| 764 | if min_count: | ||
| 765 | raise ValueError( | ||
| 766 | f"Property '{pyname}' in {self.__class__.__name__} ({id(self)}) is required (currently {value!r})" | ||
| 767 | ) | ||
| 768 | continue | ||
| 769 | |||
| 770 | if min_count is not None: | ||
| 771 | if not prop.check_min_count(value, min_count): | ||
| 772 | raise ValueError( | ||
| 773 | f"Property '{pyname}' in {self.__class__.__name__} ({id(self)}) requires a minimum of {min_count} elements" | ||
| 774 | ) | ||
| 775 | |||
| 776 | if max_count is not None: | ||
| 777 | if not prop.check_max_count(value, max_count): | ||
| 778 | raise ValueError( | ||
| 779 | f"Property '{pyname}' in {self.__class__.__name__} ({id(self)}) requires a maximum of {max_count} elements" | ||
| 780 | ) | ||
| 781 | |||
| 782 | if iri == self._OBJ_IRIS["_id"]: | ||
| 783 | continue | ||
| 784 | |||
| 785 | with encoder.write_property(iri, compact) as prop_s: | ||
| 786 | prop.encode(prop_s, value, state) | ||
| 787 | |||
| 788 | @classmethod | ||
| 789 | def _make_object(cls, typ): | ||
| 790 | if typ not in cls.CLASSES: | ||
| 791 | raise TypeError(f"Unknown type {typ}") | ||
| 792 | |||
| 793 | return cls.CLASSES[typ]() | ||
| 794 | |||
| 795 | @classmethod | ||
| 796 | def decode(cls, decoder, *, objectset=None): | ||
| 797 | typ, obj_d = decoder.read_object() | ||
| 798 | if typ is None: | ||
| 799 | raise TypeError("Unable to determine type for object") | ||
| 800 | |||
| 801 | obj = cls._make_object(typ) | ||
| 802 | for key in (obj.ID_ALIAS, obj._OBJ_IRIS["_id"]): | ||
| 803 | with obj_d.read_property(key) as prop_d: | ||
| 804 | if prop_d is None: | ||
| 805 | continue | ||
| 806 | |||
| 807 | _id = prop_d.read_iri() | ||
| 808 | if _id is None: | ||
| 809 | raise TypeError(f"Object key '{key}' is the wrong type") | ||
| 810 | |||
| 811 | obj._id = _id | ||
| 812 | break | ||
| 813 | |||
| 814 | if obj.NODE_KIND == NodeKind.IRI and not obj._id: | ||
| 815 | raise ValueError("Object is missing required IRI") | ||
| 816 | |||
| 817 | if objectset is not None: | ||
| 818 | if obj._id: | ||
| 819 | v = objectset.find_by_id(_id) | ||
| 820 | if v is not None: | ||
| 821 | return v | ||
| 822 | |||
| 823 | obj._decode_properties(obj_d, objectset=objectset) | ||
| 824 | |||
| 825 | if objectset is not None: | ||
| 826 | objectset.add_index(obj) | ||
| 827 | return obj | ||
| 828 | |||
| 829 | def _decode_properties(self, decoder, objectset=None): | ||
| 830 | for key in decoder.object_keys(): | ||
| 831 | if not self._decode_prop(decoder, key, objectset=objectset): | ||
| 832 | raise KeyError(f"Unknown property '{key}'") | ||
| 833 | |||
| 834 | def _decode_prop(self, decoder, key, objectset=None): | ||
| 835 | if key in (self._OBJ_IRIS["_id"], self.ID_ALIAS): | ||
| 836 | return True | ||
| 837 | |||
| 838 | for iri, prop, _, _, _, compact in self.__iter_props(): | ||
| 839 | if compact == key: | ||
| 840 | read_key = compact | ||
| 841 | elif iri == key: | ||
| 842 | read_key = iri | ||
| 843 | else: | ||
| 844 | continue | ||
| 845 | |||
| 846 | with decoder.read_property(read_key) as prop_d: | ||
| 847 | v = prop.decode(prop_d, objectset=objectset) | ||
| 848 | prop.validate(v) | ||
| 849 | self.__dict__["_obj_data"][iri] = v | ||
| 850 | return True | ||
| 851 | |||
| 852 | return False | ||
| 853 | |||
| 854 | def link_helper(self, objectset, missing, visited): | ||
| 855 | if self in visited: | ||
| 856 | return | ||
| 857 | |||
| 858 | visited.add(self) | ||
| 859 | |||
| 860 | for iri, prop, _, _, _, _ in self.__iter_props(): | ||
| 861 | self.__dict__["_obj_data"][iri] = prop.link_prop( | ||
| 862 | self.__dict__["_obj_data"][iri], | ||
| 863 | objectset, | ||
| 864 | missing, | ||
| 865 | visited, | ||
| 866 | ) | ||
| 867 | |||
| 868 | def __str__(self): | ||
| 869 | parts = [ | ||
| 870 | f"{self.__class__.__name__}(", | ||
| 871 | ] | ||
| 872 | if self._id: | ||
| 873 | parts.append(f"@id='{self._id}'") | ||
| 874 | parts.append(")") | ||
| 875 | return "".join(parts) | ||
| 876 | |||
| 877 | def __hash__(self): | ||
| 878 | return super().__hash__() | ||
| 879 | |||
| 880 | def __eq__(self, other): | ||
| 881 | return super().__eq__(other) | ||
| 882 | |||
| 883 | def __lt__(self, other): | ||
| 884 | def sort_key(obj): | ||
| 885 | if isinstance(obj, str): | ||
| 886 | return (obj, "", "", "") | ||
| 887 | return ( | ||
| 888 | obj._id or "", | ||
| 889 | obj.TYPE, | ||
| 890 | getattr(obj, "name", None) or "", | ||
| 891 | id(obj), | ||
| 892 | ) | ||
| 893 | |||
| 894 | return sort_key(self) < sort_key(other) | ||
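# Sketch of how a concrete binding is declared (hypothetical class and IRIs,
# module assumed importable as oe.spdx30): the register() decorator records the
# type IRI, while _register_props()/_add_property() declare the properties.
# Actual registration is deferred until the first instance is constructed.
from oe.spdx30 import NodeKind, SHACLObject, StringProp, register

@register("https://example.com/model#Widget", compact_type="example_Widget", abstract=False)
class example_Widget(SHACLObject):
    NODE_KIND = NodeKind.BlankNodeOrIRI
    NAMED_INDIVIDUALS = {}

    @classmethod
    def _register_props(cls):
        super()._register_props()
        cls._add_property("name", StringProp(), iri="https://example.com/model#name")

w = example_Widget(name="demo")          # kwargs route through __setattr__ and validation
w._id = "https://example.com/widgets/1"  # NODE_KIND allows a blank node or an IRI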
| 895 | |||
| 896 | |||
| 897 | class SHACLExtensibleObject(object): | ||
| 898 | CLOSED = False | ||
| 899 | |||
| 900 | def __init__(self, typ=None, **kwargs): | ||
| 901 | if typ: | ||
| 902 | self.__dict__["_obj_TYPE"] = (typ, None) | ||
| 903 | else: | ||
| 904 | self.__dict__["_obj_TYPE"] = (self._OBJ_TYPE, self._OBJ_COMPACT_TYPE) | ||
| 905 | super().__init__(**kwargs) | ||
| 906 | |||
| 907 | def _is_abstract(self): | ||
| 908 | # Unknown classes are assumed to not be abstract so that they can be | ||
| 909 | # deserialized | ||
| 910 | typ = self.__dict__["_obj_TYPE"][0] | ||
| 911 | if typ in self.__class__.CLASSES: | ||
| 912 | return self.__class__.CLASSES[typ].IS_ABSTRACT | ||
| 913 | |||
| 914 | return False | ||
| 915 | |||
| 916 | @classmethod | ||
| 917 | def _make_object(cls, typ): | ||
| 918 | # Check for a known type, and if so, deserialize as that instead | ||
| 919 | if typ in cls.CLASSES: | ||
| 920 | return cls.CLASSES[typ]() | ||
| 921 | |||
| 922 | obj = cls(typ) | ||
| 923 | return obj | ||
| 924 | |||
| 925 | def _decode_properties(self, decoder, objectset=None): | ||
| 926 | def decode_value(d): | ||
| 927 | if not d.is_list(): | ||
| 928 | return d.read_value() | ||
| 929 | |||
| 930 | return [decode_value(val_d) for val_d in d.read_list()] | ||
| 931 | |||
| 932 | if self.CLOSED: | ||
| 933 | super()._decode_properties(decoder, objectset=objectset) | ||
| 934 | return | ||
| 935 | |||
| 936 | for key in decoder.object_keys(): | ||
| 937 | if self._decode_prop(decoder, key, objectset=objectset): | ||
| 938 | continue | ||
| 939 | |||
| 940 | if not is_IRI(key): | ||
| 941 | raise KeyError( | ||
| 942 | f"Extensible object properties must be IRIs. Got '{key}'" | ||
| 943 | ) | ||
| 944 | |||
| 945 | with decoder.read_property(key) as prop_d: | ||
| 946 | self.__dict__["_obj_data"][key] = decode_value(prop_d) | ||
| 947 | |||
| 948 | def _encode_properties(self, encoder, state): | ||
| 949 | def encode_value(encoder, v): | ||
| 950 | if isinstance(v, bool): | ||
| 951 | encoder.write_bool(v) | ||
| 952 | elif isinstance(v, str): | ||
| 953 | encoder.write_string(v) | ||
| 954 | elif isinstance(v, int): | ||
| 955 | encoder.write_integer(v) | ||
| 956 | elif isinstance(v, float): | ||
| 957 | encoder.write_float(v) | ||
| 958 | elif isinstance(v, list): | ||
| 959 | with encoder.write_list() as list_s: | ||
| 960 | for i in v: | ||
| 961 | with list_s.write_list_item() as item_s: | ||
| 962 | encode_value(item_s, i) | ||
| 963 | else: | ||
| 964 | raise TypeError( | ||
| 965 | f"Unsupported serialized type {type(v)} with value '{v}'" | ||
| 966 | ) | ||
| 967 | |||
| 968 | super()._encode_properties(encoder, state) | ||
| 969 | if self.CLOSED: | ||
| 970 | return | ||
| 971 | |||
| 972 | for iri, value in self.__dict__["_obj_data"].items(): | ||
| 973 | if iri in self._OBJ_PROPERTIES: | ||
| 974 | continue | ||
| 975 | |||
| 976 | with encoder.write_property(iri) as prop_s: | ||
| 977 | encode_value(prop_s, value) | ||
| 978 | |||
| 979 | def __setitem__(self, iri, value): | ||
| 980 | try: | ||
| 981 | super().__setitem__(iri, value) | ||
| 982 | except KeyError: | ||
| 983 | if self.CLOSED: | ||
| 984 | raise | ||
| 985 | |||
| 986 | if not is_IRI(iri): | ||
| 987 | raise KeyError(f"Key '{iri}' must be an IRI") | ||
| 988 | self.__dict__["_obj_data"][iri] = value | ||
| 989 | |||
| 990 | def __delitem__(self, iri): | ||
| 991 | try: | ||
| 992 | super().__delitem__(iri) | ||
| 993 | except KeyError: | ||
| 994 | if self.CLOSED: | ||
| 995 | raise | ||
| 996 | |||
| 997 | if not is_IRI(iri): | ||
| 998 | raise KeyError(f"Key '{iri}' must be an IRI") | ||
| 999 | del self.__dict__["_obj_data"][iri] | ||
| 1000 | |||
| 1001 | def __getattr__(self, name): | ||
| 1002 | if name == "TYPE": | ||
| 1003 | return self.__dict__["_obj_TYPE"][0] | ||
| 1004 | if name == "COMPACT_TYPE": | ||
| 1005 | return self.__dict__["_obj_TYPE"][1] | ||
| 1006 | return super().__getattr__(name) | ||
| 1007 | |||
| 1008 | def property_keys(self): | ||
| 1009 | iris = set() | ||
| 1010 | for pyname, iri, compact in super().property_keys(): | ||
| 1011 | iris.add(iri) | ||
| 1012 | yield pyname, iri, compact | ||
| 1013 | |||
| 1014 | if self.CLOSED: | ||
| 1015 | return | ||
| 1016 | |||
| 1017 | for iri in self.__dict__["_obj_data"].keys(): | ||
| 1018 | if iri not in iris: | ||
| 1019 | yield None, iri, None | ||
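# Sketch: a non-CLOSED extensible object keeps undeclared properties as long as
# their keys are IRIs (hypothetical class and IRIs; both base classes assumed
# importable as oe.spdx30 and mixed in this order so the extensible behaviour
# wraps the base object).
from oe.spdx30 import NodeKind, SHACLExtensibleObject, SHACLObject, register

@register("https://example.com/model#Gadget", abstract=False)
class example_Gadget(SHACLExtensibleObject, SHACLObject):
    NODE_KIND = NodeKind.BlankNodeOrIRI
    NAMED_INDIVIDUALS = {}

g = example_Gadget()
g["https://example.com/ext#note"] = "kept even though it is not declared"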
| 1020 | |||
| 1021 | |||
| 1022 | class SHACLObjectSet(object): | ||
| 1023 | def __init__(self, objects=[], *, link=False): | ||
| 1024 | self.objects = set() | ||
| 1025 | self.missing_ids = set() | ||
| 1026 | for o in objects: | ||
| 1027 | self.objects.add(o) | ||
| 1028 | self.create_index() | ||
| 1029 | if link: | ||
| 1030 | self._link() | ||
| 1031 | |||
| 1032 | def create_index(self): | ||
| 1033 | """ | ||
| 1034 | (re)Create object index | ||
| 1035 | |||
| 1036 | Creates or recreates the indices for the object set to enable fast | ||
| 1037 | lookup. All objects and their children are walked and indexed | ||
| 1038 | """ | ||
| 1039 | self.obj_by_id = {} | ||
| 1040 | self.obj_by_type = {} | ||
| 1041 | for o in self.foreach(): | ||
| 1042 | self.add_index(o) | ||
| 1043 | |||
| 1044 | def add_index(self, obj): | ||
| 1045 | """ | ||
| 1046 | Add object to index | ||
| 1047 | |||
| 1048 | Adds the object to all appropriate indices | ||
| 1049 | """ | ||
| 1050 | |||
| 1051 | def reg_type(typ, compact, o, exact): | ||
| 1052 | self.obj_by_type.setdefault(typ, set()).add((exact, o)) | ||
| 1053 | if compact: | ||
| 1054 | self.obj_by_type.setdefault(compact, set()).add((exact, o)) | ||
| 1055 | |||
| 1056 | if not isinstance(obj, SHACLObject): | ||
| 1057 | raise TypeError("Object is not of type SHACLObject") | ||
| 1058 | |||
| 1059 | for typ in SHACLObject.CLASSES.values(): | ||
| 1060 | if isinstance(obj, typ): | ||
| 1061 | reg_type( | ||
| 1062 | typ._OBJ_TYPE, typ._OBJ_COMPACT_TYPE, obj, obj.__class__ is typ | ||
| 1063 | ) | ||
| 1064 | |||
| 1065 | # This covers custom extensions | ||
| 1066 | reg_type(obj.TYPE, obj.COMPACT_TYPE, obj, True) | ||
| 1067 | |||
| 1068 | if not obj._id: | ||
| 1069 | return | ||
| 1070 | |||
| 1071 | self.missing_ids.discard(obj._id) | ||
| 1072 | |||
| 1073 | if obj._id in self.obj_by_id: | ||
| 1074 | return | ||
| 1075 | |||
| 1076 | self.obj_by_id[obj._id] = obj | ||
| 1077 | |||
| 1078 | def add(self, obj): | ||
| 1079 | """ | ||
| 1080 | Add object to object set | ||
| 1081 | |||
| 1082 | Adds a SHACLObject to the object set and indexes it. | ||
| 1083 | |||
| 1084 | NOTE: Child objects of the attached object are not indexed | ||
| 1085 | """ | ||
| 1086 | if not isinstance(obj, SHACLObject): | ||
| 1087 | raise TypeError("Object is not of type SHACLObject") | ||
| 1088 | |||
| 1089 | if obj not in self.objects: | ||
| 1090 | self.objects.add(obj) | ||
| 1091 | self.add_index(obj) | ||
| 1092 | return obj | ||
| 1093 | |||
| 1094 | def update(self, *others): | ||
| 1095 | """ | ||
| 1096 | Update the object set, adding all objects from each of the other iterables | ||
| 1097 | """ | ||
| 1098 | for o in others: | ||
| 1099 | for obj in o: | ||
| 1100 | self.add(obj) | ||
| 1101 | |||
| 1102 | def __contains__(self, item): | ||
| 1103 | """ | ||
| 1104 | Returns True if the item is in the object set | ||
| 1105 | """ | ||
| 1106 | return item in self.objects | ||
| 1107 | |||
| 1108 | def link(self): | ||
| 1109 | """ | ||
| 1110 | Link object set | ||
| 1111 | |||
| 1112 | Links the objects in the object set by replacing string object | ||
| 1113 | references with references to the objects themselves, e.g. | ||
| 1114 | a property that references object "https://foo/bar" by a string | ||
| 1115 | reference will be replaced with an actual reference to the object | ||
| 1116 | with the same ID, if it exists in the object set | ||
| 1117 | |||
| 1118 | If multiple objects with the same ID are found, the duplicates are | ||
| 1119 | eliminated | ||
| 1120 | """ | ||
| 1121 | self.create_index() | ||
| 1122 | return self._link() | ||
| 1123 | |||
| 1124 | def _link(self): | ||
| 1125 | global NAMED_INDIVIDUALS | ||
| 1126 | |||
| 1127 | self.missing_ids = set() | ||
| 1128 | visited = set() | ||
| 1129 | |||
| 1130 | new_objects = set() | ||
| 1131 | |||
| 1132 | for o in self.objects: | ||
| 1133 | if o._id: | ||
| 1134 | o = self.find_by_id(o._id, o) | ||
| 1135 | o.link_helper(self, self.missing_ids, visited) | ||
| 1136 | new_objects.add(o) | ||
| 1137 | |||
| 1138 | self.objects = new_objects | ||
| 1139 | |||
| 1140 | # Remove blank nodes | ||
| 1141 | obj_by_id = {} | ||
| 1142 | for _id, obj in self.obj_by_id.items(): | ||
| 1143 | if _id.startswith("_:"): | ||
| 1144 | del obj._id | ||
| 1145 | else: | ||
| 1146 | obj_by_id[_id] = obj | ||
| 1147 | self.obj_by_id = obj_by_id | ||
| 1148 | |||
| 1149 | # Named individuals aren't considered missing | ||
| 1150 | self.missing_ids -= NAMED_INDIVIDUALS | ||
| 1151 | |||
| 1152 | return self.missing_ids | ||
| 1153 | |||
| 1154 | def find_by_id(self, _id, default=None): | ||
| 1155 | """ | ||
| 1156 | Find object by ID | ||
| 1157 | |||
| 1158 | Returns the object that matches the specified ID, or default if there is no | ||
| 1159 | object with the specified ID | ||
| 1160 | """ | ||
| 1161 | if _id not in self.obj_by_id: | ||
| 1162 | return default | ||
| 1163 | return self.obj_by_id[_id] | ||
| 1164 | |||
| 1165 | def foreach(self): | ||
| 1166 | """ | ||
| 1167 | Iterate over every object in the object set, and all child objects | ||
| 1168 | """ | ||
| 1169 | visited = set() | ||
| 1170 | for o in self.objects: | ||
| 1171 | if o not in visited: | ||
| 1172 | yield o | ||
| 1173 | visited.add(o) | ||
| 1174 | |||
| 1175 | for child in o.iter_objects(recursive=True, visited=visited): | ||
| 1176 | yield child | ||
| 1177 | |||
| 1178 | def foreach_type(self, typ, *, match_subclass=True): | ||
| 1179 | """ | ||
| 1180 | Iterate over each object of a specified type (or subclasses thereof) | ||
| 1181 | |||
| 1182 | If match_subclass is True, any class derived from typ will also match | ||
| 1183 | (similar to isinstance()). If False, only exact matches will be | ||
| 1184 | returned | ||
| 1185 | """ | ||
| 1186 | if not isinstance(typ, str): | ||
| 1187 | if not issubclass(typ, SHACLObject): | ||
| 1188 | raise TypeError(f"Type must be derived from SHACLObject, got {typ}") | ||
| 1189 | typ = typ._OBJ_TYPE | ||
| 1190 | |||
| 1191 | if typ not in self.obj_by_type: | ||
| 1192 | return | ||
| 1193 | |||
| 1194 | for exact, o in self.obj_by_type[typ]: | ||
| 1195 | if match_subclass or exact: | ||
| 1196 | yield o | ||
| 1197 | |||
| 1198 | def merge(self, *objectsets): | ||
| 1199 | """ | ||
| 1200 | Merge object sets | ||
| 1201 | |||
| 1202 | Returns a new object set that is the combination of this object set and | ||
| 1203 | all provided arguments | ||
| 1204 | """ | ||
| 1205 | new_objects = set() | ||
| 1206 | new_objects |= self.objects | ||
| 1207 | for d in objectsets: | ||
| 1208 | new_objects |= d.objects | ||
| 1209 | |||
| 1210 | return SHACLObjectSet(new_objects, link=True) | ||
| 1211 | |||
| 1212 | def encode(self, encoder, force_list=False, *, key=None): | ||
| 1213 | """ | ||
| 1214 | Serialize a list of objects to a serialization encoder | ||
| 1215 | |||
| 1216 | If force_list is true, a list will always be written using the encoder. | ||
| 1217 | """ | ||
| 1218 | ref_counts = {} | ||
| 1219 | state = EncodeState() | ||
| 1220 | |||
| 1221 | def walk_callback(value, path): | ||
| 1222 | nonlocal state | ||
| 1223 | nonlocal ref_counts | ||
| 1224 | |||
| 1225 | if not isinstance(value, SHACLObject): | ||
| 1226 | return True | ||
| 1227 | |||
| 1228 | # Remove blank node ID for re-assignment | ||
| 1229 | if value._id and value._id.startswith("_:"): | ||
| 1230 | del value._id | ||
| 1231 | |||
| 1232 | if value._id: | ||
| 1233 | state.add_refed(value) | ||
| 1234 | |||
| 1235 | # If the object is referenced more than once, add it to the set of | ||
| 1236 | # referenced objects | ||
| 1237 | ref_counts.setdefault(value, 0) | ||
| 1238 | ref_counts[value] += 1 | ||
| 1239 | if ref_counts[value] > 1: | ||
| 1240 | state.add_refed(value) | ||
| 1241 | return False | ||
| 1242 | |||
| 1243 | return True | ||
| 1244 | |||
| 1245 | for o in self.objects: | ||
| 1246 | if o._id: | ||
| 1247 | state.add_refed(o) | ||
| 1248 | o.walk(walk_callback) | ||
| 1249 | |||
| 1250 | use_list = force_list or len(self.objects) > 1 | ||
| 1251 | |||
| 1252 | if use_list: | ||
| 1253 | # If we are making a list add all the objects referred to by reference | ||
| 1254 | # to the list | ||
| 1255 | objects = list(self.objects | state.ref_objects) | ||
| 1256 | else: | ||
| 1257 | objects = list(self.objects) | ||
| 1258 | |||
| 1259 | objects.sort(key=key) | ||
| 1260 | |||
| 1261 | if use_list: | ||
| 1262 | # Ensure top level objects are only written in the top level graph | ||
| 1263 | # node, and referenced by ID everywhere else. This is done by setting | ||
| 1264 | # the flag that indicates this object has been written for all the top | ||
| 1265 | # level objects, then clearing it right before serializing the object. | ||
| 1266 | # | ||
| 1267 | # In this way, if an object is referenced before it is supposed to be | ||
| 1268 | # serialized into the @graph, it will serialize as a string instead of | ||
| 1269 | # the actual object | ||
| 1270 | for o in objects: | ||
| 1271 | state.written_objects.add(o) | ||
| 1272 | |||
| 1273 | with encoder.write_list() as list_s: | ||
| 1274 | for o in objects: | ||
| 1275 | # Allow this specific object to be written now | ||
| 1276 | state.written_objects.remove(o) | ||
| 1277 | with list_s.write_list_item() as item_s: | ||
| 1278 | o.encode(item_s, state) | ||
| 1279 | |||
| 1280 | elif objects: | ||
| 1281 | objects[0].encode(encoder, state) | ||
| 1282 | |||
| 1283 | def decode(self, decoder): | ||
| 1284 | self.create_index() | ||
| 1285 | |||
| 1286 | for obj_d in decoder.read_list(): | ||
| 1287 | o = SHACLObject.decode(obj_d, objectset=self) | ||
| 1288 | self.objects.add(o) | ||
| 1289 | |||
| 1290 | self._link() | ||
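# Sketch of typical object-set use, reusing the hypothetical example_Widget
# class from the sketch above (module assumed importable as oe.spdx30):
# objects are added and indexed, link() resolves string ID references and
# returns any that stay unresolved, and the index backs find_by_id() and
# foreach_type().
from oe.spdx30 import SHACLObjectSet

objset = SHACLObjectSet()

a = example_Widget(name="a")
a._id = "https://example.com/widgets/a"
objset.add(a)
objset.add(example_Widget(name="b"))

unresolved = objset.link()
assert objset.find_by_id("https://example.com/widgets/a") is a
widgets = list(objset.foreach_type(example_Widget))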
| 1291 | |||
| 1292 | |||
| 1293 | class EncodeState(object): | ||
| 1294 | def __init__(self): | ||
| 1295 | self.ref_objects = set() | ||
| 1296 | self.written_objects = set() | ||
| 1297 | self.blank_objects = {} | ||
| 1298 | |||
| 1299 | def get_object_id(self, o): | ||
| 1300 | if o._id: | ||
| 1301 | return o._id | ||
| 1302 | |||
| 1303 | if o not in self.blank_objects: | ||
| 1304 | _id = f"_:{o.__class__.__name__}{len(self.blank_objects)}" | ||
| 1305 | self.blank_objects[o] = _id | ||
| 1306 | |||
| 1307 | return self.blank_objects[o] | ||
| 1308 | |||
| 1309 | def is_refed(self, o): | ||
| 1310 | return o in self.ref_objects | ||
| 1311 | |||
| 1312 | def add_refed(self, o): | ||
| 1313 | self.ref_objects.add(o) | ||
| 1314 | |||
| 1315 | def is_written(self, o): | ||
| 1316 | return o in self.written_objects | ||
| 1317 | |||
| 1318 | def add_written(self, o): | ||
| 1319 | self.written_objects.add(o) | ||
| 1320 | |||
| 1321 | |||
| 1322 | class Decoder(ABC): | ||
| 1323 | @abstractmethod | ||
| 1324 | def read_value(self): | ||
| 1325 | """ | ||
| 1326 | Consume next item | ||
| 1327 | |||
| 1328 | Consumes the next item of any type | ||
| 1329 | """ | ||
| 1330 | pass | ||
| 1331 | |||
| 1332 | @abstractmethod | ||
| 1333 | def read_string(self): | ||
| 1334 | """ | ||
| 1335 | Consume the next item as a string. | ||
| 1336 | |||
| 1337 | Returns the string value of the next item, or `None` if the next item | ||
| 1338 | is not a string | ||
| 1339 | """ | ||
| 1340 | pass | ||
| 1341 | |||
| 1342 | @abstractmethod | ||
| 1343 | def read_datetime(self): | ||
| 1344 | """ | ||
| 1345 | Consumes the next item as a date & time string | ||
| 1346 | |||
| 1347 | Returns the string value of the next item, if it is an ISO datetime, or | ||
| 1348 | `None` if the next item is not an ISO datetime string. | ||
| 1349 | |||
| 1350 | Note that validation of the string is done by the caller, so a minimal | ||
| 1351 | implementation can just check if the next item is a string without | ||
| 1352 | worrying about the format | ||
| 1353 | """ | ||
| 1354 | pass | ||
| 1355 | |||
| 1356 | @abstractmethod | ||
| 1357 | def read_integer(self): | ||
| 1358 | """ | ||
| 1359 | Consumes the next item as an integer | ||
| 1360 | |||
| 1361 | Returns the integer value of the next item, or `None` if the next item | ||
| 1362 | is not an integer | ||
| 1363 | """ | ||
| 1364 | pass | ||
| 1365 | |||
| 1366 | @abstractmethod | ||
| 1367 | def read_iri(self): | ||
| 1368 | """ | ||
| 1369 | Consumes the next item as an IRI string | ||
| 1370 | |||
| 1371 | Returns the string value of the next item if it is an IRI, or `None` if the next | ||
| 1372 | item is not an IRI. | ||
| 1373 | |||
| 1374 | The returned string should be either a fully-qualified IRI, or a blank | ||
| 1375 | node ID | ||
| 1376 | """ | ||
| 1377 | pass | ||
| 1378 | |||
| 1379 | @abstractmethod | ||
| 1380 | def read_enum(self, e): | ||
| 1381 | """ | ||
| 1382 | Consumes the next item as an Enum value string | ||
| 1383 | |||
| 1384 | Returns the fully qualified IRI of the next enum item, or `None` if the | ||
| 1385 | next item is not an enum value. | ||
| 1386 | |||
| 1387 | The caller is responsible for validating that the returned IRI is | ||
| 1388 | actually a member of the specified Enum, so the `Decoder` does not need | ||
| 1389 | to check that, but can if it wishes | ||
| 1390 | """ | ||
| 1391 | pass | ||
| 1392 | |||
| 1393 | @abstractmethod | ||
| 1394 | def read_bool(self): | ||
| 1395 | """ | ||
| 1396 | Consume the next item as a boolean value | ||
| 1397 | |||
| 1398 | Returns the boolean value of the next item, or `None` if the next item | ||
| 1399 | is not a boolean | ||
| 1400 | """ | ||
| 1401 | pass | ||
| 1402 | |||
| 1403 | @abstractmethod | ||
| 1404 | def read_float(self): | ||
| 1405 | """ | ||
| 1406 | Consume the next item as a float value | ||
| 1407 | |||
| 1408 | Returns the float value of the next item, or `None` if the next item is | ||
| 1409 | not a float | ||
| 1410 | """ | ||
| 1411 | pass | ||
| 1412 | |||
| 1413 | @abstractmethod | ||
| 1414 | def read_list(self): | ||
| 1415 | """ | ||
| 1416 | Consume the next item as a list generator | ||
| 1417 | |||
| 1418 | This should generate a `Decoder` object for each item in the list. The | ||
| 1419 | generated `Decoder` can be used to read the corresponding item from the | ||
| 1420 | list | ||
| 1421 | """ | ||
| 1422 | pass | ||
| 1423 | |||
| 1424 | @abstractmethod | ||
| 1425 | def is_list(self): | ||
| 1426 | """ | ||
| 1427 | Checks if the next item is a list | ||
| 1428 | |||
| 1429 | Returns True if the next item is a list, or False if it is a scalar | ||
| 1430 | """ | ||
| 1431 | pass | ||
| 1432 | |||
| 1433 | @abstractmethod | ||
| 1434 | def read_object(self): | ||
| 1435 | """ | ||
| 1436 | Consume next item as an object | ||
| 1437 | |||
| 1438 | A context manager that "enters" the next item as an object and yields a | ||
| 1439 | `Decoder` that can read properties from it. If the next item is not an | ||
| 1440 | object, yields `None` | ||
| 1441 | |||
| 1442 | Properties will be read out of the object using `read_property` and | ||
| 1443 | `read_object_id` | ||
| 1444 | """ | ||
| 1445 | pass | ||
| 1446 | |||
| 1447 | @abstractmethod | ||
| 1448 | @contextmanager | ||
| 1449 | def read_property(self, key): | ||
| 1450 | """ | ||
| 1451 | Read property from object | ||
| 1452 | |||
| 1453 | A context manager that yields a `Decoder` that can be used to read the | ||
| 1454 | value of the property with the given key in current object, or `None` | ||
| 1455 | if the property does not exist in the current object. | ||
| 1456 | """ | ||
| 1457 | pass | ||
| 1458 | |||
| 1459 | @abstractmethod | ||
| 1460 | def object_keys(self): | ||
| 1461 | """ | ||
| 1462 | Read property keys from an object | ||
| 1463 | |||
| 1464 | Iterates over all the serialized keys for the current object | ||
| 1465 | """ | ||
| 1466 | pass | ||
| 1467 | |||
| 1468 | @abstractmethod | ||
| 1469 | def read_object_id(self, alias=None): | ||
| 1470 | """ | ||
| 1471 | Read current object ID property | ||
| 1472 | |||
| 1473 | Returns the ID of the current object if one is defined, or `None` if | ||
| 1474 | the current object has no ID. | ||
| 1475 | |||
| 1476 | The ID must be a fully qualified IRI or a blank node | ||
| 1477 | |||
| 1478 | If `alias` is provided, it is a hint as to another name by which the ID | ||
| 1479 | might be found, if the `Decoder` supports aliases for an ID | ||
| 1480 | """ | ||
| 1481 | pass | ||
| 1482 | |||
| 1483 | |||
| 1484 | class JSONLDDecoder(Decoder): | ||
| 1485 | def __init__(self, data, root=False): | ||
| 1486 | self.data = data | ||
| 1487 | self.root = root | ||
| 1488 | |||
| 1489 | def read_value(self): | ||
| 1490 | if isinstance(self.data, str): | ||
| 1491 | try: | ||
| 1492 | return float(self.data) | ||
| 1493 | except ValueError: | ||
| 1494 | pass | ||
| 1495 | return self.data | ||
| 1496 | |||
| 1497 | def read_string(self): | ||
| 1498 | if isinstance(self.data, str): | ||
| 1499 | return self.data | ||
| 1500 | return None | ||
| 1501 | |||
| 1502 | def read_datetime(self): | ||
| 1503 | return self.read_string() | ||
| 1504 | |||
| 1505 | def read_integer(self): | ||
| 1506 | if isinstance(self.data, int): | ||
| 1507 | return self.data | ||
| 1508 | return None | ||
| 1509 | |||
| 1510 | def read_bool(self): | ||
| 1511 | if isinstance(self.data, bool): | ||
| 1512 | return self.data | ||
| 1513 | return None | ||
| 1514 | |||
| 1515 | def read_float(self): | ||
| 1516 | if isinstance(self.data, (int, float, str)): | ||
| 1517 | return float(self.data) | ||
| 1518 | return None | ||
| 1519 | |||
| 1520 | def read_iri(self): | ||
| 1521 | if isinstance(self.data, str): | ||
| 1522 | return self.data | ||
| 1523 | return None | ||
| 1524 | |||
| 1525 | def read_enum(self, e): | ||
| 1526 | if isinstance(self.data, str): | ||
| 1527 | return self.data | ||
| 1528 | return None | ||
| 1529 | |||
| 1530 | def read_list(self): | ||
| 1531 | if self.is_list(): | ||
| 1532 | for v in self.data: | ||
| 1533 | yield self.__class__(v) | ||
| 1534 | else: | ||
| 1535 | yield self | ||
| 1536 | |||
| 1537 | def is_list(self): | ||
| 1538 | return isinstance(self.data, (list, tuple, set)) | ||
| 1539 | |||
| 1540 | def __get_value(self, *keys): | ||
| 1541 | for k in keys: | ||
| 1542 | if k and k in self.data: | ||
| 1543 | return self.data[k] | ||
| 1544 | return None | ||
| 1545 | |||
| 1546 | @contextmanager | ||
| 1547 | def read_property(self, key): | ||
| 1548 | v = self.__get_value(key) | ||
| 1549 | if v is not None: | ||
| 1550 | yield self.__class__(v) | ||
| 1551 | else: | ||
| 1552 | yield None | ||
| 1553 | |||
| 1554 | def object_keys(self): | ||
| 1555 | for key in self.data.keys(): | ||
| 1556 | if key in ("@type", "type"): | ||
| 1557 | continue | ||
| 1558 | if self.root and key == "@context": | ||
| 1559 | continue | ||
| 1560 | yield key | ||
| 1561 | |||
| 1562 | def read_object(self): | ||
| 1563 | typ = self.__get_value("@type", "type") | ||
| 1564 | if typ is not None: | ||
| 1565 | return typ, self | ||
| 1566 | |||
| 1567 | return None, self | ||
| 1568 | |||
| 1569 | def read_object_id(self, alias=None): | ||
| 1570 | return self.__get_value(alias, "@id") | ||
| 1571 | |||
| 1572 | |||
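Before the deserializer below, a brief hedged sketch of how the `JSONLDDecoder` above can be driven by hand; the dictionary is made up for illustration and only uses methods visible in the class above.

# Sketch only: drive JSONLDDecoder directly on an in-memory fragment.
# The example dictionary is hypothetical.
data = {
    "type": "ai_EnergyConsumptionDescription",
    "ai_energyQuantity": 1.5,
    "ai_energyUnit": "kilowattHour",
}
dec = JSONLDDecoder(data, root=True)
typ, obj_dec = dec.read_object()               # ("ai_EnergyConsumptionDescription", dec)
with obj_dec.read_property("ai_energyQuantity") as p:
    quantity = p.read_float()                  # 1.5
with obj_dec.read_property("ai_energyUnit") as p:
    unit = p.read_enum(None)                   # enum values decode as plain strings
keys = list(obj_dec.object_keys())             # ["ai_energyQuantity", "ai_energyUnit"]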
| 1573 | class JSONLDDeserializer(object): | ||
| 1574 | def deserialize_data(self, data, objectset: SHACLObjectSet): | ||
| 1575 | if "@graph" in data: | ||
| 1576 | h = JSONLDDecoder(data["@graph"], True) | ||
| 1577 | else: | ||
| 1578 | h = JSONLDDecoder(data, True) | ||
| 1579 | |||
| 1580 | objectset.decode(h) | ||
| 1581 | |||
| 1582 | def read(self, f, objectset: SHACLObjectSet): | ||
| 1583 | data = json.load(f) | ||
| 1584 | self.deserialize_data(data, objectset) | ||
| 1585 | |||
| 1586 | |||
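A short usage sketch for the deserializer; the file name is hypothetical, and `SHACLObjectSet()` taking no arguments is an assumption about the constructor defined earlier in this file.

# Sketch: load an SPDX 3 JSON-LD document into an object set.
objectset = SHACLObjectSet()                 # assumption: no-argument construction
with open("sbom.spdx.json", "r") as f:       # hypothetical input path
    JSONLDDeserializer().read(f, objectset)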
| 1587 | class Encoder(ABC): | ||
| 1588 | @abstractmethod | ||
| 1589 | def write_string(self, v): | ||
| 1590 | """ | ||
| 1591 | Write a string value | ||
| 1592 | |||
| 1593 | Encodes the value as a string in the output | ||
| 1594 | """ | ||
| 1595 | pass | ||
| 1596 | |||
| 1597 | @abstractmethod | ||
| 1598 | def write_datetime(self, v): | ||
| 1599 | """ | ||
| 1600 | Write a date & time string | ||
| 1601 | |||
| 1602 | Encodes the value as an ISO datetime string | ||
| 1603 | |||
| 1604 | Note: The provided string is already correctly encoded as an ISO datetime | ||
| 1605 | """ | ||
| 1606 | pass | ||
| 1607 | |||
| 1608 | @abstractmethod | ||
| 1609 | def write_integer(self, v): | ||
| 1610 | """ | ||
| 1611 | Write an integer value | ||
| 1612 | |||
| 1613 | Encodes the value as an integer in the output | ||
| 1614 | """ | ||
| 1615 | pass | ||
| 1616 | |||
| 1617 | @abstractmethod | ||
| 1618 | def write_iri(self, v, compact=None): | ||
| 1619 | """ | ||
| 1620 | Write IRI | ||
| 1621 | |||
| 1622 | Encodes the string as an IRI. Note that the string will be either a | ||
| 1623 | fully qualified IRI or a blank node ID. If `compact` is provided and | ||
| 1624 | the serialization supports compacted IRIs, it should be preferred to | ||
| 1625 | the full IRI | ||
| 1626 | """ | ||
| 1627 | pass | ||
| 1628 | |||
| 1629 | @abstractmethod | ||
| 1630 | def write_enum(self, v, e, compact=None): | ||
| 1631 | """ | ||
| 1632 | Write enum value IRI | ||
| 1633 | |||
| 1634 | Encodes the string enum value IRI. Note that the string will be a fully | ||
| 1635 | qualified IRI. If `compact` is provided and the serialization supports | ||
| 1636 | compacted IRIs, it should be preferred to the full IRI. | ||
| 1637 | """ | ||
| 1638 | pass | ||
| 1639 | |||
| 1640 | @abstractmethod | ||
| 1641 | def write_bool(self, v): | ||
| 1642 | """ | ||
| 1643 | Write boolean | ||
| 1644 | |||
| 1645 | Encodes the value as a boolean in the output | ||
| 1646 | """ | ||
| 1647 | pass | ||
| 1648 | |||
| 1649 | @abstractmethod | ||
| 1650 | def write_float(self, v): | ||
| 1651 | """ | ||
| 1652 | Write float | ||
| 1653 | |||
| 1654 | Encodes the value as a floating point number in the output | ||
| 1655 | """ | ||
| 1656 | pass | ||
| 1657 | |||
| 1658 | @abstractmethod | ||
| 1659 | @contextmanager | ||
| 1660 | def write_object(self, o, _id, needs_id): | ||
| 1661 | """ | ||
| 1662 | Write object | ||
| 1663 | |||
| 1664 | A context manager that yields an `Encoder` that can be used to encode | ||
| 1665 | the given object properties. | ||
| 1666 | |||
| 1667 | The provided ID will always be a valid ID (even if o._id is `None`), in | ||
| 1668 | case the `Encoder` _must_ have an ID. `needs_id` is a hint to indicate | ||
| 1669 | to the `Encoder` if an ID must be written or not (if that is even an | ||
| 1670 | option). If it is `True`, the `Encoder` must encode an ID for the | ||
| 1671 | object. If `False`, the encoder is not required to encode an ID and may | ||
| 1672 | omit it. | ||
| 1673 | |||
| 1674 | The ID will be either a fully qualified IRI, or a blank node IRI. | ||
| 1675 | |||
| 1676 | Properties will be written to the object using `write_property` | ||
| 1677 | """ | ||
| 1678 | pass | ||
| 1679 | |||
| 1680 | @abstractmethod | ||
| 1681 | @contextmanager | ||
| 1682 | def write_property(self, iri, compact=None): | ||
| 1683 | """ | ||
| 1684 | Write object property | ||
| 1685 | |||
| 1686 | A context manager that yields an `Encoder` that can be used to encode | ||
| 1687 | the value for the property with the given IRI in the current object | ||
| 1688 | |||
| 1689 | Note that the IRI will be fully qualified. If `compact` is provided and | ||
| 1690 | the serialization supports compacted IRIs, it should be preferred to | ||
| 1691 | the full IRI. | ||
| 1692 | """ | ||
| 1693 | pass | ||
| 1694 | |||
| 1695 | @abstractmethod | ||
| 1696 | @contextmanager | ||
| 1697 | def write_list(self): | ||
| 1698 | """ | ||
| 1699 | Write list | ||
| 1700 | |||
| 1701 | A context manager that yields an `Encoder` that can be used to encode a | ||
| 1702 | list. | ||
| 1703 | |||
| 1704 | Each item of the list will be added using `write_list_item` | ||
| 1705 | """ | ||
| 1706 | pass | ||
| 1707 | |||
| 1708 | @abstractmethod | ||
| 1709 | @contextmanager | ||
| 1710 | def write_list_item(self): | ||
| 1711 | """ | ||
| 1712 | Write list item | ||
| 1713 | |||
| 1714 | A context manager that yields an `Encoder` that can be used to encode | ||
| 1715 | the value for a list item | ||
| 1716 | """ | ||
| 1717 | pass | ||
| 1718 | |||
| 1719 | |||
| 1720 | class JSONLDEncoder(Encoder): | ||
| 1721 | def __init__(self, data=None): | ||
| 1722 | self.data = data | ||
| 1723 | |||
| 1724 | def write_string(self, v): | ||
| 1725 | self.data = v | ||
| 1726 | |||
| 1727 | def write_datetime(self, v): | ||
| 1728 | self.data = v | ||
| 1729 | |||
| 1730 | def write_integer(self, v): | ||
| 1731 | self.data = v | ||
| 1732 | |||
| 1733 | def write_iri(self, v, compact=None): | ||
| 1734 | self.write_string(compact or v) | ||
| 1735 | |||
| 1736 | def write_enum(self, v, e, compact=None): | ||
| 1737 | self.write_string(compact or v) | ||
| 1738 | |||
| 1739 | def write_bool(self, v): | ||
| 1740 | self.data = v | ||
| 1741 | |||
| 1742 | def write_float(self, v): | ||
| 1743 | self.data = str(v) | ||
| 1744 | |||
| 1745 | @contextmanager | ||
| 1746 | def write_property(self, iri, compact=None): | ||
| 1747 | s = self.__class__(None) | ||
| 1748 | yield s | ||
| 1749 | if s.data is not None: | ||
| 1750 | self.data[compact or iri] = s.data | ||
| 1751 | |||
| 1752 | @contextmanager | ||
| 1753 | def write_object(self, o, _id, needs_id): | ||
| 1754 | self.data = { | ||
| 1755 | "type": o.COMPACT_TYPE or o.TYPE, | ||
| 1756 | } | ||
| 1757 | if needs_id: | ||
| 1758 | self.data[o.ID_ALIAS or "@id"] = _id | ||
| 1759 | yield self | ||
| 1760 | |||
| 1761 | @contextmanager | ||
| 1762 | def write_list(self): | ||
| 1763 | self.data = [] | ||
| 1764 | yield self | ||
| 1765 | if not self.data: | ||
| 1766 | self.data = None | ||
| 1767 | |||
| 1768 | @contextmanager | ||
| 1769 | def write_list_item(self): | ||
| 1770 | s = self.__class__(None) | ||
| 1771 | yield s | ||
| 1772 | if s.data is not None: | ||
| 1773 | self.data.append(s.data) | ||
| 1774 | |||
| 1775 | |||
| 1776 | class JSONLDSerializer(object): | ||
| 1777 | def __init__(self, **args): | ||
| 1778 | self.args = args | ||
| 1779 | |||
| 1780 | def serialize_data( | ||
| 1781 | self, | ||
| 1782 | objectset: SHACLObjectSet, | ||
| 1783 | force_at_graph=False, | ||
| 1784 | ): | ||
| 1785 | h = JSONLDEncoder() | ||
| 1786 | objectset.encode(h, force_at_graph) | ||
| 1787 | data = {} | ||
| 1788 | if len(CONTEXT_URLS) == 1: | ||
| 1789 | data["@context"] = CONTEXT_URLS[0] | ||
| 1790 | elif CONTEXT_URLS: | ||
| 1791 | data["@context"] = CONTEXT_URLS | ||
| 1792 | |||
| 1793 | if isinstance(h.data, list): | ||
| 1794 | data["@graph"] = h.data | ||
| 1795 | else: | ||
| 1796 | for k, v in h.data.items(): | ||
| 1797 | data[k] = v | ||
| 1798 | |||
| 1799 | return data | ||
| 1800 | |||
| 1801 | def write( | ||
| 1802 | self, | ||
| 1803 | objectset: SHACLObjectSet, | ||
| 1804 | f, | ||
| 1805 | force_at_graph=False, | ||
| 1806 | **kwargs, | ||
| 1807 | ): | ||
| 1808 | """ | ||
| 1809 | Write a SHACLObjectSet to a JSON LD file | ||
| 1810 | |||
| 1811 | If force_at_graph is True, a @graph node will always be written | ||
| 1812 | """ | ||
| 1813 | data = self.serialize_data(objectset, force_at_graph) | ||
| 1814 | |||
| 1815 | args = {**self.args, **kwargs} | ||
| 1816 | |||
| 1817 | sha1 = hashlib.sha1() | ||
| 1818 | for chunk in json.JSONEncoder(**args).iterencode(data): | ||
| 1819 | chunk = chunk.encode("utf-8") | ||
| 1820 | f.write(chunk) | ||
| 1821 | sha1.update(chunk) | ||
| 1822 | |||
| 1823 | return sha1.hexdigest() | ||
| 1824 | |||
| 1825 | |||
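A usage sketch for the serializer. Extra keyword arguments are forwarded to `json.JSONEncoder`, so `indent` and `sort_keys` behave as in the standard `json` module; the output file must be opened in binary mode because `write` encodes each chunk to UTF-8 itself, and the return value is the SHA-1 of the serialized bytes. `objectset` stands for a populated `SHACLObjectSet` (for example the one loaded in the deserializer sketch above).

# Sketch: serialize a populated SHACLObjectSet to JSON-LD.
serializer = JSONLDSerializer(indent=2, sort_keys=True)   # forwarded to json.JSONEncoder
with open("sbom.pretty.spdx.json", "wb") as f:            # binary mode: chunks are UTF-8 encoded
    digest = serializer.write(objectset, f)               # SHA-1 hex digest of the output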
| 1826 | class JSONLDInlineEncoder(Encoder): | ||
| 1827 | def __init__(self, f, sha1): | ||
| 1828 | self.f = f | ||
| 1829 | self.comma = False | ||
| 1830 | self.sha1 = sha1 | ||
| 1831 | |||
| 1832 | def write(self, s): | ||
| 1833 | s = s.encode("utf-8") | ||
| 1834 | self.f.write(s) | ||
| 1835 | self.sha1.update(s) | ||
| 1836 | |||
| 1837 | def _write_comma(self): | ||
| 1838 | if self.comma: | ||
| 1839 | self.write(",") | ||
| 1840 | self.comma = False | ||
| 1841 | |||
| 1842 | def write_string(self, v): | ||
| 1843 | self.write(json.dumps(v)) | ||
| 1844 | |||
| 1845 | def write_datetime(self, v): | ||
| 1846 | self.write_string(v) | ||
| 1847 | |||
| 1848 | def write_integer(self, v): | ||
| 1849 | self.write(f"{v}") | ||
| 1850 | |||
| 1851 | def write_iri(self, v, compact=None): | ||
| 1852 | self.write_string(compact or v) | ||
| 1853 | |||
| 1854 | def write_enum(self, v, e, compact=None): | ||
| 1855 | self.write_iri(v, compact) | ||
| 1856 | |||
| 1857 | def write_bool(self, v): | ||
| 1858 | if v: | ||
| 1859 | self.write("true") | ||
| 1860 | else: | ||
| 1861 | self.write("false") | ||
| 1862 | |||
| 1863 | def write_float(self, v): | ||
| 1864 | self.write(json.dumps(str(v))) | ||
| 1865 | |||
| 1866 | @contextmanager | ||
| 1867 | def write_property(self, iri, compact=None): | ||
| 1868 | self._write_comma() | ||
| 1869 | self.write_string(compact or iri) | ||
| 1870 | self.write(":") | ||
| 1871 | yield self | ||
| 1872 | self.comma = True | ||
| 1873 | |||
| 1874 | @contextmanager | ||
| 1875 | def write_object(self, o, _id, needs_id): | ||
| 1876 | self._write_comma() | ||
| 1877 | |||
| 1878 | self.write("{") | ||
| 1879 | self.write_string("type") | ||
| 1880 | self.write(":") | ||
| 1881 | self.write_string(o.COMPACT_TYPE or o.TYPE) | ||
| 1882 | self.comma = True | ||
| 1883 | |||
| 1884 | if needs_id: | ||
| 1885 | self._write_comma() | ||
| 1886 | self.write_string(o.ID_ALIAS or "@id") | ||
| 1887 | self.write(":") | ||
| 1888 | self.write_string(_id) | ||
| 1889 | self.comma = True | ||
| 1890 | |||
| 1891 | self.comma = True | ||
| 1892 | yield self | ||
| 1893 | |||
| 1894 | self.write("}") | ||
| 1895 | self.comma = True | ||
| 1896 | |||
| 1897 | @contextmanager | ||
| 1898 | def write_list(self): | ||
| 1899 | self._write_comma() | ||
| 1900 | self.write("[") | ||
| 1901 | yield self.__class__(self.f, self.sha1) | ||
| 1902 | self.write("]") | ||
| 1903 | self.comma = True | ||
| 1904 | |||
| 1905 | @contextmanager | ||
| 1906 | def write_list_item(self): | ||
| 1907 | self._write_comma() | ||
| 1908 | yield self.__class__(self.f, self.sha1) | ||
| 1909 | self.comma = True | ||
| 1910 | |||
| 1911 | |||
| 1912 | class JSONLDInlineSerializer(object): | ||
| 1913 | def write( | ||
| 1914 | self, | ||
| 1915 | objectset: SHACLObjectSet, | ||
| 1916 | f, | ||
| 1917 | force_at_graph=False, | ||
| 1918 | ): | ||
| 1919 | """ | ||
| 1920 | Write a SHACLObjectSet to a JSON LD file | ||
| 1921 | |||
| 1922 | Note: force_at_graph is included for compatibility, but ignored. This | ||
| 1923 | serializer always writes out a graph | ||
| 1924 | """ | ||
| 1925 | sha1 = hashlib.sha1() | ||
| 1926 | h = JSONLDInlineEncoder(f, sha1) | ||
| 1927 | h.write('{"@context":') | ||
| 1928 | if len(CONTEXT_URLS) == 1: | ||
| 1929 | h.write(f'"{CONTEXT_URLS[0]}"') | ||
| 1930 | elif CONTEXT_URLS: | ||
| 1931 | h.write('["') | ||
| 1932 | h.write('","'.join(CONTEXT_URLS)) | ||
| 1933 | h.write('"]') | ||
| 1934 | h.write(",") | ||
| 1935 | |||
| 1936 | h.write('"@graph":') | ||
| 1937 | |||
| 1938 | objectset.encode(h, True) | ||
| 1939 | h.write("}") | ||
| 1940 | return sha1.hexdigest() | ||
| 1941 | |||
| 1942 | |||
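The inline serializer streams straight to the file handle instead of building the whole document in memory; a hedged sketch, again writing in binary mode and reusing a populated `objectset`:

with open("sbom.inline.spdx.json", "wb") as f:
    digest = JSONLDInlineSerializer().write(objectset, f)  # always emits a "@graph" node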
| 1943 | def print_tree(objects, all_fields=False): | ||
| 1944 | """ | ||
| 1945 | Print object tree | ||
| 1946 | """ | ||
| 1947 | seen = set() | ||
| 1948 | |||
| 1949 | def callback(value, path): | ||
| 1950 | nonlocal seen | ||
| 1951 | |||
| 1952 | s = (" " * (len(path) - 1)) + f"{path[-1]}" | ||
| 1953 | if isinstance(value, SHACLObject): | ||
| 1954 | s += f" {value} ({id(value)})" | ||
| 1955 | is_empty = False | ||
| 1956 | elif isinstance(value, ListProxy): | ||
| 1957 | is_empty = len(value) == 0 | ||
| 1958 | if is_empty: | ||
| 1959 | s += " []" | ||
| 1960 | else: | ||
| 1961 | s += f" {value!r}" | ||
| 1962 | is_empty = value is None | ||
| 1963 | |||
| 1964 | if all_fields or not is_empty: | ||
| 1965 | print(s) | ||
| 1966 | |||
| 1967 | if isinstance(value, SHACLObject): | ||
| 1968 | if value in seen: | ||
| 1969 | return False | ||
| 1970 | seen.add(value) | ||
| 1971 | return True | ||
| 1972 | |||
| 1973 | return True | ||
| 1974 | |||
| 1975 | for o in objects: | ||
| 1976 | o.walk(callback) | ||
| 1977 | |||
| 1978 | |||
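A short usage note for the debugging helper above; `doc` is a hypothetical `SHACLObject` instance.

# Sketch: dump the property tree of one or more objects for debugging.
print_tree([doc], all_fields=False)
print_tree([doc], all_fields=True)   # also show empty properties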
| 1979 | # fmt: off | ||
| 1980 | """Format Guard""" | ||
| 1981 | |||
| 1982 | |||
| 1983 | CONTEXT_URLS = [ | ||
| 1984 | "https://spdx.org/rdf/3.0.1/spdx-context.jsonld", | ||
| 1985 | ] | ||
| 1986 | |||
| 1987 | |||
| 1988 | # CLASSES | ||
| 1989 | # A class for describing the energy consumption incurred by an AI model in | ||
| 1990 | # different stages of its lifecycle. | ||
| 1991 | @register("https://spdx.org/rdf/3.0.1/terms/AI/EnergyConsumption", compact_type="ai_EnergyConsumption", abstract=False) | ||
| 1992 | class ai_EnergyConsumption(SHACLObject): | ||
| 1993 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 1994 | NAMED_INDIVIDUALS = { | ||
| 1995 | } | ||
| 1996 | |||
| 1997 | @classmethod | ||
| 1998 | def _register_props(cls): | ||
| 1999 | super()._register_props() | ||
| 2000 | # Specifies the amount of energy consumed when finetuning the AI model that is | ||
| 2001 | # being used in the AI system. | ||
| 2002 | cls._add_property( | ||
| 2003 | "ai_finetuningEnergyConsumption", | ||
| 2004 | ListProp(ObjectProp(ai_EnergyConsumptionDescription, False)), | ||
| 2005 | iri="https://spdx.org/rdf/3.0.1/terms/AI/finetuningEnergyConsumption", | ||
| 2006 | compact="ai_finetuningEnergyConsumption", | ||
| 2007 | ) | ||
| 2008 | # Specifies the amount of energy consumed during inference time by an AI model | ||
| 2009 | # that is being used in the AI system. | ||
| 2010 | cls._add_property( | ||
| 2011 | "ai_inferenceEnergyConsumption", | ||
| 2012 | ListProp(ObjectProp(ai_EnergyConsumptionDescription, False)), | ||
| 2013 | iri="https://spdx.org/rdf/3.0.1/terms/AI/inferenceEnergyConsumption", | ||
| 2014 | compact="ai_inferenceEnergyConsumption", | ||
| 2015 | ) | ||
| 2016 | # Specifies the amount of energy consumed when training the AI model that is | ||
| 2017 | # being used in the AI system. | ||
| 2018 | cls._add_property( | ||
| 2019 | "ai_trainingEnergyConsumption", | ||
| 2020 | ListProp(ObjectProp(ai_EnergyConsumptionDescription, False)), | ||
| 2021 | iri="https://spdx.org/rdf/3.0.1/terms/AI/trainingEnergyConsumption", | ||
| 2022 | compact="ai_trainingEnergyConsumption", | ||
| 2023 | ) | ||
| 2024 | |||
| 2025 | |||
| 2026 | # The class that helps note down the quantity of energy consumption and the unit | ||
| 2027 | # used for measurement. | ||
| 2028 | @register("https://spdx.org/rdf/3.0.1/terms/AI/EnergyConsumptionDescription", compact_type="ai_EnergyConsumptionDescription", abstract=False) | ||
| 2029 | class ai_EnergyConsumptionDescription(SHACLObject): | ||
| 2030 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 2031 | NAMED_INDIVIDUALS = { | ||
| 2032 | } | ||
| 2033 | |||
| 2034 | @classmethod | ||
| 2035 | def _register_props(cls): | ||
| 2036 | super()._register_props() | ||
| 2037 | # Represents the energy quantity. | ||
| 2038 | cls._add_property( | ||
| 2039 | "ai_energyQuantity", | ||
| 2040 | FloatProp(), | ||
| 2041 | iri="https://spdx.org/rdf/3.0.1/terms/AI/energyQuantity", | ||
| 2042 | min_count=1, | ||
| 2043 | compact="ai_energyQuantity", | ||
| 2044 | ) | ||
| 2045 | # Specifies the unit in which energy is measured. | ||
| 2046 | cls._add_property( | ||
| 2047 | "ai_energyUnit", | ||
| 2048 | EnumProp([ | ||
| 2049 | ("https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/kilowattHour", "kilowattHour"), | ||
| 2050 | ("https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/megajoule", "megajoule"), | ||
| 2051 | ("https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/other", "other"), | ||
| 2052 | ]), | ||
| 2053 | iri="https://spdx.org/rdf/3.0.1/terms/AI/energyUnit", | ||
| 2054 | min_count=1, | ||
| 2055 | compact="ai_energyUnit", | ||
| 2056 | ) | ||
| 2057 | |||
| 2058 | |||
| 2059 | # Specifies the unit of energy consumption. | ||
| 2060 | @register("https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType", compact_type="ai_EnergyUnitType", abstract=False) | ||
| 2061 | class ai_EnergyUnitType(SHACLObject): | ||
| 2062 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 2063 | NAMED_INDIVIDUALS = { | ||
| 2064 | "kilowattHour": "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/kilowattHour", | ||
| 2065 | "megajoule": "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/megajoule", | ||
| 2066 | "other": "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/other", | ||
| 2067 | } | ||
| 2068 | # Kilowatt-hour. | ||
| 2069 | kilowattHour = "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/kilowattHour" | ||
| 2070 | # Megajoule. | ||
| 2071 | megajoule = "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/megajoule" | ||
| 2072 | # Any other units of energy measurement. | ||
| 2073 | other = "https://spdx.org/rdf/3.0.1/terms/AI/EnergyUnitType/other" | ||
| 2074 | |||
| 2075 | |||
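To illustrate how these generated bindings are used, here is a hedged sketch that fills in the AI energy-consumption classes above. It assumes list-valued properties (registered with ListProp) behave like Python lists via the ListProxy type referenced earlier, and that enum-valued properties accept the IRI constants defined on the enum classes.

# Sketch: build an energy-consumption record with the classes above.
desc = ai_EnergyConsumptionDescription()
desc.ai_energyQuantity = 12.5                         # FloatProp, min_count=1
desc.ai_energyUnit = ai_EnergyUnitType.kilowattHour   # EnumProp, min_count=1

consumption = ai_EnergyConsumption()
# assumption: ListProp-backed attributes expose list-style append()
consumption.ai_trainingEnergyConsumption.append(desc)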
| 2076 | # Specifies the safety risk level. | ||
| 2077 | @register("https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType", compact_type="ai_SafetyRiskAssessmentType", abstract=False) | ||
| 2078 | class ai_SafetyRiskAssessmentType(SHACLObject): | ||
| 2079 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 2080 | NAMED_INDIVIDUALS = { | ||
| 2081 | "high": "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/high", | ||
| 2082 | "low": "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/low", | ||
| 2083 | "medium": "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/medium", | ||
| 2084 | "serious": "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/serious", | ||
| 2085 | } | ||
| 2086 | # The second-highest level of risk posed by an AI system. | ||
| 2087 | high = "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/high" | ||
| 2088 | # Low/no risk is posed by an AI system. | ||
| 2089 | low = "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/low" | ||
| 2090 | # The third-highest level of risk posed by an AI system. | ||
| 2091 | medium = "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/medium" | ||
| 2092 | # The highest level of risk posed by an AI system. | ||
| 2093 | serious = "https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/serious" | ||
| 2094 | |||
| 2095 | |||
| 2096 | # Specifies the type of an annotation. | ||
| 2097 | @register("https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType", compact_type="AnnotationType", abstract=False) | ||
| 2098 | class AnnotationType(SHACLObject): | ||
| 2099 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 2100 | NAMED_INDIVIDUALS = { | ||
| 2101 | "other": "https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/other", | ||
| 2102 | "review": "https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/review", | ||
| 2103 | } | ||
| 2104 | # Used to store extra information about an Element which is not part of a review (e.g. extra information provided during the creation of the Element). | ||
| 2105 | other = "https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/other" | ||
| 2106 | # Used when someone reviews the Element. | ||
| 2107 | review = "https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/review" | ||
| 2108 | |||
| 2109 | |||
| 2110 | # Provides information about the creation of the Element. | ||
| 2111 | @register("https://spdx.org/rdf/3.0.1/terms/Core/CreationInfo", compact_type="CreationInfo", abstract=False) | ||
| 2112 | class CreationInfo(SHACLObject): | ||
| 2113 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 2114 | NAMED_INDIVIDUALS = { | ||
| 2115 | } | ||
| 2116 | |||
| 2117 | @classmethod | ||
| 2118 | def _register_props(cls): | ||
| 2119 | super()._register_props() | ||
| 2120 | # Provide consumers with comments by the creator of the Element about the | ||
| 2121 | # Element. | ||
| 2122 | cls._add_property( | ||
| 2123 | "comment", | ||
| 2124 | StringProp(), | ||
| 2125 | iri="https://spdx.org/rdf/3.0.1/terms/Core/comment", | ||
| 2126 | compact="comment", | ||
| 2127 | ) | ||
| 2128 | # Identifies when the Element was originally created. | ||
| 2129 | cls._add_property( | ||
| 2130 | "created", | ||
| 2131 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
| 2132 | iri="https://spdx.org/rdf/3.0.1/terms/Core/created", | ||
| 2133 | min_count=1, | ||
| 2134 | compact="created", | ||
| 2135 | ) | ||
| 2136 | # Identifies who or what created the Element. | ||
| 2137 | cls._add_property( | ||
| 2138 | "createdBy", | ||
| 2139 | ListProp(ObjectProp(Agent, False, context=[ | ||
| 2140 | ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"), | ||
| 2141 | ],)), | ||
| 2142 | iri="https://spdx.org/rdf/3.0.1/terms/Core/createdBy", | ||
| 2143 | min_count=1, | ||
| 2144 | compact="createdBy", | ||
| 2145 | ) | ||
| 2146 | # Identifies the tooling that was used during the creation of the Element. | ||
| 2147 | cls._add_property( | ||
| 2148 | "createdUsing", | ||
| 2149 | ListProp(ObjectProp(Tool, False)), | ||
| 2150 | iri="https://spdx.org/rdf/3.0.1/terms/Core/createdUsing", | ||
| 2151 | compact="createdUsing", | ||
| 2152 | ) | ||
| 2153 | # Provides a reference number that can be used to understand how to parse and | ||
| 2154 | # interpret an Element. | ||
| 2155 | cls._add_property( | ||
| 2156 | "specVersion", | ||
| 2157 | StringProp(pattern=r"^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$",), | ||
| 2158 | iri="https://spdx.org/rdf/3.0.1/terms/Core/specVersion", | ||
| 2159 | min_count=1, | ||
| 2160 | compact="specVersion", | ||
| 2161 | ) | ||
| 2162 | |||
| 2163 | |||
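A hedged sketch of filling in a CreationInfo. Per the property registrations above, created, createdBy and specVersion are mandatory (min_count=1); whether DateTimeStampProp accepts timezone-aware datetime objects is an assumption here, and createdBy is left out because it needs an Agent instance defined elsewhere in this file.

from datetime import datetime, timezone

info = CreationInfo()
info.specVersion = "3.0.1"
info.created = datetime.now(timezone.utc)   # assumption: datetime values are accepted
info.comment = "illustrative only; createdBy (an Agent) is still required"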
| 2164 | # A key with an associated value. | ||
| 2165 | @register("https://spdx.org/rdf/3.0.1/terms/Core/DictionaryEntry", compact_type="DictionaryEntry", abstract=False) | ||
| 2166 | class DictionaryEntry(SHACLObject): | ||
| 2167 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 2168 | NAMED_INDIVIDUALS = { | ||
| 2169 | } | ||
| 2170 | |||
| 2171 | @classmethod | ||
| 2172 | def _register_props(cls): | ||
| 2173 | super()._register_props() | ||
| 2174 | # A key used in a generic key-value pair. | ||
| 2175 | cls._add_property( | ||
| 2176 | "key", | ||
| 2177 | StringProp(), | ||
| 2178 | iri="https://spdx.org/rdf/3.0.1/terms/Core/key", | ||
| 2179 | min_count=1, | ||
| 2180 | compact="key", | ||
| 2181 | ) | ||
| 2182 | # A value used in a generic key-value pair. | ||
| 2183 | cls._add_property( | ||
| 2184 | "value", | ||
| 2185 | StringProp(), | ||
| 2186 | iri="https://spdx.org/rdf/3.0.1/terms/Core/value", | ||
| 2187 | compact="value", | ||
| 2188 | ) | ||
| 2189 | |||
| 2190 | |||
| 2191 | # Base domain class from which all other SPDX-3.0 domain classes derive. | ||
| 2192 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Element", compact_type="Element", abstract=True) | ||
| 2193 | class Element(SHACLObject): | ||
| 2194 | NODE_KIND = NodeKind.IRI | ||
| 2195 | ID_ALIAS = "spdxId" | ||
| 2196 | NAMED_INDIVIDUALS = { | ||
| 2197 | } | ||
| 2198 | |||
| 2199 | @classmethod | ||
| 2200 | def _register_props(cls): | ||
| 2201 | super()._register_props() | ||
| 2202 | # Provide consumers with comments by the creator of the Element about the | ||
| 2203 | # Element. | ||
| 2204 | cls._add_property( | ||
| 2205 | "comment", | ||
| 2206 | StringProp(), | ||
| 2207 | iri="https://spdx.org/rdf/3.0.1/terms/Core/comment", | ||
| 2208 | compact="comment", | ||
| 2209 | ) | ||
| 2210 | # Provides information about the creation of the Element. | ||
| 2211 | cls._add_property( | ||
| 2212 | "creationInfo", | ||
| 2213 | ObjectProp(CreationInfo, True), | ||
| 2214 | iri="https://spdx.org/rdf/3.0.1/terms/Core/creationInfo", | ||
| 2215 | min_count=1, | ||
| 2216 | compact="creationInfo", | ||
| 2217 | ) | ||
| 2218 | # Provides a detailed description of the Element. | ||
| 2219 | cls._add_property( | ||
| 2220 | "description", | ||
| 2221 | StringProp(), | ||
| 2222 | iri="https://spdx.org/rdf/3.0.1/terms/Core/description", | ||
| 2223 | compact="description", | ||
| 2224 | ) | ||
| 2225 | # Specifies an Extension characterization of some aspect of an Element. | ||
| 2226 | cls._add_property( | ||
| 2227 | "extension", | ||
| 2228 | ListProp(ObjectProp(extension_Extension, False)), | ||
| 2229 | iri="https://spdx.org/rdf/3.0.1/terms/Core/extension", | ||
| 2230 | compact="extension", | ||
| 2231 | ) | ||
| 2232 | # Provides a reference to a resource outside the scope of SPDX-3.0 content | ||
| 2233 | # that uniquely identifies an Element. | ||
| 2234 | cls._add_property( | ||
| 2235 | "externalIdentifier", | ||
| 2236 | ListProp(ObjectProp(ExternalIdentifier, False)), | ||
| 2237 | iri="https://spdx.org/rdf/3.0.1/terms/Core/externalIdentifier", | ||
| 2238 | compact="externalIdentifier", | ||
| 2239 | ) | ||
| 2240 | # Points to a resource outside the scope of the SPDX-3.0 content | ||
| 2241 | # that provides additional characteristics of an Element. | ||
| 2242 | cls._add_property( | ||
| 2243 | "externalRef", | ||
| 2244 | ListProp(ObjectProp(ExternalRef, False)), | ||
| 2245 | iri="https://spdx.org/rdf/3.0.1/terms/Core/externalRef", | ||
| 2246 | compact="externalRef", | ||
| 2247 | ) | ||
| 2248 | # Identifies the name of an Element as designated by the creator. | ||
| 2249 | cls._add_property( | ||
| 2250 | "name", | ||
| 2251 | StringProp(), | ||
| 2252 | iri="https://spdx.org/rdf/3.0.1/terms/Core/name", | ||
| 2253 | compact="name", | ||
| 2254 | ) | ||
| 2255 | # A short description of an Element. | ||
| 2256 | cls._add_property( | ||
| 2257 | "summary", | ||
| 2258 | StringProp(), | ||
| 2259 | iri="https://spdx.org/rdf/3.0.1/terms/Core/summary", | ||
| 2260 | compact="summary", | ||
| 2261 | ) | ||
| 2262 | # Provides an IntegrityMethod with which the integrity of an Element can be | ||
| 2263 | # asserted. | ||
| 2264 | cls._add_property( | ||
| 2265 | "verifiedUsing", | ||
| 2266 | ListProp(ObjectProp(IntegrityMethod, False)), | ||
| 2267 | iri="https://spdx.org/rdf/3.0.1/terms/Core/verifiedUsing", | ||
| 2268 | compact="verifiedUsing", | ||
| 2269 | ) | ||
| 2270 | |||
| 2271 | |||
| 2272 | # A collection of Elements, not necessarily with unifying context. | ||
| 2273 | @register("https://spdx.org/rdf/3.0.1/terms/Core/ElementCollection", compact_type="ElementCollection", abstract=True) | ||
| 2274 | class ElementCollection(Element): | ||
| 2275 | NODE_KIND = NodeKind.IRI | ||
| 2276 | ID_ALIAS = "spdxId" | ||
| 2277 | NAMED_INDIVIDUALS = { | ||
| 2278 | } | ||
| 2279 | |||
| 2280 | @classmethod | ||
| 2281 | def _register_props(cls): | ||
| 2282 | super()._register_props() | ||
| 2283 | # Refers to one or more Elements that are part of an ElementCollection. | ||
| 2284 | cls._add_property( | ||
| 2285 | "element", | ||
| 2286 | ListProp(ObjectProp(Element, False, context=[ | ||
| 2287 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", "NoneElement"), | ||
| 2288 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"), | ||
| 2289 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"), | ||
| 2290 | ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"), | ||
| 2291 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", "NoAssertionElement"), | ||
| 2292 | ],)), | ||
| 2293 | iri="https://spdx.org/rdf/3.0.1/terms/Core/element", | ||
| 2294 | compact="element", | ||
| 2295 | ) | ||
| 2296 | # Describes a profile which the creator of this ElementCollection intends to | ||
| 2297 | # conform to. | ||
| 2298 | cls._add_property( | ||
| 2299 | "profileConformance", | ||
| 2300 | ListProp(EnumProp([ | ||
| 2301 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/ai", "ai"), | ||
| 2302 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/build", "build"), | ||
| 2303 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/core", "core"), | ||
| 2304 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/dataset", "dataset"), | ||
| 2305 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/expandedLicensing", "expandedLicensing"), | ||
| 2306 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/extension", "extension"), | ||
| 2307 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/lite", "lite"), | ||
| 2308 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/security", "security"), | ||
| 2309 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/simpleLicensing", "simpleLicensing"), | ||
| 2310 | ("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/software", "software"), | ||
| 2311 | ])), | ||
| 2312 | iri="https://spdx.org/rdf/3.0.1/terms/Core/profileConformance", | ||
| 2313 | compact="profileConformance", | ||
| 2314 | ) | ||
| 2315 | # This property is used to denote the root Element(s) of a tree of elements contained in a BOM. | ||
| 2316 | cls._add_property( | ||
| 2317 | "rootElement", | ||
| 2318 | ListProp(ObjectProp(Element, False, context=[ | ||
| 2319 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", "NoneElement"), | ||
| 2320 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"), | ||
| 2321 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"), | ||
| 2322 | ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"), | ||
| 2323 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", "NoAssertionElement"), | ||
| 2324 | ],)), | ||
| 2325 | iri="https://spdx.org/rdf/3.0.1/terms/Core/rootElement", | ||
| 2326 | compact="rootElement", | ||
| 2327 | ) | ||
| 2328 | |||
| 2329 | |||
| 2330 | # A reference to a resource identifier defined outside the scope of SPDX-3.0 content that uniquely identifies an Element. | ||
| 2331 | @register("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifier", compact_type="ExternalIdentifier", abstract=False) | ||
| 2332 | class ExternalIdentifier(SHACLObject): | ||
| 2333 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 2334 | NAMED_INDIVIDUALS = { | ||
| 2335 | } | ||
| 2336 | |||
| 2337 | @classmethod | ||
| 2338 | def _register_props(cls): | ||
| 2339 | super()._register_props() | ||
| 2340 | # Provide consumers with comments by the creator of the Element about the | ||
| 2341 | # Element. | ||
| 2342 | cls._add_property( | ||
| 2343 | "comment", | ||
| 2344 | StringProp(), | ||
| 2345 | iri="https://spdx.org/rdf/3.0.1/terms/Core/comment", | ||
| 2346 | compact="comment", | ||
| 2347 | ) | ||
| 2348 | # Specifies the type of the external identifier. | ||
| 2349 | cls._add_property( | ||
| 2350 | "externalIdentifierType", | ||
| 2351 | EnumProp([ | ||
| 2352 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe22", "cpe22"), | ||
| 2353 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe23", "cpe23"), | ||
| 2354 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cve", "cve"), | ||
| 2355 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/email", "email"), | ||
| 2356 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/gitoid", "gitoid"), | ||
| 2357 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/other", "other"), | ||
| 2358 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/packageUrl", "packageUrl"), | ||
| 2359 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/securityOther", "securityOther"), | ||
| 2360 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swhid", "swhid"), | ||
| 2361 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swid", "swid"), | ||
| 2362 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/urlScheme", "urlScheme"), | ||
| 2363 | ]), | ||
| 2364 | iri="https://spdx.org/rdf/3.0.1/terms/Core/externalIdentifierType", | ||
| 2365 | min_count=1, | ||
| 2366 | compact="externalIdentifierType", | ||
| 2367 | ) | ||
| 2368 | # Uniquely identifies an external element. | ||
| 2369 | cls._add_property( | ||
| 2370 | "identifier", | ||
| 2371 | StringProp(), | ||
| 2372 | iri="https://spdx.org/rdf/3.0.1/terms/Core/identifier", | ||
| 2373 | min_count=1, | ||
| 2374 | compact="identifier", | ||
| 2375 | ) | ||
| 2376 | # Provides the location for more information regarding an external identifier. | ||
| 2377 | cls._add_property( | ||
| 2378 | "identifierLocator", | ||
| 2379 | ListProp(AnyURIProp()), | ||
| 2380 | iri="https://spdx.org/rdf/3.0.1/terms/Core/identifierLocator", | ||
| 2381 | compact="identifierLocator", | ||
| 2382 | ) | ||
| 2383 | # An entity that is authorized to issue identification credentials. | ||
| 2384 | cls._add_property( | ||
| 2385 | "issuingAuthority", | ||
| 2386 | StringProp(), | ||
| 2387 | iri="https://spdx.org/rdf/3.0.1/terms/Core/issuingAuthority", | ||
| 2388 | compact="issuingAuthority", | ||
| 2389 | ) | ||
| 2390 | |||
| 2391 | |||
| 2392 | # Specifies the type of an external identifier. | ||
| 2393 | @register("https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType", compact_type="ExternalIdentifierType", abstract=False) | ||
| 2394 | class ExternalIdentifierType(SHACLObject): | ||
| 2395 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 2396 | NAMED_INDIVIDUALS = { | ||
| 2397 | "cpe22": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe22", | ||
| 2398 | "cpe23": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe23", | ||
| 2399 | "cve": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cve", | ||
| 2400 | "email": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/email", | ||
| 2401 | "gitoid": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/gitoid", | ||
| 2402 | "other": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/other", | ||
| 2403 | "packageUrl": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/packageUrl", | ||
| 2404 | "securityOther": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/securityOther", | ||
| 2405 | "swhid": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swhid", | ||
| 2406 | "swid": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swid", | ||
| 2407 | "urlScheme": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/urlScheme", | ||
| 2408 | } | ||
| 2409 | # [Common Platform Enumeration Specification 2.2](https://cpe.mitre.org/files/cpe-specification_2.2.pdf) | ||
| 2410 | cpe22 = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe22" | ||
| 2411 | # [Common Platform Enumeration: Naming Specification Version 2.3](https://csrc.nist.gov/publications/detail/nistir/7695/final) | ||
| 2412 | cpe23 = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cpe23" | ||
| 2413 | # Common Vulnerabilities and Exposures identifiers, an identifier for a specific software flaw defined within the official CVE Dictionary and that conforms to the [CVE specification](https://csrc.nist.gov/glossary/term/cve_id). | ||
| 2414 | cve = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/cve" | ||
| 2415 | # Email address, as defined in [RFC 3696](https://datatracker.ietf.org/doc/rfc3696/) Section 3. | ||
| 2416 | email = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/email" | ||
| 2417 | # [Gitoid](https://www.iana.org/assignments/uri-schemes/prov/gitoid), stands for [Git Object ID](https://git-scm.com/book/en/v2/Git-Internals-Git-Objects). A gitoid of type blob is a unique hash of a binary artifact. A gitoid may represent either an [Artifact Identifier](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-identifier-types) for the software artifact or an [Input Manifest Identifier](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#input-manifest-identifier) for the software artifact's associated [Artifact Input Manifest](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-input-manifest); this ambiguity exists because the Artifact Input Manifest is itself an artifact, and the gitoid of that artifact is its valid identifier. Gitoids calculated on software artifacts (Snippet, File, or Package Elements) should be recorded in the SPDX 3.0 SoftwareArtifact's contentIdentifier property. Gitoids calculated on the Artifact Input Manifest (Input Manifest Identifier) should be recorded in the SPDX 3.0 Element's externalIdentifier property. See [OmniBOR Specification](https://github.com/omnibor/spec/), a minimalistic specification for describing software [Artifact Dependency Graphs](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-dependency-graph-adg). | ||
| 2418 | gitoid = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/gitoid" | ||
| 2419 | # Used when the type does not match any of the other options. | ||
| 2420 | other = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/other" | ||
| 2421 | # Package URL, as defined in the corresponding [Annex](../../../annexes/pkg-url-specification.md) of this specification. | ||
| 2422 | packageUrl = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/packageUrl" | ||
| 2423 | # Used when there is a security related identifier of unspecified type. | ||
| 2424 | securityOther = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/securityOther" | ||
| 2425 | # SoftWare Hash IDentifier, a persistent intrinsic identifier for digital artifacts, such as files, trees (also known as directories or folders), commits, and other objects typically found in version control systems. The format of the identifiers is defined in the [SWHID specification](https://www.swhid.org/specification/v1.1/4.Syntax) (ISO/IEC DIS 18670). They typically look like `swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2`. | ||
| 2426 | swhid = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swhid" | ||
| 2427 | # Concise Software Identification (CoSWID) tag, as defined in [RFC 9393](https://datatracker.ietf.org/doc/rfc9393/) Section 2.3. | ||
| 2428 | swid = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/swid" | ||
| 2429 | # [Uniform Resource Identifier (URI) Schemes](https://www.iana.org/assignments/uri-schemes/uri-schemes.xhtml). The scheme used in order to locate a resource. | ||
| 2430 | urlScheme = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalIdentifierType/urlScheme" | ||
| 2431 | |||
| 2432 | |||
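Another short usage sketch, this time for the external-identifier classes above; the package URL value is made up for illustration.

ident = ExternalIdentifier()
ident.externalIdentifierType = ExternalIdentifierType.packageUrl
ident.identifier = "pkg:generic/example@1.0"   # hypothetical purl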
| 2433 | # A map of Element identifiers that are used within an SpdxDocument but defined | ||
| 2434 | # external to that SpdxDocument. | ||
| 2435 | @register("https://spdx.org/rdf/3.0.1/terms/Core/ExternalMap", compact_type="ExternalMap", abstract=False) | ||
| 2436 | class ExternalMap(SHACLObject): | ||
| 2437 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 2438 | NAMED_INDIVIDUALS = { | ||
| 2439 | } | ||
| 2440 | |||
| 2441 | @classmethod | ||
| 2442 | def _register_props(cls): | ||
| 2443 | super()._register_props() | ||
| 2444 | # Artifact representing a serialization instance of SPDX data containing the | ||
| 2445 | # definition of a particular Element. | ||
| 2446 | cls._add_property( | ||
| 2447 | "definingArtifact", | ||
| 2448 | ObjectProp(Artifact, False), | ||
| 2449 | iri="https://spdx.org/rdf/3.0.1/terms/Core/definingArtifact", | ||
| 2450 | compact="definingArtifact", | ||
| 2451 | ) | ||
| 2452 | # Identifies an external Element used within an SpdxDocument but defined | ||
| 2453 | # external to that SpdxDocument. | ||
| 2454 | cls._add_property( | ||
| 2455 | "externalSpdxId", | ||
| 2456 | AnyURIProp(), | ||
| 2457 | iri="https://spdx.org/rdf/3.0.1/terms/Core/externalSpdxId", | ||
| 2458 | min_count=1, | ||
| 2459 | compact="externalSpdxId", | ||
| 2460 | ) | ||
| 2461 | # Provides an indication of where to retrieve an external Element. | ||
| 2462 | cls._add_property( | ||
| 2463 | "locationHint", | ||
| 2464 | AnyURIProp(), | ||
| 2465 | iri="https://spdx.org/rdf/3.0.1/terms/Core/locationHint", | ||
| 2466 | compact="locationHint", | ||
| 2467 | ) | ||
| 2468 | # Provides an IntegrityMethod with which the integrity of an Element can be | ||
| 2469 | # asserted. | ||
| 2470 | cls._add_property( | ||
| 2471 | "verifiedUsing", | ||
| 2472 | ListProp(ObjectProp(IntegrityMethod, False)), | ||
| 2473 | iri="https://spdx.org/rdf/3.0.1/terms/Core/verifiedUsing", | ||
| 2474 | compact="verifiedUsing", | ||
| 2475 | ) | ||
| 2476 | |||
| 2477 | |||
| 2478 | # A reference to a resource outside the scope of SPDX-3.0 content related to an Element. | ||
| 2479 | @register("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRef", compact_type="ExternalRef", abstract=False) | ||
| 2480 | class ExternalRef(SHACLObject): | ||
| 2481 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 2482 | NAMED_INDIVIDUALS = { | ||
| 2483 | } | ||
| 2484 | |||
| 2485 | @classmethod | ||
| 2486 | def _register_props(cls): | ||
| 2487 | super()._register_props() | ||
| 2488 | # Provide consumers with comments by the creator of the Element about the | ||
| 2489 | # Element. | ||
| 2490 | cls._add_property( | ||
| 2491 | "comment", | ||
| 2492 | StringProp(), | ||
| 2493 | iri="https://spdx.org/rdf/3.0.1/terms/Core/comment", | ||
| 2494 | compact="comment", | ||
| 2495 | ) | ||
| 2496 | # Provides information about the content type of an Element or a Property. | ||
| 2497 | cls._add_property( | ||
| 2498 | "contentType", | ||
| 2499 | StringProp(pattern=r"^[^\/]+\/[^\/]+$",), | ||
| 2500 | iri="https://spdx.org/rdf/3.0.1/terms/Core/contentType", | ||
| 2501 | compact="contentType", | ||
| 2502 | ) | ||
| 2503 | # Specifies the type of the external reference. | ||
| 2504 | cls._add_property( | ||
| 2505 | "externalRefType", | ||
| 2506 | EnumProp([ | ||
| 2507 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altDownloadLocation", "altDownloadLocation"), | ||
| 2508 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altWebPage", "altWebPage"), | ||
| 2509 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/binaryArtifact", "binaryArtifact"), | ||
| 2510 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/bower", "bower"), | ||
| 2511 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildMeta", "buildMeta"), | ||
| 2512 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildSystem", "buildSystem"), | ||
| 2513 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/certificationReport", "certificationReport"), | ||
| 2514 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/chat", "chat"), | ||
| 2515 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/componentAnalysisReport", "componentAnalysisReport"), | ||
| 2516 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/cwe", "cwe"), | ||
| 2517 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/documentation", "documentation"), | ||
| 2518 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/dynamicAnalysisReport", "dynamicAnalysisReport"), | ||
| 2519 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/eolNotice", "eolNotice"), | ||
| 2520 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/exportControlAssessment", "exportControlAssessment"), | ||
| 2521 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/funding", "funding"), | ||
| 2522 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/issueTracker", "issueTracker"), | ||
| 2523 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/license", "license"), | ||
| 2524 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mailingList", "mailingList"), | ||
| 2525 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mavenCentral", "mavenCentral"), | ||
| 2526 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/metrics", "metrics"), | ||
| 2527 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/npm", "npm"), | ||
| 2528 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/nuget", "nuget"), | ||
| 2529 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/other", "other"), | ||
| 2530 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/privacyAssessment", "privacyAssessment"), | ||
| 2531 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/productMetadata", "productMetadata"), | ||
| 2532 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/purchaseOrder", "purchaseOrder"), | ||
| 2533 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/qualityAssessmentReport", "qualityAssessmentReport"), | ||
| 2534 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseHistory", "releaseHistory"), | ||
| 2535 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseNotes", "releaseNotes"), | ||
| 2536 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/riskAssessment", "riskAssessment"), | ||
| 2537 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/runtimeAnalysisReport", "runtimeAnalysisReport"), | ||
| 2538 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/secureSoftwareAttestation", "secureSoftwareAttestation"), | ||
| 2539 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdversaryModel", "securityAdversaryModel"), | ||
| 2540 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdvisory", "securityAdvisory"), | ||
| 2541 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityFix", "securityFix"), | ||
| 2542 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityOther", "securityOther"), | ||
| 2543 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPenTestReport", "securityPenTestReport"), | ||
| 2544 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPolicy", "securityPolicy"), | ||
| 2545 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityThreatModel", "securityThreatModel"), | ||
| 2546 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/socialMedia", "socialMedia"), | ||
| 2547 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/sourceArtifact", "sourceArtifact"), | ||
| 2548 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/staticAnalysisReport", "staticAnalysisReport"), | ||
| 2549 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/support", "support"), | ||
| 2550 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vcs", "vcs"), | ||
| 2551 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityDisclosureReport", "vulnerabilityDisclosureReport"), | ||
| 2552 | ("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityExploitabilityAssessment", "vulnerabilityExploitabilityAssessment"), | ||
| 2553 | ]), | ||
| 2554 | iri="https://spdx.org/rdf/3.0.1/terms/Core/externalRefType", | ||
| 2555 | compact="externalRefType", | ||
| 2556 | ) | ||
| 2557 | # Provides the location of an external reference. | ||
| 2558 | cls._add_property( | ||
| 2559 | "locator", | ||
| 2560 | ListProp(StringProp()), | ||
| 2561 | iri="https://spdx.org/rdf/3.0.1/terms/Core/locator", | ||
| 2562 | compact="locator", | ||
| 2563 | ) | ||
| 2564 | |||
| 2565 | |||
| 2566 | # Specifies the type of an external reference. | ||
| 2567 | @register("https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType", compact_type="ExternalRefType", abstract=False) | ||
| 2568 | class ExternalRefType(SHACLObject): | ||
| 2569 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 2570 | NAMED_INDIVIDUALS = { | ||
| 2571 | "altDownloadLocation": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altDownloadLocation", | ||
| 2572 | "altWebPage": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altWebPage", | ||
| 2573 | "binaryArtifact": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/binaryArtifact", | ||
| 2574 | "bower": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/bower", | ||
| 2575 | "buildMeta": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildMeta", | ||
| 2576 | "buildSystem": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildSystem", | ||
| 2577 | "certificationReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/certificationReport", | ||
| 2578 | "chat": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/chat", | ||
| 2579 | "componentAnalysisReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/componentAnalysisReport", | ||
| 2580 | "cwe": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/cwe", | ||
| 2581 | "documentation": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/documentation", | ||
| 2582 | "dynamicAnalysisReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/dynamicAnalysisReport", | ||
| 2583 | "eolNotice": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/eolNotice", | ||
| 2584 | "exportControlAssessment": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/exportControlAssessment", | ||
| 2585 | "funding": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/funding", | ||
| 2586 | "issueTracker": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/issueTracker", | ||
| 2587 | "license": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/license", | ||
| 2588 | "mailingList": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mailingList", | ||
| 2589 | "mavenCentral": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mavenCentral", | ||
| 2590 | "metrics": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/metrics", | ||
| 2591 | "npm": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/npm", | ||
| 2592 | "nuget": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/nuget", | ||
| 2593 | "other": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/other", | ||
| 2594 | "privacyAssessment": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/privacyAssessment", | ||
| 2595 | "productMetadata": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/productMetadata", | ||
| 2596 | "purchaseOrder": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/purchaseOrder", | ||
| 2597 | "qualityAssessmentReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/qualityAssessmentReport", | ||
| 2598 | "releaseHistory": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseHistory", | ||
| 2599 | "releaseNotes": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseNotes", | ||
| 2600 | "riskAssessment": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/riskAssessment", | ||
| 2601 | "runtimeAnalysisReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/runtimeAnalysisReport", | ||
| 2602 | "secureSoftwareAttestation": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/secureSoftwareAttestation", | ||
| 2603 | "securityAdversaryModel": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdversaryModel", | ||
| 2604 | "securityAdvisory": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdvisory", | ||
| 2605 | "securityFix": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityFix", | ||
| 2606 | "securityOther": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityOther", | ||
| 2607 | "securityPenTestReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPenTestReport", | ||
| 2608 | "securityPolicy": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPolicy", | ||
| 2609 | "securityThreatModel": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityThreatModel", | ||
| 2610 | "socialMedia": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/socialMedia", | ||
| 2611 | "sourceArtifact": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/sourceArtifact", | ||
| 2612 | "staticAnalysisReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/staticAnalysisReport", | ||
| 2613 | "support": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/support", | ||
| 2614 | "vcs": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vcs", | ||
| 2615 | "vulnerabilityDisclosureReport": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityDisclosureReport", | ||
| 2616 | "vulnerabilityExploitabilityAssessment": "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityExploitabilityAssessment", | ||
| 2617 | } | ||
| 2618 | # A reference to an alternative download location. | ||
| 2619 | altDownloadLocation = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altDownloadLocation" | ||
| 2620 | # A reference to an alternative web page. | ||
| 2621 | altWebPage = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/altWebPage" | ||
| 2622 | # A reference to binary artifacts related to a package. | ||
| 2623 | binaryArtifact = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/binaryArtifact" | ||
| 2624 | # A reference to a Bower package. The package locator format, which looks like `package#version`, is defined in the "install" section of the [Bower API documentation](https://bower.io/docs/api/#install). | ||
| 2625 | bower = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/bower" | ||
| 2626 | # A reference to build metadata related to a published package. | ||
| 2627 | buildMeta = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildMeta" | ||
| 2628 | # A reference to the build system used to create or publish the package. | ||
| 2629 | buildSystem = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/buildSystem" | ||
| 2630 | # A reference to a certification report for a package from an accredited/independent body. | ||
| 2631 | certificationReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/certificationReport" | ||
| 2632 | # A reference to the instant messaging system used by the maintainer for a package. | ||
| 2633 | chat = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/chat" | ||
| 2634 | # A reference to a Software Composition Analysis (SCA) report. | ||
| 2635 | componentAnalysisReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/componentAnalysisReport" | ||
| 2636 | # [Common Weakness Enumeration](https://csrc.nist.gov/glossary/term/common_weakness_enumeration). A reference to a source of software flaw defined within the official [CWE List](https://cwe.mitre.org/data/) that conforms to the [CWE specification](https://cwe.mitre.org/). | ||
| 2637 | cwe = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/cwe" | ||
| 2638 | # A reference to the documentation for a package. | ||
| 2639 | documentation = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/documentation" | ||
| 2640 | # A reference to a dynamic analysis report for a package. | ||
| 2641 | dynamicAnalysisReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/dynamicAnalysisReport" | ||
| 2642 | # A reference to the End Of Sale (EOS) and/or End Of Life (EOL) information related to a package. | ||
| 2643 | eolNotice = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/eolNotice" | ||
| 2644 | # A reference to an export control assessment for a package. | ||
| 2645 | exportControlAssessment = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/exportControlAssessment" | ||
| 2646 | # A reference to funding information related to a package. | ||
| 2647 | funding = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/funding" | ||
| 2648 | # A reference to the issue tracker for a package. | ||
| 2649 | issueTracker = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/issueTracker" | ||
| 2650 | # A reference to additional license information related to an artifact. | ||
| 2651 | license = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/license" | ||
| 2652 | # A reference to the mailing list used by the maintainer for a package. | ||
| 2653 | mailingList = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mailingList" | ||
| 2654 | # A reference to a Maven repository artifact. The artifact locator format is defined in the [Maven documentation](https://maven.apache.org/guides/mini/guide-naming-conventions.html) and looks like `groupId:artifactId[:version]`. | ||
| 2655 | mavenCentral = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/mavenCentral" | ||
| 2656 | # A reference to metrics related to a package, such as OpenSSF scorecards. | ||
| 2657 | metrics = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/metrics" | ||
| 2658 | # A reference to an npm package. The package locator format is defined in the [npm documentation](https://docs.npmjs.com/cli/v10/configuring-npm/package-json) and looks like `package@version`. | ||
| 2659 | npm = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/npm" | ||
| 2660 | # A reference to a NuGet package. The package locator format is defined in the [NuGet documentation](https://docs.nuget.org) and looks like `package/version`. | ||
| 2661 | nuget = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/nuget" | ||
| 2662 | # Used when the type does not match any of the other options. | ||
| 2663 | other = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/other" | ||
| 2664 | # A reference to a privacy assessment for a package. | ||
| 2665 | privacyAssessment = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/privacyAssessment" | ||
| 2666 | # A reference to additional product metadata, such as a reference within an organization's product catalog. | ||
| 2667 | productMetadata = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/productMetadata" | ||
| 2668 | # A reference to a purchase order for a package. | ||
| 2669 | purchaseOrder = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/purchaseOrder" | ||
| 2670 | # A reference to a quality assessment for a package. | ||
| 2671 | qualityAssessmentReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/qualityAssessmentReport" | ||
| 2672 | # A reference to a published list of releases for a package. | ||
| 2673 | releaseHistory = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseHistory" | ||
| 2674 | # A reference to the release notes for a package. | ||
| 2675 | releaseNotes = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/releaseNotes" | ||
| 2676 | # A reference to a risk assessment for a package. | ||
| 2677 | riskAssessment = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/riskAssessment" | ||
| 2678 | # A reference to a runtime analysis report for a package. | ||
| 2679 | runtimeAnalysisReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/runtimeAnalysisReport" | ||
| 2680 | # A reference to information assuring that the software is developed using security practices as defined by [NIST SP 800-218 Secure Software Development Framework (SSDF) Version 1.1](https://csrc.nist.gov/pubs/sp/800/218/final) or [CISA Secure Software Development Attestation Form](https://www.cisa.gov/resources-tools/resources/secure-software-development-attestation-form). | ||
| 2681 | secureSoftwareAttestation = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/secureSoftwareAttestation" | ||
| 2682 | # A reference to the security adversary model for a package. | ||
| 2683 | securityAdversaryModel = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdversaryModel" | ||
| 2684 | # A reference to a published security advisory (where advisory as defined per [ISO 29147:2018](https://www.iso.org/standard/72311.html)) that may affect one or more elements, e.g., vendor advisories or specific NVD entries. | ||
| 2685 | securityAdvisory = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityAdvisory" | ||
| 2686 | # A reference to the patch or source code that fixes a vulnerability. | ||
| 2687 | securityFix = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityFix" | ||
| 2688 | # A reference to related security information of unspecified type. | ||
| 2689 | securityOther = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityOther" | ||
| 2690 | # A reference to a [penetration test](https://en.wikipedia.org/wiki/Penetration_test) report for a package. | ||
| 2691 | securityPenTestReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPenTestReport" | ||
| 2692 | # A reference to instructions for reporting newly discovered security vulnerabilities for a package. | ||
| 2693 | securityPolicy = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityPolicy" | ||
| 2694 | # A reference to the [security threat model](https://en.wikipedia.org/wiki/Threat_model) for a package. | ||
| 2695 | securityThreatModel = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/securityThreatModel" | ||
| 2696 | # A reference to a social media channel for a package. | ||
| 2697 | socialMedia = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/socialMedia" | ||
| 2698 | # A reference to an artifact containing the sources for a package. | ||
| 2699 | sourceArtifact = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/sourceArtifact" | ||
| 2700 | # A reference to a static analysis report for a package. | ||
| 2701 | staticAnalysisReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/staticAnalysisReport" | ||
| 2702 | # A reference to the software support channel or other support information for a package. | ||
| 2703 | support = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/support" | ||
| 2704 | # A reference to a version control system related to a software artifact. | ||
| 2705 | vcs = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vcs" | ||
| 2706 | # A reference to a Vulnerability Disclosure Report (VDR) which provides the software supplier's analysis and findings describing the impact (or lack of impact) that reported vulnerabilities have on packages or products in the supplier's SBOM as defined in [NIST SP 800-161 Cybersecurity Supply Chain Risk Management Practices for Systems and Organizations](https://csrc.nist.gov/pubs/sp/800/161/r1/final). | ||
| 2707 | vulnerabilityDisclosureReport = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityDisclosureReport" | ||
| 2708 | # A reference to a Vulnerability Exploitability eXchange (VEX) statement which provides information on whether a product is impacted by a specific vulnerability in an included package and, if affected, whether there are actions recommended to remediate. See also [NTIA VEX one-page summary](https://ntia.gov/files/ntia/publications/vex_one-page_summary.pdf). | ||
| 2709 | vulnerabilityExploitabilityAssessment = "https://spdx.org/rdf/3.0.1/terms/Core/ExternalRefType/vulnerabilityExploitabilityAssessment" | ||
| 2710 | |||
| 2711 | |||
| 2712 | # A mathematical algorithm that maps data of arbitrary size to a bit string. | ||
| 2713 | @register("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm", compact_type="HashAlgorithm", abstract=False) | ||
| 2714 | class HashAlgorithm(SHACLObject): | ||
| 2715 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 2716 | NAMED_INDIVIDUALS = { | ||
| 2717 | "adler32": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/adler32", | ||
| 2718 | "blake2b256": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b256", | ||
| 2719 | "blake2b384": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b384", | ||
| 2720 | "blake2b512": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b512", | ||
| 2721 | "blake3": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake3", | ||
| 2722 | "crystalsDilithium": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsDilithium", | ||
| 2723 | "crystalsKyber": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsKyber", | ||
| 2724 | "falcon": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/falcon", | ||
| 2725 | "md2": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md2", | ||
| 2726 | "md4": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md4", | ||
| 2727 | "md5": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md5", | ||
| 2728 | "md6": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md6", | ||
| 2729 | "other": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/other", | ||
| 2730 | "sha1": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha1", | ||
| 2731 | "sha224": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha224", | ||
| 2732 | "sha256": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha256", | ||
| 2733 | "sha384": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha384", | ||
| 2734 | "sha3_224": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_224", | ||
| 2735 | "sha3_256": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_256", | ||
| 2736 | "sha3_384": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_384", | ||
| 2737 | "sha3_512": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_512", | ||
| 2738 | "sha512": "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha512", | ||
| 2739 | } | ||
| 2740 | # Adler-32 checksum is part of the widely used zlib compression library as defined in [RFC 1950](https://datatracker.ietf.org/doc/rfc1950/) Section 2.3. | ||
| 2741 | adler32 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/adler32" | ||
| 2742 | # BLAKE2b algorithm with a digest size of 256, as defined in [RFC 7693](https://datatracker.ietf.org/doc/rfc7693/) Section 4. | ||
| 2743 | blake2b256 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b256" | ||
| 2744 | # BLAKE2b algorithm with a digest size of 384, as defined in [RFC 7693](https://datatracker.ietf.org/doc/rfc7693/) Section 4. | ||
| 2745 | blake2b384 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b384" | ||
| 2746 | # BLAKE2b algorithm with a digest size of 512, as defined in [RFC 7693](https://datatracker.ietf.org/doc/rfc7693/) Section 4. | ||
| 2747 | blake2b512 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b512" | ||
| 2748 | # [BLAKE3](https://github.com/BLAKE3-team/BLAKE3-specs/blob/master/blake3.pdf) | ||
| 2749 | blake3 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake3" | ||
| 2750 | # [Dilithium](https://pq-crystals.org/dilithium/) | ||
| 2751 | crystalsDilithium = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsDilithium" | ||
| 2752 | # [Kyber](https://pq-crystals.org/kyber/) | ||
| 2753 | crystalsKyber = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsKyber" | ||
| 2754 | # [FALCON](https://falcon-sign.info/falcon.pdf) | ||
| 2755 | falcon = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/falcon" | ||
| 2756 | # MD2 message-digest algorithm, as defined in [RFC 1319](https://datatracker.ietf.org/doc/rfc1319/). | ||
| 2757 | md2 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md2" | ||
| 2758 | # MD4 message-digest algorithm, as defined in [RFC 1186](https://datatracker.ietf.org/doc/rfc1186/). | ||
| 2759 | md4 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md4" | ||
| 2760 | # MD5 message-digest algorithm, as defined in [RFC 1321](https://datatracker.ietf.org/doc/rfc1321/). | ||
| 2761 | md5 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md5" | ||
| 2762 | # [MD6 hash function](https://people.csail.mit.edu/rivest/pubs/RABCx08.pdf) | ||
| 2763 | md6 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md6" | ||
| 2764 | # any hashing algorithm that does not exist in this list of entries | ||
| 2765 | other = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/other" | ||
| 2766 | # SHA-1, a secure hashing algorithm, as defined in [RFC 3174](https://datatracker.ietf.org/doc/rfc3174/). | ||
| 2767 | sha1 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha1" | ||
| 2768 | # SHA-2 with a digest length of 224, as defined in [RFC 3874](https://datatracker.ietf.org/doc/rfc3874/). | ||
| 2769 | sha224 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha224" | ||
| 2770 | # SHA-2 with a digest length of 256, as defined in [RFC 6234](https://datatracker.ietf.org/doc/rfc6234/). | ||
| 2771 | sha256 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha256" | ||
| 2772 | # SHA-2 with a digest length of 384, as defined in [RFC 6234](https://datatracker.ietf.org/doc/rfc6234/). | ||
| 2773 | sha384 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha384" | ||
| 2774 | # SHA-3 with a digest length of 224, as defined in [FIPS 202](https://csrc.nist.gov/pubs/fips/202/final). | ||
| 2775 | sha3_224 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_224" | ||
| 2776 | # SHA-3 with a digest length of 256, as defined in [FIPS 202](https://csrc.nist.gov/pubs/fips/202/final). | ||
| 2777 | sha3_256 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_256" | ||
| 2778 | # SHA-3 with a digest length of 384, as defined in [FIPS 202](https://csrc.nist.gov/pubs/fips/202/final). | ||
| 2779 | sha3_384 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_384" | ||
| 2780 | # SHA-3 with a digest length of 512, as defined in [FIPS 202](https://csrc.nist.gov/pubs/fips/202/final). | ||
| 2781 | sha3_512 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_512" | ||
| 2782 | # SHA-2 with a digest length of 512, as defined in [RFC 6234](https://datatracker.ietf.org/doc/rfc6234/). | ||
| 2783 | sha512 = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha512" | ||
| 2784 | |||
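Given the HashAlgorithm class above, each algorithm is exposed both as a class attribute holding its full IRI and through the NAMED_INDIVIDUALS mapping. A minimal sketch of how that can be used, assuming the generated module is importable as oe.spdx30 (the import path is an assumption and is not shown in this hunk):

    import oe.spdx30 as spdx30

    # Named individuals are plain IRI strings attached to the class.
    assert spdx30.HashAlgorithm.sha256 == \
        "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha256"

    # The NAMED_INDIVIDUALS dict maps the short name to the same IRI.
    iri = spdx30.HashAlgorithm.NAMED_INDIVIDUALS["sha256"]
    assert iri == spdx30.HashAlgorithm.sha256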
| 2785 | |||
| 2786 | # A concrete subclass of Element used by Individuals in the | ||
| 2787 | # Core profile. | ||
| 2788 | @register("https://spdx.org/rdf/3.0.1/terms/Core/IndividualElement", compact_type="IndividualElement", abstract=False) | ||
| 2789 | class IndividualElement(Element): | ||
| 2790 | NODE_KIND = NodeKind.IRI | ||
| 2791 | ID_ALIAS = "spdxId" | ||
| 2792 | NAMED_INDIVIDUALS = { | ||
| 2793 | "NoAssertionElement": "https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", | ||
| 2794 | "NoneElement": "https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", | ||
| 2795 | } | ||
| 2796 | # An Individual Value for Element representing a set of Elements of unknown | ||
| 2797 | # identity or cardinality (number). | ||
| 2798 | NoAssertionElement = "https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement" | ||
| 2799 | # An Individual Value for Element representing a set of Elements with | ||
| 2800 | # cardinality (number/count) of zero. | ||
| 2801 | NoneElement = "https://spdx.org/rdf/3.0.1/terms/Core/NoneElement" | ||
| 2802 | |||
| 2803 | |||
| 2804 | # Provides an independently reproducible mechanism that permits verification of a specific Element. | ||
| 2805 | @register("https://spdx.org/rdf/3.0.1/terms/Core/IntegrityMethod", compact_type="IntegrityMethod", abstract=True) | ||
| 2806 | class IntegrityMethod(SHACLObject): | ||
| 2807 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 2808 | NAMED_INDIVIDUALS = { | ||
| 2809 | } | ||
| 2810 | |||
| 2811 | @classmethod | ||
| 2812 | def _register_props(cls): | ||
| 2813 | super()._register_props() | ||
| 2814 | # Provide consumers with comments by the creator of the Element about the | ||
| 2815 | # Element. | ||
| 2816 | cls._add_property( | ||
| 2817 | "comment", | ||
| 2818 | StringProp(), | ||
| 2819 | iri="https://spdx.org/rdf/3.0.1/terms/Core/comment", | ||
| 2820 | compact="comment", | ||
| 2821 | ) | ||
| 2822 | |||
| 2823 | |||
| 2824 | # Provide an enumerated set of lifecycle phases that can provide context to relationships. | ||
| 2825 | @register("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType", compact_type="LifecycleScopeType", abstract=False) | ||
| 2826 | class LifecycleScopeType(SHACLObject): | ||
| 2827 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 2828 | NAMED_INDIVIDUALS = { | ||
| 2829 | "build": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/build", | ||
| 2830 | "design": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/design", | ||
| 2831 | "development": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/development", | ||
| 2832 | "other": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/other", | ||
| 2833 | "runtime": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/runtime", | ||
| 2834 | "test": "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/test", | ||
| 2835 | } | ||
| 2836 | # A relationship has specific context implications during an element's build phase, during development. | ||
| 2837 | build = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/build" | ||
| 2838 | # A relationship has specific context implications during an element's design. | ||
| 2839 | design = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/design" | ||
| 2840 | # A relationship has specific context implications during development phase of an element. | ||
| 2841 | development = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/development" | ||
| 2842 | # A relationship has other specific context information necessary to capture that the above set of enumerations does not handle. | ||
| 2843 | other = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/other" | ||
| 2844 | # A relationship has specific context implications during the execution phase of an element. | ||
| 2845 | runtime = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/runtime" | ||
| 2846 | # A relationship has specific context implications during an element's testing phase, during development. | ||
| 2847 | test = "https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/test" | ||
| 2848 | |||
| 2849 | |||
| 2850 | # A mapping between prefixes and namespace partial URIs. | ||
| 2851 | @register("https://spdx.org/rdf/3.0.1/terms/Core/NamespaceMap", compact_type="NamespaceMap", abstract=False) | ||
| 2852 | class NamespaceMap(SHACLObject): | ||
| 2853 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 2854 | NAMED_INDIVIDUALS = { | ||
| 2855 | } | ||
| 2856 | |||
| 2857 | @classmethod | ||
| 2858 | def _register_props(cls): | ||
| 2859 | super()._register_props() | ||
| 2860 | # Provides an unambiguous mechanism for conveying a URI fragment portion of an | ||
| 2861 | # Element ID. | ||
| 2862 | cls._add_property( | ||
| 2863 | "namespace", | ||
| 2864 | AnyURIProp(), | ||
| 2865 | iri="https://spdx.org/rdf/3.0.1/terms/Core/namespace", | ||
| 2866 | min_count=1, | ||
| 2867 | compact="namespace", | ||
| 2868 | ) | ||
| 2869 | # A substitute for a URI. | ||
| 2870 | cls._add_property( | ||
| 2871 | "prefix", | ||
| 2872 | StringProp(), | ||
| 2873 | iri="https://spdx.org/rdf/3.0.1/terms/Core/prefix", | ||
| 2874 | min_count=1, | ||
| 2875 | compact="prefix", | ||
| 2876 | ) | ||
| 2877 | |||
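A NamespaceMap simply pairs a short prefix with the namespace URI it abbreviates; both properties are mandatory (min_count=1). A minimal sketch, assuming the SHACLObject base class accepts keyword initialization for registered properties and that the module imports as oe.spdx30; the prefix and URI below are hypothetical:

    import oe.spdx30 as spdx30

    # Hypothetical prefix/namespace pair; both fields are required.
    nm = spdx30.NamespaceMap(
        prefix="oe",
        namespace="http://spdx.org/spdxdocs/example-document/",
    )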
| 2878 | |||
| 2879 | # An SPDX version 2.X compatible verification method for software packages. | ||
| 2880 | @register("https://spdx.org/rdf/3.0.1/terms/Core/PackageVerificationCode", compact_type="PackageVerificationCode", abstract=False) | ||
| 2881 | class PackageVerificationCode(IntegrityMethod): | ||
| 2882 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 2883 | NAMED_INDIVIDUALS = { | ||
| 2884 | } | ||
| 2885 | |||
| 2886 | @classmethod | ||
| 2887 | def _register_props(cls): | ||
| 2888 | super()._register_props() | ||
| 2889 | # Specifies the algorithm used for calculating the hash value. | ||
| 2890 | cls._add_property( | ||
| 2891 | "algorithm", | ||
| 2892 | EnumProp([ | ||
| 2893 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/adler32", "adler32"), | ||
| 2894 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b256", "blake2b256"), | ||
| 2895 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b384", "blake2b384"), | ||
| 2896 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b512", "blake2b512"), | ||
| 2897 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake3", "blake3"), | ||
| 2898 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsDilithium", "crystalsDilithium"), | ||
| 2899 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsKyber", "crystalsKyber"), | ||
| 2900 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/falcon", "falcon"), | ||
| 2901 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md2", "md2"), | ||
| 2902 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md4", "md4"), | ||
| 2903 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md5", "md5"), | ||
| 2904 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md6", "md6"), | ||
| 2905 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/other", "other"), | ||
| 2906 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha1", "sha1"), | ||
| 2907 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha224", "sha224"), | ||
| 2908 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha256", "sha256"), | ||
| 2909 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha384", "sha384"), | ||
| 2910 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_224", "sha3_224"), | ||
| 2911 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_256", "sha3_256"), | ||
| 2912 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_384", "sha3_384"), | ||
| 2913 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_512", "sha3_512"), | ||
| 2914 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha512", "sha512"), | ||
| 2915 | ]), | ||
| 2916 | iri="https://spdx.org/rdf/3.0.1/terms/Core/algorithm", | ||
| 2917 | min_count=1, | ||
| 2918 | compact="algorithm", | ||
| 2919 | ) | ||
| 2920 | # The result of applying a hash algorithm to an Element. | ||
| 2921 | cls._add_property( | ||
| 2922 | "hashValue", | ||
| 2923 | StringProp(), | ||
| 2924 | iri="https://spdx.org/rdf/3.0.1/terms/Core/hashValue", | ||
| 2925 | min_count=1, | ||
| 2926 | compact="hashValue", | ||
| 2927 | ) | ||
| 2928 | # The relative file name of a file to be excluded from the | ||
| 2929 | # `PackageVerificationCode`. | ||
| 2930 | cls._add_property( | ||
| 2931 | "packageVerificationCodeExcludedFile", | ||
| 2932 | ListProp(StringProp()), | ||
| 2933 | iri="https://spdx.org/rdf/3.0.1/terms/Core/packageVerificationCodeExcludedFile", | ||
| 2934 | compact="packageVerificationCodeExcludedFile", | ||
| 2935 | ) | ||
| 2936 | |||
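PackageVerificationCode requires an algorithm drawn from the HashAlgorithm enumeration and a hashValue, plus an optional list of excluded files. A minimal sketch under the same assumptions as above (keyword initialization, oe.spdx30 import path); the hash value and excluded file name are hypothetical:

    import oe.spdx30 as spdx30

    # SPDX 2.x-style verification codes are computed with SHA-1; the value
    # and excluded file shown here are made up for illustration only.
    pvc = spdx30.PackageVerificationCode(
        algorithm=spdx30.HashAlgorithm.sha1,
        hashValue="d6a770ba38583ed4bb4525bd96e50461655d2758",
        packageVerificationCodeExcludedFile=["./package.spdx"],
    )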
| 2937 | |||
| 2938 | # A tuple of two positive integers that define a range. | ||
| 2939 | @register("https://spdx.org/rdf/3.0.1/terms/Core/PositiveIntegerRange", compact_type="PositiveIntegerRange", abstract=False) | ||
| 2940 | class PositiveIntegerRange(SHACLObject): | ||
| 2941 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 2942 | NAMED_INDIVIDUALS = { | ||
| 2943 | } | ||
| 2944 | |||
| 2945 | @classmethod | ||
| 2946 | def _register_props(cls): | ||
| 2947 | super()._register_props() | ||
| 2948 | # Defines the beginning of a range. | ||
| 2949 | cls._add_property( | ||
| 2950 | "beginIntegerRange", | ||
| 2951 | PositiveIntegerProp(), | ||
| 2952 | iri="https://spdx.org/rdf/3.0.1/terms/Core/beginIntegerRange", | ||
| 2953 | min_count=1, | ||
| 2954 | compact="beginIntegerRange", | ||
| 2955 | ) | ||
| 2956 | # Defines the end of a range. | ||
| 2957 | cls._add_property( | ||
| 2958 | "endIntegerRange", | ||
| 2959 | PositiveIntegerProp(), | ||
| 2960 | iri="https://spdx.org/rdf/3.0.1/terms/Core/endIntegerRange", | ||
| 2961 | min_count=1, | ||
| 2962 | compact="endIntegerRange", | ||
| 2963 | ) | ||
| 2964 | |||
| 2965 | |||
| 2966 | # Categories of presence or absence. | ||
| 2967 | @register("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType", compact_type="PresenceType", abstract=False) | ||
| 2968 | class PresenceType(SHACLObject): | ||
| 2969 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 2970 | NAMED_INDIVIDUALS = { | ||
| 2971 | "no": "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/no", | ||
| 2972 | "noAssertion": "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/noAssertion", | ||
| 2973 | "yes": "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/yes", | ||
| 2974 | } | ||
| 2975 | # Indicates absence of the field. | ||
| 2976 | no = "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/no" | ||
| 2977 | # Makes no assertion about the field. | ||
| 2978 | noAssertion = "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/noAssertion" | ||
| 2979 | # Indicates presence of the field. | ||
| 2980 | yes = "https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/yes" | ||
| 2981 | |||
| 2982 | |||
| 2983 | # Enumeration of the valid profiles. | ||
| 2984 | @register("https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType", compact_type="ProfileIdentifierType", abstract=False) | ||
| 2985 | class ProfileIdentifierType(SHACLObject): | ||
| 2986 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 2987 | NAMED_INDIVIDUALS = { | ||
| 2988 | "ai": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/ai", | ||
| 2989 | "build": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/build", | ||
| 2990 | "core": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/core", | ||
| 2991 | "dataset": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/dataset", | ||
| 2992 | "expandedLicensing": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/expandedLicensing", | ||
| 2993 | "extension": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/extension", | ||
| 2994 | "lite": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/lite", | ||
| 2995 | "security": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/security", | ||
| 2996 | "simpleLicensing": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/simpleLicensing", | ||
| 2997 | "software": "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/software", | ||
| 2998 | } | ||
| 2999 | # the element follows the AI profile specification | ||
| 3000 | ai = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/ai" | ||
| 3001 | # the element follows the Build profile specification | ||
| 3002 | build = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/build" | ||
| 3003 | # the element follows the Core profile specification | ||
| 3004 | core = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/core" | ||
| 3005 | # the element follows the Dataset profile specification | ||
| 3006 | dataset = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/dataset" | ||
| 3007 | # the element follows the ExpandedLicensing profile specification | ||
| 3008 | expandedLicensing = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/expandedLicensing" | ||
| 3009 | # the element follows the Extension profile specification | ||
| 3010 | extension = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/extension" | ||
| 3011 | # the element follows the Lite profile specification | ||
| 3012 | lite = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/lite" | ||
| 3013 | # the element follows the Security profile specification | ||
| 3014 | security = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/security" | ||
| 3015 | # the element follows the SimpleLicensing profile specification | ||
| 3016 | simpleLicensing = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/simpleLicensing" | ||
| 3017 | # the element follows the Software profile specification | ||
| 3018 | software = "https://spdx.org/rdf/3.0.1/terms/Core/ProfileIdentifierType/software" | ||
| 3019 | |||
| 3020 | |||
| 3021 | # Describes a relationship between one or more elements. | ||
| 3022 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Relationship", compact_type="Relationship", abstract=False) | ||
| 3023 | class Relationship(Element): | ||
| 3024 | NODE_KIND = NodeKind.IRI | ||
| 3025 | ID_ALIAS = "spdxId" | ||
| 3026 | NAMED_INDIVIDUALS = { | ||
| 3027 | } | ||
| 3028 | |||
| 3029 | @classmethod | ||
| 3030 | def _register_props(cls): | ||
| 3031 | super()._register_props() | ||
| 3032 | # Provides information about the completeness of relationships. | ||
| 3033 | cls._add_property( | ||
| 3034 | "completeness", | ||
| 3035 | EnumProp([ | ||
| 3036 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/complete", "complete"), | ||
| 3037 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/incomplete", "incomplete"), | ||
| 3038 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/noAssertion", "noAssertion"), | ||
| 3039 | ]), | ||
| 3040 | iri="https://spdx.org/rdf/3.0.1/terms/Core/completeness", | ||
| 3041 | compact="completeness", | ||
| 3042 | ) | ||
| 3043 | # Specifies the time from which an element is no longer applicable / valid. | ||
| 3044 | cls._add_property( | ||
| 3045 | "endTime", | ||
| 3046 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
| 3047 | iri="https://spdx.org/rdf/3.0.1/terms/Core/endTime", | ||
| 3048 | compact="endTime", | ||
| 3049 | ) | ||
| 3050 | # References the Element on the left-hand side of a relationship. | ||
| 3051 | cls._add_property( | ||
| 3052 | "from_", | ||
| 3053 | ObjectProp(Element, True, context=[ | ||
| 3054 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", "NoneElement"), | ||
| 3055 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"), | ||
| 3056 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"), | ||
| 3057 | ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"), | ||
| 3058 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", "NoAssertionElement"), | ||
| 3059 | ],), | ||
| 3060 | iri="https://spdx.org/rdf/3.0.1/terms/Core/from", | ||
| 3061 | min_count=1, | ||
| 3062 | compact="from", | ||
| 3063 | ) | ||
| 3064 | # Information about the relationship between two Elements. | ||
| 3065 | cls._add_property( | ||
| 3066 | "relationshipType", | ||
| 3067 | EnumProp([ | ||
| 3068 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/affects", "affects"), | ||
| 3069 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/amendedBy", "amendedBy"), | ||
| 3070 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/ancestorOf", "ancestorOf"), | ||
| 3071 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/availableFrom", "availableFrom"), | ||
| 3072 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/configures", "configures"), | ||
| 3073 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/contains", "contains"), | ||
| 3074 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/coordinatedBy", "coordinatedBy"), | ||
| 3075 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/copiedTo", "copiedTo"), | ||
| 3076 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/delegatedTo", "delegatedTo"), | ||
| 3077 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/dependsOn", "dependsOn"), | ||
| 3078 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/descendantOf", "descendantOf"), | ||
| 3079 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/describes", "describes"), | ||
| 3080 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/doesNotAffect", "doesNotAffect"), | ||
| 3081 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/expandsTo", "expandsTo"), | ||
| 3082 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/exploitCreatedBy", "exploitCreatedBy"), | ||
| 3083 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedBy", "fixedBy"), | ||
| 3084 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedIn", "fixedIn"), | ||
| 3085 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/foundBy", "foundBy"), | ||
| 3086 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/generates", "generates"), | ||
| 3087 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAddedFile", "hasAddedFile"), | ||
| 3088 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssessmentFor", "hasAssessmentFor"), | ||
| 3089 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssociatedVulnerability", "hasAssociatedVulnerability"), | ||
| 3090 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasConcludedLicense", "hasConcludedLicense"), | ||
| 3091 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDataFile", "hasDataFile"), | ||
| 3092 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeclaredLicense", "hasDeclaredLicense"), | ||
| 3093 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeletedFile", "hasDeletedFile"), | ||
| 3094 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDependencyManifest", "hasDependencyManifest"), | ||
| 3095 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDistributionArtifact", "hasDistributionArtifact"), | ||
| 3096 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDocumentation", "hasDocumentation"), | ||
| 3097 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDynamicLink", "hasDynamicLink"), | ||
| 3098 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasEvidence", "hasEvidence"), | ||
| 3099 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasExample", "hasExample"), | ||
| 3100 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasHost", "hasHost"), | ||
| 3101 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasInput", "hasInput"), | ||
| 3102 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasMetadata", "hasMetadata"), | ||
| 3103 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalComponent", "hasOptionalComponent"), | ||
| 3104 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalDependency", "hasOptionalDependency"), | ||
| 3105 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOutput", "hasOutput"), | ||
| 3106 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasPrerequisite", "hasPrerequisite"), | ||
| 3107 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasProvidedDependency", "hasProvidedDependency"), | ||
| 3108 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasRequirement", "hasRequirement"), | ||
| 3109 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasSpecification", "hasSpecification"), | ||
| 3110 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasStaticLink", "hasStaticLink"), | ||
| 3111 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTest", "hasTest"), | ||
| 3112 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTestCase", "hasTestCase"), | ||
| 3113 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasVariant", "hasVariant"), | ||
| 3114 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/invokedBy", "invokedBy"), | ||
| 3115 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/modifiedBy", "modifiedBy"), | ||
| 3116 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/other", "other"), | ||
| 3117 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/packagedBy", "packagedBy"), | ||
| 3118 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/patchedBy", "patchedBy"), | ||
| 3119 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/publishedBy", "publishedBy"), | ||
| 3120 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/reportedBy", "reportedBy"), | ||
| 3121 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/republishedBy", "republishedBy"), | ||
| 3122 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/serializedInArtifact", "serializedInArtifact"), | ||
| 3123 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/testedOn", "testedOn"), | ||
| 3124 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/trainedOn", "trainedOn"), | ||
| 3125 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/underInvestigationFor", "underInvestigationFor"), | ||
| 3126 | ("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/usesTool", "usesTool"), | ||
| 3127 | ]), | ||
| 3128 | iri="https://spdx.org/rdf/3.0.1/terms/Core/relationshipType", | ||
| 3129 | min_count=1, | ||
| 3130 | compact="relationshipType", | ||
| 3131 | ) | ||
| 3132 | # Specifies the time from which an element is applicable / valid. | ||
| 3133 | cls._add_property( | ||
| 3134 | "startTime", | ||
| 3135 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
| 3136 | iri="https://spdx.org/rdf/3.0.1/terms/Core/startTime", | ||
| 3137 | compact="startTime", | ||
| 3138 | ) | ||
| 3139 | # References an Element on the right-hand side of a relationship. | ||
| 3140 | cls._add_property( | ||
| 3141 | "to", | ||
| 3142 | ListProp(ObjectProp(Element, False, context=[ | ||
| 3143 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", "NoneElement"), | ||
| 3144 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"), | ||
| 3145 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"), | ||
| 3146 | ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"), | ||
| 3147 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", "NoAssertionElement"), | ||
| 3148 | ],)), | ||
| 3149 | iri="https://spdx.org/rdf/3.0.1/terms/Core/to", | ||
| 3150 | min_count=1, | ||
| 3151 | compact="to", | ||
| 3152 | ) | ||
| 3153 | |||
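A Relationship ties one `from_` Element to one or more `to` Elements with a mandatory relationshipType; completeness, startTime and endTime are optional. A minimal sketch under the same assumptions; pkg_a and pkg_b stand in for previously created Element subclasses and are hypothetical:

    import oe.spdx30 as spdx30

    # pkg_a and pkg_b are placeholders for Element objects built elsewhere.
    rel = spdx30.Relationship(
        from_=pkg_a,
        relationshipType=spdx30.RelationshipType.dependsOn,
        to=[pkg_b],
        completeness=spdx30.RelationshipCompleteness.complete,
    )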
| 3154 | |||
| 3155 | # Indicates whether a relationship is known to be complete, incomplete, or if no assertion is made with respect to relationship completeness. | ||
| 3156 | @register("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness", compact_type="RelationshipCompleteness", abstract=False) | ||
| 3157 | class RelationshipCompleteness(SHACLObject): | ||
| 3158 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 3159 | NAMED_INDIVIDUALS = { | ||
| 3160 | "complete": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/complete", | ||
| 3161 | "incomplete": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/incomplete", | ||
| 3162 | "noAssertion": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/noAssertion", | ||
| 3163 | } | ||
| 3164 | # The relationship is known to be exhaustive. | ||
| 3165 | complete = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/complete" | ||
| 3166 | # The relationship is known not to be exhaustive. | ||
| 3167 | incomplete = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/incomplete" | ||
| 3168 | # No assertion can be made about the completeness of the relationship. | ||
| 3169 | noAssertion = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipCompleteness/noAssertion" | ||
| 3170 | |||
| 3171 | |||
| 3172 | # Information about the relationship between two Elements. | ||
| 3173 | @register("https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType", compact_type="RelationshipType", abstract=False) | ||
| 3174 | class RelationshipType(SHACLObject): | ||
| 3175 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 3176 | NAMED_INDIVIDUALS = { | ||
| 3177 | "affects": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/affects", | ||
| 3178 | "amendedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/amendedBy", | ||
| 3179 | "ancestorOf": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/ancestorOf", | ||
| 3180 | "availableFrom": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/availableFrom", | ||
| 3181 | "configures": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/configures", | ||
| 3182 | "contains": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/contains", | ||
| 3183 | "coordinatedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/coordinatedBy", | ||
| 3184 | "copiedTo": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/copiedTo", | ||
| 3185 | "delegatedTo": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/delegatedTo", | ||
| 3186 | "dependsOn": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/dependsOn", | ||
| 3187 | "descendantOf": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/descendantOf", | ||
| 3188 | "describes": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/describes", | ||
| 3189 | "doesNotAffect": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/doesNotAffect", | ||
| 3190 | "expandsTo": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/expandsTo", | ||
| 3191 | "exploitCreatedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/exploitCreatedBy", | ||
| 3192 | "fixedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedBy", | ||
| 3193 | "fixedIn": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedIn", | ||
| 3194 | "foundBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/foundBy", | ||
| 3195 | "generates": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/generates", | ||
| 3196 | "hasAddedFile": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAddedFile", | ||
| 3197 | "hasAssessmentFor": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssessmentFor", | ||
| 3198 | "hasAssociatedVulnerability": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssociatedVulnerability", | ||
| 3199 | "hasConcludedLicense": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasConcludedLicense", | ||
| 3200 | "hasDataFile": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDataFile", | ||
| 3201 | "hasDeclaredLicense": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeclaredLicense", | ||
| 3202 | "hasDeletedFile": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeletedFile", | ||
| 3203 | "hasDependencyManifest": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDependencyManifest", | ||
| 3204 | "hasDistributionArtifact": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDistributionArtifact", | ||
| 3205 | "hasDocumentation": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDocumentation", | ||
| 3206 | "hasDynamicLink": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDynamicLink", | ||
| 3207 | "hasEvidence": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasEvidence", | ||
| 3208 | "hasExample": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasExample", | ||
| 3209 | "hasHost": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasHost", | ||
| 3210 | "hasInput": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasInput", | ||
| 3211 | "hasMetadata": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasMetadata", | ||
| 3212 | "hasOptionalComponent": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalComponent", | ||
| 3213 | "hasOptionalDependency": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalDependency", | ||
| 3214 | "hasOutput": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOutput", | ||
| 3215 | "hasPrerequisite": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasPrerequisite", | ||
| 3216 | "hasProvidedDependency": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasProvidedDependency", | ||
| 3217 | "hasRequirement": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasRequirement", | ||
| 3218 | "hasSpecification": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasSpecification", | ||
| 3219 | "hasStaticLink": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasStaticLink", | ||
| 3220 | "hasTest": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTest", | ||
| 3221 | "hasTestCase": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTestCase", | ||
| 3222 | "hasVariant": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasVariant", | ||
| 3223 | "invokedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/invokedBy", | ||
| 3224 | "modifiedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/modifiedBy", | ||
| 3225 | "other": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/other", | ||
| 3226 | "packagedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/packagedBy", | ||
| 3227 | "patchedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/patchedBy", | ||
| 3228 | "publishedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/publishedBy", | ||
| 3229 | "reportedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/reportedBy", | ||
| 3230 | "republishedBy": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/republishedBy", | ||
| 3231 | "serializedInArtifact": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/serializedInArtifact", | ||
| 3232 | "testedOn": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/testedOn", | ||
| 3233 | "trainedOn": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/trainedOn", | ||
| 3234 | "underInvestigationFor": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/underInvestigationFor", | ||
| 3235 | "usesTool": "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/usesTool", | ||
| 3236 | } | ||
| 3237 | # The `from` Vulnerability affects each `to` Element. The use of the `affects` type is constrained to `VexAffectedVulnAssessmentRelationship` classed relationships. | ||
| 3238 | affects = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/affects" | ||
| 3239 | # The `from` Element is amended by each `to` Element. | ||
| 3240 | amendedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/amendedBy" | ||
| 3241 | # The `from` Element is an ancestor of each `to` Element. | ||
| 3242 | ancestorOf = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/ancestorOf" | ||
| 3243 | # The `from` Element is available from the additional supplier described by each `to` Element. | ||
| 3244 | availableFrom = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/availableFrom" | ||
| 3245 | # The `from` Element is a configuration applied to each `to` Element, during a LifecycleScopeType period. | ||
| 3246 | configures = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/configures" | ||
| 3247 | # The `from` Element contains each `to` Element. | ||
| 3248 | contains = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/contains" | ||
| 3249 | # The `from` Vulnerability is coordinatedBy the `to` Agent(s) (vendor, researcher, or consumer agent). | ||
| 3250 | coordinatedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/coordinatedBy" | ||
| 3251 | # The `from` Element has been copied to each `to` Element. | ||
| 3252 | copiedTo = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/copiedTo" | ||
| 3253 | # The `from` Agent is delegating an action to the Agent of the `to` Relationship (which must be of type invokedBy), during a LifecycleScopeType (e.g. the `to` invokedBy Relationship is being done on behalf of `from`). | ||
| 3254 | delegatedTo = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/delegatedTo" | ||
| 3255 | # The `from` Element depends on each `to` Element, during a LifecycleScopeType period. | ||
| 3256 | dependsOn = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/dependsOn" | ||
| 3257 | # The `from` Element is a descendant of each `to` Element. | ||
| 3258 | descendantOf = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/descendantOf" | ||
| 3259 | # The `from` Element describes each `to` Element. To denote the root(s) of a tree of elements in a collection, the rootElement property should be used. | ||
| 3260 | describes = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/describes" | ||
| 3261 | # The `from` Vulnerability has no impact on each `to` Element. The use of the `doesNotAffect` is constrained to `VexNotAffectedVulnAssessmentRelationship` classed relationships. | ||
| 3262 | doesNotAffect = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/doesNotAffect" | ||
| 3263 | # The `from` archive expands out as an artifact described by each `to` Element. | ||
| 3264 | expandsTo = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/expandsTo" | ||
| 3265 | # The `from` Vulnerability has had an exploit created against it by each `to` Agent. | ||
| 3266 | exploitCreatedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/exploitCreatedBy" | ||
| 3267 | # Designates a `from` Vulnerability has been fixed by the `to` Agent(s). | ||
| 3268 | fixedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedBy" | ||
| 3269 | # A `from` Vulnerability has been fixed in each `to` Element. The use of the `fixedIn` type is constrained to `VexFixedVulnAssessmentRelationship` classed relationships. | ||
| 3270 | fixedIn = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/fixedIn" | ||
| 3271 | # Designates a `from` Vulnerability was originally discovered by the `to` Agent(s). | ||
| 3272 | foundBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/foundBy" | ||
| 3273 | # The `from` Element generates each `to` Element. | ||
| 3274 | generates = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/generates" | ||
| 3275 | # Every `to` Element is a file added to the `from` Element (`from` hasAddedFile `to`). | ||
| 3276 | hasAddedFile = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAddedFile" | ||
| 3277 | # Relates a `from` Vulnerability and each `to` Element with a security assessment. To be used with `VulnAssessmentRelationship` types. | ||
| 3278 | hasAssessmentFor = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssessmentFor" | ||
| 3279 | # Used to associate a `from` Artifact with each `to` Vulnerability. | ||
| 3280 | hasAssociatedVulnerability = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasAssociatedVulnerability" | ||
| 3281 | # The `from` SoftwareArtifact is concluded by the SPDX data creator to be governed by each `to` license. | ||
| 3282 | hasConcludedLicense = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasConcludedLicense" | ||
| 3283 | # The `from` Element treats each `to` Element as a data file. A data file is an artifact that stores data required or optional for the `from` Element's functionality. A data file can be a database file, an index file, a log file, an AI model file, a calibration data file, a temporary file, a backup file, and more. For AI training dataset, test dataset, test artifact, configuration data, build input data, and build output data, please consider using the more specific relationship types: `trainedOn`, `testedOn`, `hasTest`, `configures`, `hasInput`, and `hasOutput`, respectively. This relationship does not imply dependency. | ||
| 3284 | hasDataFile = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDataFile" | ||
| 3285 | # The `from` SoftwareArtifact was discovered to actually contain each `to` license, for example as detected by use of automated tooling. | ||
| 3286 | hasDeclaredLicense = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeclaredLicense" | ||
| 3287 | # Every `to` Element is a file deleted from the `from` Element (`from` hasDeletedFile `to`). | ||
| 3288 | hasDeletedFile = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDeletedFile" | ||
| 3289 | # The `from` Element has manifest files that contain dependency information in each `to` Element. | ||
| 3290 | hasDependencyManifest = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDependencyManifest" | ||
| 3291 | # The `from` Element is distributed as an artifact in each `to` Element (e.g. an RPM or archive file). | ||
| 3292 | hasDistributionArtifact = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDistributionArtifact" | ||
| 3293 | # The `from` Element is documented by each `to` Element. | ||
| 3294 | hasDocumentation = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDocumentation" | ||
| 3295 | # The `from` Element dynamically links in each `to` Element, during a LifecycleScopeType period. | ||
| 3296 | hasDynamicLink = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasDynamicLink" | ||
| 3297 | # Every `to` Element is considered as evidence for the `from` Element (`from` hasEvidence `to`). | ||
| 3298 | hasEvidence = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasEvidence" | ||
| 3299 | # Every `to` Element is an example for the `from` Element (`from` hasExample `to`). | ||
| 3300 | hasExample = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasExample" | ||
| 3301 | # The `from` Build was run on the `to` Element during a LifecycleScopeType period (e.g. the host that the build runs on). | ||
| 3302 | hasHost = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasHost" | ||
| 3303 | # The `from` Build has each `to` Element as an input, during a LifecycleScopeType period. | ||
| 3304 | hasInput = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasInput" | ||
| 3305 | # Every `to` Element is metadata about the `from` Element (`from` hasMetadata `to`). | ||
| 3306 | hasMetadata = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasMetadata" | ||
| 3307 | # Every `to` Element is an optional component of the `from` Element (`from` hasOptionalComponent `to`). | ||
| 3308 | hasOptionalComponent = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalComponent" | ||
| 3309 | # The `from` Element optionally depends on each `to` Element, during a LifecycleScopeType period. | ||
| 3310 | hasOptionalDependency = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOptionalDependency" | ||
| 3311 | # The `from` Build element generates each `to` Element as an output, during a LifecycleScopeType period. | ||
| 3312 | hasOutput = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasOutput" | ||
| 3313 | # The `from` Element has a prerequisite on each `to` Element, during a LifecycleScopeType period. | ||
| 3314 | hasPrerequisite = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasPrerequisite" | ||
| 3315 | # The `from` Element has a dependency on each `to` Element; the dependency is not in the distributed artifact but is assumed to be provided, during a LifecycleScopeType period. | ||
| 3316 | hasProvidedDependency = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasProvidedDependency" | ||
| 3317 | # The `from` Element has a requirement on each `to` Element, during a LifecycleScopeType period. | ||
| 3318 | hasRequirement = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasRequirement" | ||
| 3319 | # Every `to` Element is a specification for the `from` Element (`from` hasSpecification `to`), during a LifecycleScopeType period. | ||
| 3320 | hasSpecification = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasSpecification" | ||
| 3321 | # The `from` Element statically links in each `to` Element, during a LifecycleScopeType period. | ||
| 3322 | hasStaticLink = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasStaticLink" | ||
| 3323 | # Every `to` Element is a test artifact for the `from` Element (`from` hasTest `to`), during a LifecycleScopeType period. | ||
| 3324 | hasTest = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTest" | ||
| 3325 | # Every `to` Element is a test case for the `from` Element (`from` hasTestCase `to`). | ||
| 3326 | hasTestCase = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasTestCase" | ||
| 3327 | # Every `to` Element is a variant of the `from` Element (`from` hasVariant `to`). | ||
| 3328 | hasVariant = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/hasVariant" | ||
| 3329 | # The `from` Element was invoked by the `to` Agent, during a LifecycleScopeType period (for example, a Build element that describes a build step). | ||
| 3330 | invokedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/invokedBy" | ||
| 3331 | # The `from` Element is modified by each `to` Element. | ||
| 3332 | modifiedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/modifiedBy" | ||
| 3333 | # Every `to` Element is related to the `from` Element where the relationship type is not described by any of the SPDX relationship types (this relationship is directionless). | ||
| 3334 | other = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/other" | ||
| 3335 | # Every `to` Element is a packaged instance of the `from` Element (`from` packagedBy `to`). | ||
| 3336 | packagedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/packagedBy" | ||
| 3337 | # Every `to` Element is a patch for the `from` Element (`from` patchedBy `to`). | ||
| 3338 | patchedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/patchedBy" | ||
| 3339 | # Designates a `from` Vulnerability was made available for public use or reference by each `to` Agent. | ||
| 3340 | publishedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/publishedBy" | ||
| 3341 | # Designates a `from` Vulnerability was first reported to a project, vendor, or tracking database for formal identification by each `to` Agent. | ||
| 3342 | reportedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/reportedBy" | ||
| 3343 | # Designates a `from` Vulnerability's details were tracked, aggregated, and/or enriched to improve context (i.e. NVD) by each `to` Agent. | ||
| 3344 | republishedBy = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/republishedBy" | ||
| 3345 | # The `from` SpdxDocument can be found in a serialized form in each `to` Artifact. | ||
| 3346 | serializedInArtifact = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/serializedInArtifact" | ||
| 3347 | # The `from` Element has been tested on the `to` Element(s). | ||
| 3348 | testedOn = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/testedOn" | ||
| 3349 | # The `from` Element has been trained on the `to` Element(s). | ||
| 3350 | trainedOn = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/trainedOn" | ||
| 3351 | # The `from` Vulnerability impact is being investigated for each `to` Element. The use of the `underInvestigationFor` type is constrained to `VexUnderInvestigationVulnAssessmentRelationship` classed relationships. | ||
| 3352 | underInvestigationFor = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/underInvestigationFor" | ||
| 3353 | # The `from` Element uses each `to` Element as a tool, during a LifecycleScopeType period. | ||
| 3354 | usesTool = "https://spdx.org/rdf/3.0.1/terms/Core/RelationshipType/usesTool" | ||
| 3355 | |||
| 3356 | |||
| 3357 | # A collection of SPDX Elements that could potentially be serialized. | ||
| 3358 | @register("https://spdx.org/rdf/3.0.1/terms/Core/SpdxDocument", compact_type="SpdxDocument", abstract=False) | ||
| 3359 | class SpdxDocument(ElementCollection): | ||
| 3360 | NODE_KIND = NodeKind.IRI | ||
| 3361 | ID_ALIAS = "spdxId" | ||
| 3362 | NAMED_INDIVIDUALS = { | ||
| 3363 | } | ||
| 3364 | |||
| 3365 | @classmethod | ||
| 3366 | def _register_props(cls): | ||
| 3367 | super()._register_props() | ||
| 3368 | # Provides the license under which the SPDX documentation of the Element can be | ||
| 3369 | # used. | ||
| 3370 | cls._add_property( | ||
| 3371 | "dataLicense", | ||
| 3372 | ObjectProp(simplelicensing_AnyLicenseInfo, False, context=[ | ||
| 3373 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"), | ||
| 3374 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"), | ||
| 3375 | ],), | ||
| 3376 | iri="https://spdx.org/rdf/3.0.1/terms/Core/dataLicense", | ||
| 3377 | compact="dataLicense", | ||
| 3378 | ) | ||
| 3379 | # Provides an ExternalMap of Element identifiers. | ||
| 3380 | cls._add_property( | ||
| 3381 | "import_", | ||
| 3382 | ListProp(ObjectProp(ExternalMap, False)), | ||
| 3383 | iri="https://spdx.org/rdf/3.0.1/terms/Core/import", | ||
| 3384 | compact="import", | ||
| 3385 | ) | ||
| 3386 | # Provides a NamespaceMap of prefixes and associated namespace partial URIs applicable to an SpdxDocument and independent of any specific serialization format or instance. | ||
| 3387 | cls._add_property( | ||
| 3388 | "namespaceMap", | ||
| 3389 | ListProp(ObjectProp(NamespaceMap, False)), | ||
| 3390 | iri="https://spdx.org/rdf/3.0.1/terms/Core/namespaceMap", | ||
| 3391 | compact="namespaceMap", | ||
| 3392 | ) | ||
| 3393 | |||
| 3394 | |||
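
Usage illustration (not part of the deleted module): the property registration above suggests an SpdxDocument is filled in by instantiating the class and assigning the registered attributes. This is a minimal sketch, assuming the SHACLObject base class defined earlier in this file exposes registered properties as plain Python attributes; the example.org identifiers are invented.

    # Illustrative only: construct the generated SpdxDocument and assign its
    # registered properties as attributes (assumed attribute-access behaviour).
    doc = SpdxDocument()
    doc.spdxId = "https://example.org/spdx/my-document"   # ID_ALIAS: spdxId names the node IRI
    # dataLicense expects an AnyLicenseInfo; LicenseExpression is one concrete subclass.
    lic = simplelicensing_LicenseExpression()
    lic.spdxId = "https://example.org/spdx/cc0-expression"
    lic.simplelicensing_licenseExpression = "CC0-1.0"
    doc.dataLicense = lic
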
| 3395 | # Indicates the type of support that is associated with an artifact. | ||
| 3396 | @register("https://spdx.org/rdf/3.0.1/terms/Core/SupportType", compact_type="SupportType", abstract=False) | ||
| 3397 | class SupportType(SHACLObject): | ||
| 3398 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 3399 | NAMED_INDIVIDUALS = { | ||
| 3400 | "deployed": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/deployed", | ||
| 3401 | "development": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/development", | ||
| 3402 | "endOfSupport": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/endOfSupport", | ||
| 3403 | "limitedSupport": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/limitedSupport", | ||
| 3404 | "noAssertion": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noAssertion", | ||
| 3405 | "noSupport": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noSupport", | ||
| 3406 | "support": "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/support", | ||
| 3407 | } | ||
| 3408 | # in addition to being supported by the supplier, the software is known to have been deployed and is in use. For a software as a service provider, this implies the software is now available as a service. | ||
| 3409 | deployed = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/deployed" | ||
| 3410 | # the artifact is in active development and is not considered ready for formal support from the supplier. | ||
| 3411 | development = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/development" | ||
| 3412 | # there is a defined end of support for the artifact from the supplier. This may also be referred to as end of life. There is a validUntilDate that can be used to signal when support ends for the artifact. | ||
| 3413 | endOfSupport = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/endOfSupport" | ||
| 3414 | # the artifact has been released, and there is limited support available from the supplier. There is a validUntilDate that can provide additional information about the duration of support. | ||
| 3415 | limitedSupport = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/limitedSupport" | ||
| 3416 | # no assertion about the type of support is made. This is considered the default if no other support type is used. | ||
| 3417 | noAssertion = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noAssertion" | ||
| 3418 | # there is no support for the artifact from the supplier, consumer assumes any support obligations. | ||
| 3419 | noSupport = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noSupport" | ||
| 3420 | # the artifact has been released, and is supported from the supplier. There is a validUntilDate that can provide additional information about the duration of support. | ||
| 3421 | support = "https://spdx.org/rdf/3.0.1/terms/Core/SupportType/support" | ||
| 3422 | |||
| 3423 | |||
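
For these enumeration-style classes the generated pattern is the same throughout the file: each allowed value is a class attribute holding its full IRI, mirrored in NAMED_INDIVIDUALS. A small sketch of how calling code might lean on that pattern; the to_support_iri helper is hypothetical, not an API defined in this module.

    # Each named individual is just an IRI string on the class.
    assert SupportType.deployed == SupportType.NAMED_INDIVIDUALS["deployed"]

    def to_support_iri(short_name):
        # Hypothetical helper: map a short name such as "limitedSupport"
        # to its IRI, rejecting unknown values.
        try:
            return SupportType.NAMED_INDIVIDUALS[short_name]
        except KeyError:
            raise ValueError("Unknown SupportType: %s" % short_name)
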
| 3424 | # An element of hardware and/or software utilized to carry out a particular function. | ||
| 3425 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Tool", compact_type="Tool", abstract=False) | ||
| 3426 | class Tool(Element): | ||
| 3427 | NODE_KIND = NodeKind.IRI | ||
| 3428 | ID_ALIAS = "spdxId" | ||
| 3429 | NAMED_INDIVIDUALS = { | ||
| 3430 | } | ||
| 3431 | |||
| 3432 | |||
| 3433 | # Categories of confidentiality level. | ||
| 3434 | @register("https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType", compact_type="dataset_ConfidentialityLevelType", abstract=False) | ||
| 3435 | class dataset_ConfidentialityLevelType(SHACLObject): | ||
| 3436 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 3437 | NAMED_INDIVIDUALS = { | ||
| 3438 | "amber": "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/amber", | ||
| 3439 | "clear": "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/clear", | ||
| 3440 | "green": "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/green", | ||
| 3441 | "red": "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/red", | ||
| 3442 | } | ||
| 3443 | # Data points in the dataset can be shared only with specific organizations and their clients on a need to know basis. | ||
| 3444 | amber = "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/amber" | ||
| 3445 | # Dataset may be distributed freely, without restriction. | ||
| 3446 | clear = "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/clear" | ||
| 3447 | # Dataset can be shared within a community of peers and partners. | ||
| 3448 | green = "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/green" | ||
| 3449 | # Data points in the dataset are highly confidential and can only be shared with named recipients. | ||
| 3450 | red = "https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/red" | ||
| 3451 | |||
| 3452 | |||
| 3453 | # Availability of dataset. | ||
| 3454 | @register("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType", compact_type="dataset_DatasetAvailabilityType", abstract=False) | ||
| 3455 | class dataset_DatasetAvailabilityType(SHACLObject): | ||
| 3456 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 3457 | NAMED_INDIVIDUALS = { | ||
| 3458 | "clickthrough": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/clickthrough", | ||
| 3459 | "directDownload": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/directDownload", | ||
| 3460 | "query": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/query", | ||
| 3461 | "registration": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/registration", | ||
| 3462 | "scrapingScript": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/scrapingScript", | ||
| 3463 | } | ||
| 3464 | # the dataset is not publicly available and can only be accessed after affirmatively accepting terms on a clickthrough webpage. | ||
| 3465 | clickthrough = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/clickthrough" | ||
| 3466 | # the dataset is publicly available and can be downloaded directly. | ||
| 3467 | directDownload = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/directDownload" | ||
| 3468 | # the dataset is publicly available, but not all at once, and can only be accessed through queries which return parts of the dataset. | ||
| 3469 | query = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/query" | ||
| 3470 | # the dataset is not publicly available and an email registration is required before accessing the dataset, although without an affirmative acceptance of terms. | ||
| 3471 | registration = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/registration" | ||
| 3472 | # the dataset provider is not making available the underlying data and the dataset must be reassembled, typically using the provided script for scraping the data. | ||
| 3473 | scrapingScript = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/scrapingScript" | ||
| 3474 | |||
| 3475 | |||
| 3476 | # Enumeration of dataset types. | ||
| 3477 | @register("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType", compact_type="dataset_DatasetType", abstract=False) | ||
| 3478 | class dataset_DatasetType(SHACLObject): | ||
| 3479 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 3480 | NAMED_INDIVIDUALS = { | ||
| 3481 | "audio": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/audio", | ||
| 3482 | "categorical": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/categorical", | ||
| 3483 | "graph": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/graph", | ||
| 3484 | "image": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/image", | ||
| 3485 | "noAssertion": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/noAssertion", | ||
| 3486 | "numeric": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/numeric", | ||
| 3487 | "other": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/other", | ||
| 3488 | "sensor": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/sensor", | ||
| 3489 | "structured": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/structured", | ||
| 3490 | "syntactic": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/syntactic", | ||
| 3491 | "text": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/text", | ||
| 3492 | "timeseries": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timeseries", | ||
| 3493 | "timestamp": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timestamp", | ||
| 3494 | "video": "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/video", | ||
| 3495 | } | ||
| 3496 | # data is audio based, such as a collection of music from the 80s. | ||
| 3497 | audio = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/audio" | ||
| 3498 | # data that is classified into a discrete number of categories, such as the eye color of a population of people. | ||
| 3499 | categorical = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/categorical" | ||
| 3500 | # data is in the form of a graph where entries are somehow related to each other through edges, such as a social network of friends. | ||
| 3501 | graph = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/graph" | ||
| 3502 | # data is a collection of images such as pictures of animals. | ||
| 3503 | image = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/image" | ||
| 3504 | # data type is not known. | ||
| 3505 | noAssertion = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/noAssertion" | ||
| 3506 | # data consists only of numeric entries. | ||
| 3507 | numeric = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/numeric" | ||
| 3508 | # data is of a type not included in this list. | ||
| 3509 | other = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/other" | ||
| 3510 | # data is recorded from a physical sensor, such as a thermometer reading or biometric device. | ||
| 3511 | sensor = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/sensor" | ||
| 3512 | # data is stored in tabular format or retrieved from a relational database. | ||
| 3513 | structured = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/structured" | ||
| 3514 | # data describes the syntax or semantics of a language or text, such as a parse tree used for natural language processing. | ||
| 3515 | syntactic = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/syntactic" | ||
| 3516 | # data consists of unstructured text, such as a book, Wikipedia article (without images), or transcript. | ||
| 3517 | text = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/text" | ||
| 3518 | # data is recorded in an ordered sequence of timestamped entries, such as the price of a stock over the course of a day. | ||
| 3519 | timeseries = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timeseries" | ||
| 3520 | # data is recorded with a timestamp for each entry, but not necessarily ordered or at specific intervals, such as when a taxi ride starts and ends. | ||
| 3521 | timestamp = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timestamp" | ||
| 3522 | # data is video based, such as a collection of movie clips featuring Tom Hanks. | ||
| 3523 | video = "https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/video" | ||
| 3524 | |||
| 3525 | |||
| 3526 | # Abstract class for additional text intended to be added to a License, but | ||
| 3527 | # which is not itself a standalone License. | ||
| 3528 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/LicenseAddition", compact_type="expandedlicensing_LicenseAddition", abstract=True) | ||
| 3529 | class expandedlicensing_LicenseAddition(Element): | ||
| 3530 | NODE_KIND = NodeKind.IRI | ||
| 3531 | ID_ALIAS = "spdxId" | ||
| 3532 | NAMED_INDIVIDUALS = { | ||
| 3533 | } | ||
| 3534 | |||
| 3535 | @classmethod | ||
| 3536 | def _register_props(cls): | ||
| 3537 | super()._register_props() | ||
| 3538 | # Identifies the full text of a LicenseAddition. | ||
| 3539 | cls._add_property( | ||
| 3540 | "expandedlicensing_additionText", | ||
| 3541 | StringProp(), | ||
| 3542 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/additionText", | ||
| 3543 | min_count=1, | ||
| 3544 | compact="expandedlicensing_additionText", | ||
| 3545 | ) | ||
| 3546 | # Specifies whether an additional text identifier has been marked as deprecated. | ||
| 3547 | cls._add_property( | ||
| 3548 | "expandedlicensing_isDeprecatedAdditionId", | ||
| 3549 | BooleanProp(), | ||
| 3550 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/isDeprecatedAdditionId", | ||
| 3551 | compact="expandedlicensing_isDeprecatedAdditionId", | ||
| 3552 | ) | ||
| 3553 | # Identifies all the text and metadata associated with a license in the license | ||
| 3554 | # XML format. | ||
| 3555 | cls._add_property( | ||
| 3556 | "expandedlicensing_licenseXml", | ||
| 3557 | StringProp(), | ||
| 3558 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/licenseXml", | ||
| 3559 | compact="expandedlicensing_licenseXml", | ||
| 3560 | ) | ||
| 3561 | # Specifies the licenseId that is preferred to be used in place of a deprecated | ||
| 3562 | # License or LicenseAddition. | ||
| 3563 | cls._add_property( | ||
| 3564 | "expandedlicensing_obsoletedBy", | ||
| 3565 | StringProp(), | ||
| 3566 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/obsoletedBy", | ||
| 3567 | compact="expandedlicensing_obsoletedBy", | ||
| 3568 | ) | ||
| 3569 | # Contains a URL where the License or LicenseAddition can be found in use. | ||
| 3570 | cls._add_property( | ||
| 3571 | "expandedlicensing_seeAlso", | ||
| 3572 | ListProp(AnyURIProp()), | ||
| 3573 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/seeAlso", | ||
| 3574 | compact="expandedlicensing_seeAlso", | ||
| 3575 | ) | ||
| 3576 | # Identifies the full text of a LicenseAddition, in SPDX templating format. | ||
| 3577 | cls._add_property( | ||
| 3578 | "expandedlicensing_standardAdditionTemplate", | ||
| 3579 | StringProp(), | ||
| 3580 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/standardAdditionTemplate", | ||
| 3581 | compact="expandedlicensing_standardAdditionTemplate", | ||
| 3582 | ) | ||
| 3583 | |||
| 3584 | |||
| 3585 | # A license exception that is listed on the SPDX Exceptions list. | ||
| 3586 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/ListedLicenseException", compact_type="expandedlicensing_ListedLicenseException", abstract=False) | ||
| 3587 | class expandedlicensing_ListedLicenseException(expandedlicensing_LicenseAddition): | ||
| 3588 | NODE_KIND = NodeKind.IRI | ||
| 3589 | ID_ALIAS = "spdxId" | ||
| 3590 | NAMED_INDIVIDUALS = { | ||
| 3591 | } | ||
| 3592 | |||
| 3593 | @classmethod | ||
| 3594 | def _register_props(cls): | ||
| 3595 | super()._register_props() | ||
| 3596 | # Specifies the SPDX License List version in which this license or exception | ||
| 3597 | # identifier was deprecated. | ||
| 3598 | cls._add_property( | ||
| 3599 | "expandedlicensing_deprecatedVersion", | ||
| 3600 | StringProp(), | ||
| 3601 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/deprecatedVersion", | ||
| 3602 | compact="expandedlicensing_deprecatedVersion", | ||
| 3603 | ) | ||
| 3604 | # Specifies the SPDX License List version in which this ListedLicense or | ||
| 3605 | # ListedLicenseException identifier was first added. | ||
| 3606 | cls._add_property( | ||
| 3607 | "expandedlicensing_listVersionAdded", | ||
| 3608 | StringProp(), | ||
| 3609 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/listVersionAdded", | ||
| 3610 | compact="expandedlicensing_listVersionAdded", | ||
| 3611 | ) | ||
| 3612 | |||
| 3613 | |||
| 3614 | # A property name with an associated value. | ||
| 3615 | @register("https://spdx.org/rdf/3.0.1/terms/Extension/CdxPropertyEntry", compact_type="extension_CdxPropertyEntry", abstract=False) | ||
| 3616 | class extension_CdxPropertyEntry(SHACLObject): | ||
| 3617 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 3618 | NAMED_INDIVIDUALS = { | ||
| 3619 | } | ||
| 3620 | |||
| 3621 | @classmethod | ||
| 3622 | def _register_props(cls): | ||
| 3623 | super()._register_props() | ||
| 3624 | # A name used in a CdxPropertyEntry name-value pair. | ||
| 3625 | cls._add_property( | ||
| 3626 | "extension_cdxPropName", | ||
| 3627 | StringProp(), | ||
| 3628 | iri="https://spdx.org/rdf/3.0.1/terms/Extension/cdxPropName", | ||
| 3629 | min_count=1, | ||
| 3630 | compact="extension_cdxPropName", | ||
| 3631 | ) | ||
| 3632 | # A value used in a CdxPropertyEntry name-value pair. | ||
| 3633 | cls._add_property( | ||
| 3634 | "extension_cdxPropValue", | ||
| 3635 | StringProp(), | ||
| 3636 | iri="https://spdx.org/rdf/3.0.1/terms/Extension/cdxPropValue", | ||
| 3637 | compact="extension_cdxPropValue", | ||
| 3638 | ) | ||
| 3639 | |||
| 3640 | |||
| 3641 | # A characterization of some aspect of an Element that is associated with the Element in a generalized fashion. | ||
| 3642 | @register("https://spdx.org/rdf/3.0.1/terms/Extension/Extension", compact_type="extension_Extension", abstract=True) | ||
| 3643 | class extension_Extension(SHACLExtensibleObject, SHACLObject): | ||
| 3644 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 3645 | NAMED_INDIVIDUALS = { | ||
| 3646 | } | ||
| 3647 | |||
| 3648 | |||
| 3649 | # Specifies the CVSS base, temporal, threat, or environmental severity type. | ||
| 3650 | @register("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType", compact_type="security_CvssSeverityType", abstract=False) | ||
| 3651 | class security_CvssSeverityType(SHACLObject): | ||
| 3652 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 3653 | NAMED_INDIVIDUALS = { | ||
| 3654 | "critical": "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/critical", | ||
| 3655 | "high": "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/high", | ||
| 3656 | "low": "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/low", | ||
| 3657 | "medium": "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/medium", | ||
| 3658 | "none": "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/none", | ||
| 3659 | } | ||
| 3660 | # When a CVSS score is between 9.0 - 10.0 | ||
| 3661 | critical = "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/critical" | ||
| 3662 | # When a CVSS score is between 7.0 - 8.9 | ||
| 3663 | high = "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/high" | ||
| 3664 | # When a CVSS score is between 0.1 - 3.9 | ||
| 3665 | low = "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/low" | ||
| 3666 | # When a CVSS score is between 4.0 - 6.9 | ||
| 3667 | medium = "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/medium" | ||
| 3668 | # When a CVSS score is 0.0 | ||
| 3669 | none = "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/none" | ||
| 3670 | |||
| 3671 | |||
| 3672 | # Specifies the exploit catalog type. | ||
| 3673 | @register("https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType", compact_type="security_ExploitCatalogType", abstract=False) | ||
| 3674 | class security_ExploitCatalogType(SHACLObject): | ||
| 3675 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 3676 | NAMED_INDIVIDUALS = { | ||
| 3677 | "kev": "https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/kev", | ||
| 3678 | "other": "https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/other", | ||
| 3679 | } | ||
| 3680 | # CISA's Known Exploited Vulnerability (KEV) Catalog | ||
| 3681 | kev = "https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/kev" | ||
| 3682 | # Other exploit catalogs | ||
| 3683 | other = "https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/other" | ||
| 3684 | |||
| 3685 | |||
| 3686 | # Specifies the SSVC decision type. | ||
| 3687 | @register("https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType", compact_type="security_SsvcDecisionType", abstract=False) | ||
| 3688 | class security_SsvcDecisionType(SHACLObject): | ||
| 3689 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 3690 | NAMED_INDIVIDUALS = { | ||
| 3691 | "act": "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/act", | ||
| 3692 | "attend": "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/attend", | ||
| 3693 | "track": "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/track", | ||
| 3694 | "trackStar": "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/trackStar", | ||
| 3695 | } | ||
| 3696 | # The vulnerability requires attention from the organization's internal, supervisory-level and leadership-level individuals. Necessary actions include requesting assistance or information about the vulnerability, as well as publishing a notification either internally and/or externally. Typically, internal groups would meet to determine the overall response and then execute agreed upon actions. CISA recommends remediating Act vulnerabilities as soon as possible. | ||
| 3697 | act = "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/act" | ||
| 3698 | # The vulnerability requires attention from the organization's internal, supervisory-level individuals. Necessary actions include requesting assistance or information about the vulnerability, and may involve publishing a notification either internally and/or externally. CISA recommends remediating Attend vulnerabilities sooner than standard update timelines. | ||
| 3699 | attend = "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/attend" | ||
| 3700 | # The vulnerability does not require action at this time. The organization would continue to track the vulnerability and reassess it if new information becomes available. CISA recommends remediating Track vulnerabilities within standard update timelines. | ||
| 3701 | track = "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/track" | ||
| 3702 | # ("Track\*" in the SSVC spec) The vulnerability contains specific characteristics that may require closer monitoring for changes. CISA recommends remediating Track\* vulnerabilities within standard update timelines. | ||
| 3703 | trackStar = "https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/trackStar" | ||
| 3704 | |||
| 3705 | |||
| 3706 | # Specifies the VEX justification type. | ||
| 3707 | @register("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType", compact_type="security_VexJustificationType", abstract=False) | ||
| 3708 | class security_VexJustificationType(SHACLObject): | ||
| 3709 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 3710 | NAMED_INDIVIDUALS = { | ||
| 3711 | "componentNotPresent": "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/componentNotPresent", | ||
| 3712 | "inlineMitigationsAlreadyExist": "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/inlineMitigationsAlreadyExist", | ||
| 3713 | "vulnerableCodeCannotBeControlledByAdversary": "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeCannotBeControlledByAdversary", | ||
| 3714 | "vulnerableCodeNotInExecutePath": "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotInExecutePath", | ||
| 3715 | "vulnerableCodeNotPresent": "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotPresent", | ||
| 3716 | } | ||
| 3717 | # The software is not affected because the vulnerable component is not in the product. | ||
| 3718 | componentNotPresent = "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/componentNotPresent" | ||
| 3719 | # Built-in inline controls or mitigations prevent an adversary from leveraging the vulnerability. | ||
| 3720 | inlineMitigationsAlreadyExist = "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/inlineMitigationsAlreadyExist" | ||
| 3721 | # The vulnerable component is present, and the component contains the vulnerable code. However, vulnerable code is used in such a way that an attacker cannot mount any anticipated attack. | ||
| 3722 | vulnerableCodeCannotBeControlledByAdversary = "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeCannotBeControlledByAdversary" | ||
| 3723 | # The affected code is not reachable through the execution of the code, including non-anticipated states of the product. | ||
| 3724 | vulnerableCodeNotInExecutePath = "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotInExecutePath" | ||
| 3725 | # The product is not affected because the code underlying the vulnerability is not present in the product. | ||
| 3726 | vulnerableCodeNotPresent = "https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotPresent" | ||
| 3727 | |||
| 3728 | |||
| 3729 | # Abstract ancestor class for all vulnerability assessments | ||
| 3730 | @register("https://spdx.org/rdf/3.0.1/terms/Security/VulnAssessmentRelationship", compact_type="security_VulnAssessmentRelationship", abstract=True) | ||
| 3731 | class security_VulnAssessmentRelationship(Relationship): | ||
| 3732 | NODE_KIND = NodeKind.IRI | ||
| 3733 | ID_ALIAS = "spdxId" | ||
| 3734 | NAMED_INDIVIDUALS = { | ||
| 3735 | } | ||
| 3736 | |||
| 3737 | @classmethod | ||
| 3738 | def _register_props(cls): | ||
| 3739 | super()._register_props() | ||
| 3740 | # Identifies who or what supplied the artifact or VulnAssessmentRelationship | ||
| 3741 | # referenced by the Element. | ||
| 3742 | cls._add_property( | ||
| 3743 | "suppliedBy", | ||
| 3744 | ObjectProp(Agent, False, context=[ | ||
| 3745 | ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"), | ||
| 3746 | ],), | ||
| 3747 | iri="https://spdx.org/rdf/3.0.1/terms/Core/suppliedBy", | ||
| 3748 | compact="suppliedBy", | ||
| 3749 | ) | ||
| 3750 | # Specifies an Element contained in a piece of software where a vulnerability was | ||
| 3751 | # found. | ||
| 3752 | cls._add_property( | ||
| 3753 | "security_assessedElement", | ||
| 3754 | ObjectProp(software_SoftwareArtifact, False), | ||
| 3755 | iri="https://spdx.org/rdf/3.0.1/terms/Security/assessedElement", | ||
| 3756 | compact="security_assessedElement", | ||
| 3757 | ) | ||
| 3758 | # Specifies a time when a vulnerability assessment was modified | ||
| 3759 | cls._add_property( | ||
| 3760 | "security_modifiedTime", | ||
| 3761 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
| 3762 | iri="https://spdx.org/rdf/3.0.1/terms/Security/modifiedTime", | ||
| 3763 | compact="security_modifiedTime", | ||
| 3764 | ) | ||
| 3765 | # Specifies the time when a vulnerability was published. | ||
| 3766 | cls._add_property( | ||
| 3767 | "security_publishedTime", | ||
| 3768 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
| 3769 | iri="https://spdx.org/rdf/3.0.1/terms/Security/publishedTime", | ||
| 3770 | compact="security_publishedTime", | ||
| 3771 | ) | ||
| 3772 | # Specifies the time and date when a vulnerability was withdrawn. | ||
| 3773 | cls._add_property( | ||
| 3774 | "security_withdrawnTime", | ||
| 3775 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
| 3776 | iri="https://spdx.org/rdf/3.0.1/terms/Security/withdrawnTime", | ||
| 3777 | compact="security_withdrawnTime", | ||
| 3778 | ) | ||
| 3779 | |||
| 3780 | |||
| 3781 | # Abstract class representing a license combination consisting of one or more licenses. | ||
| 3782 | @register("https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/AnyLicenseInfo", compact_type="simplelicensing_AnyLicenseInfo", abstract=True) | ||
| 3783 | class simplelicensing_AnyLicenseInfo(Element): | ||
| 3784 | NODE_KIND = NodeKind.IRI | ||
| 3785 | ID_ALIAS = "spdxId" | ||
| 3786 | NAMED_INDIVIDUALS = { | ||
| 3787 | } | ||
| 3788 | |||
| 3789 | |||
| 3790 | # An SPDX Element containing an SPDX license expression string. | ||
| 3791 | @register("https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/LicenseExpression", compact_type="simplelicensing_LicenseExpression", abstract=False) | ||
| 3792 | class simplelicensing_LicenseExpression(simplelicensing_AnyLicenseInfo): | ||
| 3793 | NODE_KIND = NodeKind.IRI | ||
| 3794 | ID_ALIAS = "spdxId" | ||
| 3795 | NAMED_INDIVIDUALS = { | ||
| 3796 | } | ||
| 3797 | |||
| 3798 | @classmethod | ||
| 3799 | def _register_props(cls): | ||
| 3800 | super()._register_props() | ||
| 3801 | # Maps a LicenseRef or AdditionRef string for a Custom License or a Custom | ||
| 3802 | # License Addition to its URI ID. | ||
| 3803 | cls._add_property( | ||
| 3804 | "simplelicensing_customIdToUri", | ||
| 3805 | ListProp(ObjectProp(DictionaryEntry, False)), | ||
| 3806 | iri="https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/customIdToUri", | ||
| 3807 | compact="simplelicensing_customIdToUri", | ||
| 3808 | ) | ||
| 3809 | # A string in the license expression format. | ||
| 3810 | cls._add_property( | ||
| 3811 | "simplelicensing_licenseExpression", | ||
| 3812 | StringProp(), | ||
| 3813 | iri="https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/licenseExpression", | ||
| 3814 | min_count=1, | ||
| 3815 | compact="simplelicensing_licenseExpression", | ||
| 3816 | ) | ||
| 3817 | # The version of the SPDX License List used in the license expression. | ||
| 3818 | cls._add_property( | ||
| 3819 | "simplelicensing_licenseListVersion", | ||
| 3820 | StringProp(pattern=r"^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$",), | ||
| 3821 | iri="https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/licenseListVersion", | ||
| 3822 | compact="simplelicensing_licenseListVersion", | ||
| 3823 | ) | ||
| 3824 | |||
| 3825 | |||
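
A minimal sketch of filling in a LicenseExpression under the same assumed attribute-assignment style: min_count=1 makes the expression string the one mandatory field, and the list version must satisfy the semver pattern registered above. The spdxId is invented for illustration.

    expr = simplelicensing_LicenseExpression()
    expr.spdxId = "https://example.org/spdx/gpl2-or-mit"
    expr.simplelicensing_licenseExpression = "GPL-2.0-only OR MIT"  # required (min_count=1)
    expr.simplelicensing_licenseListVersion = "3.24.0"              # must match the semver pattern
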
| 3826 | # A license or addition that is not listed on the SPDX License List. | ||
| 3827 | @register("https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/SimpleLicensingText", compact_type="simplelicensing_SimpleLicensingText", abstract=False) | ||
| 3828 | class simplelicensing_SimpleLicensingText(Element): | ||
| 3829 | NODE_KIND = NodeKind.IRI | ||
| 3830 | ID_ALIAS = "spdxId" | ||
| 3831 | NAMED_INDIVIDUALS = { | ||
| 3832 | } | ||
| 3833 | |||
| 3834 | @classmethod | ||
| 3835 | def _register_props(cls): | ||
| 3836 | super()._register_props() | ||
| 3837 | # Identifies the full text of a License or Addition. | ||
| 3838 | cls._add_property( | ||
| 3839 | "simplelicensing_licenseText", | ||
| 3840 | StringProp(), | ||
| 3841 | iri="https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/licenseText", | ||
| 3842 | min_count=1, | ||
| 3843 | compact="simplelicensing_licenseText", | ||
| 3844 | ) | ||
| 3845 | |||
| 3846 | |||
| 3847 | # A canonical, unique, immutable identifier | ||
| 3848 | @register("https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifier", compact_type="software_ContentIdentifier", abstract=False) | ||
| 3849 | class software_ContentIdentifier(IntegrityMethod): | ||
| 3850 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 3851 | NAMED_INDIVIDUALS = { | ||
| 3852 | } | ||
| 3853 | |||
| 3854 | @classmethod | ||
| 3855 | def _register_props(cls): | ||
| 3856 | super()._register_props() | ||
| 3857 | # Specifies the type of the content identifier. | ||
| 3858 | cls._add_property( | ||
| 3859 | "software_contentIdentifierType", | ||
| 3860 | EnumProp([ | ||
| 3861 | ("https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/gitoid", "gitoid"), | ||
| 3862 | ("https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/swhid", "swhid"), | ||
| 3863 | ]), | ||
| 3864 | iri="https://spdx.org/rdf/3.0.1/terms/Software/contentIdentifierType", | ||
| 3865 | min_count=1, | ||
| 3866 | compact="software_contentIdentifierType", | ||
| 3867 | ) | ||
| 3868 | # Specifies the value of the content identifier. | ||
| 3869 | cls._add_property( | ||
| 3870 | "software_contentIdentifierValue", | ||
| 3871 | AnyURIProp(), | ||
| 3872 | iri="https://spdx.org/rdf/3.0.1/terms/Software/contentIdentifierValue", | ||
| 3873 | min_count=1, | ||
| 3874 | compact="software_contentIdentifierValue", | ||
| 3875 | ) | ||
| 3876 | |||
| 3877 | |||
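
A hedged example of a ContentIdentifier carrying a Software Heritage identifier: both properties are mandatory (min_count=1), and the EnumProp constrains contentIdentifierType to the gitoid/swhid IRIs enumerated in the class that follows. The swhid value is the sample from the comment below; attribute assignment is assumed as above.

    cid = software_ContentIdentifier()
    cid.software_contentIdentifierType = software_ContentIdentifierType.swhid  # one of the two allowed IRIs
    cid.software_contentIdentifierValue = "swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2"
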
| 3878 | # Specifies the type of a content identifier. | ||
| 3879 | @register("https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType", compact_type="software_ContentIdentifierType", abstract=False) | ||
| 3880 | class software_ContentIdentifierType(SHACLObject): | ||
| 3881 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 3882 | NAMED_INDIVIDUALS = { | ||
| 3883 | "gitoid": "https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/gitoid", | ||
| 3884 | "swhid": "https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/swhid", | ||
| 3885 | } | ||
| 3886 | # [Gitoid](https://www.iana.org/assignments/uri-schemes/prov/gitoid), stands for [Git Object ID](https://git-scm.com/book/en/v2/Git-Internals-Git-Objects). A gitoid of type blob is a unique hash of a binary artifact. A gitoid may represent either an [Artifact Identifier](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-identifier-types) for the software artifact or an [Input Manifest Identifier](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#input-manifest-identifier) for the software artifact's associated [Artifact Input Manifest](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-input-manifest); this ambiguity exists because the Artifact Input Manifest is itself an artifact, and the gitoid of that artifact is its valid identifier. Gitoids calculated on software artifacts (Snippet, File, or Package Elements) should be recorded in the SPDX 3.0 SoftwareArtifact's contentIdentifier property. Gitoids calculated on the Artifact Input Manifest (Input Manifest Identifier) should be recorded in the SPDX 3.0 Element's externalIdentifier property. See [OmniBOR Specification](https://github.com/omnibor/spec/), a minimalistic specification for describing software [Artifact Dependency Graphs](https://github.com/omnibor/spec/blob/eb1ee5c961c16215eb8709b2975d193a2007a35d/spec/SPEC.md#artifact-dependency-graph-adg). | ||
| 3887 | gitoid = "https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/gitoid" | ||
| 3888 | # SoftWare Hash IDentifier, a persistent intrinsic identifier for digital artifacts, such as files, trees (also known as directories or folders), commits, and other objects typically found in version control systems. The format of the identifiers is defined in the [SWHID specification](https://www.swhid.org/specification/v1.1/4.Syntax) (ISO/IEC DIS 18670). They typically look like `swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2`. | ||
| 3889 | swhid = "https://spdx.org/rdf/3.0.1/terms/Software/ContentIdentifierType/swhid" | ||
| 3890 | |||
| 3891 | |||
| 3892 | # Enumeration of the different kinds of SPDX file. | ||
| 3893 | @register("https://spdx.org/rdf/3.0.1/terms/Software/FileKindType", compact_type="software_FileKindType", abstract=False) | ||
| 3894 | class software_FileKindType(SHACLObject): | ||
| 3895 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 3896 | NAMED_INDIVIDUALS = { | ||
| 3897 | "directory": "https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/directory", | ||
| 3898 | "file": "https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/file", | ||
| 3899 | } | ||
| 3900 | # The file represents a directory and all content stored in that directory. | ||
| 3901 | directory = "https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/directory" | ||
| 3902 | # The file represents a single file (default). | ||
| 3903 | file = "https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/file" | ||
| 3904 | |||
| 3905 | |||
| 3906 | # Provides a set of values to be used to describe the common types of SBOMs that | ||
| 3907 | # tools may create. | ||
| 3908 | @register("https://spdx.org/rdf/3.0.1/terms/Software/SbomType", compact_type="software_SbomType", abstract=False) | ||
| 3909 | class software_SbomType(SHACLObject): | ||
| 3910 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 3911 | NAMED_INDIVIDUALS = { | ||
| 3912 | "analyzed": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/analyzed", | ||
| 3913 | "build": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/build", | ||
| 3914 | "deployed": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/deployed", | ||
| 3915 | "design": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/design", | ||
| 3916 | "runtime": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/runtime", | ||
| 3917 | "source": "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/source", | ||
| 3918 | } | ||
| 3919 | # SBOM generated through analysis of artifacts (e.g., executables, packages, containers, and virtual machine images) after its build. Such analysis generally requires a variety of heuristics. In some contexts, this may also be referred to as a "3rd party" SBOM. | ||
| 3920 | analyzed = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/analyzed" | ||
| 3921 | # SBOM generated as part of the process of building the software to create a releasable artifact (e.g., executable or package) from data such as source files, dependencies, built components, build process ephemeral data, and other SBOMs. | ||
| 3922 | build = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/build" | ||
| 3923 | # SBOM provides an inventory of software that is present on a system. This may be an assembly of other SBOMs that combines analysis of configuration options, and examination of execution behavior in a (potentially simulated) deployment environment. | ||
| 3924 | deployed = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/deployed" | ||
| 3925 | # SBOM of intended, planned software project or product with included components (some of which may not yet exist) for a new software artifact. | ||
| 3926 | design = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/design" | ||
| 3927 | # SBOM generated through instrumenting the system running the software, to capture only components present in the system, as well as external call-outs or dynamically loaded components. In some contexts, this may also be referred to as an "Instrumented" or "Dynamic" SBOM. | ||
| 3928 | runtime = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/runtime" | ||
| 3929 | # SBOM created directly from the development environment, source files, and included dependencies used to build a product artifact. | ||
| 3930 | source = "https://spdx.org/rdf/3.0.1/terms/Software/SbomType/source" | ||
| 3931 | |||
| 3932 | |||
| 3933 | # Provides information about the primary purpose of an Element. | ||
| 3934 | @register("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose", compact_type="software_SoftwarePurpose", abstract=False) | ||
| 3935 | class software_SoftwarePurpose(SHACLObject): | ||
| 3936 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 3937 | NAMED_INDIVIDUALS = { | ||
| 3938 | "application": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/application", | ||
| 3939 | "archive": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/archive", | ||
| 3940 | "bom": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/bom", | ||
| 3941 | "configuration": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/configuration", | ||
| 3942 | "container": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/container", | ||
| 3943 | "data": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/data", | ||
| 3944 | "device": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/device", | ||
| 3945 | "deviceDriver": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/deviceDriver", | ||
| 3946 | "diskImage": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/diskImage", | ||
| 3947 | "documentation": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/documentation", | ||
| 3948 | "evidence": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/evidence", | ||
| 3949 | "executable": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/executable", | ||
| 3950 | "file": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/file", | ||
| 3951 | "filesystemImage": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/filesystemImage", | ||
| 3952 | "firmware": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/firmware", | ||
| 3953 | "framework": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/framework", | ||
| 3954 | "install": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/install", | ||
| 3955 | "library": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/library", | ||
| 3956 | "manifest": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/manifest", | ||
| 3957 | "model": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/model", | ||
| 3958 | "module": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/module", | ||
| 3959 | "operatingSystem": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/operatingSystem", | ||
| 3960 | "other": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/other", | ||
| 3961 | "patch": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/patch", | ||
| 3962 | "platform": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/platform", | ||
| 3963 | "requirement": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/requirement", | ||
| 3964 | "source": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/source", | ||
| 3965 | "specification": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/specification", | ||
| 3966 | "test": "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/test", | ||
| 3967 | } | ||
| 3968 | # The Element is a software application. | ||
| 3969 | application = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/application" | ||
| 3970 | # The Element is an archived collection of one or more files (.tar, .zip, etc.). | ||
| 3971 | archive = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/archive" | ||
| 3972 | # The Element is a bill of materials. | ||
| 3973 | bom = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/bom" | ||
| 3974 | # The Element is configuration data. | ||
| 3975 | configuration = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/configuration" | ||
| 3976 | # The Element is a container image which can be used by a container runtime application. | ||
| 3977 | container = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/container" | ||
| 3978 | # The Element is data. | ||
| 3979 | data = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/data" | ||
| 3980 | # The Element refers to a chipset, processor, or electronic board. | ||
| 3981 | device = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/device" | ||
| 3982 | # The Element represents software that controls hardware devices. | ||
| 3983 | deviceDriver = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/deviceDriver" | ||
| 3984 | # The Element refers to a disk image that can be written to a disk, booted in a VM, etc. A disk image typically contains most or all of the components necessary to boot, such as bootloaders, kernels, firmware, userspace, etc. | ||
| 3985 | diskImage = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/diskImage" | ||
| 3986 | # The Element is documentation. | ||
| 3987 | documentation = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/documentation" | ||
| 3988 | # The Element is the evidence that a specification or requirement has been fulfilled. | ||
| 3989 | evidence = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/evidence" | ||
| 3990 | # The Element is an Artifact that can be run on a computer. | ||
| 3991 | executable = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/executable" | ||
| 3992 | # The Element is a single file which can be independently distributed (configuration file, statically linked binary, Kubernetes deployment, etc.). | ||
| 3993 | file = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/file" | ||
| 3994 | # The Element is a file system image that can be written to a disk (or virtual) partition. | ||
| 3995 | filesystemImage = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/filesystemImage" | ||
| 3996 | # The Element provides low level control over a device's hardware. | ||
| 3997 | firmware = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/firmware" | ||
| 3998 | # The Element is a software framework. | ||
| 3999 | framework = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/framework" | ||
| 4000 | # The Element is used to install software on disk. | ||
| 4001 | install = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/install" | ||
| 4002 | # The Element is a software library. | ||
| 4003 | library = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/library" | ||
| 4004 | # The Element is a software manifest. | ||
| 4005 | manifest = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/manifest" | ||
| 4006 | # The Element is a machine learning or artificial intelligence model. | ||
| 4007 | model = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/model" | ||
| 4008 | # The Element is a module of a piece of software. | ||
| 4009 | module = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/module" | ||
| 4010 | # The Element is an operating system. | ||
| 4011 | operatingSystem = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/operatingSystem" | ||
| 4012 | # The Element doesn't fit into any of the other categories. | ||
| 4013 | other = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/other" | ||
| 4014 | # The Element contains a set of changes to update, fix, or improve another Element. | ||
| 4015 | patch = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/patch" | ||
| 4016 | # The Element represents a runtime environment. | ||
| 4017 | platform = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/platform" | ||
| 4018 | # The Element provides a requirement needed as input for another Element. | ||
| 4019 | requirement = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/requirement" | ||
| 4020 | # The Element is a single or a collection of source files. | ||
| 4021 | source = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/source" | ||
| 4022 | # The Element is a plan, guideline, or strategy for how to create, perform, or analyze an application. | ||
| 4023 | specification = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/specification" | ||
| 4024 | # The Element is a test used to verify functionality on a software element. | ||
| 4025 | test = "https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/test" | ||
| 4026 | |||
| 4027 | |||
| 4028 | # Class that describes a build instance of software/artifacts. | ||
| 4029 | @register("https://spdx.org/rdf/3.0.1/terms/Build/Build", compact_type="build_Build", abstract=False) | ||
| 4030 | class build_Build(Element): | ||
| 4031 | NODE_KIND = NodeKind.IRI | ||
| 4032 | ID_ALIAS = "spdxId" | ||
| 4033 | NAMED_INDIVIDUALS = { | ||
| 4034 | } | ||
| 4035 | |||
| 4036 | @classmethod | ||
| 4037 | def _register_props(cls): | ||
| 4038 | super()._register_props() | ||
| 4039 | # Property that describes the time at which a build stops. | ||
| 4040 | cls._add_property( | ||
| 4041 | "build_buildEndTime", | ||
| 4042 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
| 4043 | iri="https://spdx.org/rdf/3.0.1/terms/Build/buildEndTime", | ||
| 4044 | compact="build_buildEndTime", | ||
| 4045 | ) | ||
| 4046 | # A buildId is a locally unique identifier used by a builder to identify a unique | ||
| 4047 | # instance of a build produced by it. | ||
| 4048 | cls._add_property( | ||
| 4049 | "build_buildId", | ||
| 4050 | StringProp(), | ||
| 4051 | iri="https://spdx.org/rdf/3.0.1/terms/Build/buildId", | ||
| 4052 | compact="build_buildId", | ||
| 4053 | ) | ||
| 4054 | # Property describing the start time of a build. | ||
| 4055 | cls._add_property( | ||
| 4056 | "build_buildStartTime", | ||
| 4057 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
| 4058 | iri="https://spdx.org/rdf/3.0.1/terms/Build/buildStartTime", | ||
| 4059 | compact="build_buildStartTime", | ||
| 4060 | ) | ||
| 4061 | # A buildType is a hint that is used to indicate the toolchain, platform, or | ||
| 4062 | # infrastructure that the build was invoked on. | ||
| 4063 | cls._add_property( | ||
| 4064 | "build_buildType", | ||
| 4065 | AnyURIProp(), | ||
| 4066 | iri="https://spdx.org/rdf/3.0.1/terms/Build/buildType", | ||
| 4067 | min_count=1, | ||
| 4068 | compact="build_buildType", | ||
| 4069 | ) | ||
| 4070 | # Property that describes the digest of the build configuration file used to | ||
| 4071 | # invoke a build. | ||
| 4072 | cls._add_property( | ||
| 4073 | "build_configSourceDigest", | ||
| 4074 | ListProp(ObjectProp(Hash, False)), | ||
| 4075 | iri="https://spdx.org/rdf/3.0.1/terms/Build/configSourceDigest", | ||
| 4076 | compact="build_configSourceDigest", | ||
| 4077 | ) | ||
| 4078 | # Property describes the invocation entrypoint of a build. | ||
| 4079 | cls._add_property( | ||
| 4080 | "build_configSourceEntrypoint", | ||
| 4081 | ListProp(StringProp()), | ||
| 4082 | iri="https://spdx.org/rdf/3.0.1/terms/Build/configSourceEntrypoint", | ||
| 4083 | compact="build_configSourceEntrypoint", | ||
| 4084 | ) | ||
| 4085 | # Property that describes the URI of the build configuration source file. | ||
| 4086 | cls._add_property( | ||
| 4087 | "build_configSourceUri", | ||
| 4088 | ListProp(AnyURIProp()), | ||
| 4089 | iri="https://spdx.org/rdf/3.0.1/terms/Build/configSourceUri", | ||
| 4090 | compact="build_configSourceUri", | ||
| 4091 | ) | ||
| 4092 | # Property describing the session in which a build is invoked. | ||
| 4093 | cls._add_property( | ||
| 4094 | "build_environment", | ||
| 4095 | ListProp(ObjectProp(DictionaryEntry, False)), | ||
| 4096 | iri="https://spdx.org/rdf/3.0.1/terms/Build/environment", | ||
| 4097 | compact="build_environment", | ||
| 4098 | ) | ||
| 4099 | # Property describing a parameter used in an instance of a build. | ||
| 4100 | cls._add_property( | ||
| 4101 | "build_parameter", | ||
| 4102 | ListProp(ObjectProp(DictionaryEntry, False)), | ||
| 4103 | iri="https://spdx.org/rdf/3.0.1/terms/Build/parameter", | ||
| 4104 | compact="build_parameter", | ||
| 4105 | ) | ||
| 4106 | |||
| 4107 | |||
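
A sketch of recording a build with these properties, again assuming attribute assignment, that list-valued properties behave like Python lists, and that DictionaryEntry (defined earlier in this file) exposes the key/value pair the SPDX Core model gives it. buildType, the only required property here, is a URI hint naming the build system; all values below are illustrative.

    b = build_Build()
    b.spdxId = "https://example.org/spdx/build-42"
    b.build_buildType = "https://example.org/build-types/bitbake"  # required URI hint (invented value)
    b.build_buildStartTime = "2024-05-01T10:00:00Z"                # must match the UTC timestamp pattern
    b.build_buildEndTime = "2024-05-01T10:30:00Z"
    # build_parameter holds DictionaryEntry key/value pairs.
    param = DictionaryEntry()
    param.key = "MACHINE"
    param.value = "qemux86-64"
    b.build_parameter.append(param)
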
| 4108 | # Agent represents anything with the potential to act on a system. | ||
| 4109 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Agent", compact_type="Agent", abstract=False) | ||
| 4110 | class Agent(Element): | ||
| 4111 | NODE_KIND = NodeKind.IRI | ||
| 4112 | ID_ALIAS = "spdxId" | ||
| 4113 | NAMED_INDIVIDUALS = { | ||
| 4114 | } | ||
| 4115 | |||
| 4116 | |||
| 4117 | # An assertion made in relation to one or more elements. | ||
| 4118 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Annotation", compact_type="Annotation", abstract=False) | ||
| 4119 | class Annotation(Element): | ||
| 4120 | NODE_KIND = NodeKind.IRI | ||
| 4121 | ID_ALIAS = "spdxId" | ||
| 4122 | NAMED_INDIVIDUALS = { | ||
| 4123 | } | ||
| 4124 | |||
| 4125 | @classmethod | ||
| 4126 | def _register_props(cls): | ||
| 4127 | super()._register_props() | ||
| 4128 | # Describes the type of annotation. | ||
| 4129 | cls._add_property( | ||
| 4130 | "annotationType", | ||
| 4131 | EnumProp([ | ||
| 4132 | ("https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/other", "other"), | ||
| 4133 | ("https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/review", "review"), | ||
| 4134 | ]), | ||
| 4135 | iri="https://spdx.org/rdf/3.0.1/terms/Core/annotationType", | ||
| 4136 | min_count=1, | ||
| 4137 | compact="annotationType", | ||
| 4138 | ) | ||
| 4139 | # Provides information about the content type of an Element or a Property. | ||
| 4140 | cls._add_property( | ||
| 4141 | "contentType", | ||
| 4142 | StringProp(pattern=r"^[^\/]+\/[^\/]+$",), | ||
| 4143 | iri="https://spdx.org/rdf/3.0.1/terms/Core/contentType", | ||
| 4144 | compact="contentType", | ||
| 4145 | ) | ||
| 4146 | # Commentary on an assertion that an annotator has made. | ||
| 4147 | cls._add_property( | ||
| 4148 | "statement", | ||
| 4149 | StringProp(), | ||
| 4150 | iri="https://spdx.org/rdf/3.0.1/terms/Core/statement", | ||
| 4151 | compact="statement", | ||
| 4152 | ) | ||
| 4153 | # An Element an annotator has made an assertion about. | ||
| 4154 | cls._add_property( | ||
| 4155 | "subject", | ||
| 4156 | ObjectProp(Element, True, context=[ | ||
| 4157 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoneElement", "NoneElement"), | ||
| 4158 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"), | ||
| 4159 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"), | ||
| 4160 | ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"), | ||
| 4161 | ("https://spdx.org/rdf/3.0.1/terms/Core/NoAssertionElement", "NoAssertionElement"), | ||
| 4162 | ],), | ||
| 4163 | iri="https://spdx.org/rdf/3.0.1/terms/Core/subject", | ||
| 4164 | min_count=1, | ||
| 4165 | compact="subject", | ||
| 4166 | ) | ||
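
As a usage sketch (same assumptions as the Build example above: hypothetical spdx30 module, attribute-style property access, illustrative values), an Annotation recording a review of some other element could look like this:

    ann = spdx30.Annotation()
    # annotationType and subject are required (min_count=1)
    ann.annotationType = "https://spdx.org/rdf/3.0.1/terms/Core/AnnotationType/review"
    ann.subject = reviewed_element  # any previously created Element instance
    ann.statement = "License text verified against upstream COPYING file."
    ann.contentType = "text/plain"  # must match the type/subtype pattern above
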
| 4167 | |||
| 4168 | |||
| 4169 | # A distinct article or unit within the digital domain. | ||
| 4170 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Artifact", compact_type="Artifact", abstract=True) | ||
| 4171 | class Artifact(Element): | ||
| 4172 | NODE_KIND = NodeKind.IRI | ||
| 4173 | ID_ALIAS = "spdxId" | ||
| 4174 | NAMED_INDIVIDUALS = { | ||
| 4175 | } | ||
| 4176 | |||
| 4177 | @classmethod | ||
| 4178 | def _register_props(cls): | ||
| 4179 | super()._register_props() | ||
| 4180 | # Specifies the time an artifact was built. | ||
| 4181 | cls._add_property( | ||
| 4182 | "builtTime", | ||
| 4183 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
| 4184 | iri="https://spdx.org/rdf/3.0.1/terms/Core/builtTime", | ||
| 4185 | compact="builtTime", | ||
| 4186 | ) | ||
| 4187 | # Identifies from where or whom the Element originally came. | ||
| 4188 | cls._add_property( | ||
| 4189 | "originatedBy", | ||
| 4190 | ListProp(ObjectProp(Agent, False, context=[ | ||
| 4191 | ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"), | ||
| 4192 | ],)), | ||
| 4193 | iri="https://spdx.org/rdf/3.0.1/terms/Core/originatedBy", | ||
| 4194 | compact="originatedBy", | ||
| 4195 | ) | ||
| 4196 | # Specifies the time an artifact was released. | ||
| 4197 | cls._add_property( | ||
| 4198 | "releaseTime", | ||
| 4199 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
| 4200 | iri="https://spdx.org/rdf/3.0.1/terms/Core/releaseTime", | ||
| 4201 | compact="releaseTime", | ||
| 4202 | ) | ||
| 4203 | # The name of a relevant standard that may apply to an artifact. | ||
| 4204 | cls._add_property( | ||
| 4205 | "standardName", | ||
| 4206 | ListProp(StringProp()), | ||
| 4207 | iri="https://spdx.org/rdf/3.0.1/terms/Core/standardName", | ||
| 4208 | compact="standardName", | ||
| 4209 | ) | ||
| 4210 | # Identifies who or what supplied the artifact or VulnAssessmentRelationship | ||
| 4211 | # referenced by the Element. | ||
| 4212 | cls._add_property( | ||
| 4213 | "suppliedBy", | ||
| 4214 | ObjectProp(Agent, False, context=[ | ||
| 4215 | ("https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", "SpdxOrganization"), | ||
| 4216 | ],), | ||
| 4217 | iri="https://spdx.org/rdf/3.0.1/terms/Core/suppliedBy", | ||
| 4218 | compact="suppliedBy", | ||
| 4219 | ) | ||
| 4220 | # Specifies the level of support associated with an artifact. | ||
| 4221 | cls._add_property( | ||
| 4222 | "supportLevel", | ||
| 4223 | ListProp(EnumProp([ | ||
| 4224 | ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/deployed", "deployed"), | ||
| 4225 | ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/development", "development"), | ||
| 4226 | ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/endOfSupport", "endOfSupport"), | ||
| 4227 | ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/limitedSupport", "limitedSupport"), | ||
| 4228 | ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noAssertion", "noAssertion"), | ||
| 4229 | ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/noSupport", "noSupport"), | ||
| 4230 | ("https://spdx.org/rdf/3.0.1/terms/Core/SupportType/support", "support"), | ||
| 4231 | ])), | ||
| 4232 | iri="https://spdx.org/rdf/3.0.1/terms/Core/supportLevel", | ||
| 4233 | compact="supportLevel", | ||
| 4234 | ) | ||
| 4235 | # Specifies until when the artifact can be used before its usage needs to be | ||
| 4236 | # reassessed. | ||
| 4237 | cls._add_property( | ||
| 4238 | "validUntilTime", | ||
| 4239 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
| 4240 | iri="https://spdx.org/rdf/3.0.1/terms/Core/validUntilTime", | ||
| 4241 | compact="validUntilTime", | ||
| 4242 | ) | ||
| 4243 | |||
| 4244 | |||
| 4245 | # A collection of Elements that have a shared context. | ||
| 4246 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Bundle", compact_type="Bundle", abstract=False) | ||
| 4247 | class Bundle(ElementCollection): | ||
| 4248 | NODE_KIND = NodeKind.IRI | ||
| 4249 | ID_ALIAS = "spdxId" | ||
| 4250 | NAMED_INDIVIDUALS = { | ||
| 4251 | } | ||
| 4252 | |||
| 4253 | @classmethod | ||
| 4254 | def _register_props(cls): | ||
| 4255 | super()._register_props() | ||
| 4256 | # Gives information about the circumstances or unifying properties | ||
| 4257 | # that Elements of the bundle have been assembled under. | ||
| 4258 | cls._add_property( | ||
| 4259 | "context", | ||
| 4260 | StringProp(), | ||
| 4261 | iri="https://spdx.org/rdf/3.0.1/terms/Core/context", | ||
| 4262 | compact="context", | ||
| 4263 | ) | ||
| 4264 | |||
| 4265 | |||
| 4266 | # A mathematically calculated representation of a grouping of data. | ||
| 4267 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Hash", compact_type="Hash", abstract=False) | ||
| 4268 | class Hash(IntegrityMethod): | ||
| 4269 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 4270 | NAMED_INDIVIDUALS = { | ||
| 4271 | } | ||
| 4272 | |||
| 4273 | @classmethod | ||
| 4274 | def _register_props(cls): | ||
| 4275 | super()._register_props() | ||
| 4276 | # Specifies the algorithm used for calculating the hash value. | ||
| 4277 | cls._add_property( | ||
| 4278 | "algorithm", | ||
| 4279 | EnumProp([ | ||
| 4280 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/adler32", "adler32"), | ||
| 4281 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b256", "blake2b256"), | ||
| 4282 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b384", "blake2b384"), | ||
| 4283 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake2b512", "blake2b512"), | ||
| 4284 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/blake3", "blake3"), | ||
| 4285 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsDilithium", "crystalsDilithium"), | ||
| 4286 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/crystalsKyber", "crystalsKyber"), | ||
| 4287 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/falcon", "falcon"), | ||
| 4288 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md2", "md2"), | ||
| 4289 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md4", "md4"), | ||
| 4290 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md5", "md5"), | ||
| 4291 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/md6", "md6"), | ||
| 4292 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/other", "other"), | ||
| 4293 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha1", "sha1"), | ||
| 4294 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha224", "sha224"), | ||
| 4295 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha256", "sha256"), | ||
| 4296 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha384", "sha384"), | ||
| 4297 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_224", "sha3_224"), | ||
| 4298 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_256", "sha3_256"), | ||
| 4299 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_384", "sha3_384"), | ||
| 4300 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha3_512", "sha3_512"), | ||
| 4301 | ("https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha512", "sha512"), | ||
| 4302 | ]), | ||
| 4303 | iri="https://spdx.org/rdf/3.0.1/terms/Core/algorithm", | ||
| 4304 | min_count=1, | ||
| 4305 | compact="algorithm", | ||
| 4306 | ) | ||
| 4307 | # The result of applying a hash algorithm to an Element. | ||
| 4308 | cls._add_property( | ||
| 4309 | "hashValue", | ||
| 4310 | StringProp(), | ||
| 4311 | iri="https://spdx.org/rdf/3.0.1/terms/Core/hashValue", | ||
| 4312 | min_count=1, | ||
| 4313 | compact="hashValue", | ||
| 4314 | ) | ||
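
A Hash is normally attached to another element's integrity information (the verifiedUsing property of Element, defined earlier in this file and not shown here). A small sketch under the same assumptions as the earlier examples:

    import hashlib

    h = spdx30.Hash()
    # both algorithm and hashValue are required (min_count=1)
    h.algorithm = "https://spdx.org/rdf/3.0.1/terms/Core/HashAlgorithm/sha256"
    h.hashValue = hashlib.sha256(b"example file contents").hexdigest()
    # some_file.verifiedUsing.append(h)  # assumed Element property, defined above in this file
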
| 4315 | |||
| 4316 | |||
| 4317 | # Provide context for a relationship that occurs in the lifecycle. | ||
| 4318 | @register("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopedRelationship", compact_type="LifecycleScopedRelationship", abstract=False) | ||
| 4319 | class LifecycleScopedRelationship(Relationship): | ||
| 4320 | NODE_KIND = NodeKind.IRI | ||
| 4321 | ID_ALIAS = "spdxId" | ||
| 4322 | NAMED_INDIVIDUALS = { | ||
| 4323 | } | ||
| 4324 | |||
| 4325 | @classmethod | ||
| 4326 | def _register_props(cls): | ||
| 4327 | super()._register_props() | ||
| 4328 | # Capture the scope of information about a specific relationship between elements. | ||
| 4329 | cls._add_property( | ||
| 4330 | "scope", | ||
| 4331 | EnumProp([ | ||
| 4332 | ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/build", "build"), | ||
| 4333 | ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/design", "design"), | ||
| 4334 | ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/development", "development"), | ||
| 4335 | ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/other", "other"), | ||
| 4336 | ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/runtime", "runtime"), | ||
| 4337 | ("https://spdx.org/rdf/3.0.1/terms/Core/LifecycleScopeType/test", "test"), | ||
| 4338 | ]), | ||
| 4339 | iri="https://spdx.org/rdf/3.0.1/terms/Core/scope", | ||
| 4340 | compact="scope", | ||
| 4341 | ) | ||
| 4342 | |||
| 4343 | |||
| 4344 | # A group of people who work together in an organized way for a shared purpose. | ||
| 4345 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Organization", compact_type="Organization", abstract=False) | ||
| 4346 | class Organization(Agent): | ||
| 4347 | NODE_KIND = NodeKind.IRI | ||
| 4348 | ID_ALIAS = "spdxId" | ||
| 4349 | NAMED_INDIVIDUALS = { | ||
| 4350 | "SpdxOrganization": "https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization", | ||
| 4351 | } | ||
| 4352 | # An Organization representing the SPDX Project. | ||
| 4353 | SpdxOrganization = "https://spdx.org/rdf/3.0.1/terms/Core/SpdxOrganization" | ||
| 4354 | |||
| 4355 | |||
| 4356 | # An individual human being. | ||
| 4357 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Person", compact_type="Person", abstract=False) | ||
| 4358 | class Person(Agent): | ||
| 4359 | NODE_KIND = NodeKind.IRI | ||
| 4360 | ID_ALIAS = "spdxId" | ||
| 4361 | NAMED_INDIVIDUALS = { | ||
| 4362 | } | ||
| 4363 | |||
| 4364 | |||
| 4365 | # A software agent. | ||
| 4366 | @register("https://spdx.org/rdf/3.0.1/terms/Core/SoftwareAgent", compact_type="SoftwareAgent", abstract=False) | ||
| 4367 | class SoftwareAgent(Agent): | ||
| 4368 | NODE_KIND = NodeKind.IRI | ||
| 4369 | ID_ALIAS = "spdxId" | ||
| 4370 | NAMED_INDIVIDUALS = { | ||
| 4371 | } | ||
| 4372 | |||
| 4373 | |||
| 4374 | # Portion of an AnyLicenseInfo representing a set of licensing information | ||
| 4375 | # where all elements apply. | ||
| 4376 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/ConjunctiveLicenseSet", compact_type="expandedlicensing_ConjunctiveLicenseSet", abstract=False) | ||
| 4377 | class expandedlicensing_ConjunctiveLicenseSet(simplelicensing_AnyLicenseInfo): | ||
| 4378 | NODE_KIND = NodeKind.IRI | ||
| 4379 | ID_ALIAS = "spdxId" | ||
| 4380 | NAMED_INDIVIDUALS = { | ||
| 4381 | } | ||
| 4382 | |||
| 4383 | @classmethod | ||
| 4384 | def _register_props(cls): | ||
| 4385 | super()._register_props() | ||
| 4386 | # A license expression participating in a license set. | ||
| 4387 | cls._add_property( | ||
| 4388 | "expandedlicensing_member", | ||
| 4389 | ListProp(ObjectProp(simplelicensing_AnyLicenseInfo, False, context=[ | ||
| 4390 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"), | ||
| 4391 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"), | ||
| 4392 | ],)), | ||
| 4393 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/member", | ||
| 4394 | min_count=2, | ||
| 4395 | compact="expandedlicensing_member", | ||
| 4396 | ) | ||
| 4397 | |||
| 4398 | |||
| 4399 | # A license addition that is not listed on the SPDX Exceptions List. | ||
| 4400 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/CustomLicenseAddition", compact_type="expandedlicensing_CustomLicenseAddition", abstract=False) | ||
| 4401 | class expandedlicensing_CustomLicenseAddition(expandedlicensing_LicenseAddition): | ||
| 4402 | NODE_KIND = NodeKind.IRI | ||
| 4403 | ID_ALIAS = "spdxId" | ||
| 4404 | NAMED_INDIVIDUALS = { | ||
| 4405 | } | ||
| 4406 | |||
| 4407 | |||
| 4408 | # Portion of an AnyLicenseInfo representing a set of licensing information where | ||
| 4409 | # only one of the elements applies. | ||
| 4410 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/DisjunctiveLicenseSet", compact_type="expandedlicensing_DisjunctiveLicenseSet", abstract=False) | ||
| 4411 | class expandedlicensing_DisjunctiveLicenseSet(simplelicensing_AnyLicenseInfo): | ||
| 4412 | NODE_KIND = NodeKind.IRI | ||
| 4413 | ID_ALIAS = "spdxId" | ||
| 4414 | NAMED_INDIVIDUALS = { | ||
| 4415 | } | ||
| 4416 | |||
| 4417 | @classmethod | ||
| 4418 | def _register_props(cls): | ||
| 4419 | super()._register_props() | ||
| 4420 | # A license expression participating in a license set. | ||
| 4421 | cls._add_property( | ||
| 4422 | "expandedlicensing_member", | ||
| 4423 | ListProp(ObjectProp(simplelicensing_AnyLicenseInfo, False, context=[ | ||
| 4424 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", "expandedlicensing_NoAssertionLicense"), | ||
| 4425 | ("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", "expandedlicensing_NoneLicense"), | ||
| 4426 | ],)), | ||
| 4427 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/member", | ||
| 4428 | min_count=2, | ||
| 4429 | compact="expandedlicensing_member", | ||
| 4430 | ) | ||
| 4431 | |||
| 4432 | |||
| 4433 | # Abstract class representing a License or an OrLaterOperator. | ||
| 4434 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/ExtendableLicense", compact_type="expandedlicensing_ExtendableLicense", abstract=True) | ||
| 4435 | class expandedlicensing_ExtendableLicense(simplelicensing_AnyLicenseInfo): | ||
| 4436 | NODE_KIND = NodeKind.IRI | ||
| 4437 | ID_ALIAS = "spdxId" | ||
| 4438 | NAMED_INDIVIDUALS = { | ||
| 4439 | } | ||
| 4440 | |||
| 4441 | |||
| 4442 | # A concrete subclass of AnyLicenseInfo used by Individuals in the | ||
| 4443 | # ExpandedLicensing profile. | ||
| 4444 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/IndividualLicensingInfo", compact_type="expandedlicensing_IndividualLicensingInfo", abstract=False) | ||
| 4445 | class expandedlicensing_IndividualLicensingInfo(simplelicensing_AnyLicenseInfo): | ||
| 4446 | NODE_KIND = NodeKind.IRI | ||
| 4447 | ID_ALIAS = "spdxId" | ||
| 4448 | NAMED_INDIVIDUALS = { | ||
| 4449 | "NoAssertionLicense": "https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense", | ||
| 4450 | "NoneLicense": "https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense", | ||
| 4451 | } | ||
| 4452 | # An Individual Value for License when no assertion can be made about its actual | ||
| 4453 | # value. | ||
| 4454 | NoAssertionLicense = "https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoAssertionLicense" | ||
| 4455 | # An Individual Value for License where the SPDX data creator determines that no | ||
| 4456 | # license is present. | ||
| 4457 | NoneLicense = "https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/NoneLicense" | ||
| 4458 | |||
| 4459 | |||
| 4460 | # Abstract class for the portion of an AnyLicenseInfo representing a license. | ||
| 4461 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/License", compact_type="expandedlicensing_License", abstract=True) | ||
| 4462 | class expandedlicensing_License(expandedlicensing_ExtendableLicense): | ||
| 4463 | NODE_KIND = NodeKind.IRI | ||
| 4464 | ID_ALIAS = "spdxId" | ||
| 4465 | NAMED_INDIVIDUALS = { | ||
| 4466 | } | ||
| 4467 | |||
| 4468 | @classmethod | ||
| 4469 | def _register_props(cls): | ||
| 4470 | super()._register_props() | ||
| 4471 | # Specifies whether a license or additional text identifier has been marked as | ||
| 4472 | # deprecated. | ||
| 4473 | cls._add_property( | ||
| 4474 | "expandedlicensing_isDeprecatedLicenseId", | ||
| 4475 | BooleanProp(), | ||
| 4476 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/isDeprecatedLicenseId", | ||
| 4477 | compact="expandedlicensing_isDeprecatedLicenseId", | ||
| 4478 | ) | ||
| 4479 | # Specifies whether the License is listed as free by the | ||
| 4480 | # Free Software Foundation (FSF). | ||
| 4481 | cls._add_property( | ||
| 4482 | "expandedlicensing_isFsfLibre", | ||
| 4483 | BooleanProp(), | ||
| 4484 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/isFsfLibre", | ||
| 4485 | compact="expandedlicensing_isFsfLibre", | ||
| 4486 | ) | ||
| 4487 | # Specifies whether the License is listed as approved by the | ||
| 4488 | # Open Source Initiative (OSI). | ||
| 4489 | cls._add_property( | ||
| 4490 | "expandedlicensing_isOsiApproved", | ||
| 4491 | BooleanProp(), | ||
| 4492 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/isOsiApproved", | ||
| 4493 | compact="expandedlicensing_isOsiApproved", | ||
| 4494 | ) | ||
| 4495 | # Identifies all the text and metadata associated with a license in the license | ||
| 4496 | # XML format. | ||
| 4497 | cls._add_property( | ||
| 4498 | "expandedlicensing_licenseXml", | ||
| 4499 | StringProp(), | ||
| 4500 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/licenseXml", | ||
| 4501 | compact="expandedlicensing_licenseXml", | ||
| 4502 | ) | ||
| 4503 | # Specifies the licenseId that is preferred to be used in place of a deprecated | ||
| 4504 | # License or LicenseAddition. | ||
| 4505 | cls._add_property( | ||
| 4506 | "expandedlicensing_obsoletedBy", | ||
| 4507 | StringProp(), | ||
| 4508 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/obsoletedBy", | ||
| 4509 | compact="expandedlicensing_obsoletedBy", | ||
| 4510 | ) | ||
| 4511 | # Contains a URL where the License or LicenseAddition can be found in use. | ||
| 4512 | cls._add_property( | ||
| 4513 | "expandedlicensing_seeAlso", | ||
| 4514 | ListProp(AnyURIProp()), | ||
| 4515 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/seeAlso", | ||
| 4516 | compact="expandedlicensing_seeAlso", | ||
| 4517 | ) | ||
| 4518 | # Provides a License author's preferred text to indicate that a file is covered | ||
| 4519 | # by the License. | ||
| 4520 | cls._add_property( | ||
| 4521 | "expandedlicensing_standardLicenseHeader", | ||
| 4522 | StringProp(), | ||
| 4523 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/standardLicenseHeader", | ||
| 4524 | compact="expandedlicensing_standardLicenseHeader", | ||
| 4525 | ) | ||
| 4526 | # Identifies the full text of a License, in SPDX templating format. | ||
| 4527 | cls._add_property( | ||
| 4528 | "expandedlicensing_standardLicenseTemplate", | ||
| 4529 | StringProp(), | ||
| 4530 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/standardLicenseTemplate", | ||
| 4531 | compact="expandedlicensing_standardLicenseTemplate", | ||
| 4532 | ) | ||
| 4533 | # Identifies the full text of a License or Addition. | ||
| 4534 | cls._add_property( | ||
| 4535 | "simplelicensing_licenseText", | ||
| 4536 | StringProp(), | ||
| 4537 | iri="https://spdx.org/rdf/3.0.1/terms/SimpleLicensing/licenseText", | ||
| 4538 | min_count=1, | ||
| 4539 | compact="simplelicensing_licenseText", | ||
| 4540 | ) | ||
| 4541 | |||
| 4542 | |||
| 4543 | # A license that is listed on the SPDX License List. | ||
| 4544 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/ListedLicense", compact_type="expandedlicensing_ListedLicense", abstract=False) | ||
| 4545 | class expandedlicensing_ListedLicense(expandedlicensing_License): | ||
| 4546 | NODE_KIND = NodeKind.IRI | ||
| 4547 | ID_ALIAS = "spdxId" | ||
| 4548 | NAMED_INDIVIDUALS = { | ||
| 4549 | } | ||
| 4550 | |||
| 4551 | @classmethod | ||
| 4552 | def _register_props(cls): | ||
| 4553 | super()._register_props() | ||
| 4554 | # Specifies the SPDX License List version in which this license or exception | ||
| 4555 | # identifier was deprecated. | ||
| 4556 | cls._add_property( | ||
| 4557 | "expandedlicensing_deprecatedVersion", | ||
| 4558 | StringProp(), | ||
| 4559 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/deprecatedVersion", | ||
| 4560 | compact="expandedlicensing_deprecatedVersion", | ||
| 4561 | ) | ||
| 4562 | # Specifies the SPDX License List version in which this ListedLicense or | ||
| 4563 | # ListedLicenseException identifier was first added. | ||
| 4564 | cls._add_property( | ||
| 4565 | "expandedlicensing_listVersionAdded", | ||
| 4566 | StringProp(), | ||
| 4567 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/listVersionAdded", | ||
| 4568 | compact="expandedlicensing_listVersionAdded", | ||
| 4569 | ) | ||
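
With ListedLicense and the license-set classes above, an expression such as "MIT AND Apache-2.0" can be modelled directly. A sketch under the same assumptions (the license texts are truncated placeholders; real SPDX data would carry the full texts):

    mit = spdx30.expandedlicensing_ListedLicense()
    mit.simplelicensing_licenseText = "MIT License ..."  # licenseText is required on License subclasses
    apache2 = spdx30.expandedlicensing_ListedLicense()
    apache2.simplelicensing_licenseText = "Apache License 2.0 ..."

    conj = spdx30.expandedlicensing_ConjunctiveLicenseSet()
    conj.expandedlicensing_member.append(mit)      # a license set needs at least two members (min_count=2)
    conj.expandedlicensing_member.append(apache2)
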
| 4570 | |||
| 4571 | |||
| 4572 | # Portion of an AnyLicenseInfo representing this version, or any later version, | ||
| 4573 | # of the indicated License. | ||
| 4574 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/OrLaterOperator", compact_type="expandedlicensing_OrLaterOperator", abstract=False) | ||
| 4575 | class expandedlicensing_OrLaterOperator(expandedlicensing_ExtendableLicense): | ||
| 4576 | NODE_KIND = NodeKind.IRI | ||
| 4577 | ID_ALIAS = "spdxId" | ||
| 4578 | NAMED_INDIVIDUALS = { | ||
| 4579 | } | ||
| 4580 | |||
| 4581 | @classmethod | ||
| 4582 | def _register_props(cls): | ||
| 4583 | super()._register_props() | ||
| 4584 | # A License participating in an 'or later' model. | ||
| 4585 | cls._add_property( | ||
| 4586 | "expandedlicensing_subjectLicense", | ||
| 4587 | ObjectProp(expandedlicensing_License, True), | ||
| 4588 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/subjectLicense", | ||
| 4589 | min_count=1, | ||
| 4590 | compact="expandedlicensing_subjectLicense", | ||
| 4591 | ) | ||
| 4592 | |||
| 4593 | |||
| 4594 | # Portion of an AnyLicenseInfo representing a License which has additional | ||
| 4595 | # text applied to it. | ||
| 4596 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/WithAdditionOperator", compact_type="expandedlicensing_WithAdditionOperator", abstract=False) | ||
| 4597 | class expandedlicensing_WithAdditionOperator(simplelicensing_AnyLicenseInfo): | ||
| 4598 | NODE_KIND = NodeKind.IRI | ||
| 4599 | ID_ALIAS = "spdxId" | ||
| 4600 | NAMED_INDIVIDUALS = { | ||
| 4601 | } | ||
| 4602 | |||
| 4603 | @classmethod | ||
| 4604 | def _register_props(cls): | ||
| 4605 | super()._register_props() | ||
| 4606 | # A LicenseAddition participating in a 'with addition' model. | ||
| 4607 | cls._add_property( | ||
| 4608 | "expandedlicensing_subjectAddition", | ||
| 4609 | ObjectProp(expandedlicensing_LicenseAddition, True), | ||
| 4610 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/subjectAddition", | ||
| 4611 | min_count=1, | ||
| 4612 | compact="expandedlicensing_subjectAddition", | ||
| 4613 | ) | ||
| 4614 | # A License participating in a 'with addition' model. | ||
| 4615 | cls._add_property( | ||
| 4616 | "expandedlicensing_subjectExtendableLicense", | ||
| 4617 | ObjectProp(expandedlicensing_ExtendableLicense, True), | ||
| 4618 | iri="https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/subjectExtendableLicense", | ||
| 4619 | min_count=1, | ||
| 4620 | compact="expandedlicensing_subjectExtendableLicense", | ||
| 4621 | ) | ||
| 4622 | |||
| 4623 | |||
| 4624 | # A type of extension consisting of a list of name value pairs. | ||
| 4625 | @register("https://spdx.org/rdf/3.0.1/terms/Extension/CdxPropertiesExtension", compact_type="extension_CdxPropertiesExtension", abstract=False) | ||
| 4626 | class extension_CdxPropertiesExtension(extension_Extension): | ||
| 4627 | NODE_KIND = NodeKind.BlankNodeOrIRI | ||
| 4628 | NAMED_INDIVIDUALS = { | ||
| 4629 | } | ||
| 4630 | |||
| 4631 | @classmethod | ||
| 4632 | def _register_props(cls): | ||
| 4633 | super()._register_props() | ||
| 4634 | # Provides a map of property names to values. | ||
| 4635 | cls._add_property( | ||
| 4636 | "extension_cdxProperty", | ||
| 4637 | ListProp(ObjectProp(extension_CdxPropertyEntry, False)), | ||
| 4638 | iri="https://spdx.org/rdf/3.0.1/terms/Extension/cdxProperty", | ||
| 4639 | min_count=1, | ||
| 4640 | compact="extension_cdxProperty", | ||
| 4641 | ) | ||
| 4642 | |||
| 4643 | |||
| 4644 | # Provides a CVSS version 2.0 assessment for a vulnerability. | ||
| 4645 | @register("https://spdx.org/rdf/3.0.1/terms/Security/CvssV2VulnAssessmentRelationship", compact_type="security_CvssV2VulnAssessmentRelationship", abstract=False) | ||
| 4646 | class security_CvssV2VulnAssessmentRelationship(security_VulnAssessmentRelationship): | ||
| 4647 | NODE_KIND = NodeKind.IRI | ||
| 4648 | ID_ALIAS = "spdxId" | ||
| 4649 | NAMED_INDIVIDUALS = { | ||
| 4650 | } | ||
| 4651 | |||
| 4652 | @classmethod | ||
| 4653 | def _register_props(cls): | ||
| 4654 | super()._register_props() | ||
| 4655 | # Provides a numerical (0-10) representation of the severity of a vulnerability. | ||
| 4656 | cls._add_property( | ||
| 4657 | "security_score", | ||
| 4658 | FloatProp(), | ||
| 4659 | iri="https://spdx.org/rdf/3.0.1/terms/Security/score", | ||
| 4660 | min_count=1, | ||
| 4661 | compact="security_score", | ||
| 4662 | ) | ||
| 4663 | # Specifies the CVSS vector string for a vulnerability. | ||
| 4664 | cls._add_property( | ||
| 4665 | "security_vectorString", | ||
| 4666 | StringProp(), | ||
| 4667 | iri="https://spdx.org/rdf/3.0.1/terms/Security/vectorString", | ||
| 4668 | min_count=1, | ||
| 4669 | compact="security_vectorString", | ||
| 4670 | ) | ||
| 4671 | |||
| 4672 | |||
| 4673 | # Provides a CVSS version 3 assessment for a vulnerability. | ||
| 4674 | @register("https://spdx.org/rdf/3.0.1/terms/Security/CvssV3VulnAssessmentRelationship", compact_type="security_CvssV3VulnAssessmentRelationship", abstract=False) | ||
| 4675 | class security_CvssV3VulnAssessmentRelationship(security_VulnAssessmentRelationship): | ||
| 4676 | NODE_KIND = NodeKind.IRI | ||
| 4677 | ID_ALIAS = "spdxId" | ||
| 4678 | NAMED_INDIVIDUALS = { | ||
| 4679 | } | ||
| 4680 | |||
| 4681 | @classmethod | ||
| 4682 | def _register_props(cls): | ||
| 4683 | super()._register_props() | ||
| 4684 | # Provides a numerical (0-10) representation of the severity of a vulnerability. | ||
| 4685 | cls._add_property( | ||
| 4686 | "security_score", | ||
| 4687 | FloatProp(), | ||
| 4688 | iri="https://spdx.org/rdf/3.0.1/terms/Security/score", | ||
| 4689 | min_count=1, | ||
| 4690 | compact="security_score", | ||
| 4691 | ) | ||
| 4692 | # Specifies the CVSS qualitative severity rating of a vulnerability in relation to a piece of software. | ||
| 4693 | cls._add_property( | ||
| 4694 | "security_severity", | ||
| 4695 | EnumProp([ | ||
| 4696 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/critical", "critical"), | ||
| 4697 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/high", "high"), | ||
| 4698 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/low", "low"), | ||
| 4699 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/medium", "medium"), | ||
| 4700 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/none", "none"), | ||
| 4701 | ]), | ||
| 4702 | iri="https://spdx.org/rdf/3.0.1/terms/Security/severity", | ||
| 4703 | min_count=1, | ||
| 4704 | compact="security_severity", | ||
| 4705 | ) | ||
| 4706 | # Specifies the CVSS vector string for a vulnerability. | ||
| 4707 | cls._add_property( | ||
| 4708 | "security_vectorString", | ||
| 4709 | StringProp(), | ||
| 4710 | iri="https://spdx.org/rdf/3.0.1/terms/Security/vectorString", | ||
| 4711 | min_count=1, | ||
| 4712 | compact="security_vectorString", | ||
| 4713 | ) | ||
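
A sketch of how a CVSS v3 assessment might be populated, under the same assumptions as the earlier examples; the vulnerability and product endpoints come from Relationship properties defined earlier in this file and are omitted here, and the score and vector values are illustrative:

    cvss = spdx30.security_CvssV3VulnAssessmentRelationship()
    # score, severity and vectorString are all required (min_count=1)
    cvss.security_score = 7.5
    cvss.security_severity = "https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/high"
    cvss.security_vectorString = "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H"
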
| 4714 | |||
| 4715 | |||
| 4716 | # Provides a CVSS version 4 assessment for a vulnerability. | ||
| 4717 | @register("https://spdx.org/rdf/3.0.1/terms/Security/CvssV4VulnAssessmentRelationship", compact_type="security_CvssV4VulnAssessmentRelationship", abstract=False) | ||
| 4718 | class security_CvssV4VulnAssessmentRelationship(security_VulnAssessmentRelationship): | ||
| 4719 | NODE_KIND = NodeKind.IRI | ||
| 4720 | ID_ALIAS = "spdxId" | ||
| 4721 | NAMED_INDIVIDUALS = { | ||
| 4722 | } | ||
| 4723 | |||
| 4724 | @classmethod | ||
| 4725 | def _register_props(cls): | ||
| 4726 | super()._register_props() | ||
| 4727 | # Provides a numerical (0-10) representation of the severity of a vulnerability. | ||
| 4728 | cls._add_property( | ||
| 4729 | "security_score", | ||
| 4730 | FloatProp(), | ||
| 4731 | iri="https://spdx.org/rdf/3.0.1/terms/Security/score", | ||
| 4732 | min_count=1, | ||
| 4733 | compact="security_score", | ||
| 4734 | ) | ||
| 4735 | # Specifies the CVSS qualitative severity rating of a vulnerability in relation to a piece of software. | ||
| 4736 | cls._add_property( | ||
| 4737 | "security_severity", | ||
| 4738 | EnumProp([ | ||
| 4739 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/critical", "critical"), | ||
| 4740 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/high", "high"), | ||
| 4741 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/low", "low"), | ||
| 4742 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/medium", "medium"), | ||
| 4743 | ("https://spdx.org/rdf/3.0.1/terms/Security/CvssSeverityType/none", "none"), | ||
| 4744 | ]), | ||
| 4745 | iri="https://spdx.org/rdf/3.0.1/terms/Security/severity", | ||
| 4746 | min_count=1, | ||
| 4747 | compact="security_severity", | ||
| 4748 | ) | ||
| 4749 | # Specifies the CVSS vector string for a vulnerability. | ||
| 4750 | cls._add_property( | ||
| 4751 | "security_vectorString", | ||
| 4752 | StringProp(), | ||
| 4753 | iri="https://spdx.org/rdf/3.0.1/terms/Security/vectorString", | ||
| 4754 | min_count=1, | ||
| 4755 | compact="security_vectorString", | ||
| 4756 | ) | ||
| 4757 | |||
| 4758 | |||
| 4759 | # Provides an EPSS assessment for a vulnerability. | ||
| 4760 | @register("https://spdx.org/rdf/3.0.1/terms/Security/EpssVulnAssessmentRelationship", compact_type="security_EpssVulnAssessmentRelationship", abstract=False) | ||
| 4761 | class security_EpssVulnAssessmentRelationship(security_VulnAssessmentRelationship): | ||
| 4762 | NODE_KIND = NodeKind.IRI | ||
| 4763 | ID_ALIAS = "spdxId" | ||
| 4764 | NAMED_INDIVIDUALS = { | ||
| 4765 | } | ||
| 4766 | |||
| 4767 | @classmethod | ||
| 4768 | def _register_props(cls): | ||
| 4769 | super()._register_props() | ||
| 4770 | # The percentile of the current probability score. | ||
| 4771 | cls._add_property( | ||
| 4772 | "security_percentile", | ||
| 4773 | FloatProp(), | ||
| 4774 | iri="https://spdx.org/rdf/3.0.1/terms/Security/percentile", | ||
| 4775 | min_count=1, | ||
| 4776 | compact="security_percentile", | ||
| 4777 | ) | ||
| 4778 | # A probability score between 0 and 1 of a vulnerability being exploited. | ||
| 4779 | cls._add_property( | ||
| 4780 | "security_probability", | ||
| 4781 | FloatProp(), | ||
| 4782 | iri="https://spdx.org/rdf/3.0.1/terms/Security/probability", | ||
| 4783 | min_count=1, | ||
| 4784 | compact="security_probability", | ||
| 4785 | ) | ||
| 4786 | |||
| 4787 | |||
| 4788 | # Provides an exploit assessment of a vulnerability. | ||
| 4789 | @register("https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogVulnAssessmentRelationship", compact_type="security_ExploitCatalogVulnAssessmentRelationship", abstract=False) | ||
| 4790 | class security_ExploitCatalogVulnAssessmentRelationship(security_VulnAssessmentRelationship): | ||
| 4791 | NODE_KIND = NodeKind.IRI | ||
| 4792 | ID_ALIAS = "spdxId" | ||
| 4793 | NAMED_INDIVIDUALS = { | ||
| 4794 | } | ||
| 4795 | |||
| 4796 | @classmethod | ||
| 4797 | def _register_props(cls): | ||
| 4798 | super()._register_props() | ||
| 4799 | # Specifies the exploit catalog type. | ||
| 4800 | cls._add_property( | ||
| 4801 | "security_catalogType", | ||
| 4802 | EnumProp([ | ||
| 4803 | ("https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/kev", "kev"), | ||
| 4804 | ("https://spdx.org/rdf/3.0.1/terms/Security/ExploitCatalogType/other", "other"), | ||
| 4805 | ]), | ||
| 4806 | iri="https://spdx.org/rdf/3.0.1/terms/Security/catalogType", | ||
| 4807 | min_count=1, | ||
| 4808 | compact="security_catalogType", | ||
| 4809 | ) | ||
| 4810 | # Describes that a CVE is known to have an exploit because it has been listed in an exploit catalog. | ||
| 4811 | cls._add_property( | ||
| 4812 | "security_exploited", | ||
| 4813 | BooleanProp(), | ||
| 4814 | iri="https://spdx.org/rdf/3.0.1/terms/Security/exploited", | ||
| 4815 | min_count=1, | ||
| 4816 | compact="security_exploited", | ||
| 4817 | ) | ||
| 4818 | # Provides the location of an exploit catalog. | ||
| 4819 | cls._add_property( | ||
| 4820 | "security_locator", | ||
| 4821 | AnyURIProp(), | ||
| 4822 | iri="https://spdx.org/rdf/3.0.1/terms/Security/locator", | ||
| 4823 | min_count=1, | ||
| 4824 | compact="security_locator", | ||
| 4825 | ) | ||
| 4826 | |||
| 4827 | |||
| 4828 | # Provides an SSVC assessment for a vulnerability. | ||
| 4829 | @register("https://spdx.org/rdf/3.0.1/terms/Security/SsvcVulnAssessmentRelationship", compact_type="security_SsvcVulnAssessmentRelationship", abstract=False) | ||
| 4830 | class security_SsvcVulnAssessmentRelationship(security_VulnAssessmentRelationship): | ||
| 4831 | NODE_KIND = NodeKind.IRI | ||
| 4832 | ID_ALIAS = "spdxId" | ||
| 4833 | NAMED_INDIVIDUALS = { | ||
| 4834 | } | ||
| 4835 | |||
| 4836 | @classmethod | ||
| 4837 | def _register_props(cls): | ||
| 4838 | super()._register_props() | ||
| 4839 | # Provides the enumeration of possible decisions in the | ||
| 4840 | # [Stakeholder-Specific Vulnerability Categorization (SSVC) decision tree](https://www.cisa.gov/stakeholder-specific-vulnerability-categorization-ssvc). | ||
| 4841 | cls._add_property( | ||
| 4842 | "security_decisionType", | ||
| 4843 | EnumProp([ | ||
| 4844 | ("https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/act", "act"), | ||
| 4845 | ("https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/attend", "attend"), | ||
| 4846 | ("https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/track", "track"), | ||
| 4847 | ("https://spdx.org/rdf/3.0.1/terms/Security/SsvcDecisionType/trackStar", "trackStar"), | ||
| 4848 | ]), | ||
| 4849 | iri="https://spdx.org/rdf/3.0.1/terms/Security/decisionType", | ||
| 4850 | min_count=1, | ||
| 4851 | compact="security_decisionType", | ||
| 4852 | ) | ||
| 4853 | |||
| 4854 | |||
| 4855 | # Abstract ancestor class for all VEX relationships. | ||
| 4856 | @register("https://spdx.org/rdf/3.0.1/terms/Security/VexVulnAssessmentRelationship", compact_type="security_VexVulnAssessmentRelationship", abstract=True) | ||
| 4857 | class security_VexVulnAssessmentRelationship(security_VulnAssessmentRelationship): | ||
| 4858 | NODE_KIND = NodeKind.IRI | ||
| 4859 | ID_ALIAS = "spdxId" | ||
| 4860 | NAMED_INDIVIDUALS = { | ||
| 4861 | } | ||
| 4862 | |||
| 4863 | @classmethod | ||
| 4864 | def _register_props(cls): | ||
| 4865 | super()._register_props() | ||
| 4866 | # Conveys information about how VEX status was determined. | ||
| 4867 | cls._add_property( | ||
| 4868 | "security_statusNotes", | ||
| 4869 | StringProp(), | ||
| 4870 | iri="https://spdx.org/rdf/3.0.1/terms/Security/statusNotes", | ||
| 4871 | compact="security_statusNotes", | ||
| 4872 | ) | ||
| 4873 | # Specifies the version of a VEX statement. | ||
| 4874 | cls._add_property( | ||
| 4875 | "security_vexVersion", | ||
| 4876 | StringProp(), | ||
| 4877 | iri="https://spdx.org/rdf/3.0.1/terms/Security/vexVersion", | ||
| 4878 | compact="security_vexVersion", | ||
| 4879 | ) | ||
| 4880 | |||
| 4881 | |||
| 4882 | # Specifies a vulnerability and its associated information. | ||
| 4883 | @register("https://spdx.org/rdf/3.0.1/terms/Security/Vulnerability", compact_type="security_Vulnerability", abstract=False) | ||
| 4884 | class security_Vulnerability(Artifact): | ||
| 4885 | NODE_KIND = NodeKind.IRI | ||
| 4886 | ID_ALIAS = "spdxId" | ||
| 4887 | NAMED_INDIVIDUALS = { | ||
| 4888 | } | ||
| 4889 | |||
| 4890 | @classmethod | ||
| 4891 | def _register_props(cls): | ||
| 4892 | super()._register_props() | ||
| 4893 | # Specifies a time when a vulnerability assessment was modified. | ||
| 4894 | cls._add_property( | ||
| 4895 | "security_modifiedTime", | ||
| 4896 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
| 4897 | iri="https://spdx.org/rdf/3.0.1/terms/Security/modifiedTime", | ||
| 4898 | compact="security_modifiedTime", | ||
| 4899 | ) | ||
| 4900 | # Specifies the time when a vulnerability was published. | ||
| 4901 | cls._add_property( | ||
| 4902 | "security_publishedTime", | ||
| 4903 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
| 4904 | iri="https://spdx.org/rdf/3.0.1/terms/Security/publishedTime", | ||
| 4905 | compact="security_publishedTime", | ||
| 4906 | ) | ||
| 4907 | # Specifies the time and date when a vulnerability was withdrawn. | ||
| 4908 | cls._add_property( | ||
| 4909 | "security_withdrawnTime", | ||
| 4910 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
| 4911 | iri="https://spdx.org/rdf/3.0.1/terms/Security/withdrawnTime", | ||
| 4912 | compact="security_withdrawnTime", | ||
| 4913 | ) | ||
| 4914 | |||
| 4915 | |||
| 4916 | # A distinct article or unit related to Software. | ||
| 4917 | @register("https://spdx.org/rdf/3.0.1/terms/Software/SoftwareArtifact", compact_type="software_SoftwareArtifact", abstract=True) | ||
| 4918 | class software_SoftwareArtifact(Artifact): | ||
| 4919 | NODE_KIND = NodeKind.IRI | ||
| 4920 | ID_ALIAS = "spdxId" | ||
| 4921 | NAMED_INDIVIDUALS = { | ||
| 4922 | } | ||
| 4923 | |||
| 4924 | @classmethod | ||
| 4925 | def _register_props(cls): | ||
| 4926 | super()._register_props() | ||
| 4927 | # Provides additional purpose information of the software artifact. | ||
| 4928 | cls._add_property( | ||
| 4929 | "software_additionalPurpose", | ||
| 4930 | ListProp(EnumProp([ | ||
| 4931 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/application", "application"), | ||
| 4932 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/archive", "archive"), | ||
| 4933 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/bom", "bom"), | ||
| 4934 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/configuration", "configuration"), | ||
| 4935 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/container", "container"), | ||
| 4936 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/data", "data"), | ||
| 4937 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/device", "device"), | ||
| 4938 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/deviceDriver", "deviceDriver"), | ||
| 4939 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/diskImage", "diskImage"), | ||
| 4940 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/documentation", "documentation"), | ||
| 4941 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/evidence", "evidence"), | ||
| 4942 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/executable", "executable"), | ||
| 4943 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/file", "file"), | ||
| 4944 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/filesystemImage", "filesystemImage"), | ||
| 4945 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/firmware", "firmware"), | ||
| 4946 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/framework", "framework"), | ||
| 4947 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/install", "install"), | ||
| 4948 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/library", "library"), | ||
| 4949 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/manifest", "manifest"), | ||
| 4950 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/model", "model"), | ||
| 4951 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/module", "module"), | ||
| 4952 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/operatingSystem", "operatingSystem"), | ||
| 4953 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/other", "other"), | ||
| 4954 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/patch", "patch"), | ||
| 4955 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/platform", "platform"), | ||
| 4956 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/requirement", "requirement"), | ||
| 4957 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/source", "source"), | ||
| 4958 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/specification", "specification"), | ||
| 4959 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/test", "test"), | ||
| 4960 | ])), | ||
| 4961 | iri="https://spdx.org/rdf/3.0.1/terms/Software/additionalPurpose", | ||
| 4962 | compact="software_additionalPurpose", | ||
| 4963 | ) | ||
| 4964 | # Provides a place for the SPDX data creator to record acknowledgement text for | ||
| 4965 | # a software Package, File or Snippet. | ||
| 4966 | cls._add_property( | ||
| 4967 | "software_attributionText", | ||
| 4968 | ListProp(StringProp()), | ||
| 4969 | iri="https://spdx.org/rdf/3.0.1/terms/Software/attributionText", | ||
| 4970 | compact="software_attributionText", | ||
| 4971 | ) | ||
| 4972 | # A canonical, unique, immutable identifier of the artifact content, that may be | ||
| 4973 | # used for verifying its identity and/or integrity. | ||
| 4974 | cls._add_property( | ||
| 4975 | "software_contentIdentifier", | ||
| 4976 | ListProp(ObjectProp(software_ContentIdentifier, False)), | ||
| 4977 | iri="https://spdx.org/rdf/3.0.1/terms/Software/contentIdentifier", | ||
| 4978 | compact="software_contentIdentifier", | ||
| 4979 | ) | ||
| 4980 | # Identifies the text of one or more copyright notices for a software Package, | ||
| 4981 | # File or Snippet, if any. | ||
| 4982 | cls._add_property( | ||
| 4983 | "software_copyrightText", | ||
| 4984 | StringProp(), | ||
| 4985 | iri="https://spdx.org/rdf/3.0.1/terms/Software/copyrightText", | ||
| 4986 | compact="software_copyrightText", | ||
| 4987 | ) | ||
| 4988 | # Provides information about the primary purpose of the software artifact. | ||
| 4989 | cls._add_property( | ||
| 4990 | "software_primaryPurpose", | ||
| 4991 | EnumProp([ | ||
| 4992 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/application", "application"), | ||
| 4993 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/archive", "archive"), | ||
| 4994 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/bom", "bom"), | ||
| 4995 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/configuration", "configuration"), | ||
| 4996 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/container", "container"), | ||
| 4997 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/data", "data"), | ||
| 4998 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/device", "device"), | ||
| 4999 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/deviceDriver", "deviceDriver"), | ||
| 5000 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/diskImage", "diskImage"), | ||
| 5001 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/documentation", "documentation"), | ||
| 5002 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/evidence", "evidence"), | ||
| 5003 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/executable", "executable"), | ||
| 5004 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/file", "file"), | ||
| 5005 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/filesystemImage", "filesystemImage"), | ||
| 5006 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/firmware", "firmware"), | ||
| 5007 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/framework", "framework"), | ||
| 5008 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/install", "install"), | ||
| 5009 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/library", "library"), | ||
| 5010 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/manifest", "manifest"), | ||
| 5011 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/model", "model"), | ||
| 5012 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/module", "module"), | ||
| 5013 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/operatingSystem", "operatingSystem"), | ||
| 5014 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/other", "other"), | ||
| 5015 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/patch", "patch"), | ||
| 5016 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/platform", "platform"), | ||
| 5017 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/requirement", "requirement"), | ||
| 5018 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/source", "source"), | ||
| 5019 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/specification", "specification"), | ||
| 5020 | ("https://spdx.org/rdf/3.0.1/terms/Software/SoftwarePurpose/test", "test"), | ||
| 5021 | ]), | ||
| 5022 | iri="https://spdx.org/rdf/3.0.1/terms/Software/primaryPurpose", | ||
| 5023 | compact="software_primaryPurpose", | ||
| 5024 | ) | ||
| 5025 | |||
| 5026 | |||
| 5027 | # A container for a grouping of SPDX-3.0 content characterizing details | ||
| 5028 | # (provenance, composition, licensing, etc.) about a product. | ||
| 5029 | @register("https://spdx.org/rdf/3.0.1/terms/Core/Bom", compact_type="Bom", abstract=False) | ||
| 5030 | class Bom(Bundle): | ||
| 5031 | NODE_KIND = NodeKind.IRI | ||
| 5032 | ID_ALIAS = "spdxId" | ||
| 5033 | NAMED_INDIVIDUALS = { | ||
| 5034 | } | ||
| 5035 | |||
| 5036 | |||
| 5037 | # A license that is not listed on the SPDX License List. | ||
| 5038 | @register("https://spdx.org/rdf/3.0.1/terms/ExpandedLicensing/CustomLicense", compact_type="expandedlicensing_CustomLicense", abstract=False) | ||
| 5039 | class expandedlicensing_CustomLicense(expandedlicensing_License): | ||
| 5040 | NODE_KIND = NodeKind.IRI | ||
| 5041 | ID_ALIAS = "spdxId" | ||
| 5042 | NAMED_INDIVIDUALS = { | ||
| 5043 | } | ||
| 5044 | |||
| 5045 | |||
| 5046 | # Connects a vulnerability and an element designating the element as a product | ||
| 5047 | # affected by the vulnerability. | ||
| 5048 | @register("https://spdx.org/rdf/3.0.1/terms/Security/VexAffectedVulnAssessmentRelationship", compact_type="security_VexAffectedVulnAssessmentRelationship", abstract=False) | ||
| 5049 | class security_VexAffectedVulnAssessmentRelationship(security_VexVulnAssessmentRelationship): | ||
| 5050 | NODE_KIND = NodeKind.IRI | ||
| 5051 | ID_ALIAS = "spdxId" | ||
| 5052 | NAMED_INDIVIDUALS = { | ||
| 5053 | } | ||
| 5054 | |||
| 5055 | @classmethod | ||
| 5056 | def _register_props(cls): | ||
| 5057 | super()._register_props() | ||
| 5058 | # Provides advice on how to mitigate or remediate a vulnerability when a VEX product | ||
| 5059 | # is affected by it. | ||
| 5060 | cls._add_property( | ||
| 5061 | "security_actionStatement", | ||
| 5062 | StringProp(), | ||
| 5063 | iri="https://spdx.org/rdf/3.0.1/terms/Security/actionStatement", | ||
| 5064 | min_count=1, | ||
| 5065 | compact="security_actionStatement", | ||
| 5066 | ) | ||
| 5067 | # Records the time when a recommended action was communicated in a VEX statement | ||
| 5068 | # to mitigate a vulnerability. | ||
| 5069 | cls._add_property( | ||
| 5070 | "security_actionStatementTime", | ||
| 5071 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
| 5072 | iri="https://spdx.org/rdf/3.0.1/terms/Security/actionStatementTime", | ||
| 5073 | compact="security_actionStatementTime", | ||
| 5074 | ) | ||
| 5075 | |||
| 5076 | |||
| 5077 | # Links a vulnerability and elements representing products (in the VEX sense) where | ||
| 5078 | # a fix has been applied and are no longer affected. | ||
| 5079 | @register("https://spdx.org/rdf/3.0.1/terms/Security/VexFixedVulnAssessmentRelationship", compact_type="security_VexFixedVulnAssessmentRelationship", abstract=False) | ||
| 5080 | class security_VexFixedVulnAssessmentRelationship(security_VexVulnAssessmentRelationship): | ||
| 5081 | NODE_KIND = NodeKind.IRI | ||
| 5082 | ID_ALIAS = "spdxId" | ||
| 5083 | NAMED_INDIVIDUALS = { | ||
| 5084 | } | ||
| 5085 | |||
| 5086 | |||
| 5087 | # Links a vulnerability and one or more elements designating the latter as products | ||
| 5088 | # not affected by the vulnerability. | ||
| 5089 | @register("https://spdx.org/rdf/3.0.1/terms/Security/VexNotAffectedVulnAssessmentRelationship", compact_type="security_VexNotAffectedVulnAssessmentRelationship", abstract=False) | ||
| 5090 | class security_VexNotAffectedVulnAssessmentRelationship(security_VexVulnAssessmentRelationship): | ||
| 5091 | NODE_KIND = NodeKind.IRI | ||
| 5092 | ID_ALIAS = "spdxId" | ||
| 5093 | NAMED_INDIVIDUALS = { | ||
| 5094 | } | ||
| 5095 | |||
| 5096 | @classmethod | ||
| 5097 | def _register_props(cls): | ||
| 5098 | super()._register_props() | ||
| 5099 | # Explains why a VEX product is not affected by a vulnerability. It is an | ||
| 5100 | # alternative in VexNotAffectedVulnAssessmentRelationship to the machine-readable | ||
| 5101 | # justification label. | ||
| 5102 | cls._add_property( | ||
| 5103 | "security_impactStatement", | ||
| 5104 | StringProp(), | ||
| 5105 | iri="https://spdx.org/rdf/3.0.1/terms/Security/impactStatement", | ||
| 5106 | compact="security_impactStatement", | ||
| 5107 | ) | ||
| 5108 | # Timestamp of impact statement. | ||
| 5109 | cls._add_property( | ||
| 5110 | "security_impactStatementTime", | ||
| 5111 | DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",), | ||
| 5112 | iri="https://spdx.org/rdf/3.0.1/terms/Security/impactStatementTime", | ||
| 5113 | compact="security_impactStatementTime", | ||
| 5114 | ) | ||
| 5115 | # Impact justification label to be used when linking a vulnerability to an element | ||
| 5116 | # representing a VEX product with a VexNotAffectedVulnAssessmentRelationship | ||
| 5117 | # relationship. | ||
| 5118 | cls._add_property( | ||
| 5119 | "security_justificationType", | ||
| 5120 | EnumProp([ | ||
| 5121 | ("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/componentNotPresent", "componentNotPresent"), | ||
| 5122 | ("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/inlineMitigationsAlreadyExist", "inlineMitigationsAlreadyExist"), | ||
| 5123 | ("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeCannotBeControlledByAdversary", "vulnerableCodeCannotBeControlledByAdversary"), | ||
| 5124 | ("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotInExecutePath", "vulnerableCodeNotInExecutePath"), | ||
| 5125 | ("https://spdx.org/rdf/3.0.1/terms/Security/VexJustificationType/vulnerableCodeNotPresent", "vulnerableCodeNotPresent"), | ||
| 5126 | ]), | ||
| 5127 | iri="https://spdx.org/rdf/3.0.1/terms/Security/justificationType", | ||
| 5128 | compact="security_justificationType", | ||
| 5129 | ) | ||
| 5130 | |||
| 5131 | |||
| 5132 | # Designates elements as products where the impact of a vulnerability is being | ||
| 5133 | # investigated. | ||
| 5134 | @register("https://spdx.org/rdf/3.0.1/terms/Security/VexUnderInvestigationVulnAssessmentRelationship", compact_type="security_VexUnderInvestigationVulnAssessmentRelationship", abstract=False) | ||
| 5135 | class security_VexUnderInvestigationVulnAssessmentRelationship(security_VexVulnAssessmentRelationship): | ||
| 5136 | NODE_KIND = NodeKind.IRI | ||
| 5137 | ID_ALIAS = "spdxId" | ||
| 5138 | NAMED_INDIVIDUALS = { | ||
| 5139 | } | ||
| 5140 | |||
| 5141 | |||
| 5142 | # Refers to any object that stores content on a computer. | ||
| 5143 | @register("https://spdx.org/rdf/3.0.1/terms/Software/File", compact_type="software_File", abstract=False) | ||
| 5144 | class software_File(software_SoftwareArtifact): | ||
| 5145 | NODE_KIND = NodeKind.IRI | ||
| 5146 | ID_ALIAS = "spdxId" | ||
| 5147 | NAMED_INDIVIDUALS = { | ||
| 5148 | } | ||
| 5149 | |||
| 5150 | @classmethod | ||
| 5151 | def _register_props(cls): | ||
| 5152 | super()._register_props() | ||
| 5153 | # Provides information about the content type of an Element or a Property. | ||
| 5154 | cls._add_property( | ||
| 5155 | "contentType", | ||
| 5156 | StringProp(pattern=r"^[^\/]+\/[^\/]+$",), | ||
| 5157 | iri="https://spdx.org/rdf/3.0.1/terms/Core/contentType", | ||
| 5158 | compact="contentType", | ||
| 5159 | ) | ||
| 5160 | # Describes if a given file is a directory or non-directory kind of file. | ||
| 5161 | cls._add_property( | ||
| 5162 | "software_fileKind", | ||
| 5163 | EnumProp([ | ||
| 5164 | ("https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/directory", "directory"), | ||
| 5165 | ("https://spdx.org/rdf/3.0.1/terms/Software/FileKindType/file", "file"), | ||
| 5166 | ]), | ||
| 5167 | iri="https://spdx.org/rdf/3.0.1/terms/Software/fileKind", | ||
| 5168 | compact="software_fileKind", | ||
| 5169 | ) | ||
| 5170 | |||
| 5171 | |||
| 5172 | # Refers to any unit of content that can be associated with a distribution of | ||
| 5173 | # software. | ||
| 5174 | @register("https://spdx.org/rdf/3.0.1/terms/Software/Package", compact_type="software_Package", abstract=False) | ||
| 5175 | class software_Package(software_SoftwareArtifact): | ||
| 5176 | NODE_KIND = NodeKind.IRI | ||
| 5177 | ID_ALIAS = "spdxId" | ||
| 5178 | NAMED_INDIVIDUALS = { | ||
| 5179 | } | ||
| 5180 | |||
| 5181 | @classmethod | ||
| 5182 | def _register_props(cls): | ||
| 5183 | super()._register_props() | ||
| 5184 | # Identifies the download Uniform Resource Identifier for the package at the time | ||
| 5185 | # that the document was created. | ||
| 5186 | cls._add_property( | ||
| 5187 | "software_downloadLocation", | ||
| 5188 | AnyURIProp(), | ||
| 5189 | iri="https://spdx.org/rdf/3.0.1/terms/Software/downloadLocation", | ||
| 5190 | compact="software_downloadLocation", | ||
| 5191 | ) | ||
| 5192 | # A place for the SPDX document creator to record a website that serves as the | ||
| 5193 | # package's home page. | ||
| 5194 | cls._add_property( | ||
| 5195 | "software_homePage", | ||
| 5196 | AnyURIProp(), | ||
| 5197 | iri="https://spdx.org/rdf/3.0.1/terms/Software/homePage", | ||
| 5198 | compact="software_homePage", | ||
| 5199 | ) | ||
| 5200 | # Provides a place for the SPDX data creator to record the package URL string | ||
| 5201 | # (in accordance with the Package URL specification) for a software Package. | ||
| 5202 | cls._add_property( | ||
| 5203 | "software_packageUrl", | ||
| 5204 | AnyURIProp(), | ||
| 5205 | iri="https://spdx.org/rdf/3.0.1/terms/Software/packageUrl", | ||
| 5206 | compact="software_packageUrl", | ||
| 5207 | ) | ||
| 5208 | # Identifies the version of a package. | ||
| 5209 | cls._add_property( | ||
| 5210 | "software_packageVersion", | ||
| 5211 | StringProp(), | ||
| 5212 | iri="https://spdx.org/rdf/3.0.1/terms/Software/packageVersion", | ||
| 5213 | compact="software_packageVersion", | ||
| 5214 | ) | ||
| 5215 | # Records any relevant background information or additional comments | ||
| 5216 | # about the origin of the package. | ||
| 5217 | cls._add_property( | ||
| 5218 | "software_sourceInfo", | ||
| 5219 | StringProp(), | ||
| 5220 | iri="https://spdx.org/rdf/3.0.1/terms/Software/sourceInfo", | ||
| 5221 | compact="software_sourceInfo", | ||
| 5222 | ) | ||
| 5223 | |||
| 5224 | |||
| 5225 | # A collection of SPDX Elements describing a single package. | ||
| 5226 | @register("https://spdx.org/rdf/3.0.1/terms/Software/Sbom", compact_type="software_Sbom", abstract=False) | ||
| 5227 | class software_Sbom(Bom): | ||
| 5228 | NODE_KIND = NodeKind.IRI | ||
| 5229 | ID_ALIAS = "spdxId" | ||
| 5230 | NAMED_INDIVIDUALS = { | ||
| 5231 | } | ||
| 5232 | |||
| 5233 | @classmethod | ||
| 5234 | def _register_props(cls): | ||
| 5235 | super()._register_props() | ||
| 5236 | # Provides information about the type of an SBOM. | ||
| 5237 | cls._add_property( | ||
| 5238 | "software_sbomType", | ||
| 5239 | ListProp(EnumProp([ | ||
| 5240 | ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/analyzed", "analyzed"), | ||
| 5241 | ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/build", "build"), | ||
| 5242 | ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/deployed", "deployed"), | ||
| 5243 | ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/design", "design"), | ||
| 5244 | ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/runtime", "runtime"), | ||
| 5245 | ("https://spdx.org/rdf/3.0.1/terms/Software/SbomType/source", "source"), | ||
| 5246 | ])), | ||
| 5247 | iri="https://spdx.org/rdf/3.0.1/terms/Software/sbomType", | ||
| 5248 | compact="software_sbomType", | ||
| 5249 | ) | ||
| 5250 | |||
| 5251 | |||
| 5252 | # Describes a certain part of a file. | ||
| 5253 | @register("https://spdx.org/rdf/3.0.1/terms/Software/Snippet", compact_type="software_Snippet", abstract=False) | ||
| 5254 | class software_Snippet(software_SoftwareArtifact): | ||
| 5255 | NODE_KIND = NodeKind.IRI | ||
| 5256 | ID_ALIAS = "spdxId" | ||
| 5257 | NAMED_INDIVIDUALS = { | ||
| 5258 | } | ||
| 5259 | |||
| 5260 | @classmethod | ||
| 5261 | def _register_props(cls): | ||
| 5262 | super()._register_props() | ||
| 5263 | # Defines the byte range in the original host file that the snippet information | ||
| 5264 | # applies to. | ||
| 5265 | cls._add_property( | ||
| 5266 | "software_byteRange", | ||
| 5267 | ObjectProp(PositiveIntegerRange, False), | ||
| 5268 | iri="https://spdx.org/rdf/3.0.1/terms/Software/byteRange", | ||
| 5269 | compact="software_byteRange", | ||
| 5270 | ) | ||
| 5271 | # Defines the line range in the original host file that the snippet information | ||
| 5272 | # applies to. | ||
| 5273 | cls._add_property( | ||
| 5274 | "software_lineRange", | ||
| 5275 | ObjectProp(PositiveIntegerRange, False), | ||
| 5276 | iri="https://spdx.org/rdf/3.0.1/terms/Software/lineRange", | ||
| 5277 | compact="software_lineRange", | ||
| 5278 | ) | ||
| 5279 | # Defines the original host file that the snippet information applies to. | ||
| 5280 | cls._add_property( | ||
| 5281 | "software_snippetFromFile", | ||
| 5282 | ObjectProp(software_File, True), | ||
| 5283 | iri="https://spdx.org/rdf/3.0.1/terms/Software/snippetFromFile", | ||
| 5284 | min_count=1, | ||
| 5285 | compact="software_snippetFromFile", | ||
| 5286 | ) | ||
| 5287 | |||
| 5288 | |||
| 5289 | # Specifies an AI package and its associated information. | ||
| 5290 | @register("https://spdx.org/rdf/3.0.1/terms/AI/AIPackage", compact_type="ai_AIPackage", abstract=False) | ||
| 5291 | class ai_AIPackage(software_Package): | ||
| 5292 | NODE_KIND = NodeKind.IRI | ||
| 5293 | ID_ALIAS = "spdxId" | ||
| 5294 | NAMED_INDIVIDUALS = { | ||
| 5295 | } | ||
| 5296 | |||
| 5297 | @classmethod | ||
| 5298 | def _register_props(cls): | ||
| 5299 | super()._register_props() | ||
| 5300 | # Indicates whether the system can perform a decision or action without human | ||
| 5301 | # involvement or guidance. | ||
| 5302 | cls._add_property( | ||
| 5303 | "ai_autonomyType", | ||
| 5304 | EnumProp([ | ||
| 5305 | ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/no", "no"), | ||
| 5306 | ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/noAssertion", "noAssertion"), | ||
| 5307 | ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/yes", "yes"), | ||
| 5308 | ]), | ||
| 5309 | iri="https://spdx.org/rdf/3.0.1/terms/AI/autonomyType", | ||
| 5310 | compact="ai_autonomyType", | ||
| 5311 | ) | ||
| 5312 | # Captures the domain in which the AI package can be used. | ||
| 5313 | cls._add_property( | ||
| 5314 | "ai_domain", | ||
| 5315 | ListProp(StringProp()), | ||
| 5316 | iri="https://spdx.org/rdf/3.0.1/terms/AI/domain", | ||
| 5317 | compact="ai_domain", | ||
| 5318 | ) | ||
| 5319 | # Indicates the amount of energy consumption incurred by an AI model. | ||
| 5320 | cls._add_property( | ||
| 5321 | "ai_energyConsumption", | ||
| 5322 | ObjectProp(ai_EnergyConsumption, False), | ||
| 5323 | iri="https://spdx.org/rdf/3.0.1/terms/AI/energyConsumption", | ||
| 5324 | compact="ai_energyConsumption", | ||
| 5325 | ) | ||
| 5326 | # Records a hyperparameter used to build the AI model contained in the AI | ||
| 5327 | # package. | ||
| 5328 | cls._add_property( | ||
| 5329 | "ai_hyperparameter", | ||
| 5330 | ListProp(ObjectProp(DictionaryEntry, False)), | ||
| 5331 | iri="https://spdx.org/rdf/3.0.1/terms/AI/hyperparameter", | ||
| 5332 | compact="ai_hyperparameter", | ||
| 5333 | ) | ||
| 5334 | # Provides relevant information about the AI software, not including the model | ||
| 5335 | # description. | ||
| 5336 | cls._add_property( | ||
| 5337 | "ai_informationAboutApplication", | ||
| 5338 | StringProp(), | ||
| 5339 | iri="https://spdx.org/rdf/3.0.1/terms/AI/informationAboutApplication", | ||
| 5340 | compact="ai_informationAboutApplication", | ||
| 5341 | ) | ||
| 5342 | # Describes relevant information about different steps of the training process. | ||
| 5343 | cls._add_property( | ||
| 5344 | "ai_informationAboutTraining", | ||
| 5345 | StringProp(), | ||
| 5346 | iri="https://spdx.org/rdf/3.0.1/terms/AI/informationAboutTraining", | ||
| 5347 | compact="ai_informationAboutTraining", | ||
| 5348 | ) | ||
| 5349 | # Captures a limitation of the AI software. | ||
| 5350 | cls._add_property( | ||
| 5351 | "ai_limitation", | ||
| 5352 | StringProp(), | ||
| 5353 | iri="https://spdx.org/rdf/3.0.1/terms/AI/limitation", | ||
| 5354 | compact="ai_limitation", | ||
| 5355 | ) | ||
| 5356 | # Records the measurement of prediction quality of the AI model. | ||
| 5357 | cls._add_property( | ||
| 5358 | "ai_metric", | ||
| 5359 | ListProp(ObjectProp(DictionaryEntry, False)), | ||
| 5360 | iri="https://spdx.org/rdf/3.0.1/terms/AI/metric", | ||
| 5361 | compact="ai_metric", | ||
| 5362 | ) | ||
| 5363 | # Captures the threshold that was used for computation of a metric described in | ||
| 5364 | # the metric field. | ||
| 5365 | cls._add_property( | ||
| 5366 | "ai_metricDecisionThreshold", | ||
| 5367 | ListProp(ObjectProp(DictionaryEntry, False)), | ||
| 5368 | iri="https://spdx.org/rdf/3.0.1/terms/AI/metricDecisionThreshold", | ||
| 5369 | compact="ai_metricDecisionThreshold", | ||
| 5370 | ) | ||
| 5371 | # Describes all the preprocessing steps applied to the training data before the | ||
| 5372 | # model training. | ||
| 5373 | cls._add_property( | ||
| 5374 | "ai_modelDataPreprocessing", | ||
| 5375 | ListProp(StringProp()), | ||
| 5376 | iri="https://spdx.org/rdf/3.0.1/terms/AI/modelDataPreprocessing", | ||
| 5377 | compact="ai_modelDataPreprocessing", | ||
| 5378 | ) | ||
| 5379 | # Describes methods that can be used to explain the results from the AI model. | ||
| 5380 | cls._add_property( | ||
| 5381 | "ai_modelExplainability", | ||
| 5382 | ListProp(StringProp()), | ||
| 5383 | iri="https://spdx.org/rdf/3.0.1/terms/AI/modelExplainability", | ||
| 5384 | compact="ai_modelExplainability", | ||
| 5385 | ) | ||
| 5386 | # Records the results of general safety risk assessment of the AI system. | ||
| 5387 | cls._add_property( | ||
| 5388 | "ai_safetyRiskAssessment", | ||
| 5389 | EnumProp([ | ||
| 5390 | ("https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/high", "high"), | ||
| 5391 | ("https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/low", "low"), | ||
| 5392 | ("https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/medium", "medium"), | ||
| 5393 | ("https://spdx.org/rdf/3.0.1/terms/AI/SafetyRiskAssessmentType/serious", "serious"), | ||
| 5394 | ]), | ||
| 5395 | iri="https://spdx.org/rdf/3.0.1/terms/AI/safetyRiskAssessment", | ||
| 5396 | compact="ai_safetyRiskAssessment", | ||
| 5397 | ) | ||
| 5398 | # Captures a standard that is being complied with. | ||
| 5399 | cls._add_property( | ||
| 5400 | "ai_standardCompliance", | ||
| 5401 | ListProp(StringProp()), | ||
| 5402 | iri="https://spdx.org/rdf/3.0.1/terms/AI/standardCompliance", | ||
| 5403 | compact="ai_standardCompliance", | ||
| 5404 | ) | ||
| 5405 | # Records the type of the model used in the AI software. | ||
| 5406 | cls._add_property( | ||
| 5407 | "ai_typeOfModel", | ||
| 5408 | ListProp(StringProp()), | ||
| 5409 | iri="https://spdx.org/rdf/3.0.1/terms/AI/typeOfModel", | ||
| 5410 | compact="ai_typeOfModel", | ||
| 5411 | ) | ||
| 5412 | # Records if sensitive personal information is used during model training or | ||
| 5413 | # could be used during the inference. | ||
| 5414 | cls._add_property( | ||
| 5415 | "ai_useSensitivePersonalInformation", | ||
| 5416 | EnumProp([ | ||
| 5417 | ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/no", "no"), | ||
| 5418 | ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/noAssertion", "noAssertion"), | ||
| 5419 | ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/yes", "yes"), | ||
| 5420 | ]), | ||
| 5421 | iri="https://spdx.org/rdf/3.0.1/terms/AI/useSensitivePersonalInformation", | ||
| 5422 | compact="ai_useSensitivePersonalInformation", | ||
| 5423 | ) | ||
| 5424 | |||
| 5425 | |||
| 5426 | # Specifies a data package and its associated information. | ||
| 5427 | @register("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetPackage", compact_type="dataset_DatasetPackage", abstract=False) | ||
| 5428 | class dataset_DatasetPackage(software_Package): | ||
| 5429 | NODE_KIND = NodeKind.IRI | ||
| 5430 | ID_ALIAS = "spdxId" | ||
| 5431 | NAMED_INDIVIDUALS = { | ||
| 5432 | } | ||
| 5433 | |||
| 5434 | @classmethod | ||
| 5435 | def _register_props(cls): | ||
| 5436 | super()._register_props() | ||
| 5437 | # Describes the anonymization methods used. | ||
| 5438 | cls._add_property( | ||
| 5439 | "dataset_anonymizationMethodUsed", | ||
| 5440 | ListProp(StringProp()), | ||
| 5441 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/anonymizationMethodUsed", | ||
| 5442 | compact="dataset_anonymizationMethodUsed", | ||
| 5443 | ) | ||
| 5444 | # Describes the confidentiality level of the data points contained in the dataset. | ||
| 5445 | cls._add_property( | ||
| 5446 | "dataset_confidentialityLevel", | ||
| 5447 | EnumProp([ | ||
| 5448 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/amber", "amber"), | ||
| 5449 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/clear", "clear"), | ||
| 5450 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/green", "green"), | ||
| 5451 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/ConfidentialityLevelType/red", "red"), | ||
| 5452 | ]), | ||
| 5453 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/confidentialityLevel", | ||
| 5454 | compact="dataset_confidentialityLevel", | ||
| 5455 | ) | ||
| 5456 | # Describes how the dataset was collected. | ||
| 5457 | cls._add_property( | ||
| 5458 | "dataset_dataCollectionProcess", | ||
| 5459 | StringProp(), | ||
| 5460 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/dataCollectionProcess", | ||
| 5461 | compact="dataset_dataCollectionProcess", | ||
| 5462 | ) | ||
| 5463 | # Describes the preprocessing steps that were applied to the raw data to create the given dataset. | ||
| 5464 | cls._add_property( | ||
| 5465 | "dataset_dataPreprocessing", | ||
| 5466 | ListProp(StringProp()), | ||
| 5467 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/dataPreprocessing", | ||
| 5468 | compact="dataset_dataPreprocessing", | ||
| 5469 | ) | ||
| 5470 | # The field describes the availability of a dataset. | ||
| 5471 | cls._add_property( | ||
| 5472 | "dataset_datasetAvailability", | ||
| 5473 | EnumProp([ | ||
| 5474 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/clickthrough", "clickthrough"), | ||
| 5475 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/directDownload", "directDownload"), | ||
| 5476 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/query", "query"), | ||
| 5477 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/registration", "registration"), | ||
| 5478 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetAvailabilityType/scrapingScript", "scrapingScript"), | ||
| 5479 | ]), | ||
| 5480 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/datasetAvailability", | ||
| 5481 | compact="dataset_datasetAvailability", | ||
| 5482 | ) | ||
| 5483 | # Describes potentially noisy elements of the dataset. | ||
| 5484 | cls._add_property( | ||
| 5485 | "dataset_datasetNoise", | ||
| 5486 | StringProp(), | ||
| 5487 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/datasetNoise", | ||
| 5488 | compact="dataset_datasetNoise", | ||
| 5489 | ) | ||
| 5490 | # Captures the size of the dataset. | ||
| 5491 | cls._add_property( | ||
| 5492 | "dataset_datasetSize", | ||
| 5493 | NonNegativeIntegerProp(), | ||
| 5494 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/datasetSize", | ||
| 5495 | compact="dataset_datasetSize", | ||
| 5496 | ) | ||
| 5497 | # Describes the type of the given dataset. | ||
| 5498 | cls._add_property( | ||
| 5499 | "dataset_datasetType", | ||
| 5500 | ListProp(EnumProp([ | ||
| 5501 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/audio", "audio"), | ||
| 5502 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/categorical", "categorical"), | ||
| 5503 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/graph", "graph"), | ||
| 5504 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/image", "image"), | ||
| 5505 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/noAssertion", "noAssertion"), | ||
| 5506 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/numeric", "numeric"), | ||
| 5507 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/other", "other"), | ||
| 5508 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/sensor", "sensor"), | ||
| 5509 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/structured", "structured"), | ||
| 5510 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/syntactic", "syntactic"), | ||
| 5511 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/text", "text"), | ||
| 5512 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timeseries", "timeseries"), | ||
| 5513 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/timestamp", "timestamp"), | ||
| 5514 | ("https://spdx.org/rdf/3.0.1/terms/Dataset/DatasetType/video", "video"), | ||
| 5515 | ])), | ||
| 5516 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/datasetType", | ||
| 5517 | min_count=1, | ||
| 5518 | compact="dataset_datasetType", | ||
| 5519 | ) | ||
| 5520 | # Describes a mechanism to update the dataset. | ||
| 5521 | cls._add_property( | ||
| 5522 | "dataset_datasetUpdateMechanism", | ||
| 5523 | StringProp(), | ||
| 5524 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/datasetUpdateMechanism", | ||
| 5525 | compact="dataset_datasetUpdateMechanism", | ||
| 5526 | ) | ||
| 5527 | # Describes if any sensitive personal information is present in the dataset. | ||
| 5528 | cls._add_property( | ||
| 5529 | "dataset_hasSensitivePersonalInformation", | ||
| 5530 | EnumProp([ | ||
| 5531 | ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/no", "no"), | ||
| 5532 | ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/noAssertion", "noAssertion"), | ||
| 5533 | ("https://spdx.org/rdf/3.0.1/terms/Core/PresenceType/yes", "yes"), | ||
| 5534 | ]), | ||
| 5535 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/hasSensitivePersonalInformation", | ||
| 5536 | compact="dataset_hasSensitivePersonalInformation", | ||
| 5537 | ) | ||
| 5538 | # Describes what the given dataset should be used for. | ||
| 5539 | cls._add_property( | ||
| 5540 | "dataset_intendedUse", | ||
| 5541 | StringProp(), | ||
| 5542 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/intendedUse", | ||
| 5543 | compact="dataset_intendedUse", | ||
| 5544 | ) | ||
| 5545 | # Records the biases that the dataset is known to encompass. | ||
| 5546 | cls._add_property( | ||
| 5547 | "dataset_knownBias", | ||
| 5548 | ListProp(StringProp()), | ||
| 5549 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/knownBias", | ||
| 5550 | compact="dataset_knownBias", | ||
| 5551 | ) | ||
| 5552 | # Describes a sensor used for collecting the data. | ||
| 5553 | cls._add_property( | ||
| 5554 | "dataset_sensor", | ||
| 5555 | ListProp(ObjectProp(DictionaryEntry, False)), | ||
| 5556 | iri="https://spdx.org/rdf/3.0.1/terms/Dataset/sensor", | ||
| 5557 | compact="dataset_sensor", | ||
| 5558 | ) | ||
| 5559 | |||
| 5560 | |||
| 5561 | """Format Guard""" | ||
| 5562 | # fmt: on | ||
| 5563 | |||
| 5564 | |||
| 5565 | def main(): | ||
| 5566 | import argparse | ||
| 5567 | from pathlib import Path | ||
| 5568 | |||
| 5569 | parser = argparse.ArgumentParser(description="Python SHACL model test") | ||
| 5570 | parser.add_argument("infile", type=Path, help="Input file") | ||
| 5571 | parser.add_argument("--print", action="store_true", help="Print object tree") | ||
| 5572 | parser.add_argument("--outfile", type=Path, help="Output file") | ||
| 5573 | |||
| 5574 | args = parser.parse_args() | ||
| 5575 | |||
| 5576 | objectset = SHACLObjectSet() | ||
| 5577 | with args.infile.open("r") as f: | ||
| 5578 | d = JSONLDDeserializer() | ||
| 5579 | d.read(f, objectset) | ||
| 5580 | |||
| 5581 | if args.print: | ||
| 5582 | print_tree(objectset.objects) | ||
| 5583 | |||
| 5584 | if args.outfile: | ||
| 5585 | with args.outfile.open("wb") as f: | ||
| 5586 | s = JSONLDSerializer() | ||
| 5587 | s.write(objectset, f) | ||
| 5588 | |||
| 5589 | return 0 | ||
| 5590 | |||
| 5591 | |||
| 5592 | if __name__ == "__main__": | ||
| 5593 | sys.exit(main()) | ||
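The main() entry point above shows the intended round trip for the generated model: read a JSON-LD document into a SHACLObjectSet, optionally print the object tree, and re-serialize it. Below is a minimal standalone sketch of that same flow which additionally filters for the software_Package class defined above. It assumes the module is importable as oe.spdx30 (the import path used by spdx30_tasks.py below) and that the deserialized elements are reachable through SHACLObjectSet.objects, as in main(); the file names and helper name are illustrative.

# Minimal sketch (not part of the deleted file): load a JSON-LD SBOM with the
# generated model, list its software_Package elements, and write it back out.
# Assumes "oe.spdx30" is importable and that SHACLObjectSet.objects holds the
# deserialized elements, as used by main() above.
import sys
from pathlib import Path
from oe.spdx30 import (
    SHACLObjectSet,
    JSONLDDeserializer,
    JSONLDSerializer,
    software_Package,
)

def list_and_rewrite(infile, outfile):
    objectset = SHACLObjectSet()
    with Path(infile).open("r") as f:
        JSONLDDeserializer().read(f, objectset)

    # software_Package inherits "name" from Element and registers the optional
    # software_packageVersion property shown above; getattr() hedges against
    # the property being unset.
    for o in objectset.objects:
        if isinstance(o, software_Package):
            print(o.name, getattr(o, "software_packageVersion", None))

    with Path(outfile).open("wb") as f:
        JSONLDSerializer().write(objectset, f)

if __name__ == "__main__":
    list_and_rewrite(sys.argv[1], sys.argv[2])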
diff --git a/meta/lib/oe/spdx30_tasks.py b/meta/lib/oe/spdx30_tasks.py deleted file mode 100644 index f2f133005d..0000000000 --- a/meta/lib/oe/spdx30_tasks.py +++ /dev/null | |||
| @@ -1,1367 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | import json | ||
| 8 | import oe.cve_check | ||
| 9 | import oe.packagedata | ||
| 10 | import oe.patch | ||
| 11 | import oe.sbom30 | ||
| 12 | import oe.spdx30 | ||
| 13 | import oe.spdx_common | ||
| 14 | import oe.sdk | ||
| 15 | import os | ||
| 16 | |||
| 17 | from contextlib import contextmanager | ||
| 18 | from datetime import datetime, timezone | ||
| 19 | from pathlib import Path | ||
| 20 | |||
| 21 | |||
| 22 | def walk_error(err): | ||
| 23 | bb.error(f"ERROR walking {err.filename}: {err}") | ||
| 24 | |||
| 25 | |||
| 26 | def set_timestamp_now(d, o, prop): | ||
| 27 | if d.getVar("SPDX_INCLUDE_TIMESTAMPS") == "1": | ||
| 28 | setattr(o, prop, datetime.now(timezone.utc)) | ||
| 29 | else: | ||
| 30 |         # Doing this helps to validate that the property actually exists, and | ||
| 31 | # also that it is not mandatory | ||
| 32 | delattr(o, prop) | ||
| 33 | |||
| 34 | |||
| 35 | def add_license_expression(d, objset, license_expression, license_data): | ||
| 36 | simple_license_text = {} | ||
| 37 | license_text_map = {} | ||
| 38 | license_ref_idx = 0 | ||
| 39 | |||
| 40 | def add_license_text(name): | ||
| 41 | nonlocal objset | ||
| 42 | nonlocal simple_license_text | ||
| 43 | |||
| 44 | if name in simple_license_text: | ||
| 45 | return simple_license_text[name] | ||
| 46 | |||
| 47 | lic = objset.find_filter( | ||
| 48 | oe.spdx30.simplelicensing_SimpleLicensingText, | ||
| 49 | name=name, | ||
| 50 | ) | ||
| 51 | |||
| 52 | if lic is not None: | ||
| 53 | simple_license_text[name] = lic | ||
| 54 | return lic | ||
| 55 | |||
| 56 | lic = objset.add( | ||
| 57 | oe.spdx30.simplelicensing_SimpleLicensingText( | ||
| 58 | _id=objset.new_spdxid("license-text", name), | ||
| 59 | creationInfo=objset.doc.creationInfo, | ||
| 60 | name=name, | ||
| 61 | ) | ||
| 62 | ) | ||
| 63 | objset.set_element_alias(lic) | ||
| 64 | simple_license_text[name] = lic | ||
| 65 | |||
| 66 | if name == "PD": | ||
| 67 | lic.simplelicensing_licenseText = "Software released to the public domain" | ||
| 68 | return lic | ||
| 69 | |||
| 70 |         # Search for the license in COMMON_LICENSE_DIR and LICENSE_PATH | ||
| 71 | for directory in [d.getVar("COMMON_LICENSE_DIR")] + ( | ||
| 72 | d.getVar("LICENSE_PATH") or "" | ||
| 73 | ).split(): | ||
| 74 | try: | ||
| 75 | with (Path(directory) / name).open(errors="replace") as f: | ||
| 76 | lic.simplelicensing_licenseText = f.read() | ||
| 77 | return lic | ||
| 78 | |||
| 79 | except FileNotFoundError: | ||
| 80 | pass | ||
| 81 | |||
| 82 | # If it's not SPDX or PD, then NO_GENERIC_LICENSE must be set | ||
| 83 | filename = d.getVarFlag("NO_GENERIC_LICENSE", name) | ||
| 84 | if filename: | ||
| 85 | filename = d.expand("${S}/" + filename) | ||
| 86 | with open(filename, errors="replace") as f: | ||
| 87 | lic.simplelicensing_licenseText = f.read() | ||
| 88 | return lic | ||
| 89 | else: | ||
| 90 | bb.fatal("Cannot find any text for license %s" % name) | ||
| 91 | |||
| 92 | def convert(l): | ||
| 93 | nonlocal license_text_map | ||
| 94 | nonlocal license_ref_idx | ||
| 95 | |||
| 96 | if l == "(" or l == ")": | ||
| 97 | return l | ||
| 98 | |||
| 99 | if l == "&": | ||
| 100 | return "AND" | ||
| 101 | |||
| 102 | if l == "|": | ||
| 103 | return "OR" | ||
| 104 | |||
| 105 | if l == "CLOSED": | ||
| 106 | return "NONE" | ||
| 107 | |||
| 108 | spdx_license = d.getVarFlag("SPDXLICENSEMAP", l) or l | ||
| 109 | if spdx_license in license_data["licenses"]: | ||
| 110 | return spdx_license | ||
| 111 | |||
| 112 | spdx_license = "LicenseRef-" + l | ||
| 113 | if spdx_license not in license_text_map: | ||
| 114 | license_text_map[spdx_license] = oe.sbom30.get_element_link_id( | ||
| 115 | add_license_text(l) | ||
| 116 | ) | ||
| 117 | |||
| 118 | return spdx_license | ||
| 119 | |||
| 120 | lic_split = ( | ||
| 121 | license_expression.replace("(", " ( ") | ||
| 122 | .replace(")", " ) ") | ||
| 123 | .replace("|", " | ") | ||
| 124 | .replace("&", " & ") | ||
| 125 | .split() | ||
| 126 | ) | ||
| 127 | spdx_license_expression = " ".join(convert(l) for l in lic_split) | ||
| 128 | |||
| 129 | o = objset.new_license_expression( | ||
| 130 | spdx_license_expression, license_data, license_text_map | ||
| 131 | ) | ||
| 132 | objset.set_element_alias(o) | ||
| 133 | return o | ||
| 134 | |||
| 135 | |||
| 136 | def add_package_files( | ||
| 137 | d, | ||
| 138 | objset, | ||
| 139 | topdir, | ||
| 140 | get_spdxid, | ||
| 141 | get_purposes, | ||
| 142 | license_data=None, | ||
| 143 | *, | ||
| 144 | archive=None, | ||
| 145 | ignore_dirs=[], | ||
| 146 | ignore_top_level_dirs=[], | ||
| 147 | ): | ||
| 148 | source_date_epoch = d.getVar("SOURCE_DATE_EPOCH") | ||
| 149 | if source_date_epoch: | ||
| 150 | source_date_epoch = int(source_date_epoch) | ||
| 151 | |||
| 152 | spdx_files = set() | ||
| 153 | |||
| 154 | file_counter = 1 | ||
| 155 | if not os.path.exists(topdir): | ||
| 156 | bb.note(f"Skip {topdir}") | ||
| 157 | return spdx_files | ||
| 158 | |||
| 159 | check_compiled_sources = d.getVar("SPDX_INCLUDE_COMPILED_SOURCES") == "1" | ||
| 160 | if check_compiled_sources: | ||
| 161 | compiled_sources, types = oe.spdx_common.get_compiled_sources(d) | ||
| 162 | bb.debug(1, f"Total compiled files: {len(compiled_sources)}") | ||
| 163 | |||
| 164 | for subdir, dirs, files in os.walk(topdir, onerror=walk_error): | ||
| 165 | dirs[:] = [d for d in dirs if d not in ignore_dirs] | ||
| 166 | if subdir == str(topdir): | ||
| 167 | dirs[:] = [d for d in dirs if d not in ignore_top_level_dirs] | ||
| 168 | |||
| 169 | dirs.sort() | ||
| 170 | files.sort() | ||
| 171 | for file in files: | ||
| 172 | filepath = Path(subdir) / file | ||
| 173 | if filepath.is_symlink() or not filepath.is_file(): | ||
| 174 | continue | ||
| 175 | |||
| 176 | filename = str(filepath.relative_to(topdir)) | ||
| 177 | file_purposes = get_purposes(filepath) | ||
| 178 | |||
| 179 | # Check if file is compiled | ||
| 180 | if check_compiled_sources: | ||
| 181 | if not oe.spdx_common.is_compiled_source(filename, compiled_sources, types): | ||
| 182 | continue | ||
| 183 | |||
| 184 | spdx_file = objset.new_file( | ||
| 185 | get_spdxid(file_counter), | ||
| 186 | filename, | ||
| 187 | filepath, | ||
| 188 | purposes=file_purposes, | ||
| 189 | ) | ||
| 190 | spdx_files.add(spdx_file) | ||
| 191 | |||
| 192 | if ( | ||
| 193 | oe.spdx30.software_SoftwarePurpose.source in file_purposes | ||
| 194 | and license_data is not None | ||
| 195 | ): | ||
| 196 | objset.scan_declared_licenses(spdx_file, filepath, license_data) | ||
| 197 | |||
| 198 | if archive is not None: | ||
| 199 | with filepath.open("rb") as f: | ||
| 200 | info = archive.gettarinfo(fileobj=f) | ||
| 201 | info.name = filename | ||
| 202 | info.uid = 0 | ||
| 203 | info.gid = 0 | ||
| 204 | info.uname = "root" | ||
| 205 | info.gname = "root" | ||
| 206 | |||
| 207 | if source_date_epoch is not None and info.mtime > source_date_epoch: | ||
| 208 | info.mtime = source_date_epoch | ||
| 209 | |||
| 210 | archive.addfile(info, f) | ||
| 211 | |||
| 212 | file_counter += 1 | ||
| 213 | |||
| 214 | bb.debug(1, "Added %d files to %s" % (len(spdx_files), objset.doc._id)) | ||
| 215 | |||
| 216 | return spdx_files | ||
| 217 | |||
| 218 | |||
| 219 | def get_package_sources_from_debug( | ||
| 220 | d, package, package_files, sources, source_hash_cache | ||
| 221 | ): | ||
| 222 | def file_path_match(file_path, pkg_file): | ||
| 223 | if file_path.lstrip("/") == pkg_file.name.lstrip("/"): | ||
| 224 | return True | ||
| 225 | |||
| 226 | for e in pkg_file.extension: | ||
| 227 | if isinstance(e, oe.sbom30.OEFileNameAliasExtension): | ||
| 228 | for a in e.aliases: | ||
| 229 | if file_path.lstrip("/") == a.lstrip("/"): | ||
| 230 | return True | ||
| 231 | |||
| 232 | return False | ||
| 233 | |||
| 234 | debug_search_paths = [ | ||
| 235 | Path(d.getVar("SPDXWORK")), | ||
| 236 | Path(d.getVar("PKGD")), | ||
| 237 | Path(d.getVar("STAGING_DIR_TARGET")), | ||
| 238 | Path(d.getVar("STAGING_DIR_NATIVE")), | ||
| 239 | Path(d.getVar("STAGING_KERNEL_DIR")), | ||
| 240 | ] | ||
| 241 | |||
| 242 | pkg_data = oe.packagedata.read_subpkgdata_extended(package, d) | ||
| 243 | |||
| 244 | if pkg_data is None: | ||
| 245 | return | ||
| 246 | |||
| 247 | dep_source_files = set() | ||
| 248 | |||
| 249 | for file_path, file_data in pkg_data["files_info"].items(): | ||
| 250 |         if "debugsrc" not in file_data: | ||
| 251 | continue | ||
| 252 | |||
| 253 | if not any(file_path_match(file_path, pkg_file) for pkg_file in package_files): | ||
| 254 | bb.fatal( | ||
| 255 | "No package file found for %s in %s; SPDX found: %s" | ||
| 256 | % (str(file_path), package, " ".join(p.name for p in package_files)) | ||
| 257 | ) | ||
| 258 | continue | ||
| 259 | |||
| 260 | for debugsrc in file_data["debugsrc"]: | ||
| 261 | for search in debug_search_paths: | ||
| 262 | if debugsrc.startswith("/usr/src/kernel"): | ||
| 263 | debugsrc_path = search / debugsrc.replace("/usr/src/kernel/", "") | ||
| 264 | else: | ||
| 265 | debugsrc_path = search / debugsrc.lstrip("/") | ||
| 266 | |||
| 267 | if debugsrc_path in source_hash_cache: | ||
| 268 | file_sha256 = source_hash_cache[debugsrc_path] | ||
| 269 | if file_sha256 is None: | ||
| 270 | continue | ||
| 271 | else: | ||
| 272 |                     # We can only hash regular files below; skip directories, links, etc. | ||
| 273 | if not debugsrc_path.is_file(): | ||
| 274 | source_hash_cache[debugsrc_path] = None | ||
| 275 | continue | ||
| 276 | |||
| 277 | file_sha256 = bb.utils.sha256_file(debugsrc_path) | ||
| 278 | source_hash_cache[debugsrc_path] = file_sha256 | ||
| 279 | |||
| 280 | if file_sha256 in sources: | ||
| 281 | source_file = sources[file_sha256] | ||
| 282 | dep_source_files.add(source_file) | ||
| 283 | else: | ||
| 284 | bb.debug( | ||
| 285 | 1, | ||
| 286 | "Debug source %s with SHA256 %s not found in any dependency" | ||
| 287 | % (str(debugsrc_path), file_sha256), | ||
| 288 | ) | ||
| 289 | break | ||
| 290 | else: | ||
| 291 | bb.debug(1, "Debug source %s not found" % debugsrc) | ||
| 292 | |||
| 293 | return dep_source_files | ||
| 294 | |||
| 295 | |||
| 296 | def collect_dep_objsets(d, build): | ||
| 297 | deps = oe.spdx_common.get_spdx_deps(d) | ||
| 298 | |||
| 299 | dep_objsets = [] | ||
| 300 | dep_builds = set() | ||
| 301 | |||
| 302 | dep_build_spdxids = set() | ||
| 303 | for dep in deps: | ||
| 304 | bb.debug(1, "Fetching SPDX for dependency %s" % (dep.pn)) | ||
| 305 | dep_build, dep_objset = oe.sbom30.find_root_obj_in_jsonld( | ||
| 306 | d, "recipes", "recipe-" + dep.pn, oe.spdx30.build_Build | ||
| 307 | ) | ||
| 308 | # If the dependency is part of the taskhash, return it to be linked | ||
| 309 | # against. Otherwise, it cannot be linked against because this recipe | ||
| 310 |         # will not be rebuilt if the dependency changes | ||
| 311 | if dep.in_taskhash: | ||
| 312 | dep_objsets.append(dep_objset) | ||
| 313 | |||
| 314 | # The build _can_ be linked against (by alias) | ||
| 315 | dep_builds.add(dep_build) | ||
| 316 | |||
| 317 | return dep_objsets, dep_builds | ||
| 318 | |||
| 319 | |||
| 320 | def index_sources_by_hash(sources, dest): | ||
| 321 | for s in sources: | ||
| 322 | if not isinstance(s, oe.spdx30.software_File): | ||
| 323 | continue | ||
| 324 | |||
| 325 | if s.software_primaryPurpose != oe.spdx30.software_SoftwarePurpose.source: | ||
| 326 | continue | ||
| 327 | |||
| 328 | for v in s.verifiedUsing: | ||
| 329 | if v.algorithm == oe.spdx30.HashAlgorithm.sha256: | ||
| 330 |                 if v.hashValue not in dest: | ||
| 331 | dest[v.hashValue] = s | ||
| 332 | break | ||
| 333 | else: | ||
| 334 | bb.fatal(f"No SHA256 found for {s.name}") | ||
| 335 | |||
| 336 | |||
| 337 | def collect_dep_sources(dep_objsets, dest): | ||
| 338 | for objset in dep_objsets: | ||
| 339 |         # Don't collect sources from native recipes, as they also | ||
| 340 |         # match non-native sources. | ||
| 341 | if objset.is_native(): | ||
| 342 | continue | ||
| 343 | |||
| 344 | bb.debug(1, "Fetching Sources for dependency %s" % (objset.doc.name)) | ||
| 345 | |||
| 346 | dep_build = objset.find_root(oe.spdx30.build_Build) | ||
| 347 | if not dep_build: | ||
| 348 | bb.fatal("Unable to find a build") | ||
| 349 | |||
| 350 | for e in objset.foreach_type(oe.spdx30.Relationship): | ||
| 351 | if dep_build is not e.from_: | ||
| 352 | continue | ||
| 353 | |||
| 354 | if e.relationshipType != oe.spdx30.RelationshipType.hasInput: | ||
| 355 | continue | ||
| 356 | |||
| 357 | index_sources_by_hash(e.to, dest) | ||
| 358 | |||
| 359 | |||
| 360 | def add_download_files(d, objset): | ||
| 361 | inputs = set() | ||
| 362 | |||
| 363 | urls = d.getVar("SRC_URI").split() | ||
| 364 | fetch = bb.fetch2.Fetch(urls, d) | ||
| 365 | |||
| 366 | for download_idx, src_uri in enumerate(urls): | ||
| 367 | fd = fetch.ud[src_uri] | ||
| 368 | |||
| 369 | file_name = os.path.basename(fetch.localpath(src_uri)) | ||
| 370 | if oe.patch.patch_path(src_uri, fetch, "", expand=False): | ||
| 371 | primary_purpose = oe.spdx30.software_SoftwarePurpose.patch | ||
| 372 | else: | ||
| 373 | primary_purpose = oe.spdx30.software_SoftwarePurpose.source | ||
| 374 | |||
| 375 | if fd.type == "file": | ||
| 376 | if os.path.isdir(fd.localpath): | ||
| 377 | walk_idx = 1 | ||
| 378 | for root, dirs, files in os.walk(fd.localpath, onerror=walk_error): | ||
| 379 | dirs.sort() | ||
| 380 | files.sort() | ||
| 381 | for f in files: | ||
| 382 | f_path = os.path.join(root, f) | ||
| 383 | if os.path.islink(f_path): | ||
| 384 | # TODO: SPDX doesn't support symlinks yet | ||
| 385 | continue | ||
| 386 | |||
| 387 | file = objset.new_file( | ||
| 388 | objset.new_spdxid( | ||
| 389 | "source", str(download_idx + 1), str(walk_idx) | ||
| 390 | ), | ||
| 391 | os.path.join( | ||
| 392 | file_name, os.path.relpath(f_path, fd.localpath) | ||
| 393 | ), | ||
| 394 | f_path, | ||
| 395 | purposes=[primary_purpose], | ||
| 396 | ) | ||
| 397 | |||
| 398 | inputs.add(file) | ||
| 399 | walk_idx += 1 | ||
| 400 | |||
| 401 | else: | ||
| 402 | file = objset.new_file( | ||
| 403 | objset.new_spdxid("source", str(download_idx + 1)), | ||
| 404 | file_name, | ||
| 405 | fd.localpath, | ||
| 406 | purposes=[primary_purpose], | ||
| 407 | ) | ||
| 408 | inputs.add(file) | ||
| 409 | |||
| 410 | else: | ||
| 411 | dl = objset.add( | ||
| 412 | oe.spdx30.software_Package( | ||
| 413 | _id=objset.new_spdxid("source", str(download_idx + 1)), | ||
| 414 | creationInfo=objset.doc.creationInfo, | ||
| 415 | name=file_name, | ||
| 416 | software_primaryPurpose=primary_purpose, | ||
| 417 | software_downloadLocation=oe.spdx_common.fetch_data_to_uri( | ||
| 418 | fd, fd.name | ||
| 419 | ), | ||
| 420 | ) | ||
| 421 | ) | ||
| 422 | |||
| 423 | if fd.method.supports_checksum(fd): | ||
| 424 | # TODO Need something better than hard coding this | ||
| 425 | for checksum_id in ["sha256", "sha1"]: | ||
| 426 | expected_checksum = getattr( | ||
| 427 | fd, "%s_expected" % checksum_id, None | ||
| 428 | ) | ||
| 429 | if expected_checksum is None: | ||
| 430 | continue | ||
| 431 | |||
| 432 | dl.verifiedUsing.append( | ||
| 433 | oe.spdx30.Hash( | ||
| 434 | algorithm=getattr(oe.spdx30.HashAlgorithm, checksum_id), | ||
| 435 | hashValue=expected_checksum, | ||
| 436 | ) | ||
| 437 | ) | ||
| 438 | |||
| 439 | inputs.add(dl) | ||
| 440 | |||
| 441 | return inputs | ||
| 442 | |||
| 443 | |||
| 444 | def set_purposes(d, element, *var_names, force_purposes=[]): | ||
| 445 | purposes = force_purposes[:] | ||
| 446 | |||
| 447 | for var_name in var_names: | ||
| 448 | val = d.getVar(var_name) | ||
| 449 | if val: | ||
| 450 | purposes.extend(val.split()) | ||
| 451 | break | ||
| 452 | |||
| 453 | if not purposes: | ||
| 454 | bb.warn("No SPDX purposes found in %s" % " ".join(var_names)) | ||
| 455 | return | ||
| 456 | |||
| 457 | element.software_primaryPurpose = getattr( | ||
| 458 | oe.spdx30.software_SoftwarePurpose, purposes[0] | ||
| 459 | ) | ||
| 460 | element.software_additionalPurpose = [ | ||
| 461 | getattr(oe.spdx30.software_SoftwarePurpose, p) for p in purposes[1:] | ||
| 462 | ] | ||
| 463 | |||
| 464 | |||
| 465 | def create_spdx(d): | ||
| 466 | def set_var_field(var, obj, name, package=None): | ||
| 467 | val = None | ||
| 468 | if package: | ||
| 469 | val = d.getVar("%s:%s" % (var, package)) | ||
| 470 | |||
| 471 | if not val: | ||
| 472 | val = d.getVar(var) | ||
| 473 | |||
| 474 | if val: | ||
| 475 | setattr(obj, name, val) | ||
| 476 | |||
| 477 | license_data = oe.spdx_common.load_spdx_license_data(d) | ||
| 478 | |||
| 479 | deploydir = Path(d.getVar("SPDXDEPLOY")) | ||
| 480 | deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX")) | ||
| 481 | spdx_workdir = Path(d.getVar("SPDXWORK")) | ||
| 482 | include_sources = d.getVar("SPDX_INCLUDE_SOURCES") == "1" | ||
| 483 | pkg_arch = d.getVar("SSTATE_PKGARCH") | ||
| 484 | is_native = bb.data.inherits_class("native", d) or bb.data.inherits_class( | ||
| 485 | "cross", d | ||
| 486 | ) | ||
| 487 | include_vex = d.getVar("SPDX_INCLUDE_VEX") | ||
| 488 | if not include_vex in ("none", "current", "all"): | ||
| 489 | bb.fatal("SPDX_INCLUDE_VEX must be one of 'none', 'current', 'all'") | ||
| 490 | |||
| 491 | build_objset = oe.sbom30.ObjectSet.new_objset(d, "recipe-" + d.getVar("PN")) | ||
| 492 | |||
| 493 | build = build_objset.new_task_build("recipe", "recipe") | ||
| 494 | build_objset.set_element_alias(build) | ||
| 495 | |||
| 496 | build_objset.doc.rootElement.append(build) | ||
| 497 | |||
| 498 | build_objset.set_is_native(is_native) | ||
| 499 | |||
| 500 | for var in (d.getVar("SPDX_CUSTOM_ANNOTATION_VARS") or "").split(): | ||
| 501 | new_annotation( | ||
| 502 | d, | ||
| 503 | build_objset, | ||
| 504 | build, | ||
| 505 | "%s=%s" % (var, d.getVar(var)), | ||
| 506 | oe.spdx30.AnnotationType.other, | ||
| 507 | ) | ||
| 508 | |||
| 509 | build_inputs = set() | ||
| 510 | |||
| 511 | # Add CVEs | ||
| 512 | cve_by_status = {} | ||
| 513 | if include_vex != "none": | ||
| 514 | patched_cves = oe.cve_check.get_patched_cves(d) | ||
| 515 | for cve, patched_cve in patched_cves.items(): | ||
| 516 | decoded_status = { | ||
| 517 | "mapping": patched_cve["abbrev-status"], | ||
| 518 | "detail": patched_cve["status"], | ||
| 519 | "description": patched_cve.get("justification", None) | ||
| 520 | } | ||
| 521 | |||
| 522 |             # If this CVE is fixed upstream, skip it unless all CVEs were | ||
| 523 |             # requested (SPDX_INCLUDE_VEX == "all"). | ||
| 524 | if ( | ||
| 525 | include_vex != "all" | ||
| 526 | and "detail" in decoded_status | ||
| 527 | and decoded_status["detail"] | ||
| 528 | in ( | ||
| 529 | "fixed-version", | ||
| 530 | "cpe-stable-backport", | ||
| 531 | ) | ||
| 532 | ): | ||
| 533 | bb.debug(1, "Skipping %s since it is already fixed upstream" % cve) | ||
| 534 | continue | ||
| 535 | |||
| 536 | spdx_cve = build_objset.new_cve_vuln(cve) | ||
| 537 | build_objset.set_element_alias(spdx_cve) | ||
| 538 | |||
| 539 | cve_by_status.setdefault(decoded_status["mapping"], {})[cve] = ( | ||
| 540 | spdx_cve, | ||
| 541 | decoded_status["detail"], | ||
| 542 | decoded_status["description"], | ||
| 543 | ) | ||
| 544 | |||
| 545 | cpe_ids = oe.cve_check.get_cpe_ids(d.getVar("CVE_PRODUCT"), d.getVar("CVE_VERSION")) | ||
| 546 | |||
| 547 | source_files = add_download_files(d, build_objset) | ||
| 548 | build_inputs |= source_files | ||
| 549 | |||
| 550 | recipe_spdx_license = add_license_expression( | ||
| 551 | d, build_objset, d.getVar("LICENSE"), license_data | ||
| 552 | ) | ||
| 553 | build_objset.new_relationship( | ||
| 554 | source_files, | ||
| 555 | oe.spdx30.RelationshipType.hasDeclaredLicense, | ||
| 556 | [oe.sbom30.get_element_link_id(recipe_spdx_license)], | ||
| 557 | ) | ||
| 558 | |||
| 559 | dep_sources = {} | ||
| 560 | if oe.spdx_common.process_sources(d) and include_sources: | ||
| 561 | bb.debug(1, "Adding source files to SPDX") | ||
| 562 | oe.spdx_common.get_patched_src(d) | ||
| 563 | |||
| 564 | files = add_package_files( | ||
| 565 | d, | ||
| 566 | build_objset, | ||
| 567 | spdx_workdir, | ||
| 568 | lambda file_counter: build_objset.new_spdxid( | ||
| 569 | "sourcefile", str(file_counter) | ||
| 570 | ), | ||
| 571 | lambda filepath: [oe.spdx30.software_SoftwarePurpose.source], | ||
| 572 | license_data, | ||
| 573 | ignore_dirs=[".git"], | ||
| 574 | ignore_top_level_dirs=["temp"], | ||
| 575 | archive=None, | ||
| 576 | ) | ||
| 577 | build_inputs |= files | ||
| 578 | index_sources_by_hash(files, dep_sources) | ||
| 579 | |||
| 580 | dep_objsets, dep_builds = collect_dep_objsets(d, build) | ||
| 581 | if dep_builds: | ||
| 582 | build_objset.new_scoped_relationship( | ||
| 583 | [build], | ||
| 584 | oe.spdx30.RelationshipType.dependsOn, | ||
| 585 | oe.spdx30.LifecycleScopeType.build, | ||
| 586 | sorted(oe.sbom30.get_element_link_id(b) for b in dep_builds), | ||
| 587 | ) | ||
| 588 | |||
| 589 | debug_source_ids = set() | ||
| 590 | source_hash_cache = {} | ||
| 591 | |||
| 592 |     # Write out the package SPDX data now. It is not complete, as we cannot | ||
| 593 |     # write the runtime data yet, so write it to a staging area; a later task | ||
| 594 |     # will write out the final collection. | ||
| 595 | |||
| 596 | # TODO: Handle native recipe output | ||
| 597 | if not is_native: | ||
| 598 | bb.debug(1, "Collecting Dependency sources files") | ||
| 599 | collect_dep_sources(dep_objsets, dep_sources) | ||
| 600 | |||
| 601 | bb.build.exec_func("read_subpackage_metadata", d) | ||
| 602 | |||
| 603 | pkgdest = Path(d.getVar("PKGDEST")) | ||
| 604 | for package in d.getVar("PACKAGES").split(): | ||
| 605 | if not oe.packagedata.packaged(package, d): | ||
| 606 | continue | ||
| 607 | |||
| 608 | pkg_name = d.getVar("PKG:%s" % package) or package | ||
| 609 | |||
| 610 | bb.debug(1, "Creating SPDX for package %s" % pkg_name) | ||
| 611 | |||
| 612 | pkg_objset = oe.sbom30.ObjectSet.new_objset(d, "package-" + pkg_name) | ||
| 613 | |||
| 614 | spdx_package = pkg_objset.add_root( | ||
| 615 | oe.spdx30.software_Package( | ||
| 616 | _id=pkg_objset.new_spdxid("package", pkg_name), | ||
| 617 | creationInfo=pkg_objset.doc.creationInfo, | ||
| 618 | name=pkg_name, | ||
| 619 | software_packageVersion=d.getVar("SPDX_PACKAGE_VERSION"), | ||
| 620 | ) | ||
| 621 | ) | ||
| 622 | set_timestamp_now(d, spdx_package, "builtTime") | ||
| 623 | |||
| 624 | set_purposes( | ||
| 625 | d, | ||
| 626 | spdx_package, | ||
| 627 | "SPDX_PACKAGE_ADDITIONAL_PURPOSE:%s" % package, | ||
| 628 | "SPDX_PACKAGE_ADDITIONAL_PURPOSE", | ||
| 629 | force_purposes=["install"], | ||
| 630 | ) | ||
| 631 | |||
| 632 | supplier = build_objset.new_agent("SPDX_PACKAGE_SUPPLIER") | ||
| 633 | if supplier is not None: | ||
| 634 | spdx_package.suppliedBy = ( | ||
| 635 | supplier if isinstance(supplier, str) else supplier._id | ||
| 636 | ) | ||
| 637 | |||
| 638 | set_var_field( | ||
| 639 | "HOMEPAGE", spdx_package, "software_homePage", package=package | ||
| 640 | ) | ||
| 641 | set_var_field("SUMMARY", spdx_package, "summary", package=package) | ||
| 642 | set_var_field("DESCRIPTION", spdx_package, "description", package=package) | ||
| 643 | |||
| 644 | if d.getVar("SPDX_PACKAGE_URL:%s" % package) or d.getVar("SPDX_PACKAGE_URL"): | ||
| 645 | set_var_field( | ||
| 646 | "SPDX_PACKAGE_URL", | ||
| 647 | spdx_package, | ||
| 648 | "software_packageUrl", | ||
| 649 | package=package | ||
| 650 | ) | ||
| 651 | |||
| 652 | pkg_objset.new_scoped_relationship( | ||
| 653 | [oe.sbom30.get_element_link_id(build)], | ||
| 654 | oe.spdx30.RelationshipType.hasOutput, | ||
| 655 | oe.spdx30.LifecycleScopeType.build, | ||
| 656 | [spdx_package], | ||
| 657 | ) | ||
| 658 | |||
| 659 | for cpe_id in cpe_ids: | ||
| 660 | spdx_package.externalIdentifier.append( | ||
| 661 | oe.spdx30.ExternalIdentifier( | ||
| 662 | externalIdentifierType=oe.spdx30.ExternalIdentifierType.cpe23, | ||
| 663 | identifier=cpe_id, | ||
| 664 | ) | ||
| 665 | ) | ||
| 666 | |||
| 667 | # TODO: Generate a file for each actual IPK/DEB/RPM/TGZ file | ||
| 668 | # generated and link it to the package | ||
| 669 | # spdx_package_file = pkg_objset.add(oe.spdx30.software_File( | ||
| 670 | # _id=pkg_objset.new_spdxid("distribution", pkg_name), | ||
| 671 | # creationInfo=pkg_objset.doc.creationInfo, | ||
| 672 | # name=pkg_name, | ||
| 673 | # software_primaryPurpose=spdx_package.software_primaryPurpose, | ||
| 674 | # software_additionalPurpose=spdx_package.software_additionalPurpose, | ||
| 675 | # )) | ||
| 676 | # set_timestamp_now(d, spdx_package_file, "builtTime") | ||
| 677 | |||
| 678 | ## TODO add hashes | ||
| 679 | # pkg_objset.new_relationship( | ||
| 680 | # [spdx_package], | ||
| 681 | # oe.spdx30.RelationshipType.hasDistributionArtifact, | ||
| 682 | # [spdx_package_file], | ||
| 683 | # ) | ||
| 684 | |||
| 685 | # NOTE: licenses live in the recipe collection and are referenced | ||
| 686 | # by ID in the package collection(s). This helps reduce duplication | ||
| 687 | # (since a lot of packages will have the same license), and also | ||
| 688 | # prevents duplicate license SPDX IDs in the packages | ||
| 689 | package_license = d.getVar("LICENSE:%s" % package) | ||
| 690 | if package_license and package_license != d.getVar("LICENSE"): | ||
| 691 | package_spdx_license = add_license_expression( | ||
| 692 | d, build_objset, package_license, license_data | ||
| 693 | ) | ||
| 694 | else: | ||
| 695 | package_spdx_license = recipe_spdx_license | ||
| 696 | |||
| 697 | pkg_objset.new_relationship( | ||
| 698 | [spdx_package], | ||
| 699 | oe.spdx30.RelationshipType.hasDeclaredLicense, | ||
| 700 | [oe.sbom30.get_element_link_id(package_spdx_license)], | ||
| 701 | ) | ||
| 702 | |||
| 703 | # NOTE: CVE Elements live in the recipe collection | ||
| 704 | all_cves = set() | ||
| 705 | for status, cves in cve_by_status.items(): | ||
| 706 | for cve, items in cves.items(): | ||
| 707 | spdx_cve, detail, description = items | ||
| 708 | spdx_cve_id = oe.sbom30.get_element_link_id(spdx_cve) | ||
| 709 | |||
| 710 | all_cves.add(spdx_cve_id) | ||
| 711 | |||
| 712 | if status == "Patched": | ||
| 713 | pkg_objset.new_vex_patched_relationship( | ||
| 714 | [spdx_cve_id], [spdx_package] | ||
| 715 | ) | ||
| 716 | elif status == "Unpatched": | ||
| 717 | pkg_objset.new_vex_unpatched_relationship( | ||
| 718 | [spdx_cve_id], [spdx_package] | ||
| 719 | ) | ||
| 720 | elif status == "Ignored": | ||
| 721 | spdx_vex = pkg_objset.new_vex_ignored_relationship( | ||
| 722 | [spdx_cve_id], | ||
| 723 | [spdx_package], | ||
| 724 | impact_statement=description, | ||
| 725 | ) | ||
| 726 | |||
| 727 | vex_just_type = d.getVarFlag( | ||
| 728 | "CVE_CHECK_VEX_JUSTIFICATION", detail | ||
| 729 | ) | ||
| 730 | if vex_just_type: | ||
| 731 | if ( | ||
| 732 | vex_just_type | ||
| 733 | not in oe.spdx30.security_VexJustificationType.NAMED_INDIVIDUALS | ||
| 734 | ): | ||
| 735 | bb.fatal( | ||
| 736 | f"Unknown vex justification '{vex_just_type}', detail '{detail}', for ignored {cve}" | ||
| 737 | ) | ||
| 738 | |||
| 739 | for v in spdx_vex: | ||
| 740 | v.security_justificationType = oe.spdx30.security_VexJustificationType.NAMED_INDIVIDUALS[ | ||
| 741 | vex_just_type | ||
| 742 | ] | ||
| 743 | |||
| 744 | elif status == "Unknown": | ||
| 745 | bb.note(f"Skipping {cve} with status 'Unknown'") | ||
| 746 | else: | ||
| 747 | bb.fatal(f"Unknown {cve} status '{status}'") | ||
| 748 | |||
| 749 | if all_cves: | ||
| 750 | pkg_objset.new_relationship( | ||
| 751 | [spdx_package], | ||
| 752 | oe.spdx30.RelationshipType.hasAssociatedVulnerability, | ||
| 753 | sorted(list(all_cves)), | ||
| 754 | ) | ||
| 755 | |||
| 756 | bb.debug(1, "Adding package files to SPDX for package %s" % pkg_name) | ||
| 757 | package_files = add_package_files( | ||
| 758 | d, | ||
| 759 | pkg_objset, | ||
| 760 | pkgdest / package, | ||
| 761 | lambda file_counter: pkg_objset.new_spdxid( | ||
| 762 | "package", pkg_name, "file", str(file_counter) | ||
| 763 | ), | ||
| 764 | # TODO: Can we know the purpose here? | ||
| 765 | lambda filepath: [], | ||
| 766 | license_data, | ||
| 767 | ignore_top_level_dirs=["CONTROL", "DEBIAN"], | ||
| 768 | archive=None, | ||
| 769 | ) | ||
| 770 | |||
| 771 | if package_files: | ||
| 772 | pkg_objset.new_relationship( | ||
| 773 | [spdx_package], | ||
| 774 | oe.spdx30.RelationshipType.contains, | ||
| 775 | sorted(list(package_files)), | ||
| 776 | ) | ||
| 777 | |||
| 778 | if include_sources: | ||
| 779 | debug_sources = get_package_sources_from_debug( | ||
| 780 | d, package, package_files, dep_sources, source_hash_cache | ||
| 781 | ) | ||
| 782 | debug_source_ids |= set( | ||
| 783 | oe.sbom30.get_element_link_id(d) for d in debug_sources | ||
| 784 | ) | ||
| 785 | |||
| 786 | oe.sbom30.write_recipe_jsonld_doc( | ||
| 787 | d, pkg_objset, "packages-staging", deploydir, create_spdx_id_links=False | ||
| 788 | ) | ||
| 789 | |||
| 790 | if include_sources: | ||
| 791 | bb.debug(1, "Adding sysroot files to SPDX") | ||
| 792 | sysroot_files = add_package_files( | ||
| 793 | d, | ||
| 794 | build_objset, | ||
| 795 | d.expand("${COMPONENTS_DIR}/${PACKAGE_ARCH}/${PN}"), | ||
| 796 | lambda file_counter: build_objset.new_spdxid("sysroot", str(file_counter)), | ||
| 797 | lambda filepath: [], | ||
| 798 | license_data, | ||
| 799 | archive=None, | ||
| 800 | ) | ||
| 801 | |||
| 802 | if sysroot_files: | ||
| 803 | build_objset.new_scoped_relationship( | ||
| 804 | [build], | ||
| 805 | oe.spdx30.RelationshipType.hasOutput, | ||
| 806 | oe.spdx30.LifecycleScopeType.build, | ||
| 807 | sorted(list(sysroot_files)), | ||
| 808 | ) | ||
| 809 | |||
| 810 | if build_inputs or debug_source_ids: | ||
| 811 | build_objset.new_scoped_relationship( | ||
| 812 | [build], | ||
| 813 | oe.spdx30.RelationshipType.hasInput, | ||
| 814 | oe.spdx30.LifecycleScopeType.build, | ||
| 815 | sorted(list(build_inputs)) + sorted(list(debug_source_ids)), | ||
| 816 | ) | ||
| 817 | |||
| 818 | oe.sbom30.write_recipe_jsonld_doc(d, build_objset, "recipes", deploydir) | ||
| 819 | |||
| 820 | |||
| 821 | def create_package_spdx(d): | ||
| 822 | deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX")) | ||
| 823 | deploydir = Path(d.getVar("SPDXRUNTIMEDEPLOY")) | ||
| 824 | is_native = bb.data.inherits_class("native", d) or bb.data.inherits_class( | ||
| 825 | "cross", d | ||
| 826 | ) | ||
| 827 | |||
| 828 | providers = oe.spdx_common.collect_package_providers(d) | ||
| 829 | pkg_arch = d.getVar("SSTATE_PKGARCH") | ||
| 830 | |||
| 831 | if is_native: | ||
| 832 | return | ||
| 833 | |||
| 834 | bb.build.exec_func("read_subpackage_metadata", d) | ||
| 835 | |||
| 836 | dep_package_cache = {} | ||
| 837 | |||
| 838 |     # Any element common to all packages that needs to be referenced by ID | ||
| 839 |     # should be written into this object set | ||
| 840 | common_objset = oe.sbom30.ObjectSet.new_objset( | ||
| 841 | d, "%s-package-common" % d.getVar("PN") | ||
| 842 | ) | ||
| 843 | |||
| 844 | pkgdest = Path(d.getVar("PKGDEST")) | ||
| 845 | for package in d.getVar("PACKAGES").split(): | ||
| 846 | localdata = bb.data.createCopy(d) | ||
| 847 | pkg_name = d.getVar("PKG:%s" % package) or package | ||
| 848 | localdata.setVar("PKG", pkg_name) | ||
| 849 | localdata.setVar("OVERRIDES", d.getVar("OVERRIDES", False) + ":" + package) | ||
| 850 | |||
| 851 | if not oe.packagedata.packaged(package, localdata): | ||
| 852 | continue | ||
| 853 | |||
| 854 | spdx_package, pkg_objset = oe.sbom30.load_obj_in_jsonld( | ||
| 855 | d, | ||
| 856 | pkg_arch, | ||
| 857 | "packages-staging", | ||
| 858 | "package-" + pkg_name, | ||
| 859 | oe.spdx30.software_Package, | ||
| 860 | software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.install, | ||
| 861 | ) | ||
| 862 | |||
| 863 | # We will write out a new collection, so link it to the new | ||
| 864 | # creation info in the common package data. The old creation info | ||
| 865 | # should still exist and be referenced by all the existing elements | ||
| 866 | # in the package | ||
| 867 | pkg_objset.creationInfo = pkg_objset.copy_creation_info( | ||
| 868 | common_objset.doc.creationInfo | ||
| 869 | ) | ||
| 870 | |||
| 871 | runtime_spdx_deps = set() | ||
| 872 | |||
| 873 | deps = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS") or "") | ||
| 874 | seen_deps = set() | ||
| 875 | for dep, _ in deps.items(): | ||
| 876 | if dep in seen_deps: | ||
| 877 | continue | ||
| 878 | |||
| 879 | if dep not in providers: | ||
| 880 | continue | ||
| 881 | |||
| 882 | (dep, _) = providers[dep] | ||
| 883 | |||
| 884 | if not oe.packagedata.packaged(dep, localdata): | ||
| 885 | continue | ||
| 886 | |||
| 887 | dep_pkg_data = oe.packagedata.read_subpkgdata_dict(dep, d) | ||
| 888 | dep_pkg = dep_pkg_data["PKG"] | ||
| 889 | |||
| 890 | if dep in dep_package_cache: | ||
| 891 | dep_spdx_package = dep_package_cache[dep] | ||
| 892 | else: | ||
| 893 | bb.debug(1, "Searching for %s" % dep_pkg) | ||
| 894 | dep_spdx_package, _ = oe.sbom30.find_root_obj_in_jsonld( | ||
| 895 | d, | ||
| 896 | "packages-staging", | ||
| 897 | "package-" + dep_pkg, | ||
| 898 | oe.spdx30.software_Package, | ||
| 899 | software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.install, | ||
| 900 | ) | ||
| 901 | dep_package_cache[dep] = dep_spdx_package | ||
| 902 | |||
| 903 | runtime_spdx_deps.add(dep_spdx_package) | ||
| 904 | seen_deps.add(dep) | ||
| 905 | |||
| 906 | if runtime_spdx_deps: | ||
| 907 | pkg_objset.new_scoped_relationship( | ||
| 908 | [spdx_package], | ||
| 909 | oe.spdx30.RelationshipType.dependsOn, | ||
| 910 | oe.spdx30.LifecycleScopeType.runtime, | ||
| 911 | [oe.sbom30.get_element_link_id(dep) for dep in runtime_spdx_deps], | ||
| 912 | ) | ||
| 913 | |||
| 914 | oe.sbom30.write_recipe_jsonld_doc(d, pkg_objset, "packages", deploydir) | ||
| 915 | |||
| 916 | oe.sbom30.write_recipe_jsonld_doc(d, common_objset, "common-package", deploydir) | ||
| 917 | |||
| 918 | |||
| 919 | def write_bitbake_spdx(d): | ||
| 920 | # Set PN to "bitbake" so that SPDX IDs can be generated | ||
| 921 | d.setVar("PN", "bitbake") | ||
| 922 | d.setVar("BB_TASKHASH", "bitbake") | ||
| 923 | oe.spdx_common.load_spdx_license_data(d) | ||
| 924 | |||
| 925 | deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX")) | ||
| 926 | |||
| 927 | objset = oe.sbom30.ObjectSet.new_objset(d, "bitbake", False) | ||
| 928 | |||
| 929 | host_import_key = d.getVar("SPDX_BUILD_HOST") | ||
| 930 | invoked_by = objset.new_agent("SPDX_INVOKED_BY", add=False) | ||
| 931 | on_behalf_of = objset.new_agent("SPDX_ON_BEHALF_OF", add=False) | ||
| 932 | |||
| 933 | if d.getVar("SPDX_INCLUDE_BITBAKE_PARENT_BUILD") == "1": | ||
| 934 |         # Since the Build objects are unique, we may as well set the creation | ||
| 935 |         # time to the current time instead of the fallback SOURCE_DATE_EPOCH | ||
| 936 | objset.doc.creationInfo.created = datetime.now(timezone.utc) | ||
| 937 | |||
| 938 | # Each invocation of bitbake should have a unique ID since it is a | ||
| 939 | # unique build | ||
| 940 | nonce = os.urandom(16).hex() | ||
| 941 | |||
| 942 | build = objset.add_root( | ||
| 943 | oe.spdx30.build_Build( | ||
| 944 | _id=objset.new_spdxid(nonce, include_unihash=False), | ||
| 945 | creationInfo=objset.doc.creationInfo, | ||
| 946 | build_buildType=oe.sbom30.SPDX_BUILD_TYPE, | ||
| 947 | ) | ||
| 948 | ) | ||
| 949 | set_timestamp_now(d, build, "build_buildStartTime") | ||
| 950 | |||
| 951 | if host_import_key: | ||
| 952 | objset.new_scoped_relationship( | ||
| 953 | [build], | ||
| 954 | oe.spdx30.RelationshipType.hasHost, | ||
| 955 | oe.spdx30.LifecycleScopeType.build, | ||
| 956 | [objset.new_import(host_import_key)], | ||
| 957 | ) | ||
| 958 | |||
| 959 | if invoked_by: | ||
| 960 | objset.add(invoked_by) | ||
| 961 | invoked_by_spdx = objset.new_scoped_relationship( | ||
| 962 | [build], | ||
| 963 | oe.spdx30.RelationshipType.invokedBy, | ||
| 964 | oe.spdx30.LifecycleScopeType.build, | ||
| 965 | [invoked_by], | ||
| 966 | ) | ||
| 967 | |||
| 968 | if on_behalf_of: | ||
| 969 | objset.add(on_behalf_of) | ||
| 970 | objset.new_scoped_relationship( | ||
| 971 | [on_behalf_of], | ||
| 972 | oe.spdx30.RelationshipType.delegatedTo, | ||
| 973 | oe.spdx30.LifecycleScopeType.build, | ||
| 974 | invoked_by_spdx, | ||
| 975 | ) | ||
| 976 | |||
| 977 | elif on_behalf_of: | ||
| 978 | bb.warn("SPDX_ON_BEHALF_OF has no effect if SPDX_INVOKED_BY is not set") | ||
| 979 | |||
| 980 | else: | ||
| 981 | if host_import_key: | ||
| 982 | bb.warn( | ||
| 983 | "SPDX_BUILD_HOST has no effect if SPDX_INCLUDE_BITBAKE_PARENT_BUILD is not set" | ||
| 984 | ) | ||
| 985 | |||
| 986 | if invoked_by: | ||
| 987 | bb.warn( | ||
| 988 | "SPDX_INVOKED_BY has no effect if SPDX_INCLUDE_BITBAKE_PARENT_BUILD is not set" | ||
| 989 | ) | ||
| 990 | |||
| 991 | if on_behalf_of: | ||
| 992 | bb.warn( | ||
| 993 | "SPDX_ON_BEHALF_OF has no effect if SPDX_INCLUDE_BITBAKE_PARENT_BUILD is not set" | ||
| 994 | ) | ||
| 995 | |||
| 996 | for obj in objset.foreach_type(oe.spdx30.Element): | ||
| 997 | obj.extension.append(oe.sbom30.OEIdAliasExtension()) | ||
| 998 | |||
| 999 | oe.sbom30.write_jsonld_doc(d, objset, deploy_dir_spdx / "bitbake.spdx.json") | ||
| 1000 | |||
| 1001 | |||
| 1002 | def collect_build_package_inputs(d, objset, build, packages, files_by_hash=None): | ||
| 1003 | import oe.sbom30 | ||
| 1004 | |||
| 1005 | providers = oe.spdx_common.collect_package_providers(d) | ||
| 1006 | |||
| 1007 | build_deps = set() | ||
| 1008 | missing_providers = set() | ||
| 1009 | |||
| 1010 | for name in sorted(packages.keys()): | ||
| 1011 | if name not in providers: | ||
| 1012 | missing_providers.add(name) | ||
| 1013 | continue | ||
| 1014 | |||
| 1015 | pkg_name, pkg_hashfn = providers[name] | ||
| 1016 | |||
| 1017 | # Copy all of the package SPDX files into the Sbom elements | ||
| 1018 | pkg_spdx, pkg_objset = oe.sbom30.find_root_obj_in_jsonld( | ||
| 1019 | d, | ||
| 1020 | "packages", | ||
| 1021 | "package-" + pkg_name, | ||
| 1022 | oe.spdx30.software_Package, | ||
| 1023 | software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.install, | ||
| 1024 | ) | ||
| 1025 | build_deps.add(oe.sbom30.get_element_link_id(pkg_spdx)) | ||
| 1026 | |||
| 1027 | if files_by_hash is not None: | ||
| 1028 | for h, f in pkg_objset.by_sha256_hash.items(): | ||
| 1029 | files_by_hash.setdefault(h, set()).update(f) | ||
| 1030 | |||
| 1031 | if missing_providers: | ||
| 1032 | bb.fatal( | ||
| 1033 | f"Unable to find SPDX provider(s) for: {', '.join(sorted(missing_providers))}" | ||
| 1034 | ) | ||
| 1035 | |||
| 1036 | if build_deps: | ||
| 1037 | objset.new_scoped_relationship( | ||
| 1038 | [build], | ||
| 1039 | oe.spdx30.RelationshipType.hasInput, | ||
| 1040 | oe.spdx30.LifecycleScopeType.build, | ||
| 1041 | sorted(list(build_deps)), | ||
| 1042 | ) | ||
| 1043 | |||
| 1044 | |||
| 1045 | def create_rootfs_spdx(d): | ||
| 1046 | deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX")) | ||
| 1047 | deploydir = Path(d.getVar("SPDXROOTFSDEPLOY")) | ||
| 1048 | root_packages_file = Path(d.getVar("SPDX_ROOTFS_PACKAGES")) | ||
| 1049 | image_basename = d.getVar("IMAGE_BASENAME") | ||
| 1050 | image_rootfs = d.getVar("IMAGE_ROOTFS") | ||
| 1051 | machine = d.getVar("MACHINE") | ||
| 1052 | |||
| 1053 | with root_packages_file.open("r") as f: | ||
| 1054 | packages = json.load(f) | ||
| 1055 | |||
| 1056 | objset = oe.sbom30.ObjectSet.new_objset( | ||
| 1057 | d, "%s-%s-rootfs" % (image_basename, machine) | ||
| 1058 | ) | ||
| 1059 | |||
| 1060 | rootfs = objset.add_root( | ||
| 1061 | oe.spdx30.software_Package( | ||
| 1062 | _id=objset.new_spdxid("rootfs", image_basename), | ||
| 1063 | creationInfo=objset.doc.creationInfo, | ||
| 1064 | name=image_basename, | ||
| 1065 | software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.archive, | ||
| 1066 | ) | ||
| 1067 | ) | ||
| 1068 | set_timestamp_now(d, rootfs, "builtTime") | ||
| 1069 | |||
| 1070 | rootfs_build = objset.add_root(objset.new_task_build("rootfs", "rootfs")) | ||
| 1071 | set_timestamp_now(d, rootfs_build, "build_buildEndTime") | ||
| 1072 | |||
| 1073 | objset.new_scoped_relationship( | ||
| 1074 | [rootfs_build], | ||
| 1075 | oe.spdx30.RelationshipType.hasOutput, | ||
| 1076 | oe.spdx30.LifecycleScopeType.build, | ||
| 1077 | [rootfs], | ||
| 1078 | ) | ||
| 1079 | |||
| 1080 | files_by_hash = {} | ||
| 1081 | collect_build_package_inputs(d, objset, rootfs_build, packages, files_by_hash) | ||
| 1082 | |||
| 1083 | files = set() | ||
| 1084 | for dirpath, dirnames, filenames in os.walk(image_rootfs, onerror=walk_error): | ||
| 1085 | dirnames.sort() | ||
| 1086 | filenames.sort() | ||
| 1087 | for fn in filenames: | ||
| 1088 | fpath = Path(dirpath) / fn | ||
| 1089 | if fpath.is_symlink() or not fpath.is_file(): | ||
| 1090 | continue | ||
| 1091 | |||
| 1092 | relpath = str(fpath.relative_to(image_rootfs)) | ||
| 1093 | h = bb.utils.sha256_file(fpath) | ||
| 1094 | |||
| 1095 | found = False | ||
| 1096 | if h in files_by_hash: | ||
| 1097 | for f in files_by_hash[h]: | ||
| 1098 | if isinstance(f, oe.spdx30.software_File) and f.name == relpath: | ||
| 1099 | files.add(oe.sbom30.get_element_link_id(f)) | ||
| 1100 | found = True | ||
| 1101 | break | ||
| 1102 | |||
| 1103 | if not found: | ||
| 1104 | files.add( | ||
| 1105 | objset.new_file( | ||
| 1106 | objset.new_spdxid("rootfs-file", relpath), | ||
| 1107 | relpath, | ||
| 1108 | fpath, | ||
| 1109 | ) | ||
| 1110 | ) | ||
| 1111 | |||
| 1112 | if files: | ||
| 1113 | objset.new_relationship( | ||
| 1114 | [rootfs], | ||
| 1115 | oe.spdx30.RelationshipType.contains, | ||
| 1116 | sorted(list(files)), | ||
| 1117 | ) | ||
| 1118 | |||
| 1119 | oe.sbom30.write_recipe_jsonld_doc(d, objset, "rootfs", deploydir) | ||
| 1120 | |||
| 1121 | |||
| 1122 | def create_image_spdx(d): | ||
| 1123 | import oe.sbom30 | ||
| 1124 | |||
| 1125 | image_deploy_dir = Path(d.getVar("IMGDEPLOYDIR")) | ||
| 1126 | manifest_path = Path(d.getVar("IMAGE_OUTPUT_MANIFEST")) | ||
| 1127 | spdx_work_dir = Path(d.getVar("SPDXIMAGEWORK")) | ||
| 1128 | |||
| 1129 | image_basename = d.getVar("IMAGE_BASENAME") | ||
| 1130 | machine = d.getVar("MACHINE") | ||
| 1131 | |||
| 1132 | objset = oe.sbom30.ObjectSet.new_objset( | ||
| 1133 | d, "%s-%s-image" % (image_basename, machine) | ||
| 1134 | ) | ||
| 1135 | |||
| 1136 | with manifest_path.open("r") as f: | ||
| 1137 | manifest = json.load(f) | ||
| 1138 | |||
| 1139 | builds = [] | ||
| 1140 | for task in manifest: | ||
| 1141 | imagetype = task["imagetype"] | ||
| 1142 | taskname = task["taskname"] | ||
| 1143 | |||
| 1144 | image_build = objset.add_root( | ||
| 1145 | objset.new_task_build(taskname, "image/%s" % imagetype) | ||
| 1146 | ) | ||
| 1147 | set_timestamp_now(d, image_build, "build_buildEndTime") | ||
| 1148 | builds.append(image_build) | ||
| 1149 | |||
| 1150 | artifacts = [] | ||
| 1151 | |||
| 1152 | for image in task["images"]: | ||
| 1153 | image_filename = image["filename"] | ||
| 1154 | image_path = image_deploy_dir / image_filename | ||
| 1155 | if os.path.isdir(image_path): | ||
| 1156 | a = add_package_files( | ||
| 1157 | d, | ||
| 1158 | objset, | ||
| 1159 | image_path, | ||
| 1160 | lambda file_counter: objset.new_spdxid( | ||
| 1161 | "imagefile", str(file_counter) | ||
| 1162 | ), | ||
| 1163 | lambda filepath: [], | ||
| 1164 | license_data=None, | ||
| 1165 | ignore_dirs=[], | ||
| 1166 | ignore_top_level_dirs=[], | ||
| 1167 | archive=None, | ||
| 1168 | ) | ||
| 1169 | artifacts.extend(a) | ||
| 1170 | else: | ||
| 1171 | a = objset.add_root( | ||
| 1172 | oe.spdx30.software_File( | ||
| 1173 | _id=objset.new_spdxid("image", image_filename), | ||
| 1174 | creationInfo=objset.doc.creationInfo, | ||
| 1175 | name=image_filename, | ||
| 1176 | verifiedUsing=[ | ||
| 1177 | oe.spdx30.Hash( | ||
| 1178 | algorithm=oe.spdx30.HashAlgorithm.sha256, | ||
| 1179 | hashValue=bb.utils.sha256_file(image_path), | ||
| 1180 | ) | ||
| 1181 | ], | ||
| 1182 | ) | ||
| 1183 | ) | ||
| 1184 | |||
| 1185 | artifacts.append(a) | ||
| 1186 | |||
| 1187 | for a in artifacts: | ||
| 1188 | set_purposes( | ||
| 1189 | d, a, "SPDX_IMAGE_PURPOSE:%s" % imagetype, "SPDX_IMAGE_PURPOSE" | ||
| 1190 | ) | ||
| 1191 | |||
| 1192 | set_timestamp_now(d, a, "builtTime") | ||
| 1193 | |||
| 1194 | |||
| 1195 | if artifacts: | ||
| 1196 | objset.new_scoped_relationship( | ||
| 1197 | [image_build], | ||
| 1198 | oe.spdx30.RelationshipType.hasOutput, | ||
| 1199 | oe.spdx30.LifecycleScopeType.build, | ||
| 1200 | artifacts, | ||
| 1201 | ) | ||
| 1202 | |||
| 1203 | if builds: | ||
| 1204 | rootfs_image, _ = oe.sbom30.find_root_obj_in_jsonld( | ||
| 1205 | d, | ||
| 1206 | "rootfs", | ||
| 1207 | "%s-%s-rootfs" % (image_basename, machine), | ||
| 1208 | oe.spdx30.software_Package, | ||
| 1209 | # TODO: Should use a purpose to filter here? | ||
| 1210 | ) | ||
| 1211 | objset.new_scoped_relationship( | ||
| 1212 | builds, | ||
| 1213 | oe.spdx30.RelationshipType.hasInput, | ||
| 1214 | oe.spdx30.LifecycleScopeType.build, | ||
| 1215 | [oe.sbom30.get_element_link_id(rootfs_image)], | ||
| 1216 | ) | ||
| 1217 | |||
| 1218 | objset.add_aliases() | ||
| 1219 | objset.link() | ||
| 1220 | oe.sbom30.write_recipe_jsonld_doc(d, objset, "image", spdx_work_dir) | ||
| 1221 | |||
| 1222 | |||
| 1223 | def create_image_sbom_spdx(d): | ||
| 1224 | import oe.sbom30 | ||
| 1225 | |||
| 1226 | image_name = d.getVar("IMAGE_NAME") | ||
| 1227 | image_basename = d.getVar("IMAGE_BASENAME") | ||
| 1228 | image_link_name = d.getVar("IMAGE_LINK_NAME") | ||
| 1229 | imgdeploydir = Path(d.getVar("SPDXIMAGEDEPLOYDIR")) | ||
| 1230 | machine = d.getVar("MACHINE") | ||
| 1231 | |||
| 1232 | spdx_path = imgdeploydir / (image_name + ".spdx.json") | ||
| 1233 | |||
| 1234 | root_elements = [] | ||
| 1235 | |||
| 1236 | # TODO: Do we need to add the rootfs or are the image files sufficient? | ||
| 1237 | rootfs_image, _ = oe.sbom30.find_root_obj_in_jsonld( | ||
| 1238 | d, | ||
| 1239 | "rootfs", | ||
| 1240 | "%s-%s-rootfs" % (image_basename, machine), | ||
| 1241 | oe.spdx30.software_Package, | ||
| 1242 | # TODO: Should use a purpose here? | ||
| 1243 | ) | ||
| 1244 | root_elements.append(oe.sbom30.get_element_link_id(rootfs_image)) | ||
| 1245 | |||
| 1246 | image_objset, _ = oe.sbom30.find_jsonld( | ||
| 1247 | d, "image", "%s-%s-image" % (image_basename, machine), required=True | ||
| 1248 | ) | ||
| 1249 | for o in image_objset.foreach_root(oe.spdx30.software_File): | ||
| 1250 | root_elements.append(oe.sbom30.get_element_link_id(o)) | ||
| 1251 | |||
| 1252 | objset, sbom = oe.sbom30.create_sbom(d, image_name, root_elements) | ||
| 1253 | |||
| 1254 | oe.sbom30.write_jsonld_doc(d, objset, spdx_path) | ||
| 1255 | |||
| 1256 | def make_image_link(target_path, suffix): | ||
| 1257 | if image_link_name: | ||
| 1258 | link = imgdeploydir / (image_link_name + suffix) | ||
| 1259 | if link != target_path: | ||
| 1260 | link.symlink_to(os.path.relpath(target_path, link.parent)) | ||
| 1261 | |||
| 1262 | make_image_link(spdx_path, ".spdx.json") | ||
| 1263 | |||
| 1264 | |||
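For reference, the make_image_link() helper above only creates a stable, relative symlink next to the versioned SBOM file. A standalone sketch of the same logic, with an assumed deploy directory and assumed file names:

    import os
    import tempfile
    from pathlib import Path

    imgdeploydir = Path(tempfile.mkdtemp())
    # Versioned SBOM as written by create_image_sbom_spdx() (names assumed)
    spdx_path = imgdeploydir / "core-image-minimal-qemux86-64-20240101.rootfs.spdx.json"
    spdx_path.touch()

    # Stable name (IMAGE_LINK_NAME plus suffix) pointing at the versioned file
    link = imgdeploydir / "core-image-minimal-qemux86-64.spdx.json"
    if link != spdx_path:
        link.symlink_to(os.path.relpath(spdx_path, link.parent))

    print(os.readlink(link))  # -> core-image-minimal-qemux86-64-20240101.rootfs.spdx.json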
| 1265 | def sdk_create_spdx(d, sdk_type, spdx_work_dir, toolchain_outputname): | ||
| 1266 | sdk_name = toolchain_outputname + "-" + sdk_type | ||
| 1267 | sdk_packages = oe.sdk.sdk_list_installed_packages(d, sdk_type == "target") | ||
| 1268 | |||
| 1269 | objset = oe.sbom30.ObjectSet.new_objset(d, sdk_name) | ||
| 1270 | |||
| 1271 | sdk_rootfs = objset.add_root( | ||
| 1272 | oe.spdx30.software_Package( | ||
| 1273 | _id=objset.new_spdxid("sdk-rootfs", sdk_name), | ||
| 1274 | creationInfo=objset.doc.creationInfo, | ||
| 1275 | name=sdk_name, | ||
| 1276 | software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.archive, | ||
| 1277 | ) | ||
| 1278 | ) | ||
| 1279 | set_timestamp_now(d, sdk_rootfs, "builtTime") | ||
| 1280 | |||
| 1281 | sdk_build = objset.add_root(objset.new_task_build("sdk-rootfs", "sdk-rootfs")) | ||
| 1282 | set_timestamp_now(d, sdk_build, "build_buildEndTime") | ||
| 1283 | |||
| 1284 | objset.new_scoped_relationship( | ||
| 1285 | [sdk_build], | ||
| 1286 | oe.spdx30.RelationshipType.hasOutput, | ||
| 1287 | oe.spdx30.LifecycleScopeType.build, | ||
| 1288 | [sdk_rootfs], | ||
| 1289 | ) | ||
| 1290 | |||
| 1291 | collect_build_package_inputs(d, objset, sdk_build, sdk_packages) | ||
| 1292 | |||
| 1293 | objset.add_aliases() | ||
| 1294 | oe.sbom30.write_jsonld_doc(d, objset, spdx_work_dir / "sdk-rootfs.spdx.json") | ||
| 1295 | |||
| 1296 | |||
| 1297 | def create_sdk_sbom(d, sdk_deploydir, spdx_work_dir, toolchain_outputname): | ||
| 1298 | # Load the document written earlier | ||
| 1299 | rootfs_objset = oe.sbom30.load_jsonld( | ||
| 1300 | d, spdx_work_dir / "sdk-rootfs.spdx.json", required=True | ||
| 1301 | ) | ||
| 1302 | |||
| 1303 | # Create a new build for the SDK installer | ||
| 1304 | sdk_build = rootfs_objset.new_task_build("sdk-populate", "sdk-populate") | ||
| 1305 | set_timestamp_now(d, sdk_build, "build_buildEndTime") | ||
| 1306 | |||
| 1307 | rootfs = rootfs_objset.find_root(oe.spdx30.software_Package) | ||
| 1308 | if rootfs is None: | ||
| 1309 | bb.fatal("Unable to find rootfs artifact") | ||
| 1310 | |||
| 1311 | rootfs_objset.new_scoped_relationship( | ||
| 1312 | [sdk_build], | ||
| 1313 | oe.spdx30.RelationshipType.hasInput, | ||
| 1314 | oe.spdx30.LifecycleScopeType.build, | ||
| 1315 | [rootfs], | ||
| 1316 | ) | ||
| 1317 | |||
| 1318 | files = set() | ||
| 1319 | root_files = [] | ||
| 1320 | |||
| 1321 | # NOTE: os.walk() doesn't return symlinks | ||
| 1322 | for dirpath, dirnames, filenames in os.walk(sdk_deploydir, onerror=walk_error): | ||
| 1323 | dirnames.sort() | ||
| 1324 | filenames.sort() | ||
| 1325 | for fn in filenames: | ||
| 1326 | fpath = Path(dirpath) / fn | ||
| 1327 | if not fpath.is_file() or fpath.is_symlink(): | ||
| 1328 | continue | ||
| 1329 | |||
| 1330 | relpath = str(fpath.relative_to(sdk_deploydir)) | ||
| 1331 | |||
| 1332 | f = rootfs_objset.new_file( | ||
| 1333 | rootfs_objset.new_spdxid("sdk-installer", relpath), | ||
| 1334 | relpath, | ||
| 1335 | fpath, | ||
| 1336 | ) | ||
| 1337 | set_timestamp_now(d, f, "builtTime") | ||
| 1338 | |||
| 1339 | if fn.endswith(".manifest"): | ||
| 1340 | f.software_primaryPurpose = oe.spdx30.software_SoftwarePurpose.manifest | ||
| 1341 | elif fn.endswith(".testdata.json"): | ||
| 1342 | f.software_primaryPurpose = ( | ||
| 1343 | oe.spdx30.software_SoftwarePurpose.configuration | ||
| 1344 | ) | ||
| 1345 | else: | ||
| 1346 | set_purposes(d, f, "SPDX_SDK_PURPOSE") | ||
| 1347 | root_files.append(f) | ||
| 1348 | |||
| 1349 | files.add(f) | ||
| 1350 | |||
| 1351 | if files: | ||
| 1352 | rootfs_objset.new_scoped_relationship( | ||
| 1353 | [sdk_build], | ||
| 1354 | oe.spdx30.RelationshipType.hasOutput, | ||
| 1355 | oe.spdx30.LifecycleScopeType.build, | ||
| 1356 | files, | ||
| 1357 | ) | ||
| 1358 | else: | ||
| 1359 | bb.warn(f"No SDK output files found in {sdk_deploydir}") | ||
| 1360 | |||
| 1361 | objset, sbom = oe.sbom30.create_sbom( | ||
| 1362 | d, toolchain_outputname, sorted(list(files)), [rootfs_objset] | ||
| 1363 | ) | ||
| 1364 | |||
| 1365 | oe.sbom30.write_jsonld_doc( | ||
| 1366 | d, objset, sdk_deploydir / (toolchain_outputname + ".spdx.json") | ||
| 1367 | ) | ||
diff --git a/meta/lib/oe/spdx_common.py b/meta/lib/oe/spdx_common.py deleted file mode 100644 index c2dec65563..0000000000 --- a/meta/lib/oe/spdx_common.py +++ /dev/null | |||
| @@ -1,285 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | import bb | ||
| 8 | import collections | ||
| 9 | import json | ||
| 10 | import oe.packagedata | ||
| 11 | import re | ||
| 12 | import shutil | ||
| 13 | |||
| 14 | from pathlib import Path | ||
| 15 | from dataclasses import dataclass | ||
| 16 | |||
| 17 | LIC_REGEX = re.compile( | ||
| 18 | rb"^\W*SPDX-License-Identifier:\s*([ \w\d.()+-]+?)(?:\s+\W*)?$", | ||
| 19 | re.MULTILINE, | ||
| 20 | ) | ||
| 21 | |||
| 22 | |||
| 23 | def extract_licenses(filename): | ||
| 24 | """ | ||
| 25 | Extract SPDX License identifiers from a file | ||
| 26 | """ | ||
| 27 | try: | ||
| 28 | with open(filename, "rb") as f: | ||
| 29 | size = min(15000, os.stat(filename).st_size) | ||
| 30 | txt = f.read(size) | ||
| 31 | licenses = re.findall(LIC_REGEX, txt) | ||
| 32 | if licenses: | ||
| 33 | ascii_licenses = [lic.decode("ascii") for lic in licenses] | ||
| 34 | return ascii_licenses | ||
| 35 | except Exception as e: | ||
| 36 | bb.warn(f"Exception reading {filename}: {e}") | ||
| 37 | return [] | ||
| 38 | |||
| 39 | |||
| 40 | def is_work_shared_spdx(d): | ||
| 41 | return '/work-shared/' in d.getVar('S') | ||
| 42 | |||
| 43 | |||
| 44 | def load_spdx_license_data(d): | ||
| 45 | with open(d.getVar("SPDX_LICENSES"), "r") as f: | ||
| 46 | data = json.load(f) | ||
| 47 | # Transform the license array to a dictionary | ||
| 48 | data["licenses"] = {l["licenseId"]: l for l in data["licenses"]} | ||
| 49 | |||
| 50 | return data | ||
| 51 | |||
| 52 | |||
| 53 | def process_sources(d): | ||
| 54 | """ | ||
| 55 | Returns True if the sources for this recipe should be included in the SPDX | ||
| 56 | or False if not | ||
| 57 | """ | ||
| 58 | pn = d.getVar("PN") | ||
| 59 | assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split() | ||
| 60 | if pn in assume_provided: | ||
| 61 | for p in d.getVar("PROVIDES").split(): | ||
| 62 | if p != pn: | ||
| 63 | pn = p | ||
| 64 | break | ||
| 65 | |||
| 66 | # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted, | ||
| 67 | # so avoid archiving source here. | ||
| 68 | if pn.startswith("glibc-locale"): | ||
| 69 | return False | ||
| 70 | if d.getVar("PN") == "libtool-cross": | ||
| 71 | return False | ||
| 72 | if d.getVar("PN") == "libgcc-initial": | ||
| 73 | return False | ||
| 74 | if d.getVar("PN") == "shadow-sysroot": | ||
| 75 | return False | ||
| 76 | |||
| 77 | return True | ||
| 78 | |||
| 79 | |||
| 80 | @dataclass(frozen=True) | ||
| 81 | class Dep(object): | ||
| 82 | pn: str | ||
| 83 | hashfn: str | ||
| 84 | in_taskhash: bool | ||
| 85 | |||
| 86 | |||
| 87 | def collect_direct_deps(d, dep_task): | ||
| 88 | """ | ||
| 89 | Find direct dependencies of current task | ||
| 90 | |||
| 91 | Returns the list of recipes that have a dep_task that the current task | ||
| 92 | depends on | ||
| 93 | """ | ||
| 94 | current_task = "do_" + d.getVar("BB_CURRENTTASK") | ||
| 95 | pn = d.getVar("PN") | ||
| 96 | |||
| 97 | taskdepdata = d.getVar("BB_TASKDEPDATA", False) | ||
| 98 | |||
| 99 | for this_dep in taskdepdata.values(): | ||
| 100 | if this_dep[0] == pn and this_dep[1] == current_task: | ||
| 101 | break | ||
| 102 | else: | ||
| 103 | bb.fatal(f"Unable to find this {pn}:{current_task} in taskdepdata") | ||
| 104 | |||
| 105 | deps = set() | ||
| 106 | |||
| 107 | for dep_name in this_dep.deps: | ||
| 108 | dep_data = taskdepdata[dep_name] | ||
| 109 | if dep_data.taskname == dep_task and dep_data.pn != pn: | ||
| 110 | deps.add((dep_data.pn, dep_data.hashfn, dep_name in this_dep.taskhash_deps)) | ||
| 111 | |||
| 112 | return sorted(deps) | ||
| 113 | |||
| 114 | |||
| 115 | def get_spdx_deps(d): | ||
| 116 | """ | ||
| 117 | Reads the SPDX dependencies JSON file and returns the data | ||
| 118 | """ | ||
| 119 | spdx_deps_file = Path(d.getVar("SPDXDEPS")) | ||
| 120 | |||
| 121 | deps = [] | ||
| 122 | with spdx_deps_file.open("r") as f: | ||
| 123 | for d in json.load(f): | ||
| 124 | deps.append(Dep(*d)) | ||
| 125 | return deps | ||
| 126 | |||
| 127 | |||
| 128 | def collect_package_providers(d): | ||
| 129 | """ | ||
| 130 | Returns a dictionary where each RPROVIDES is mapped to the package that | ||
| 131 | provides it | ||
| 132 | """ | ||
| 133 | deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX")) | ||
| 134 | |||
| 135 | providers = {} | ||
| 136 | |||
| 137 | deps = collect_direct_deps(d, "do_create_spdx") | ||
| 138 | deps.append((d.getVar("PN"), d.getVar("BB_HASHFILENAME"), True)) | ||
| 139 | |||
| 140 | for dep_pn, dep_hashfn, _ in deps: | ||
| 141 | localdata = d | ||
| 142 | recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata) | ||
| 143 | if not recipe_data: | ||
| 144 | localdata = bb.data.createCopy(d) | ||
| 145 | localdata.setVar("PKGDATA_DIR", "${PKGDATA_DIR_SDK}") | ||
| 146 | recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata) | ||
| 147 | |||
| 148 | for pkg in recipe_data.get("PACKAGES", "").split(): | ||
| 149 | pkg_data = oe.packagedata.read_subpkgdata_dict(pkg, localdata) | ||
| 150 | rprovides = set( | ||
| 151 | n | ||
| 152 | for n, _ in bb.utils.explode_dep_versions2( | ||
| 153 | pkg_data.get("RPROVIDES", "") | ||
| 154 | ).items() | ||
| 155 | ) | ||
| 156 | rprovides.add(pkg) | ||
| 157 | |||
| 158 | if "PKG" in pkg_data: | ||
| 159 | pkg = pkg_data["PKG"] | ||
| 160 | rprovides.add(pkg) | ||
| 161 | |||
| 162 | for r in rprovides: | ||
| 163 | providers[r] = (pkg, dep_hashfn) | ||
| 164 | |||
| 165 | return providers | ||
| 166 | |||
| 167 | |||
| 168 | def get_patched_src(d): | ||
| 169 | """ | ||
| 170 | Save patched source of the recipe in SPDX_WORKDIR. | ||
| 171 | """ | ||
| 172 | spdx_workdir = d.getVar("SPDXWORK") | ||
| 173 | spdx_sysroot_native = d.getVar("STAGING_DIR_NATIVE") | ||
| 174 | pn = d.getVar("PN") | ||
| 175 | |||
| 176 | workdir = d.getVar("WORKDIR") | ||
| 177 | |||
| 178 | try: | ||
| 179 | # The kernel class functions require it to be on work-shared, so we don't change WORKDIR | ||
| 180 | if not is_work_shared_spdx(d): | ||
| 181 | # Change WORKDIR so that do_unpack and do_patch run in another directory. | ||
| 182 | d.setVar("WORKDIR", spdx_workdir) | ||
| 183 | # Restore the original path to the recipe's native sysroot (it's relative to WORKDIR). | ||
| 184 | d.setVar("STAGING_DIR_NATIVE", spdx_sysroot_native) | ||
| 185 | |||
| 186 | # Changing 'WORKDIR' also changes 'B', so create the 'B' directory in case | ||
| 187 | # any of the following tasks require it (e.g. some recipes' do_patch | ||
| 188 | # expects 'B' to exist). | ||
| 189 | bb.utils.mkdirhier(d.getVar("B")) | ||
| 190 | |||
| 191 | bb.build.exec_func("do_unpack", d) | ||
| 192 | |||
| 193 | if d.getVar("SRC_URI") != "": | ||
| 194 | if bb.data.inherits_class('dos2unix', d): | ||
| 195 | bb.build.exec_func('do_convert_crlf_to_lf', d) | ||
| 196 | bb.build.exec_func("do_patch", d) | ||
| 197 | |||
| 198 | # Copy source from work-share to spdx_workdir | ||
| 199 | if is_work_shared_spdx(d): | ||
| 200 | share_src = d.getVar('S') | ||
| 201 | d.setVar("WORKDIR", spdx_workdir) | ||
| 202 | d.setVar("STAGING_DIR_NATIVE", spdx_sysroot_native) | ||
| 203 | # Copy the source to ${SPDXWORK}, using the same basename directory as ${S}. | ||
| 204 | src_dir = ( | ||
| 205 | spdx_workdir | ||
| 206 | + "/" | ||
| 207 | + os.path.basename(share_src) | ||
| 208 | ) | ||
| 209 | # For kernel source, rename suffix dir 'kernel-source' | ||
| 210 | # to ${BP} (${BPN}-${PV}) | ||
| 211 | if bb.data.inherits_class("kernel", d): | ||
| 212 | src_dir = spdx_workdir + "/" + d.getVar('BP') | ||
| 213 | |||
| 214 | bb.note(f"copyhardlinktree {share_src} to {src_dir}") | ||
| 215 | oe.path.copyhardlinktree(share_src, src_dir) | ||
| 216 | |||
| 217 | # Some userland recipes have no source. | ||
| 218 | if not os.path.exists(spdx_workdir): | ||
| 219 | bb.utils.mkdirhier(spdx_workdir) | ||
| 220 | finally: | ||
| 221 | d.setVar("WORKDIR", workdir) | ||
| 222 | |||
| 223 | |||
| 224 | def has_task(d, task): | ||
| 225 | return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False)) | ||
| 226 | |||
| 227 | |||
| 228 | def fetch_data_to_uri(fd, name): | ||
| 229 | """ | ||
| 230 | Translates a bitbake FetchData to a string URI | ||
| 231 | """ | ||
| 232 | uri = fd.type | ||
| 233 | # Map gitsm to git, since gitsm:// is not a valid URI protocol | ||
| 234 | if uri == "gitsm": | ||
| 235 | uri = "git" | ||
| 236 | proto = getattr(fd, "proto", None) | ||
| 237 | if proto is not None: | ||
| 238 | uri = uri + "+" + proto | ||
| 239 | uri = uri + "://" + fd.host + fd.path | ||
| 240 | |||
| 241 | if fd.method.supports_srcrev(): | ||
| 242 | uri = uri + "@" + fd.revision | ||
| 243 | |||
| 244 | return uri | ||
| 245 | |||
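As a quick illustration of fetch_data_to_uri() above, here is a sketch using a stand-in for a bb.fetch2 FetchData object with only the attributes the function reads (all values are assumed, not taken from a real fetcher):

    from types import SimpleNamespace

    fd = SimpleNamespace(
        type="gitsm",  # mapped to "git", since gitsm:// is not a valid URI scheme
        proto="https",
        host="github.com",
        path="/example/project.git",
        revision="0123abcd",
        method=SimpleNamespace(supports_srcrev=lambda: True),
    )
    print(fetch_data_to_uri(fd, "project"))
    # -> git+https://github.com/example/project.git@0123abcd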
| 246 | def is_compiled_source(filename, compiled_sources, types): | ||
| 247 | """ | ||
| 248 | Check if the file is a compiled file | ||
| 249 | """ | ||
| 250 | import os | ||
| 251 | # If we don't have a list of compiled sources, assume all files are compiled. | ||
| 252 | if not compiled_sources: | ||
| 253 | return True | ||
| 254 | |||
| 255 | # We always return True if the file type is not in the list of compiled file types. | ||
| 256 | # Some files in the source directory are not compiled, for example Makefiles, | ||
| 257 | # but also Python .py files. We need to include them in the SPDX. | ||
| 258 | basename = os.path.basename(filename) | ||
| 259 | ext = basename.partition(".")[2] | ||
| 260 | if ext not in types: | ||
| 261 | return True | ||
| 262 | # Check that the file is in the list | ||
| 263 | return filename in compiled_sources | ||
| 264 | |||
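For illustration, a few example calls to is_compiled_source() with assumed inputs; note that basename.partition(".")[2] keeps everything after the first dot, so "archive.tar.gz" yields the type "tar.gz":

    print(is_compiled_source("src/foo.c", {"src/foo.c"}, {"c", "h"}))  # True: compiled type and listed
    print(is_compiled_source("src/bar.c", {"src/foo.c"}, {"c", "h"}))  # False: compiled type, not listed
    print(is_compiled_source("Makefile", {"src/foo.c"}, {"c", "h"}))   # True: not a compiled file type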
| 265 | def get_compiled_sources(d): | ||
| 266 | """ | ||
| 267 | Get list of compiled sources from debug information and normalize the paths | ||
| 268 | """ | ||
| 269 | import itertools | ||
| 270 | source_info = oe.package.read_debugsources_info(d) | ||
| 271 | if not source_info: | ||
| 272 | bb.debug(1, "Do not have debugsources.list. Skipping") | ||
| 273 | return [], [] | ||
| 274 | |||
| 275 | # Sources are currently not split per package in the SPDX, so we aggregate them | ||
| 276 | sources = set(itertools.chain.from_iterable(source_info.values())) | ||
| 277 | # Check extensions of files | ||
| 278 | types = set() | ||
| 279 | for src in sources: | ||
| 280 | basename = os.path.basename(src) | ||
| 281 | ext = basename.partition(".")[2] | ||
| 282 | if ext not in types and ext: | ||
| 283 | types.add(ext) | ||
| 284 | bb.debug(1, f"Num of sources: {len(sources)} and types: {len(types)} {str(types)}") | ||
| 285 | return sources, types | ||
diff --git a/meta/lib/oe/sstatesig.py b/meta/lib/oe/sstatesig.py deleted file mode 100644 index ef687f5d41..0000000000 --- a/meta/lib/oe/sstatesig.py +++ /dev/null | |||
| @@ -1,736 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | import bb.parse | ||
| 7 | import bb.siggen | ||
| 8 | import bb.runqueue | ||
| 9 | import oe | ||
| 10 | import netrc | ||
| 11 | |||
| 12 | def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCaches): | ||
| 13 | # Return True if we should keep the dependency, False to drop it | ||
| 14 | def isNative(x): | ||
| 15 | return x.endswith("-native") | ||
| 16 | def isCross(x): | ||
| 17 | return "-cross-" in x | ||
| 18 | def isNativeSDK(x): | ||
| 19 | return x.startswith("nativesdk-") | ||
| 20 | def isKernel(mc, fn): | ||
| 21 | inherits = " ".join(dataCaches[mc].inherits[fn]) | ||
| 22 | return inherits.find("/module-base.bbclass") != -1 or inherits.find("/linux-kernel-base.bbclass") != -1 | ||
| 23 | def isPackageGroup(mc, fn): | ||
| 24 | inherits = " ".join(dataCaches[mc].inherits[fn]) | ||
| 25 | return "/packagegroup.bbclass" in inherits | ||
| 26 | def isAllArch(mc, fn): | ||
| 27 | inherits = " ".join(dataCaches[mc].inherits[fn]) | ||
| 28 | return "/allarch.bbclass" in inherits | ||
| 29 | def isImage(mc, fn): | ||
| 30 | return "/image.bbclass" in " ".join(dataCaches[mc].inherits[fn]) | ||
| 31 | |||
| 32 | depmc, _, deptaskname, depmcfn = bb.runqueue.split_tid_mcfn(dep) | ||
| 33 | mc, _ = bb.runqueue.split_mc(fn) | ||
| 34 | |||
| 35 | # We can skip the rm_work task signature to avoid running the task | ||
| 36 | # when we remove some tasks from the dependency chain, | ||
| 37 | # e.g. INHERIT:remove = "create-spdx" will trigger do_rm_work | ||
| 38 | if task == "do_rm_work": | ||
| 39 | return False | ||
| 40 | |||
| 41 | # (Almost) always include our own inter-task dependencies (unless they come | ||
| 42 | # from an mcdepends). The exception is the special | ||
| 43 | # do_kernel_configme->do_unpack_and_patch dependency from archiver.bbclass. | ||
| 44 | if recipename == depname and depmc == mc: | ||
| 45 | if task == "do_kernel_configme" and deptaskname == "do_unpack_and_patch": | ||
| 46 | return False | ||
| 47 | return True | ||
| 48 | |||
| 49 | # Exclude well defined recipe->dependency | ||
| 50 | if "%s->%s" % (recipename, depname) in siggen.saferecipedeps: | ||
| 51 | return False | ||
| 52 | |||
| 53 | # Check for special wildcard | ||
| 54 | if "*->%s" % depname in siggen.saferecipedeps and recipename != depname: | ||
| 55 | return False | ||
| 56 | |||
| 57 | # Don't change native/cross/nativesdk recipe dependencies any further | ||
| 58 | if isNative(recipename) or isCross(recipename) or isNativeSDK(recipename): | ||
| 59 | return True | ||
| 60 | |||
| 61 | # Only target packages beyond here | ||
| 62 | |||
| 63 | # allarch packagegroups are assumed to have well-behaved names which don't change between architectures/tunes | ||
| 64 | if isPackageGroup(mc, fn) and isAllArch(mc, fn) and not isNative(depname): | ||
| 65 | return False | ||
| 66 | |||
| 67 | # Exclude well defined machine specific configurations which don't change ABI | ||
| 68 | if depname in siggen.abisaferecipes and not isImage(mc, fn): | ||
| 69 | return False | ||
| 70 | |||
| 71 | # Kernel modules are well namespaced. We don't want to depend on the kernel's checksum | ||
| 72 | # if we're just doing an RRECOMMENDS:xxx = "kernel-module-*", not least because the checksum | ||
| 73 | # is machine specific. | ||
| 74 | # Therefore if we're not a kernel or a module recipe (inheriting the kernel classes) | ||
| 75 | # and we recommend a kernel-module, we exclude the dependency. | ||
| 76 | if dataCaches and isKernel(depmc, depmcfn) and not isKernel(mc, fn): | ||
| 77 | for pkg in dataCaches[mc].runrecs[fn]: | ||
| 78 | if " ".join(dataCaches[mc].runrecs[fn][pkg]).find("kernel-module-") != -1: | ||
| 79 | return False | ||
| 80 | |||
| 81 | # Default to keep dependencies | ||
| 82 | return True | ||
| 83 | |||
| 84 | def sstate_lockedsigs(d): | ||
| 85 | sigs = {} | ||
| 86 | types = (d.getVar("SIGGEN_LOCKEDSIGS_TYPES") or "").split() | ||
| 87 | for t in types: | ||
| 88 | siggen_lockedsigs_var = "SIGGEN_LOCKEDSIGS_%s" % t | ||
| 89 | lockedsigs = (d.getVar(siggen_lockedsigs_var) or "").split() | ||
| 90 | for ls in lockedsigs: | ||
| 91 | pn, task, h = ls.split(":", 2) | ||
| 92 | if pn not in sigs: | ||
| 93 | sigs[pn] = {} | ||
| 94 | sigs[pn][task] = [h, siggen_lockedsigs_var] | ||
| 95 | return sigs | ||
| 96 | |||
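The locked signature variables parsed here follow the format written out by dump_lockedsigs() below. A small sketch of what sstate_lockedsigs() returns for an assumed datastore (the FakeData class and all values are illustrative only):

    class FakeData:
        def __init__(self, values):
            self.values = values

        def getVar(self, name):
            return self.values.get(name)

    d = FakeData({
        "SIGGEN_LOCKEDSIGS_TYPES": "t-core2-64",
        "SIGGEN_LOCKEDSIGS_t-core2-64": "zlib:do_configure:1234abcd",
    })
    print(sstate_lockedsigs(d))
    # -> {'zlib': {'do_configure': ['1234abcd', 'SIGGEN_LOCKEDSIGS_t-core2-64']}}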
| 97 | def lockedsigs_unihashmap(d): | ||
| 98 | unihashmap = {} | ||
| 99 | data = (d.getVar("SIGGEN_UNIHASHMAP") or "").split() | ||
| 100 | for entry in data: | ||
| 101 | pn, task, taskhash, unihash = entry.split(":") | ||
| 102 | unihashmap[(pn, task)] = (taskhash, unihash) | ||
| 103 | return unihashmap | ||
| 104 | |||
| 105 | class SignatureGeneratorOEBasicHashMixIn(object): | ||
| 106 | supports_multiconfig_datacaches = True | ||
| 107 | |||
| 108 | def init_rundepcheck(self, data): | ||
| 109 | self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split() | ||
| 110 | self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split() | ||
| 111 | self.lockedsigs = sstate_lockedsigs(data) | ||
| 112 | self.unihashmap = lockedsigs_unihashmap(data) | ||
| 113 | self.lockedhashes = {} | ||
| 114 | self.lockedpnmap = {} | ||
| 115 | self.lockedhashfn = {} | ||
| 116 | self.machine = data.getVar("MACHINE") | ||
| 117 | self.mismatch_msgs = [] | ||
| 118 | self.mismatch_number = 0 | ||
| 119 | self.lockedsigs_msgs = "" | ||
| 120 | self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES") or | ||
| 121 | "").split() | ||
| 122 | self.unlockedrecipes = { k: "" for k in self.unlockedrecipes } | ||
| 123 | self._internal = False | ||
| 124 | pass | ||
| 125 | |||
| 126 | def tasks_resolved(self, virtmap, virtpnmap, dataCache): | ||
| 127 | # Translate virtual/xxx entries to PN values | ||
| 128 | newabisafe = [] | ||
| 129 | for a in self.abisaferecipes: | ||
| 130 | if a in virtpnmap: | ||
| 131 | newabisafe.append(virtpnmap[a]) | ||
| 132 | else: | ||
| 133 | newabisafe.append(a) | ||
| 134 | self.abisaferecipes = newabisafe | ||
| 135 | newsafedeps = [] | ||
| 136 | for a in self.saferecipedeps: | ||
| 137 | a1, a2 = a.split("->") | ||
| 138 | if a1 in virtpnmap: | ||
| 139 | a1 = virtpnmap[a1] | ||
| 140 | if a2 in virtpnmap: | ||
| 141 | a2 = virtpnmap[a2] | ||
| 142 | newsafedeps.append(a1 + "->" + a2) | ||
| 143 | self.saferecipedeps = newsafedeps | ||
| 144 | |||
| 145 | def rundep_check(self, fn, recipename, task, dep, depname, dataCaches = None): | ||
| 146 | return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCaches) | ||
| 147 | |||
| 148 | def get_taskdata(self): | ||
| 149 | return (self.lockedpnmap, self.lockedhashfn, self.lockedhashes) + super().get_taskdata() | ||
| 150 | |||
| 151 | def set_taskdata(self, data): | ||
| 152 | self.lockedpnmap, self.lockedhashfn, self.lockedhashes = data[:3] | ||
| 153 | super().set_taskdata(data[3:]) | ||
| 154 | |||
| 155 | def dump_sigs(self, dataCache, options): | ||
| 156 | if 'lockedsigs' in options: | ||
| 157 | sigfile = os.getcwd() + "/locked-sigs.inc" | ||
| 158 | bb.plain("Writing locked sigs to %s" % sigfile) | ||
| 159 | self.dump_lockedsigs(sigfile) | ||
| 160 | return super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigs(dataCache, options) | ||
| 161 | |||
| 162 | |||
| 163 | def get_taskhash(self, tid, deps, dataCaches): | ||
| 164 | if tid in self.lockedhashes: | ||
| 165 | if self.lockedhashes[tid]: | ||
| 166 | return self.lockedhashes[tid] | ||
| 167 | else: | ||
| 168 | return super().get_taskhash(tid, deps, dataCaches) | ||
| 169 | |||
| 170 | h = super().get_taskhash(tid, deps, dataCaches) | ||
| 171 | |||
| 172 | (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid) | ||
| 173 | |||
| 174 | recipename = dataCaches[mc].pkg_fn[fn] | ||
| 175 | self.lockedpnmap[fn] = recipename | ||
| 176 | self.lockedhashfn[fn] = dataCaches[mc].hashfn[fn] | ||
| 177 | |||
| 178 | unlocked = False | ||
| 179 | if recipename in self.unlockedrecipes: | ||
| 180 | unlocked = True | ||
| 181 | else: | ||
| 182 | def recipename_from_dep(dep): | ||
| 183 | (depmc, _, _, depfn) = bb.runqueue.split_tid_mcfn(dep) | ||
| 184 | return dataCaches[depmc].pkg_fn[depfn] | ||
| 185 | |||
| 186 | # If any unlocked recipe is in the direct dependencies then the | ||
| 187 | # current recipe should be unlocked as well. | ||
| 188 | depnames = [ recipename_from_dep(x) for x in deps if mc == bb.runqueue.mc_from_tid(x)] | ||
| 189 | if any(x in y for y in depnames for x in self.unlockedrecipes): | ||
| 190 | self.unlockedrecipes[recipename] = '' | ||
| 191 | unlocked = True | ||
| 192 | |||
| 193 | if not unlocked and recipename in self.lockedsigs: | ||
| 194 | if task in self.lockedsigs[recipename]: | ||
| 195 | h_locked = self.lockedsigs[recipename][task][0] | ||
| 196 | var = self.lockedsigs[recipename][task][1] | ||
| 197 | self.lockedhashes[tid] = h_locked | ||
| 198 | self._internal = True | ||
| 199 | unihash = self.get_unihash(tid) | ||
| 200 | self._internal = False | ||
| 201 | #bb.warn("Using %s %s %s" % (recipename, task, h)) | ||
| 202 | |||
| 203 | if h != h_locked and h_locked != unihash: | ||
| 204 | self.mismatch_number += 1 | ||
| 205 | self.mismatch_msgs.append('The %s:%s sig is computed to be %s, but the sig is locked to %s in %s' | ||
| 206 | % (recipename, task, h, h_locked, var)) | ||
| 207 | |||
| 208 | return h_locked | ||
| 209 | |||
| 210 | self.lockedhashes[tid] = False | ||
| 211 | #bb.warn("%s %s %s" % (recipename, task, h)) | ||
| 212 | return h | ||
| 213 | |||
| 214 | def get_stampfile_hash(self, tid): | ||
| 215 | if tid in self.lockedhashes and self.lockedhashes[tid]: | ||
| 216 | return self.lockedhashes[tid] | ||
| 217 | return super().get_stampfile_hash(tid) | ||
| 218 | |||
| 219 | def get_cached_unihash(self, tid): | ||
| 220 | if tid in self.lockedhashes and self.lockedhashes[tid] and not self._internal: | ||
| 221 | return self.lockedhashes[tid] | ||
| 222 | |||
| 223 | (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid) | ||
| 224 | recipename = self.lockedpnmap[fn] | ||
| 225 | |||
| 226 | if (recipename, task) in self.unihashmap: | ||
| 227 | taskhash, unihash = self.unihashmap[(recipename, task)] | ||
| 228 | if taskhash == self.taskhash[tid]: | ||
| 229 | return unihash | ||
| 230 | |||
| 231 | return super().get_cached_unihash(tid) | ||
| 232 | |||
| 233 | def dump_sigtask(self, fn, task, stampbase, runtime): | ||
| 234 | tid = fn + ":" + task | ||
| 235 | if tid in self.lockedhashes and self.lockedhashes[tid]: | ||
| 236 | return | ||
| 237 | super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigtask(fn, task, stampbase, runtime) | ||
| 238 | |||
| 239 | def dump_lockedsigs(self, sigfile, taskfilter=None): | ||
| 240 | types = {} | ||
| 241 | unihashmap = {} | ||
| 242 | for tid in self.runtaskdeps: | ||
| 243 | # Bitbake changed this to a tuple in newer versions | ||
| 244 | if isinstance(tid, tuple): | ||
| 245 | tid = tid[1] | ||
| 246 | if taskfilter: | ||
| 247 | if not tid in taskfilter: | ||
| 248 | continue | ||
| 249 | (_, _, task, fn) = bb.runqueue.split_tid_mcfn(tid) | ||
| 250 | t = self.lockedhashfn[fn].split(" ")[1].split(":")[5] | ||
| 251 | t = 't-' + t.replace('_', '-') | ||
| 252 | if t not in types: | ||
| 253 | types[t] = [] | ||
| 254 | types[t].append(tid) | ||
| 255 | |||
| 256 | taskhash = self.taskhash[tid] | ||
| 257 | unihash = self.get_unihash(tid) | ||
| 258 | if taskhash != unihash: | ||
| 259 | unihashmap[tid] = " " + self.lockedpnmap[fn] + ":" + task + ":" + taskhash + ":" + unihash | ||
| 260 | |||
| 261 | with open(sigfile, "w") as f: | ||
| 262 | l = sorted(types) | ||
| 263 | for t in l: | ||
| 264 | f.write('SIGGEN_LOCKEDSIGS_%s = "\\\n' % t) | ||
| 265 | types[t].sort() | ||
| 266 | sortedtid = sorted(types[t], key=lambda tid: self.lockedpnmap[bb.runqueue.fn_from_tid(tid)]) | ||
| 267 | for tid in sortedtid: | ||
| 268 | (_, _, task, fn) = bb.runqueue.split_tid_mcfn(tid) | ||
| 269 | if tid not in self.taskhash: | ||
| 270 | continue | ||
| 271 | f.write(" " + self.lockedpnmap[fn] + ":" + task + ":" + self.get_unihash(tid) + " \\\n") | ||
| 272 | f.write(' "\n') | ||
| 273 | f.write('SIGGEN_LOCKEDSIGS_TYPES:%s = "%s"\n' % (self.machine, " ".join(l))) | ||
| 274 | f.write('SIGGEN_UNIHASHMAP += "\\\n') | ||
| 275 | sortedtid = sorted(unihashmap, key=lambda tid: self.lockedpnmap[bb.runqueue.fn_from_tid(tid)]) | ||
| 276 | for tid in sortedtid: | ||
| 277 | f.write(unihashmap[tid] + " \\\n") | ||
| 278 | f.write(' "\n') | ||
| 279 | |||
| 280 | def dump_siglist(self, sigfile, path_prefix_strip=None): | ||
| 281 | def strip_fn(fn): | ||
| 282 | nonlocal path_prefix_strip | ||
| 283 | if not path_prefix_strip: | ||
| 284 | return fn | ||
| 285 | |||
| 286 | fn_exp = fn.split(":") | ||
| 287 | if fn_exp[-1].startswith(path_prefix_strip): | ||
| 288 | fn_exp[-1] = fn_exp[-1][len(path_prefix_strip):] | ||
| 289 | |||
| 290 | return ":".join(fn_exp) | ||
| 291 | |||
| 292 | with open(sigfile, "w") as f: | ||
| 293 | tasks = [] | ||
| 294 | for taskitem in self.taskhash: | ||
| 295 | (fn, task) = taskitem.rsplit(":", 1) | ||
| 296 | pn = self.lockedpnmap[fn] | ||
| 297 | tasks.append((pn, task, strip_fn(fn), self.taskhash[taskitem])) | ||
| 298 | for (pn, task, fn, taskhash) in sorted(tasks): | ||
| 299 | f.write('%s:%s %s %s\n' % (pn, task, fn, taskhash)) | ||
| 300 | |||
| 301 | def checkhashes(self, sq_data, missed, found, d): | ||
| 302 | warn_msgs = [] | ||
| 303 | error_msgs = [] | ||
| 304 | sstate_missing_msgs = [] | ||
| 305 | info_msgs = None | ||
| 306 | |||
| 307 | if self.lockedsigs: | ||
| 308 | if len(self.lockedsigs) > 10: | ||
| 309 | self.lockedsigs_msgs = "There are %s recipes with locked tasks (%s task(s) have non matching signature)" % (len(self.lockedsigs), self.mismatch_number) | ||
| 310 | else: | ||
| 311 | self.lockedsigs_msgs = "The following recipes have locked tasks:" | ||
| 312 | for pn in self.lockedsigs: | ||
| 313 | self.lockedsigs_msgs += " %s" % (pn) | ||
| 314 | |||
| 315 | for tid in sq_data['hash']: | ||
| 316 | if tid not in found: | ||
| 317 | for pn in self.lockedsigs: | ||
| 318 | taskname = bb.runqueue.taskname_from_tid(tid) | ||
| 319 | if sq_data['hash'][tid] in iter(self.lockedsigs[pn].values()): | ||
| 320 | if taskname == 'do_shared_workdir': | ||
| 321 | continue | ||
| 322 | sstate_missing_msgs.append("Locked sig is set for %s:%s (%s) yet not in sstate cache?" | ||
| 323 | % (pn, taskname, sq_data['hash'][tid])) | ||
| 324 | |||
| 325 | checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK") | ||
| 326 | if checklevel == 'info': | ||
| 327 | info_msgs = self.lockedsigs_msgs | ||
| 328 | if checklevel == 'warn' or checklevel == 'info': | ||
| 329 | warn_msgs += self.mismatch_msgs | ||
| 330 | elif checklevel == 'error': | ||
| 331 | error_msgs += self.mismatch_msgs | ||
| 332 | |||
| 333 | checklevel = d.getVar("SIGGEN_LOCKEDSIGS_SSTATE_EXISTS_CHECK") | ||
| 334 | if checklevel == 'warn': | ||
| 335 | warn_msgs += sstate_missing_msgs | ||
| 336 | elif checklevel == 'error': | ||
| 337 | error_msgs += sstate_missing_msgs | ||
| 338 | |||
| 339 | if info_msgs: | ||
| 340 | bb.note(info_msgs) | ||
| 341 | if warn_msgs: | ||
| 342 | bb.warn("\n".join(warn_msgs)) | ||
| 343 | if error_msgs: | ||
| 344 | bb.fatal("\n".join(error_msgs)) | ||
| 345 | |||
| 346 | class SignatureGeneratorOEBasicHash(SignatureGeneratorOEBasicHashMixIn, bb.siggen.SignatureGeneratorBasicHash): | ||
| 347 | name = "OEBasicHash" | ||
| 348 | |||
| 349 | class SignatureGeneratorOEEquivHash(SignatureGeneratorOEBasicHashMixIn, bb.siggen.SignatureGeneratorUniHashMixIn, bb.siggen.SignatureGeneratorBasicHash): | ||
| 350 | name = "OEEquivHash" | ||
| 351 | |||
| 352 | def init_rundepcheck(self, data): | ||
| 353 | super().init_rundepcheck(data) | ||
| 354 | self.server = data.getVar('BB_HASHSERVE') | ||
| 355 | if not self.server: | ||
| 356 | bb.fatal("OEEquivHash requires BB_HASHSERVE to be set") | ||
| 357 | self.method = data.getVar('SSTATE_HASHEQUIV_METHOD') | ||
| 358 | if not self.method: | ||
| 359 | bb.fatal("OEEquivHash requires SSTATE_HASHEQUIV_METHOD to be set") | ||
| 360 | self.username = data.getVar("BB_HASHSERVE_USERNAME") | ||
| 361 | self.password = data.getVar("BB_HASHSERVE_PASSWORD") | ||
| 362 | if not self.username or not self.password: | ||
| 363 | try: | ||
| 364 | n = netrc.netrc() | ||
| 365 | auth = n.authenticators(self.server) | ||
| 366 | if auth is not None: | ||
| 367 | self.username, _, self.password = auth | ||
| 368 | except FileNotFoundError: | ||
| 369 | pass | ||
| 370 | except netrc.NetrcParseError as e: | ||
| 371 | bb.warn("Error parsing %s:%s: %s" % (e.filename, str(e.lineno), e.msg)) | ||
| 372 | |||
| 373 | # Insert these classes into siggen's namespace so it can see and select them | ||
| 374 | bb.siggen.SignatureGeneratorOEBasicHash = SignatureGeneratorOEBasicHash | ||
| 375 | bb.siggen.SignatureGeneratorOEEquivHash = SignatureGeneratorOEEquivHash | ||
| 376 | |||
| 377 | |||
| 378 | def find_siginfo(pn, taskname, taskhashlist, d): | ||
| 379 | """ Find signature data files for comparison purposes """ | ||
| 380 | |||
| 381 | import fnmatch | ||
| 382 | import glob | ||
| 383 | |||
| 384 | if not taskname: | ||
| 385 | # We have to derive pn and taskname | ||
| 386 | key = pn | ||
| 387 | if key.startswith("mc:"): | ||
| 388 | # mc:<mc>:<pn>:<task> | ||
| 389 | _, _, pn, taskname = key.split(':', 3) | ||
| 390 | else: | ||
| 391 | # <pn>:<task> | ||
| 392 | pn, taskname = key.split(':', 1) | ||
| 393 | |||
| 394 | hashfiles = {} | ||
| 395 | |||
| 396 | def get_hashval(siginfo): | ||
| 397 | if siginfo.endswith('.siginfo'): | ||
| 398 | return siginfo.rpartition(':')[2].partition('_')[0] | ||
| 399 | else: | ||
| 400 | return siginfo.rpartition('.')[2] | ||
| 401 | |||
| 402 | def get_time(fullpath): | ||
| 403 | # NFS can end up in a weird state where the file exists but has no stat info. | ||
| 404 | # If that happens, we assume it doesn't actually exist and show a warning. | ||
| 405 | try: | ||
| 406 | return os.stat(fullpath).st_mtime | ||
| 407 | except FileNotFoundError: | ||
| 408 | bb.warn("Could not obtain mtime for {}".format(fullpath)) | ||
| 409 | return None | ||
| 410 | |||
| 411 | # First search in stamps dir | ||
| 412 | localdata = d.createCopy() | ||
| 413 | localdata.setVar('MULTIMACH_TARGET_SYS', '*') | ||
| 414 | localdata.setVar('PN', pn) | ||
| 415 | localdata.setVar('PV', '*') | ||
| 416 | localdata.setVar('PR', '*') | ||
| 417 | localdata.setVar('EXTENDPE', '') | ||
| 418 | stamp = localdata.getVar('STAMP') | ||
| 419 | if pn.startswith("gcc-source"): | ||
| 420 | # gcc-source shared workdir is a special case :( | ||
| 421 | stamp = localdata.expand("${STAMPS_DIR}/work-shared/gcc-${PV}-${PR}") | ||
| 422 | elif pn.startswith("llvm-project-source"): | ||
| 423 | # llvm-project-source shared workdir is also a special case :*( | ||
| 424 | stamp = localdata.expand("${STAMPS_DIR}/work-shared/llvm-project-source-${PV}-${PR}") | ||
| 425 | |||
| 426 | filespec = '%s.%s.sigdata.*' % (stamp, taskname) | ||
| 427 | foundall = False | ||
| 428 | import glob | ||
| 429 | bb.debug(1, "Calling glob.glob on {}".format(filespec)) | ||
| 430 | for fullpath in glob.glob(filespec): | ||
| 431 | match = False | ||
| 432 | if taskhashlist: | ||
| 433 | for taskhash in taskhashlist: | ||
| 434 | if fullpath.endswith('.%s' % taskhash): | ||
| 435 | mtime = get_time(fullpath) | ||
| 436 | if mtime: | ||
| 437 | hashfiles[taskhash] = {'path':fullpath, 'sstate':False, 'time':mtime} | ||
| 438 | if len(hashfiles) == len(taskhashlist): | ||
| 439 | foundall = True | ||
| 440 | break | ||
| 441 | else: | ||
| 442 | hashval = get_hashval(fullpath) | ||
| 443 | mtime = get_time(fullpath) | ||
| 444 | if mtime: | ||
| 445 | hashfiles[hashval] = {'path':fullpath, 'sstate':False, 'time':mtime} | ||
| 446 | |||
| 447 | if not taskhashlist or (len(hashfiles) < 2 and not foundall): | ||
| 448 | # That didn't work, look in sstate-cache | ||
| 449 | hashes = taskhashlist or ['?' * 64] | ||
| 450 | localdata = bb.data.createCopy(d) | ||
| 451 | for hashval in hashes: | ||
| 452 | localdata.setVar('PACKAGE_ARCH', '*') | ||
| 453 | localdata.setVar('TARGET_VENDOR', '*') | ||
| 454 | localdata.setVar('TARGET_OS', '*') | ||
| 455 | localdata.setVar('PN', pn) | ||
| 456 | # gcc-source is a special case, same as with local stamps above | ||
| 457 | if pn.startswith("gcc-source"): | ||
| 458 | localdata.setVar('PN', "gcc") | ||
| 459 | localdata.setVar('PV', '*') | ||
| 460 | localdata.setVar('PR', '*') | ||
| 461 | localdata.setVar('BB_TASKHASH', hashval) | ||
| 462 | localdata.setVar('SSTATE_CURRTASK', taskname[3:]) | ||
| 463 | swspec = localdata.getVar('SSTATE_SWSPEC') | ||
| 464 | if taskname in ['do_fetch', 'do_unpack', 'do_patch', 'do_populate_lic', 'do_preconfigure'] and swspec: | ||
| 465 | localdata.setVar('SSTATE_PKGSPEC', '${SSTATE_SWSPEC}') | ||
| 466 | elif pn.endswith('-native') or "-cross-" in pn or "-crosssdk-" in pn: | ||
| 467 | localdata.setVar('SSTATE_EXTRAPATH', "${NATIVELSBSTRING}/") | ||
| 468 | filespec = '%s.siginfo' % localdata.getVar('SSTATE_PKG') | ||
| 469 | |||
| 470 | bb.debug(1, "Calling glob.glob on {}".format(filespec)) | ||
| 471 | matchedfiles = glob.glob(filespec) | ||
| 472 | for fullpath in matchedfiles: | ||
| 473 | actual_hashval = get_hashval(fullpath) | ||
| 474 | if actual_hashval in hashfiles: | ||
| 475 | continue | ||
| 476 | mtime = get_time(fullpath) | ||
| 477 | if mtime: | ||
| 478 | hashfiles[actual_hashval] = {'path':fullpath, 'sstate':True, 'time':mtime} | ||
| 479 | |||
| 480 | return hashfiles | ||
| 481 | |||
| 482 | bb.siggen.find_siginfo = find_siginfo | ||
| 483 | bb.siggen.find_siginfo_version = 2 | ||
| 484 | |||
| 485 | |||
| 486 | def sstate_get_manifest_filename(task, d): | ||
| 487 | """ | ||
| 488 | Return the sstate manifest file path for a particular task. | ||
| 489 | Also returns the datastore that can be used to query related variables. | ||
| 490 | """ | ||
| 491 | d2 = d.createCopy() | ||
| 492 | extrainf = d.getVarFlag("do_" + task, 'stamp-extra-info') | ||
| 493 | if extrainf: | ||
| 494 | d2.setVar("SSTATE_MANMACH", extrainf) | ||
| 495 | return (d2.expand("${SSTATE_MANFILEPREFIX}.%s" % task), d2) | ||
| 496 | |||
| 497 | @bb.parse.vardepsexclude("BBEXTENDCURR", "BBEXTENDVARIANT", "OVERRIDES", "PACKAGE_EXTRA_ARCHS") | ||
| 498 | def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache): | ||
| 499 | d2 = d | ||
| 500 | variant = '' | ||
| 501 | curr_variant = '' | ||
| 502 | if d.getVar("BBEXTENDCURR") == "multilib": | ||
| 503 | curr_variant = d.getVar("BBEXTENDVARIANT") | ||
| 504 | if "virtclass-multilib" not in d.getVar("OVERRIDES"): | ||
| 505 | curr_variant = "invalid" | ||
| 506 | if taskdata2.startswith("virtual:multilib"): | ||
| 507 | variant = taskdata2.split(":")[2] | ||
| 508 | if curr_variant != variant: | ||
| 509 | if variant not in multilibcache: | ||
| 510 | multilibcache[variant] = oe.utils.get_multilib_datastore(variant, d) | ||
| 511 | d2 = multilibcache[variant] | ||
| 512 | |||
| 513 | if taskdata.endswith("-native"): | ||
| 514 | pkgarchs = ["${BUILD_ARCH}", "${BUILD_ARCH}_${ORIGNATIVELSBSTRING}"] | ||
| 515 | elif taskdata.startswith("nativesdk-"): | ||
| 516 | pkgarchs = ["${SDK_ARCH}_${SDK_OS}", "allarch"] | ||
| 517 | elif "-cross-canadian" in taskdata: | ||
| 518 | pkgarchs = ["${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}"] | ||
| 519 | elif "-cross-" in taskdata: | ||
| 520 | pkgarchs = ["${BUILD_ARCH}"] | ||
| 521 | elif "-crosssdk" in taskdata: | ||
| 522 | pkgarchs = ["${BUILD_ARCH}_${SDK_ARCH}_${SDK_OS}"] | ||
| 523 | else: | ||
| 524 | pkgarchs = ['${MACHINE_ARCH}'] | ||
| 525 | pkgarchs = pkgarchs + list(reversed(d2.getVar("PACKAGE_EXTRA_ARCHS").split())) | ||
| 526 | pkgarchs.append('allarch') | ||
| 527 | pkgarchs.append('${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}') | ||
| 528 | |||
| 529 | searched_manifests = [] | ||
| 530 | |||
| 531 | for pkgarch in pkgarchs: | ||
| 532 | manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-%s-%s.%s" % (pkgarch, taskdata, taskname)) | ||
| 533 | if os.path.exists(manifest): | ||
| 534 | return manifest, d2 | ||
| 535 | searched_manifests.append(manifest) | ||
| 536 | bb.fatal("The sstate manifest for task '%s:%s' (multilib variant '%s') could not be found.\nThe pkgarchs considered were: %s.\nBut none of these manifests exists:\n %s" | ||
| 537 | % (taskdata, taskname, variant, d2.expand(", ".join(pkgarchs)),"\n ".join(searched_manifests))) | ||
| 538 | return None, d2 | ||
| 539 | |||
| 540 | def OEOuthashBasic(path, sigfile, task, d): | ||
| 541 | """ | ||
| 542 | Basic output hash function | ||
| 543 | |||
| 544 | Calculates the output hash of a task by hashing all output file metadata, | ||
| 545 | and file contents. | ||
| 546 | """ | ||
| 547 | import hashlib | ||
| 548 | import stat | ||
| 549 | import pwd | ||
| 550 | import grp | ||
| 551 | import re | ||
| 552 | import fnmatch | ||
| 553 | |||
| 554 | def update_hash(s): | ||
| 555 | s = s.encode('utf-8') | ||
| 556 | h.update(s) | ||
| 557 | if sigfile: | ||
| 558 | sigfile.write(s) | ||
| 559 | |||
| 560 | h = hashlib.sha256() | ||
| 561 | prev_dir = os.getcwd() | ||
| 562 | corebase = d.getVar("COREBASE") | ||
| 563 | tmpdir = d.getVar("TMPDIR") | ||
| 564 | include_owners = os.environ.get('PSEUDO_DISABLED') == '0' | ||
| 565 | if "package_write_" in task or task == "package_qa": | ||
| 566 | include_owners = False | ||
| 567 | include_timestamps = False | ||
| 568 | include_root = True | ||
| 569 | if task == "package": | ||
| 570 | include_timestamps = True | ||
| 571 | include_root = False | ||
| 572 | source_date_epoch = float(d.getVar("SOURCE_DATE_EPOCH")) | ||
| 573 | hash_version = d.getVar('HASHEQUIV_HASH_VERSION') | ||
| 574 | extra_sigdata = d.getVar("HASHEQUIV_EXTRA_SIGDATA") | ||
| 575 | |||
| 576 | filemaps = {} | ||
| 577 | for m in (d.getVar('SSTATE_HASHEQUIV_FILEMAP') or '').split(): | ||
| 578 | entry = m.split(":") | ||
| 579 | if len(entry) != 3 or entry[0] != task: | ||
| 580 | continue | ||
| 581 | filemaps.setdefault(entry[1], []) | ||
| 582 | filemaps[entry[1]].append(entry[2]) | ||
| 583 | |||
| 584 | try: | ||
| 585 | os.chdir(path) | ||
| 586 | basepath = os.path.normpath(path) | ||
| 587 | |||
| 588 | update_hash("OEOuthashBasic\n") | ||
| 589 | if hash_version: | ||
| 590 | update_hash(hash_version + "\n") | ||
| 591 | |||
| 592 | if extra_sigdata: | ||
| 593 | update_hash(extra_sigdata + "\n") | ||
| 594 | |||
| 595 | # It is only currently useful to get equivalent hashes for things that | ||
| 596 | # can be restored from sstate. Since the sstate object is named using | ||
| 597 | # SSTATE_PKGSPEC and the task name, those should be included in the | ||
| 598 | # output hash calculation. | ||
| 599 | update_hash("SSTATE_PKGSPEC=%s\n" % d.getVar('SSTATE_PKGSPEC')) | ||
| 600 | update_hash("task=%s\n" % task) | ||
| 601 | |||
| 602 | for root, dirs, files in os.walk('.', topdown=True): | ||
| 603 | # Sort directories to ensure consistent ordering when recursing | ||
| 604 | dirs.sort() | ||
| 605 | files.sort() | ||
| 606 | |||
| 607 | def process(path): | ||
| 608 | s = os.lstat(path) | ||
| 609 | |||
| 610 | if stat.S_ISDIR(s.st_mode): | ||
| 611 | update_hash('d') | ||
| 612 | elif stat.S_ISCHR(s.st_mode): | ||
| 613 | update_hash('c') | ||
| 614 | elif stat.S_ISBLK(s.st_mode): | ||
| 615 | update_hash('b') | ||
| 616 | elif stat.S_ISSOCK(s.st_mode): | ||
| 617 | update_hash('s') | ||
| 618 | elif stat.S_ISLNK(s.st_mode): | ||
| 619 | update_hash('l') | ||
| 620 | elif stat.S_ISFIFO(s.st_mode): | ||
| 621 | update_hash('p') | ||
| 622 | else: | ||
| 623 | update_hash('-') | ||
| 624 | |||
| 625 | def add_perm(mask, on, off='-'): | ||
| 626 | if mask & s.st_mode: | ||
| 627 | update_hash(on) | ||
| 628 | else: | ||
| 629 | update_hash(off) | ||
| 630 | |||
| 631 | add_perm(stat.S_IRUSR, 'r') | ||
| 632 | add_perm(stat.S_IWUSR, 'w') | ||
| 633 | if stat.S_ISUID & s.st_mode: | ||
| 634 | add_perm(stat.S_IXUSR, 's', 'S') | ||
| 635 | else: | ||
| 636 | add_perm(stat.S_IXUSR, 'x') | ||
| 637 | |||
| 638 | if include_owners: | ||
| 639 | # Group/other permissions are only relevant in pseudo context | ||
| 640 | add_perm(stat.S_IRGRP, 'r') | ||
| 641 | add_perm(stat.S_IWGRP, 'w') | ||
| 642 | if stat.S_ISGID & s.st_mode: | ||
| 643 | add_perm(stat.S_IXGRP, 's', 'S') | ||
| 644 | else: | ||
| 645 | add_perm(stat.S_IXGRP, 'x') | ||
| 646 | |||
| 647 | add_perm(stat.S_IROTH, 'r') | ||
| 648 | add_perm(stat.S_IWOTH, 'w') | ||
| 649 | if stat.S_ISVTX & s.st_mode: | ||
| 650 | update_hash('t') | ||
| 651 | else: | ||
| 652 | add_perm(stat.S_IXOTH, 'x') | ||
| 653 | |||
| 654 | try: | ||
| 655 | update_hash(" %10s" % pwd.getpwuid(s.st_uid).pw_name) | ||
| 656 | update_hash(" %10s" % grp.getgrgid(s.st_gid).gr_name) | ||
| 657 | except KeyError as e: | ||
| 658 | msg = ("KeyError: %s\nPath %s is owned by uid %d, gid %d, which doesn't match " | ||
| 659 | "any user/group on target. This may be due to host contamination." % | ||
| 660 | (e, os.path.abspath(path), s.st_uid, s.st_gid)) | ||
| 661 | raise Exception(msg).with_traceback(e.__traceback__) | ||
| 662 | |||
| 663 | if include_timestamps: | ||
| 664 | # Need to clamp to SOURCE_DATE_EPOCH | ||
| 665 | if s.st_mtime > source_date_epoch: | ||
| 666 | update_hash(" %10d" % source_date_epoch) | ||
| 667 | else: | ||
| 668 | update_hash(" %10d" % s.st_mtime) | ||
| 669 | |||
| 670 | update_hash(" ") | ||
| 671 | if stat.S_ISBLK(s.st_mode) or stat.S_ISCHR(s.st_mode): | ||
| 672 | update_hash("%9s" % ("%d.%d" % (os.major(s.st_rdev), os.minor(s.st_rdev)))) | ||
| 673 | else: | ||
| 674 | update_hash(" " * 9) | ||
| 675 | |||
| 676 | filterfile = False | ||
| 677 | for entry in filemaps: | ||
| 678 | if fnmatch.fnmatch(path, entry): | ||
| 679 | filterfile = True | ||
| 680 | |||
| 681 | update_hash(" ") | ||
| 682 | if stat.S_ISREG(s.st_mode) and not filterfile: | ||
| 683 | update_hash("%10d" % s.st_size) | ||
| 684 | else: | ||
| 685 | update_hash(" " * 10) | ||
| 686 | |||
| 687 | update_hash(" ") | ||
| 688 | fh = hashlib.sha256() | ||
| 689 | if stat.S_ISREG(s.st_mode): | ||
| 690 | # Hash file contents | ||
| 691 | if filterfile: | ||
| 692 | # Need to ignore paths in crossscripts and postinst-useradd files. | ||
| 693 | with open(path, 'rb') as d: | ||
| 694 | chunk = d.read() | ||
| 695 | chunk = chunk.replace(bytes(basepath, encoding='utf8'), b'') | ||
| 696 | for entry in filemaps: | ||
| 697 | if not fnmatch.fnmatch(path, entry): | ||
| 698 | continue | ||
| 699 | for r in filemaps[entry]: | ||
| 700 | if r.startswith("regex-"): | ||
| 701 | chunk = re.sub(bytes(r[6:], encoding='utf8'), b'', chunk) | ||
| 702 | else: | ||
| 703 | chunk = chunk.replace(bytes(r, encoding='utf8'), b'') | ||
| 704 | fh.update(chunk) | ||
| 705 | else: | ||
| 706 | with open(path, 'rb') as d: | ||
| 707 | for chunk in iter(lambda: d.read(4096), b""): | ||
| 708 | fh.update(chunk) | ||
| 709 | update_hash(fh.hexdigest()) | ||
| 710 | else: | ||
| 711 | update_hash(" " * len(fh.hexdigest())) | ||
| 712 | |||
| 713 | update_hash(" %s" % path) | ||
| 714 | |||
| 715 | if stat.S_ISLNK(s.st_mode): | ||
| 716 | update_hash(" -> %s" % os.readlink(path)) | ||
| 717 | |||
| 718 | update_hash("\n") | ||
| 719 | |||
| 720 | # Process this directory and all its child files | ||
| 721 | if include_root or root != ".": | ||
| 722 | process(root) | ||
| 723 | for f in files: | ||
| 724 | if f == 'fixmepath': | ||
| 725 | continue | ||
| 726 | process(os.path.join(root, f)) | ||
| 727 | |||
| 728 | for dir in dirs: | ||
| 729 | if os.path.islink(os.path.join(root, dir)): | ||
| 730 | process(os.path.join(root, dir)) | ||
| 731 | finally: | ||
| 732 | os.chdir(prev_dir) | ||
| 733 | |||
| 734 | return h.hexdigest() | ||
| 735 | |||
| 736 | |||
diff --git a/meta/lib/oe/terminal.py b/meta/lib/oe/terminal.py deleted file mode 100644 index 4412bc14c1..0000000000 --- a/meta/lib/oe/terminal.py +++ /dev/null | |||
| @@ -1,332 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | import logging | ||
| 7 | import oe.classutils | ||
| 8 | import shlex | ||
| 9 | from bb.process import Popen, ExecutionError | ||
| 10 | |||
| 11 | logger = logging.getLogger('BitBake.OE.Terminal') | ||
| 12 | |||
| 13 | |||
| 14 | class UnsupportedTerminal(Exception): | ||
| 15 | pass | ||
| 16 | |||
| 17 | class NoSupportedTerminals(Exception): | ||
| 18 | def __init__(self, terms): | ||
| 19 | self.terms = terms | ||
| 20 | |||
| 21 | |||
| 22 | class Registry(oe.classutils.ClassRegistry): | ||
| 23 | command = None | ||
| 24 | |||
| 25 | def __init__(cls, name, bases, attrs): | ||
| 26 | super(Registry, cls).__init__(name.lower(), bases, attrs) | ||
| 27 | |||
| 28 | @property | ||
| 29 | def implemented(cls): | ||
| 30 | return bool(cls.command) | ||
| 31 | |||
| 32 | |||
| 33 | class Terminal(Popen, metaclass=Registry): | ||
| 34 | def __init__(self, sh_cmd, title=None, env=None, d=None): | ||
| 35 | from subprocess import STDOUT | ||
| 36 | fmt_sh_cmd = self.format_command(sh_cmd, title) | ||
| 37 | try: | ||
| 38 | Popen.__init__(self, fmt_sh_cmd, env=env, stderr=STDOUT) | ||
| 39 | except OSError as exc: | ||
| 40 | import errno | ||
| 41 | if exc.errno == errno.ENOENT: | ||
| 42 | raise UnsupportedTerminal(self.name) | ||
| 43 | else: | ||
| 44 | raise | ||
| 45 | |||
| 46 | def format_command(self, sh_cmd, title): | ||
| 47 | fmt = {'title': title or 'Terminal', 'command': sh_cmd, 'cwd': os.getcwd() } | ||
| 48 | if isinstance(self.command, str): | ||
| 49 | return shlex.split(self.command.format(**fmt)) | ||
| 50 | else: | ||
| 51 | return [element.format(**fmt) for element in self.command] | ||
| 52 | |||
| 53 | class XTerminal(Terminal): | ||
| 54 | def __init__(self, sh_cmd, title=None, env=None, d=None): | ||
| 55 | Terminal.__init__(self, sh_cmd, title, env, d) | ||
| 56 | if not os.environ.get('DISPLAY'): | ||
| 57 | raise UnsupportedTerminal(self.name) | ||
| 58 | |||
| 59 | class Gnome(XTerminal): | ||
| 60 | command = 'gnome-terminal -t "{title}" -- {command}' | ||
| 61 | priority = 2 | ||
| 62 | |||
| 63 | def __init__(self, sh_cmd, title=None, env=None, d=None): | ||
| 64 | # Recent versions of gnome-terminal do not support a non-UTF8 charset: | ||
| 65 | # https://bugzilla.gnome.org/show_bug.cgi?id=732127; as a workaround, | ||
| 66 | # clear the LC_ALL environment variable so the locale default is used. | ||
| 67 | # Once fixed in the gnome-terminal project, this should be removed. | ||
| 68 | if os.getenv('LC_ALL'): os.putenv('LC_ALL','') | ||
| 69 | |||
| 70 | XTerminal.__init__(self, sh_cmd, title, env, d) | ||
| 71 | |||
| 72 | class Mate(XTerminal): | ||
| 73 | command = 'mate-terminal --disable-factory -t "{title}" -x {command}' | ||
| 74 | priority = 2 | ||
| 75 | |||
| 76 | class Xfce(XTerminal): | ||
| 77 | command = 'xfce4-terminal -T "{title}" -e "{command}"' | ||
| 78 | priority = 2 | ||
| 79 | |||
| 80 | class Terminology(XTerminal): | ||
| 81 | command = 'terminology -T="{title}" -e {command}' | ||
| 82 | priority = 2 | ||
| 83 | |||
| 84 | class Konsole(XTerminal): | ||
| 85 | command = 'konsole --separate --workdir . -p tabtitle="{title}" -e {command}' | ||
| 86 | priority = 2 | ||
| 87 | |||
| 88 | def __init__(self, sh_cmd, title=None, env=None, d=None): | ||
| 89 | # Check version | ||
| 90 | vernum = check_terminal_version("konsole") | ||
| 91 | if vernum and bb.utils.vercmp_string_op(vernum, "2.0.0", "<"): | ||
| 92 | # Konsole from KDE 3.x | ||
| 93 | self.command = 'konsole -T "{title}" -e {command}' | ||
| 94 | elif vernum and bb.utils.vercmp_string_op(vernum, "16.08.1", "<"): | ||
| 95 | # Konsole pre 16.08.1 has --nofork | ||
| 96 | self.command = 'konsole --nofork --workdir . -p tabtitle="{title}" -e {command}' | ||
| 97 | XTerminal.__init__(self, sh_cmd, title, env, d) | ||
| 98 | |||
| 99 | class XTerm(XTerminal): | ||
| 100 | command = 'xterm -T "{title}" -e {command}' | ||
| 101 | priority = 1 | ||
| 102 | |||
| 103 | class Rxvt(XTerminal): | ||
| 104 | command = 'rxvt -T "{title}" -e {command}' | ||
| 105 | priority = 1 | ||
| 106 | |||
| 107 | class URxvt(XTerminal): | ||
| 108 | command = 'urxvt -T "{title}" -e {command}' | ||
| 109 | priority = 1 | ||
| 110 | |||
| 111 | class Screen(Terminal): | ||
| 112 | command = 'screen -D -m -t "{title}" -S devshell {command}' | ||
| 113 | |||
| 114 | def __init__(self, sh_cmd, title=None, env=None, d=None): | ||
| 115 | s_id = "devshell_%i" % os.getpid() | ||
| 116 | self.command = "screen -D -m -t \"{title}\" -S %s {command}" % s_id | ||
| 117 | Terminal.__init__(self, sh_cmd, title, env, d) | ||
| 118 | msg = 'Screen started. Please connect in another terminal with ' \ | ||
| 119 | '"screen -r %s"' % s_id | ||
| 120 | if (d): | ||
| 121 | bb.event.fire(bb.event.LogExecTTY(msg, "screen -r %s" % s_id, | ||
| 122 | 0.5, 10), d) | ||
| 123 | else: | ||
| 124 | logger.warning(msg) | ||
| 125 | |||
| 126 | class TmuxRunning(Terminal): | ||
| 127 | """Open a new pane in the current running tmux window""" | ||
| 128 | name = 'tmux-running' | ||
| 129 | command = 'tmux split-window -c "{cwd}" "{command}"' | ||
| 130 | priority = 2.75 | ||
| 131 | |||
| 132 | def __init__(self, sh_cmd, title=None, env=None, d=None): | ||
| 133 | if not bb.utils.which(os.getenv('PATH'), 'tmux'): | ||
| 134 | raise UnsupportedTerminal('tmux is not installed') | ||
| 135 | |||
| 136 | if not os.getenv('TMUX'): | ||
| 137 | raise UnsupportedTerminal('tmux is not running') | ||
| 138 | |||
| 139 | if not check_tmux_pane_size('tmux'): | ||
| 140 | raise UnsupportedTerminal('tmux pane too small or tmux < 1.9 version is being used') | ||
| 141 | |||
| 142 | Terminal.__init__(self, sh_cmd, title, env, d) | ||
| 143 | |||
| 144 | class TmuxNewWindow(Terminal): | ||
| 145 | """Open a new window in the current running tmux session""" | ||
| 146 | name = 'tmux-new-window' | ||
| 147 | command = 'tmux new-window -c "{cwd}" -n "{title}" "{command}"' | ||
| 148 | priority = 2.70 | ||
| 149 | |||
| 150 | def __init__(self, sh_cmd, title=None, env=None, d=None): | ||
| 151 | if not bb.utils.which(os.getenv('PATH'), 'tmux'): | ||
| 152 | raise UnsupportedTerminal('tmux is not installed') | ||
| 153 | |||
| 154 | if not os.getenv('TMUX'): | ||
| 155 | raise UnsupportedTerminal('tmux is not running') | ||
| 156 | |||
| 157 | Terminal.__init__(self, sh_cmd, title, env, d) | ||
| 158 | |||
| 159 | class Tmux(Terminal): | ||
| 160 | """Start a new tmux session and window""" | ||
| 161 | command = 'tmux new -c "{cwd}" -d -s devshell -n devshell "{command}"' | ||
| 162 | priority = 0.75 | ||
| 163 | |||
| 164 | def __init__(self, sh_cmd, title=None, env=None, d=None): | ||
| 165 | if not bb.utils.which(os.getenv('PATH'), 'tmux'): | ||
| 166 | raise UnsupportedTerminal('tmux is not installed') | ||
| 167 | |||
| 168 | # TODO: consider using a 'devshell' session shared amongst all | ||
| 169 | # devshells; if it's already there, add a new window to it. | ||
| 170 | window_name = 'devshell-%i' % os.getpid() | ||
| 171 | |||
| 172 | self.command = 'tmux new -c "{{cwd}}" -d -s {0} -n {0} "{{command}}"' | ||
| 173 | if not check_tmux_version('1.9'): | ||
| 174 | # `tmux new-session -c` was added in 1.9; | ||
| 175 | # older versions fail with that flag | ||
| 176 | self.command = 'tmux new -d -s {0} -n {0} "{{command}}"' | ||
| 177 | self.command = self.command.format(window_name) | ||
| 178 | Terminal.__init__(self, sh_cmd, title, env, d) | ||
| 179 | |||
| 180 | attach_cmd = 'tmux att -t {0}'.format(window_name) | ||
| 181 | msg = 'Tmux started. Please connect in another terminal with `tmux att -t {0}`'.format(window_name) | ||
| 182 | if d: | ||
| 183 | bb.event.fire(bb.event.LogExecTTY(msg, attach_cmd, 0.5, 10), d) | ||
| 184 | else: | ||
| 185 | logger.warning(msg) | ||
| 186 | |||
| 187 | class Custom(Terminal): | ||
| 188 | command = 'false' # This is a placeholder | ||
| 189 | priority = 3 | ||
| 190 | |||
| 191 | def __init__(self, sh_cmd, title=None, env=None, d=None): | ||
| 192 | self.command = d and d.getVar('OE_TERMINAL_CUSTOMCMD') | ||
| 193 | if self.command: | ||
| 194 | if not '{command}' in self.command: | ||
| 195 | self.command += ' {command}' | ||
| 196 | Terminal.__init__(self, sh_cmd, title, env, d) | ||
| 197 | logger.warning('Custom terminal was started.') | ||
| 198 | else: | ||
| 199 | logger.debug('No custom terminal (OE_TERMINAL_CUSTOMCMD) set') | ||
| 200 | raise UnsupportedTerminal('OE_TERMINAL_CUSTOMCMD not set') | ||
| 201 | |||
| 202 | |||
| 203 | def prioritized(): | ||
| 204 | return Registry.prioritized() | ||
| 205 | |||
| 206 | def get_cmd_list(): | ||
| 207 | terms = Registry.prioritized() | ||
| 208 | cmds = [] | ||
| 209 | for term in terms: | ||
| 210 | if term.command: | ||
| 211 | cmds.append(term.command) | ||
| 212 | return cmds | ||
| 213 | |||
| 214 | def spawn_preferred(sh_cmd, title=None, env=None, d=None): | ||
| 215 | """Spawn the first supported terminal, by priority""" | ||
| 216 | for terminal in prioritized(): | ||
| 217 | try: | ||
| 218 | spawn(terminal.name, sh_cmd, title, env, d) | ||
| 219 | break | ||
| 220 | except UnsupportedTerminal: | ||
| 221 | pass | ||
| 222 | except: | ||
| 223 | bb.warn("Terminal %s is supported but did not start" % (terminal.name)) | ||
| 224 | # when we've run out of options | ||
| 225 | else: | ||
| 226 | raise NoSupportedTerminals(get_cmd_list()) | ||
| 227 | |||
| 228 | def spawn(name, sh_cmd, title=None, env=None, d=None): | ||
| 229 | """Spawn the specified terminal, by name""" | ||
| 230 | logger.debug('Attempting to spawn terminal "%s"', name) | ||
| 231 | try: | ||
| 232 | terminal = Registry.registry[name] | ||
| 233 | except KeyError: | ||
| 234 | raise UnsupportedTerminal(name) | ||
| 235 | |||
| 236 | # We need to know when the command completes but some terminals (at least | ||
| 237 | # gnome and tmux) give us no way to do this. We therefore write the pid | ||
| 238 | # to a file using a "phonehome" wrapper script, then monitor the pid | ||
| 239 | # until it exits. | ||
| 240 | import tempfile | ||
| 241 | import time | ||
| 242 | pidfile = tempfile.NamedTemporaryFile(delete = False).name | ||
| 243 | try: | ||
| 244 | sh_cmd = bb.utils.which(os.getenv('PATH'), "oe-gnome-terminal-phonehome") + " " + pidfile + " " + sh_cmd | ||
| 245 | pipe = terminal(sh_cmd, title, env, d) | ||
| 246 | output = pipe.communicate()[0] | ||
| 247 | if output: | ||
| 248 | output = output.decode("utf-8") | ||
| 249 | if pipe.returncode != 0: | ||
| 250 | raise ExecutionError(sh_cmd, pipe.returncode, output) | ||
| 251 | |||
| 252 | while os.stat(pidfile).st_size <= 0: | ||
| 253 | time.sleep(0.01) | ||
| 254 | continue | ||
| 255 | with open(pidfile, "r") as f: | ||
| 256 | pid = int(f.readline()) | ||
| 257 | finally: | ||
| 258 | os.unlink(pidfile) | ||
| 259 | |||
| 260 | while True: | ||
| 261 | try: | ||
| 262 | os.kill(pid, 0) | ||
| 263 | time.sleep(0.1) | ||
| 264 | except OSError: | ||
| 265 | return | ||
| 266 | |||
| 267 | def check_tmux_version(desired): | ||
| 268 | vernum = check_terminal_version("tmux") | ||
| 269 | if vernum and bb.utils.vercmp_string_op(vernum, desired, "<"): | ||
| 270 | return False | ||
| 271 | return vernum | ||
| 272 | |||
| 273 | def check_tmux_pane_size(tmux): | ||
| 274 | import subprocess as sub | ||
| 275 | # On older tmux versions (<1.9), return False. The reason | ||
| 276 | # is that there is no easy way to get the height of the active pane | ||
| 277 | # in the current window without nested formats (available from version 1.9). | ||
| 278 | if not check_tmux_version('1.9'): | ||
| 279 | return False | ||
| 280 | try: | ||
| 281 | p = sub.Popen('%s list-panes -F "#{?pane_active,#{pane_height},}"' % tmux, | ||
| 282 | shell=True,stdout=sub.PIPE,stderr=sub.PIPE) | ||
| 283 | out, err = p.communicate() | ||
| 284 | size = int(out.strip()) | ||
| 285 | except OSError as exc: | ||
| 286 | import errno | ||
| 287 | if exc.errno == errno.ENOENT: | ||
| 288 | return None | ||
| 289 | else: | ||
| 290 | raise | ||
| 291 | |||
| 292 | return size/2 >= 19 | ||
| 293 | |||
| 294 | def check_terminal_version(terminalName): | ||
| 295 | import subprocess as sub | ||
| 296 | try: | ||
| 297 | cmdversion = '%s --version' % terminalName | ||
| 298 | if terminalName.startswith('tmux'): | ||
| 299 | cmdversion = '%s -V' % terminalName | ||
| 300 | newenv = os.environ.copy() | ||
| 301 | newenv["LANG"] = "C" | ||
| 302 | p = sub.Popen(['sh', '-c', cmdversion], stdout=sub.PIPE, stderr=sub.PIPE, env=newenv) | ||
| 303 | out, err = p.communicate() | ||
| 304 | ver_info = out.decode().rstrip().split('\n') | ||
| 305 | except OSError as exc: | ||
| 306 | import errno | ||
| 307 | if exc.errno == errno.ENOENT: | ||
| 308 | return None | ||
| 309 | else: | ||
| 310 | raise | ||
| 311 | vernum = None | ||
| 312 | for ver in ver_info: | ||
| 313 | if ver.startswith('Konsole'): | ||
| 314 | vernum = ver.split(' ')[-1] | ||
| 315 | if ver.startswith('GNOME Terminal'): | ||
| 316 | vernum = ver.split(' ')[-1] | ||
| 317 | if ver.startswith('MATE Terminal'): | ||
| 318 | vernum = ver.split(' ')[-1] | ||
| 319 | if ver.startswith('tmux'): | ||
| 320 | vernum = ver.split()[-1] | ||
| 321 | if ver.startswith('tmux next-'): | ||
| 322 | vernum = ver.split()[-1][5:] | ||
| 323 | return vernum | ||
| 324 | |||
| 325 | def distro_name(): | ||
| 326 | try: | ||
| 327 | p = Popen(['lsb_release', '-i']) | ||
| 328 | out, err = p.communicate() | ||
| 329 | distro = out.split(':')[1].strip().lower() | ||
| 330 | except: | ||
| 331 | distro = "unknown" | ||
| 332 | return distro | ||
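A hedged usage sketch of the registry above: spawn_preferred() walks the prioritized terminal classes and raises NoSupportedTerminals when none can start. This assumes a BitBake Python context, since spawn() relies on the bb module and on the oe-gnome-terminal-phonehome wrapper being on PATH.

    import oe.terminal

    try:
        # Try terminals in priority order; unsupported ones are skipped.
        oe.terminal.spawn_preferred('bash', title='devshell', env=None, d=None)
    except oe.terminal.NoSupportedTerminals as exc:
        print("No usable terminal; candidate commands were:", exc.terms)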
diff --git a/meta/lib/oe/tune.py b/meta/lib/oe/tune.py deleted file mode 100644 index 7fda19430d..0000000000 --- a/meta/lib/oe/tune.py +++ /dev/null | |||
| @@ -1,81 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | # riscv_isa_to_tune(isa) | ||
| 8 | # | ||
| 9 | # Automatically translate a RISC-V ISA string to TUNE_FEATURES | ||
| 10 | # | ||
| 11 | # Abbreviations, such as rv32g -> rv32imafd_zicsr_zifencei, are supported. | ||
| 12 | # | ||
| 13 | # Profiles, such as rva22u64, are NOT supported, you must use ISA strings. | ||
| 14 | # | ||
| 15 | def riscv_isa_to_tune(isa): | ||
| 16 | _isa = isa.lower() | ||
| 17 | |||
| 18 | feature = [] | ||
| 19 | iter = 0 | ||
| 20 | |||
| 21 | # rv or riscv | ||
| 22 | if _isa[iter:].startswith('rv'): | ||
| 23 | feature.append('rv') | ||
| 24 | iter = iter + 2 | ||
| 25 | elif _isa[iter:].startswith('riscv'): | ||
| 26 | feature.append('rv') | ||
| 27 | iter = iter + 5 | ||
| 28 | else: | ||
| 29 | # Not a risc-v ISA! | ||
| 30 | return _isa | ||
| 31 | |||
| 32 | while (_isa[iter:]): | ||
| 33 | # Skip _ and whitespace | ||
| 34 | if _isa[iter] == '_' or _isa[iter].isspace(): | ||
| 35 | iter = iter + 1 | ||
| 36 | continue | ||
| 37 | |||
| 38 | # Length, just capture numbers here | ||
| 39 | if _isa[iter].isdigit(): | ||
| 40 | iter_end = iter | ||
| 41 | while iter_end < len(_isa) and _isa[iter_end].isdigit(): | ||
| 42 | iter_end = iter_end + 1 | ||
| 43 | |||
| 44 | feature.append(_isa[iter:iter_end]) | ||
| 45 | iter = iter_end | ||
| 46 | continue | ||
| 47 | |||
| 48 | # Typically i, e or g is next, followed by extensions. | ||
| 49 | # Extensions are single character, except for Z, Ss, Sh, Sm, Sv, and X | ||
| 50 | |||
| 51 | # If the extension starts with 'Z', 'S' or 'X' use the name until the next _, whitespace or end | ||
| 52 | if _isa[iter] in ['z', 's', 'x']: | ||
| 53 | ext_type = _isa[iter] | ||
| 54 | iter_end = iter + 1 | ||
| 55 | |||
| 56 | # Multi-character extensions are supposed to have a _ before the next multi-character extension | ||
| 57 | # See 37.4 and 37.5: | ||
| 58 | # 37.4: Underscores "_" may be used to separate ISA extensions... | ||
| 59 | # 37.5: All multi-letter extensions ... must be separated from other multi-letter extensions by an underscore... | ||
| 60 | # Some extensions permit only alphabetic characters, while others allow alphanumeric characters | ||
| 61 | while iter_end < len(_isa) and _isa[iter_end] != "_" and not _isa[iter_end].isspace(): | ||
| 62 | iter_end = iter_end + 1 | ||
| 63 | |||
| 64 | feature.append(_isa[iter:iter_end]) | ||
| 65 | iter = iter_end | ||
| 66 | continue | ||
| 67 | |||
| 68 | # 'g' is special, it's an abbreviation for imafd_zicsr_zifencei | ||
| 69 | # When expanding the abbreviation, any additional letters must appear before the _z* extensions | ||
| 70 | if _isa[iter] == 'g': | ||
| 71 | _isa = 'imafd' + _isa[iter+1:] + '_zicsr_zifencei' | ||
| 72 | iter = 0 | ||
| 73 | continue | ||
| 74 | |||
| 75 | feature.append(_isa[iter]) | ||
| 76 | iter = iter + 1 | ||
| 77 | continue | ||
| 78 | |||
| 79 | # Eliminate duplicates, but preserve the order | ||
| 80 | feature = list(dict.fromkeys(feature)) | ||
| 81 | return ' '.join(feature) | ||
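A small usage sketch for the translation above; the expected outputs are traced from the function's own logic rather than taken from documentation:

    from oe.tune import riscv_isa_to_tune

    print(riscv_isa_to_tune("rv64gc"))
    # 'g' expands to imafd plus _zicsr_zifencei, so this prints:
    # rv 64 i m a f d c zicsr zifencei

    print(riscv_isa_to_tune("x86_64"))
    # Not a RISC-V ISA string, so it is returned (lowercased) unchanged: x86_64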
diff --git a/meta/lib/oe/types.py b/meta/lib/oe/types.py deleted file mode 100644 index b929afb1f3..0000000000 --- a/meta/lib/oe/types.py +++ /dev/null | |||
| @@ -1,188 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | import errno | ||
| 8 | import re | ||
| 9 | import os | ||
| 10 | |||
| 11 | |||
| 12 | class OEList(list): | ||
| 13 | """OpenEmbedded 'list' type | ||
| 14 | |||
| 15 | Acts as an ordinary list, but is constructed from a string value and a | ||
| 16 | separator (optional), and re-joins itself when converted to a string with | ||
| 17 | str(). Set the variable type flag to 'list' to use this type, and the | ||
| 18 | 'separator' flag may be specified (defaulting to whitespace).""" | ||
| 19 | |||
| 20 | name = "list" | ||
| 21 | |||
| 22 | def __init__(self, value, separator = None): | ||
| 23 | if value is not None: | ||
| 24 | list.__init__(self, value.split(separator)) | ||
| 25 | else: | ||
| 26 | list.__init__(self) | ||
| 27 | |||
| 28 | if separator is None: | ||
| 29 | self.separator = " " | ||
| 30 | else: | ||
| 31 | self.separator = separator | ||
| 32 | |||
| 33 | def __str__(self): | ||
| 34 | return self.separator.join(self) | ||
| 35 | |||
| 36 | def choice(value, choices): | ||
| 37 | """OpenEmbedded 'choice' type | ||
| 38 | |||
| 39 | Acts as a multiple choice for the user. To use this, set the variable | ||
| 40 | type flag to 'choice', and set the 'choices' flag to a space separated | ||
| 41 | list of valid values.""" | ||
| 42 | if not isinstance(value, str): | ||
| 43 | raise TypeError("choice accepts a string, not '%s'" % type(value)) | ||
| 44 | |||
| 45 | value = value.lower() | ||
| 46 | choices = choices.lower() | ||
| 47 | if value not in choices.split(): | ||
| 48 | raise ValueError("Invalid choice '%s'. Valid choices: %s" % | ||
| 49 | (value, choices)) | ||
| 50 | return value | ||
| 51 | |||
| 52 | class NoMatch(object): | ||
| 53 | """Stub python regex pattern object which never matches anything""" | ||
| 54 | def findall(self, string, flags=0): | ||
| 55 | return None | ||
| 56 | |||
| 57 | def finditer(self, string, flags=0): | ||
| 58 | return None | ||
| 59 | |||
| 60 | def match(self, flags=0): | ||
| 61 | return None | ||
| 62 | |||
| 63 | def search(self, string, flags=0): | ||
| 64 | return None | ||
| 65 | |||
| 66 | def split(self, string, maxsplit=0): | ||
| 67 | return None | ||
| 68 | |||
| 69 | def sub(pattern, repl, string, count=0): | ||
| 70 | return None | ||
| 71 | |||
| 72 | def subn(pattern, repl, string, count=0): | ||
| 73 | return None | ||
| 74 | |||
| 75 | NoMatch = NoMatch() | ||
| 76 | |||
| 77 | def regex(value, regexflags=None): | ||
| 78 | """OpenEmbedded 'regex' type | ||
| 79 | |||
| 80 | Acts as a regular expression, returning the pre-compiled regular | ||
| 81 | expression pattern object. To use this type, set the variable type flag | ||
| 82 | to 'regex', and optionally, set the 'regexflags' type to a space separated | ||
| 83 | list of the flags to control the regular expression matching (e.g. | ||
| 84 | FOO[regexflags] += 'ignorecase'). See the python documentation on the | ||
| 85 | 're' module for a list of valid flags.""" | ||
| 86 | |||
| 87 | flagval = 0 | ||
| 88 | if regexflags: | ||
| 89 | for flag in regexflags.split(): | ||
| 90 | flag = flag.upper() | ||
| 91 | try: | ||
| 92 | flagval |= getattr(re, flag) | ||
| 93 | except AttributeError: | ||
| 94 | raise ValueError("Invalid regex flag '%s'" % flag) | ||
| 95 | |||
| 96 | if not value: | ||
| 97 | # Let's ensure that the default behavior for an undefined or empty | ||
| 98 | # variable is to match nothing. If the user explicitly wants to match | ||
| 99 | # anything, they can match '.*' instead. | ||
| 100 | return NoMatch | ||
| 101 | |||
| 102 | try: | ||
| 103 | return re.compile(value, flagval) | ||
| 104 | except re.error as exc: | ||
| 105 | raise ValueError("Invalid regex value '%s': %s" % | ||
| 106 | (value, exc.args[0])) | ||
| 107 | |||
| 108 | def boolean(value): | ||
| 109 | """OpenEmbedded 'boolean' type | ||
| 110 | |||
| 111 | Valid values for true: 'yes', 'y', 'true', 't', '1' | ||
| 112 | Valid values for false: 'no', 'n', 'false', 'f', '0', None | ||
| 113 | """ | ||
| 114 | if value is None: | ||
| 115 | return False | ||
| 116 | |||
| 117 | if isinstance(value, bool): | ||
| 118 | return value | ||
| 119 | |||
| 120 | if not isinstance(value, str): | ||
| 121 | raise TypeError("boolean accepts a string, not '%s'" % type(value)) | ||
| 122 | |||
| 123 | value = value.lower() | ||
| 124 | if value in ('yes', 'y', 'true', 't', '1'): | ||
| 125 | return True | ||
| 126 | elif value in ('no', 'n', 'false', 'f', '0'): | ||
| 127 | return False | ||
| 128 | raise ValueError("Invalid boolean value '%s'" % value) | ||
| 129 | |||
| 130 | def integer(value, numberbase=10): | ||
| 131 | """OpenEmbedded 'integer' type | ||
| 132 | |||
| 133 | Defaults to base 10, but this can be specified using the optional | ||
| 134 | 'numberbase' flag.""" | ||
| 135 | |||
| 136 | return int(value, int(numberbase)) | ||
| 137 | |||
| 138 | _float = float | ||
| 139 | def float(value, fromhex='false'): | ||
| 140 | """OpenEmbedded floating point type | ||
| 141 | |||
| 142 | To use this type, set the type flag to 'float', and optionally set the | ||
| 143 | 'fromhex' flag to a true value (obeying the same rules as for the | ||
| 144 | 'boolean' type) if the value is in base 16 rather than base 10.""" | ||
| 145 | |||
| 146 | if boolean(fromhex): | ||
| 147 | return _float.fromhex(value) | ||
| 148 | else: | ||
| 149 | return _float(value) | ||
| 150 | |||
| 151 | def path(value, relativeto='', normalize='true', mustexist='false'): | ||
| 152 | value = os.path.join(relativeto, value) | ||
| 153 | |||
| 154 | if boolean(normalize): | ||
| 155 | value = os.path.normpath(value) | ||
| 156 | |||
| 157 | if boolean(mustexist): | ||
| 158 | try: | ||
| 159 | with open(value, 'r'): | ||
| 160 | pass | ||
| 161 | except IOError as exc: | ||
| 162 | if exc.errno == errno.ENOENT: | ||
| 163 | raise ValueError("{0}: {1}".format(value, os.strerror(errno.ENOENT))) | ||
| 164 | |||
| 165 | return value | ||
| 166 | |||
| 167 | def is_x86(arch): | ||
| 168 | """ | ||
| 169 | Check whether arch is x86 or x86_64 | ||
| 170 | """ | ||
| 171 | if arch.startswith('x86_') or re.match('i.*86', arch): | ||
| 172 | return True | ||
| 173 | else: | ||
| 174 | return False | ||
| 175 | |||
| 176 | def qemu_use_kvm(kvm, target_arch): | ||
| 177 | """ | ||
| 178 | Enable kvm if target_arch == build_arch or both of them are x86 archs. | ||
| 179 | """ | ||
| 180 | |||
| 181 | use_kvm = False | ||
| 182 | if kvm and boolean(kvm): | ||
| 183 | build_arch = os.uname()[4] | ||
| 184 | if is_x86(build_arch) and is_x86(target_arch): | ||
| 185 | use_kvm = True | ||
| 186 | elif build_arch == target_arch: | ||
| 187 | use_kvm = True | ||
| 188 | return use_kvm | ||
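A brief sketch of how the type converters above behave, assuming the module is importable as oe.types:

    import oe.types

    oe.types.boolean("Yes")               # True
    oe.types.boolean("0")                 # False
    oe.types.integer("ff", 16)            # 255
    oe.types.regex("")                    # the NoMatch stub, which never matches
    oe.types.qemu_use_kvm("1", "x86_64")  # True only when the build host is also x86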
diff --git a/meta/lib/oe/useradd.py b/meta/lib/oe/useradd.py deleted file mode 100644 index 54aa86feb5..0000000000 --- a/meta/lib/oe/useradd.py +++ /dev/null | |||
| @@ -1,71 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | import argparse | ||
| 7 | import re | ||
| 8 | |||
| 9 | class myArgumentParser(argparse.ArgumentParser): | ||
| 10 | def _print_message(self, message, file=None): | ||
| 11 | bb.warn("%s - %s: %s" % (d.getVar('PN'), pkg, message)) | ||
| 12 | |||
| 13 | # This should never be called... | ||
| 14 | def exit(self, status=0, message=None): | ||
| 15 | message = message or ("%s - %s: useradd.bbclass: Argument parsing exited" % (d.getVar('PN'), pkg)) | ||
| 16 | error(message) | ||
| 17 | |||
| 18 | def error(self, message): | ||
| 19 | bb.fatal(message) | ||
| 20 | |||
| 21 | def split_commands(params): | ||
| 22 | params = re.split('''[ \t]*;[ \t]*(?=(?:[^'"]|'[^']*'|"[^"]*")*$)''', params.strip()) | ||
| 23 | # Remove any empty items | ||
| 24 | return [x for x in params if x] | ||
| 25 | |||
| 26 | def split_args(params): | ||
| 27 | params = re.split('''[ \t]+(?=(?:[^'"]|'[^']*'|"[^"]*")*$)''', params.strip()) | ||
| 28 | # Remove any empty items | ||
| 29 | return [x for x in params if x] | ||
| 30 | |||
| 31 | def build_useradd_parser(): | ||
| 32 | # The following comes from --help on useradd from shadow | ||
| 33 | parser = myArgumentParser(prog='useradd') | ||
| 34 | parser.add_argument("-b", "--base-dir", metavar="BASE_DIR", help="base directory for the home directory of the new account") | ||
| 35 | parser.add_argument("-c", "--comment", metavar="COMMENT", help="GECOS field of the new account") | ||
| 36 | parser.add_argument("-d", "--home-dir", metavar="HOME_DIR", help="home directory of the new account") | ||
| 37 | parser.add_argument("-D", "--defaults", help="print or change default useradd configuration", action="store_true") | ||
| 38 | parser.add_argument("-e", "--expiredate", metavar="EXPIRE_DATE", help="expiration date of the new account") | ||
| 39 | parser.add_argument("-f", "--inactive", metavar="INACTIVE", help="password inactivity period of the new account") | ||
| 40 | parser.add_argument("-g", "--gid", metavar="GROUP", help="name or ID of the primary group of the new account") | ||
| 41 | parser.add_argument("-G", "--groups", metavar="GROUPS", help="list of supplementary groups of the new account") | ||
| 42 | parser.add_argument("-k", "--skel", metavar="SKEL_DIR", help="use this alternative skeleton directory") | ||
| 43 | parser.add_argument("-K", "--key", metavar="KEY=VALUE", help="override /etc/login.defs defaults") | ||
| 44 | parser.add_argument("-l", "--no-log-init", help="do not add the user to the lastlog and faillog databases", action="store_true") | ||
| 45 | parser.add_argument("-m", "--create-home", help="create the user's home directory", action="store_const", const=True) | ||
| 46 | parser.add_argument("-M", "--no-create-home", dest="create_home", help="do not create the user's home directory", action="store_const", const=False) | ||
| 47 | parser.add_argument("-N", "--no-user-group", dest="user_group", help="do not create a group with the same name as the user", action="store_const", const=False) | ||
| 48 | parser.add_argument("-o", "--non-unique", help="allow to create users with duplicate (non-unique UID)", action="store_true") | ||
| 49 | parser.add_argument("-p", "--password", metavar="PASSWORD", help="encrypted password of the new account") | ||
| 50 | parser.add_argument("-R", "--root", metavar="CHROOT_DIR", help="directory to chroot into") | ||
| 51 | parser.add_argument("-r", "--system", help="create a system account", action="store_true") | ||
| 52 | parser.add_argument("-s", "--shell", metavar="SHELL", help="login shell of the new account") | ||
| 53 | parser.add_argument("-u", "--uid", metavar="UID", help="user ID of the new account") | ||
| 54 | parser.add_argument("-U", "--user-group", help="create a group with the same name as the user", action="store_const", const=True) | ||
| 55 | parser.add_argument("LOGIN", help="Login name of the new user") | ||
| 56 | |||
| 57 | return parser | ||
| 58 | |||
| 59 | def build_groupadd_parser(): | ||
| 60 | # The following comes from --help on groupadd from shadow | ||
| 61 | parser = myArgumentParser(prog='groupadd') | ||
| 62 | parser.add_argument("-f", "--force", help="exit successfully if the group already exists, and cancel -g if the GID is already used", action="store_true") | ||
| 63 | parser.add_argument("-g", "--gid", metavar="GID", help="use GID for the new group") | ||
| 64 | parser.add_argument("-K", "--key", metavar="KEY=VALUE", help="override /etc/login.defs defaults") | ||
| 65 | parser.add_argument("-o", "--non-unique", help="allow to create groups with duplicate (non-unique) GID", action="store_true") | ||
| 66 | parser.add_argument("-p", "--password", metavar="PASSWORD", help="use this encrypted password for the new group") | ||
| 67 | parser.add_argument("-R", "--root", metavar="CHROOT_DIR", help="directory to chroot into") | ||
| 68 | parser.add_argument("-r", "--system", help="create a system account", action="store_true") | ||
| 69 | parser.add_argument("GROUP", help="Group name of the new group") | ||
| 70 | |||
| 71 | return parser | ||
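A hedged sketch of driving the parsers above with a USERADD_PARAM-style string; the example values are made up, and the error paths of myArgumentParser still expect the BitBake context (bb, d, pkg), which is only exercised when parsing fails.

    from oe.useradd import build_useradd_parser, split_commands, split_args

    params = "--system -d /home/builder -s /bin/sh builder; --system daemon2"
    parser = build_useradd_parser()
    for cmd in split_commands(params):
        args = parser.parse_args(split_args(cmd))
        print(args.LOGIN, args.system, args.home_dir)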
diff --git a/meta/lib/oe/utils.py b/meta/lib/oe/utils.py deleted file mode 100644 index 0128ee411d..0000000000 --- a/meta/lib/oe/utils.py +++ /dev/null | |||
| @@ -1,513 +0,0 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | |||
| 7 | import subprocess | ||
| 8 | import traceback | ||
| 9 | import errno | ||
| 10 | |||
| 11 | import bb.parse | ||
| 12 | from bb import multiprocessing | ||
| 13 | |||
| 14 | def read_file(filename): | ||
| 15 | try: | ||
| 16 | f = open( filename, "r" ) | ||
| 17 | except IOError as reason: | ||
| 18 | return "" # WARNING: can't raise an error now because of the new RDEPENDS handling. This is a bit ugly. :M: | ||
| 19 | else: | ||
| 20 | data = f.read().strip() | ||
| 21 | f.close() | ||
| 22 | return data | ||
| 23 | return None | ||
| 24 | |||
| 25 | def ifelse(condition, iftrue = True, iffalse = False): | ||
| 26 | if condition: | ||
| 27 | return iftrue | ||
| 28 | else: | ||
| 29 | return iffalse | ||
| 30 | |||
| 31 | def conditional(variable, checkvalue, truevalue, falsevalue, d): | ||
| 32 | if d.getVar(variable) == checkvalue: | ||
| 33 | return truevalue | ||
| 34 | else: | ||
| 35 | return falsevalue | ||
| 36 | |||
| 37 | def vartrue(var, iftrue, iffalse, d): | ||
| 38 | import oe.types | ||
| 39 | if oe.types.boolean(d.getVar(var)): | ||
| 40 | return iftrue | ||
| 41 | else: | ||
| 42 | return iffalse | ||
| 43 | |||
| 44 | def less_or_equal(variable, checkvalue, truevalue, falsevalue, d): | ||
| 45 | if float(d.getVar(variable)) <= float(checkvalue): | ||
| 46 | return truevalue | ||
| 47 | else: | ||
| 48 | return falsevalue | ||
| 49 | |||
| 50 | def version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d): | ||
| 51 | result = bb.utils.vercmp_string(d.getVar(variable), checkvalue) | ||
| 52 | if result <= 0: | ||
| 53 | return truevalue | ||
| 54 | else: | ||
| 55 | return falsevalue | ||
| 56 | |||
| 57 | def both_contain(variable1, variable2, checkvalue, d): | ||
| 58 | val1 = d.getVar(variable1) | ||
| 59 | val2 = d.getVar(variable2) | ||
| 60 | val1 = set(val1.split()) | ||
| 61 | val2 = set(val2.split()) | ||
| 62 | if isinstance(checkvalue, str): | ||
| 63 | checkvalue = set(checkvalue.split()) | ||
| 64 | else: | ||
| 65 | checkvalue = set(checkvalue) | ||
| 66 | if checkvalue.issubset(val1) and checkvalue.issubset(val2): | ||
| 67 | return " ".join(checkvalue) | ||
| 68 | else: | ||
| 69 | return "" | ||
| 70 | |||
| 71 | def set_intersect(variable1, variable2, d): | ||
| 72 | """ | ||
| 73 | Expand both variables, interpret them as lists of strings, and return the | ||
| 74 | intersection as a flattened string. | ||
| 75 | |||
| 76 | For example: | ||
| 77 | s1 = "a b c" | ||
| 78 | s2 = "b c d" | ||
| 79 | s3 = set_intersect(s1, s2) | ||
| 80 | => s3 = "b c" | ||
| 81 | """ | ||
| 82 | val1 = set(d.getVar(variable1).split()) | ||
| 83 | val2 = set(d.getVar(variable2).split()) | ||
| 84 | return " ".join(val1 & val2) | ||
| 85 | |||
| 86 | def prune_suffix(var, suffixes, d): | ||
| 87 | # See if var ends with any of the suffixes listed and | ||
| 88 | # remove it if found | ||
| 89 | for suffix in suffixes: | ||
| 90 | if suffix and var.endswith(suffix): | ||
| 91 | var = var[:-len(suffix)] | ||
| 92 | |||
| 93 | prefix = d.getVar("MLPREFIX") | ||
| 94 | if prefix and var.startswith(prefix): | ||
| 95 | var = var[len(prefix):] | ||
| 96 | |||
| 97 | return var | ||
| 98 | |||
| 99 | def str_filter(f, str, d): | ||
| 100 | from re import match | ||
| 101 | return " ".join([x for x in str.split() if match(f, x, 0)]) | ||
| 102 | |||
| 103 | def str_filter_out(f, str, d): | ||
| 104 | from re import match | ||
| 105 | return " ".join([x for x in str.split() if not match(f, x, 0)]) | ||
| 106 | |||
| 107 | def build_depends_string(depends, task): | ||
| 108 | """Append a taskname to a string of dependencies as used by the [depends] flag""" | ||
| 109 | return " ".join(dep + ":" + task for dep in depends.split()) | ||
| 110 | |||
| 111 | def inherits(d, *classes): | ||
| 112 | """Return True if the metadata inherits any of the specified classes""" | ||
| 113 | return any(bb.data.inherits_class(cls, d) for cls in classes) | ||
| 114 | |||
| 115 | def features_backfill(var,d): | ||
| 116 | # This construct allows the addition of new features to variable specified | ||
| 117 | # as var | ||
| 118 | # Example for var = "DISTRO_FEATURES" | ||
| 119 | # This construct allows the addition of new features to DISTRO_FEATURES | ||
| 120 | # that if not present would disable existing functionality, without | ||
| 121 | # disturbing distributions that have already set DISTRO_FEATURES. | ||
| 122 | # Distributions wanting to elide a value in DISTRO_FEATURES_BACKFILL should | ||
| 123 | # add the feature to DISTRO_FEATURES_BACKFILL_CONSIDERED | ||
| 124 | features = (d.getVar(var) or "").split() | ||
| 125 | backfill = (d.getVar(var+"_BACKFILL") or "").split() | ||
| 126 | considered = (d.getVar(var+"_BACKFILL_CONSIDERED") or "").split() | ||
| 127 | |||
| 128 | addfeatures = [] | ||
| 129 | for feature in backfill: | ||
| 130 | if feature not in features and feature not in considered: | ||
| 131 | addfeatures.append(feature) | ||
| 132 | |||
| 133 | if addfeatures: | ||
| 134 | d.appendVar(var, " " + " ".join(addfeatures)) | ||
| 135 | |||
| 136 | def all_distro_features(d, features, truevalue="1", falsevalue=""): | ||
| 137 | """ | ||
| 138 | Returns truevalue if *all* given features are set in DISTRO_FEATURES, | ||
| 139 | else falsevalue. The features can be given as single string or anything | ||
| 140 | that can be turned into a set. | ||
| 141 | |||
| 142 | This is a shorter, more flexible version of | ||
| 143 | bb.utils.contains("DISTRO_FEATURES", features, truevalue, falsevalue, d). | ||
| 144 | |||
| 145 | Without explicit true/false values it can be used directly where | ||
| 146 | Python expects a boolean: | ||
| 147 | if oe.utils.all_distro_features(d, "foo bar"): | ||
| 148 | bb.fatal("foo and bar are mutually exclusive DISTRO_FEATURES") | ||
| 149 | |||
| 150 | With just a truevalue, it can be used to include files that are meant to be | ||
| 151 | used only when requested via DISTRO_FEATURES: | ||
| 152 | require ${@ oe.utils.all_distro_features(d, "foo bar", "foo-and-bar.inc")} | ||
| 153 | """ | ||
| 154 | return bb.utils.contains("DISTRO_FEATURES", features, truevalue, falsevalue, d) | ||
| 155 | |||
| 156 | def any_distro_features(d, features, truevalue="1", falsevalue=""): | ||
| 157 | """ | ||
| 158 | Returns truevalue if at least *one* of the given features is set in DISTRO_FEATURES, | ||
| 159 | else falsevalue. The features can be given as single string or anything | ||
| 160 | that can be turned into a set. | ||
| 161 | |||
| 162 | This is a shorter, more flexible version of | ||
| 163 | bb.utils.contains_any("DISTRO_FEATURES", features, truevalue, falsevalue, d). | ||
| 164 | |||
| 165 | Without explicit true/false values it can be used directly where | ||
| 166 | Python expects a boolean: | ||
| 167 | if not oe.utils.any_distro_features(d, "foo bar"): | ||
| 168 | bb.fatal("foo, bar or both must be set in DISTRO_FEATURES") | ||
| 169 | |||
| 170 | With just a truevalue, it can be used to include files that are meant to be | ||
| 171 | used only when requested via DISTRO_FEATURES: | ||
| 172 | require ${@ oe.utils.any_distro_features(d, "foo bar", "foo-or-bar.inc")} | ||
| 173 | |||
| 174 | """ | ||
| 175 | return bb.utils.contains_any("DISTRO_FEATURES", features, truevalue, falsevalue, d) | ||
| 176 | |||
| 177 | def parallel_make_value(pm): | ||
| 178 | """ | ||
| 179 | Return the integer value for the number of parallel threads to use when | ||
| 180 | building, scraped out of the given string. If no parallelization option is | ||
| 181 | found, returns an empty string. | ||
| 182 | |||
| 183 | e.g. if string is "-j 10", this will return 10 as an integer. | ||
| 184 | """ | ||
| 185 | # look for '-j' and throw other options (e.g. '-l') away | ||
| 186 | while pm: | ||
| 187 | opt = pm.pop(0) | ||
| 188 | if opt == '-j': | ||
| 189 | v = pm.pop(0) | ||
| 190 | elif opt.startswith('-j'): | ||
| 191 | v = opt[2:].strip() | ||
| 192 | else: | ||
| 193 | continue | ||
| 194 | |||
| 195 | return int(v) | ||
| 196 | |||
| 197 | return '' | ||
| 198 | |||
| 199 | def parallel_make(d, makeinst=False): | ||
| 200 | """ | ||
| 201 | Return the integer value for the number of parallel threads to use when | ||
| 202 | building, scraped out of PARALLEL_MAKE. If no parallelization option is | ||
| 203 | found, returns an empty string. | ||
| 204 | |||
| 205 | e.g. if PARALLEL_MAKE = "-j 10", this will return 10 as an integer. | ||
| 206 | """ | ||
| 207 | if makeinst: | ||
| 208 | pm = (d.getVar('PARALLEL_MAKEINST') or '').split() | ||
| 209 | else: | ||
| 210 | pm = (d.getVar('PARALLEL_MAKE') or '').split() | ||
| 211 | return parallel_make_value(pm) | ||
| 212 | |||
| 213 | def parallel_make_argument(d, fmt, limit=None, makeinst=False): | ||
| 214 | """ | ||
| 215 | Helper utility to construct a parallel make argument from the number of | ||
| 216 | parallel threads specified in PARALLEL_MAKE. | ||
| 217 | |||
| 218 | Returns the input format string `fmt` where a single '%d' will be expanded | ||
| 219 | with the number of parallel threads to use. If `limit` is specified, the | ||
| 220 | number of parallel threads will be no larger than it. If no parallelization | ||
| 221 | option is found in PARALLEL_MAKE, returns an empty string | ||
| 222 | |||
| 223 | e.g. if PARALLEL_MAKE = "-j 10", parallel_make_argument(d, "-n %d") will return | ||
| 224 | "-n 10" | ||
| 225 | """ | ||
| 226 | v = parallel_make(d, makeinst) | ||
| 227 | if v: | ||
| 228 | if limit: | ||
| 229 | v = min(limit, v) | ||
| 230 | return fmt % v | ||
| 231 | return '' | ||
| 232 | |||
| 233 | def packages_filter_out_system(d): | ||
| 234 | """ | ||
| 235 | Return a list of packages from PACKAGES with the "system" packages such as | ||
| 236 | PN-dbg PN-doc PN-locale-en-gb removed. | ||
| 237 | """ | ||
| 238 | pn = d.getVar('PN') | ||
| 239 | pkgfilter = [pn + suffix for suffix in ('', '-dbg', '-dev', '-doc', '-locale', '-staticdev', '-src')] | ||
| 240 | localepkg = pn + "-locale-" | ||
| 241 | pkgs = [] | ||
| 242 | |||
| 243 | for pkg in d.getVar('PACKAGES').split(): | ||
| 244 | if pkg not in pkgfilter and localepkg not in pkg: | ||
| 245 | pkgs.append(pkg) | ||
| 246 | return pkgs | ||
| 247 | |||
| 248 | def getstatusoutput(cmd): | ||
| 249 | return subprocess.getstatusoutput(cmd) | ||
| 250 | |||
| 251 | |||
| 252 | def trim_version(version, num_parts=2): | ||
| 253 | """ | ||
| 254 | Return just the first <num_parts> of <version>, split by periods. For | ||
| 255 | example, trim_version("1.2.3", 2) will return "1.2". | ||
| 256 | """ | ||
| 257 | if type(version) is not str: | ||
| 258 | raise TypeError("Version should be a string") | ||
| 259 | if num_parts < 1: | ||
| 260 | raise ValueError("Cannot split to parts < 1") | ||
| 261 | |||
| 262 | parts = version.split(".") | ||
| 263 | trimmed = ".".join(parts[:num_parts]) | ||
| 264 | return trimmed | ||
| 265 | |||
| 266 | def cpu_count(at_least=1, at_most=64): | ||
| 267 | cpus = len(os.sched_getaffinity(0)) | ||
| 268 | return max(min(cpus, at_most), at_least) | ||
| 269 | |||
| 270 | def execute_pre_post_process(d, cmds): | ||
| 271 | if cmds is None: | ||
| 272 | return | ||
| 273 | |||
| 274 | cmds = cmds.replace(";", " ") | ||
| 275 | |||
| 276 | for cmd in cmds.split(): | ||
| 277 | bb.note("Executing %s ..." % cmd) | ||
| 278 | bb.build.exec_func(cmd, d) | ||
| 279 | |||
| 280 | @bb.parse.vardepsexclude("BB_NUMBER_THREADS") | ||
| 281 | def get_bb_number_threads(d): | ||
| 282 | return int(d.getVar("BB_NUMBER_THREADS") or os.cpu_count() or 1) | ||
| 283 | |||
| 284 | def multiprocess_launch(target, items, d, extraargs=None): | ||
| 285 | max_process = get_bb_number_threads(d) | ||
| 286 | return multiprocess_launch_mp(target, items, max_process, extraargs) | ||
| 287 | |||
| 288 | # For each item in items, call the function 'target' with item as the first | ||
| 289 | # argument, extraargs as the other arguments and handle any exceptions in the | ||
| 290 | # parent thread | ||
| 291 | def multiprocess_launch_mp(target, items, max_process, extraargs=None): | ||
| 292 | |||
| 293 | class ProcessLaunch(multiprocessing.Process): | ||
| 294 | def __init__(self, *args, **kwargs): | ||
| 295 | multiprocessing.Process.__init__(self, *args, **kwargs) | ||
| 296 | self._pconn, self._cconn = multiprocessing.Pipe() | ||
| 297 | self._exception = None | ||
| 298 | self._result = None | ||
| 299 | |||
| 300 | def run(self): | ||
| 301 | try: | ||
| 302 | ret = self._target(*self._args, **self._kwargs) | ||
| 303 | self._cconn.send((None, ret)) | ||
| 304 | except Exception as e: | ||
| 305 | tb = traceback.format_exc() | ||
| 306 | self._cconn.send((e, tb)) | ||
| 307 | |||
| 308 | def update(self): | ||
| 309 | if self._pconn.poll(): | ||
| 310 | (e, tb) = self._pconn.recv() | ||
| 311 | if e is not None: | ||
| 312 | self._exception = (e, tb) | ||
| 313 | else: | ||
| 314 | self._result = tb | ||
| 315 | |||
| 316 | @property | ||
| 317 | def exception(self): | ||
| 318 | self.update() | ||
| 319 | return self._exception | ||
| 320 | |||
| 321 | @property | ||
| 322 | def result(self): | ||
| 323 | self.update() | ||
| 324 | return self._result | ||
| 325 | |||
| 326 | launched = [] | ||
| 327 | errors = [] | ||
| 328 | results = [] | ||
| 329 | items = list(items) | ||
| 330 | while (items and not errors) or launched: | ||
| 331 | if not errors and items and len(launched) < max_process: | ||
| 332 | args = items.pop() | ||
| 333 | if not type(args) is tuple: | ||
| 334 | args = (args,) | ||
| 335 | if extraargs is not None: | ||
| 336 | args = args + extraargs | ||
| 337 | p = ProcessLaunch(target=target, args=args) | ||
| 338 | p.start() | ||
| 339 | launched.append(p) | ||
| 340 | for q in launched: | ||
| 341 | # Have to manually call update() to avoid deadlocks. The pipe can be full and | ||
| 342 | # transfer stalled until we try and read the results object but the subprocess won't exit | ||
| 343 | # as it still has data to write (https://bugs.python.org/issue8426) | ||
| 344 | q.update() | ||
| 345 | # The finished processes are joined when calling is_alive() | ||
| 346 | if not q.is_alive(): | ||
| 347 | if q.exception: | ||
| 348 | errors.append(q.exception) | ||
| 349 | if q.result: | ||
| 350 | results.append(q.result) | ||
| 351 | launched.remove(q) | ||
| 352 | # Paranoia doesn't hurt | ||
| 353 | for p in launched: | ||
| 354 | p.join() | ||
| 355 | if errors: | ||
| 356 | msg = "" | ||
| 357 | for (e, tb) in errors: | ||
| 358 | if isinstance(e, subprocess.CalledProcessError) and e.output: | ||
| 359 | msg = msg + str(e) + "\n" | ||
| 360 | msg = msg + "Subprocess output:" | ||
| 361 | msg = msg + e.output.decode("utf-8", errors="ignore") | ||
| 362 | else: | ||
| 363 | msg = msg + str(e) + ": " + str(tb) + "\n" | ||
| 364 | bb.fatal("Fatal errors occurred in subprocesses:\n%s" % msg) | ||
| 365 | return results | ||
| 366 | |||
| 367 | def squashspaces(string): | ||
| 368 | import re | ||
| 369 | return re.sub(r"\s+", " ", string).strip() | ||
| 370 | |||
| 371 | def rprovides_map(pkgdata_dir, pkg_dict): | ||
| 372 | # Map file -> pkg provider | ||
| 373 | rprov_map = {} | ||
| 374 | |||
| 375 | for pkg in pkg_dict: | ||
| 376 | path_to_pkgfile = os.path.join(pkgdata_dir, 'runtime-reverse', pkg) | ||
| 377 | if not os.path.isfile(path_to_pkgfile): | ||
| 378 | continue | ||
| 379 | with open(path_to_pkgfile) as f: | ||
| 380 | for line in f: | ||
| 381 | if line.startswith('RPROVIDES') or line.startswith('FILERPROVIDES'): | ||
| 382 | # List all components provided by pkg. | ||
| 383 | # Exclude version strings, i.e. those starting with ( | ||
| 384 | provides = [x for x in line.split()[1:] if not x.startswith('(')] | ||
| 385 | for prov in provides: | ||
| 386 | if prov in rprov_map: | ||
| 387 | rprov_map[prov].append(pkg) | ||
| 388 | else: | ||
| 389 | rprov_map[prov] = [pkg] | ||
| 390 | |||
| 391 | return rprov_map | ||
| 392 | |||
| 393 | def format_pkg_list(pkg_dict, ret_format=None, pkgdata_dir=None): | ||
| 394 | output = [] | ||
| 395 | |||
| 396 | if ret_format == "arch": | ||
| 397 | for pkg in sorted(pkg_dict): | ||
| 398 | output.append("%s %s" % (pkg, pkg_dict[pkg]["arch"])) | ||
| 399 | elif ret_format == "file": | ||
| 400 | for pkg in sorted(pkg_dict): | ||
| 401 | output.append("%s %s %s" % (pkg, pkg_dict[pkg]["filename"], pkg_dict[pkg]["arch"])) | ||
| 402 | elif ret_format == "ver": | ||
| 403 | for pkg in sorted(pkg_dict): | ||
| 404 | output.append("%s %s %s" % (pkg, pkg_dict[pkg]["arch"], pkg_dict[pkg]["ver"])) | ||
| 405 | elif ret_format == "deps": | ||
| 406 | rprov_map = rprovides_map(pkgdata_dir, pkg_dict) | ||
| 407 | for pkg in sorted(pkg_dict): | ||
| 408 | for dep in pkg_dict[pkg]["deps"]: | ||
| 409 | if dep in rprov_map: | ||
| 410 | # There could be multiple providers within the image | ||
| 411 | for pkg_provider in rprov_map[dep]: | ||
| 412 | output.append("%s|%s * %s [RPROVIDES]" % (pkg, pkg_provider, dep)) | ||
| 413 | else: | ||
| 414 | output.append("%s|%s" % (pkg, dep)) | ||
| 415 | else: | ||
| 416 | for pkg in sorted(pkg_dict): | ||
| 417 | output.append(pkg) | ||
| 418 | |||
| 419 | output_str = '\n'.join(output) | ||
| 420 | |||
| 421 | if output_str: | ||
| 422 | # make sure last line is newline terminated | ||
| 423 | output_str += '\n' | ||
| 424 | |||
| 425 | return output_str | ||
| 426 | |||
| 427 | |||
| 428 | # Helper function to get the host gcc version | ||
| 429 | def get_host_gcc_version(d, taskcontextonly=False): | ||
| 430 | import re, subprocess | ||
| 431 | |||
| 432 | if taskcontextonly and d.getVar('BB_WORKERCONTEXT') != '1': | ||
| 433 | return | ||
| 434 | |||
| 435 | try: | ||
| 436 | env = os.environ.copy() | ||
| 437 | # datastore PATH does not contain session PATH as set by environment-setup-... | ||
| 438 | # this breaks the install-buildtools use-case | ||
| 439 | # env["PATH"] = d.getVar("PATH") | ||
| 440 | output = subprocess.check_output("gcc --version", \ | ||
| 441 | shell=True, env=env, stderr=subprocess.STDOUT).decode("utf-8") | ||
| 442 | except subprocess.CalledProcessError as e: | ||
| 443 | bb.fatal("Error running gcc --version: %s" % (e.output.decode("utf-8"))) | ||
| 444 | |||
| 445 | match = re.match(r".* (\d+\.\d+)\.\d+.*", output.split('\n')[0]) | ||
| 446 | if not match: | ||
| 447 | bb.fatal("Can't get compiler version from gcc --version output") | ||
| 448 | |||
| 449 | version = match.group(1) | ||
| 450 | return version | ||
| 451 | |||
| 452 | @bb.parse.vardepsexclude("DEFAULTTUNE_MULTILIB_ORIGINAL", "OVERRIDES") | ||
| 453 | def get_multilib_datastore(variant, d): | ||
| 454 | localdata = bb.data.createCopy(d) | ||
| 455 | if variant: | ||
| 456 | overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + variant | ||
| 457 | localdata.setVar("OVERRIDES", overrides) | ||
| 458 | localdata.setVar("MLPREFIX", variant + "-") | ||
| 459 | else: | ||
| 460 | origdefault = localdata.getVar("DEFAULTTUNE_MULTILIB_ORIGINAL") | ||
| 461 | if origdefault: | ||
| 462 | localdata.setVar("DEFAULTTUNE", origdefault) | ||
| 463 | overrides = localdata.getVar("OVERRIDES", False).split(":") | ||
| 464 | overrides = ":".join([x for x in overrides if not x.startswith("virtclass-multilib-")]) | ||
| 465 | localdata.setVar("OVERRIDES", overrides) | ||
| 466 | localdata.setVar("MLPREFIX", "") | ||
| 467 | return localdata | ||
| 468 | |||
| 469 | def sh_quote(string): | ||
| 470 | import shlex | ||
| 471 | return shlex.quote(string) | ||
| 472 | |||
| 473 | def directory_size(root, blocksize=4096): | ||
| 474 | """ | ||
| 475 | Calculate the size of the directory, taking into account hard links, | ||
| 476 | rounding up every size to multiples of the blocksize. | ||
| 477 | """ | ||
| 478 | def roundup(size): | ||
| 479 | """ | ||
| 480 | Round the size up to the nearest multiple of the block size. | ||
| 481 | """ | ||
| 482 | import math | ||
| 483 | return math.ceil(size / blocksize) * blocksize | ||
| 484 | |||
| 485 | def getsize(filename): | ||
| 486 | """ | ||
| 487 | Get the size of the filename, not following symlinks, taking into | ||
| 488 | account hard links. | ||
| 489 | """ | ||
| 490 | stat = os.lstat(filename) | ||
| 491 | if stat.st_ino not in inodes: | ||
| 492 | inodes.add(stat.st_ino) | ||
| 493 | return stat.st_size | ||
| 494 | else: | ||
| 495 | return 0 | ||
| 496 | |||
| 497 | inodes = set() | ||
| 498 | total = 0 | ||
| 499 | for root, dirs, files in os.walk(root): | ||
| 500 | total += sum(roundup(getsize(os.path.join(root, name))) for name in files) | ||
| 501 | total += roundup(getsize(root)) | ||
| 502 | return total | ||
| 503 | |||
| 504 | # Update the mtime of a file, skip if permission/read-only issues | ||
| 505 | def touch(filename): | ||
| 506 | try: | ||
| 507 | os.utime(filename, None) | ||
| 508 | except PermissionError: | ||
| 509 | pass | ||
| 510 | except OSError as e: | ||
| 511 | # Handle read-only file systems gracefully | ||
| 512 | if e.errno != errno.EROFS: | ||
| 513 | raise e | ||
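A few call sketches for the helpers above that do not need a datastore, assuming a BitBake environment where oe.utils and its bb imports resolve; the values are illustrative:

    from oe.utils import parallel_make_value, trim_version, squashspaces

    parallel_make_value("-j 10 -l 32".split())   # 10
    trim_version("1.2.3", 2)                     # '1.2'
    squashspaces("  a   b \t c ")                # 'a b c'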
