diff options
Diffstat (limited to 'meta/lib/oe')
-rw-r--r-- | meta/lib/oe/copy_buildsystem.py | 6 | ||||
-rw-r--r-- | meta/lib/oe/cve_check.py | 154 | ||||
-rw-r--r-- | meta/lib/oe/gpg_sign.py | 2 | ||||
-rw-r--r-- | meta/lib/oe/license.py | 6 | ||||
-rw-r--r-- | meta/lib/oe/package_manager.py | 15 | ||||
-rw-r--r-- | meta/lib/oe/packagedata.py | 11 | ||||
-rw-r--r-- | meta/lib/oe/patch.py | 6 | ||||
-rw-r--r-- | meta/lib/oe/reproducible.py | 13 | ||||
-rw-r--r-- | meta/lib/oe/rootfs.py | 8 | ||||
-rw-r--r-- | meta/lib/oe/sbom.py | 84 | ||||
-rw-r--r-- | meta/lib/oe/spdx.py | 357 | ||||
-rw-r--r-- | meta/lib/oe/sstatesig.py | 5 | ||||
-rw-r--r-- | meta/lib/oe/terminal.py | 20 | ||||
-rw-r--r-- | meta/lib/oe/utils.py | 3 |
14 files changed, 664 insertions, 26 deletions
diff --git a/meta/lib/oe/copy_buildsystem.py b/meta/lib/oe/copy_buildsystem.py index 31a84f5b06..d97bf9d1b9 100644 --- a/meta/lib/oe/copy_buildsystem.py +++ b/meta/lib/oe/copy_buildsystem.py | |||
@@ -20,7 +20,7 @@ def _smart_copy(src, dest): | |||
20 | mode = os.stat(src).st_mode | 20 | mode = os.stat(src).st_mode |
21 | if stat.S_ISDIR(mode): | 21 | if stat.S_ISDIR(mode): |
22 | bb.utils.mkdirhier(dest) | 22 | bb.utils.mkdirhier(dest) |
23 | cmd = "tar --exclude='.git' --xattrs --xattrs-include='*' -chf - -C %s -p . \ | 23 | cmd = "tar --exclude='.git' --exclude='__pycache__' --xattrs --xattrs-include='*' -chf - -C %s -p . \ |
24 | | tar --xattrs --xattrs-include='*' -xf - -C %s" % (src, dest) | 24 | | tar --xattrs --xattrs-include='*' -xf - -C %s" % (src, dest) |
25 | subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) | 25 | subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) |
26 | else: | 26 | else: |
@@ -259,7 +259,7 @@ def create_locked_sstate_cache(lockedsigs, input_sstate_cache, output_sstate_cac | |||
259 | bb.note('Generating sstate-cache...') | 259 | bb.note('Generating sstate-cache...') |
260 | 260 | ||
261 | nativelsbstring = d.getVar('NATIVELSBSTRING') | 261 | nativelsbstring = d.getVar('NATIVELSBSTRING') |
262 | bb.process.run("gen-lockedsig-cache %s %s %s %s %s" % (lockedsigs, input_sstate_cache, output_sstate_cache, nativelsbstring, filterfile or '')) | 262 | bb.process.run("PYTHONDONTWRITEBYTECODE=1 gen-lockedsig-cache %s %s %s %s %s" % (lockedsigs, input_sstate_cache, output_sstate_cache, nativelsbstring, filterfile or '')) |
263 | if fixedlsbstring and nativelsbstring != fixedlsbstring: | 263 | if fixedlsbstring and nativelsbstring != fixedlsbstring: |
264 | nativedir = output_sstate_cache + '/' + nativelsbstring | 264 | nativedir = output_sstate_cache + '/' + nativelsbstring |
265 | if os.path.isdir(nativedir): | 265 | if os.path.isdir(nativedir): |
@@ -286,7 +286,7 @@ def check_sstate_task_list(d, targets, filteroutfile, cmdprefix='', cwd=None, lo | |||
286 | logparam = '-l %s' % logfile | 286 | logparam = '-l %s' % logfile |
287 | else: | 287 | else: |
288 | logparam = '' | 288 | logparam = '' |
289 | cmd = "%sBB_SETSCENE_ENFORCE=1 PSEUDO_DISABLED=1 oe-check-sstate %s -s -o %s %s" % (cmdprefix, targets, filteroutfile, logparam) | 289 | cmd = "%sPYTHONDONTWRITEBYTECODE=1 BB_SETSCENE_ENFORCE=1 PSEUDO_DISABLED=1 oe-check-sstate %s -s -o %s %s" % (cmdprefix, targets, filteroutfile, logparam) |
290 | env = dict(d.getVar('BB_ORIGENV', False)) | 290 | env = dict(d.getVar('BB_ORIGENV', False)) |
291 | env.pop('BUILDDIR', '') | 291 | env.pop('BUILDDIR', '') |
292 | env.pop('BBPATH', '') | 292 | env.pop('BBPATH', '') |
diff --git a/meta/lib/oe/cve_check.py b/meta/lib/oe/cve_check.py index ce755f940a..ed4af18ced 100644 --- a/meta/lib/oe/cve_check.py +++ b/meta/lib/oe/cve_check.py | |||
@@ -11,8 +11,13 @@ _Version = collections.namedtuple( | |||
11 | class Version(): | 11 | class Version(): |
12 | 12 | ||
13 | def __init__(self, version, suffix=None): | 13 | def __init__(self, version, suffix=None): |
14 | |||
15 | suffixes = ["alphabetical", "patch"] | ||
16 | |||
14 | if str(suffix) == "alphabetical": | 17 | if str(suffix) == "alphabetical": |
15 | version_pattern = r"""r?v?(?:(?P<release>[0-9]+(?:[-\.][0-9]+)*)(?P<patch>[-_\.]?(?P<patch_l>[a-z]))?(?P<pre>[-_\.]?(?P<pre_l>(rc|alpha|beta|pre|preview|dev))[-_\.]?(?P<pre_v>[0-9]+)?)?)(.*)?""" | 18 | version_pattern = r"""r?v?(?:(?P<release>[0-9]+(?:[-\.][0-9]+)*)(?P<patch>[-_\.]?(?P<patch_l>[a-z]))?(?P<pre>[-_\.]?(?P<pre_l>(rc|alpha|beta|pre|preview|dev))[-_\.]?(?P<pre_v>[0-9]+)?)?)(.*)?""" |
19 | elif str(suffix) == "patch": | ||
20 | version_pattern = r"""r?v?(?:(?P<release>[0-9]+(?:[-\.][0-9]+)*)(?P<patch>[-_\.]?(p|patch)(?P<patch_l>[0-9]+))?(?P<pre>[-_\.]?(?P<pre_l>(rc|alpha|beta|pre|preview|dev))[-_\.]?(?P<pre_v>[0-9]+)?)?)(.*)?""" | ||
16 | else: | 21 | else: |
17 | version_pattern = r"""r?v?(?:(?P<release>[0-9]+(?:[-\.][0-9]+)*)(?P<pre>[-_\.]?(?P<pre_l>(rc|alpha|beta|pre|preview|dev))[-_\.]?(?P<pre_v>[0-9]+)?)?)(.*)?""" | 22 | version_pattern = r"""r?v?(?:(?P<release>[0-9]+(?:[-\.][0-9]+)*)(?P<pre>[-_\.]?(?P<pre_l>(rc|alpha|beta|pre|preview|dev))[-_\.]?(?P<pre_v>[0-9]+)?)?)(.*)?""" |
18 | regex = re.compile(r"^\s*" + version_pattern + r"\s*$", re.VERBOSE | re.IGNORECASE) | 23 | regex = re.compile(r"^\s*" + version_pattern + r"\s*$", re.VERBOSE | re.IGNORECASE) |
@@ -23,7 +28,7 @@ class Version(): | |||
23 | 28 | ||
24 | self._version = _Version( | 29 | self._version = _Version( |
25 | release=tuple(int(i) for i in match.group("release").replace("-",".").split(".")), | 30 | release=tuple(int(i) for i in match.group("release").replace("-",".").split(".")), |
26 | patch_l=match.group("patch_l") if str(suffix) == "alphabetical" and match.group("patch_l") else "", | 31 | patch_l=match.group("patch_l") if str(suffix) in suffixes and match.group("patch_l") else "", |
27 | pre_l=match.group("pre_l"), | 32 | pre_l=match.group("pre_l"), |
28 | pre_v=match.group("pre_v") | 33 | pre_v=match.group("pre_v") |
29 | ) | 34 | ) |
@@ -58,3 +63,150 @@ def _cmpkey(release, patch_l, pre_l, pre_v): | |||
58 | else: | 63 | else: |
59 | _pre = float(pre_v) if pre_v else float('-inf') | 64 | _pre = float(pre_v) if pre_v else float('-inf') |
60 | return _release, _patch, _pre | 65 | return _release, _patch, _pre |
66 | |||
67 | def cve_check_merge_jsons(output, data): | ||
68 | """ | ||
69 | Merge the data in the "package" property to the main data file | ||
70 | output | ||
71 | """ | ||
72 | if output["version"] != data["version"]: | ||
73 | bb.error("Version mismatch when merging JSON outputs") | ||
74 | return | ||
75 | |||
76 | for product in output["package"]: | ||
77 | if product["name"] == data["package"][0]["name"]: | ||
78 | bb.error("Error adding the same package %s twice" % product["name"]) | ||
79 | return | ||
80 | |||
81 | output["package"].append(data["package"][0]) | ||
82 | |||
83 | def update_symlinks(target_path, link_path): | ||
84 | """ | ||
85 | Update a symbolic link link_path to point to target_path. | ||
86 | Remove the link and recreate it if exist and is different. | ||
87 | """ | ||
88 | if link_path != target_path and os.path.exists(target_path): | ||
89 | if os.path.exists(os.path.realpath(link_path)): | ||
90 | os.remove(link_path) | ||
91 | os.symlink(os.path.basename(target_path), link_path) | ||
92 | |||
93 | def get_patched_cves(d): | ||
94 | """ | ||
95 | Get patches that solve CVEs using the "CVE: " tag. | ||
96 | """ | ||
97 | |||
98 | import re | ||
99 | import oe.patch | ||
100 | |||
101 | pn = d.getVar("PN") | ||
102 | cve_match = re.compile("CVE:( CVE\-\d{4}\-\d+)+") | ||
103 | |||
104 | # Matches the last "CVE-YYYY-ID" in the file name, also if written | ||
105 | # in lowercase. Possible to have multiple CVE IDs in a single | ||
106 | # file name, but only the last one will be detected from the file name. | ||
107 | # However, patch files contents addressing multiple CVE IDs are supported | ||
108 | # (cve_match regular expression) | ||
109 | |||
110 | cve_file_name_match = re.compile(".*([Cc][Vv][Ee]\-\d{4}\-\d+)") | ||
111 | |||
112 | patched_cves = set() | ||
113 | bb.debug(2, "Looking for patches that solves CVEs for %s" % pn) | ||
114 | for url in oe.patch.src_patches(d): | ||
115 | patch_file = bb.fetch.decodeurl(url)[2] | ||
116 | |||
117 | # Check patch file name for CVE ID | ||
118 | fname_match = cve_file_name_match.search(patch_file) | ||
119 | if fname_match: | ||
120 | cve = fname_match.group(1).upper() | ||
121 | patched_cves.add(cve) | ||
122 | bb.debug(2, "Found CVE %s from patch file name %s" % (cve, patch_file)) | ||
123 | |||
124 | # Remote patches won't be present and compressed patches won't be | ||
125 | # unpacked, so say we're not scanning them | ||
126 | if not os.path.isfile(patch_file): | ||
127 | bb.note("%s is remote or compressed, not scanning content" % patch_file) | ||
128 | continue | ||
129 | |||
130 | with open(patch_file, "r", encoding="utf-8") as f: | ||
131 | try: | ||
132 | patch_text = f.read() | ||
133 | except UnicodeDecodeError: | ||
134 | bb.debug(1, "Failed to read patch %s using UTF-8 encoding" | ||
135 | " trying with iso8859-1" % patch_file) | ||
136 | f.close() | ||
137 | with open(patch_file, "r", encoding="iso8859-1") as f: | ||
138 | patch_text = f.read() | ||
139 | |||
140 | # Search for one or more "CVE: " lines | ||
141 | text_match = False | ||
142 | for match in cve_match.finditer(patch_text): | ||
143 | # Get only the CVEs without the "CVE: " tag | ||
144 | cves = patch_text[match.start()+5:match.end()] | ||
145 | for cve in cves.split(): | ||
146 | bb.debug(2, "Patch %s solves %s" % (patch_file, cve)) | ||
147 | patched_cves.add(cve) | ||
148 | text_match = True | ||
149 | |||
150 | if not fname_match and not text_match: | ||
151 | bb.debug(2, "Patch %s doesn't solve CVEs" % patch_file) | ||
152 | |||
153 | return patched_cves | ||
154 | |||
155 | |||
156 | def get_cpe_ids(cve_product, version): | ||
157 | """ | ||
158 | Get list of CPE identifiers for the given product and version | ||
159 | """ | ||
160 | |||
161 | version = version.split("+git")[0] | ||
162 | |||
163 | cpe_ids = [] | ||
164 | for product in cve_product.split(): | ||
165 | # CVE_PRODUCT in recipes may include vendor information for CPE identifiers. If not, | ||
166 | # use wildcard for vendor. | ||
167 | if ":" in product: | ||
168 | vendor, product = product.split(":", 1) | ||
169 | else: | ||
170 | vendor = "*" | ||
171 | |||
172 | cpe_id = 'cpe:2.3:a:{}:{}:{}:*:*:*:*:*:*:*'.format(vendor, product, version) | ||
173 | cpe_ids.append(cpe_id) | ||
174 | |||
175 | return cpe_ids | ||
176 | |||
177 | def convert_cve_version(version): | ||
178 | """ | ||
179 | This function converts from CVE format to Yocto version format. | ||
180 | eg 8.3_p1 -> 8.3p1, 6.2_rc1 -> 6.2-rc1 | ||
181 | |||
182 | Unless it is redefined using CVE_VERSION in the recipe, | ||
183 | cve_check uses the version in the name of the recipe (${PV}) | ||
184 | to check vulnerabilities against a CVE in the database downloaded from NVD. | ||
185 | |||
186 | When the version has an update, i.e. | ||
187 | "p1" in OpenSSH 8.3p1, | ||
188 | "-rc1" in linux kernel 6.2-rc1, | ||
189 | the database stores the version as version_update (8.3_p1, 6.2_rc1). | ||
190 | Therefore, we must transform this version before comparing to the | ||
191 | recipe version. | ||
192 | |||
193 | In this case, the parameter of the function is 8.3_p1. | ||
194 | If the version uses the Release Candidate format, "rc", | ||
195 | this function replaces the '_' by '-'. | ||
196 | If the version uses the Update format, "p", | ||
197 | this function removes the '_' completely. | ||
198 | """ | ||
199 | import re | ||
200 | |||
201 | matches = re.match('^([0-9.]+)_((p|rc)[0-9]+)$', version) | ||
202 | |||
203 | if not matches: | ||
204 | return version | ||
205 | |||
206 | version = matches.group(1) | ||
207 | update = matches.group(2) | ||
208 | |||
209 | if matches.group(3) == "rc": | ||
210 | return version + '-' + update | ||
211 | |||
212 | return version + update | ||
diff --git a/meta/lib/oe/gpg_sign.py b/meta/lib/oe/gpg_sign.py index 7634d7ef1d..492f096eaa 100644 --- a/meta/lib/oe/gpg_sign.py +++ b/meta/lib/oe/gpg_sign.py | |||
@@ -111,7 +111,7 @@ class LocalSigner(object): | |||
111 | 111 | ||
112 | def verify(self, sig_file): | 112 | def verify(self, sig_file): |
113 | """Verify signature""" | 113 | """Verify signature""" |
114 | cmd = self.gpg_cmd + [" --verify", "--no-permission-warning"] | 114 | cmd = self.gpg_cmd + ["--verify", "--no-permission-warning"] |
115 | if self.gpg_path: | 115 | if self.gpg_path: |
116 | cmd += ["--homedir", self.gpg_path] | 116 | cmd += ["--homedir", self.gpg_path] |
117 | 117 | ||
diff --git a/meta/lib/oe/license.py b/meta/lib/oe/license.py index c1274a61de..c4efbe142b 100644 --- a/meta/lib/oe/license.py +++ b/meta/lib/oe/license.py | |||
@@ -81,6 +81,9 @@ class FlattenVisitor(LicenseVisitor): | |||
81 | def visit_Str(self, node): | 81 | def visit_Str(self, node): |
82 | self.licenses.append(node.s) | 82 | self.licenses.append(node.s) |
83 | 83 | ||
84 | def visit_Constant(self, node): | ||
85 | self.licenses.append(node.value) | ||
86 | |||
84 | def visit_BinOp(self, node): | 87 | def visit_BinOp(self, node): |
85 | if isinstance(node.op, ast.BitOr): | 88 | if isinstance(node.op, ast.BitOr): |
86 | left = FlattenVisitor(self.choose_licenses) | 89 | left = FlattenVisitor(self.choose_licenses) |
@@ -234,6 +237,9 @@ class ListVisitor(LicenseVisitor): | |||
234 | def visit_Str(self, node): | 237 | def visit_Str(self, node): |
235 | self.licenses.add(node.s) | 238 | self.licenses.add(node.s) |
236 | 239 | ||
240 | def visit_Constant(self, node): | ||
241 | self.licenses.add(node.value) | ||
242 | |||
237 | def list_licenses(licensestr): | 243 | def list_licenses(licensestr): |
238 | """Simply get a list of all licenses mentioned in a license string. | 244 | """Simply get a list of all licenses mentioned in a license string. |
239 | Binary operators are not applied or taken into account in any way""" | 245 | Binary operators are not applied or taken into account in any way""" |
diff --git a/meta/lib/oe/package_manager.py b/meta/lib/oe/package_manager.py index b0660411ea..502dfbe3ed 100644 --- a/meta/lib/oe/package_manager.py +++ b/meta/lib/oe/package_manager.py | |||
@@ -403,7 +403,7 @@ class PackageManager(object, metaclass=ABCMeta): | |||
403 | bb.utils.remove(self.intercepts_dir, True) | 403 | bb.utils.remove(self.intercepts_dir, True) |
404 | bb.utils.mkdirhier(self.intercepts_dir) | 404 | bb.utils.mkdirhier(self.intercepts_dir) |
405 | for intercept in postinst_intercepts: | 405 | for intercept in postinst_intercepts: |
406 | bb.utils.copyfile(intercept, os.path.join(self.intercepts_dir, os.path.basename(intercept))) | 406 | shutil.copy(intercept, os.path.join(self.intercepts_dir, os.path.basename(intercept))) |
407 | 407 | ||
408 | @abstractmethod | 408 | @abstractmethod |
409 | def _handle_intercept_failure(self, failed_script): | 409 | def _handle_intercept_failure(self, failed_script): |
@@ -611,12 +611,13 @@ class PackageManager(object, metaclass=ABCMeta): | |||
611 | "'%s' returned %d:\n%s" % | 611 | "'%s' returned %d:\n%s" % |
612 | (' '.join(cmd), e.returncode, e.output.decode("utf-8"))) | 612 | (' '.join(cmd), e.returncode, e.output.decode("utf-8"))) |
613 | 613 | ||
614 | target_arch = self.d.getVar('TARGET_ARCH') | 614 | if self.d.getVar('IMAGE_LOCALES_ARCHIVE') == '1': |
615 | localedir = oe.path.join(self.target_rootfs, self.d.getVar("libdir"), "locale") | 615 | target_arch = self.d.getVar('TARGET_ARCH') |
616 | if os.path.exists(localedir) and os.listdir(localedir): | 616 | localedir = oe.path.join(self.target_rootfs, self.d.getVar("libdir"), "locale") |
617 | generate_locale_archive(self.d, self.target_rootfs, target_arch, localedir) | 617 | if os.path.exists(localedir) and os.listdir(localedir): |
618 | # And now delete the binary locales | 618 | generate_locale_archive(self.d, self.target_rootfs, target_arch, localedir) |
619 | self.remove(fnmatch.filter(self.list_installed(), "glibc-binary-localedata-*"), False) | 619 | # And now delete the binary locales |
620 | self.remove(fnmatch.filter(self.list_installed(), "glibc-binary-localedata-*"), False) | ||
620 | 621 | ||
621 | def deploy_dir_lock(self): | 622 | def deploy_dir_lock(self): |
622 | if self.deploy_dir is None: | 623 | if self.deploy_dir is None: |
diff --git a/meta/lib/oe/packagedata.py b/meta/lib/oe/packagedata.py index a82085a792..feb834c0e3 100644 --- a/meta/lib/oe/packagedata.py +++ b/meta/lib/oe/packagedata.py | |||
@@ -57,6 +57,17 @@ def read_subpkgdata_dict(pkg, d): | |||
57 | ret[newvar] = subd[var] | 57 | ret[newvar] = subd[var] |
58 | return ret | 58 | return ret |
59 | 59 | ||
60 | def read_subpkgdata_extended(pkg, d): | ||
61 | import json | ||
62 | import gzip | ||
63 | |||
64 | fn = d.expand("${PKGDATA_DIR}/extended/%s.json.gz" % pkg) | ||
65 | try: | ||
66 | with gzip.open(fn, "rt", encoding="utf-8") as f: | ||
67 | return json.load(f) | ||
68 | except FileNotFoundError: | ||
69 | return None | ||
70 | |||
60 | def _pkgmap(d): | 71 | def _pkgmap(d): |
61 | """Return a dictionary mapping package to recipe name.""" | 72 | """Return a dictionary mapping package to recipe name.""" |
62 | 73 | ||
diff --git a/meta/lib/oe/patch.py b/meta/lib/oe/patch.py index 7cd8436da5..feb6ee7082 100644 --- a/meta/lib/oe/patch.py +++ b/meta/lib/oe/patch.py | |||
@@ -2,6 +2,9 @@ | |||
2 | # SPDX-License-Identifier: GPL-2.0-only | 2 | # SPDX-License-Identifier: GPL-2.0-only |
3 | # | 3 | # |
4 | 4 | ||
5 | import os | ||
6 | import shlex | ||
7 | import subprocess | ||
5 | import oe.path | 8 | import oe.path |
6 | import oe.types | 9 | import oe.types |
7 | 10 | ||
@@ -24,7 +27,6 @@ class CmdError(bb.BBHandledException): | |||
24 | 27 | ||
25 | 28 | ||
26 | def runcmd(args, dir = None): | 29 | def runcmd(args, dir = None): |
27 | import pipes | ||
28 | import subprocess | 30 | import subprocess |
29 | 31 | ||
30 | if dir: | 32 | if dir: |
@@ -35,7 +37,7 @@ def runcmd(args, dir = None): | |||
35 | # print("cwd: %s -> %s" % (olddir, dir)) | 37 | # print("cwd: %s -> %s" % (olddir, dir)) |
36 | 38 | ||
37 | try: | 39 | try: |
38 | args = [ pipes.quote(str(arg)) for arg in args ] | 40 | args = [ shlex.quote(str(arg)) for arg in args ] |
39 | cmd = " ".join(args) | 41 | cmd = " ".join(args) |
40 | # print("cmd: %s" % cmd) | 42 | # print("cmd: %s" % cmd) |
41 | (exitstatus, output) = subprocess.getstatusoutput(cmd) | 43 | (exitstatus, output) = subprocess.getstatusoutput(cmd) |
diff --git a/meta/lib/oe/reproducible.py b/meta/lib/oe/reproducible.py index 0fb02ccdb0..1ed79b18ca 100644 --- a/meta/lib/oe/reproducible.py +++ b/meta/lib/oe/reproducible.py | |||
@@ -41,7 +41,7 @@ def find_git_folder(d, sourcedir): | |||
41 | for root, dirs, files in os.walk(workdir, topdown=True): | 41 | for root, dirs, files in os.walk(workdir, topdown=True): |
42 | dirs[:] = [d for d in dirs if d not in exclude] | 42 | dirs[:] = [d for d in dirs if d not in exclude] |
43 | if '.git' in dirs: | 43 | if '.git' in dirs: |
44 | return root | 44 | return os.path.join(root, ".git") |
45 | 45 | ||
46 | bb.warn("Failed to find a git repository in WORKDIR: %s" % workdir) | 46 | bb.warn("Failed to find a git repository in WORKDIR: %s" % workdir) |
47 | return None | 47 | return None |
@@ -62,7 +62,8 @@ def get_source_date_epoch_from_git(d, sourcedir): | |||
62 | return None | 62 | return None |
63 | 63 | ||
64 | bb.debug(1, "git repository: %s" % gitpath) | 64 | bb.debug(1, "git repository: %s" % gitpath) |
65 | p = subprocess.run(['git', '--git-dir', gitpath, 'log', '-1', '--pretty=%ct'], check=True, stdout=subprocess.PIPE) | 65 | p = subprocess.run(['git', '-c', 'log.showSignature=false', '--git-dir', gitpath, 'log', '-1', '--pretty=%ct'], |
66 | check=True, stdout=subprocess.PIPE) | ||
66 | return int(p.stdout.decode('utf-8')) | 67 | return int(p.stdout.decode('utf-8')) |
67 | 68 | ||
68 | def get_source_date_epoch_from_youngest_file(d, sourcedir): | 69 | def get_source_date_epoch_from_youngest_file(d, sourcedir): |
@@ -90,8 +91,12 @@ def get_source_date_epoch_from_youngest_file(d, sourcedir): | |||
90 | bb.debug(1, "Newest file found: %s" % newest_file) | 91 | bb.debug(1, "Newest file found: %s" % newest_file) |
91 | return source_date_epoch | 92 | return source_date_epoch |
92 | 93 | ||
93 | def fixed_source_date_epoch(): | 94 | def fixed_source_date_epoch(d): |
94 | bb.debug(1, "No tarball or git repo found to determine SOURCE_DATE_EPOCH") | 95 | bb.debug(1, "No tarball or git repo found to determine SOURCE_DATE_EPOCH") |
96 | source_date_epoch = d.getVar('SOURCE_DATE_EPOCH_FALLBACK') | ||
97 | if source_date_epoch: | ||
98 | bb.debug(1, "Using SOURCE_DATE_EPOCH_FALLBACK") | ||
99 | return int(source_date_epoch) | ||
95 | return 0 | 100 | return 0 |
96 | 101 | ||
97 | def get_source_date_epoch(d, sourcedir): | 102 | def get_source_date_epoch(d, sourcedir): |
@@ -99,6 +104,6 @@ def get_source_date_epoch(d, sourcedir): | |||
99 | get_source_date_epoch_from_git(d, sourcedir) or | 104 | get_source_date_epoch_from_git(d, sourcedir) or |
100 | get_source_date_epoch_from_known_files(d, sourcedir) or | 105 | get_source_date_epoch_from_known_files(d, sourcedir) or |
101 | get_source_date_epoch_from_youngest_file(d, sourcedir) or | 106 | get_source_date_epoch_from_youngest_file(d, sourcedir) or |
102 | fixed_source_date_epoch() # Last resort | 107 | fixed_source_date_epoch(d) # Last resort |
103 | ) | 108 | ) |
104 | 109 | ||
diff --git a/meta/lib/oe/rootfs.py b/meta/lib/oe/rootfs.py index cd65e62030..5391c25af9 100644 --- a/meta/lib/oe/rootfs.py +++ b/meta/lib/oe/rootfs.py | |||
@@ -167,7 +167,7 @@ class Rootfs(object, metaclass=ABCMeta): | |||
167 | pass | 167 | pass |
168 | os.rename(self.image_rootfs, self.image_rootfs + '-dbg') | 168 | os.rename(self.image_rootfs, self.image_rootfs + '-dbg') |
169 | 169 | ||
170 | bb.note(" Restoreing original rootfs...") | 170 | bb.note(" Restoring original rootfs...") |
171 | os.rename(self.image_rootfs + '-orig', self.image_rootfs) | 171 | os.rename(self.image_rootfs + '-orig', self.image_rootfs) |
172 | 172 | ||
173 | def _exec_shell_cmd(self, cmd): | 173 | def _exec_shell_cmd(self, cmd): |
@@ -304,7 +304,7 @@ class Rootfs(object, metaclass=ABCMeta): | |||
304 | def _check_for_kernel_modules(self, modules_dir): | 304 | def _check_for_kernel_modules(self, modules_dir): |
305 | for root, dirs, files in os.walk(modules_dir, topdown=True): | 305 | for root, dirs, files in os.walk(modules_dir, topdown=True): |
306 | for name in files: | 306 | for name in files: |
307 | found_ko = name.endswith(".ko") | 307 | found_ko = name.endswith((".ko", ".ko.gz", ".ko.xz")) |
308 | if found_ko: | 308 | if found_ko: |
309 | return found_ko | 309 | return found_ko |
310 | return False | 310 | return False |
@@ -321,7 +321,9 @@ class Rootfs(object, metaclass=ABCMeta): | |||
321 | if not os.path.exists(kernel_abi_ver_file): | 321 | if not os.path.exists(kernel_abi_ver_file): |
322 | bb.fatal("No kernel-abiversion file found (%s), cannot run depmod, aborting" % kernel_abi_ver_file) | 322 | bb.fatal("No kernel-abiversion file found (%s), cannot run depmod, aborting" % kernel_abi_ver_file) |
323 | 323 | ||
324 | kernel_ver = open(kernel_abi_ver_file).read().strip(' \n') | 324 | with open(kernel_abi_ver_file) as f: |
325 | kernel_ver = f.read().strip(' \n') | ||
326 | |||
325 | versioned_modules_dir = os.path.join(self.image_rootfs, modules_dir, kernel_ver) | 327 | versioned_modules_dir = os.path.join(self.image_rootfs, modules_dir, kernel_ver) |
326 | 328 | ||
327 | bb.utils.mkdirhier(versioned_modules_dir) | 329 | bb.utils.mkdirhier(versioned_modules_dir) |
diff --git a/meta/lib/oe/sbom.py b/meta/lib/oe/sbom.py new file mode 100644 index 0000000000..22ed5070ea --- /dev/null +++ b/meta/lib/oe/sbom.py | |||
@@ -0,0 +1,84 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | import collections | ||
8 | |||
9 | DepRecipe = collections.namedtuple("DepRecipe", ("doc", "doc_sha1", "recipe")) | ||
10 | DepSource = collections.namedtuple("DepSource", ("doc", "doc_sha1", "recipe", "file")) | ||
11 | |||
12 | |||
13 | def get_recipe_spdxid(d): | ||
14 | return "SPDXRef-%s-%s" % ("Recipe", d.getVar("PN")) | ||
15 | |||
16 | |||
17 | def get_download_spdxid(d, idx): | ||
18 | return "SPDXRef-Download-%s-%d" % (d.getVar("PN"), idx) | ||
19 | |||
20 | |||
21 | def get_package_spdxid(pkg): | ||
22 | return "SPDXRef-Package-%s" % pkg | ||
23 | |||
24 | |||
25 | def get_source_file_spdxid(d, idx): | ||
26 | return "SPDXRef-SourceFile-%s-%d" % (d.getVar("PN"), idx) | ||
27 | |||
28 | |||
29 | def get_packaged_file_spdxid(pkg, idx): | ||
30 | return "SPDXRef-PackagedFile-%s-%d" % (pkg, idx) | ||
31 | |||
32 | |||
33 | def get_image_spdxid(img): | ||
34 | return "SPDXRef-Image-%s" % img | ||
35 | |||
36 | |||
37 | def get_sdk_spdxid(sdk): | ||
38 | return "SPDXRef-SDK-%s" % sdk | ||
39 | |||
40 | |||
41 | def write_doc(d, spdx_doc, subdir, spdx_deploy=None, indent=None): | ||
42 | from pathlib import Path | ||
43 | |||
44 | if spdx_deploy is None: | ||
45 | spdx_deploy = Path(d.getVar("SPDXDEPLOY")) | ||
46 | |||
47 | dest = spdx_deploy / subdir / (spdx_doc.name + ".spdx.json") | ||
48 | dest.parent.mkdir(exist_ok=True, parents=True) | ||
49 | with dest.open("wb") as f: | ||
50 | doc_sha1 = spdx_doc.to_json(f, sort_keys=True, indent=indent) | ||
51 | |||
52 | l = spdx_deploy / "by-namespace" / spdx_doc.documentNamespace.replace("/", "_") | ||
53 | l.parent.mkdir(exist_ok=True, parents=True) | ||
54 | l.symlink_to(os.path.relpath(dest, l.parent)) | ||
55 | |||
56 | return doc_sha1 | ||
57 | |||
58 | |||
59 | def read_doc(fn): | ||
60 | import hashlib | ||
61 | import oe.spdx | ||
62 | import io | ||
63 | import contextlib | ||
64 | |||
65 | @contextlib.contextmanager | ||
66 | def get_file(): | ||
67 | if isinstance(fn, io.IOBase): | ||
68 | yield fn | ||
69 | else: | ||
70 | with fn.open("rb") as f: | ||
71 | yield f | ||
72 | |||
73 | with get_file() as f: | ||
74 | sha1 = hashlib.sha1() | ||
75 | while True: | ||
76 | chunk = f.read(4096) | ||
77 | if not chunk: | ||
78 | break | ||
79 | sha1.update(chunk) | ||
80 | |||
81 | f.seek(0) | ||
82 | doc = oe.spdx.SPDXDocument.from_json(f) | ||
83 | |||
84 | return (doc, sha1.hexdigest()) | ||
diff --git a/meta/lib/oe/spdx.py b/meta/lib/oe/spdx.py new file mode 100644 index 0000000000..7aaf2af5ed --- /dev/null +++ b/meta/lib/oe/spdx.py | |||
@@ -0,0 +1,357 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | # | ||
8 | # This library is intended to capture the JSON SPDX specification in a type | ||
9 | # safe manner. It is not intended to encode any particular OE specific | ||
10 | # behaviors, see the sbom.py for that. | ||
11 | # | ||
12 | # The documented SPDX spec document doesn't cover the JSON syntax for | ||
13 | # particular configuration, which can make it hard to determine what the JSON | ||
14 | # syntax should be. I've found it is actually much simpler to read the official | ||
15 | # SPDX JSON schema which can be found here: https://github.com/spdx/spdx-spec | ||
16 | # in schemas/spdx-schema.json | ||
17 | # | ||
18 | |||
19 | import hashlib | ||
20 | import itertools | ||
21 | import json | ||
22 | |||
23 | SPDX_VERSION = "2.2" | ||
24 | |||
25 | |||
26 | # | ||
27 | # The following are the support classes that are used to implement SPDX object | ||
28 | # | ||
29 | |||
30 | class _Property(object): | ||
31 | """ | ||
32 | A generic SPDX object property. The different types will derive from this | ||
33 | class | ||
34 | """ | ||
35 | |||
36 | def __init__(self, *, default=None): | ||
37 | self.default = default | ||
38 | |||
39 | def setdefault(self, dest, name): | ||
40 | if self.default is not None: | ||
41 | dest.setdefault(name, self.default) | ||
42 | |||
43 | |||
44 | class _String(_Property): | ||
45 | """ | ||
46 | A scalar string property for an SPDX object | ||
47 | """ | ||
48 | |||
49 | def __init__(self, **kwargs): | ||
50 | super().__init__(**kwargs) | ||
51 | |||
52 | def set_property(self, attrs, name): | ||
53 | def get_helper(obj): | ||
54 | return obj._spdx[name] | ||
55 | |||
56 | def set_helper(obj, value): | ||
57 | obj._spdx[name] = value | ||
58 | |||
59 | def del_helper(obj): | ||
60 | del obj._spdx[name] | ||
61 | |||
62 | attrs[name] = property(get_helper, set_helper, del_helper) | ||
63 | |||
64 | def init(self, source): | ||
65 | return source | ||
66 | |||
67 | |||
68 | class _Object(_Property): | ||
69 | """ | ||
70 | A scalar SPDX object property of a SPDX object | ||
71 | """ | ||
72 | |||
73 | def __init__(self, cls, **kwargs): | ||
74 | super().__init__(**kwargs) | ||
75 | self.cls = cls | ||
76 | |||
77 | def set_property(self, attrs, name): | ||
78 | def get_helper(obj): | ||
79 | if not name in obj._spdx: | ||
80 | obj._spdx[name] = self.cls() | ||
81 | return obj._spdx[name] | ||
82 | |||
83 | def set_helper(obj, value): | ||
84 | obj._spdx[name] = value | ||
85 | |||
86 | def del_helper(obj): | ||
87 | del obj._spdx[name] | ||
88 | |||
89 | attrs[name] = property(get_helper, set_helper) | ||
90 | |||
91 | def init(self, source): | ||
92 | return self.cls(**source) | ||
93 | |||
94 | |||
95 | class _ListProperty(_Property): | ||
96 | """ | ||
97 | A list of SPDX properties | ||
98 | """ | ||
99 | |||
100 | def __init__(self, prop, **kwargs): | ||
101 | super().__init__(**kwargs) | ||
102 | self.prop = prop | ||
103 | |||
104 | def set_property(self, attrs, name): | ||
105 | def get_helper(obj): | ||
106 | if not name in obj._spdx: | ||
107 | obj._spdx[name] = [] | ||
108 | return obj._spdx[name] | ||
109 | |||
110 | def set_helper(obj, value): | ||
111 | obj._spdx[name] = list(value) | ||
112 | |||
113 | def del_helper(obj): | ||
114 | del obj._spdx[name] | ||
115 | |||
116 | attrs[name] = property(get_helper, set_helper, del_helper) | ||
117 | |||
118 | def init(self, source): | ||
119 | return [self.prop.init(o) for o in source] | ||
120 | |||
121 | |||
122 | class _StringList(_ListProperty): | ||
123 | """ | ||
124 | A list of strings as a property for an SPDX object | ||
125 | """ | ||
126 | |||
127 | def __init__(self, **kwargs): | ||
128 | super().__init__(_String(), **kwargs) | ||
129 | |||
130 | |||
131 | class _ObjectList(_ListProperty): | ||
132 | """ | ||
133 | A list of SPDX objects as a property for an SPDX object | ||
134 | """ | ||
135 | |||
136 | def __init__(self, cls, **kwargs): | ||
137 | super().__init__(_Object(cls), **kwargs) | ||
138 | |||
139 | |||
140 | class MetaSPDXObject(type): | ||
141 | """ | ||
142 | A metaclass that allows properties (anything derived from a _Property | ||
143 | class) to be defined for a SPDX object | ||
144 | """ | ||
145 | def __new__(mcls, name, bases, attrs): | ||
146 | attrs["_properties"] = {} | ||
147 | |||
148 | for key in attrs.keys(): | ||
149 | if isinstance(attrs[key], _Property): | ||
150 | prop = attrs[key] | ||
151 | attrs["_properties"][key] = prop | ||
152 | prop.set_property(attrs, key) | ||
153 | |||
154 | return super().__new__(mcls, name, bases, attrs) | ||
155 | |||
156 | |||
157 | class SPDXObject(metaclass=MetaSPDXObject): | ||
158 | """ | ||
159 | The base SPDX object; all SPDX spec classes must derive from this class | ||
160 | """ | ||
161 | def __init__(self, **d): | ||
162 | self._spdx = {} | ||
163 | |||
164 | for name, prop in self._properties.items(): | ||
165 | prop.setdefault(self._spdx, name) | ||
166 | if name in d: | ||
167 | self._spdx[name] = prop.init(d[name]) | ||
168 | |||
169 | def serializer(self): | ||
170 | return self._spdx | ||
171 | |||
172 | def __setattr__(self, name, value): | ||
173 | if name in self._properties or name == "_spdx": | ||
174 | super().__setattr__(name, value) | ||
175 | return | ||
176 | raise KeyError("%r is not a valid SPDX property" % name) | ||
177 | |||
178 | # | ||
179 | # These are the SPDX objects implemented from the spec. The *only* properties | ||
180 | # that can be added to these objects are ones directly specified in the SPDX | ||
181 | # spec, however you may add helper functions to make operations easier. | ||
182 | # | ||
183 | # Defaults should *only* be specified if the SPDX spec says there is a certain | ||
184 | # required value for a field (e.g. dataLicense), or if the field is mandatory | ||
185 | # and has some sane "this field is unknown" value (e.g. "NOASSERTION") | ||
186 | # | ||
187 | |||
class SPDXAnnotation(SPDXObject):
    """A comment attached to an SPDX element by an annotator (person or tool)."""
    annotationDate = _String()
    annotationType = _String()
    annotator = _String()
    comment = _String()
class SPDXChecksum(SPDXObject):
    """A checksum: an algorithm name paired with its hex digest value."""
    algorithm = _String()
    checksumValue = _String()
198 | |||
class SPDXRelationship(SPDXObject):
    """A typed relationship between two SPDX elements, referenced by SPDXID."""
    spdxElementId = _String()
    relatedSpdxElement = _String()
    relationshipType = _String()
    comment = _String()
    annotations = _ObjectList(SPDXAnnotation)
205 | |||
206 | |||
class SPDXExternalReference(SPDXObject):
    """An external reference (e.g. a CPE or purl locator) for a package."""
    referenceCategory = _String()
    referenceType = _String()
    referenceLocator = _String()
212 | |||
class SPDXPackageVerificationCode(SPDXObject):
    """A package verification code plus the files excluded from its computation."""
    packageVerificationCodeValue = _String()
    packageVerificationCodeExcludedFiles = _StringList()
216 | |||
217 | |||
class SPDXPackage(SPDXObject):
    """
    An SPDX package: a named, versioned unit of software with license,
    checksum, and file membership information.
    """

    # Checksum algorithm names accepted by the SPDX spec for packages.
    ALLOWED_CHECKSUMS = [
        "SHA1",
        "SHA224",
        "SHA256",
        "SHA384",
        "SHA512",
        "MD2",
        "MD4",
        "MD5",
        "MD6",
    ]

    name = _String()
    SPDXID = _String()
    versionInfo = _String()
    downloadLocation = _String(default="NOASSERTION")
    supplier = _String(default="NOASSERTION")
    homepage = _String()
    licenseConcluded = _String(default="NOASSERTION")
    licenseDeclared = _String(default="NOASSERTION")
    summary = _String()
    description = _String()
    sourceInfo = _String()
    copyrightText = _String(default="NOASSERTION")
    licenseInfoFromFiles = _StringList(default=["NOASSERTION"])
    externalRefs = _ObjectList(SPDXExternalReference)
    packageVerificationCode = _Object(SPDXPackageVerificationCode)
    hasFiles = _StringList()
    packageFileName = _String()
    annotations = _ObjectList(SPDXAnnotation)
    checksums = _ObjectList(SPDXChecksum)
250 | |||
251 | |||
class SPDXFile(SPDXObject):
    """A single file within a package, with its licenses and checksums."""
    SPDXID = _String()
    fileName = _String()
    licenseConcluded = _String(default="NOASSERTION")
    copyrightText = _String(default="NOASSERTION")
    licenseInfoInFiles = _StringList(default=["NOASSERTION"])
    checksums = _ObjectList(SPDXChecksum)
    fileTypes = _StringList()
260 | |||
261 | |||
class SPDXCreationInfo(SPDXObject):
    """Metadata about who/what created the SPDX document and when."""
    created = _String()
    licenseListVersion = _String()
    comment = _String()
    creators = _StringList()
267 | |||
268 | |||
class SPDXExternalDocumentRef(SPDXObject):
    """A reference to another SPDX document, identified by namespace and checksum."""
    externalDocumentId = _String()
    spdxDocument = _String()
    checksum = _Object(SPDXChecksum)
273 | |||
274 | |||
class SPDXExtractedLicensingInfo(SPDXObject):
    """License text extracted from the source, for licenses without SPDX IDs."""
    name = _String()
    comment = _String()
    licenseId = _String()
    extractedText = _String()
280 | |||
281 | |||
class SPDXDocument(SPDXObject):
    """
    The top-level SPDX document.

    Holds document metadata along with the packages, files, and
    relationships it describes, plus helpers for (de)serialization and
    element lookup.
    """
    spdxVersion = _String(default="SPDX-" + SPDX_VERSION)
    dataLicense = _String(default="CC0-1.0")
    SPDXID = _String(default="SPDXRef-DOCUMENT")
    name = _String()
    documentNamespace = _String()
    creationInfo = _Object(SPDXCreationInfo)
    packages = _ObjectList(SPDXPackage)
    files = _ObjectList(SPDXFile)
    relationships = _ObjectList(SPDXRelationship)
    externalDocumentRefs = _ObjectList(SPDXExternalDocumentRef)
    hasExtractedLicensingInfos = _ObjectList(SPDXExtractedLicensingInfo)

    # NOTE: no __init__ override — the base SPDXObject.__init__(**d) already
    # does everything needed (the previous override only forwarded to super).

    def to_json(self, f, *, sort_keys=False, indent=None, separators=None):
        """
        Serialize the document as JSON to file object *f*.

        Returns the SHA-1 hex digest of the bytes written, so callers can
        record the checksum of the generated document (e.g. when building
        an SPDXExternalDocumentRef to this document).
        """
        class Encoder(json.JSONEncoder):
            def default(self, o):
                # SPDX objects serialize as their raw property dict.
                if isinstance(o, SPDXObject):
                    return o.serializer()

                return super().default(o)

        sha1 = hashlib.sha1()
        # iterencode lets us hash and write the output incrementally
        # without materializing the whole document in memory.
        for chunk in Encoder(
            sort_keys=sort_keys,
            indent=indent,
            separators=separators,
        ).iterencode(self):
            chunk = chunk.encode("utf-8")
            f.write(chunk)
            sha1.update(chunk)

        return sha1.hexdigest()

    @classmethod
    def from_json(cls, f):
        """Deserialize a document from JSON file object *f*."""
        return cls(**json.load(f))

    def add_relationship(self, _from, relationship, _to, *, comment=None, annotation=None):
        """
        Append a relationship of type *relationship* from *_from* to *_to*.

        Either endpoint may be an SPDXObject (its SPDXID is used) or a bare
        SPDXID string. An optional comment and/or SPDXAnnotation can be
        attached to the new relationship.
        """
        from_spdxid = _from.SPDXID if isinstance(_from, SPDXObject) else _from
        to_spdxid = _to.SPDXID if isinstance(_to, SPDXObject) else _to

        r = SPDXRelationship(
            spdxElementId=from_spdxid,
            relatedSpdxElement=to_spdxid,
            relationshipType=relationship,
        )

        if comment is not None:
            r.comment = comment

        if annotation is not None:
            r.annotations.append(annotation)

        self.relationships.append(r)

    def find_by_spdxid(self, spdxid):
        """Return the package or file with the given SPDXID, or None."""
        return next(
            (o for o in itertools.chain(self.packages, self.files)
             if o.SPDXID == spdxid),
            None,
        )

    def find_external_document_ref(self, namespace):
        """Return the external document ref whose spdxDocument is *namespace*, or None."""
        return next(
            (r for r in self.externalDocumentRefs if r.spdxDocument == namespace),
            None,
        )
diff --git a/meta/lib/oe/sstatesig.py b/meta/lib/oe/sstatesig.py index aeceb100d7..65bb4efe25 100644 --- a/meta/lib/oe/sstatesig.py +++ b/meta/lib/oe/sstatesig.py | |||
@@ -480,8 +480,10 @@ def OEOuthashBasic(path, sigfile, task, d): | |||
480 | if "package_write_" in task or task == "package_qa": | 480 | if "package_write_" in task or task == "package_qa": |
481 | include_owners = False | 481 | include_owners = False |
482 | include_timestamps = False | 482 | include_timestamps = False |
483 | include_root = True | ||
483 | if task == "package": | 484 | if task == "package": |
484 | include_timestamps = d.getVar('BUILD_REPRODUCIBLE_BINARIES') == '1' | 485 | include_timestamps = d.getVar('BUILD_REPRODUCIBLE_BINARIES') == '1' |
486 | include_root = False | ||
485 | extra_content = d.getVar('HASHEQUIV_HASH_VERSION') | 487 | extra_content = d.getVar('HASHEQUIV_HASH_VERSION') |
486 | 488 | ||
487 | try: | 489 | try: |
@@ -592,7 +594,8 @@ def OEOuthashBasic(path, sigfile, task, d): | |||
592 | update_hash("\n") | 594 | update_hash("\n") |
593 | 595 | ||
594 | # Process this directory and all its child files | 596 | # Process this directory and all its child files |
595 | process(root) | 597 | if include_root or root != ".": |
598 | process(root) | ||
596 | for f in files: | 599 | for f in files: |
597 | if f == 'fixmepath': | 600 | if f == 'fixmepath': |
598 | continue | 601 | continue |
diff --git a/meta/lib/oe/terminal.py b/meta/lib/oe/terminal.py index eb10a6e33e..a0c166d884 100644 --- a/meta/lib/oe/terminal.py +++ b/meta/lib/oe/terminal.py | |||
@@ -102,6 +102,10 @@ class Rxvt(XTerminal): | |||
102 | command = 'rxvt -T "{title}" -e {command}' | 102 | command = 'rxvt -T "{title}" -e {command}' |
103 | priority = 1 | 103 | priority = 1 |
104 | 104 | ||
105 | class URxvt(XTerminal): | ||
106 | command = 'urxvt -T "{title}" -e {command}' | ||
107 | priority = 1 | ||
108 | |||
105 | class Screen(Terminal): | 109 | class Screen(Terminal): |
106 | command = 'screen -D -m -t "{title}" -S devshell {command}' | 110 | command = 'screen -D -m -t "{title}" -S devshell {command}' |
107 | 111 | ||
@@ -163,7 +167,12 @@ class Tmux(Terminal): | |||
163 | # devshells, if it's already there, add a new window to it. | 167 | # devshells, if it's already there, add a new window to it. |
164 | window_name = 'devshell-%i' % os.getpid() | 168 | window_name = 'devshell-%i' % os.getpid() |
165 | 169 | ||
166 | self.command = 'tmux new -c "{{cwd}}" -d -s {0} -n {0} "{{command}}"'.format(window_name) | 170 | self.command = 'tmux new -c "{{cwd}}" -d -s {0} -n {0} "{{command}}"' |
171 | if not check_tmux_version('1.9'): | ||
172 | # `tmux new-session -c` was added in 1.9; | ||
173 | # older versions fail with that flag | ||
174 | self.command = 'tmux new -d -s {0} -n {0} "{{command}}"' | ||
175 | self.command = self.command.format(window_name) | ||
167 | Terminal.__init__(self, sh_cmd, title, env, d) | 176 | Terminal.__init__(self, sh_cmd, title, env, d) |
168 | 177 | ||
169 | attach_cmd = 'tmux att -t {0}'.format(window_name) | 178 | attach_cmd = 'tmux att -t {0}'.format(window_name) |
@@ -253,13 +262,18 @@ def spawn(name, sh_cmd, title=None, env=None, d=None): | |||
253 | except OSError: | 262 | except OSError: |
254 | return | 263 | return |
255 | 264 | ||
265 | def check_tmux_version(desired): | ||
266 | vernum = check_terminal_version("tmux") | ||
267 | if vernum and LooseVersion(vernum) < desired: | ||
268 | return False | ||
269 | return vernum | ||
270 | |||
256 | def check_tmux_pane_size(tmux): | 271 | def check_tmux_pane_size(tmux): |
257 | import subprocess as sub | 272 | import subprocess as sub |
258 | # On older tmux versions (<1.9), return false. The reason | 273 | # On older tmux versions (<1.9), return false. The reason |
259 | # is that there is no easy way to get the height of the active panel | 274 | # is that there is no easy way to get the height of the active panel |
260 | # on current window without nested formats (available from version 1.9) | 275 | # on current window without nested formats (available from version 1.9) |
261 | vernum = check_terminal_version("tmux") | 276 | if not check_tmux_version('1.9'): |
262 | if vernum and LooseVersion(vernum) < '1.9': | ||
263 | return False | 277 | return False |
264 | try: | 278 | try: |
265 | p = sub.Popen('%s list-panes -F "#{?pane_active,#{pane_height},}"' % tmux, | 279 | p = sub.Popen('%s list-panes -F "#{?pane_active,#{pane_height},}"' % tmux, |
diff --git a/meta/lib/oe/utils.py b/meta/lib/oe/utils.py index 83d298906b..3e016244c5 100644 --- a/meta/lib/oe/utils.py +++ b/meta/lib/oe/utils.py | |||
@@ -481,7 +481,8 @@ class ThreadedWorker(Thread): | |||
481 | try: | 481 | try: |
482 | func(self, *args, **kargs) | 482 | func(self, *args, **kargs) |
483 | except Exception as e: | 483 | except Exception as e: |
484 | print(e) | 484 | # Eat all exceptions |
485 | bb.mainlogger.debug("Worker task raised %s" % e, exc_info=e) | ||
485 | finally: | 486 | finally: |
486 | self.tasks.task_done() | 487 | self.tasks.task_done() |
487 | 488 | ||