diff options
Diffstat (limited to 'meta/lib')
67 files changed, 1394 insertions, 210 deletions
diff --git a/meta/lib/bblayers/create.py b/meta/lib/bblayers/create.py index 542f31fc81..f49b48d1b4 100644 --- a/meta/lib/bblayers/create.py +++ b/meta/lib/bblayers/create.py | |||
@@ -71,7 +71,7 @@ class CreatePlugin(LayerPlugin): | |||
71 | def register_commands(self, sp): | 71 | def register_commands(self, sp): |
72 | parser_create_layer = self.add_command(sp, 'create-layer', self.do_create_layer, parserecipes=False) | 72 | parser_create_layer = self.add_command(sp, 'create-layer', self.do_create_layer, parserecipes=False) |
73 | parser_create_layer.add_argument('layerdir', help='Layer directory to create') | 73 | parser_create_layer.add_argument('layerdir', help='Layer directory to create') |
74 | parser_create_layer.add_argument('--priority', '-p', default=6, help='Layer directory to create') | 74 | parser_create_layer.add_argument('--priority', '-p', default=6, help='Priority of recipes in layer') |
75 | parser_create_layer.add_argument('--example-recipe-name', '-e', dest='examplerecipe', default='example', help='Filename of the example recipe') | 75 | parser_create_layer.add_argument('--example-recipe-name', '-e', dest='examplerecipe', default='example', help='Filename of the example recipe') |
76 | parser_create_layer.add_argument('--example-recipe-version', '-v', dest='version', default='0.1', help='Version number for the example recipe') | 76 | parser_create_layer.add_argument('--example-recipe-version', '-v', dest='version', default='0.1', help='Version number for the example recipe') |
77 | 77 | ||
diff --git a/meta/lib/buildstats.py b/meta/lib/buildstats.py index 8627ed3c31..c52b6c3b72 100644 --- a/meta/lib/buildstats.py +++ b/meta/lib/buildstats.py | |||
@@ -43,8 +43,8 @@ class SystemStats: | |||
43 | # depends on the heartbeat event, which fires less often. | 43 | # depends on the heartbeat event, which fires less often. |
44 | self.min_seconds = 1 | 44 | self.min_seconds = 1 |
45 | 45 | ||
46 | self.meminfo_regex = re.compile(b'^(MemTotal|MemFree|Buffers|Cached|SwapTotal|SwapFree):\s*(\d+)') | 46 | self.meminfo_regex = re.compile(rb'^(MemTotal|MemFree|Buffers|Cached|SwapTotal|SwapFree):\s*(\d+)') |
47 | self.diskstats_regex = re.compile(b'^([hsv]d.|mtdblock\d|mmcblk\d|cciss/c\d+d\d+.*)$') | 47 | self.diskstats_regex = re.compile(rb'^([hsv]d.|mtdblock\d|mmcblk\d|cciss/c\d+d\d+.*)$') |
48 | self.diskstats_ltime = None | 48 | self.diskstats_ltime = None |
49 | self.diskstats_data = None | 49 | self.diskstats_data = None |
50 | self.stat_ltimes = None | 50 | self.stat_ltimes = None |
diff --git a/meta/lib/oe/copy_buildsystem.py b/meta/lib/oe/copy_buildsystem.py index 31a84f5b06..d97bf9d1b9 100644 --- a/meta/lib/oe/copy_buildsystem.py +++ b/meta/lib/oe/copy_buildsystem.py | |||
@@ -20,7 +20,7 @@ def _smart_copy(src, dest): | |||
20 | mode = os.stat(src).st_mode | 20 | mode = os.stat(src).st_mode |
21 | if stat.S_ISDIR(mode): | 21 | if stat.S_ISDIR(mode): |
22 | bb.utils.mkdirhier(dest) | 22 | bb.utils.mkdirhier(dest) |
23 | cmd = "tar --exclude='.git' --xattrs --xattrs-include='*' -chf - -C %s -p . \ | 23 | cmd = "tar --exclude='.git' --exclude='__pycache__' --xattrs --xattrs-include='*' -chf - -C %s -p . \ |
24 | | tar --xattrs --xattrs-include='*' -xf - -C %s" % (src, dest) | 24 | | tar --xattrs --xattrs-include='*' -xf - -C %s" % (src, dest) |
25 | subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) | 25 | subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) |
26 | else: | 26 | else: |
@@ -259,7 +259,7 @@ def create_locked_sstate_cache(lockedsigs, input_sstate_cache, output_sstate_cac | |||
259 | bb.note('Generating sstate-cache...') | 259 | bb.note('Generating sstate-cache...') |
260 | 260 | ||
261 | nativelsbstring = d.getVar('NATIVELSBSTRING') | 261 | nativelsbstring = d.getVar('NATIVELSBSTRING') |
262 | bb.process.run("gen-lockedsig-cache %s %s %s %s %s" % (lockedsigs, input_sstate_cache, output_sstate_cache, nativelsbstring, filterfile or '')) | 262 | bb.process.run("PYTHONDONTWRITEBYTECODE=1 gen-lockedsig-cache %s %s %s %s %s" % (lockedsigs, input_sstate_cache, output_sstate_cache, nativelsbstring, filterfile or '')) |
263 | if fixedlsbstring and nativelsbstring != fixedlsbstring: | 263 | if fixedlsbstring and nativelsbstring != fixedlsbstring: |
264 | nativedir = output_sstate_cache + '/' + nativelsbstring | 264 | nativedir = output_sstate_cache + '/' + nativelsbstring |
265 | if os.path.isdir(nativedir): | 265 | if os.path.isdir(nativedir): |
@@ -286,7 +286,7 @@ def check_sstate_task_list(d, targets, filteroutfile, cmdprefix='', cwd=None, lo | |||
286 | logparam = '-l %s' % logfile | 286 | logparam = '-l %s' % logfile |
287 | else: | 287 | else: |
288 | logparam = '' | 288 | logparam = '' |
289 | cmd = "%sBB_SETSCENE_ENFORCE=1 PSEUDO_DISABLED=1 oe-check-sstate %s -s -o %s %s" % (cmdprefix, targets, filteroutfile, logparam) | 289 | cmd = "%sPYTHONDONTWRITEBYTECODE=1 BB_SETSCENE_ENFORCE=1 PSEUDO_DISABLED=1 oe-check-sstate %s -s -o %s %s" % (cmdprefix, targets, filteroutfile, logparam) |
290 | env = dict(d.getVar('BB_ORIGENV', False)) | 290 | env = dict(d.getVar('BB_ORIGENV', False)) |
291 | env.pop('BUILDDIR', '') | 291 | env.pop('BUILDDIR', '') |
292 | env.pop('BBPATH', '') | 292 | env.pop('BBPATH', '') |
diff --git a/meta/lib/oe/cve_check.py b/meta/lib/oe/cve_check.py index ce755f940a..ed4af18ced 100644 --- a/meta/lib/oe/cve_check.py +++ b/meta/lib/oe/cve_check.py | |||
@@ -11,8 +11,13 @@ _Version = collections.namedtuple( | |||
11 | class Version(): | 11 | class Version(): |
12 | 12 | ||
13 | def __init__(self, version, suffix=None): | 13 | def __init__(self, version, suffix=None): |
14 | |||
15 | suffixes = ["alphabetical", "patch"] | ||
16 | |||
14 | if str(suffix) == "alphabetical": | 17 | if str(suffix) == "alphabetical": |
15 | version_pattern = r"""r?v?(?:(?P<release>[0-9]+(?:[-\.][0-9]+)*)(?P<patch>[-_\.]?(?P<patch_l>[a-z]))?(?P<pre>[-_\.]?(?P<pre_l>(rc|alpha|beta|pre|preview|dev))[-_\.]?(?P<pre_v>[0-9]+)?)?)(.*)?""" | 18 | version_pattern = r"""r?v?(?:(?P<release>[0-9]+(?:[-\.][0-9]+)*)(?P<patch>[-_\.]?(?P<patch_l>[a-z]))?(?P<pre>[-_\.]?(?P<pre_l>(rc|alpha|beta|pre|preview|dev))[-_\.]?(?P<pre_v>[0-9]+)?)?)(.*)?""" |
19 | elif str(suffix) == "patch": | ||
20 | version_pattern = r"""r?v?(?:(?P<release>[0-9]+(?:[-\.][0-9]+)*)(?P<patch>[-_\.]?(p|patch)(?P<patch_l>[0-9]+))?(?P<pre>[-_\.]?(?P<pre_l>(rc|alpha|beta|pre|preview|dev))[-_\.]?(?P<pre_v>[0-9]+)?)?)(.*)?""" | ||
16 | else: | 21 | else: |
17 | version_pattern = r"""r?v?(?:(?P<release>[0-9]+(?:[-\.][0-9]+)*)(?P<pre>[-_\.]?(?P<pre_l>(rc|alpha|beta|pre|preview|dev))[-_\.]?(?P<pre_v>[0-9]+)?)?)(.*)?""" | 22 | version_pattern = r"""r?v?(?:(?P<release>[0-9]+(?:[-\.][0-9]+)*)(?P<pre>[-_\.]?(?P<pre_l>(rc|alpha|beta|pre|preview|dev))[-_\.]?(?P<pre_v>[0-9]+)?)?)(.*)?""" |
18 | regex = re.compile(r"^\s*" + version_pattern + r"\s*$", re.VERBOSE | re.IGNORECASE) | 23 | regex = re.compile(r"^\s*" + version_pattern + r"\s*$", re.VERBOSE | re.IGNORECASE) |
@@ -23,7 +28,7 @@ class Version(): | |||
23 | 28 | ||
24 | self._version = _Version( | 29 | self._version = _Version( |
25 | release=tuple(int(i) for i in match.group("release").replace("-",".").split(".")), | 30 | release=tuple(int(i) for i in match.group("release").replace("-",".").split(".")), |
26 | patch_l=match.group("patch_l") if str(suffix) == "alphabetical" and match.group("patch_l") else "", | 31 | patch_l=match.group("patch_l") if str(suffix) in suffixes and match.group("patch_l") else "", |
27 | pre_l=match.group("pre_l"), | 32 | pre_l=match.group("pre_l"), |
28 | pre_v=match.group("pre_v") | 33 | pre_v=match.group("pre_v") |
29 | ) | 34 | ) |
@@ -58,3 +63,150 @@ def _cmpkey(release, patch_l, pre_l, pre_v): | |||
58 | else: | 63 | else: |
59 | _pre = float(pre_v) if pre_v else float('-inf') | 64 | _pre = float(pre_v) if pre_v else float('-inf') |
60 | return _release, _patch, _pre | 65 | return _release, _patch, _pre |
66 | |||
67 | def cve_check_merge_jsons(output, data): | ||
68 | """ | ||
69 | Merge the data in the "package" property to the main data file | ||
70 | output | ||
71 | """ | ||
72 | if output["version"] != data["version"]: | ||
73 | bb.error("Version mismatch when merging JSON outputs") | ||
74 | return | ||
75 | |||
76 | for product in output["package"]: | ||
77 | if product["name"] == data["package"][0]["name"]: | ||
78 | bb.error("Error adding the same package %s twice" % product["name"]) | ||
79 | return | ||
80 | |||
81 | output["package"].append(data["package"][0]) | ||
82 | |||
83 | def update_symlinks(target_path, link_path): | ||
84 | """ | ||
85 | Update a symbolic link link_path to point to target_path. | ||
86 | Remove the link and recreate it if exist and is different. | ||
87 | """ | ||
88 | if link_path != target_path and os.path.exists(target_path): | ||
89 | if os.path.exists(os.path.realpath(link_path)): | ||
90 | os.remove(link_path) | ||
91 | os.symlink(os.path.basename(target_path), link_path) | ||
92 | |||
93 | def get_patched_cves(d): | ||
94 | """ | ||
95 | Get patches that solve CVEs using the "CVE: " tag. | ||
96 | """ | ||
97 | |||
98 | import re | ||
99 | import oe.patch | ||
100 | |||
101 | pn = d.getVar("PN") | ||
102 | cve_match = re.compile("CVE:( CVE\-\d{4}\-\d+)+") | ||
103 | |||
104 | # Matches the last "CVE-YYYY-ID" in the file name, also if written | ||
105 | # in lowercase. Possible to have multiple CVE IDs in a single | ||
106 | # file name, but only the last one will be detected from the file name. | ||
107 | # However, patch files contents addressing multiple CVE IDs are supported | ||
108 | # (cve_match regular expression) | ||
109 | |||
110 | cve_file_name_match = re.compile(".*([Cc][Vv][Ee]\-\d{4}\-\d+)") | ||
111 | |||
112 | patched_cves = set() | ||
113 | bb.debug(2, "Looking for patches that solves CVEs for %s" % pn) | ||
114 | for url in oe.patch.src_patches(d): | ||
115 | patch_file = bb.fetch.decodeurl(url)[2] | ||
116 | |||
117 | # Check patch file name for CVE ID | ||
118 | fname_match = cve_file_name_match.search(patch_file) | ||
119 | if fname_match: | ||
120 | cve = fname_match.group(1).upper() | ||
121 | patched_cves.add(cve) | ||
122 | bb.debug(2, "Found CVE %s from patch file name %s" % (cve, patch_file)) | ||
123 | |||
124 | # Remote patches won't be present and compressed patches won't be | ||
125 | # unpacked, so say we're not scanning them | ||
126 | if not os.path.isfile(patch_file): | ||
127 | bb.note("%s is remote or compressed, not scanning content" % patch_file) | ||
128 | continue | ||
129 | |||
130 | with open(patch_file, "r", encoding="utf-8") as f: | ||
131 | try: | ||
132 | patch_text = f.read() | ||
133 | except UnicodeDecodeError: | ||
134 | bb.debug(1, "Failed to read patch %s using UTF-8 encoding" | ||
135 | " trying with iso8859-1" % patch_file) | ||
136 | f.close() | ||
137 | with open(patch_file, "r", encoding="iso8859-1") as f: | ||
138 | patch_text = f.read() | ||
139 | |||
140 | # Search for one or more "CVE: " lines | ||
141 | text_match = False | ||
142 | for match in cve_match.finditer(patch_text): | ||
143 | # Get only the CVEs without the "CVE: " tag | ||
144 | cves = patch_text[match.start()+5:match.end()] | ||
145 | for cve in cves.split(): | ||
146 | bb.debug(2, "Patch %s solves %s" % (patch_file, cve)) | ||
147 | patched_cves.add(cve) | ||
148 | text_match = True | ||
149 | |||
150 | if not fname_match and not text_match: | ||
151 | bb.debug(2, "Patch %s doesn't solve CVEs" % patch_file) | ||
152 | |||
153 | return patched_cves | ||
154 | |||
155 | |||
156 | def get_cpe_ids(cve_product, version): | ||
157 | """ | ||
158 | Get list of CPE identifiers for the given product and version | ||
159 | """ | ||
160 | |||
161 | version = version.split("+git")[0] | ||
162 | |||
163 | cpe_ids = [] | ||
164 | for product in cve_product.split(): | ||
165 | # CVE_PRODUCT in recipes may include vendor information for CPE identifiers. If not, | ||
166 | # use wildcard for vendor. | ||
167 | if ":" in product: | ||
168 | vendor, product = product.split(":", 1) | ||
169 | else: | ||
170 | vendor = "*" | ||
171 | |||
172 | cpe_id = 'cpe:2.3:a:{}:{}:{}:*:*:*:*:*:*:*'.format(vendor, product, version) | ||
173 | cpe_ids.append(cpe_id) | ||
174 | |||
175 | return cpe_ids | ||
176 | |||
177 | def convert_cve_version(version): | ||
178 | """ | ||
179 | This function converts from CVE format to Yocto version format. | ||
180 | eg 8.3_p1 -> 8.3p1, 6.2_rc1 -> 6.2-rc1 | ||
181 | |||
182 | Unless it is redefined using CVE_VERSION in the recipe, | ||
183 | cve_check uses the version in the name of the recipe (${PV}) | ||
184 | to check vulnerabilities against a CVE in the database downloaded from NVD. | ||
185 | |||
186 | When the version has an update, i.e. | ||
187 | "p1" in OpenSSH 8.3p1, | ||
188 | "-rc1" in linux kernel 6.2-rc1, | ||
189 | the database stores the version as version_update (8.3_p1, 6.2_rc1). | ||
190 | Therefore, we must transform this version before comparing to the | ||
191 | recipe version. | ||
192 | |||
193 | In this case, the parameter of the function is 8.3_p1. | ||
194 | If the version uses the Release Candidate format, "rc", | ||
195 | this function replaces the '_' by '-'. | ||
196 | If the version uses the Update format, "p", | ||
197 | this function removes the '_' completely. | ||
198 | """ | ||
199 | import re | ||
200 | |||
201 | matches = re.match('^([0-9.]+)_((p|rc)[0-9]+)$', version) | ||
202 | |||
203 | if not matches: | ||
204 | return version | ||
205 | |||
206 | version = matches.group(1) | ||
207 | update = matches.group(2) | ||
208 | |||
209 | if matches.group(3) == "rc": | ||
210 | return version + '-' + update | ||
211 | |||
212 | return version + update | ||
diff --git a/meta/lib/oe/gpg_sign.py b/meta/lib/oe/gpg_sign.py index 7634d7ef1d..492f096eaa 100644 --- a/meta/lib/oe/gpg_sign.py +++ b/meta/lib/oe/gpg_sign.py | |||
@@ -111,7 +111,7 @@ class LocalSigner(object): | |||
111 | 111 | ||
112 | def verify(self, sig_file): | 112 | def verify(self, sig_file): |
113 | """Verify signature""" | 113 | """Verify signature""" |
114 | cmd = self.gpg_cmd + [" --verify", "--no-permission-warning"] | 114 | cmd = self.gpg_cmd + ["--verify", "--no-permission-warning"] |
115 | if self.gpg_path: | 115 | if self.gpg_path: |
116 | cmd += ["--homedir", self.gpg_path] | 116 | cmd += ["--homedir", self.gpg_path] |
117 | 117 | ||
diff --git a/meta/lib/oe/license.py b/meta/lib/oe/license.py index c1274a61de..c4efbe142b 100644 --- a/meta/lib/oe/license.py +++ b/meta/lib/oe/license.py | |||
@@ -81,6 +81,9 @@ class FlattenVisitor(LicenseVisitor): | |||
81 | def visit_Str(self, node): | 81 | def visit_Str(self, node): |
82 | self.licenses.append(node.s) | 82 | self.licenses.append(node.s) |
83 | 83 | ||
84 | def visit_Constant(self, node): | ||
85 | self.licenses.append(node.value) | ||
86 | |||
84 | def visit_BinOp(self, node): | 87 | def visit_BinOp(self, node): |
85 | if isinstance(node.op, ast.BitOr): | 88 | if isinstance(node.op, ast.BitOr): |
86 | left = FlattenVisitor(self.choose_licenses) | 89 | left = FlattenVisitor(self.choose_licenses) |
@@ -234,6 +237,9 @@ class ListVisitor(LicenseVisitor): | |||
234 | def visit_Str(self, node): | 237 | def visit_Str(self, node): |
235 | self.licenses.add(node.s) | 238 | self.licenses.add(node.s) |
236 | 239 | ||
240 | def visit_Constant(self, node): | ||
241 | self.licenses.add(node.value) | ||
242 | |||
237 | def list_licenses(licensestr): | 243 | def list_licenses(licensestr): |
238 | """Simply get a list of all licenses mentioned in a license string. | 244 | """Simply get a list of all licenses mentioned in a license string. |
239 | Binary operators are not applied or taken into account in any way""" | 245 | Binary operators are not applied or taken into account in any way""" |
diff --git a/meta/lib/oe/package_manager.py b/meta/lib/oe/package_manager.py index b0660411ea..502dfbe3ed 100644 --- a/meta/lib/oe/package_manager.py +++ b/meta/lib/oe/package_manager.py | |||
@@ -403,7 +403,7 @@ class PackageManager(object, metaclass=ABCMeta): | |||
403 | bb.utils.remove(self.intercepts_dir, True) | 403 | bb.utils.remove(self.intercepts_dir, True) |
404 | bb.utils.mkdirhier(self.intercepts_dir) | 404 | bb.utils.mkdirhier(self.intercepts_dir) |
405 | for intercept in postinst_intercepts: | 405 | for intercept in postinst_intercepts: |
406 | bb.utils.copyfile(intercept, os.path.join(self.intercepts_dir, os.path.basename(intercept))) | 406 | shutil.copy(intercept, os.path.join(self.intercepts_dir, os.path.basename(intercept))) |
407 | 407 | ||
408 | @abstractmethod | 408 | @abstractmethod |
409 | def _handle_intercept_failure(self, failed_script): | 409 | def _handle_intercept_failure(self, failed_script): |
@@ -611,12 +611,13 @@ class PackageManager(object, metaclass=ABCMeta): | |||
611 | "'%s' returned %d:\n%s" % | 611 | "'%s' returned %d:\n%s" % |
612 | (' '.join(cmd), e.returncode, e.output.decode("utf-8"))) | 612 | (' '.join(cmd), e.returncode, e.output.decode("utf-8"))) |
613 | 613 | ||
614 | target_arch = self.d.getVar('TARGET_ARCH') | 614 | if self.d.getVar('IMAGE_LOCALES_ARCHIVE') == '1': |
615 | localedir = oe.path.join(self.target_rootfs, self.d.getVar("libdir"), "locale") | 615 | target_arch = self.d.getVar('TARGET_ARCH') |
616 | if os.path.exists(localedir) and os.listdir(localedir): | 616 | localedir = oe.path.join(self.target_rootfs, self.d.getVar("libdir"), "locale") |
617 | generate_locale_archive(self.d, self.target_rootfs, target_arch, localedir) | 617 | if os.path.exists(localedir) and os.listdir(localedir): |
618 | # And now delete the binary locales | 618 | generate_locale_archive(self.d, self.target_rootfs, target_arch, localedir) |
619 | self.remove(fnmatch.filter(self.list_installed(), "glibc-binary-localedata-*"), False) | 619 | # And now delete the binary locales |
620 | self.remove(fnmatch.filter(self.list_installed(), "glibc-binary-localedata-*"), False) | ||
620 | 621 | ||
621 | def deploy_dir_lock(self): | 622 | def deploy_dir_lock(self): |
622 | if self.deploy_dir is None: | 623 | if self.deploy_dir is None: |
diff --git a/meta/lib/oe/packagedata.py b/meta/lib/oe/packagedata.py index a82085a792..feb834c0e3 100644 --- a/meta/lib/oe/packagedata.py +++ b/meta/lib/oe/packagedata.py | |||
@@ -57,6 +57,17 @@ def read_subpkgdata_dict(pkg, d): | |||
57 | ret[newvar] = subd[var] | 57 | ret[newvar] = subd[var] |
58 | return ret | 58 | return ret |
59 | 59 | ||
60 | def read_subpkgdata_extended(pkg, d): | ||
61 | import json | ||
62 | import gzip | ||
63 | |||
64 | fn = d.expand("${PKGDATA_DIR}/extended/%s.json.gz" % pkg) | ||
65 | try: | ||
66 | with gzip.open(fn, "rt", encoding="utf-8") as f: | ||
67 | return json.load(f) | ||
68 | except FileNotFoundError: | ||
69 | return None | ||
70 | |||
60 | def _pkgmap(d): | 71 | def _pkgmap(d): |
61 | """Return a dictionary mapping package to recipe name.""" | 72 | """Return a dictionary mapping package to recipe name.""" |
62 | 73 | ||
diff --git a/meta/lib/oe/patch.py b/meta/lib/oe/patch.py index 7cd8436da5..feb6ee7082 100644 --- a/meta/lib/oe/patch.py +++ b/meta/lib/oe/patch.py | |||
@@ -2,6 +2,9 @@ | |||
2 | # SPDX-License-Identifier: GPL-2.0-only | 2 | # SPDX-License-Identifier: GPL-2.0-only |
3 | # | 3 | # |
4 | 4 | ||
5 | import os | ||
6 | import shlex | ||
7 | import subprocess | ||
5 | import oe.path | 8 | import oe.path |
6 | import oe.types | 9 | import oe.types |
7 | 10 | ||
@@ -24,7 +27,6 @@ class CmdError(bb.BBHandledException): | |||
24 | 27 | ||
25 | 28 | ||
26 | def runcmd(args, dir = None): | 29 | def runcmd(args, dir = None): |
27 | import pipes | ||
28 | import subprocess | 30 | import subprocess |
29 | 31 | ||
30 | if dir: | 32 | if dir: |
@@ -35,7 +37,7 @@ def runcmd(args, dir = None): | |||
35 | # print("cwd: %s -> %s" % (olddir, dir)) | 37 | # print("cwd: %s -> %s" % (olddir, dir)) |
36 | 38 | ||
37 | try: | 39 | try: |
38 | args = [ pipes.quote(str(arg)) for arg in args ] | 40 | args = [ shlex.quote(str(arg)) for arg in args ] |
39 | cmd = " ".join(args) | 41 | cmd = " ".join(args) |
40 | # print("cmd: %s" % cmd) | 42 | # print("cmd: %s" % cmd) |
41 | (exitstatus, output) = subprocess.getstatusoutput(cmd) | 43 | (exitstatus, output) = subprocess.getstatusoutput(cmd) |
diff --git a/meta/lib/oe/reproducible.py b/meta/lib/oe/reproducible.py index 0fb02ccdb0..1ed79b18ca 100644 --- a/meta/lib/oe/reproducible.py +++ b/meta/lib/oe/reproducible.py | |||
@@ -41,7 +41,7 @@ def find_git_folder(d, sourcedir): | |||
41 | for root, dirs, files in os.walk(workdir, topdown=True): | 41 | for root, dirs, files in os.walk(workdir, topdown=True): |
42 | dirs[:] = [d for d in dirs if d not in exclude] | 42 | dirs[:] = [d for d in dirs if d not in exclude] |
43 | if '.git' in dirs: | 43 | if '.git' in dirs: |
44 | return root | 44 | return os.path.join(root, ".git") |
45 | 45 | ||
46 | bb.warn("Failed to find a git repository in WORKDIR: %s" % workdir) | 46 | bb.warn("Failed to find a git repository in WORKDIR: %s" % workdir) |
47 | return None | 47 | return None |
@@ -62,7 +62,8 @@ def get_source_date_epoch_from_git(d, sourcedir): | |||
62 | return None | 62 | return None |
63 | 63 | ||
64 | bb.debug(1, "git repository: %s" % gitpath) | 64 | bb.debug(1, "git repository: %s" % gitpath) |
65 | p = subprocess.run(['git', '--git-dir', gitpath, 'log', '-1', '--pretty=%ct'], check=True, stdout=subprocess.PIPE) | 65 | p = subprocess.run(['git', '-c', 'log.showSignature=false', '--git-dir', gitpath, 'log', '-1', '--pretty=%ct'], |
66 | check=True, stdout=subprocess.PIPE) | ||
66 | return int(p.stdout.decode('utf-8')) | 67 | return int(p.stdout.decode('utf-8')) |
67 | 68 | ||
68 | def get_source_date_epoch_from_youngest_file(d, sourcedir): | 69 | def get_source_date_epoch_from_youngest_file(d, sourcedir): |
@@ -90,8 +91,12 @@ def get_source_date_epoch_from_youngest_file(d, sourcedir): | |||
90 | bb.debug(1, "Newest file found: %s" % newest_file) | 91 | bb.debug(1, "Newest file found: %s" % newest_file) |
91 | return source_date_epoch | 92 | return source_date_epoch |
92 | 93 | ||
93 | def fixed_source_date_epoch(): | 94 | def fixed_source_date_epoch(d): |
94 | bb.debug(1, "No tarball or git repo found to determine SOURCE_DATE_EPOCH") | 95 | bb.debug(1, "No tarball or git repo found to determine SOURCE_DATE_EPOCH") |
96 | source_date_epoch = d.getVar('SOURCE_DATE_EPOCH_FALLBACK') | ||
97 | if source_date_epoch: | ||
98 | bb.debug(1, "Using SOURCE_DATE_EPOCH_FALLBACK") | ||
99 | return int(source_date_epoch) | ||
95 | return 0 | 100 | return 0 |
96 | 101 | ||
97 | def get_source_date_epoch(d, sourcedir): | 102 | def get_source_date_epoch(d, sourcedir): |
@@ -99,6 +104,6 @@ def get_source_date_epoch(d, sourcedir): | |||
99 | get_source_date_epoch_from_git(d, sourcedir) or | 104 | get_source_date_epoch_from_git(d, sourcedir) or |
100 | get_source_date_epoch_from_known_files(d, sourcedir) or | 105 | get_source_date_epoch_from_known_files(d, sourcedir) or |
101 | get_source_date_epoch_from_youngest_file(d, sourcedir) or | 106 | get_source_date_epoch_from_youngest_file(d, sourcedir) or |
102 | fixed_source_date_epoch() # Last resort | 107 | fixed_source_date_epoch(d) # Last resort |
103 | ) | 108 | ) |
104 | 109 | ||
diff --git a/meta/lib/oe/rootfs.py b/meta/lib/oe/rootfs.py index cd65e62030..5391c25af9 100644 --- a/meta/lib/oe/rootfs.py +++ b/meta/lib/oe/rootfs.py | |||
@@ -167,7 +167,7 @@ class Rootfs(object, metaclass=ABCMeta): | |||
167 | pass | 167 | pass |
168 | os.rename(self.image_rootfs, self.image_rootfs + '-dbg') | 168 | os.rename(self.image_rootfs, self.image_rootfs + '-dbg') |
169 | 169 | ||
170 | bb.note(" Restoreing original rootfs...") | 170 | bb.note(" Restoring original rootfs...") |
171 | os.rename(self.image_rootfs + '-orig', self.image_rootfs) | 171 | os.rename(self.image_rootfs + '-orig', self.image_rootfs) |
172 | 172 | ||
173 | def _exec_shell_cmd(self, cmd): | 173 | def _exec_shell_cmd(self, cmd): |
@@ -304,7 +304,7 @@ class Rootfs(object, metaclass=ABCMeta): | |||
304 | def _check_for_kernel_modules(self, modules_dir): | 304 | def _check_for_kernel_modules(self, modules_dir): |
305 | for root, dirs, files in os.walk(modules_dir, topdown=True): | 305 | for root, dirs, files in os.walk(modules_dir, topdown=True): |
306 | for name in files: | 306 | for name in files: |
307 | found_ko = name.endswith(".ko") | 307 | found_ko = name.endswith((".ko", ".ko.gz", ".ko.xz")) |
308 | if found_ko: | 308 | if found_ko: |
309 | return found_ko | 309 | return found_ko |
310 | return False | 310 | return False |
@@ -321,7 +321,9 @@ class Rootfs(object, metaclass=ABCMeta): | |||
321 | if not os.path.exists(kernel_abi_ver_file): | 321 | if not os.path.exists(kernel_abi_ver_file): |
322 | bb.fatal("No kernel-abiversion file found (%s), cannot run depmod, aborting" % kernel_abi_ver_file) | 322 | bb.fatal("No kernel-abiversion file found (%s), cannot run depmod, aborting" % kernel_abi_ver_file) |
323 | 323 | ||
324 | kernel_ver = open(kernel_abi_ver_file).read().strip(' \n') | 324 | with open(kernel_abi_ver_file) as f: |
325 | kernel_ver = f.read().strip(' \n') | ||
326 | |||
325 | versioned_modules_dir = os.path.join(self.image_rootfs, modules_dir, kernel_ver) | 327 | versioned_modules_dir = os.path.join(self.image_rootfs, modules_dir, kernel_ver) |
326 | 328 | ||
327 | bb.utils.mkdirhier(versioned_modules_dir) | 329 | bb.utils.mkdirhier(versioned_modules_dir) |
diff --git a/meta/lib/oe/sbom.py b/meta/lib/oe/sbom.py new file mode 100644 index 0000000000..22ed5070ea --- /dev/null +++ b/meta/lib/oe/sbom.py | |||
@@ -0,0 +1,84 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | import collections | ||
8 | |||
9 | DepRecipe = collections.namedtuple("DepRecipe", ("doc", "doc_sha1", "recipe")) | ||
10 | DepSource = collections.namedtuple("DepSource", ("doc", "doc_sha1", "recipe", "file")) | ||
11 | |||
12 | |||
13 | def get_recipe_spdxid(d): | ||
14 | return "SPDXRef-%s-%s" % ("Recipe", d.getVar("PN")) | ||
15 | |||
16 | |||
17 | def get_download_spdxid(d, idx): | ||
18 | return "SPDXRef-Download-%s-%d" % (d.getVar("PN"), idx) | ||
19 | |||
20 | |||
21 | def get_package_spdxid(pkg): | ||
22 | return "SPDXRef-Package-%s" % pkg | ||
23 | |||
24 | |||
25 | def get_source_file_spdxid(d, idx): | ||
26 | return "SPDXRef-SourceFile-%s-%d" % (d.getVar("PN"), idx) | ||
27 | |||
28 | |||
29 | def get_packaged_file_spdxid(pkg, idx): | ||
30 | return "SPDXRef-PackagedFile-%s-%d" % (pkg, idx) | ||
31 | |||
32 | |||
33 | def get_image_spdxid(img): | ||
34 | return "SPDXRef-Image-%s" % img | ||
35 | |||
36 | |||
37 | def get_sdk_spdxid(sdk): | ||
38 | return "SPDXRef-SDK-%s" % sdk | ||
39 | |||
40 | |||
41 | def write_doc(d, spdx_doc, subdir, spdx_deploy=None, indent=None): | ||
42 | from pathlib import Path | ||
43 | |||
44 | if spdx_deploy is None: | ||
45 | spdx_deploy = Path(d.getVar("SPDXDEPLOY")) | ||
46 | |||
47 | dest = spdx_deploy / subdir / (spdx_doc.name + ".spdx.json") | ||
48 | dest.parent.mkdir(exist_ok=True, parents=True) | ||
49 | with dest.open("wb") as f: | ||
50 | doc_sha1 = spdx_doc.to_json(f, sort_keys=True, indent=indent) | ||
51 | |||
52 | l = spdx_deploy / "by-namespace" / spdx_doc.documentNamespace.replace("/", "_") | ||
53 | l.parent.mkdir(exist_ok=True, parents=True) | ||
54 | l.symlink_to(os.path.relpath(dest, l.parent)) | ||
55 | |||
56 | return doc_sha1 | ||
57 | |||
58 | |||
59 | def read_doc(fn): | ||
60 | import hashlib | ||
61 | import oe.spdx | ||
62 | import io | ||
63 | import contextlib | ||
64 | |||
65 | @contextlib.contextmanager | ||
66 | def get_file(): | ||
67 | if isinstance(fn, io.IOBase): | ||
68 | yield fn | ||
69 | else: | ||
70 | with fn.open("rb") as f: | ||
71 | yield f | ||
72 | |||
73 | with get_file() as f: | ||
74 | sha1 = hashlib.sha1() | ||
75 | while True: | ||
76 | chunk = f.read(4096) | ||
77 | if not chunk: | ||
78 | break | ||
79 | sha1.update(chunk) | ||
80 | |||
81 | f.seek(0) | ||
82 | doc = oe.spdx.SPDXDocument.from_json(f) | ||
83 | |||
84 | return (doc, sha1.hexdigest()) | ||
diff --git a/meta/lib/oe/spdx.py b/meta/lib/oe/spdx.py new file mode 100644 index 0000000000..7aaf2af5ed --- /dev/null +++ b/meta/lib/oe/spdx.py | |||
@@ -0,0 +1,357 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | # | ||
8 | # This library is intended to capture the JSON SPDX specification in a type | ||
9 | # safe manner. It is not intended to encode any particular OE specific | ||
10 | # behaviors, see the sbom.py for that. | ||
11 | # | ||
12 | # The documented SPDX spec document doesn't cover the JSON syntax for | ||
13 | # particular configuration, which can make it hard to determine what the JSON | ||
14 | # syntax should be. I've found it is actually much simpler to read the official | ||
15 | # SPDX JSON schema which can be found here: https://github.com/spdx/spdx-spec | ||
16 | # in schemas/spdx-schema.json | ||
17 | # | ||
18 | |||
19 | import hashlib | ||
20 | import itertools | ||
21 | import json | ||
22 | |||
23 | SPDX_VERSION = "2.2" | ||
24 | |||
25 | |||
26 | # | ||
27 | # The following are the support classes that are used to implement SPDX object | ||
28 | # | ||
29 | |||
30 | class _Property(object): | ||
31 | """ | ||
32 | A generic SPDX object property. The different types will derive from this | ||
33 | class | ||
34 | """ | ||
35 | |||
36 | def __init__(self, *, default=None): | ||
37 | self.default = default | ||
38 | |||
39 | def setdefault(self, dest, name): | ||
40 | if self.default is not None: | ||
41 | dest.setdefault(name, self.default) | ||
42 | |||
43 | |||
44 | class _String(_Property): | ||
45 | """ | ||
46 | A scalar string property for an SPDX object | ||
47 | """ | ||
48 | |||
49 | def __init__(self, **kwargs): | ||
50 | super().__init__(**kwargs) | ||
51 | |||
52 | def set_property(self, attrs, name): | ||
53 | def get_helper(obj): | ||
54 | return obj._spdx[name] | ||
55 | |||
56 | def set_helper(obj, value): | ||
57 | obj._spdx[name] = value | ||
58 | |||
59 | def del_helper(obj): | ||
60 | del obj._spdx[name] | ||
61 | |||
62 | attrs[name] = property(get_helper, set_helper, del_helper) | ||
63 | |||
64 | def init(self, source): | ||
65 | return source | ||
66 | |||
67 | |||
68 | class _Object(_Property): | ||
69 | """ | ||
70 | A scalar SPDX object property of a SPDX object | ||
71 | """ | ||
72 | |||
73 | def __init__(self, cls, **kwargs): | ||
74 | super().__init__(**kwargs) | ||
75 | self.cls = cls | ||
76 | |||
77 | def set_property(self, attrs, name): | ||
78 | def get_helper(obj): | ||
79 | if not name in obj._spdx: | ||
80 | obj._spdx[name] = self.cls() | ||
81 | return obj._spdx[name] | ||
82 | |||
83 | def set_helper(obj, value): | ||
84 | obj._spdx[name] = value | ||
85 | |||
86 | def del_helper(obj): | ||
87 | del obj._spdx[name] | ||
88 | |||
89 | attrs[name] = property(get_helper, set_helper) | ||
90 | |||
91 | def init(self, source): | ||
92 | return self.cls(**source) | ||
93 | |||
94 | |||
95 | class _ListProperty(_Property): | ||
96 | """ | ||
97 | A list of SPDX properties | ||
98 | """ | ||
99 | |||
100 | def __init__(self, prop, **kwargs): | ||
101 | super().__init__(**kwargs) | ||
102 | self.prop = prop | ||
103 | |||
104 | def set_property(self, attrs, name): | ||
105 | def get_helper(obj): | ||
106 | if not name in obj._spdx: | ||
107 | obj._spdx[name] = [] | ||
108 | return obj._spdx[name] | ||
109 | |||
110 | def set_helper(obj, value): | ||
111 | obj._spdx[name] = list(value) | ||
112 | |||
113 | def del_helper(obj): | ||
114 | del obj._spdx[name] | ||
115 | |||
116 | attrs[name] = property(get_helper, set_helper, del_helper) | ||
117 | |||
118 | def init(self, source): | ||
119 | return [self.prop.init(o) for o in source] | ||
120 | |||
121 | |||
class _StringList(_ListProperty):
    """A list property whose elements are plain strings."""

    def __init__(self, **kwargs):
        # Each element is handled by a scalar string property.
        element = _String()
        super().__init__(element, **kwargs)
129 | |||
130 | |||
class _ObjectList(_ListProperty):
    """A list property whose elements are SPDX objects of a fixed class."""

    def __init__(self, cls, **kwargs):
        # Each element is an SPDX object of type *cls*.
        element = _Object(cls)
        super().__init__(element, **kwargs)
138 | |||
139 | |||
class MetaSPDXObject(type):
    """
    Metaclass that collects _Property attributes declared on SPDX classes.

    Every _Property found in the class body is recorded in the class-level
    `_properties` mapping and replaced by a real Python property that
    reads/writes the instance's `_spdx` dict.
    """
    def __new__(mcls, name, bases, attrs):
        attrs["_properties"] = {}

        # Snapshot the items first: set_property() replaces entries in
        # `attrs` while we scan it.
        for key, value in list(attrs.items()):
            if isinstance(value, _Property):
                attrs["_properties"][key] = value
                value.set_property(attrs, key)

        return super().__new__(mcls, name, bases, attrs)
155 | |||
156 | |||
class SPDXObject(metaclass=MetaSPDXObject):
    """
    The base SPDX object; all SPDX spec classes must derive from this class.

    Keyword arguments to the constructor are deserialized through the
    matching declared properties; unknown attributes are rejected.
    """
    def __init__(self, **d):
        self._spdx = {}

        for key, prop in self._properties.items():
            # Seed any declared default, then overwrite with supplied data.
            prop.setdefault(self._spdx, key)
            if key in d:
                self._spdx[key] = prop.init(d[key])

    def serializer(self):
        """Return the raw property dict for JSON serialization."""
        return self._spdx

    def __setattr__(self, name, value):
        # Only the backing dict itself and declared SPDX properties may
        # be assigned; anything else is a spec violation.
        if name == "_spdx" or name in self._properties:
            super().__setattr__(name, value)
            return
        raise KeyError("%r is not a valid SPDX property" % name)
177 | |||
178 | # | ||
179 | # These are the SPDX objects implemented from the spec. The *only* properties | ||
180 | # that can be added to these objects are ones directly specified in the SPDX | ||
181 | # spec, however you may add helper functions to make operations easier. | ||
182 | # | ||
183 | # Defaults should *only* be specified if the SPDX spec says there is a certain | ||
184 | # required value for a field (e.g. dataLicense), or if the field is mandatory | ||
185 | # and has some sane "this field is unknown" (e.g. "NOASSERTION") | ||
186 | # | ||
187 | |||
class SPDXAnnotation(SPDXObject):
    """An annotation (dated, typed comment with its author) on an SPDX element."""
    annotationDate = _String()
    annotationType = _String()
    annotator = _String()
    comment = _String()
193 | |||
class SPDXChecksum(SPDXObject):
    """A checksum: algorithm name plus its hex value."""
    algorithm = _String()
    checksumValue = _String()
197 | |||
198 | |||
class SPDXRelationship(SPDXObject):
    """A typed relationship between two SPDX elements, referenced by SPDXID."""
    spdxElementId = _String()
    relatedSpdxElement = _String()
    relationshipType = _String()
    comment = _String()
    annotations = _ObjectList(SPDXAnnotation)
205 | |||
206 | |||
class SPDXExternalReference(SPDXObject):
    """An external identifier reference (category, type, locator) for a package."""
    referenceCategory = _String()
    referenceType = _String()
    referenceLocator = _String()
211 | |||
212 | |||
class SPDXPackageVerificationCode(SPDXObject):
    """A package verification code and the files excluded from its computation."""
    packageVerificationCodeValue = _String()
    packageVerificationCodeExcludedFiles = _StringList()
216 | |||
217 | |||
class SPDXPackage(SPDXObject):
    """An SPDX package entry and its spec-defined fields."""

    # Checksum algorithm names permitted for entries in `checksums`.
    ALLOWED_CHECKSUMS = [
        "SHA1",
        "SHA224",
        "SHA256",
        "SHA384",
        "SHA512",
        "MD2",
        "MD4",
        "MD5",
        "MD6",
    ]

    # Mandatory fields default to "NOASSERTION", per the note preceding
    # these class definitions.
    name = _String()
    SPDXID = _String()
    versionInfo = _String()
    downloadLocation = _String(default="NOASSERTION")
    supplier = _String(default="NOASSERTION")
    homepage = _String()
    licenseConcluded = _String(default="NOASSERTION")
    licenseDeclared = _String(default="NOASSERTION")
    summary = _String()
    description = _String()
    sourceInfo = _String()
    copyrightText = _String(default="NOASSERTION")
    licenseInfoFromFiles = _StringList(default=["NOASSERTION"])
    externalRefs = _ObjectList(SPDXExternalReference)
    packageVerificationCode = _Object(SPDXPackageVerificationCode)
    hasFiles = _StringList()
    packageFileName = _String()
    annotations = _ObjectList(SPDXAnnotation)
    checksums = _ObjectList(SPDXChecksum)
250 | |||
251 | |||
class SPDXFile(SPDXObject):
    """A single file entry within an SPDX document."""
    SPDXID = _String()
    fileName = _String()
    licenseConcluded = _String(default="NOASSERTION")
    copyrightText = _String(default="NOASSERTION")
    licenseInfoInFiles = _StringList(default=["NOASSERTION"])
    checksums = _ObjectList(SPDXChecksum)
    fileTypes = _StringList()
260 | |||
261 | |||
class SPDXCreationInfo(SPDXObject):
    """Metadata about when and by whom this SPDX document was created."""
    created = _String()
    licenseListVersion = _String()
    comment = _String()
    creators = _StringList()
267 | |||
268 | |||
class SPDXExternalDocumentRef(SPDXObject):
    """A reference to another SPDX document, identified with its checksum."""
    externalDocumentId = _String()
    spdxDocument = _String()
    checksum = _Object(SPDXChecksum)
273 | |||
274 | |||
class SPDXExtractedLicensingInfo(SPDXObject):
    """License text extracted from files, with its locally assigned licenseId."""
    name = _String()
    comment = _String()
    licenseId = _String()
    extractedText = _String()
280 | |||
281 | |||
class SPDXDocument(SPDXObject):
    """
    The top-level SPDX document.

    Defaults follow the SPDX spec's required values (dataLicense, document
    SPDXID) and the module's supported spec version.
    """
    spdxVersion = _String(default="SPDX-" + SPDX_VERSION)
    dataLicense = _String(default="CC0-1.0")
    SPDXID = _String(default="SPDXRef-DOCUMENT")
    name = _String()
    documentNamespace = _String()
    creationInfo = _Object(SPDXCreationInfo)
    packages = _ObjectList(SPDXPackage)
    files = _ObjectList(SPDXFile)
    relationships = _ObjectList(SPDXRelationship)
    externalDocumentRefs = _ObjectList(SPDXExternalDocumentRef)
    hasExtractedLicensingInfos = _ObjectList(SPDXExtractedLicensingInfo)

    # NOTE: a redundant __init__ that only forwarded **d to super() was
    # removed; SPDXObject.__init__ already handles keyword initialization.

    def to_json(self, f, *, sort_keys=False, indent=None, separators=None):
        """Serialize this document as JSON to the file object *f*.

        Returns the hex SHA-1 digest of the exact bytes written, so callers
        can record a checksum of the document without re-reading it.
        """
        class Encoder(json.JSONEncoder):
            def default(self, o):
                # Any nested SPDX object serializes as its property dict.
                if isinstance(o, SPDXObject):
                    return o.serializer()

                return super().default(o)

        sha1 = hashlib.sha1()
        for chunk in Encoder(
            sort_keys=sort_keys,
            indent=indent,
            separators=separators,
        ).iterencode(self):
            chunk = chunk.encode("utf-8")
            f.write(chunk)
            sha1.update(chunk)

        return sha1.hexdigest()

    @classmethod
    def from_json(cls, f):
        """Deserialize a document from the JSON file object *f*."""
        return cls(**json.load(f))

    def add_relationship(self, _from, relationship, _to, *, comment=None, annotation=None):
        """Append a relationship of type *relationship* between two elements.

        *_from* and *_to* may be SPDX objects (their SPDXID is used) or
        plain SPDXID strings. An optional comment and a single optional
        annotation may be attached.
        """
        def spdxid_of(elem):
            # Accept either an object carrying an SPDXID or a raw ID string.
            if isinstance(elem, SPDXObject):
                return elem.SPDXID
            return elem

        r = SPDXRelationship(
            spdxElementId=spdxid_of(_from),
            relatedSpdxElement=spdxid_of(_to),
            relationshipType=relationship,
        )

        if comment is not None:
            r.comment = comment

        if annotation is not None:
            r.annotations.append(annotation)

        self.relationships.append(r)

    def find_by_spdxid(self, spdxid):
        """Return the package or file with the given SPDXID, or None."""
        for o in itertools.chain(self.packages, self.files):
            if o.SPDXID == spdxid:
                return o
        return None

    def find_external_document_ref(self, namespace):
        """Return the external document ref whose spdxDocument equals
        *namespace*, or None."""
        for r in self.externalDocumentRefs:
            if r.spdxDocument == namespace:
                return r
        return None
diff --git a/meta/lib/oe/sstatesig.py b/meta/lib/oe/sstatesig.py index aeceb100d7..65bb4efe25 100644 --- a/meta/lib/oe/sstatesig.py +++ b/meta/lib/oe/sstatesig.py | |||
@@ -480,8 +480,10 @@ def OEOuthashBasic(path, sigfile, task, d): | |||
480 | if "package_write_" in task or task == "package_qa": | 480 | if "package_write_" in task or task == "package_qa": |
481 | include_owners = False | 481 | include_owners = False |
482 | include_timestamps = False | 482 | include_timestamps = False |
483 | include_root = True | ||
483 | if task == "package": | 484 | if task == "package": |
484 | include_timestamps = d.getVar('BUILD_REPRODUCIBLE_BINARIES') == '1' | 485 | include_timestamps = d.getVar('BUILD_REPRODUCIBLE_BINARIES') == '1' |
486 | include_root = False | ||
485 | extra_content = d.getVar('HASHEQUIV_HASH_VERSION') | 487 | extra_content = d.getVar('HASHEQUIV_HASH_VERSION') |
486 | 488 | ||
487 | try: | 489 | try: |
@@ -592,7 +594,8 @@ def OEOuthashBasic(path, sigfile, task, d): | |||
592 | update_hash("\n") | 594 | update_hash("\n") |
593 | 595 | ||
594 | # Process this directory and all its child files | 596 | # Process this directory and all its child files |
595 | process(root) | 597 | if include_root or root != ".": |
598 | process(root) | ||
596 | for f in files: | 599 | for f in files: |
597 | if f == 'fixmepath': | 600 | if f == 'fixmepath': |
598 | continue | 601 | continue |
diff --git a/meta/lib/oe/terminal.py b/meta/lib/oe/terminal.py index eb10a6e33e..a0c166d884 100644 --- a/meta/lib/oe/terminal.py +++ b/meta/lib/oe/terminal.py | |||
@@ -102,6 +102,10 @@ class Rxvt(XTerminal): | |||
102 | command = 'rxvt -T "{title}" -e {command}' | 102 | command = 'rxvt -T "{title}" -e {command}' |
103 | priority = 1 | 103 | priority = 1 |
104 | 104 | ||
105 | class URxvt(XTerminal): | ||
106 | command = 'urxvt -T "{title}" -e {command}' | ||
107 | priority = 1 | ||
108 | |||
105 | class Screen(Terminal): | 109 | class Screen(Terminal): |
106 | command = 'screen -D -m -t "{title}" -S devshell {command}' | 110 | command = 'screen -D -m -t "{title}" -S devshell {command}' |
107 | 111 | ||
@@ -163,7 +167,12 @@ class Tmux(Terminal): | |||
163 | # devshells, if it's already there, add a new window to it. | 167 | # devshells, if it's already there, add a new window to it. |
164 | window_name = 'devshell-%i' % os.getpid() | 168 | window_name = 'devshell-%i' % os.getpid() |
165 | 169 | ||
166 | self.command = 'tmux new -c "{{cwd}}" -d -s {0} -n {0} "{{command}}"'.format(window_name) | 170 | self.command = 'tmux new -c "{{cwd}}" -d -s {0} -n {0} "{{command}}"' |
171 | if not check_tmux_version('1.9'): | ||
172 | # `tmux new-session -c` was added in 1.9; | ||
173 | # older versions fail with that flag | ||
174 | self.command = 'tmux new -d -s {0} -n {0} "{{command}}"' | ||
175 | self.command = self.command.format(window_name) | ||
167 | Terminal.__init__(self, sh_cmd, title, env, d) | 176 | Terminal.__init__(self, sh_cmd, title, env, d) |
168 | 177 | ||
169 | attach_cmd = 'tmux att -t {0}'.format(window_name) | 178 | attach_cmd = 'tmux att -t {0}'.format(window_name) |
@@ -253,13 +262,18 @@ def spawn(name, sh_cmd, title=None, env=None, d=None): | |||
253 | except OSError: | 262 | except OSError: |
254 | return | 263 | return |
255 | 264 | ||
265 | def check_tmux_version(desired): | ||
266 | vernum = check_terminal_version("tmux") | ||
267 | if vernum and LooseVersion(vernum) < desired: | ||
268 | return False | ||
269 | return vernum | ||
270 | |||
256 | def check_tmux_pane_size(tmux): | 271 | def check_tmux_pane_size(tmux): |
257 | import subprocess as sub | 272 | import subprocess as sub |
258 | # On older tmux versions (<1.9), return false. The reason | 273 | # On older tmux versions (<1.9), return false. The reason |
259 | # is that there is no easy way to get the height of the active panel | 274 | # is that there is no easy way to get the height of the active panel |
260 | # on current window without nested formats (available from version 1.9) | 275 | # on current window without nested formats (available from version 1.9) |
261 | vernum = check_terminal_version("tmux") | 276 | if not check_tmux_version('1.9'): |
262 | if vernum and LooseVersion(vernum) < '1.9': | ||
263 | return False | 277 | return False |
264 | try: | 278 | try: |
265 | p = sub.Popen('%s list-panes -F "#{?pane_active,#{pane_height},}"' % tmux, | 279 | p = sub.Popen('%s list-panes -F "#{?pane_active,#{pane_height},}"' % tmux, |
diff --git a/meta/lib/oe/utils.py b/meta/lib/oe/utils.py index 83d298906b..3e016244c5 100644 --- a/meta/lib/oe/utils.py +++ b/meta/lib/oe/utils.py | |||
@@ -481,7 +481,8 @@ class ThreadedWorker(Thread): | |||
481 | try: | 481 | try: |
482 | func(self, *args, **kargs) | 482 | func(self, *args, **kargs) |
483 | except Exception as e: | 483 | except Exception as e: |
484 | print(e) | 484 | # Eat all exceptions |
485 | bb.mainlogger.debug("Worker task raised %s" % e, exc_info=e) | ||
485 | finally: | 486 | finally: |
486 | self.tasks.task_done() | 487 | self.tasks.task_done() |
487 | 488 | ||
diff --git a/meta/lib/oeqa/core/case.py b/meta/lib/oeqa/core/case.py index aae451fef2..bc4446a938 100644 --- a/meta/lib/oeqa/core/case.py +++ b/meta/lib/oeqa/core/case.py | |||
@@ -43,8 +43,13 @@ class OETestCase(unittest.TestCase): | |||
43 | clss.tearDownClassMethod() | 43 | clss.tearDownClassMethod() |
44 | 44 | ||
45 | def _oeSetUp(self): | 45 | def _oeSetUp(self): |
46 | for d in self.decorators: | 46 | try: |
47 | d.setUpDecorator() | 47 | for d in self.decorators: |
48 | d.setUpDecorator() | ||
49 | except: | ||
50 | for d in self.decorators: | ||
51 | d.tearDownDecorator() | ||
52 | raise | ||
48 | self.setUpMethod() | 53 | self.setUpMethod() |
49 | 54 | ||
50 | def _oeTearDown(self): | 55 | def _oeTearDown(self): |
diff --git a/meta/lib/oeqa/core/decorator/oetimeout.py b/meta/lib/oeqa/core/decorator/oetimeout.py index df90d1c798..5e6873ad48 100644 --- a/meta/lib/oeqa/core/decorator/oetimeout.py +++ b/meta/lib/oeqa/core/decorator/oetimeout.py | |||
@@ -24,5 +24,6 @@ class OETimeout(OETestDecorator): | |||
24 | 24 | ||
25 | def tearDownDecorator(self): | 25 | def tearDownDecorator(self): |
26 | signal.alarm(0) | 26 | signal.alarm(0) |
27 | signal.signal(signal.SIGALRM, self.alarmSignal) | 27 | if hasattr(self, 'alarmSignal'): |
28 | self.logger.debug("Removed SIGALRM handler") | 28 | signal.signal(signal.SIGALRM, self.alarmSignal) |
29 | self.logger.debug("Removed SIGALRM handler") | ||
diff --git a/meta/lib/oeqa/core/target/ssh.py b/meta/lib/oeqa/core/target/ssh.py index aefb576805..832b6216f6 100644 --- a/meta/lib/oeqa/core/target/ssh.py +++ b/meta/lib/oeqa/core/target/ssh.py | |||
@@ -34,6 +34,7 @@ class OESSHTarget(OETarget): | |||
34 | self.timeout = timeout | 34 | self.timeout = timeout |
35 | self.user = user | 35 | self.user = user |
36 | ssh_options = [ | 36 | ssh_options = [ |
37 | '-o', 'HostKeyAlgorithms=+ssh-rsa', | ||
37 | '-o', 'UserKnownHostsFile=/dev/null', | 38 | '-o', 'UserKnownHostsFile=/dev/null', |
38 | '-o', 'StrictHostKeyChecking=no', | 39 | '-o', 'StrictHostKeyChecking=no', |
39 | '-o', 'LogLevel=ERROR' | 40 | '-o', 'LogLevel=ERROR' |
@@ -225,6 +226,9 @@ def SSHCall(command, logger, timeout=None, **opts): | |||
225 | endtime = time.time() + timeout | 226 | endtime = time.time() + timeout |
226 | except InterruptedError: | 227 | except InterruptedError: |
227 | continue | 228 | continue |
229 | except BlockingIOError: | ||
230 | logger.debug('BlockingIOError') | ||
231 | continue | ||
228 | 232 | ||
229 | # process hasn't returned yet | 233 | # process hasn't returned yet |
230 | if not eof: | 234 | if not eof: |
diff --git a/meta/lib/oeqa/core/tests/cases/timeout.py b/meta/lib/oeqa/core/tests/cases/timeout.py index 5dfecc7b7c..69cf969a67 100644 --- a/meta/lib/oeqa/core/tests/cases/timeout.py +++ b/meta/lib/oeqa/core/tests/cases/timeout.py | |||
@@ -8,6 +8,7 @@ from time import sleep | |||
8 | 8 | ||
9 | from oeqa.core.case import OETestCase | 9 | from oeqa.core.case import OETestCase |
10 | from oeqa.core.decorator.oetimeout import OETimeout | 10 | from oeqa.core.decorator.oetimeout import OETimeout |
11 | from oeqa.core.decorator.depends import OETestDepends | ||
11 | 12 | ||
12 | class TimeoutTest(OETestCase): | 13 | class TimeoutTest(OETestCase): |
13 | 14 | ||
@@ -19,3 +20,15 @@ class TimeoutTest(OETestCase): | |||
19 | def testTimeoutFail(self): | 20 | def testTimeoutFail(self): |
20 | sleep(2) | 21 | sleep(2) |
21 | self.assertTrue(True, msg='How is this possible?') | 22 | self.assertTrue(True, msg='How is this possible?') |
23 | |||
24 | |||
25 | def testTimeoutSkip(self): | ||
26 | self.skipTest("This test needs to be skipped, so that testTimeoutDepends()'s OETestDepends kicks in") | ||
27 | |||
28 | @OETestDepends(["timeout.TimeoutTest.testTimeoutSkip"]) | ||
29 | @OETimeout(3) | ||
30 | def testTimeoutDepends(self): | ||
31 | self.assertTrue(False, msg='How is this possible?') | ||
32 | |||
33 | def testTimeoutUnrelated(self): | ||
34 | sleep(6) | ||
diff --git a/meta/lib/oeqa/core/tests/test_decorators.py b/meta/lib/oeqa/core/tests/test_decorators.py index b798bf7d33..5095f39948 100755 --- a/meta/lib/oeqa/core/tests/test_decorators.py +++ b/meta/lib/oeqa/core/tests/test_decorators.py | |||
@@ -133,5 +133,11 @@ class TestTimeoutDecorator(TestBase): | |||
133 | msg = "OETestTimeout didn't restore SIGALRM" | 133 | msg = "OETestTimeout didn't restore SIGALRM" |
134 | self.assertIs(alarm_signal, signal.getsignal(signal.SIGALRM), msg=msg) | 134 | self.assertIs(alarm_signal, signal.getsignal(signal.SIGALRM), msg=msg) |
135 | 135 | ||
136 | def test_timeout_cancel(self): | ||
137 | tests = ['timeout.TimeoutTest.testTimeoutSkip', 'timeout.TimeoutTest.testTimeoutDepends', 'timeout.TimeoutTest.testTimeoutUnrelated'] | ||
138 | msg = 'Unrelated test failed to complete' | ||
139 | tc = self._testLoader(modules=self.modules, tests=tests) | ||
140 | self.assertTrue(tc.runTests().wasSuccessful(), msg=msg) | ||
141 | |||
136 | if __name__ == '__main__': | 142 | if __name__ == '__main__': |
137 | unittest.main() | 143 | unittest.main() |
diff --git a/meta/lib/oeqa/manual/eclipse-plugin.json b/meta/lib/oeqa/manual/eclipse-plugin.json index d77d0e673b..6c110d0656 100644 --- a/meta/lib/oeqa/manual/eclipse-plugin.json +++ b/meta/lib/oeqa/manual/eclipse-plugin.json | |||
@@ -44,7 +44,7 @@ | |||
44 | "expected_results": "" | 44 | "expected_results": "" |
45 | }, | 45 | }, |
46 | "2": { | 46 | "2": { |
47 | "action": "wget autobuilder.yoctoproject.org/pub/releases//machines/qemu/qemux86/qemu (ex:core-image-sato-sdk-qemux86-date-rootfs-tar-bz2) \nsource /opt/poky/version/environment-setup-i585-poky-linux \n\nExtract qemu with runqemu-extract-sdk /home/user/file(ex.core-image-sato-sdk-qemux86.bz2) \n/home/user/qemux86-sato-sdk \n\n", | 47 | "action": "wget https://downloads.yoctoproject.org/releases/yocto/yocto-$VERSION/machines/qemu/qemux86/ (ex:core-image-sato-sdk-qemux86-date-rootfs-tar-bz2) \nsource /opt/poky/version/environment-setup-i585-poky-linux \n\nExtract qemu with runqemu-extract-sdk /home/user/file(ex.core-image-sato-sdk-qemux86.bz2) \n/home/user/qemux86-sato-sdk \n\n", |
48 | "expected_results": " Qemu can be lauched normally." | 48 | "expected_results": " Qemu can be lauched normally." |
49 | }, | 49 | }, |
50 | "3": { | 50 | "3": { |
@@ -60,7 +60,7 @@ | |||
60 | "expected_results": "" | 60 | "expected_results": "" |
61 | }, | 61 | }, |
62 | "6": { | 62 | "6": { |
63 | "action": "(d) QEMU: \nSelect this option if you will be using the QEMU emulator. Specify the Kernel matching the QEMU architecture you are using. \n wget autobuilder.yoctoproject.org/pub/releases//machines/qemu/qemux86/bzImage-qemux86.bin \n e.g: /home/$USER/yocto/adt-installer/download_image/bzImage-qemux86.bin \n\n", | 63 | "action": "(d) QEMU: \nSelect this option if you will be using the QEMU emulator. Specify the Kernel matching the QEMU architecture you are using. \n wget https://downloads.yoctoproject.org/releases/yocto/yocto-$VERSION/machines/qemu/qemux86/bzImage-qemux86.bin \n e.g: /home/$USER/yocto/adt-installer/download_image/bzImage-qemux86.bin \n\n", |
64 | "expected_results": "" | 64 | "expected_results": "" |
65 | }, | 65 | }, |
66 | "7": { | 66 | "7": { |
@@ -247,7 +247,7 @@ | |||
247 | "execution": { | 247 | "execution": { |
248 | "1": { | 248 | "1": { |
249 | "action": "Clone eclipse-poky source. \n \n - git clone git://git.yoctoproject.org/eclipse-poky \n\n", | 249 | "action": "Clone eclipse-poky source. \n \n - git clone git://git.yoctoproject.org/eclipse-poky \n\n", |
250 | "expected_results": "Eclipse plugin is successfully installed \n\nDocumentation is there. For example if you have release yocto-2.0.1 you will found on http://autobuilder.yoctoproject.org/pub/releases/yocto-2.0.1/eclipse-plugin/mars/ archive with documentation like org.yocto.doc-development-$date.zip \n \n" | 250 | "expected_results": "Eclipse plugin is successfully installed \n\nDocumentation is there. For example if you have release yocto-2.0.1 you will found on https://downloads.yoctoproject.org/releases/yocto/yocto-2.0.1/eclipse-plugin/mars/ archive with documentation like org.yocto.doc-development-$date.zip \n \n" |
251 | }, | 251 | }, |
252 | "2": { | 252 | "2": { |
253 | "action": "Checkout correct tag. \n\n - git checkout <eclipse-version>/<yocto-version> \n\n", | 253 | "action": "Checkout correct tag. \n\n - git checkout <eclipse-version>/<yocto-version> \n\n", |
diff --git a/meta/lib/oeqa/manual/toaster-managed-mode.json b/meta/lib/oeqa/manual/toaster-managed-mode.json index 12374c7c64..9566d9d10e 100644 --- a/meta/lib/oeqa/manual/toaster-managed-mode.json +++ b/meta/lib/oeqa/manual/toaster-managed-mode.json | |||
@@ -136,7 +136,7 @@ | |||
136 | "expected_results": "" | 136 | "expected_results": "" |
137 | }, | 137 | }, |
138 | "3": { | 138 | "3": { |
139 | "action": "Check that default values are as follows: \n\tDISTRO - poky \n\tIMAGE_FSTYPES - ext3 jffs2 tar.bz2 \n\tIMAGE_INSTALL_append - \"Not set\" \n\tPACKAGE_CLASES - package_rpm \n SSTATE_DIR - /homeDirectory/poky/sstate-cache \n\n", | 139 | "action": "Check that default values are as follows: \n\tDISTRO - poky \n\tIMAGE_FSTYPES - ext3 jffs2 tar.bz2 \n\tIMAGE_INSTALL_append - \"Not set\" \n\tPACKAGE_CLASSES - package_rpm \n SSTATE_DIR - /homeDirectory/poky/sstate-cache \n\n", |
140 | "expected_results": "" | 140 | "expected_results": "" |
141 | }, | 141 | }, |
142 | "4": { | 142 | "4": { |
diff --git a/meta/lib/oeqa/runtime/cases/date.py b/meta/lib/oeqa/runtime/cases/date.py index fdd2a6ae58..bd6537400e 100644 --- a/meta/lib/oeqa/runtime/cases/date.py +++ b/meta/lib/oeqa/runtime/cases/date.py | |||
@@ -13,12 +13,12 @@ class DateTest(OERuntimeTestCase): | |||
13 | def setUp(self): | 13 | def setUp(self): |
14 | if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd': | 14 | if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd': |
15 | self.logger.debug('Stopping systemd-timesyncd daemon') | 15 | self.logger.debug('Stopping systemd-timesyncd daemon') |
16 | self.target.run('systemctl disable --now systemd-timesyncd') | 16 | self.target.run('systemctl disable --now --runtime systemd-timesyncd') |
17 | 17 | ||
18 | def tearDown(self): | 18 | def tearDown(self): |
19 | if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd': | 19 | if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd': |
20 | self.logger.debug('Starting systemd-timesyncd daemon') | 20 | self.logger.debug('Starting systemd-timesyncd daemon') |
21 | self.target.run('systemctl enable --now systemd-timesyncd') | 21 | self.target.run('systemctl enable --now --runtime systemd-timesyncd') |
22 | 22 | ||
23 | @OETestDepends(['ssh.SSHTest.test_ssh']) | 23 | @OETestDepends(['ssh.SSHTest.test_ssh']) |
24 | @OEHasPackage(['coreutils', 'busybox']) | 24 | @OEHasPackage(['coreutils', 'busybox']) |
@@ -28,14 +28,13 @@ class DateTest(OERuntimeTestCase): | |||
28 | self.assertEqual(status, 0, msg=msg) | 28 | self.assertEqual(status, 0, msg=msg) |
29 | oldDate = output | 29 | oldDate = output |
30 | 30 | ||
31 | sampleDate = '"2016-08-09 10:00:00"' | 31 | sampleTimestamp = 1488800000 |
32 | (status, output) = self.target.run("date -s %s" % sampleDate) | 32 | (status, output) = self.target.run("date -s @%d" % sampleTimestamp) |
33 | self.assertEqual(status, 0, msg='Date set failed, output: %s' % output) | 33 | self.assertEqual(status, 0, msg='Date set failed, output: %s' % output) |
34 | 34 | ||
35 | (status, output) = self.target.run("date -R") | 35 | (status, output) = self.target.run('date +"%s"') |
36 | p = re.match('Tue, 09 Aug 2016 10:00:.. \+0000', output) | ||
37 | msg = 'The date was not set correctly, output: %s' % output | 36 | msg = 'The date was not set correctly, output: %s' % output |
38 | self.assertTrue(p, msg=msg) | 37 | self.assertTrue(int(output) - sampleTimestamp < 300, msg=msg) |
39 | 38 | ||
40 | (status, output) = self.target.run('date -s "%s"' % oldDate) | 39 | (status, output) = self.target.run('date -s "%s"' % oldDate) |
41 | msg = 'Failed to reset date, output: %s' % output | 40 | msg = 'Failed to reset date, output: %s' % output |
diff --git a/meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py b/meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py new file mode 100644 index 0000000000..e010612838 --- /dev/null +++ b/meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py | |||
@@ -0,0 +1,36 @@ | |||
1 | from oeqa.runtime.case import OERuntimeTestCase | ||
2 | from oeqa.core.decorator.depends import OETestDepends | ||
3 | from oeqa.core.decorator.data import skipIfQemu | ||
4 | |||
5 | class Ethernet_Test(OERuntimeTestCase): | ||
6 | |||
7 | def set_ip(self, x): | ||
8 | x = x.split(".") | ||
9 | sample_host_address = '150' | ||
10 | x[3] = sample_host_address | ||
11 | x = '.'.join(x) | ||
12 | return x | ||
13 | |||
14 | @skipIfQemu('qemuall', 'Test only runs on real hardware') | ||
15 | @OETestDepends(['ssh.SSHTest.test_ssh']) | ||
16 | def test_set_virtual_ip(self): | ||
17 | (status, output) = self.target.run("ifconfig eth0 | grep 'inet ' | awk '{print $2}'") | ||
18 | self.assertEqual(status, 0, msg='Failed to get ip address. Make sure you have an ethernet connection on your device, output: %s' % output) | ||
19 | original_ip = output | ||
20 | virtual_ip = self.set_ip(original_ip) | ||
21 | |||
22 | (status, output) = self.target.run("ifconfig eth0:1 %s netmask 255.255.255.0 && sleep 2 && ping -c 5 %s && ifconfig eth0:1 down" % (virtual_ip,virtual_ip)) | ||
23 | self.assertEqual(status, 0, msg='Failed to create virtual ip address, output: %s' % output) | ||
24 | |||
25 | @OETestDepends(['ethernet_ip_connman.Ethernet_Test.test_set_virtual_ip']) | ||
26 | def test_get_ip_from_dhcp(self): | ||
27 | (status, output) = self.target.run("connmanctl services | grep -E '*AO Wired|*AR Wired' | awk '{print $3}'") | ||
28 | self.assertEqual(status, 0, msg='No wired interfaces are detected, output: %s' % output) | ||
29 | wired_interfaces = output | ||
30 | |||
31 | (status, output) = self.target.run("ip route | grep default | awk '{print $3}'") | ||
32 | self.assertEqual(status, 0, msg='Failed to retrieve the default gateway, output: %s' % output) | ||
33 | default_gateway = output | ||
34 | |||
35 | (status, output) = self.target.run("connmanctl config %s --ipv4 dhcp && sleep 2 && ping -c 5 %s" % (wired_interfaces,default_gateway)) | ||
36 | self.assertEqual(status, 0, msg='Failed to get dynamic IP address via DHCP in connmand, output: %s' % output) \ No newline at end of file | ||
diff --git a/meta/lib/oeqa/runtime/cases/ksample.py b/meta/lib/oeqa/runtime/cases/ksample.py index a9a1620ebd..9883aa9aa8 100644 --- a/meta/lib/oeqa/runtime/cases/ksample.py +++ b/meta/lib/oeqa/runtime/cases/ksample.py | |||
@@ -10,7 +10,7 @@ from oeqa.core.decorator.depends import OETestDepends | |||
10 | from oeqa.core.decorator.data import skipIfNotFeature | 10 | from oeqa.core.decorator.data import skipIfNotFeature |
11 | 11 | ||
12 | # need some kernel fragments | 12 | # need some kernel fragments |
13 | # echo "KERNEL_FEATURES_append += \" features\/kernel\-sample\/kernel\-sample.scc\"" >> local.conf | 13 | # echo "KERNEL_FEATURES_append = \" features\/kernel\-sample\/kernel\-sample.scc\"" >> local.conf |
14 | class KSample(OERuntimeTestCase): | 14 | class KSample(OERuntimeTestCase): |
15 | def cmd_and_check(self, cmd='', match_string=''): | 15 | def cmd_and_check(self, cmd='', match_string=''): |
16 | status, output = self.target.run(cmd) | 16 | status, output = self.target.run(cmd) |
diff --git a/meta/lib/oeqa/runtime/cases/ltp.py b/meta/lib/oeqa/runtime/cases/ltp.py index a66d5d13d7..879f2a673c 100644 --- a/meta/lib/oeqa/runtime/cases/ltp.py +++ b/meta/lib/oeqa/runtime/cases/ltp.py | |||
@@ -67,7 +67,7 @@ class LtpTest(LtpTestBase): | |||
67 | def runltp(self, ltp_group): | 67 | def runltp(self, ltp_group): |
68 | cmd = '/opt/ltp/runltp -f %s -p -q -r /opt/ltp -l /opt/ltp/results/%s -I 1 -d /opt/ltp' % (ltp_group, ltp_group) | 68 | cmd = '/opt/ltp/runltp -f %s -p -q -r /opt/ltp -l /opt/ltp/results/%s -I 1 -d /opt/ltp' % (ltp_group, ltp_group) |
69 | starttime = time.time() | 69 | starttime = time.time() |
70 | (status, output) = self.target.run(cmd) | 70 | (status, output) = self.target.run(cmd, timeout=1200) |
71 | endtime = time.time() | 71 | endtime = time.time() |
72 | 72 | ||
73 | with open(os.path.join(self.ltptest_log_dir, "%s-raw.log" % ltp_group), 'w') as f: | 73 | with open(os.path.join(self.ltptest_log_dir, "%s-raw.log" % ltp_group), 'w') as f: |
diff --git a/meta/lib/oeqa/runtime/cases/pam.py b/meta/lib/oeqa/runtime/cases/pam.py index 271a1943e3..a482ded945 100644 --- a/meta/lib/oeqa/runtime/cases/pam.py +++ b/meta/lib/oeqa/runtime/cases/pam.py | |||
@@ -8,11 +8,14 @@ | |||
8 | from oeqa.runtime.case import OERuntimeTestCase | 8 | from oeqa.runtime.case import OERuntimeTestCase |
9 | from oeqa.core.decorator.depends import OETestDepends | 9 | from oeqa.core.decorator.depends import OETestDepends |
10 | from oeqa.core.decorator.data import skipIfNotFeature | 10 | from oeqa.core.decorator.data import skipIfNotFeature |
11 | from oeqa.runtime.decorator.package import OEHasPackage | ||
11 | 12 | ||
12 | class PamBasicTest(OERuntimeTestCase): | 13 | class PamBasicTest(OERuntimeTestCase): |
13 | 14 | ||
14 | @skipIfNotFeature('pam', 'Test requires pam to be in DISTRO_FEATURES') | 15 | @skipIfNotFeature('pam', 'Test requires pam to be in DISTRO_FEATURES') |
15 | @OETestDepends(['ssh.SSHTest.test_ssh']) | 16 | @OETestDepends(['ssh.SSHTest.test_ssh']) |
17 | @OEHasPackage(['shadow']) | ||
18 | @OEHasPackage(['shadow-base']) | ||
16 | def test_pam(self): | 19 | def test_pam(self): |
17 | status, output = self.target.run('login --help') | 20 | status, output = self.target.run('login --help') |
18 | msg = ('login command does not work as expected. ' | 21 | msg = ('login command does not work as expected. ' |
diff --git a/meta/lib/oeqa/runtime/cases/parselogs.py b/meta/lib/oeqa/runtime/cases/parselogs.py index a1791b5cca..1cac59725d 100644 --- a/meta/lib/oeqa/runtime/cases/parselogs.py +++ b/meta/lib/oeqa/runtime/cases/parselogs.py | |||
@@ -32,7 +32,7 @@ common_errors = [ | |||
32 | "Failed to load module \"fbdev\"", | 32 | "Failed to load module \"fbdev\"", |
33 | "Failed to load module fbdev", | 33 | "Failed to load module fbdev", |
34 | "Failed to load module glx", | 34 | "Failed to load module glx", |
35 | "[drm] Cannot find any crtc or sizes - going 1024x768", | 35 | "[drm] Cannot find any crtc or sizes", |
36 | "_OSC failed (AE_NOT_FOUND); disabling ASPM", | 36 | "_OSC failed (AE_NOT_FOUND); disabling ASPM", |
37 | "Open ACPI failed (/var/run/acpid.socket) (No such file or directory)", | 37 | "Open ACPI failed (/var/run/acpid.socket) (No such file or directory)", |
38 | "NX (Execute Disable) protection cannot be enabled: non-PAE kernel!", | 38 | "NX (Execute Disable) protection cannot be enabled: non-PAE kernel!", |
@@ -61,6 +61,8 @@ common_errors = [ | |||
61 | "[rdrand]: Initialization Failed", | 61 | "[rdrand]: Initialization Failed", |
62 | "[pulseaudio] authkey.c: Failed to open cookie file", | 62 | "[pulseaudio] authkey.c: Failed to open cookie file", |
63 | "[pulseaudio] authkey.c: Failed to load authentication key", | 63 | "[pulseaudio] authkey.c: Failed to load authentication key", |
64 | "was skipped because of a failed condition check", | ||
65 | "was skipped because all trigger condition checks failed", | ||
64 | ] | 66 | ] |
65 | 67 | ||
66 | video_related = [ | 68 | video_related = [ |
@@ -88,6 +90,9 @@ qemux86_common = [ | |||
88 | 'tsc: HPET/PMTIMER calibration failed', | 90 | 'tsc: HPET/PMTIMER calibration failed', |
89 | "modeset(0): Failed to initialize the DRI2 extension", | 91 | "modeset(0): Failed to initialize the DRI2 extension", |
90 | "glamor initialization failed", | 92 | "glamor initialization failed", |
93 | "blk_update_request: I/O error, dev fd0, sector 0 op 0x0:(READ)", | ||
94 | "floppy: error", | ||
95 | 'failed to IDENTIFY (I/O error, err_mask=0x4)', | ||
91 | ] + common_errors | 96 | ] + common_errors |
92 | 97 | ||
93 | ignore_errors = { | 98 | ignore_errors = { |
@@ -293,7 +298,7 @@ class ParseLogsTest(OERuntimeTestCase): | |||
293 | grepcmd = 'grep ' | 298 | grepcmd = 'grep ' |
294 | grepcmd += '-Ei "' | 299 | grepcmd += '-Ei "' |
295 | for error in errors: | 300 | for error in errors: |
296 | grepcmd += '\<' + error + '\>' + '|' | 301 | grepcmd += r'\<' + error + r'\>' + '|' |
297 | grepcmd = grepcmd[:-1] | 302 | grepcmd = grepcmd[:-1] |
298 | grepcmd += '" ' + str(log) + " | grep -Eiv \'" | 303 | grepcmd += '" ' + str(log) + " | grep -Eiv \'" |
299 | 304 | ||
@@ -304,13 +309,13 @@ class ParseLogsTest(OERuntimeTestCase): | |||
304 | errorlist = ignore_errors['default'] | 309 | errorlist = ignore_errors['default'] |
305 | 310 | ||
306 | for ignore_error in errorlist: | 311 | for ignore_error in errorlist: |
307 | ignore_error = ignore_error.replace('(', '\(') | 312 | ignore_error = ignore_error.replace('(', r'\(') |
308 | ignore_error = ignore_error.replace(')', '\)') | 313 | ignore_error = ignore_error.replace(')', r'\)') |
309 | ignore_error = ignore_error.replace("'", '.') | 314 | ignore_error = ignore_error.replace("'", '.') |
310 | ignore_error = ignore_error.replace('?', '\?') | 315 | ignore_error = ignore_error.replace('?', r'\?') |
311 | ignore_error = ignore_error.replace('[', '\[') | 316 | ignore_error = ignore_error.replace('[', r'\[') |
312 | ignore_error = ignore_error.replace(']', '\]') | 317 | ignore_error = ignore_error.replace(']', r'\]') |
313 | ignore_error = ignore_error.replace('*', '\*') | 318 | ignore_error = ignore_error.replace('*', r'\*') |
314 | ignore_error = ignore_error.replace('0-9', '[0-9]') | 319 | ignore_error = ignore_error.replace('0-9', '[0-9]') |
315 | grepcmd += ignore_error + '|' | 320 | grepcmd += ignore_error + '|' |
316 | grepcmd = grepcmd[:-1] | 321 | grepcmd = grepcmd[:-1] |
diff --git a/meta/lib/oeqa/runtime/cases/ping.py b/meta/lib/oeqa/runtime/cases/ping.py index f6603f75ec..498f80d0a5 100644 --- a/meta/lib/oeqa/runtime/cases/ping.py +++ b/meta/lib/oeqa/runtime/cases/ping.py | |||
@@ -6,6 +6,7 @@ from subprocess import Popen, PIPE | |||
6 | 6 | ||
7 | from oeqa.runtime.case import OERuntimeTestCase | 7 | from oeqa.runtime.case import OERuntimeTestCase |
8 | from oeqa.core.decorator.oetimeout import OETimeout | 8 | from oeqa.core.decorator.oetimeout import OETimeout |
9 | from oeqa.core.exception import OEQATimeoutError | ||
9 | 10 | ||
10 | class PingTest(OERuntimeTestCase): | 11 | class PingTest(OERuntimeTestCase): |
11 | 12 | ||
@@ -13,14 +14,17 @@ class PingTest(OERuntimeTestCase): | |||
13 | def test_ping(self): | 14 | def test_ping(self): |
14 | output = '' | 15 | output = '' |
15 | count = 0 | 16 | count = 0 |
16 | while count < 5: | 17 | try: |
17 | cmd = 'ping -c 1 %s' % self.target.ip | 18 | while count < 5: |
18 | proc = Popen(cmd, shell=True, stdout=PIPE) | 19 | cmd = 'ping -c 1 %s' % self.target.ip |
19 | output += proc.communicate()[0].decode('utf-8') | 20 | proc = Popen(cmd, shell=True, stdout=PIPE) |
20 | if proc.poll() == 0: | 21 | output += proc.communicate()[0].decode('utf-8') |
21 | count += 1 | 22 | if proc.poll() == 0: |
22 | else: | 23 | count += 1 |
23 | count = 0 | 24 | else: |
25 | count = 0 | ||
26 | except OEQATimeoutError: | ||
27 | self.fail("Ping timeout error for address %s, count %s, output: %s" % (self.target.ip, count, output)) | ||
24 | msg = ('Expected 5 consecutive, got %d.\n' | 28 | msg = ('Expected 5 consecutive, got %d.\n' |
25 | 'ping output is:\n%s' % (count,output)) | 29 | 'ping output is:\n%s' % (count,output)) |
26 | self.assertEqual(count, 5, msg = msg) | 30 | self.assertEqual(count, 5, msg = msg) |
diff --git a/meta/lib/oeqa/runtime/cases/rpm.py b/meta/lib/oeqa/runtime/cases/rpm.py index 8e18b426f8..203fcc8505 100644 --- a/meta/lib/oeqa/runtime/cases/rpm.py +++ b/meta/lib/oeqa/runtime/cases/rpm.py | |||
@@ -49,21 +49,20 @@ class RpmBasicTest(OERuntimeTestCase): | |||
49 | msg = 'status: %s. Cannot run rpm -qa: %s' % (status, output) | 49 | msg = 'status: %s. Cannot run rpm -qa: %s' % (status, output) |
50 | self.assertEqual(status, 0, msg=msg) | 50 | self.assertEqual(status, 0, msg=msg) |
51 | 51 | ||
52 | def check_no_process_for_user(u): | 52 | def wait_for_no_process_for_user(u, timeout = 120): |
53 | _, output = self.target.run(self.tc.target_cmds['ps']) | 53 | timeout_at = time.time() + timeout |
54 | if u + ' ' in output: | 54 | while time.time() < timeout_at: |
55 | return False | 55 | _, output = self.target.run(self.tc.target_cmds['ps']) |
56 | else: | 56 | if u + ' ' not in output: |
57 | return True | 57 | return |
58 | time.sleep(1) | ||
59 | user_pss = [ps for ps in output.split("\n") if u + ' ' in ps] | ||
60 | msg = "User %s has processes still running: %s" % (u, "\n".join(user_pss)) | ||
61 | self.fail(msg=msg) | ||
58 | 62 | ||
59 | def unset_up_test_user(u): | 63 | def unset_up_test_user(u): |
60 | # ensure no test1 process in running | 64 | # ensure no test1 process in running |
61 | timeout = time.time() + 30 | 65 | wait_for_no_process_for_user(u) |
62 | while time.time() < timeout: | ||
63 | if check_no_process_for_user(u): | ||
64 | break | ||
65 | else: | ||
66 | time.sleep(1) | ||
67 | status, output = self.target.run('userdel -r %s' % u) | 66 | status, output = self.target.run('userdel -r %s' % u) |
68 | msg = 'Failed to erase user: %s' % output | 67 | msg = 'Failed to erase user: %s' % output |
69 | self.assertTrue(status == 0, msg=msg) | 68 | self.assertTrue(status == 0, msg=msg) |
@@ -141,13 +140,4 @@ class RpmInstallRemoveTest(OERuntimeTestCase): | |||
141 | 140 | ||
142 | self.tc.target.run('rm -f %s' % self.dst) | 141 | self.tc.target.run('rm -f %s' % self.dst) |
143 | 142 | ||
144 | # if using systemd this should ensure all entries are flushed to /var | ||
145 | status, output = self.target.run("journalctl --sync") | ||
146 | # Get the amount of entries in the log file | ||
147 | status, output = self.target.run(check_log_cmd) | ||
148 | msg = 'Failed to get the final size of the log file.' | ||
149 | self.assertEqual(0, status, msg=msg) | ||
150 | 143 | ||
151 | # Check that there's enough of them | ||
152 | self.assertGreaterEqual(int(output), 80, | ||
153 | 'Cound not find sufficient amount of rpm entries in /var/log/messages, found {} entries'.format(output)) | ||
diff --git a/meta/lib/oeqa/runtime/cases/rtc.py b/meta/lib/oeqa/runtime/cases/rtc.py new file mode 100644 index 0000000000..39f4d29f23 --- /dev/null +++ b/meta/lib/oeqa/runtime/cases/rtc.py | |||
@@ -0,0 +1,40 @@ | |||
1 | from oeqa.runtime.case import OERuntimeTestCase | ||
2 | from oeqa.core.decorator.depends import OETestDepends | ||
3 | from oeqa.core.decorator.data import skipIfFeature | ||
4 | from oeqa.runtime.decorator.package import OEHasPackage | ||
5 | |||
6 | import re | ||
7 | |||
8 | class RTCTest(OERuntimeTestCase): | ||
9 | |||
10 | def setUp(self): | ||
11 | if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd': | ||
12 | self.logger.debug('Stopping systemd-timesyncd daemon') | ||
13 | self.target.run('systemctl disable --now --runtime systemd-timesyncd') | ||
14 | |||
15 | def tearDown(self): | ||
16 | if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd': | ||
17 | self.logger.debug('Starting systemd-timesyncd daemon') | ||
18 | self.target.run('systemctl enable --now --runtime systemd-timesyncd') | ||
19 | |||
20 | @skipIfFeature('read-only-rootfs', | ||
21 | 'Test does not work with read-only-rootfs in IMAGE_FEATURES') | ||
22 | @OETestDepends(['ssh.SSHTest.test_ssh']) | ||
23 | @OEHasPackage(['coreutils', 'busybox']) | ||
24 | def test_rtc(self): | ||
25 | (status, output) = self.target.run('hwclock -r') | ||
26 | self.assertEqual(status, 0, msg='Failed to get RTC time, output: %s' % output) | ||
27 | |||
28 | (status, current_datetime) = self.target.run('date +"%m%d%H%M%Y"') | ||
29 | self.assertEqual(status, 0, msg='Failed to get system current date & time, output: %s' % current_datetime) | ||
30 | |||
31 | example_datetime = '062309452008' | ||
32 | (status, output) = self.target.run('date %s ; hwclock -w ; hwclock -r' % example_datetime) | ||
33 | check_hwclock = re.search('2008-06-23 09:45:..', output) | ||
34 | self.assertTrue(check_hwclock, msg='The RTC time was not set correctly, output: %s' % output) | ||
35 | |||
36 | (status, output) = self.target.run('date %s' % current_datetime) | ||
37 | self.assertEqual(status, 0, msg='Failed to reset system date & time, output: %s' % output) | ||
38 | |||
39 | (status, output) = self.target.run('hwclock -w') | ||
40 | self.assertEqual(status, 0, msg='Failed to reset RTC time, output: %s' % output) | ||
diff --git a/meta/lib/oeqa/runtime/cases/scp.py b/meta/lib/oeqa/runtime/cases/scp.py index 3a5f292152..f2bbc947d6 100644 --- a/meta/lib/oeqa/runtime/cases/scp.py +++ b/meta/lib/oeqa/runtime/cases/scp.py | |||
@@ -23,7 +23,7 @@ class ScpTest(OERuntimeTestCase): | |||
23 | os.remove(cls.tmp_path) | 23 | os.remove(cls.tmp_path) |
24 | 24 | ||
25 | @OETestDepends(['ssh.SSHTest.test_ssh']) | 25 | @OETestDepends(['ssh.SSHTest.test_ssh']) |
26 | @OEHasPackage(['openssh-scp', 'dropbear']) | 26 | @OEHasPackage(['openssh-scp']) |
27 | def test_scp_file(self): | 27 | def test_scp_file(self): |
28 | dst = '/tmp/test_scp_file' | 28 | dst = '/tmp/test_scp_file' |
29 | 29 | ||
diff --git a/meta/lib/oeqa/runtime/cases/suspend.py b/meta/lib/oeqa/runtime/cases/suspend.py new file mode 100644 index 0000000000..67b6f7e56f --- /dev/null +++ b/meta/lib/oeqa/runtime/cases/suspend.py | |||
@@ -0,0 +1,33 @@ | |||
1 | from oeqa.runtime.case import OERuntimeTestCase | ||
2 | from oeqa.core.decorator.depends import OETestDepends | ||
3 | from oeqa.core.decorator.data import skipIfQemu | ||
4 | import threading | ||
5 | import time | ||
6 | |||
7 | class Suspend_Test(OERuntimeTestCase): | ||
8 | |||
9 | def test_date(self): | ||
10 | (status, output) = self.target.run('date') | ||
11 | self.assertEqual(status, 0, msg = 'Failed to run date command, output : %s' % output) | ||
12 | |||
13 | def test_ping(self): | ||
14 | t_thread = threading.Thread(target=self.target.run, args=("ping 8.8.8.8",)) | ||
15 | t_thread.start() | ||
16 | time.sleep(2) | ||
17 | |||
18 | status, output = self.target.run('pidof ping') | ||
19 | self.target.run('kill -9 %s' % output) | ||
20 | self.assertEqual(status, 0, msg = 'Not able to find process that runs ping, output : %s' % output) | ||
21 | |||
22 | def set_suspend(self): | ||
23 | (status, output) = self.target.run('sudo rtcwake -m mem -s 10') | ||
24 | self.assertEqual(status, 0, msg = 'Failed to suspends your system to RAM, output : %s' % output) | ||
25 | |||
26 | @skipIfQemu('qemuall', 'Test only runs on real hardware') | ||
27 | @OETestDepends(['ssh.SSHTest.test_ssh']) | ||
28 | def test_suspend(self): | ||
29 | self.test_date() | ||
30 | self.test_ping() | ||
31 | self.set_suspend() | ||
32 | self.test_date() | ||
33 | self.test_ping() | ||
diff --git a/meta/lib/oeqa/runtime/cases/terminal.py b/meta/lib/oeqa/runtime/cases/terminal.py new file mode 100644 index 0000000000..8fcca99f47 --- /dev/null +++ b/meta/lib/oeqa/runtime/cases/terminal.py | |||
@@ -0,0 +1,21 @@ | |||
1 | from oeqa.runtime.case import OERuntimeTestCase | ||
2 | from oeqa.core.decorator.depends import OETestDepends | ||
3 | from oeqa.runtime.decorator.package import OEHasPackage | ||
4 | |||
5 | import threading | ||
6 | import time | ||
7 | |||
8 | class TerminalTest(OERuntimeTestCase): | ||
9 | |||
10 | @OEHasPackage(['matchbox-terminal']) | ||
11 | @OETestDepends(['ssh.SSHTest.test_ssh']) | ||
12 | def test_terminal_running(self): | ||
13 | t_thread = threading.Thread(target=self.target.run, args=("export DISPLAY=:0 && matchbox-terminal -e 'sh -c \"uname -a && exec sh\"'",)) | ||
14 | t_thread.start() | ||
15 | time.sleep(2) | ||
16 | |||
17 | status, output = self.target.run('pidof matchbox-terminal') | ||
18 | number_of_terminal = len(output.split()) | ||
19 | self.assertEqual(number_of_terminal, 1, msg='There should be only one terminal being launched. Number of terminal launched : %s' % number_of_terminal) | ||
20 | self.target.run('kill -9 %s' % output) | ||
21 | self.assertEqual(status, 0, msg='Not able to find process that runs terminal.') | ||
diff --git a/meta/lib/oeqa/runtime/cases/usb_hid.py b/meta/lib/oeqa/runtime/cases/usb_hid.py new file mode 100644 index 0000000000..3c292cf661 --- /dev/null +++ b/meta/lib/oeqa/runtime/cases/usb_hid.py | |||
@@ -0,0 +1,22 @@ | |||
1 | from oeqa.runtime.case import OERuntimeTestCase | ||
2 | from oeqa.core.decorator.depends import OETestDepends | ||
3 | from oeqa.core.decorator.data import skipIfQemu | ||
4 | from oeqa.runtime.decorator.package import OEHasPackage | ||
5 | |||
6 | class USB_HID_Test(OERuntimeTestCase): | ||
7 | |||
8 | def keyboard_mouse_simulation(self): | ||
9 | (status, output) = self.target.run('export DISPLAY=:0 && xdotool key F2 && xdotool mousemove 100 100') | ||
10 | return self.assertEqual(status, 0, msg = 'Failed to simulate keyboard/mouse input event, output : %s' % output) | ||
11 | |||
12 | def set_suspend(self): | ||
13 | (status, output) = self.target.run('sudo rtcwake -m mem -s 10') | ||
14 | return self.assertEqual(status, 0, msg = 'Failed to suspends your system to RAM, output : %s' % output) | ||
15 | |||
16 | @OEHasPackage(['xdotool']) | ||
17 | @skipIfQemu('qemuall', 'Test only runs on real hardware') | ||
18 | @OETestDepends(['ssh.SSHTest.test_ssh']) | ||
19 | def test_USB_Hid_input(self): | ||
20 | self.keyboard_mouse_simulation() | ||
21 | self.set_suspend() | ||
22 | self.keyboard_mouse_simulation() | ||
diff --git a/meta/lib/oeqa/runtime/context.py b/meta/lib/oeqa/runtime/context.py index 3826f27642..8a0dbd0736 100644 --- a/meta/lib/oeqa/runtime/context.py +++ b/meta/lib/oeqa/runtime/context.py | |||
@@ -5,6 +5,7 @@ | |||
5 | # | 5 | # |
6 | 6 | ||
7 | import os | 7 | import os |
8 | import sys | ||
8 | 9 | ||
9 | from oeqa.core.context import OETestContext, OETestContextExecutor | 10 | from oeqa.core.context import OETestContext, OETestContextExecutor |
10 | from oeqa.core.target.ssh import OESSHTarget | 11 | from oeqa.core.target.ssh import OESSHTarget |
@@ -66,11 +67,11 @@ class OERuntimeTestContextExecutor(OETestContextExecutor): | |||
66 | % self.default_target_type) | 67 | % self.default_target_type) |
67 | runtime_group.add_argument('--target-ip', action='store', | 68 | runtime_group.add_argument('--target-ip', action='store', |
68 | default=self.default_target_ip, | 69 | default=self.default_target_ip, |
69 | help="IP address of device under test, default: %s" \ | 70 | help="IP address and optionally ssh port (default 22) of device under test, for example '192.168.0.7:22'. Default: %s" \ |
70 | % self.default_target_ip) | 71 | % self.default_target_ip) |
71 | runtime_group.add_argument('--server-ip', action='store', | 72 | runtime_group.add_argument('--server-ip', action='store', |
72 | default=self.default_target_ip, | 73 | default=self.default_target_ip, |
73 | help="IP address of device under test, default: %s" \ | 74 | help="IP address of the test host from test target machine, default: %s" \ |
74 | % self.default_server_ip) | 75 | % self.default_server_ip) |
75 | 76 | ||
76 | runtime_group.add_argument('--host-dumper-dir', action='store', | 77 | runtime_group.add_argument('--host-dumper-dir', action='store', |
@@ -119,8 +120,7 @@ class OERuntimeTestContextExecutor(OETestContextExecutor): | |||
119 | # XXX: Don't base your targets on this code it will be refactored | 120 | # XXX: Don't base your targets on this code it will be refactored |
120 | # in the near future. | 121 | # in the near future. |
121 | # Custom target module loading | 122 | # Custom target module loading |
122 | target_modules_path = kwargs.get('target_modules_path', '') | 123 | controller = OERuntimeTestContextExecutor.getControllerModule(target_type) |
123 | controller = OERuntimeTestContextExecutor.getControllerModule(target_type, target_modules_path) | ||
124 | target = controller(logger, target_ip, server_ip, **kwargs) | 124 | target = controller(logger, target_ip, server_ip, **kwargs) |
125 | 125 | ||
126 | return target | 126 | return target |
@@ -130,15 +130,15 @@ class OERuntimeTestContextExecutor(OETestContextExecutor): | |||
130 | # AttributeError raised if not found. | 130 | # AttributeError raised if not found. |
131 | # ImportError raised if a provided module can not be imported. | 131 | # ImportError raised if a provided module can not be imported. |
132 | @staticmethod | 132 | @staticmethod |
133 | def getControllerModule(target, target_modules_path): | 133 | def getControllerModule(target): |
134 | controllerslist = OERuntimeTestContextExecutor._getControllerModulenames(target_modules_path) | 134 | controllerslist = OERuntimeTestContextExecutor._getControllerModulenames() |
135 | controller = OERuntimeTestContextExecutor._loadControllerFromName(target, controllerslist) | 135 | controller = OERuntimeTestContextExecutor._loadControllerFromName(target, controllerslist) |
136 | return controller | 136 | return controller |
137 | 137 | ||
138 | # Return a list of all python modules in lib/oeqa/controllers for each | 138 | # Return a list of all python modules in lib/oeqa/controllers for each |
139 | # layer in bbpath | 139 | # layer in bbpath |
140 | @staticmethod | 140 | @staticmethod |
141 | def _getControllerModulenames(target_modules_path): | 141 | def _getControllerModulenames(): |
142 | 142 | ||
143 | controllerslist = [] | 143 | controllerslist = [] |
144 | 144 | ||
@@ -153,9 +153,8 @@ class OERuntimeTestContextExecutor(OETestContextExecutor): | |||
153 | else: | 153 | else: |
154 | raise RuntimeError("Duplicate controller module found for %s. Layers should create unique controller module names" % module) | 154 | raise RuntimeError("Duplicate controller module found for %s. Layers should create unique controller module names" % module) |
155 | 155 | ||
156 | extpath = target_modules_path.split(':') | 156 | for p in sys.path: |
157 | for p in extpath: | 157 | controllerpath = os.path.join(p, 'oeqa', 'controllers') |
158 | controllerpath = os.path.join(p, 'lib', 'oeqa', 'controllers') | ||
159 | if os.path.exists(controllerpath): | 158 | if os.path.exists(controllerpath): |
160 | add_controller_list(controllerpath) | 159 | add_controller_list(controllerpath) |
161 | return controllerslist | 160 | return controllerslist |
@@ -175,16 +174,12 @@ class OERuntimeTestContextExecutor(OETestContextExecutor): | |||
175 | # Search for and return a controller or None from given module name | 174 | # Search for and return a controller or None from given module name |
176 | @staticmethod | 175 | @staticmethod |
177 | def _loadControllerFromModule(target, modulename): | 176 | def _loadControllerFromModule(target, modulename): |
178 | obj = None | ||
179 | # import module, allowing it to raise import exception | ||
180 | module = __import__(modulename, globals(), locals(), [target]) | ||
181 | # look for target class in the module, catching any exceptions as it | ||
182 | # is valid that a module may not have the target class. | ||
183 | try: | 177 | try: |
184 | obj = getattr(module, target) | 178 | import importlib |
185 | except: | 179 | module = importlib.import_module(modulename) |
186 | obj = None | 180 | return getattr(module, target) |
187 | return obj | 181 | except AttributeError: |
182 | return None | ||
188 | 183 | ||
189 | @staticmethod | 184 | @staticmethod |
190 | def readPackagesManifest(manifest): | 185 | def readPackagesManifest(manifest): |
diff --git a/meta/lib/oeqa/sdk/cases/buildepoxy.py b/meta/lib/oeqa/sdk/cases/buildepoxy.py index 385f8ccca8..f69f720cd6 100644 --- a/meta/lib/oeqa/sdk/cases/buildepoxy.py +++ b/meta/lib/oeqa/sdk/cases/buildepoxy.py | |||
@@ -17,7 +17,7 @@ class EpoxyTest(OESDKTestCase): | |||
17 | """ | 17 | """ |
18 | def setUp(self): | 18 | def setUp(self): |
19 | if not (self.tc.hasHostPackage("nativesdk-meson")): | 19 | if not (self.tc.hasHostPackage("nativesdk-meson")): |
20 | raise unittest.SkipTest("GalculatorTest class: SDK doesn't contain Meson") | 20 | raise unittest.SkipTest("EpoxyTest class: SDK doesn't contain Meson") |
21 | 21 | ||
22 | def test_epoxy(self): | 22 | def test_epoxy(self): |
23 | with tempfile.TemporaryDirectory(prefix="epoxy", dir=self.tc.sdk_dir) as testdir: | 23 | with tempfile.TemporaryDirectory(prefix="epoxy", dir=self.tc.sdk_dir) as testdir: |
diff --git a/meta/lib/oeqa/selftest/cases/archiver.py b/meta/lib/oeqa/selftest/cases/archiver.py index bc5447d2a3..6a5c8ec71e 100644 --- a/meta/lib/oeqa/selftest/cases/archiver.py +++ b/meta/lib/oeqa/selftest/cases/archiver.py | |||
@@ -35,11 +35,11 @@ class Archiver(OESelftestTestCase): | |||
35 | src_path = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['TARGET_SYS']) | 35 | src_path = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['TARGET_SYS']) |
36 | 36 | ||
37 | # Check that include_recipe was included | 37 | # Check that include_recipe was included |
38 | included_present = len(glob.glob(src_path + '/%s-*' % include_recipe)) | 38 | included_present = len(glob.glob(src_path + '/%s-*/*' % include_recipe)) |
39 | self.assertTrue(included_present, 'Recipe %s was not included.' % include_recipe) | 39 | self.assertTrue(included_present, 'Recipe %s was not included.' % include_recipe) |
40 | 40 | ||
41 | # Check that exclude_recipe was excluded | 41 | # Check that exclude_recipe was excluded |
42 | excluded_present = len(glob.glob(src_path + '/%s-*' % exclude_recipe)) | 42 | excluded_present = len(glob.glob(src_path + '/%s-*/*' % exclude_recipe)) |
43 | self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % exclude_recipe) | 43 | self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % exclude_recipe) |
44 | 44 | ||
45 | def test_archiver_filters_by_type(self): | 45 | def test_archiver_filters_by_type(self): |
@@ -67,11 +67,11 @@ class Archiver(OESelftestTestCase): | |||
67 | src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS']) | 67 | src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS']) |
68 | 68 | ||
69 | # Check that target_recipe was included | 69 | # Check that target_recipe was included |
70 | included_present = len(glob.glob(src_path_target + '/%s-*' % target_recipe)) | 70 | included_present = len(glob.glob(src_path_target + '/%s-*/*' % target_recipe)) |
71 | self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipe) | 71 | self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipe) |
72 | 72 | ||
73 | # Check that native_recipe was excluded | 73 | # Check that native_recipe was excluded |
74 | excluded_present = len(glob.glob(src_path_native + '/%s-*' % native_recipe)) | 74 | excluded_present = len(glob.glob(src_path_native + '/%s-*/*' % native_recipe)) |
75 | self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipe) | 75 | self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipe) |
76 | 76 | ||
77 | def test_archiver_filters_by_type_and_name(self): | 77 | def test_archiver_filters_by_type_and_name(self): |
@@ -104,17 +104,17 @@ class Archiver(OESelftestTestCase): | |||
104 | src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS']) | 104 | src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS']) |
105 | 105 | ||
106 | # Check that target_recipe[0] and native_recipes[1] were included | 106 | # Check that target_recipe[0] and native_recipes[1] were included |
107 | included_present = len(glob.glob(src_path_target + '/%s-*' % target_recipes[0])) | 107 | included_present = len(glob.glob(src_path_target + '/%s-*/*' % target_recipes[0])) |
108 | self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipes[0]) | 108 | self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipes[0]) |
109 | 109 | ||
110 | included_present = len(glob.glob(src_path_native + '/%s-*' % native_recipes[1])) | 110 | included_present = len(glob.glob(src_path_native + '/%s-*/*' % native_recipes[1])) |
111 | self.assertTrue(included_present, 'Recipe %s was not included.' % native_recipes[1]) | 111 | self.assertTrue(included_present, 'Recipe %s was not included.' % native_recipes[1]) |
112 | 112 | ||
113 | # Check that native_recipes[0] and target_recipes[1] were excluded | 113 | # Check that native_recipes[0] and target_recipes[1] were excluded |
114 | excluded_present = len(glob.glob(src_path_native + '/%s-*' % native_recipes[0])) | 114 | excluded_present = len(glob.glob(src_path_native + '/%s-*/*' % native_recipes[0])) |
115 | self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipes[0]) | 115 | self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipes[0]) |
116 | 116 | ||
117 | excluded_present = len(glob.glob(src_path_target + '/%s-*' % target_recipes[1])) | 117 | excluded_present = len(glob.glob(src_path_target + '/%s-*/*' % target_recipes[1])) |
118 | self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % target_recipes[1]) | 118 | self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % target_recipes[1]) |
119 | 119 | ||
120 | 120 | ||
diff --git a/meta/lib/oeqa/selftest/cases/bblayers.py b/meta/lib/oeqa/selftest/cases/bblayers.py index f131d9856c..7d74833f61 100644 --- a/meta/lib/oeqa/selftest/cases/bblayers.py +++ b/meta/lib/oeqa/selftest/cases/bblayers.py | |||
@@ -12,6 +12,11 @@ from oeqa.selftest.case import OESelftestTestCase | |||
12 | 12 | ||
13 | class BitbakeLayers(OESelftestTestCase): | 13 | class BitbakeLayers(OESelftestTestCase): |
14 | 14 | ||
15 | def test_bitbakelayers_layerindexshowdepends(self): | ||
16 | result = runCmd('bitbake-layers layerindex-show-depends meta-poky') | ||
17 | find_in_contents = re.search("openembedded-core", result.output) | ||
18 | self.assertTrue(find_in_contents, msg = "openembedded-core should have been listed at this step. bitbake-layers layerindex-show-depends meta-poky output: %s" % result.output) | ||
19 | |||
15 | def test_bitbakelayers_showcrossdepends(self): | 20 | def test_bitbakelayers_showcrossdepends(self): |
16 | result = runCmd('bitbake-layers show-cross-depends') | 21 | result = runCmd('bitbake-layers show-cross-depends') |
17 | self.assertIn('aspell', result.output) | 22 | self.assertIn('aspell', result.output) |
diff --git a/meta/lib/oeqa/selftest/cases/bbtests.py b/meta/lib/oeqa/selftest/cases/bbtests.py index dc423ec439..0b88316950 100644 --- a/meta/lib/oeqa/selftest/cases/bbtests.py +++ b/meta/lib/oeqa/selftest/cases/bbtests.py | |||
@@ -148,9 +148,6 @@ INHERIT_remove = \"report-error\" | |||
148 | self.delete_recipeinc('man-db') | 148 | self.delete_recipeinc('man-db') |
149 | self.assertEqual(result.status, 1, msg="Command succeded when it should have failed. bitbake output: %s" % result.output) | 149 | self.assertEqual(result.status, 1, msg="Command succeded when it should have failed. bitbake output: %s" % result.output) |
150 | self.assertIn('Fetcher failure: Unable to find file file://invalid anywhere. The paths that were searched were:', result.output) | 150 | self.assertIn('Fetcher failure: Unable to find file file://invalid anywhere. The paths that were searched were:', result.output) |
151 | line = self.getline(result, 'Fetcher failure for URL: \'file://invalid\'. Unable to fetch URL from any source.') | ||
152 | self.assertTrue(line and line.startswith("ERROR:"), msg = "\"invalid\" file \ | ||
153 | doesn't exist, yet fetcher didn't report any error. bitbake output: %s" % result.output) | ||
154 | 151 | ||
155 | def test_rename_downloaded_file(self): | 152 | def test_rename_downloaded_file(self): |
156 | # TODO unique dldir instead of using cleanall | 153 | # TODO unique dldir instead of using cleanall |
@@ -160,7 +157,7 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\" | |||
160 | """) | 157 | """) |
161 | self.track_for_cleanup(os.path.join(self.builddir, "download-selftest")) | 158 | self.track_for_cleanup(os.path.join(self.builddir, "download-selftest")) |
162 | 159 | ||
163 | data = 'SRC_URI = "${GNU_MIRROR}/aspell/aspell-${PV}.tar.gz;downloadfilename=test-aspell.tar.gz"' | 160 | data = 'SRC_URI = "https://downloads.yoctoproject.org/mirror/sources/aspell-${PV}.tar.gz;downloadfilename=test-aspell.tar.gz"' |
164 | self.write_recipeinc('aspell', data) | 161 | self.write_recipeinc('aspell', data) |
165 | result = bitbake('-f -c fetch aspell', ignore_status=True) | 162 | result = bitbake('-f -c fetch aspell', ignore_status=True) |
166 | self.delete_recipeinc('aspell') | 163 | self.delete_recipeinc('aspell') |
@@ -188,6 +185,10 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\" | |||
188 | self.assertTrue(find, "No version returned for searched recipe. bitbake output: %s" % result.output) | 185 | self.assertTrue(find, "No version returned for searched recipe. bitbake output: %s" % result.output) |
189 | 186 | ||
190 | def test_prefile(self): | 187 | def test_prefile(self): |
188 | # Test when the prefile does not exist | ||
189 | result = runCmd('bitbake -r conf/prefile.conf', ignore_status=True) | ||
190 | self.assertEqual(1, result.status, "bitbake didn't error and should have when a specified prefile didn't exist: %s" % result.output) | ||
191 | # Test when the prefile exists | ||
191 | preconf = os.path.join(self.builddir, 'conf/prefile.conf') | 192 | preconf = os.path.join(self.builddir, 'conf/prefile.conf') |
192 | self.track_for_cleanup(preconf) | 193 | self.track_for_cleanup(preconf) |
193 | ftools.write_file(preconf ,"TEST_PREFILE=\"prefile\"") | 194 | ftools.write_file(preconf ,"TEST_PREFILE=\"prefile\"") |
@@ -198,6 +199,10 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\" | |||
198 | self.assertIn('localconf', result.output) | 199 | self.assertIn('localconf', result.output) |
199 | 200 | ||
200 | def test_postfile(self): | 201 | def test_postfile(self): |
202 | # Test when the postfile does not exist | ||
203 | result = runCmd('bitbake -R conf/postfile.conf', ignore_status=True) | ||
204 | self.assertEqual(1, result.status, "bitbake didn't error and should have when a specified postfile didn't exist: %s" % result.output) | ||
205 | # Test when the postfile exists | ||
201 | postconf = os.path.join(self.builddir, 'conf/postfile.conf') | 206 | postconf = os.path.join(self.builddir, 'conf/postfile.conf') |
202 | self.track_for_cleanup(postconf) | 207 | self.track_for_cleanup(postconf) |
203 | ftools.write_file(postconf , "TEST_POSTFILE=\"postfile\"") | 208 | ftools.write_file(postconf , "TEST_POSTFILE=\"postfile\"") |
diff --git a/meta/lib/oeqa/selftest/cases/buildoptions.py b/meta/lib/oeqa/selftest/cases/buildoptions.py index e91f0bd18f..b1b9ea7e55 100644 --- a/meta/lib/oeqa/selftest/cases/buildoptions.py +++ b/meta/lib/oeqa/selftest/cases/buildoptions.py | |||
@@ -57,15 +57,15 @@ class ImageOptionsTests(OESelftestTestCase): | |||
57 | class DiskMonTest(OESelftestTestCase): | 57 | class DiskMonTest(OESelftestTestCase): |
58 | 58 | ||
59 | def test_stoptask_behavior(self): | 59 | def test_stoptask_behavior(self): |
60 | self.write_config('BB_DISKMON_DIRS = "STOPTASKS,${TMPDIR},100000G,100K"') | 60 | self.write_config('BB_DISKMON_DIRS = "STOPTASKS,${TMPDIR},100000G,100K"\nBB_HEARTBEAT_EVENT = "1"') |
61 | res = bitbake("delay -c delay", ignore_status = True) | 61 | res = bitbake("delay -c delay", ignore_status = True) |
62 | self.assertTrue('ERROR: No new tasks can be executed since the disk space monitor action is "STOPTASKS"!' in res.output, msg = "Tasks should have stopped. Disk monitor is set to STOPTASK: %s" % res.output) | 62 | self.assertTrue('ERROR: No new tasks can be executed since the disk space monitor action is "STOPTASKS"!' in res.output, msg = "Tasks should have stopped. Disk monitor is set to STOPTASK: %s" % res.output) |
63 | self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output)) | 63 | self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output)) |
64 | self.write_config('BB_DISKMON_DIRS = "ABORT,${TMPDIR},100000G,100K"') | 64 | self.write_config('BB_DISKMON_DIRS = "ABORT,${TMPDIR},100000G,100K"\nBB_HEARTBEAT_EVENT = "1"') |
65 | res = bitbake("delay -c delay", ignore_status = True) | 65 | res = bitbake("delay -c delay", ignore_status = True) |
66 | self.assertTrue('ERROR: Immediately abort since the disk space monitor action is "ABORT"!' in res.output, "Tasks should have been aborted immediatelly. Disk monitor is set to ABORT: %s" % res.output) | 66 | self.assertTrue('ERROR: Immediately abort since the disk space monitor action is "ABORT"!' in res.output, "Tasks should have been aborted immediatelly. Disk monitor is set to ABORT: %s" % res.output) |
67 | self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output)) | 67 | self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output)) |
68 | self.write_config('BB_DISKMON_DIRS = "WARN,${TMPDIR},100000G,100K"') | 68 | self.write_config('BB_DISKMON_DIRS = "WARN,${TMPDIR},100000G,100K"\nBB_HEARTBEAT_EVENT = "1"') |
69 | res = bitbake("delay -c delay") | 69 | res = bitbake("delay -c delay") |
70 | self.assertTrue('WARNING: The free space' in res.output, msg = "A warning should have been displayed for disk monitor is set to WARN: %s" %res.output) | 70 | self.assertTrue('WARNING: The free space' in res.output, msg = "A warning should have been displayed for disk monitor is set to WARN: %s" %res.output) |
71 | 71 | ||
diff --git a/meta/lib/oeqa/selftest/cases/cve_check.py b/meta/lib/oeqa/selftest/cases/cve_check.py index 3f343a2841..22ffeffd29 100644 --- a/meta/lib/oeqa/selftest/cases/cve_check.py +++ b/meta/lib/oeqa/selftest/cases/cve_check.py | |||
@@ -1,9 +1,13 @@ | |||
1 | from oe.cve_check import Version | 1 | import json |
2 | import os | ||
2 | from oeqa.selftest.case import OESelftestTestCase | 3 | from oeqa.selftest.case import OESelftestTestCase |
4 | from oeqa.utils.commands import bitbake, get_bb_vars | ||
3 | 5 | ||
4 | class CVECheck(OESelftestTestCase): | 6 | class CVECheck(OESelftestTestCase): |
5 | 7 | ||
6 | def test_version_compare(self): | 8 | def test_version_compare(self): |
9 | from oe.cve_check import Version | ||
10 | |||
7 | result = Version("100") > Version("99") | 11 | result = Version("100") > Version("99") |
8 | self.assertTrue( result, msg="Failed to compare version '100' > '99'") | 12 | self.assertTrue( result, msg="Failed to compare version '100' > '99'") |
9 | result = Version("2.3.1") > Version("2.2.3") | 13 | result = Version("2.3.1") > Version("2.2.3") |
@@ -34,3 +38,183 @@ class CVECheck(OESelftestTestCase): | |||
34 | self.assertTrue( result ,msg="Failed to compare version with suffix '1.0b' < '1.0r'") | 38 | self.assertTrue( result ,msg="Failed to compare version with suffix '1.0b' < '1.0r'") |
35 | result = Version("1.0b","alphabetical") > Version("1.0","alphabetical") | 39 | result = Version("1.0b","alphabetical") > Version("1.0","alphabetical") |
36 | self.assertTrue( result ,msg="Failed to compare version with suffix '1.0b' > '1.0'") | 40 | self.assertTrue( result ,msg="Failed to compare version with suffix '1.0b' > '1.0'") |
41 | |||
42 | # consider the trailing "p" and "patch" as patched released when comparing | ||
43 | result = Version("1.0","patch") < Version("1.0p1","patch") | ||
44 | self.assertTrue( result ,msg="Failed to compare version with suffix '1.0' < '1.0p1'") | ||
45 | result = Version("1.0p2","patch") > Version("1.0p1","patch") | ||
46 | self.assertTrue( result ,msg="Failed to compare version with suffix '1.0p2' > '1.0p1'") | ||
47 | result = Version("1.0_patch2","patch") < Version("1.0_patch3","patch") | ||
48 | self.assertTrue( result ,msg="Failed to compare version with suffix '1.0_patch2' < '1.0_patch3'") | ||
49 | |||
50 | |||
51 | def test_convert_cve_version(self): | ||
52 | from oe.cve_check import convert_cve_version | ||
53 | |||
54 | # Default format | ||
55 | self.assertEqual(convert_cve_version("8.3"), "8.3") | ||
56 | self.assertEqual(convert_cve_version(""), "") | ||
57 | |||
58 | # OpenSSL format version | ||
59 | self.assertEqual(convert_cve_version("1.1.1t"), "1.1.1t") | ||
60 | |||
61 | # OpenSSH format | ||
62 | self.assertEqual(convert_cve_version("8.3_p1"), "8.3p1") | ||
63 | self.assertEqual(convert_cve_version("8.3_p22"), "8.3p22") | ||
64 | |||
65 | # Linux kernel format | ||
66 | self.assertEqual(convert_cve_version("6.2_rc8"), "6.2-rc8") | ||
67 | self.assertEqual(convert_cve_version("6.2_rc31"), "6.2-rc31") | ||
68 | |||
69 | |||
70 | def test_recipe_report_json(self): | ||
71 | config = """ | ||
72 | INHERIT += "cve-check" | ||
73 | CVE_CHECK_FORMAT_JSON = "1" | ||
74 | """ | ||
75 | self.write_config(config) | ||
76 | |||
77 | vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"]) | ||
78 | summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"]) | ||
79 | recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "m4-native_cve.json") | ||
80 | |||
81 | try: | ||
82 | os.remove(summary_json) | ||
83 | os.remove(recipe_json) | ||
84 | except FileNotFoundError: | ||
85 | pass | ||
86 | |||
87 | bitbake("m4-native -c cve_check") | ||
88 | |||
89 | def check_m4_json(filename): | ||
90 | with open(filename) as f: | ||
91 | report = json.load(f) | ||
92 | self.assertEqual(report["version"], "1") | ||
93 | self.assertEqual(len(report["package"]), 1) | ||
94 | package = report["package"][0] | ||
95 | self.assertEqual(package["name"], "m4-native") | ||
96 | found_cves = { issue["id"]: issue["status"] for issue in package["issue"]} | ||
97 | self.assertIn("CVE-2008-1687", found_cves) | ||
98 | self.assertEqual(found_cves["CVE-2008-1687"], "Patched") | ||
99 | |||
100 | self.assertExists(summary_json) | ||
101 | check_m4_json(summary_json) | ||
102 | self.assertExists(recipe_json) | ||
103 | check_m4_json(recipe_json) | ||
104 | |||
105 | |||
106 | def test_image_json(self): | ||
107 | config = """ | ||
108 | INHERIT += "cve-check" | ||
109 | CVE_CHECK_FORMAT_JSON = "1" | ||
110 | """ | ||
111 | self.write_config(config) | ||
112 | |||
113 | vars = get_bb_vars(["CVE_CHECK_DIR", "CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"]) | ||
114 | report_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"]) | ||
115 | print(report_json) | ||
116 | try: | ||
117 | os.remove(report_json) | ||
118 | except FileNotFoundError: | ||
119 | pass | ||
120 | |||
121 | bitbake("core-image-minimal-initramfs") | ||
122 | self.assertExists(report_json) | ||
123 | |||
124 | # Check that the summary report lists at least one package | ||
125 | with open(report_json) as f: | ||
126 | report = json.load(f) | ||
127 | self.assertEqual(report["version"], "1") | ||
128 | self.assertGreater(len(report["package"]), 1) | ||
129 | |||
130 | # Check that a random recipe wrote a recipe report to deploy/cve/ | ||
131 | recipename = report["package"][0]["name"] | ||
132 | recipe_report = os.path.join(vars["CVE_CHECK_DIR"], recipename + "_cve.json") | ||
133 | self.assertExists(recipe_report) | ||
134 | with open(recipe_report) as f: | ||
135 | report = json.load(f) | ||
136 | self.assertEqual(report["version"], "1") | ||
137 | self.assertEqual(len(report["package"]), 1) | ||
138 | self.assertEqual(report["package"][0]["name"], recipename) | ||
139 | |||
140 | |||
141 | def test_recipe_report_json_unpatched(self): | ||
142 | config = """ | ||
143 | INHERIT += "cve-check" | ||
144 | CVE_CHECK_FORMAT_JSON = "1" | ||
145 | CVE_CHECK_REPORT_PATCHED = "0" | ||
146 | """ | ||
147 | self.write_config(config) | ||
148 | |||
149 | vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"]) | ||
150 | summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"]) | ||
151 | recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "m4-native_cve.json") | ||
152 | |||
153 | try: | ||
154 | os.remove(summary_json) | ||
155 | os.remove(recipe_json) | ||
156 | except FileNotFoundError: | ||
157 | pass | ||
158 | |||
159 | bitbake("m4-native -c cve_check") | ||
160 | |||
161 | def check_m4_json(filename): | ||
162 | with open(filename) as f: | ||
163 | report = json.load(f) | ||
164 | self.assertEqual(report["version"], "1") | ||
165 | self.assertEqual(len(report["package"]), 1) | ||
166 | package = report["package"][0] | ||
167 | self.assertEqual(package["name"], "m4-native") | ||
168 | #m4 had only Patched CVEs, so the issues array will be empty | ||
169 | self.assertEqual(package["issue"], []) | ||
170 | |||
171 | self.assertExists(summary_json) | ||
172 | check_m4_json(summary_json) | ||
173 | self.assertExists(recipe_json) | ||
174 | check_m4_json(recipe_json) | ||
175 | |||
176 | |||
177 | def test_recipe_report_json_ignored(self): | ||
178 | config = """ | ||
179 | INHERIT += "cve-check" | ||
180 | CVE_CHECK_FORMAT_JSON = "1" | ||
181 | CVE_CHECK_REPORT_PATCHED = "1" | ||
182 | """ | ||
183 | self.write_config(config) | ||
184 | |||
185 | vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"]) | ||
186 | summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"]) | ||
187 | recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "logrotate_cve.json") | ||
188 | |||
189 | try: | ||
190 | os.remove(summary_json) | ||
191 | os.remove(recipe_json) | ||
192 | except FileNotFoundError: | ||
193 | pass | ||
194 | |||
195 | bitbake("logrotate -c cve_check") | ||
196 | |||
197 | def check_m4_json(filename): | ||
198 | with open(filename) as f: | ||
199 | report = json.load(f) | ||
200 | self.assertEqual(report["version"], "1") | ||
201 | self.assertEqual(len(report["package"]), 1) | ||
202 | package = report["package"][0] | ||
203 | self.assertEqual(package["name"], "logrotate") | ||
204 | found_cves = { issue["id"]: issue["status"] for issue in package["issue"]} | ||
205 | # m4 CVE should not be in logrotate | ||
206 | self.assertNotIn("CVE-2008-1687", found_cves) | ||
207 | # logrotate has both Patched and Ignored CVEs | ||
208 | self.assertIn("CVE-2011-1098", found_cves) | ||
209 | self.assertEqual(found_cves["CVE-2011-1098"], "Patched") | ||
210 | self.assertIn("CVE-2011-1548", found_cves) | ||
211 | self.assertEqual(found_cves["CVE-2011-1548"], "Ignored") | ||
212 | self.assertIn("CVE-2011-1549", found_cves) | ||
213 | self.assertEqual(found_cves["CVE-2011-1549"], "Ignored") | ||
214 | self.assertIn("CVE-2011-1550", found_cves) | ||
215 | self.assertEqual(found_cves["CVE-2011-1550"], "Ignored") | ||
216 | |||
217 | self.assertExists(summary_json) | ||
218 | check_m4_json(summary_json) | ||
219 | self.assertExists(recipe_json) | ||
220 | check_m4_json(recipe_json) | ||
diff --git a/meta/lib/oeqa/selftest/cases/devtool.py b/meta/lib/oeqa/selftest/cases/devtool.py index 0985434238..9efe342a0d 100644 --- a/meta/lib/oeqa/selftest/cases/devtool.py +++ b/meta/lib/oeqa/selftest/cases/devtool.py | |||
@@ -8,6 +8,7 @@ import shutil | |||
8 | import tempfile | 8 | import tempfile |
9 | import glob | 9 | import glob |
10 | import fnmatch | 10 | import fnmatch |
11 | import unittest | ||
11 | 12 | ||
12 | import oeqa.utils.ftools as ftools | 13 | import oeqa.utils.ftools as ftools |
13 | from oeqa.selftest.case import OESelftestTestCase | 14 | from oeqa.selftest.case import OESelftestTestCase |
@@ -38,6 +39,13 @@ def setUpModule(): | |||
38 | canonical_layerpath = os.path.realpath(canonical_layerpath) + '/' | 39 | canonical_layerpath = os.path.realpath(canonical_layerpath) + '/' |
39 | edited_layers.append(layerpath) | 40 | edited_layers.append(layerpath) |
40 | oldmetapath = os.path.realpath(layerpath) | 41 | oldmetapath = os.path.realpath(layerpath) |
42 | |||
43 | # when downloading poky from tar.gz some tests will be skipped (BUG 12389) | ||
44 | try: | ||
45 | runCmd('git rev-parse --is-inside-work-tree', cwd=canonical_layerpath) | ||
46 | except: | ||
47 | raise unittest.SkipTest("devtool tests require folder to be a git repo") | ||
48 | |||
41 | result = runCmd('git rev-parse --show-toplevel', cwd=canonical_layerpath) | 49 | result = runCmd('git rev-parse --show-toplevel', cwd=canonical_layerpath) |
42 | oldreporoot = result.output.rstrip() | 50 | oldreporoot = result.output.rstrip() |
43 | newmetapath = os.path.join(corecopydir, os.path.relpath(oldmetapath, oldreporoot)) | 51 | newmetapath = os.path.join(corecopydir, os.path.relpath(oldmetapath, oldreporoot)) |
@@ -340,7 +348,7 @@ class DevtoolAddTests(DevtoolBase): | |||
340 | checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263' | 348 | checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263' |
341 | checkvars['S'] = '${WORKDIR}/git' | 349 | checkvars['S'] = '${WORKDIR}/git' |
342 | checkvars['PV'] = '0.1+git${SRCPV}' | 350 | checkvars['PV'] = '0.1+git${SRCPV}' |
343 | checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/dbus-wait;protocol=https' | 351 | checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/dbus-wait;protocol=https;branch=master' |
344 | checkvars['SRCREV'] = srcrev | 352 | checkvars['SRCREV'] = srcrev |
345 | checkvars['DEPENDS'] = set(['dbus']) | 353 | checkvars['DEPENDS'] = set(['dbus']) |
346 | self._test_recipe_contents(recipefile, checkvars, []) | 354 | self._test_recipe_contents(recipefile, checkvars, []) |
@@ -442,6 +450,7 @@ class DevtoolAddTests(DevtoolBase): | |||
442 | tempdir = tempfile.mkdtemp(prefix='devtoolqa') | 450 | tempdir = tempfile.mkdtemp(prefix='devtoolqa') |
443 | self.track_for_cleanup(tempdir) | 451 | self.track_for_cleanup(tempdir) |
444 | url = 'gitsm://git.yoctoproject.org/mraa' | 452 | url = 'gitsm://git.yoctoproject.org/mraa' |
453 | url_branch = '%s;branch=master' % url | ||
445 | checkrev = 'ae127b19a50aa54255e4330ccfdd9a5d058e581d' | 454 | checkrev = 'ae127b19a50aa54255e4330ccfdd9a5d058e581d' |
446 | testrecipe = 'mraa' | 455 | testrecipe = 'mraa' |
447 | srcdir = os.path.join(tempdir, testrecipe) | 456 | srcdir = os.path.join(tempdir, testrecipe) |
@@ -462,7 +471,7 @@ class DevtoolAddTests(DevtoolBase): | |||
462 | checkvars = {} | 471 | checkvars = {} |
463 | checkvars['S'] = '${WORKDIR}/git' | 472 | checkvars['S'] = '${WORKDIR}/git' |
464 | checkvars['PV'] = '1.0+git${SRCPV}' | 473 | checkvars['PV'] = '1.0+git${SRCPV}' |
465 | checkvars['SRC_URI'] = url | 474 | checkvars['SRC_URI'] = url_branch |
466 | checkvars['SRCREV'] = '${AUTOREV}' | 475 | checkvars['SRCREV'] = '${AUTOREV}' |
467 | self._test_recipe_contents(recipefile, checkvars, []) | 476 | self._test_recipe_contents(recipefile, checkvars, []) |
468 | # Try with revision and version specified | 477 | # Try with revision and version specified |
@@ -481,7 +490,7 @@ class DevtoolAddTests(DevtoolBase): | |||
481 | checkvars = {} | 490 | checkvars = {} |
482 | checkvars['S'] = '${WORKDIR}/git' | 491 | checkvars['S'] = '${WORKDIR}/git' |
483 | checkvars['PV'] = '1.5+git${SRCPV}' | 492 | checkvars['PV'] = '1.5+git${SRCPV}' |
484 | checkvars['SRC_URI'] = url | 493 | checkvars['SRC_URI'] = url_branch |
485 | checkvars['SRCREV'] = checkrev | 494 | checkvars['SRCREV'] = checkrev |
486 | self._test_recipe_contents(recipefile, checkvars, []) | 495 | self._test_recipe_contents(recipefile, checkvars, []) |
487 | 496 | ||
@@ -880,7 +889,7 @@ class DevtoolUpdateTests(DevtoolBase): | |||
880 | self._check_repo_status(os.path.dirname(recipefile), expected_status) | 889 | self._check_repo_status(os.path.dirname(recipefile), expected_status) |
881 | 890 | ||
882 | result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile)) | 891 | result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile)) |
883 | addlines = ['SRCREV = ".*"', 'SRC_URI = "git://git.infradead.org/mtd-utils.git"'] | 892 | addlines = ['SRCREV = ".*"', 'SRC_URI = "git://git.infradead.org/mtd-utils.git;branch=master"'] |
884 | srcurilines = src_uri.split() | 893 | srcurilines = src_uri.split() |
885 | srcurilines[0] = 'SRC_URI = "' + srcurilines[0] | 894 | srcurilines[0] = 'SRC_URI = "' + srcurilines[0] |
886 | srcurilines.append('"') | 895 | srcurilines.append('"') |
@@ -1322,7 +1331,7 @@ class DevtoolExtractTests(DevtoolBase): | |||
1322 | # Now really test deploy-target | 1331 | # Now really test deploy-target |
1323 | result = runCmd('devtool deploy-target -c %s root@%s' % (testrecipe, qemu.ip)) | 1332 | result = runCmd('devtool deploy-target -c %s root@%s' % (testrecipe, qemu.ip)) |
1324 | # Run a test command to see if it was installed properly | 1333 | # Run a test command to see if it was installed properly |
1325 | sshargs = '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no' | 1334 | sshargs = '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o HostKeyAlgorithms=+ssh-rsa' |
1326 | result = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, testcommand)) | 1335 | result = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, testcommand)) |
1327 | # Check if it deployed all of the files with the right ownership/perms | 1336 | # Check if it deployed all of the files with the right ownership/perms |
1328 | # First look on the host - need to do this under pseudo to get the correct ownership/perms | 1337 | # First look on the host - need to do this under pseudo to get the correct ownership/perms |
diff --git a/meta/lib/oeqa/selftest/cases/diffoscope/A/file.txt b/meta/lib/oeqa/selftest/cases/diffoscope/A/file.txt new file mode 100644 index 0000000000..f70f10e4db --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/diffoscope/A/file.txt | |||
@@ -0,0 +1 @@ | |||
A | |||
diff --git a/meta/lib/oeqa/selftest/cases/diffoscope/B/file.txt b/meta/lib/oeqa/selftest/cases/diffoscope/B/file.txt new file mode 100644 index 0000000000..223b7836fb --- /dev/null +++ b/meta/lib/oeqa/selftest/cases/diffoscope/B/file.txt | |||
@@ -0,0 +1 @@ | |||
B | |||
diff --git a/meta/lib/oeqa/selftest/cases/distrodata.py b/meta/lib/oeqa/selftest/cases/distrodata.py index e1cfc3b621..8e5e24db3d 100644 --- a/meta/lib/oeqa/selftest/cases/distrodata.py +++ b/meta/lib/oeqa/selftest/cases/distrodata.py | |||
@@ -63,7 +63,7 @@ but their recipes claim otherwise by setting UPSTREAM_VERSION_UNKNOWN. Please re | |||
63 | return True | 63 | return True |
64 | return False | 64 | return False |
65 | 65 | ||
66 | feature = 'require conf/distro/include/maintainers.inc\nLICENSE_FLAGS_WHITELIST += " commercial"\nPARSE_ALL_RECIPES = "1"\n' | 66 | feature = 'require conf/distro/include/maintainers.inc\nLICENSE_FLAGS_WHITELIST += " commercial"\nPARSE_ALL_RECIPES = "1"\nPACKAGE_CLASSES = "package_ipk package_deb package_rpm"\n' |
67 | self.write_config(feature) | 67 | self.write_config(feature) |
68 | 68 | ||
69 | with bb.tinfoil.Tinfoil() as tinfoil: | 69 | with bb.tinfoil.Tinfoil() as tinfoil: |
diff --git a/meta/lib/oeqa/selftest/cases/glibc.py b/meta/lib/oeqa/selftest/cases/glibc.py index c687f6ef93..c1f6e4c1fb 100644 --- a/meta/lib/oeqa/selftest/cases/glibc.py +++ b/meta/lib/oeqa/selftest/cases/glibc.py | |||
@@ -33,7 +33,7 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase): | |||
33 | 33 | ||
34 | ptestsuite = "glibc-user" if ssh is None else "glibc" | 34 | ptestsuite = "glibc-user" if ssh is None else "glibc" |
35 | self.ptest_section(ptestsuite) | 35 | self.ptest_section(ptestsuite) |
36 | with open(os.path.join(builddir, "tests.sum"), "r") as f: | 36 | with open(os.path.join(builddir, "tests.sum"), "r", errors='replace') as f: |
37 | for test, result in parse_values(f): | 37 | for test, result in parse_values(f): |
38 | self.ptest_result(ptestsuite, test, result) | 38 | self.ptest_result(ptestsuite, test, result) |
39 | 39 | ||
@@ -41,7 +41,7 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase): | |||
41 | with contextlib.ExitStack() as s: | 41 | with contextlib.ExitStack() as s: |
42 | # use the base work dir, as the nfs mount, since the recipe directory may not exist | 42 | # use the base work dir, as the nfs mount, since the recipe directory may not exist |
43 | tmpdir = get_bb_var("BASE_WORKDIR") | 43 | tmpdir = get_bb_var("BASE_WORKDIR") |
44 | nfsport, mountport = s.enter_context(unfs_server(tmpdir)) | 44 | nfsport, mountport = s.enter_context(unfs_server(tmpdir, udp = False)) |
45 | 45 | ||
46 | # build core-image-minimal with required packages | 46 | # build core-image-minimal with required packages |
47 | default_installed_packages = [ | 47 | default_installed_packages = [ |
@@ -61,7 +61,7 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase): | |||
61 | bitbake("core-image-minimal") | 61 | bitbake("core-image-minimal") |
62 | 62 | ||
63 | # start runqemu | 63 | # start runqemu |
64 | qemu = s.enter_context(runqemu("core-image-minimal", runqemuparams = "nographic")) | 64 | qemu = s.enter_context(runqemu("core-image-minimal", runqemuparams = "nographic", qemuparams = "-m 1024")) |
65 | 65 | ||
66 | # validate that SSH is working | 66 | # validate that SSH is working |
67 | status, _ = qemu.run("uname") | 67 | status, _ = qemu.run("uname") |
@@ -70,7 +70,7 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase): | |||
70 | # setup nfs mount | 70 | # setup nfs mount |
71 | if qemu.run("mkdir -p \"{0}\"".format(tmpdir))[0] != 0: | 71 | if qemu.run("mkdir -p \"{0}\"".format(tmpdir))[0] != 0: |
72 | raise Exception("Failed to setup NFS mount directory on target") | 72 | raise Exception("Failed to setup NFS mount directory on target") |
73 | mountcmd = "mount -o noac,nfsvers=3,port={0},udp,mountport={1} \"{2}:{3}\" \"{3}\"".format(nfsport, mountport, qemu.server_ip, tmpdir) | 73 | mountcmd = "mount -o noac,nfsvers=3,port={0},mountport={1} \"{2}:{3}\" \"{3}\"".format(nfsport, mountport, qemu.server_ip, tmpdir) |
74 | status, output = qemu.run(mountcmd) | 74 | status, output = qemu.run(mountcmd) |
75 | if status != 0: | 75 | if status != 0: |
76 | raise Exception("Failed to setup NFS mount on target ({})".format(repr(output))) | 76 | raise Exception("Failed to setup NFS mount on target ({})".format(repr(output))) |
diff --git a/meta/lib/oeqa/selftest/cases/gotoolchain.py b/meta/lib/oeqa/selftest/cases/gotoolchain.py index 3119520f0d..59f80aad28 100644 --- a/meta/lib/oeqa/selftest/cases/gotoolchain.py +++ b/meta/lib/oeqa/selftest/cases/gotoolchain.py | |||
@@ -43,6 +43,12 @@ class oeGoToolchainSelfTest(OESelftestTestCase): | |||
43 | 43 | ||
44 | @classmethod | 44 | @classmethod |
45 | def tearDownClass(cls): | 45 | def tearDownClass(cls): |
46 | # Go creates files which are readonly | ||
47 | for dirpath, dirnames, filenames in os.walk(cls.tmpdir_SDKQA): | ||
48 | for filename in filenames + dirnames: | ||
49 | f = os.path.join(dirpath, filename) | ||
50 | if not os.path.islink(f): | ||
51 | os.chmod(f, 0o775) | ||
46 | shutil.rmtree(cls.tmpdir_SDKQA, ignore_errors=True) | 52 | shutil.rmtree(cls.tmpdir_SDKQA, ignore_errors=True) |
47 | super(oeGoToolchainSelfTest, cls).tearDownClass() | 53 | super(oeGoToolchainSelfTest, cls).tearDownClass() |
48 | 54 | ||
diff --git a/meta/lib/oeqa/selftest/cases/imagefeatures.py b/meta/lib/oeqa/selftest/cases/imagefeatures.py index 2b9c4998f7..535d80cb86 100644 --- a/meta/lib/oeqa/selftest/cases/imagefeatures.py +++ b/meta/lib/oeqa/selftest/cases/imagefeatures.py | |||
@@ -240,7 +240,7 @@ USERADD_GID_TABLES += "files/static-group" | |||
240 | def test_no_busybox_base_utils(self): | 240 | def test_no_busybox_base_utils(self): |
241 | config = """ | 241 | config = """ |
242 | # Enable x11 | 242 | # Enable x11 |
243 | DISTRO_FEATURES_append += "x11" | 243 | DISTRO_FEATURES_append = " x11" |
244 | 244 | ||
245 | # Switch to systemd | 245 | # Switch to systemd |
246 | DISTRO_FEATURES += "systemd" | 246 | DISTRO_FEATURES += "systemd" |
diff --git a/meta/lib/oeqa/selftest/cases/oelib/utils.py b/meta/lib/oeqa/selftest/cases/oelib/utils.py index a7214beb4c..bbf67bf9c9 100644 --- a/meta/lib/oeqa/selftest/cases/oelib/utils.py +++ b/meta/lib/oeqa/selftest/cases/oelib/utils.py | |||
@@ -64,7 +64,7 @@ class TestMultiprocessLaunch(TestCase): | |||
64 | import bb | 64 | import bb |
65 | 65 | ||
66 | def testfunction(item, d): | 66 | def testfunction(item, d): |
67 | if item == "2" or item == "1": | 67 | if item == "2": |
68 | raise KeyError("Invalid number %s" % item) | 68 | raise KeyError("Invalid number %s" % item) |
69 | return "Found %s" % item | 69 | return "Found %s" % item |
70 | 70 | ||
@@ -99,5 +99,4 @@ class TestMultiprocessLaunch(TestCase): | |||
99 | # Assert the function prints exceptions | 99 | # Assert the function prints exceptions |
100 | with captured_output() as (out, err): | 100 | with captured_output() as (out, err): |
101 | self.assertRaises(bb.BBHandledException, multiprocess_launch, testfunction, ["1", "2", "3", "4", "5", "6"], d, extraargs=(d,)) | 101 | self.assertRaises(bb.BBHandledException, multiprocess_launch, testfunction, ["1", "2", "3", "4", "5", "6"], d, extraargs=(d,)) |
102 | self.assertIn("KeyError: 'Invalid number 1'", out.getvalue()) | ||
103 | self.assertIn("KeyError: 'Invalid number 2'", out.getvalue()) | 102 | self.assertIn("KeyError: 'Invalid number 2'", out.getvalue()) |
diff --git a/meta/lib/oeqa/selftest/cases/oescripts.py b/meta/lib/oeqa/selftest/cases/oescripts.py index 726daff7c6..fb99be447e 100644 --- a/meta/lib/oeqa/selftest/cases/oescripts.py +++ b/meta/lib/oeqa/selftest/cases/oescripts.py | |||
@@ -133,7 +133,8 @@ class OEListPackageconfigTests(OEScriptTests): | |||
133 | def check_endlines(self, results, expected_endlines): | 133 | def check_endlines(self, results, expected_endlines): |
134 | for line in results.output.splitlines(): | 134 | for line in results.output.splitlines(): |
135 | for el in expected_endlines: | 135 | for el in expected_endlines: |
136 | if line.split() == el.split(): | 136 | if line and line.split()[0] == el.split()[0] and \ |
137 | ' '.join(sorted(el.split())) in ' '.join(sorted(line.split())): | ||
137 | expected_endlines.remove(el) | 138 | expected_endlines.remove(el) |
138 | break | 139 | break |
139 | 140 | ||
diff --git a/meta/lib/oeqa/selftest/cases/prservice.py b/meta/lib/oeqa/selftest/cases/prservice.py index 578b2b4dd9..fdc1e40058 100644 --- a/meta/lib/oeqa/selftest/cases/prservice.py +++ b/meta/lib/oeqa/selftest/cases/prservice.py | |||
@@ -75,7 +75,7 @@ class BitbakePrTests(OESelftestTestCase): | |||
75 | exported_db_path = os.path.join(self.builddir, 'export.inc') | 75 | exported_db_path = os.path.join(self.builddir, 'export.inc') |
76 | export_result = runCmd("bitbake-prserv-tool export %s" % exported_db_path, ignore_status=True) | 76 | export_result = runCmd("bitbake-prserv-tool export %s" % exported_db_path, ignore_status=True) |
77 | self.assertEqual(export_result.status, 0, msg="PR Service database export failed: %s" % export_result.output) | 77 | self.assertEqual(export_result.status, 0, msg="PR Service database export failed: %s" % export_result.output) |
78 | self.assertTrue(os.path.exists(exported_db_path)) | 78 | self.assertTrue(os.path.exists(exported_db_path), msg="%s didn't exist, tool output %s" % (exported_db_path, export_result.output)) |
79 | 79 | ||
80 | if replace_current_db: | 80 | if replace_current_db: |
81 | current_db_path = os.path.join(get_bb_var('PERSISTENT_DIR'), 'prserv.sqlite3') | 81 | current_db_path = os.path.join(get_bb_var('PERSISTENT_DIR'), 'prserv.sqlite3') |
diff --git a/meta/lib/oeqa/selftest/cases/recipetool.py b/meta/lib/oeqa/selftest/cases/recipetool.py index c2ade2543a..e8aeea3023 100644 --- a/meta/lib/oeqa/selftest/cases/recipetool.py +++ b/meta/lib/oeqa/selftest/cases/recipetool.py | |||
@@ -370,7 +370,7 @@ class RecipetoolTests(RecipetoolBase): | |||
370 | tempsrc = os.path.join(self.tempdir, 'srctree') | 370 | tempsrc = os.path.join(self.tempdir, 'srctree') |
371 | os.makedirs(tempsrc) | 371 | os.makedirs(tempsrc) |
372 | recipefile = os.path.join(self.tempdir, 'libmatchbox.bb') | 372 | recipefile = os.path.join(self.tempdir, 'libmatchbox.bb') |
373 | srcuri = 'git://git.yoctoproject.org/libmatchbox' | 373 | srcuri = 'git://git.yoctoproject.org/libmatchbox;branch=master' |
374 | result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri + ";rev=9f7cf8895ae2d39c465c04cc78e918c157420269", '-x', tempsrc]) | 374 | result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri + ";rev=9f7cf8895ae2d39c465c04cc78e918c157420269", '-x', tempsrc]) |
375 | self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output) | 375 | self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output) |
376 | checkvars = {} | 376 | checkvars = {} |
@@ -456,7 +456,7 @@ class RecipetoolTests(RecipetoolBase): | |||
456 | self.assertTrue(os.path.isfile(recipefile)) | 456 | self.assertTrue(os.path.isfile(recipefile)) |
457 | checkvars = {} | 457 | checkvars = {} |
458 | checkvars['LICENSE'] = set(['Apache-2.0']) | 458 | checkvars['LICENSE'] = set(['Apache-2.0']) |
459 | checkvars['SRC_URI'] = 'git://github.com/mesonbuild/meson;protocol=https' | 459 | checkvars['SRC_URI'] = 'git://github.com/mesonbuild/meson;protocol=https;branch=master' |
460 | inherits = ['setuptools3'] | 460 | inherits = ['setuptools3'] |
461 | self._test_recipe_contents(recipefile, checkvars, inherits) | 461 | self._test_recipe_contents(recipefile, checkvars, inherits) |
462 | 462 | ||
@@ -523,7 +523,7 @@ class RecipetoolTests(RecipetoolBase): | |||
523 | self.assertTrue(os.path.isfile(recipefile)) | 523 | self.assertTrue(os.path.isfile(recipefile)) |
524 | checkvars = {} | 524 | checkvars = {} |
525 | checkvars['LICENSE'] = set(['GPLv2']) | 525 | checkvars['LICENSE'] = set(['GPLv2']) |
526 | checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/matchbox-terminal;protocol=http' | 526 | checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/matchbox-terminal;protocol=http;branch=master' |
527 | inherits = ['pkgconfig', 'autotools'] | 527 | inherits = ['pkgconfig', 'autotools'] |
528 | self._test_recipe_contents(recipefile, checkvars, inherits) | 528 | self._test_recipe_contents(recipefile, checkvars, inherits) |
529 | 529 | ||
diff --git a/meta/lib/oeqa/selftest/cases/reproducible.py b/meta/lib/oeqa/selftest/cases/reproducible.py index d4800022df..be4cdcc429 100644 --- a/meta/lib/oeqa/selftest/cases/reproducible.py +++ b/meta/lib/oeqa/selftest/cases/reproducible.py | |||
@@ -17,6 +17,57 @@ import stat | |||
17 | import os | 17 | import os |
18 | import datetime | 18 | import datetime |
19 | 19 | ||
20 | # For sample packages, see: | ||
21 | # https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20201127-0t7wr_oo/ | ||
22 | # https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20201127-4s9ejwyp/ | ||
23 | # https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20201127-haiwdlbr/ | ||
24 | # https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20201127-hwds3mcl/ | ||
25 | # https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20201203-sua0pzvc/ | ||
26 | # (both packages/ and packages-excluded/) | ||
27 | exclude_packages = [ | ||
28 | 'acpica-src', | ||
29 | 'babeltrace2-ptest', | ||
30 | 'bind', | ||
31 | 'bootchart2-doc', | ||
32 | 'epiphany', | ||
33 | 'gcr', | ||
34 | 'glide', | ||
35 | 'go-dep', | ||
36 | 'go-helloworld', | ||
37 | 'go-runtime', | ||
38 | 'go_', | ||
39 | 'gstreamer1.0-python', | ||
40 | 'hwlatdetect', | ||
41 | 'kernel-devsrc', | ||
42 | 'libcap-ng', | ||
43 | 'libjson', | ||
44 | 'libproxy', | ||
45 | 'lttng-tools-dbg', | ||
46 | 'lttng-tools-ptest', | ||
47 | 'ltp', | ||
48 | 'ovmf-shell-efi', | ||
49 | 'parted-ptest', | ||
50 | 'perf', | ||
51 | 'piglit', | ||
52 | 'pybootchartgui', | ||
53 | 'qemu', | ||
54 | 'quilt-ptest', | ||
55 | 'rsync', | ||
56 | 'ruby', | ||
57 | 'stress-ng', | ||
58 | 'systemd-bootchart', | ||
59 | 'systemtap', | ||
60 | 'valgrind-ptest', | ||
61 | 'webkitgtk', | ||
62 | ] | ||
63 | |||
64 | def is_excluded(package): | ||
65 | package_name = os.path.basename(package) | ||
66 | for i in exclude_packages: | ||
67 | if package_name.startswith(i): | ||
68 | return i | ||
69 | return None | ||
70 | |||
20 | MISSING = 'MISSING' | 71 | MISSING = 'MISSING' |
21 | DIFFERENT = 'DIFFERENT' | 72 | DIFFERENT = 'DIFFERENT' |
22 | SAME = 'SAME' | 73 | SAME = 'SAME' |
@@ -39,14 +90,21 @@ class PackageCompareResults(object): | |||
39 | self.total = [] | 90 | self.total = [] |
40 | self.missing = [] | 91 | self.missing = [] |
41 | self.different = [] | 92 | self.different = [] |
93 | self.different_excluded = [] | ||
42 | self.same = [] | 94 | self.same = [] |
95 | self.active_exclusions = set() | ||
43 | 96 | ||
44 | def add_result(self, r): | 97 | def add_result(self, r): |
45 | self.total.append(r) | 98 | self.total.append(r) |
46 | if r.status == MISSING: | 99 | if r.status == MISSING: |
47 | self.missing.append(r) | 100 | self.missing.append(r) |
48 | elif r.status == DIFFERENT: | 101 | elif r.status == DIFFERENT: |
49 | self.different.append(r) | 102 | exclusion = is_excluded(r.reference) |
103 | if exclusion: | ||
104 | self.different_excluded.append(r) | ||
105 | self.active_exclusions.add(exclusion) | ||
106 | else: | ||
107 | self.different.append(r) | ||
50 | else: | 108 | else: |
51 | self.same.append(r) | 109 | self.same.append(r) |
52 | 110 | ||
@@ -54,10 +112,14 @@ class PackageCompareResults(object): | |||
54 | self.total.sort() | 112 | self.total.sort() |
55 | self.missing.sort() | 113 | self.missing.sort() |
56 | self.different.sort() | 114 | self.different.sort() |
115 | self.different_excluded.sort() | ||
57 | self.same.sort() | 116 | self.same.sort() |
58 | 117 | ||
59 | def __str__(self): | 118 | def __str__(self): |
60 | return 'same=%i different=%i missing=%i total=%i' % (len(self.same), len(self.different), len(self.missing), len(self.total)) | 119 | return 'same=%i different=%i different_excluded=%i missing=%i total=%i\nunused_exclusions=%s' % (len(self.same), len(self.different), len(self.different_excluded), len(self.missing), len(self.total), self.unused_exclusions()) |
120 | |||
121 | def unused_exclusions(self): | ||
122 | return sorted(set(exclude_packages) - self.active_exclusions) | ||
61 | 123 | ||
62 | def compare_file(reference, test, diffutils_sysroot): | 124 | def compare_file(reference, test, diffutils_sysroot): |
63 | result = CompareResult() | 125 | result = CompareResult() |
@@ -68,7 +130,7 @@ def compare_file(reference, test, diffutils_sysroot): | |||
68 | result.status = MISSING | 130 | result.status = MISSING |
69 | return result | 131 | return result |
70 | 132 | ||
71 | r = runCmd(['cmp', '--quiet', reference, test], native_sysroot=diffutils_sysroot, ignore_status=True) | 133 | r = runCmd(['cmp', '--quiet', reference, test], native_sysroot=diffutils_sysroot, ignore_status=True, sync=False) |
72 | 134 | ||
73 | if r.status: | 135 | if r.status: |
74 | result.status = DIFFERENT | 136 | result.status = DIFFERENT |
@@ -77,9 +139,41 @@ def compare_file(reference, test, diffutils_sysroot): | |||
77 | result.status = SAME | 139 | result.status = SAME |
78 | return result | 140 | return result |
79 | 141 | ||
142 | def run_diffoscope(a_dir, b_dir, html_dir, **kwargs): | ||
143 | return runCmd(['diffoscope', '--no-default-limits', '--exclude-directory-metadata', 'yes', '--html-dir', html_dir, a_dir, b_dir], | ||
144 | **kwargs) | ||
145 | |||
146 | class DiffoscopeTests(OESelftestTestCase): | ||
147 | diffoscope_test_files = os.path.join(os.path.dirname(os.path.abspath(__file__)), "diffoscope") | ||
148 | |||
149 | def test_diffoscope(self): | ||
150 | bitbake("diffoscope-native -c addto_recipe_sysroot") | ||
151 | diffoscope_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "diffoscope-native") | ||
152 | |||
153 | # Check that diffoscope doesn't return an error when the files compare | ||
154 | # the same (a general check that diffoscope is working) | ||
155 | with tempfile.TemporaryDirectory() as tmpdir: | ||
156 | run_diffoscope('A', 'A', tmpdir, | ||
157 | native_sysroot=diffoscope_sysroot, cwd=self.diffoscope_test_files) | ||
158 | |||
159 | # Check that diffoscope generates an index.html file when the files are | ||
160 | # different | ||
161 | with tempfile.TemporaryDirectory() as tmpdir: | ||
162 | r = run_diffoscope('A', 'B', tmpdir, | ||
163 | native_sysroot=diffoscope_sysroot, ignore_status=True, cwd=self.diffoscope_test_files) | ||
164 | |||
165 | self.assertNotEqual(r.status, 0, msg="diffoscope was successful when an error was expected") | ||
166 | self.assertTrue(os.path.exists(os.path.join(tmpdir, 'index.html')), "HTML index not found!") | ||
167 | |||
80 | class ReproducibleTests(OESelftestTestCase): | 168 | class ReproducibleTests(OESelftestTestCase): |
169 | # Test the reproducibility of whatever is built between sstate_targets and targets | ||
170 | |||
81 | package_classes = ['deb', 'ipk'] | 171 | package_classes = ['deb', 'ipk'] |
82 | images = ['core-image-minimal', 'core-image-sato', 'core-image-full-cmdline'] | 172 | |
173 | # targets are the things we want to test the reproducibility of | ||
174 | targets = ['core-image-minimal', 'core-image-sato', 'core-image-full-cmdline', 'world'] | ||
175 | # sstate targets are things to pull from sstate to potentially cut build/debugging time | ||
176 | sstate_targets = [] | ||
83 | save_results = False | 177 | save_results = False |
84 | if 'OEQA_DEBUGGING_SAVED_OUTPUT' in os.environ: | 178 | if 'OEQA_DEBUGGING_SAVED_OUTPUT' in os.environ: |
85 | save_results = os.environ['OEQA_DEBUGGING_SAVED_OUTPUT'] | 179 | save_results = os.environ['OEQA_DEBUGGING_SAVED_OUTPUT'] |
@@ -94,7 +188,7 @@ class ReproducibleTests(OESelftestTestCase): | |||
94 | 188 | ||
95 | def setUpLocal(self): | 189 | def setUpLocal(self): |
96 | super().setUpLocal() | 190 | super().setUpLocal() |
97 | needed_vars = ['TOPDIR', 'TARGET_PREFIX', 'BB_NUMBER_THREADS'] | 191 | needed_vars = ['TOPDIR', 'TARGET_PREFIX', 'BB_NUMBER_THREADS', 'BB_HASHSERVE'] |
98 | bb_vars = get_bb_vars(needed_vars) | 192 | bb_vars = get_bb_vars(needed_vars) |
99 | for v in needed_vars: | 193 | for v in needed_vars: |
100 | setattr(self, v.lower(), bb_vars[v]) | 194 | setattr(self, v.lower(), bb_vars[v]) |
@@ -150,21 +244,29 @@ class ReproducibleTests(OESelftestTestCase): | |||
150 | PACKAGE_CLASSES = "{package_classes}" | 244 | PACKAGE_CLASSES = "{package_classes}" |
151 | INHIBIT_PACKAGE_STRIP = "1" | 245 | INHIBIT_PACKAGE_STRIP = "1" |
152 | TMPDIR = "{tmpdir}" | 246 | TMPDIR = "{tmpdir}" |
247 | LICENSE_FLAGS_WHITELIST = "commercial" | ||
248 | DISTRO_FEATURES_append = ' systemd pam' | ||
153 | ''').format(package_classes=' '.join('package_%s' % c for c in self.package_classes), | 249 | ''').format(package_classes=' '.join('package_%s' % c for c in self.package_classes), |
154 | tmpdir=tmpdir) | 250 | tmpdir=tmpdir) |
155 | 251 | ||
156 | if not use_sstate: | 252 | if not use_sstate: |
253 | if self.sstate_targets: | ||
254 | self.logger.info("Building prebuild for %s (sstate allowed)..." % (name)) | ||
255 | self.write_config(config) | ||
256 | bitbake(' '.join(self.sstate_targets)) | ||
257 | |||
157 | # This config fragment will disable using shared and the sstate | 258 | # This config fragment will disable using shared and the sstate |
158 | # mirror, forcing a complete build from scratch | 259 | # mirror, forcing a complete build from scratch |
159 | config += textwrap.dedent('''\ | 260 | config += textwrap.dedent('''\ |
160 | SSTATE_DIR = "${TMPDIR}/sstate" | 261 | SSTATE_DIR = "${TMPDIR}/sstate" |
161 | SSTATE_MIRRORS = "" | 262 | SSTATE_MIRRORS = "file://.*/.*-native.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH file://.*/.*-cross.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH" |
162 | ''') | 263 | ''') |
163 | 264 | ||
164 | self.logger.info("Building %s (sstate%s allowed)..." % (name, '' if use_sstate else ' NOT')) | 265 | self.logger.info("Building %s (sstate%s allowed)..." % (name, '' if use_sstate else ' NOT')) |
165 | self.write_config(config) | 266 | self.write_config(config) |
166 | d = get_bb_vars(capture_vars) | 267 | d = get_bb_vars(capture_vars) |
167 | bitbake(' '.join(self.images)) | 268 | # targets used to be called images |
269 | bitbake(' '.join(getattr(self, 'images', self.targets))) | ||
168 | return d | 270 | return d |
169 | 271 | ||
170 | def test_reproducible_builds(self): | 272 | def test_reproducible_builds(self): |
@@ -212,6 +314,7 @@ class ReproducibleTests(OESelftestTestCase): | |||
212 | 314 | ||
213 | self.write_package_list(package_class, 'missing', result.missing) | 315 | self.write_package_list(package_class, 'missing', result.missing) |
214 | self.write_package_list(package_class, 'different', result.different) | 316 | self.write_package_list(package_class, 'different', result.different) |
317 | self.write_package_list(package_class, 'different_excluded', result.different_excluded) | ||
215 | self.write_package_list(package_class, 'same', result.same) | 318 | self.write_package_list(package_class, 'same', result.same) |
216 | 319 | ||
217 | if self.save_results: | 320 | if self.save_results: |
@@ -219,8 +322,12 @@ class ReproducibleTests(OESelftestTestCase): | |||
219 | self.copy_file(d.reference, '/'.join([save_dir, 'packages', strip_topdir(d.reference)])) | 322 | self.copy_file(d.reference, '/'.join([save_dir, 'packages', strip_topdir(d.reference)])) |
220 | self.copy_file(d.test, '/'.join([save_dir, 'packages', strip_topdir(d.test)])) | 323 | self.copy_file(d.test, '/'.join([save_dir, 'packages', strip_topdir(d.test)])) |
221 | 324 | ||
325 | for d in result.different_excluded: | ||
326 | self.copy_file(d.reference, '/'.join([save_dir, 'packages-excluded', strip_topdir(d.reference)])) | ||
327 | self.copy_file(d.test, '/'.join([save_dir, 'packages-excluded', strip_topdir(d.test)])) | ||
328 | |||
222 | if result.missing or result.different: | 329 | if result.missing or result.different: |
223 | fails.append("The following %s packages are missing or different: %s" % | 330 | fails.append("The following %s packages are missing or different and not in exclusion list: %s" % |
224 | (c, '\n'.join(r.test for r in (result.missing + result.different)))) | 331 | (c, '\n'.join(r.test for r in (result.missing + result.different)))) |
225 | 332 | ||
226 | # Clean up empty directories | 333 | # Clean up empty directories |
@@ -235,7 +342,7 @@ class ReproducibleTests(OESelftestTestCase): | |||
235 | # Copy jquery to improve the diffoscope output usability | 342 | # Copy jquery to improve the diffoscope output usability |
236 | self.copy_file(os.path.join(jquery_sysroot, 'usr/share/javascript/jquery/jquery.min.js'), os.path.join(package_html_dir, 'jquery.js')) | 343 | self.copy_file(os.path.join(jquery_sysroot, 'usr/share/javascript/jquery/jquery.min.js'), os.path.join(package_html_dir, 'jquery.js')) |
237 | 344 | ||
238 | runCmd(['diffoscope', '--no-default-limits', '--exclude-directory-metadata', '--html-dir', package_html_dir, 'reproducibleA', 'reproducibleB'], | 345 | run_diffoscope('reproducibleA', 'reproducibleB', package_html_dir, |
239 | native_sysroot=diffoscope_sysroot, ignore_status=True, cwd=package_dir) | 346 | native_sysroot=diffoscope_sysroot, ignore_status=True, cwd=package_dir) |
240 | 347 | ||
241 | if fails: | 348 | if fails: |
diff --git a/meta/lib/oeqa/selftest/cases/runcmd.py b/meta/lib/oeqa/selftest/cases/runcmd.py index fa6113d7fa..e9612389fe 100644 --- a/meta/lib/oeqa/selftest/cases/runcmd.py +++ b/meta/lib/oeqa/selftest/cases/runcmd.py | |||
@@ -27,8 +27,8 @@ class RunCmdTests(OESelftestTestCase): | |||
27 | 27 | ||
28 | # The delta is intentionally smaller than the timeout, to detect cases where | 28 | # The delta is intentionally smaller than the timeout, to detect cases where |
29 | # we incorrectly apply the timeout more than once. | 29 | # we incorrectly apply the timeout more than once. |
30 | TIMEOUT = 5 | 30 | TIMEOUT = 10 |
31 | DELTA = 3 | 31 | DELTA = 8 |
32 | 32 | ||
33 | def test_result_okay(self): | 33 | def test_result_okay(self): |
34 | result = runCmd("true") | 34 | result = runCmd("true") |
diff --git a/meta/lib/oeqa/selftest/cases/runqemu.py b/meta/lib/oeqa/selftest/cases/runqemu.py index 7e676bcb41..da22f77b27 100644 --- a/meta/lib/oeqa/selftest/cases/runqemu.py +++ b/meta/lib/oeqa/selftest/cases/runqemu.py | |||
@@ -163,12 +163,11 @@ class QemuTest(OESelftestTestCase): | |||
163 | bitbake(cls.recipe) | 163 | bitbake(cls.recipe) |
164 | 164 | ||
165 | def _start_qemu_shutdown_check_if_shutdown_succeeded(self, qemu, timeout): | 165 | def _start_qemu_shutdown_check_if_shutdown_succeeded(self, qemu, timeout): |
166 | # Allow the runner's LoggingThread instance to exit without errors | ||
167 | # (such as the exception "Console connection closed unexpectedly") | ||
168 | # as qemu will disappear when we shut it down | ||
169 | qemu.runner.allowexit() | ||
166 | qemu.run_serial("shutdown -h now") | 170 | qemu.run_serial("shutdown -h now") |
167 | # Stop thread will stop the LoggingThread instance used for logging | ||
168 | # qemu through serial console, stop thread will prevent this code | ||
169 | # from facing exception (Console connection closed unexpectedly) | ||
170 | # when qemu was shutdown by the above shutdown command | ||
171 | qemu.runner.stop_thread() | ||
172 | time_track = 0 | 171 | time_track = 0 |
173 | try: | 172 | try: |
174 | while True: | 173 | while True: |
diff --git a/meta/lib/oeqa/selftest/cases/runtime_test.py b/meta/lib/oeqa/selftest/cases/runtime_test.py index 976b513727..cc4190c1d6 100644 --- a/meta/lib/oeqa/selftest/cases/runtime_test.py +++ b/meta/lib/oeqa/selftest/cases/runtime_test.py | |||
@@ -14,11 +14,6 @@ from oeqa.core.decorator.data import skipIfNotQemu | |||
14 | 14 | ||
15 | class TestExport(OESelftestTestCase): | 15 | class TestExport(OESelftestTestCase): |
16 | 16 | ||
17 | @classmethod | ||
18 | def tearDownClass(cls): | ||
19 | runCmd("rm -rf /tmp/sdk") | ||
20 | super(TestExport, cls).tearDownClass() | ||
21 | |||
22 | def test_testexport_basic(self): | 17 | def test_testexport_basic(self): |
23 | """ | 18 | """ |
24 | Summary: Check basic testexport functionality with only ping test enabled. | 19 | Summary: Check basic testexport functionality with only ping test enabled. |
@@ -95,19 +90,20 @@ class TestExport(OESelftestTestCase): | |||
95 | msg = "Couldn't find SDK tarball: %s" % tarball_path | 90 | msg = "Couldn't find SDK tarball: %s" % tarball_path |
96 | self.assertEqual(os.path.isfile(tarball_path), True, msg) | 91 | self.assertEqual(os.path.isfile(tarball_path), True, msg) |
97 | 92 | ||
98 | # Extract SDK and run tar from SDK | 93 | with tempfile.TemporaryDirectory() as tmpdirname: |
99 | result = runCmd("%s -y -d /tmp/sdk" % tarball_path) | 94 | # Extract SDK and run tar from SDK |
100 | self.assertEqual(0, result.status, "Couldn't extract SDK") | 95 | result = runCmd("%s -y -d %s" % (tarball_path, tmpdirname)) |
96 | self.assertEqual(0, result.status, "Couldn't extract SDK") | ||
101 | 97 | ||
102 | env_script = result.output.split()[-1] | 98 | env_script = result.output.split()[-1] |
103 | result = runCmd(". %s; which tar" % env_script, shell=True) | 99 | result = runCmd(". %s; which tar" % env_script, shell=True) |
104 | self.assertEqual(0, result.status, "Couldn't setup SDK environment") | 100 | self.assertEqual(0, result.status, "Couldn't setup SDK environment") |
105 | is_sdk_tar = True if "/tmp/sdk" in result.output else False | 101 | is_sdk_tar = True if tmpdirname in result.output else False |
106 | self.assertTrue(is_sdk_tar, "Couldn't setup SDK environment") | 102 | self.assertTrue(is_sdk_tar, "Couldn't setup SDK environment") |
107 | 103 | ||
108 | tar_sdk = result.output | 104 | tar_sdk = result.output |
109 | result = runCmd("%s --version" % tar_sdk) | 105 | result = runCmd("%s --version" % tar_sdk) |
110 | self.assertEqual(0, result.status, "Couldn't run tar from SDK") | 106 | self.assertEqual(0, result.status, "Couldn't run tar from SDK") |
111 | 107 | ||
112 | 108 | ||
113 | class TestImage(OESelftestTestCase): | 109 | class TestImage(OESelftestTestCase): |
@@ -179,12 +175,24 @@ class TestImage(OESelftestTestCase): | |||
179 | if "DISPLAY" not in os.environ: | 175 | if "DISPLAY" not in os.environ: |
180 | self.skipTest("virgl gtk test must be run inside a X session") | 176 | self.skipTest("virgl gtk test must be run inside a X session") |
181 | distro = oe.lsb.distro_identifier() | 177 | distro = oe.lsb.distro_identifier() |
178 | if distro and distro.startswith('almalinux'): | ||
179 | self.skipTest('virgl isn\'t working with Alma Linux') | ||
180 | if distro and distro.startswith('rocky'): | ||
181 | self.skipTest('virgl isn\'t working with Rocky Linux') | ||
182 | if distro and distro == 'debian-8': | 182 | if distro and distro == 'debian-8': |
183 | self.skipTest('virgl isn\'t working with Debian 8') | 183 | self.skipTest('virgl isn\'t working with Debian 8') |
184 | if distro and distro == 'centos-7': | 184 | if distro and distro == 'centos-7': |
185 | self.skipTest('virgl isn\'t working with Centos 7') | 185 | self.skipTest('virgl isn\'t working with Centos 7') |
186 | if distro and distro == 'centos-8': | ||
187 | self.skipTest('virgl isn\'t working with Centos 8') | ||
188 | if distro and distro.startswith('fedora'): | ||
189 | self.skipTest('virgl isn\'t working with Fedora') | ||
186 | if distro and distro == 'opensuseleap-15.0': | 190 | if distro and distro == 'opensuseleap-15.0': |
187 | self.skipTest('virgl isn\'t working with Opensuse 15.0') | 191 | self.skipTest('virgl isn\'t working with Opensuse 15.0') |
192 | if distro and distro == 'ubuntu-22.04': | ||
193 | self.skipTest('virgl isn\'t working with Ubuntu 22.04') | ||
194 | if distro and distro == 'ubuntu-22.10': | ||
195 | self.skipTest('virgl isn\'t working with Ubuntu 22.10') | ||
188 | 196 | ||
189 | qemu_packageconfig = get_bb_var('PACKAGECONFIG', 'qemu-system-native') | 197 | qemu_packageconfig = get_bb_var('PACKAGECONFIG', 'qemu-system-native') |
190 | sdl_packageconfig = get_bb_var('PACKAGECONFIG', 'libsdl2-native') | 198 | sdl_packageconfig = get_bb_var('PACKAGECONFIG', 'libsdl2-native') |
@@ -220,6 +228,7 @@ class TestImage(OESelftestTestCase): | |||
220 | Author: Alexander Kanavin <alex.kanavin@gmail.com> | 228 | Author: Alexander Kanavin <alex.kanavin@gmail.com> |
221 | """ | 229 | """ |
222 | import subprocess, os | 230 | import subprocess, os |
231 | self.skipTest("Crashes in mesa observed with this test on dunfell: https://bugzilla.yoctoproject.org/show_bug.cgi?id=14527") | ||
223 | try: | 232 | try: |
224 | content = os.listdir("/dev/dri") | 233 | content = os.listdir("/dev/dri") |
225 | if len([i for i in content if i.startswith('render')]) == 0: | 234 | if len([i for i in content if i.startswith('render')]) == 0: |
@@ -227,7 +236,7 @@ class TestImage(OESelftestTestCase): | |||
227 | except FileNotFoundError: | 236 | except FileNotFoundError: |
228 | self.skipTest("/dev/dri directory does not exist; no render nodes available on this machine.") | 237 | self.skipTest("/dev/dri directory does not exist; no render nodes available on this machine.") |
229 | try: | 238 | try: |
230 | dripath = subprocess.check_output("pkg-config --variable=dridriverdir dri", shell=True) | 239 | dripath = subprocess.check_output("PATH=/bin:/usr/bin:$PATH pkg-config --variable=dridriverdir dri", shell=True) |
231 | except subprocess.CalledProcessError as e: | 240 | except subprocess.CalledProcessError as e: |
232 | self.skipTest("Could not determine the path to dri drivers on the host via pkg-config.\nPlease install Mesa development files (particularly, dri.pc) on the host machine.") | 241 | self.skipTest("Could not determine the path to dri drivers on the host via pkg-config.\nPlease install Mesa development files (particularly, dri.pc) on the host machine.") |
233 | qemu_packageconfig = get_bb_var('PACKAGECONFIG', 'qemu-system-native') | 242 | qemu_packageconfig = get_bb_var('PACKAGECONFIG', 'qemu-system-native') |
diff --git a/meta/lib/oeqa/selftest/cases/sstatetests.py b/meta/lib/oeqa/selftest/cases/sstatetests.py index c46e8ba489..1bfe88c87d 100644 --- a/meta/lib/oeqa/selftest/cases/sstatetests.py +++ b/meta/lib/oeqa/selftest/cases/sstatetests.py | |||
@@ -39,7 +39,7 @@ class SStateTests(SStateBase): | |||
39 | 39 | ||
40 | recipefile = os.path.join(tempdir, "recipes-test", "dbus-wait-test", 'dbus-wait-test_git.bb') | 40 | recipefile = os.path.join(tempdir, "recipes-test", "dbus-wait-test", 'dbus-wait-test_git.bb') |
41 | os.makedirs(os.path.dirname(recipefile)) | 41 | os.makedirs(os.path.dirname(recipefile)) |
42 | srcuri = 'git://' + srcdir + ';protocol=file' | 42 | srcuri = 'git://' + srcdir + ';protocol=file;branch=master' |
43 | result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri]) | 43 | result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri]) |
44 | self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output) | 44 | self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output) |
45 | 45 | ||
@@ -137,7 +137,7 @@ class SStateTests(SStateBase): | |||
137 | filtered_results.append(r) | 137 | filtered_results.append(r) |
138 | self.assertTrue(filtered_results == [], msg="Found distro non-specific sstate for: %s (%s)" % (', '.join(map(str, targets)), str(filtered_results))) | 138 | self.assertTrue(filtered_results == [], msg="Found distro non-specific sstate for: %s (%s)" % (', '.join(map(str, targets)), str(filtered_results))) |
139 | file_tracker_1 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific=True, distro_nonspecific=False) | 139 | file_tracker_1 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific=True, distro_nonspecific=False) |
140 | self.assertTrue(len(file_tracker_1) >= len(targets), msg = "Not all sstate files ware created for: %s" % ', '.join(map(str, targets))) | 140 | self.assertTrue(len(file_tracker_1) >= len(targets), msg = "Not all sstate files were created for: %s" % ', '.join(map(str, targets))) |
141 | 141 | ||
142 | self.track_for_cleanup(self.distro_specific_sstate + "_old") | 142 | self.track_for_cleanup(self.distro_specific_sstate + "_old") |
143 | shutil.copytree(self.distro_specific_sstate, self.distro_specific_sstate + "_old") | 143 | shutil.copytree(self.distro_specific_sstate, self.distro_specific_sstate + "_old") |
@@ -146,13 +146,13 @@ class SStateTests(SStateBase): | |||
146 | bitbake(['-cclean'] + targets) | 146 | bitbake(['-cclean'] + targets) |
147 | bitbake(targets) | 147 | bitbake(targets) |
148 | file_tracker_2 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific=True, distro_nonspecific=False) | 148 | file_tracker_2 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific=True, distro_nonspecific=False) |
149 | self.assertTrue(len(file_tracker_2) >= len(targets), msg = "Not all sstate files ware created for: %s" % ', '.join(map(str, targets))) | 149 | self.assertTrue(len(file_tracker_2) >= len(targets), msg = "Not all sstate files were created for: %s" % ', '.join(map(str, targets))) |
150 | 150 | ||
151 | not_recreated = [x for x in file_tracker_1 if x not in file_tracker_2] | 151 | not_recreated = [x for x in file_tracker_1 if x not in file_tracker_2] |
152 | self.assertTrue(not_recreated == [], msg="The following sstate files ware not recreated: %s" % ', '.join(map(str, not_recreated))) | 152 | self.assertTrue(not_recreated == [], msg="The following sstate files were not recreated: %s" % ', '.join(map(str, not_recreated))) |
153 | 153 | ||
154 | created_once = [x for x in file_tracker_2 if x not in file_tracker_1] | 154 | created_once = [x for x in file_tracker_2 if x not in file_tracker_1] |
155 | self.assertTrue(created_once == [], msg="The following sstate files ware created only in the second run: %s" % ', '.join(map(str, created_once))) | 155 | self.assertTrue(created_once == [], msg="The following sstate files were created only in the second run: %s" % ', '.join(map(str, created_once))) |
156 | 156 | ||
157 | def test_rebuild_distro_specific_sstate_cross_native_targets(self): | 157 | def test_rebuild_distro_specific_sstate_cross_native_targets(self): |
158 | self.run_test_rebuild_distro_specific_sstate(['binutils-cross-' + self.tune_arch, 'binutils-native'], temp_sstate_location=True) | 158 | self.run_test_rebuild_distro_specific_sstate(['binutils-cross-' + self.tune_arch, 'binutils-native'], temp_sstate_location=True) |
@@ -202,9 +202,9 @@ class SStateTests(SStateBase): | |||
202 | actual_remaining_sstate = [x for x in self.search_sstate(target + r'.*?\.tgz$') if not any(pattern in x for pattern in ignore_patterns)] | 202 | actual_remaining_sstate = [x for x in self.search_sstate(target + r'.*?\.tgz$') if not any(pattern in x for pattern in ignore_patterns)] |
203 | 203 | ||
204 | actual_not_expected = [x for x in actual_remaining_sstate if x not in expected_remaining_sstate] | 204 | actual_not_expected = [x for x in actual_remaining_sstate if x not in expected_remaining_sstate] |
205 | self.assertFalse(actual_not_expected, msg="Files should have been removed but ware not: %s" % ', '.join(map(str, actual_not_expected))) | 205 | self.assertFalse(actual_not_expected, msg="Files should have been removed but were not: %s" % ', '.join(map(str, actual_not_expected))) |
206 | expected_not_actual = [x for x in expected_remaining_sstate if x not in actual_remaining_sstate] | 206 | expected_not_actual = [x for x in expected_remaining_sstate if x not in actual_remaining_sstate] |
207 | self.assertFalse(expected_not_actual, msg="Extra files ware removed: %s" ', '.join(map(str, expected_not_actual))) | 207 | self.assertFalse(expected_not_actual, msg="Extra files were removed: %s" ', '.join(map(str, expected_not_actual))) |
208 | 208 | ||
209 | def test_sstate_cache_management_script_using_pr_1(self): | 209 | def test_sstate_cache_management_script_using_pr_1(self): |
210 | global_config = [] | 210 | global_config = [] |
diff --git a/meta/lib/oeqa/selftest/cases/tinfoil.py b/meta/lib/oeqa/selftest/cases/tinfoil.py index a51c6048d3..6668d7cdc8 100644 --- a/meta/lib/oeqa/selftest/cases/tinfoil.py +++ b/meta/lib/oeqa/selftest/cases/tinfoil.py | |||
@@ -65,6 +65,20 @@ class TinfoilTests(OESelftestTestCase): | |||
65 | localdata.setVar('PN', 'hello') | 65 | localdata.setVar('PN', 'hello') |
66 | self.assertEqual('hello', localdata.getVar('BPN')) | 66 | self.assertEqual('hello', localdata.getVar('BPN')) |
67 | 67 | ||
68 | # The config_data API to parse_recipe_file is used by: | ||
69 | # layerindex-web layerindex/update_layer.py | ||
70 | def test_parse_recipe_custom_data(self): | ||
71 | with bb.tinfoil.Tinfoil() as tinfoil: | ||
72 | tinfoil.prepare(config_only=False, quiet=2) | ||
73 | localdata = bb.data.createCopy(tinfoil.config_data) | ||
74 | localdata.setVar("TESTVAR", "testval") | ||
75 | testrecipe = 'mdadm' | ||
76 | best = tinfoil.find_best_provider(testrecipe) | ||
77 | if not best: | ||
78 | self.fail('Unable to find recipe providing %s' % testrecipe) | ||
79 | rd = tinfoil.parse_recipe_file(best[3], config_data=localdata) | ||
80 | self.assertEqual("testval", rd.getVar('TESTVAR')) | ||
81 | |||
68 | def test_list_recipes(self): | 82 | def test_list_recipes(self): |
69 | with bb.tinfoil.Tinfoil() as tinfoil: | 83 | with bb.tinfoil.Tinfoil() as tinfoil: |
70 | tinfoil.prepare(config_only=False, quiet=2) | 84 | tinfoil.prepare(config_only=False, quiet=2) |
@@ -87,23 +101,22 @@ class TinfoilTests(OESelftestTestCase): | |||
87 | with bb.tinfoil.Tinfoil() as tinfoil: | 101 | with bb.tinfoil.Tinfoil() as tinfoil: |
88 | tinfoil.prepare(config_only=True) | 102 | tinfoil.prepare(config_only=True) |
89 | 103 | ||
90 | tinfoil.set_event_mask(['bb.event.FilesMatchingFound', 'bb.command.CommandCompleted']) | 104 | tinfoil.set_event_mask(['bb.event.FilesMatchingFound', 'bb.command.CommandCompleted', 'bb.command.CommandFailed', 'bb.command.CommandExit']) |
91 | 105 | ||
92 | # Need to drain events otherwise events that were masked may still be in the queue | 106 | # Need to drain events otherwise events that were masked may still be in the queue |
93 | while tinfoil.wait_event(): | 107 | while tinfoil.wait_event(): |
94 | pass | 108 | pass |
95 | 109 | ||
96 | pattern = 'conf' | 110 | pattern = 'conf' |
97 | res = tinfoil.run_command('findFilesMatchingInDir', pattern, 'conf/machine') | 111 | res = tinfoil.run_command('testCookerCommandEvent', pattern, handle_events=False) |
98 | self.assertTrue(res) | 112 | self.assertTrue(res) |
99 | 113 | ||
100 | eventreceived = False | 114 | eventreceived = False |
101 | commandcomplete = False | 115 | commandcomplete = False |
102 | start = time.time() | 116 | start = time.time() |
103 | # Wait for maximum 60s in total so we'd detect spurious heartbeat events for example | 117 | # Wait for maximum 120s in total so we'd detect spurious heartbeat events for example |
104 | # The test is IO load sensitive too | ||
105 | while (not (eventreceived == True and commandcomplete == True) | 118 | while (not (eventreceived == True and commandcomplete == True) |
106 | and (time.time() - start < 60)): | 119 | and (time.time() - start < 120)): |
107 | # if we received both events (on let's say a good day), we are done | 120 | # if we received both events (on let's say a good day), we are done |
108 | event = tinfoil.wait_event(1) | 121 | event = tinfoil.wait_event(1) |
109 | if event: | 122 | if event: |
@@ -111,14 +124,15 @@ class TinfoilTests(OESelftestTestCase): | |||
111 | commandcomplete = True | 124 | commandcomplete = True |
112 | elif isinstance(event, bb.event.FilesMatchingFound): | 125 | elif isinstance(event, bb.event.FilesMatchingFound): |
113 | self.assertEqual(pattern, event._pattern) | 126 | self.assertEqual(pattern, event._pattern) |
114 | self.assertIn('qemuarm.conf', event._matches) | 127 | self.assertIn('A', event._matches) |
128 | self.assertIn('B', event._matches) | ||
115 | eventreceived = True | 129 | eventreceived = True |
116 | elif isinstance(event, logging.LogRecord): | 130 | elif isinstance(event, logging.LogRecord): |
117 | continue | 131 | continue |
118 | else: | 132 | else: |
119 | self.fail('Unexpected event: %s' % event) | 133 | self.fail('Unexpected event: %s' % event) |
120 | 134 | ||
121 | self.assertTrue(commandcomplete, 'Timed out waiting for CommandCompleted event from bitbake server') | 135 | self.assertTrue(commandcomplete, 'Timed out waiting for CommandCompleted event from bitbake server (Matching event received: %s)' % str(eventreceived)) |
122 | self.assertTrue(eventreceived, 'Did not receive FilesMatchingFound event from bitbake server') | 136 | self.assertTrue(eventreceived, 'Did not receive FilesMatchingFound event from bitbake server') |
123 | 137 | ||
124 | def test_setvariable_clean(self): | 138 | def test_setvariable_clean(self): |
diff --git a/meta/lib/oeqa/selftest/cases/wic.py b/meta/lib/oeqa/selftest/cases/wic.py index 0435aa29c9..f7abdba015 100644 --- a/meta/lib/oeqa/selftest/cases/wic.py +++ b/meta/lib/oeqa/selftest/cases/wic.py | |||
@@ -905,14 +905,18 @@ class Wic2(WicTestCase): | |||
905 | @only_for_arch(['i586', 'i686', 'x86_64']) | 905 | @only_for_arch(['i586', 'i686', 'x86_64']) |
906 | def test_rawcopy_plugin_qemu(self): | 906 | def test_rawcopy_plugin_qemu(self): |
907 | """Test rawcopy plugin in qemu""" | 907 | """Test rawcopy plugin in qemu""" |
908 | # build ext4 and wic images | 908 | # build ext4 and then use it for a wic image |
909 | for fstype in ("ext4", "wic"): | 909 | config = 'IMAGE_FSTYPES = "ext4"\n' |
910 | config = 'IMAGE_FSTYPES = "%s"\nWKS_FILE = "test_rawcopy_plugin.wks.in"\n' % fstype | 910 | self.append_config(config) |
911 | self.append_config(config) | 911 | self.assertEqual(0, bitbake('core-image-minimal').status) |
912 | self.assertEqual(0, bitbake('core-image-minimal').status) | 912 | self.remove_config(config) |
913 | self.remove_config(config) | ||
914 | 913 | ||
915 | with runqemu('core-image-minimal', ssh=False, image_fstype='wic') as qemu: | 914 | config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_rawcopy_plugin.wks.in"\n' |
915 | self.append_config(config) | ||
916 | self.assertEqual(0, bitbake('core-image-minimal-mtdutils').status) | ||
917 | self.remove_config(config) | ||
918 | |||
919 | with runqemu('core-image-minimal-mtdutils', ssh=False, image_fstype='wic') as qemu: | ||
916 | cmd = "grep sda. /proc/partitions |wc -l" | 920 | cmd = "grep sda. /proc/partitions |wc -l" |
917 | status, output = qemu.run_serial(cmd) | 921 | status, output = qemu.run_serial(cmd) |
918 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) | 922 | self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) |
diff --git a/meta/lib/oeqa/utils/buildproject.py b/meta/lib/oeqa/utils/buildproject.py index e6d80cc8dc..dfb9661868 100644 --- a/meta/lib/oeqa/utils/buildproject.py +++ b/meta/lib/oeqa/utils/buildproject.py | |||
@@ -18,6 +18,7 @@ class BuildProject(metaclass=ABCMeta): | |||
18 | def __init__(self, uri, foldername=None, tmpdir=None, dl_dir=None): | 18 | def __init__(self, uri, foldername=None, tmpdir=None, dl_dir=None): |
19 | self.uri = uri | 19 | self.uri = uri |
20 | self.archive = os.path.basename(uri) | 20 | self.archive = os.path.basename(uri) |
21 | self.tempdirobj = None | ||
21 | if not tmpdir: | 22 | if not tmpdir: |
22 | self.tempdirobj = tempfile.TemporaryDirectory(prefix='buildproject-') | 23 | self.tempdirobj = tempfile.TemporaryDirectory(prefix='buildproject-') |
23 | tmpdir = self.tempdirobj.name | 24 | tmpdir = self.tempdirobj.name |
@@ -57,6 +58,8 @@ class BuildProject(metaclass=ABCMeta): | |||
57 | return self._run('cd %s; make install %s' % (self.targetdir, install_args)) | 58 | return self._run('cd %s; make install %s' % (self.targetdir, install_args)) |
58 | 59 | ||
59 | def clean(self): | 60 | def clean(self): |
61 | if self.tempdirobj: | ||
62 | self.tempdirobj.cleanup() | ||
60 | if not self.needclean: | 63 | if not self.needclean: |
61 | return | 64 | return |
62 | self._run('rm -rf %s' % self.targetdir) | 65 | self._run('rm -rf %s' % self.targetdir) |
diff --git a/meta/lib/oeqa/utils/commands.py b/meta/lib/oeqa/utils/commands.py index a71c16ab14..024261410e 100644 --- a/meta/lib/oeqa/utils/commands.py +++ b/meta/lib/oeqa/utils/commands.py | |||
@@ -174,11 +174,8 @@ def runCmd(command, ignore_status=False, timeout=None, assert_error=True, sync=T | |||
174 | if native_sysroot: | 174 | if native_sysroot: |
175 | extra_paths = "%s/sbin:%s/usr/sbin:%s/usr/bin" % \ | 175 | extra_paths = "%s/sbin:%s/usr/sbin:%s/usr/bin" % \ |
176 | (native_sysroot, native_sysroot, native_sysroot) | 176 | (native_sysroot, native_sysroot, native_sysroot) |
177 | extra_libpaths = "%s/lib:%s/usr/lib" % \ | ||
178 | (native_sysroot, native_sysroot) | ||
179 | nenv = dict(options.get('env', os.environ)) | 177 | nenv = dict(options.get('env', os.environ)) |
180 | nenv['PATH'] = extra_paths + ':' + nenv.get('PATH', '') | 178 | nenv['PATH'] = extra_paths + ':' + nenv.get('PATH', '') |
181 | nenv['LD_LIBRARY_PATH'] = extra_libpaths + ':' + nenv.get('LD_LIBRARY_PATH', '') | ||
182 | options['env'] = nenv | 179 | options['env'] = nenv |
183 | 180 | ||
184 | cmd = Command(command, timeout=timeout, output_log=output_log, **options) | 181 | cmd = Command(command, timeout=timeout, output_log=output_log, **options) |
diff --git a/meta/lib/oeqa/utils/metadata.py b/meta/lib/oeqa/utils/metadata.py index 8013aa684d..15ec190c4a 100644 --- a/meta/lib/oeqa/utils/metadata.py +++ b/meta/lib/oeqa/utils/metadata.py | |||
@@ -27,9 +27,9 @@ def metadata_from_bb(): | |||
27 | data_dict = get_bb_vars() | 27 | data_dict = get_bb_vars() |
28 | 28 | ||
29 | # Distro information | 29 | # Distro information |
30 | info_dict['distro'] = {'id': data_dict['DISTRO'], | 30 | info_dict['distro'] = {'id': data_dict.get('DISTRO', 'NODISTRO'), |
31 | 'version_id': data_dict['DISTRO_VERSION'], | 31 | 'version_id': data_dict.get('DISTRO_VERSION', 'NO_DISTRO_VERSION'), |
32 | 'pretty_name': '%s %s' % (data_dict['DISTRO'], data_dict['DISTRO_VERSION'])} | 32 | 'pretty_name': '%s %s' % (data_dict.get('DISTRO', 'NODISTRO'), data_dict.get('DISTRO_VERSION', 'NO_DISTRO_VERSION'))} |
33 | 33 | ||
34 | # Host distro information | 34 | # Host distro information |
35 | os_release = get_os_release() | 35 | os_release = get_os_release() |
diff --git a/meta/lib/oeqa/utils/nfs.py b/meta/lib/oeqa/utils/nfs.py index a37686c914..c9bac050a4 100644 --- a/meta/lib/oeqa/utils/nfs.py +++ b/meta/lib/oeqa/utils/nfs.py | |||
@@ -8,7 +8,7 @@ from oeqa.utils.commands import bitbake, get_bb_var, Command | |||
8 | from oeqa.utils.network import get_free_port | 8 | from oeqa.utils.network import get_free_port |
9 | 9 | ||
10 | @contextlib.contextmanager | 10 | @contextlib.contextmanager |
11 | def unfs_server(directory, logger = None): | 11 | def unfs_server(directory, logger = None, udp = True): |
12 | unfs_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "unfs3-native") | 12 | unfs_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "unfs3-native") |
13 | if not os.path.exists(os.path.join(unfs_sysroot, "usr", "bin", "unfsd")): | 13 | if not os.path.exists(os.path.join(unfs_sysroot, "usr", "bin", "unfsd")): |
14 | # build native tool | 14 | # build native tool |
@@ -22,7 +22,7 @@ def unfs_server(directory, logger = None): | |||
22 | exports.write("{0} (rw,no_root_squash,no_all_squash,insecure)\n".format(directory).encode()) | 22 | exports.write("{0} (rw,no_root_squash,no_all_squash,insecure)\n".format(directory).encode()) |
23 | 23 | ||
24 | # find some ports for the server | 24 | # find some ports for the server |
25 | nfsport, mountport = get_free_port(udp = True), get_free_port(udp = True) | 25 | nfsport, mountport = get_free_port(udp), get_free_port(udp) |
26 | 26 | ||
27 | nenv = dict(os.environ) | 27 | nenv = dict(os.environ) |
28 | nenv['PATH'] = "{0}/sbin:{0}/usr/sbin:{0}/usr/bin:".format(unfs_sysroot) + nenv.get('PATH', '') | 28 | nenv['PATH'] = "{0}/sbin:{0}/usr/sbin:{0}/usr/bin:".format(unfs_sysroot) + nenv.get('PATH', '') |
diff --git a/meta/lib/oeqa/utils/qemurunner.py b/meta/lib/oeqa/utils/qemurunner.py index 77ec939ad7..c84d299a80 100644 --- a/meta/lib/oeqa/utils/qemurunner.py +++ b/meta/lib/oeqa/utils/qemurunner.py | |||
@@ -70,6 +70,8 @@ class QemuRunner: | |||
70 | self.monitorpipe = None | 70 | self.monitorpipe = None |
71 | 71 | ||
72 | self.logger = logger | 72 | self.logger = logger |
73 | # Whether we're expecting an exit and should show related errors | ||
74 | self.canexit = False | ||
73 | 75 | ||
74 | # Enable testing other OS's | 76 | # Enable testing other OS's |
75 | # Set commands for target communication, and default to Linux ALWAYS | 77 | # Set commands for target communication, and default to Linux ALWAYS |
@@ -118,7 +120,10 @@ class QemuRunner: | |||
118 | import fcntl | 120 | import fcntl |
119 | fl = fcntl.fcntl(o, fcntl.F_GETFL) | 121 | fl = fcntl.fcntl(o, fcntl.F_GETFL) |
120 | fcntl.fcntl(o, fcntl.F_SETFL, fl | os.O_NONBLOCK) | 122 | fcntl.fcntl(o, fcntl.F_SETFL, fl | os.O_NONBLOCK) |
121 | return os.read(o.fileno(), 1000000).decode("utf-8") | 123 | try: |
124 | return os.read(o.fileno(), 1000000).decode("utf-8") | ||
125 | except BlockingIOError: | ||
126 | return "" | ||
122 | 127 | ||
123 | 128 | ||
124 | def handleSIGCHLD(self, signum, frame): | 129 | def handleSIGCHLD(self, signum, frame): |
@@ -229,7 +234,7 @@ class QemuRunner: | |||
229 | r = os.fdopen(r) | 234 | r = os.fdopen(r) |
230 | x = r.read() | 235 | x = r.read() |
231 | os.killpg(os.getpgid(self.runqemu.pid), signal.SIGTERM) | 236 | os.killpg(os.getpgid(self.runqemu.pid), signal.SIGTERM) |
232 | sys.exit(0) | 237 | os._exit(0) |
233 | 238 | ||
234 | self.logger.debug("runqemu started, pid is %s" % self.runqemu.pid) | 239 | self.logger.debug("runqemu started, pid is %s" % self.runqemu.pid) |
235 | self.logger.debug("waiting at most %s seconds for qemu pid (%s)" % | 240 | self.logger.debug("waiting at most %s seconds for qemu pid (%s)" % |
@@ -427,12 +432,17 @@ class QemuRunner: | |||
427 | except OSError as e: | 432 | except OSError as e: |
428 | if e.errno != errno.ESRCH: | 433 | if e.errno != errno.ESRCH: |
429 | raise | 434 | raise |
430 | endtime = time.time() + self.runqemutime | 435 | try: |
431 | while self.runqemu.poll() is None and time.time() < endtime: | 436 | outs, errs = self.runqemu.communicate(timeout = self.runqemutime) |
432 | time.sleep(1) | 437 | if outs: |
433 | if self.runqemu.poll() is None: | 438 | self.logger.info("Output from runqemu:\n%s", outs.decode("utf-8")) |
439 | if errs: | ||
440 | self.logger.info("Stderr from runqemu:\n%s", errs.decode("utf-8")) | ||
441 | except TimeoutExpired: | ||
434 | self.logger.debug("Sending SIGKILL to runqemu") | 442 | self.logger.debug("Sending SIGKILL to runqemu") |
435 | os.killpg(os.getpgid(self.runqemu.pid), signal.SIGKILL) | 443 | os.killpg(os.getpgid(self.runqemu.pid), signal.SIGKILL) |
444 | if not self.runqemu.stdout.closed: | ||
445 | self.logger.info("Output from runqemu:\n%s" % self.getOutput(self.runqemu.stdout)) | ||
436 | self.runqemu.stdin.close() | 446 | self.runqemu.stdin.close() |
437 | self.runqemu.stdout.close() | 447 | self.runqemu.stdout.close() |
438 | self.runqemu_exited = True | 448 | self.runqemu_exited = True |
@@ -467,6 +477,11 @@ class QemuRunner: | |||
467 | self.thread.stop() | 477 | self.thread.stop() |
468 | self.thread.join() | 478 | self.thread.join() |
469 | 479 | ||
480 | def allowexit(self): | ||
481 | self.canexit = True | ||
482 | if self.thread: | ||
483 | self.thread.allowexit() | ||
484 | |||
470 | def restart(self, qemuparams = None): | 485 | def restart(self, qemuparams = None): |
471 | self.logger.warning("Restarting qemu process") | 486 | self.logger.warning("Restarting qemu process") |
472 | if self.runqemu.poll() is None: | 487 | if self.runqemu.poll() is None: |
@@ -522,7 +537,9 @@ class QemuRunner: | |||
522 | if re.search(self.boot_patterns['search_cmd_finished'], data): | 537 | if re.search(self.boot_patterns['search_cmd_finished'], data): |
523 | break | 538 | break |
524 | else: | 539 | else: |
525 | raise Exception("No data on serial console socket") | 540 | if self.canexit: |
541 | return (1, "") | ||
542 | raise Exception("No data on serial console socket, connection closed?") | ||
526 | 543 | ||
527 | if data: | 544 | if data: |
528 | if raw: | 545 | if raw: |
@@ -560,6 +577,7 @@ class LoggingThread(threading.Thread): | |||
560 | self.logger = logger | 577 | self.logger = logger |
561 | self.readsock = None | 578 | self.readsock = None |
562 | self.running = False | 579 | self.running = False |
580 | self.canexit = False | ||
563 | 581 | ||
564 | self.errorevents = select.POLLERR | select.POLLHUP | select.POLLNVAL | 582 | self.errorevents = select.POLLERR | select.POLLHUP | select.POLLNVAL |
565 | self.readevents = select.POLLIN | select.POLLPRI | 583 | self.readevents = select.POLLIN | select.POLLPRI |
@@ -593,6 +611,9 @@ class LoggingThread(threading.Thread): | |||
593 | self.close_ignore_error(self.writepipe) | 611 | self.close_ignore_error(self.writepipe) |
594 | self.running = False | 612 | self.running = False |
595 | 613 | ||
614 | def allowexit(self): | ||
615 | self.canexit = True | ||
616 | |||
596 | def eventloop(self): | 617 | def eventloop(self): |
597 | poll = select.poll() | 618 | poll = select.poll() |
598 | event_read_mask = self.errorevents | self.readevents | 619 | event_read_mask = self.errorevents | self.readevents |
@@ -638,7 +659,7 @@ class LoggingThread(threading.Thread): | |||
638 | data = self.readsock.recv(count) | 659 | data = self.readsock.recv(count) |
639 | except socket.error as e: | 660 | except socket.error as e: |
640 | if e.errno == errno.EAGAIN or e.errno == errno.EWOULDBLOCK: | 661 | if e.errno == errno.EAGAIN or e.errno == errno.EWOULDBLOCK: |
641 | return '' | 662 | return b'' |
642 | else: | 663 | else: |
643 | raise | 664 | raise |
644 | 665 | ||
@@ -649,7 +670,9 @@ class LoggingThread(threading.Thread): | |||
649 | # happened. But for this code it counts as an | 670 | # happened. But for this code it counts as an |
650 | # error since the connection shouldn't go away | 671 | # error since the connection shouldn't go away |
651 | # until qemu exits. | 672 | # until qemu exits. |
652 | raise Exception("Console connection closed unexpectedly") | 673 | if not self.canexit: |
674 | raise Exception("Console connection closed unexpectedly") | ||
675 | return b'' | ||
653 | 676 | ||
654 | return data | 677 | return data |
655 | 678 | ||
diff --git a/meta/lib/oeqa/utils/targetbuild.py b/meta/lib/oeqa/utils/targetbuild.py index 1055810ca3..09738add1d 100644 --- a/meta/lib/oeqa/utils/targetbuild.py +++ b/meta/lib/oeqa/utils/targetbuild.py | |||
@@ -19,6 +19,7 @@ class BuildProject(metaclass=ABCMeta): | |||
19 | self.d = d | 19 | self.d = d |
20 | self.uri = uri | 20 | self.uri = uri |
21 | self.archive = os.path.basename(uri) | 21 | self.archive = os.path.basename(uri) |
22 | self.tempdirobj = None | ||
22 | if not tmpdir: | 23 | if not tmpdir: |
23 | tmpdir = self.d.getVar('WORKDIR') | 24 | tmpdir = self.d.getVar('WORKDIR') |
24 | if not tmpdir: | 25 | if not tmpdir: |
@@ -71,9 +72,10 @@ class BuildProject(metaclass=ABCMeta): | |||
71 | return self._run('cd %s; make install %s' % (self.targetdir, install_args)) | 72 | return self._run('cd %s; make install %s' % (self.targetdir, install_args)) |
72 | 73 | ||
73 | def clean(self): | 74 | def clean(self): |
75 | if self.tempdirobj: | ||
76 | self.tempdirobj.cleanup() | ||
74 | self._run('rm -rf %s' % self.targetdir) | 77 | self._run('rm -rf %s' % self.targetdir) |
75 | subprocess.check_call('rm -f %s' % self.localarchive, shell=True) | 78 | subprocess.check_call('rm -f %s' % self.localarchive, shell=True) |
76 | pass | ||
77 | 79 | ||
78 | class TargetBuildProject(BuildProject): | 80 | class TargetBuildProject(BuildProject): |
79 | 81 | ||