diff options
Diffstat (limited to 'meta/classes/sanity.bbclass')
-rw-r--r-- | meta/classes/sanity.bbclass | 1054 |
1 files changed, 0 insertions, 1054 deletions
diff --git a/meta/classes/sanity.bbclass b/meta/classes/sanity.bbclass deleted file mode 100644 index 485173ab48..0000000000 --- a/meta/classes/sanity.bbclass +++ /dev/null | |||
@@ -1,1054 +0,0 @@ | |||
1 | # | ||
2 | # Sanity check the users setup for common misconfigurations | ||
3 | # | ||
4 | |||
# Host utilities that must be present for any build; each one is checked
# with check_app_exists() from check_sanity_version_change() below.
SANITY_REQUIRED_UTILITIES ?= "patch diffstat git bzip2 tar \
    gzip gawk chrpath wget cpio perl file which"
7 | |||
def bblayers_conf_file(d):
    """Return the absolute path of this build's conf/bblayers.conf (under TOPDIR)."""
    topdir = d.getVar('TOPDIR')
    return os.path.join(topdir, 'conf/bblayers.conf')
10 | |||
def sanity_conf_read(fn):
    """Read configuration file *fn* and return its contents as a list of lines."""
    with open(fn, 'r') as conf:
        return conf.readlines()
15 | |||
def sanity_conf_find_line(pattern, lines):
    """Return (index, line) for the first line matching regex *pattern*.

    Returns (None, None) when no line matches.
    """
    import re
    for idx, text in enumerate(lines):
        if re.search(pattern, text):
            return (idx, text)
    return (None, None)
21 | |||
def sanity_conf_update(fn, lines, version_var_name, new_version):
    """Replace the version assignment in *lines* and rewrite file *fn*.

    The first line starting with *version_var_name* is replaced with
    '<name> = "<new_version>"'.  *lines* is modified in place and the
    whole list is written back to *fn*.  As before, a missing version
    line results in a TypeError (index is None).
    """
    import re
    index = next((i for i, line in enumerate(lines)
                  if re.search(r"^%s" % version_var_name, line)), None)
    lines[index] = '%s = "%d"\n' % (version_var_name, new_version)
    with open(fn, "w") as f:
        f.write(''.join(lines))
27 | |||
# Functions added to this variable MUST throw a NotImplementedError exception unless
# they successfully changed the config version in the config file. Exceptions
# are used since exec_func doesn't handle return values.
# Each entry has the form
#   <conf file>:<current version variable>:<required version variable>:<update function>
# and is consumed by sanity_check_conffiles() below.
BBLAYERS_CONF_UPDATE_FUNCS += " \
    conf/bblayers.conf:LCONF_VERSION:LAYER_CONF_VERSION:oecore_update_bblayers \
    conf/local.conf:CONF_VERSION:LOCALCONF_VERSION:oecore_update_localconf \
    conf/site.conf:SCONF_VERSION:SITE_CONF_VERSION:oecore_update_siteconf \
"
36 | |||
# Diff/merge tool suggested to the user in the messages below.
SANITY_DIFF_TOOL ?= "meld"

# Sample file that conf/local.conf was generated from.
SANITY_LOCALCONF_SAMPLE ?= "${COREBASE}/meta*/conf/local.conf.sample"
python oecore_update_localconf() {
    # Check we are using a valid local.conf
    current_conf = d.getVar('CONF_VERSION')
    conf_version = d.getVar('LOCALCONF_VERSION')

    # local.conf cannot be migrated automatically, so this always raises
    # NotImplementedError (the contract required by BBLAYERS_CONF_UPDATE_FUNCS)
    # with advice on how to merge the changes manually.
    failmsg = """Your version of local.conf was generated from an older/newer version of
local.conf.sample and there have been updates made to this file. Please compare the two
files and merge any changes before continuing.

Matching the version numbers will remove this message.

\"${SANITY_DIFF_TOOL} conf/local.conf ${SANITY_LOCALCONF_SAMPLE}\"

is a good way to visualise the changes."""
    failmsg = d.expand(failmsg)

    raise NotImplementedError(failmsg)
}
58 | |||
# Sample file that conf/site.conf was generated from.
SANITY_SITECONF_SAMPLE ?= "${COREBASE}/meta*/conf/site.conf.sample"
python oecore_update_siteconf() {
    # If we have a site.conf, check it's valid
    current_sconf = d.getVar('SCONF_VERSION')
    sconf_version = d.getVar('SITE_CONF_VERSION')

    # site.conf cannot be migrated automatically either, so always raise
    # NotImplementedError with manual-merge advice (see
    # BBLAYERS_CONF_UPDATE_FUNCS for the contract).
    failmsg = """Your version of site.conf was generated from an older version of
site.conf.sample and there have been updates made to this file. Please compare the two
files and merge any changes before continuing.

Matching the version numbers will remove this message.

\"${SANITY_DIFF_TOOL} conf/site.conf ${SANITY_SITECONF_SAMPLE}\"

is a good way to visualise the changes."""
    failmsg = d.expand(failmsg)

    raise NotImplementedError(failmsg)
}
78 | |||
# Sample file that conf/bblayers.conf was generated from.
SANITY_BBLAYERCONF_SAMPLE ?= "${COREBASE}/meta*/conf/bblayers.conf.sample"
python oecore_update_bblayers() {
    # bblayers.conf is out of date, so see if we can resolve that
    #
    # Migrations handled here: v4 -> v5 (ensure ${TOPDIR} is in BBPATH),
    # v5 -> v6 (null update) and v6 -> v7 (meta-yocto -> meta-poky rename,
    # after which poky tracks its own POKY_BBLAYERS_CONF_VERSION).  Anything
    # else raises NotImplementedError so sanity_check_conffiles() turns it
    # into a fatal error with merge advice.

    current_lconf = int(d.getVar('LCONF_VERSION'))
    lconf_version = int(d.getVar('LAYER_CONF_VERSION'))

    failmsg = """Your version of bblayers.conf has the wrong LCONF_VERSION (has ${LCONF_VERSION}, expecting ${LAYER_CONF_VERSION}).
Please compare your file against bblayers.conf.sample and merge any changes before continuing.
"${SANITY_DIFF_TOOL} conf/bblayers.conf ${SANITY_BBLAYERCONF_SAMPLE}"

is a good way to visualise the changes."""
    failmsg = d.expand(failmsg)

    if not current_lconf:
        raise NotImplementedError(failmsg)

    if current_lconf < 4:
        # Too old to migrate automatically.
        raise NotImplementedError(failmsg)

    bblayers_fn = bblayers_conf_file(d)
    lines = sanity_conf_read(bblayers_fn)

    if current_lconf == 4 and lconf_version > 4:
        # v4 -> v5: BBPATH must contain ${TOPDIR}.
        topdir_var = '$' + '{TOPDIR}'
        index, bbpath_line = sanity_conf_find_line('BBPATH', lines)
        if bbpath_line:
            start = bbpath_line.find('"')
            if start != -1 and (len(bbpath_line) != (start + 1)):
                if bbpath_line[start + 1] == '"':
                    # Empty BBPATH value: just insert ${TOPDIR}.
                    lines[index] = (bbpath_line[:start + 1] +
                                    topdir_var + bbpath_line[start + 1:])
                else:
                    if not topdir_var in bbpath_line:
                        # Prepend ${TOPDIR} to the existing value.
                        lines[index] = (bbpath_line[:start + 1] +
                                        topdir_var + ':' + bbpath_line[start + 1:])
            else:
                raise NotImplementedError(failmsg)
        else:
            # No BBPATH line at all: add one just before BBFILES.
            index, bbfiles_line = sanity_conf_find_line('BBFILES', lines)
            if bbfiles_line:
                lines.insert(index, 'BBPATH = "' + topdir_var + '"\n')
            else:
                raise NotImplementedError(failmsg)

        current_lconf += 1
        sanity_conf_update(bblayers_fn, lines, 'LCONF_VERSION', current_lconf)
        bb.note("Your conf/bblayers.conf has been automatically updated.")
        return

    elif current_lconf == 5 and lconf_version > 5:
        # Null update, to avoid issues with people switching between poky and other distros
        current_lconf = 6
        sanity_conf_update(bblayers_fn, lines, 'LCONF_VERSION', current_lconf)
        bb.note("Your conf/bblayers.conf has been automatically updated.")
        return
        # BUGFIX: an unreachable "status.addresult()" call followed this
        # return in the original; "status" is not even defined in this
        # function, so the dead statement has been removed.

    elif current_lconf == 6 and lconf_version > 6:
        # Handle rename of meta-yocto -> meta-poky
        # This marks the start of separate version numbers but code is needed in OE-Core
        # for the migration, one last time.
        layers = d.getVar('BBLAYERS').split()
        layers = [ os.path.basename(path) for path in layers ]
        if 'meta-yocto' in layers:
            found = False
            while True:
                index, meta_yocto_line = sanity_conf_find_line(r'.*meta-yocto[\'"\s\n]', lines)
                if meta_yocto_line:
                    lines[index] = meta_yocto_line.replace('meta-yocto', 'meta-poky')
                    found = True
                else:
                    break
            if not found:
                raise NotImplementedError(failmsg)
            index, meta_yocto_line = sanity_conf_find_line(r'LCONF_VERSION.*\n', lines)
            if meta_yocto_line:
                # From here on poky versions its bblayers.conf separately.
                lines[index] = 'POKY_BBLAYERS_CONF_VERSION = "1"\n'
            else:
                raise NotImplementedError(failmsg)
            with open(bblayers_fn, "w") as f:
                f.write(''.join(lines))
            bb.note("Your conf/bblayers.conf has been automatically updated.")
            return
        current_lconf += 1
        sanity_conf_update(bblayers_fn, lines, 'LCONF_VERSION', current_lconf)
        bb.note("Your conf/bblayers.conf has been automatically updated.")
        return

    raise NotImplementedError(failmsg)
}
173 | |||
def raise_sanity_error(msg, d, network_error=False):
    """Report a sanity-check failure.

    When SANITY_USE_EVENTS is "1", a SanityCheckFailed event is fired so a
    UI can present the problem; otherwise the build is aborted with
    bb.fatal().
    """
    if d.getVar("SANITY_USE_EVENTS") != "1":
        bb.fatal(""" OE-core's config sanity checker detected a potential misconfiguration.
Either fix the cause of this error or at your own risk disable the checker (see sanity.conf).
Following is the list of potential problems / advisories:

%s""" % msg)
        return

    try:
        bb.event.fire(bb.event.SanityCheckFailed(msg, network_error), d)
    except TypeError:
        # Older bitbake without the network_error event argument.
        bb.event.fire(bb.event.SanityCheckFailed(msg), d)
187 | |||
# Check flags associated with a tuning.
def check_toolchain_tune_args(data, tune, multilib, errs):
    """Verify the required compiler, assembler and linker flags for *tune*.

    Delegates to check_toolchain_args_present() for each flag class and
    returns True if any of them reported missing flags.
    """
    found_errors = False
    for which in ('CCARGS', 'ASARGS', 'LDARGS'):
        if check_toolchain_args_present(data, tune, multilib, errs, which):
            found_errors = True
    return found_errors
199 | |||
def check_toolchain_args_present(data, tune, multilib, tune_errors, which):
    """Check that TUNE_<which> contains every flag the tune's ABI requires.

    Looks up TUNEABI_REQUIRED_<which>_tune-<tune> and compares it with the
    currently-set TUNE_<which>.  A description of any missing flags is
    appended to *tune_errors*.

    Returns True when flags are missing, False otherwise.  (Previously the
    nothing-required path returned an implicit None; a consistent bool is
    returned now, which is backward compatible with the callers' truthiness
    tests.)
    """
    args_set = (data.getVar("TUNE_%s" % which) or "").split()
    args_wanted = (data.getVar("TUNEABI_REQUIRED_%s_tune-%s" % (which, tune)) or "").split()

    # If no args are listed/required, we are done.
    if not args_wanted:
        return False

    args_missing = [arg for arg in args_wanted if arg not in args_set]
    if not args_missing:
        return False

    tune_errors.append("TUNEABI for %s requires '%s' in TUNE_%s (%s)." %
                       (tune, ' '.join(args_missing), which, ' '.join(args_set)))
    return True
218 | |||
# Check a single tune for validity.
def check_toolchain_tune(data, tune, multilib):
    """Validate one tuning for the given multilib ("default" for the main one).

    Checks that the tune defines TUNE_FEATURES, that every feature is
    declared in TUNEVALID and does not clash per TUNECONFLICTS, and - when
    TUNEABI_WHITELIST is set - that the tune's ABI is allowed and that the
    required toolchain flags are present (via check_toolchain_tune_args).

    Returns an error string, or None when the tuning is valid.
    """
    tune_errors = []
    if not tune:
        return "No tuning found for %s multilib." % multilib
    localdata = bb.data.createCopy(data)
    if multilib != "default":
        # Apply the overrides so we can look at the details.
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + multilib
        localdata.setVar("OVERRIDES", overrides)
    bb.debug(2, "Sanity-checking tuning '%s' (%s) features:" % (tune, multilib))
    features = (localdata.getVar("TUNE_FEATURES_tune-%s" % tune) or "").split()
    if not features:
        return "Tuning '%s' has no defined features, and cannot be used." % tune
    valid_tunes = localdata.getVarFlags('TUNEVALID') or {}
    conflicts = localdata.getVarFlags('TUNECONFLICTS') or {}
    # [doc] is the documentation for the variable, not a real feature
    if 'doc' in valid_tunes:
        del valid_tunes['doc']
    if 'doc' in conflicts:
        del conflicts['doc']
    for feature in features:
        # A feature may declare a space-separated list of conflicting features.
        if feature in conflicts:
            for conflict in conflicts[feature].split():
                if conflict in features:
                    tune_errors.append("Feature '%s' conflicts with '%s'." %
                        (feature, conflict))
        if feature in valid_tunes:
            bb.debug(2, " %s: %s" % (feature, valid_tunes[feature]))
        else:
            tune_errors.append("Feature '%s' is not defined." % feature)
    whitelist = localdata.getVar("TUNEABI_WHITELIST")
    if whitelist:
        # TUNEABI defaults to the tune name itself when not set explicitly.
        tuneabi = localdata.getVar("TUNEABI_tune-%s" % tune)
        if not tuneabi:
            tuneabi = tune
        if True not in [x in whitelist.split() for x in tuneabi.split()]:
            tune_errors.append("Tuning '%s' (%s) cannot be used with any supported tuning/ABI." %
                (tune, tuneabi))
        else:
            if not check_toolchain_tune_args(localdata, tuneabi, multilib, tune_errors):
                bb.debug(2, "Sanity check: Compiler args OK for %s." % tune)
    if tune_errors:
        return "Tuning '%s' has the following errors:\n" % tune + '\n'.join(tune_errors)
263 | |||
def check_toolchain(data):
    """Validate the default tune and every multilib tuning.

    Returns an aggregated error string, or "" when everything is valid.
    """
    tune_error_set = []
    deftune = data.getVar("DEFAULTTUNE")
    tune_errors = check_toolchain_tune(data, deftune, 'default')
    if tune_errors:
        tune_error_set.append(tune_errors)

    multilibs = (data.getVar("MULTILIB_VARIANTS") or "").split()
    global_multilibs = (data.getVar("MULTILIB_GLOBAL_VARIANTS") or "").split()

    if multilibs:
        seen_libs = []
        seen_tunes = []
        for lib in multilibs:
            if lib in seen_libs:
                tune_error_set.append("The multilib '%s' appears more than once." % lib)
            else:
                seen_libs.append(lib)
            if not lib in global_multilibs:
                tune_error_set.append("Multilib %s is not present in MULTILIB_GLOBAL_VARIANTS" % lib)
            tune = data.getVar("DEFAULTTUNE_virtclass-multilib-%s" % lib)
            if tune in seen_tunes:
                tune_error_set.append("The tuning '%s' appears in more than one multilib." % tune)
            else:
                # BUGFIX: this previously appended to seen_libs, so
                # seen_tunes stayed empty and the duplicate-tune check
                # above could never trigger (and tune names polluted the
                # duplicate-multilib check).
                seen_tunes.append(tune)
            if tune == deftune:
                tune_error_set.append("Multilib '%s' (%s) is also the default tuning." % (lib, deftune))
            else:
                tune_errors = check_toolchain_tune(data, tune, lib)
                if tune_errors:
                    tune_error_set.append(tune_errors)
    if tune_error_set:
        return "Toolchain tunings invalid:\n" + '\n'.join(tune_error_set) + "\n"

    return ""
299 | |||
def check_conf_exists(fn, data):
    """Return True if the (expanded) config file *fn* is readable in any
    directory listed in BBPATH, False otherwise."""
    fn = data.expand(fn)
    vbbpath = data.getVar("BBPATH", False)
    for p in (vbbpath.split(":") if vbbpath else []):
        candidate = os.path.join(data.expand(p), fn)
        if os.access(candidate, os.R_OK):
            return True
    return False
311 | |||
def check_create_long_filename(filepath, pathname):
    """Check that a ~200-character filename can be created under *filepath*.

    *pathname* is the human-readable variable name (e.g. "TMPDIR") used in
    the messages.  Returns "" on success, otherwise an error string:
    a dedicated message for ENAMETOOLONG (filesystems such as eCryptFS
    limit name length), a generic creation failure, or a failure to create
    *filepath* itself.
    """
    import string, random, errno
    testfile = os.path.join(filepath, ''.join(random.choice(string.ascii_letters) for x in range(200)))
    try:
        if not os.path.exists(filepath):
            bb.utils.mkdirhier(filepath)
    except OSError as e:
        # Couldn't even create the directory to test in.
        return "Failed to create %s directory in which to run long name sanity check: %s.\n" % (pathname, e.strerror)
    # BUGFIX: the original had a second "except OSError" clause which is
    # unreachable on Python 3 (IOError is an alias of OSError), and it
    # unpacked e.args into two values, which is not guaranteed; use
    # e.errno / e.strerror instead.
    try:
        with open(testfile, "w"):
            pass
        os.remove(testfile)
    except OSError as e:
        if e.errno == errno.ENAMETOOLONG:
            return "Failed to create a file with a long name in %s. Please use a filesystem that does not unreasonably limit filename length.\n" % pathname
        return "Failed to create a file in %s: %s.\n" % (pathname, e.strerror)
    return ""
332 | |||
def check_path_length(filepath, pathname, limit):
    """Return an error string when len(*filepath*) exceeds *limit*, else ""."""
    if len(filepath) <= limit:
        return ""
    return "The length of %s is longer than %s, this would cause unexpected errors, please use a shorter path.\n" % (pathname, limit)
337 | |||
def get_filesystem_id(path):
    """Return the filesystem type id for *path* (via 'stat -f -c %t'),
    or None if stat fails."""
    import subprocess
    try:
        output = subprocess.check_output(["stat", "-f", "-c", "%t", path])
    except subprocess.CalledProcessError:
        bb.warn("Can't get filesystem id of: %s" % path)
        return None
    return output.decode('utf-8').strip()
345 | |||
# Check that the path isn't located on nfs.
def check_not_nfs(path, name):
    """Return an error string when *path* lives on NFS, otherwise ""."""
    # The nfs' filesystem id is 6969
    if get_filesystem_id(path) != "6969":
        return ""
    return "The %s: %s can't be located on nfs.\n" % (name, path)
352 | |||
# Check that the path is on a case-sensitive file system
def check_case_sensitive(path, name):
    """Return an error string when *path* is on a case-insensitive
    filesystem (detected by probing a mixed-case temp file), else ""."""
    import tempfile
    with tempfile.NamedTemporaryFile(prefix='TmP', dir=path) as probe:
        # On a case-insensitive filesystem the lowercased name resolves
        # to the file we just created.
        if not os.path.exists(probe.name.lower()):
            return ""
    return "The %s (%s) can't be on a case-insensitive file system.\n" % (name, path)
360 | |||
# Check that path isn't a broken symlink
def check_symlink(lnk, data):
    """Raise a sanity error when *lnk* is a symlink whose target is missing."""
    is_broken = os.path.islink(lnk) and not os.path.exists(lnk)
    if is_broken:
        raise_sanity_error("%s is a broken symlink." % lnk, data)
365 | |||
def check_connectivity(d):
    """Verify network access by fetching CONNECTIVITY_CHECK_URIS.

    Returns "" on success or when the check is skipped (no URIs configured,
    or BB_NO_NETWORK = "1"); otherwise returns an error message, optionally
    customised via CONNECTIVITY_CHECK_MSG.
    """
    # URI's to check can be set in the CONNECTIVITY_CHECK_URIS variable
    # using the same syntax as for SRC_URI. If the variable is not set
    # the check is skipped
    test_uris = (d.getVar('CONNECTIVITY_CHECK_URIS') or "").split()
    retval = ""

    bbn = d.getVar('BB_NO_NETWORK')
    if bbn not in (None, '0', '1'):
        return 'BB_NO_NETWORK should be "0" or "1", but it is "%s"' % bbn

    # Only check connectivity if network enabled and the
    # CONNECTIVITY_CHECK_URIS are set
    network_enabled = not (bbn == '1')
    check_enabled = len(test_uris)
    if check_enabled and network_enabled:
        # Take a copy of the data store and unset MIRRORS and PREMIRRORS
        # so that only the configured URIs themselves are probed.
        data = bb.data.createCopy(d)
        data.delVar('PREMIRRORS')
        data.delVar('MIRRORS')
        try:
            fetcher = bb.fetch2.Fetch(test_uris, data)
            # checkstatus() probes the URIs without downloading payloads.
            fetcher.checkstatus()
        except Exception as err:
            # Allow the message to be configured so that users can be
            # pointed to a support mechanism.
            msg = data.getVar('CONNECTIVITY_CHECK_MSG') or ""
            if len(msg) == 0:
                msg = "%s.\n" % err
                msg += "    Please ensure your host's network is configured correctly,\n"
                msg += "    or set BB_NO_NETWORK = \"1\" to disable network access if\n"
                msg += "    all required sources are on local disk.\n"
            retval = msg

    return retval
401 | |||
def check_supported_distro(sanity_data):
    """Warn when the host distribution is not in SANITY_TESTED_DISTROS.

    Always returns None; problems are reported with bb.warn().  The check
    is skipped entirely when SANITY_TESTED_DISTROS is unset/empty.
    """
    from fnmatch import fnmatch

    tested_distros = sanity_data.getVar('SANITY_TESTED_DISTROS')
    if not tested_distros:
        return

    try:
        distro = oe.lsb.distro_identifier()
    except Exception:
        distro = None

    if not distro:
        bb.warn('Host distribution could not be determined; you may possibly experience unexpected failures. It is recommended that you use a tested distribution.')
        # BUGFIX: without this return, execution fell through to
        # fnmatch(None, ...) below, which raises TypeError whenever the
        # distro cannot be identified.
        return

    # SANITY_TESTED_DISTROS holds literal "\n"-separated fnmatch patterns.
    for supported in [x.strip() for x in tested_distros.split('\\n')]:
        if fnmatch(distro, supported):
            return

    bb.warn('Host distribution "%s" has not been validated with this version of the build system; you may possibly experience unexpected failures. It is recommended that you use a tested distribution.' % distro)
422 | |||
# Checks we should only make if MACHINE is set correctly
def check_sanity_validmachine(sanity_data):
    """Validate the MACHINE/tune configuration.

    Returns a (possibly empty) string of accumulated error messages for:
    unset TUNE_ARCH, unset TARGET_OS, duplicated PACKAGE_ARCHS entries,
    and a TUNE_PKGARCH that is missing from PACKAGE_ARCHS.
    """
    messages = []

    # Check TUNE_ARCH is set
    if sanity_data.getVar('TUNE_ARCH') == 'INVALID':
        messages.append('TUNE_ARCH is unset. Please ensure your MACHINE configuration includes a valid tune configuration file which will set this correctly.\n')

    # Check TARGET_OS is set
    if sanity_data.getVar('TARGET_OS') == 'INVALID':
        messages.append('Please set TARGET_OS directly, or choose a MACHINE or DISTRO that does so.\n')

    # Check that we don't have duplicate entries in PACKAGE_ARCHS & that TUNE_PKGARCH is in PACKAGE_ARCHS
    pkgarchs = sanity_data.getVar('PACKAGE_ARCHS')
    tunepkg = sanity_data.getVar('TUNE_PKGARCH')
    defaulttune = sanity_data.getVar('DEFAULTTUNE')

    seen = set()
    dups = []
    tunefound = False
    for pa in pkgarchs.split():
        if pa in seen:
            dups.append(pa)
        else:
            seen.add(pa)
        if pa == tunepkg:
            tunefound = True

    if dups:
        messages.append("Error, the PACKAGE_ARCHS variable contains duplicates. The following archs are listed more than once: %s" % " ".join(dups))

    if not tunefound:
        messages.append("Error, the PACKAGE_ARCHS variable (%s) for DEFAULTTUNE (%s) does not contain TUNE_PKGARCH (%s)." % (pkgarchs, defaulttune, tunepkg))

    return "".join(messages)
458 | |||
# Patch before 2.7 can't handle all the features in git-style diffs. Some
# patches may incorrectly apply, and others won't apply at all.
def check_patch_version(sanity_data):
    """Return an error string when the host 'patch' is older than 2.7,
    None when it is new enough (or an execution-failure message)."""
    from distutils.version import LooseVersion
    import re, subprocess

    try:
        result = subprocess.check_output(["patch", "--version"], stderr=subprocess.STDOUT).decode('utf-8')
        # First output line is e.g. "GNU patch 2.7.6"; grab the digits.
        version = re.search(r"[0-9.]+", result.splitlines()[0]).group()
        if LooseVersion(version) < LooseVersion("2.7"):
            return "Your version of patch is older than 2.7 and has bugs which will break builds. Please install a newer version of patch.\n"
        else:
            return None
    except subprocess.CalledProcessError as e:
        return "Unable to execute patch --version, exit code %d:\n%s\n" % (e.returncode, e.output)
474 | |||
# Unpatched versions of make 3.82 are known to be broken. See GNU Savannah Bug 30612.
# Use a modified reproducer from http://savannah.gnu.org/bugs/?30612 to validate.
def check_make_version(sanity_data):
    """Return an error string when the host make is a broken (unpatched)
    3.82, otherwise None.

    For make 3.82 a small reproducer makefile is written to the current
    directory and executed; the archive-member bug makes unpatched 3.82
    fail on it.  The reproducer and its artifacts are always cleaned up.
    """
    from distutils.version import LooseVersion
    import subprocess

    try:
        result = subprocess.check_output(['make', '--version'], stderr=subprocess.STDOUT).decode('utf-8')
    except subprocess.CalledProcessError as e:
        return "Unable to execute make --version, exit code %d\n%s\n" % (e.returncode, e.output)
    version = result.split()[2]
    if LooseVersion(version) != LooseVersion("3.82"):
        # Only 3.82 is suspect; everything else passes.
        return None

    # Construct a test file.  BUGFIX: recipe command lines must begin with
    # a literal TAB -- the previous version wrote a leading space, which
    # make rejects with "missing separator", making every 3.82 (patched or
    # not) be reported as broken.
    with open("makefile_test", "w") as f:
        f.write("makefile_test.a: makefile_test_a.c makefile_test_b.c makefile_test.a( makefile_test_a.c makefile_test_b.c)\n")
        f.write("\n")
        f.write("makefile_test_a.c:\n")
        f.write("\ttouch $@\n")
        f.write("\n")
        f.write("makefile_test_b.c:\n")
        f.write("\ttouch $@\n")

    # Check if make 3.82 has been patched
    try:
        subprocess.check_call(['make', '-f', 'makefile_test'])
    except subprocess.CalledProcessError as e:
        return "Your version of make 3.82 is broken. Please revert to 3.81 or install a patched version.\n"
    finally:
        os.remove("makefile_test")
        for leftover in ("makefile_test_a.c", "makefile_test_b.c", "makefile_test.a"):
            if os.path.exists(leftover):
                os.remove(leftover)
    return None
512 | |||
513 | |||
# Check if we're running on WSL (Windows Subsystem for Linux).
# WSLv1 is known not to work but WSLv2 should work properly as
# long as the VHDX file is optimized often, let the user know
# upfront.
# More information on installing WSLv2 at:
# https://docs.microsoft.com/en-us/windows/wsl/wsl2-install
def check_wsl(d):
    """Return an error string under WSLv1, warn under WSLv2, else None."""
    with open("/proc/version", "r") as f:
        for line in f:
            # WSLv1 kernels say "Microsoft"; WSLv2 kernels say "microsoft".
            if "Microsoft" in line:
                return "OpenEmbedded doesn't work under WSLv1, please upgrade to WSLv2 if you want to run builds on Windows"
            if "microsoft" in line:
                bb.warn("You are running bitbake under WSLv2, this works properly but you should optimize your VHDX file eventually to avoid running out of storage space")
    return None
529 | |||
# Require at least gcc version 6.0.
#
# This can be fixed on CentOS-7 with devtoolset-6+
# https://www.softwarecollections.org/en/scls/rhscl/devtoolset-6/
#
# A less invasive fix is with scripts/install-buildtools (or with user
# built buildtools-extended-tarball)
#
def check_gcc_version(sanity_data):
    """Return an error string when BUILD_CC is gcc older than 6.0,
    otherwise None (non-gcc compilers are not checked here)."""
    from distutils.version import LooseVersion
    import subprocess  # NOTE(review): appears unused in this function

    build_cc, version = oe.utils.get_host_compiler_version(sanity_data)
    if build_cc.strip() == "gcc":
        if LooseVersion(version) < LooseVersion("6.0"):
            return "Your version of gcc is older than 6.0 and will break builds. Please install a newer version of gcc (you could use the project's buildtools-extended-tarball or use scripts/install-buildtools).\n"
    return None
547 | |||
# Tar version 1.24 and onwards handle overwriting symlinks correctly
# but earlier versions do not; this needs to work properly for sstate
# Version 1.28 is needed so opkg-build works correctly when reproducibile builds are enabled
def check_tar_version(sanity_data):
    """Return an error string when the host tar is older than 1.28, else None."""
    from distutils.version import LooseVersion
    import subprocess
    try:
        output = subprocess.check_output(["tar", "--version"], stderr=subprocess.STDOUT).decode('utf-8')
    except subprocess.CalledProcessError as exc:
        return "Unable to execute tar --version, exit code %d\n%s\n" % (exc.returncode, exc.output)
    # "tar (GNU tar) X.YZ" -- version is the fourth whitespace-separated token.
    version = output.split()[3]
    if LooseVersion(version) < LooseVersion("1.28"):
        return "Your version of tar is older than 1.28 and does not have the support needed to enable reproducible builds. Please install a newer version of tar (you could use the project's buildtools-tarball from our last release or use scripts/install-buildtools).\n"
    return None
562 | |||
# We use git parameters and functionality only found in 1.7.8 or later
# The kernel tools assume git >= 1.8.3.1 (verified needed > 1.7.9.5) see #6162
# The git fetcher also had workarounds for git < 1.7.9.2 which we've dropped
def check_git_version(sanity_data):
    """Return an error string when the host git is older than 1.8.3.1, else None."""
    from distutils.version import LooseVersion
    import subprocess
    try:
        output = subprocess.check_output(["git", "--version"], stderr=subprocess.DEVNULL).decode('utf-8')
    except subprocess.CalledProcessError as exc:
        return "Unable to execute git --version, exit code %d\n%s\n" % (exc.returncode, exc.output)
    # "git version X.Y.Z" -- version is the third whitespace-separated token.
    version = output.split()[2]
    if LooseVersion(version) < LooseVersion("1.8.3.1"):
        return "Your version of git is older than 1.8.3.1 and has bugs which will break builds. Please install a newer version of git.\n"
    return None
577 | |||
# Check the required perl modules which may not be installed by default
def check_perl_modules(sanity_data):
    """Return an error string listing required perl modules that fail to
    load, or None when everything is available."""
    import subprocess
    missing = []
    errresult = ''
    for module in ("Text::ParseWords", "Thread::Queue", "Data::Dumper"):
        try:
            subprocess.check_output(["perl", "-e", "use %s" % module])
        except subprocess.CalledProcessError as e:
            errresult += bytes.decode(e.output)
            missing.append(module)
    if missing:
        # The trailing space after the module list matches the historic format.
        return "Required perl module(s) not found: %s\n\n%s\n" % (' '.join(missing) + ' ', errresult)
    return None
593 | |||
def sanity_check_conffiles(d):
    """Run the registered config-file migration functions.

    BBLAYERS_CONF_UPDATE_FUNCS holds entries of the form
    "conffile:current_version_var:required_version_var:update_func".  For
    every entry whose conf file exists and whose current version is set but
    differs from the required one, the update function is executed: it
    either migrates the file in place or raises NotImplementedError with
    advice, which is converted to a fatal error here.
    """
    funcs = d.getVar('BBLAYERS_CONF_UPDATE_FUNCS').split()
    for func in funcs:
        conffile, current_version, required_version, func = func.split(":")
        if check_conf_exists(conffile, d) and d.getVar(current_version) is not None and \
                d.getVar(current_version) != d.getVar(required_version):
            try:
                bb.build.exec_func(func, d)
            except NotImplementedError as e:
                bb.fatal(str(e))
            # Flag that configuration changed so the caller re-parses it.
            d.setVar("BB_INVALIDCONF", True)
605 | |||
def sanity_handle_abichanges(status, d):
    #
    # Check the 'ABI' of TMPDIR
    #
    # Compares the OELAYOUT_ABI of this release with the version recorded
    # in SANITY_ABIFILE inside TMPDIR.  Known incompatible transitions get
    # a specific explanation; any other mismatch gets a generic error.
    # All problems are reported via status.addresult().
    import subprocess  # NOTE(review): appears unused in this function

    current_abi = d.getVar('OELAYOUT_ABI')
    abifile = d.getVar('SANITY_ABIFILE')
    if os.path.exists(abifile):
        with open(abifile, "r") as f:
            abi = f.read().strip()
        if not abi.isdigit():
            # Unparseable recorded ABI: assume current and rewrite the file.
            with open(abifile, "w") as f:
                f.write(current_abi)
        elif int(abi) <= 11 and current_abi == "12":
            status.addresult("The layout of TMPDIR changed for Recipe Specific Sysroots.\nConversion doesn't make sense and this change will rebuild everything so please delete TMPDIR (%s).\n" % d.getVar("TMPDIR"))
        elif int(abi) <= 13 and current_abi == "14":
            status.addresult("TMPDIR changed to include path filtering from the pseudo database.\nIt is recommended to use a clean TMPDIR with the new pseudo path filtering so TMPDIR (%s) would need to be removed to continue.\n" % d.getVar("TMPDIR"))

        elif (abi != current_abi):
            # Code to convert from one ABI to another could go here if possible.
            status.addresult("Error, TMPDIR has changed its layout version number (%s to %s) and you need to either rebuild, revert or adjust it at your own risk.\n" % (abi, current_abi))
    else:
        # First run in this TMPDIR: record the current ABI.
        with open(abifile, "w") as f:
            f.write(current_abi)
631 | |||
def check_sanity_sstate_dir_change(sstate_dir, data):
    """Sanity checks to run when the value of SSTATE_DIR changes.

    Currently verifies that SSTATE_DIR is not on a filesystem with a
    limited filename length (e.g. eCryptFS).  Returns an error string,
    augmented with an SSTATE_MIRRORS suggestion on permission errors,
    or "" when everything is fine.
    """
    # Check that SSTATE_DIR isn't on a filesystem with limited filename length (eg. eCryptFS)
    testmsg = ""
    if sstate_dir != "":
        testmsg = check_create_long_filename(sstate_dir, "SSTATE_DIR")
        # If we don't have permissions to SSTATE_DIR, suggest the user set it as an SSTATE_MIRRORS
    try:
        err = testmsg.split(': ')[1].strip()
        if err == "Permission denied.":
            testmsg = testmsg + "You could try using %s in SSTATE_MIRRORS rather than as an SSTATE_CACHE.\n" % (sstate_dir)
    except IndexError:
        pass
    return testmsg
647 | |||
def check_sanity_version_change(status, d):
    # Sanity checks to be done when SANITY_VERSION or NATIVELSBSTRING changes
    # In other words, these tests run once in a given build directory and then
    # never again until the sanity version or host distribution id/version changes.

    # Check the python install is complete. Examples that are often removed in
    # minimal installations: glib-2.0-native requires xml.parsers.expat and icu
    # requires distutils.sysconfig.
    try:
        import xml.parsers.expat
        import distutils.sysconfig
    except ImportError as e:
        status.addresult('Your Python 3 is not a full install. Please install the module %s (see the Getting Started guide for further information).\n' % e.name)

    # Host tool version checks; each helper returns an error message or an
    # empty string, and addresult() ignores empty results.
    status.addresult(check_gcc_version(d))
    status.addresult(check_make_version(d))
    status.addresult(check_patch_version(d))
    status.addresult(check_tar_version(d))
    status.addresult(check_git_version(d))
    status.addresult(check_perl_modules(d))
    status.addresult(check_wsl(d))

    # Collect all missing host utilities and report them in a single message.
    missing = []

    if not check_app_exists("${MAKE}", d):
        missing.append("GNU make")

    if not check_app_exists('${BUILD_CC}', d):
        missing.append("C Compiler (%s)" % d.getVar("BUILD_CC"))

    if not check_app_exists('${BUILD_CXX}', d):
        missing.append("C++ Compiler (%s)" % d.getVar("BUILD_CXX"))

    required_utilities = d.getVar('SANITY_REQUIRED_UTILITIES')

    for util in required_utilities.split():
        if not check_app_exists(util, d):
            missing.append(util)

    if missing:
        status.addresult("Please install the following missing utilities: %s\n" % ','.join(missing))

    assume_provided = d.getVar('ASSUME_PROVIDED').split()
    # Check user doesn't have ASSUME_PROVIDED = instead of += in local.conf
    if "diffstat-native" not in assume_provided:
        status.addresult('Please use ASSUME_PROVIDED +=, not ASSUME_PROVIDED = in your local.conf\n')

    # Check that TMPDIR isn't on a filesystem with limited filename length (eg. eCryptFS)
    import stat
    tmpdir = d.getVar('TMPDIR')
    status.addresult(check_create_long_filename(tmpdir, "TMPDIR"))
    tmpdirmode = os.stat(tmpdir).st_mode
    if (tmpdirmode & stat.S_ISGID):
        status.addresult("TMPDIR is setgid, please don't build in a setgid directory")
    if (tmpdirmode & stat.S_ISUID):
        status.addresult("TMPDIR is setuid, please don't build in a setuid directory")

    # Check that a user isn't building in a path in PSEUDO_IGNORE_PATHS
    pseudoignorepaths = d.getVar('PSEUDO_IGNORE_PATHS', expand=True).split(",")
    workdir = d.getVar('WORKDIR', expand=True)
    for i in pseudoignorepaths:
        if i and workdir.startswith(i):
            status.addresult("You are building in a path included in PSEUDO_IGNORE_PATHS " + str(i) + " please locate the build outside this path.\n")

    # Check if PSEUDO_IGNORE_PATHS and paths under pseudo control overlap
    pseudo_control_dir = "${D},${PKGD},${PKGDEST},${IMAGEROOTFS},${SDK_OUTPUT}"
    pseudocontroldir = d.expand(pseudo_control_dir).split(",")
    for i in pseudoignorepaths:
        for j in pseudocontroldir:
            if i and j and j.startswith(i):
                status.addresult("A path included in PSEUDO_IGNORE_PATHS " + str(i) + " and the path " + str(j) + " overlap and this will break pseudo permission and ownership tracking. Please set the path " + str(j) + " to a different directory which does not overlap with pseudo controlled directories. \n")

    # Some third-party software apparently relies on chmod etc. being suid root (!!)
    suid_check_bins = "chown chmod mknod".split()
    for bin_cmd in suid_check_bins:
        bin_path = bb.utils.which(os.environ["PATH"], bin_cmd)
        if bin_path:
            bin_stat = os.stat(bin_path)
            if bin_stat.st_uid == 0 and bin_stat.st_mode & stat.S_ISUID:
                status.addresult('%s has the setuid bit set. This interferes with pseudo and may cause other issues that break the build process.\n' % bin_path)

    # Check that we can fetch from various network transports
    netcheck = check_connectivity(d)
    status.addresult(netcheck)
    if netcheck:
        status.network_error = True

    # On multilib hosts a 32-bit libc without matching headers breaks 32-bit
    # native builds; setting NO32LIBS suppresses this check.
    nolibs = d.getVar('NO32LIBS')
    if not nolibs:
        lib32path = '/lib'
        if os.path.exists('/lib64') and ( os.path.islink('/lib64') or os.path.islink('/lib') ):
            lib32path = '/lib32'

        if os.path.exists('%s/libc.so.6' % lib32path) and not os.path.exists('/usr/include/gnu/stubs-32.h'):
            status.addresult("You have a 32-bit libc, but no 32-bit headers. You must install the 32-bit libc headers.\n")

    # An empty element, '.' or './' in BBPATH makes file lookups relative to
    # the current working directory, which is unsafe.
    bbpaths = d.getVar('BBPATH').split(":")
    if ("." in bbpaths or "./" in bbpaths or "" in bbpaths):
        status.addresult("BBPATH references the current directory, either through " \
                        "an empty entry, a './' or a '.'.\n\t This is unsafe and means your "\
                        "layer configuration is adding empty elements to BBPATH.\n\t "\
                        "Please check your layer.conf files and other BBPATH " \
                        "settings to remove the current working directory " \
                        "references.\n" \
                        "Parsed BBPATH is" + str(bbpaths))

    oes_bb_conf = d.getVar('OES_BITBAKE_CONF')
    if not oes_bb_conf:
        status.addresult('You are not using the OpenEmbedded version of conf/bitbake.conf. This means your environment is misconfigured, in particular check BBPATH.\n')

    # The length of TMPDIR can't be longer than 410
    status.addresult(check_path_length(tmpdir, "TMPDIR", 410))

    # Check that TMPDIR isn't located on nfs
    status.addresult(check_not_nfs(tmpdir, "TMPDIR"))

    # Check for case-insensitive file systems (such as Linux in Docker on
    # macOS with default HFS+ file system)
    status.addresult(check_case_sensitive(tmpdir, "TMPDIR"))
771 | |||
def sanity_check_locale(d):
    """
    Bitbake switches the locale to en_US.UTF-8, so verify that this locale
    is actually available on the host; raise a sanity error otherwise.
    """
    import locale
    try:
        locale.setlocale(locale.LC_ALL, "en_US.UTF-8")
    except locale.Error:
        raise_sanity_error("Your system needs to support the en_US.UTF-8 locale.", d)
781 | |||
def check_sanity_everybuild(status, d):
    import os, stat
    # Sanity tests which test the users environment so need to run at each build (or are so cheap
    # it makes sense to always run them).

    if 0 == os.getuid():
        raise_sanity_error("Do not use Bitbake as root.", d)

    # Check the Python version, we now have a minimum of Python 3.6
    import sys
    if sys.hexversion < 0x030600F0:
        status.addresult('The system requires at least Python 3.6 to run. Please update your Python interpreter.\n')

    # Check the bitbake version meets minimum requirements.
    # NOTE: distutils is deprecated in newer Python; kept here to preserve the
    # existing version-comparison semantics.
    from distutils.version import LooseVersion
    minversion = d.getVar('BB_MIN_VERSION')
    if (LooseVersion(bb.__version__) < LooseVersion(minversion)):
        status.addresult('Bitbake version %s is required and version %s was found\n' % (minversion, bb.__version__))

    sanity_check_locale(d)

    paths = d.getVar('PATH').split(":")
    if "." in paths or "./" in paths or "" in paths:
        status.addresult("PATH contains '.', './' or '' (empty element), which will break the build, please remove this.\nParsed PATH is " + str(paths) + "\n")

    # Check if bitbake is present in PATH environment variable
    bb_check = bb.utils.which(d.getVar('PATH'), 'bitbake')
    if not bb_check:
        bb.warn("bitbake binary is not found in PATH, did you source the script?")

    # Check whether 'inherit' directive is found (used for a class to inherit)
    # in conf file it's supposed to be uppercase INHERIT
    inherit = d.getVar('inherit')
    if inherit:
        status.addresult("Please don't use inherit directive in your local.conf. The directive is supposed to be used in classes and recipes only to inherit of bbclasses. Here INHERIT should be used.\n")

    # Check that the DISTRO is valid, if set
    # need to take into account DISTRO renaming DISTRO
    distro = d.getVar('DISTRO')
    if distro and distro != "nodistro":
        if not ( check_conf_exists("conf/distro/${DISTRO}.conf", d) or check_conf_exists("conf/distro/include/${DISTRO}.inc", d) ):
            status.addresult("DISTRO '%s' not found. Please set a valid DISTRO in your local.conf\n" % d.getVar("DISTRO"))

    # Check that these variables don't use tilde-expansion as we don't do that.
    # Guard against unset variables so an unset value doesn't raise instead of
    # being reported by the checks below.
    for v in ("TMPDIR", "DL_DIR", "SSTATE_DIR"):
        if (d.getVar(v) or "").startswith("~"):
            status.addresult("%s uses ~ but Bitbake will not expand this, use an absolute path or variables." % v)

    # Check that DL_DIR is set, exists and is writable. In theory, we should never even hit the check if DL_DIR isn't
    # set, since so much relies on it being set.
    dldir = d.getVar('DL_DIR')
    if not dldir:
        status.addresult("DL_DIR is not set. Your environment is misconfigured, check that DL_DIR is set, and if the directory exists, that it is writable. \n")
    else:
        # Only probe the directory when DL_DIR is actually set; passing None
        # to os.path.exists()/check_symlink() would raise a TypeError and
        # abort the sanity run instead of reporting the problem.
        if os.path.exists(dldir) and not os.access(dldir, os.W_OK):
            status.addresult("DL_DIR: %s exists but you do not appear to have write access to it. \n" % dldir)
        check_symlink(dldir, d)

    # Check that the MACHINE is valid, if it is set
    machinevalid = True
    if d.getVar('MACHINE'):
        if not check_conf_exists("conf/machine/${MACHINE}.conf", d):
            status.addresult('MACHINE=%s is invalid. Please set a valid MACHINE in your local.conf, environment or other configuration file.\n' % (d.getVar('MACHINE')))
            machinevalid = False
        else:
            status.addresult(check_sanity_validmachine(d))
    else:
        status.addresult('Please set a MACHINE in your local.conf or environment\n')
        machinevalid = False
    if machinevalid:
        status.addresult(check_toolchain(d))

    # Check that the SDKMACHINE is valid, if it is set
    if d.getVar('SDKMACHINE'):
        if not check_conf_exists("conf/machine-sdk/${SDKMACHINE}.conf", d):
            status.addresult('Specified SDKMACHINE value is not valid\n')
        elif d.getVar('SDK_ARCH', False) == "${BUILD_ARCH}":
            status.addresult('SDKMACHINE is set, but SDK_ARCH has not been changed as a result - SDKMACHINE may have been set too late (e.g. in the distro configuration)\n')

    # If SDK_VENDOR looks like "-my-sdk" then the triples are badly formed so fail early
    sdkvendor = d.getVar("SDK_VENDOR")
    if not (sdkvendor.startswith("-") and sdkvendor.count("-") == 1):
        status.addresult("SDK_VENDOR should be of the form '-foosdk' with a single dash; found '%s'\n" % sdkvendor)

    check_supported_distro(d)

    # Temporarily set the umask to probe its value, then restore it.
    omask = os.umask(0o022)
    if omask & 0o755:
        status.addresult("Please use a umask which allows a+rx and u+rwx\n")
    os.umask(omask)

    if d.getVar('TARGET_ARCH') == "arm":
        # This path is no longer user-readable in modern (very recent) Linux
        try:
            if os.path.exists("/proc/sys/vm/mmap_min_addr"):
                with open("/proc/sys/vm/mmap_min_addr", "r") as f:
                    if (int(f.read().strip()) > 65536):
                        status.addresult("/proc/sys/vm/mmap_min_addr is not <= 65536. This will cause problems with qemu so please fix the value (as root).\n\nTo fix this in later reboots, set vm.mmap_min_addr = 65536 in /etc/sysctl.conf.\n")
        except Exception:
            # Best effort: the file may be unreadable or hold unexpected data;
            # skip the check rather than abort the build in that case.
            pass

    oeroot = d.getVar('COREBASE')
    if oeroot.find('+') != -1:
        status.addresult("Error, you have an invalid character (+) in your COREBASE directory path. Please move the installation to a directory which doesn't include any + characters.")
    if oeroot.find('@') != -1:
        status.addresult("Error, you have an invalid character (@) in your COREBASE directory path. Please move the installation to a directory which doesn't include any @ characters.")
    if oeroot.find(' ') != -1:
        status.addresult("Error, you have a space in your COREBASE directory path. Please move the installation to a directory which doesn't include a space since autotools doesn't support this.")

    # Check the format of MIRRORS, PREMIRRORS and SSTATE_MIRRORS
    import re
    # Import the submodule explicitly (and once, outside the loop): a bare
    # "import urllib" does not guarantee urllib.parse is loaded.
    import urllib.parse
    mirror_vars = ['MIRRORS', 'PREMIRRORS', 'SSTATE_MIRRORS']
    protocols = ['http', 'ftp', 'file', 'https', \
                 'git', 'gitsm', 'hg', 'osc', 'p4', 'svn', \
                 'bzr', 'cvs', 'npm', 'sftp', 'ssh', 's3' ]
    for mirror_var in mirror_vars:
        mirrors = (d.getVar(mirror_var) or '').replace('\\n', ' ').split()

        # Split into (pattern, mirror) pairs
        if len(mirrors) % 2 != 0:
            bb.warn('Invalid mirror variable value for %s: %s, should contain paired members.' % (mirror_var, str(mirrors)))
            continue
        mirrors = list(zip(*[iter(mirrors)]*2))

        for mirror_entry in mirrors:
            pattern, mirror = mirror_entry

            decoded = bb.fetch2.decodeurl(pattern)
            try:
                pattern_scheme = re.compile(decoded[0])
            except re.error as exc:
                bb.warn('Invalid scheme regex (%s) in %s; %s' % (pattern, mirror_var, mirror_entry))
                continue

            if not any(pattern_scheme.match(protocol) for protocol in protocols):
                bb.warn('Invalid protocol (%s) in %s: %s' % (decoded[0], mirror_var, mirror_entry))
                continue

            if not any(mirror.startswith(protocol + '://') for protocol in protocols):
                bb.warn('Invalid protocol in %s: %s' % (mirror_var, mirror_entry))
                continue

            if mirror.startswith('file://'):
                check_symlink(urllib.parse.urlparse(mirror).path, d)
                # SSTATE_MIRROR ends with a /PATH string
                if mirror.endswith('/PATH'):
                    # remove /PATH$ from SSTATE_MIRROR to get a working
                    # base directory path
                    mirror_base = urllib.parse.urlparse(mirror[:-1*len('/PATH')]).path
                    check_symlink(mirror_base, d)

    # Check that TMPDIR hasn't changed location since the last time we were run
    tmpdir = d.getVar('TMPDIR')
    checkfile = os.path.join(tmpdir, "saved_tmpdir")
    if os.path.exists(checkfile):
        with open(checkfile, "r") as f:
            saved_tmpdir = f.read().strip()
        if (saved_tmpdir != tmpdir):
            status.addresult("Error, TMPDIR has changed location. You need to either move it back to %s or delete it and rebuild\n" % saved_tmpdir)
    else:
        bb.utils.mkdirhier(tmpdir)
        # Remove setuid, setgid and sticky bits from TMPDIR
        try:
            os.chmod(tmpdir, os.stat(tmpdir).st_mode & ~ stat.S_ISUID)
            os.chmod(tmpdir, os.stat(tmpdir).st_mode & ~ stat.S_ISGID)
            os.chmod(tmpdir, os.stat(tmpdir).st_mode & ~ stat.S_ISVTX)
        except OSError as exc:
            bb.warn("Unable to chmod TMPDIR: %s" % exc)
        with open(checkfile, "w") as f:
            f.write(tmpdir)

    # If /bin/sh is a symlink, check that it points to dash or bash
    if os.path.islink('/bin/sh'):
        real_sh = os.path.realpath('/bin/sh')
        # Due to update-alternatives, the shell name may take various
        # forms, such as /bin/dash, bin/bash, /bin/bash.bash ...
        if '/dash' not in real_sh and '/bash' not in real_sh:
            status.addresult("Error, /bin/sh links to %s, must be dash or bash\n" % real_sh)
963 | |||
def check_sanity(sanity_data):
    # Top-level sanity check entry point: run the per-build checks, plus the
    # more expensive checks when SANITY_VERSION, SSTATE_DIR or the host
    # distribution changed, and raise a sanity error if anything failed.

    class SanityStatus(object):
        # Accumulates check failure messages; network_error is set by the
        # connectivity checks so failures can be reported appropriately.
        def __init__(self):
            self.messages = ""
            self.network_error = False

        def addresult(self, message):
            # Checks return an empty/None message when nothing is wrong.
            if message:
                self.messages = self.messages + message

    status = SanityStatus()

    tmpdir = sanity_data.getVar('TMPDIR')
    sstate_dir = sanity_data.getVar('SSTATE_DIR')

    check_symlink(sstate_dir, sanity_data)

    # Read the values recorded by the previous run (if any) so changes in the
    # sanity version, SSTATE_DIR or host distribution can be detected.
    # (The TMPDIR line in the file is informational; TMPDIR moves are detected
    # via saved_tmpdir in check_sanity_everybuild instead.)
    last_sanity_version = 0
    last_sstate_dir = ""
    last_nativelsbstr = ""
    sanityverfile = sanity_data.expand("${TOPDIR}/cache/sanity_info")
    if os.path.exists(sanityverfile):
        with open(sanityverfile, 'r') as f:
            for line in f:
                if line.startswith('SANITY_VERSION'):
                    last_sanity_version = int(line.split()[1])
                if line.startswith('SSTATE_DIR'):
                    last_sstate_dir = line.split()[1]
                if line.startswith('NATIVELSBSTRING'):
                    last_nativelsbstr = line.split()[1]

    check_sanity_everybuild(status, sanity_data)

    sanity_version = int(sanity_data.getVar('SANITY_VERSION') or 1)
    # NATIVELSBSTRING var may have been overridden with "universal", so
    # get actual host distribution id and version
    nativelsbstr = lsb_distro_identifier(sanity_data)
    if last_sanity_version < sanity_version or last_nativelsbstr != nativelsbstr:
        check_sanity_version_change(status, sanity_data)
        status.addresult(check_sanity_sstate_dir_change(sstate_dir, sanity_data))
    else:
        if last_sstate_dir != sstate_dir:
            status.addresult(check_sanity_sstate_dir_change(sstate_dir, sanity_data))

    # Only record the new state when every check passed, so failing checks
    # are re-run on the next build.
    if os.path.exists(os.path.dirname(sanityverfile)) and not status.messages:
        with open(sanityverfile, 'w') as f:
            f.write("SANITY_VERSION %s\n" % sanity_version)
            f.write("TMPDIR %s\n" % tmpdir)
            f.write("SSTATE_DIR %s\n" % sstate_dir)
            f.write("NATIVELSBSTRING %s\n" % nativelsbstr)

    sanity_handle_abichanges(status, sanity_data)

    if status.messages != "":
        raise_sanity_error(sanity_data.expand(status.messages), sanity_data, status.network_error)
1024 | |||
# Create a copy of the datastore and finalise it to ensure appends and
# overrides are set - the datastore has yet to be finalised at ConfigParsed
def copy_data(e):
    """Return a finalised copy of the event's datastore."""
    data_copy = bb.data.createCopy(e.data)
    data_copy.finalize()
    return data_copy
1031 | |||
# Run the conf file sanity/version checks every time the base configuration
# is parsed (ConfigParsed fires on each parse and reparse).
addhandler config_reparse_eventhandler
config_reparse_eventhandler[eventmask] = "bb.event.ConfigParsed"
python config_reparse_eventhandler() {
    sanity_check_conffiles(e.data)
}
1037 | |||
# Dispatch the two sanity-related events: SanityCheck runs the full check
# suite, NetworkTest only probes connectivity. Both operate on a finalised
# copy of the datastore (see copy_data).
addhandler check_sanity_eventhandler
check_sanity_eventhandler[eventmask] = "bb.event.SanityCheck bb.event.NetworkTest"
python check_sanity_eventhandler() {
    if bb.event.getName(e) == "SanityCheck":
        sanity_data = copy_data(e)
        check_sanity(sanity_data)
        # SANITY_USE_EVENTS signals that results should be reported via
        # events rather than direct failure output.
        if e.generateevents:
            sanity_data.setVar("SANITY_USE_EVENTS", "1")
        bb.event.fire(bb.event.SanityCheckPassed(), e.data)
    elif bb.event.getName(e) == "NetworkTest":
        sanity_data = copy_data(e)
        if e.generateevents:
            sanity_data.setVar("SANITY_USE_EVENTS", "1")
        # check_connectivity() returns a non-empty message on failure, so
        # fire NetworkTestFailed in that case and NetworkTestPassed otherwise.
        bb.event.fire(bb.event.NetworkTestFailed() if check_connectivity(sanity_data) else bb.event.NetworkTestPassed(), e.data)

    return
}