path: root/meta/classes-global/sanity.bbclass
Diffstat (limited to 'meta/classes-global/sanity.bbclass')
-rw-r--r--  meta/classes-global/sanity.bbclass  1114
1 file changed, 0 insertions(+), 1114 deletions(-)
diff --git a/meta/classes-global/sanity.bbclass b/meta/classes-global/sanity.bbclass
deleted file mode 100644
index 6934e071a3..0000000000
--- a/meta/classes-global/sanity.bbclass
+++ /dev/null
@@ -1,1114 +0,0 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7#
8# Sanity check the user's setup for common misconfigurations
9#
10
11SANITY_REQUIRED_UTILITIES ?= "patch diffstat git bzip2 tar \
12 gzip gawk chrpath wget cpio perl file which"
13
14def bblayers_conf_file(d):
15 return os.path.join(d.getVar('TOPDIR'), 'conf/bblayers.conf')
16
17def sanity_conf_read(fn):
18 with open(fn, 'r') as f:
19 lines = f.readlines()
20 return lines
21
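# Return the (index, line) of the first line matching 'pattern', or (None, None) if nothing matches.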
22def sanity_conf_find_line(pattern, lines):
23 import re
24 return next(((index, line)
25 for index, line in enumerate(lines)
26 if re.search(pattern, line)), (None, None))
27
28def sanity_conf_update(fn, lines, version_var_name, new_version):
29 index, line = sanity_conf_find_line(r"^%s" % version_var_name, lines)
30 lines[index] = '%s = "%d"\n' % (version_var_name, new_version)
31 with open(fn, "w") as f:
32 f.write(''.join(lines))
33
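# Hypothetical usage, bumping a version variable in place:
#   lines = sanity_conf_read(fn)
#   sanity_conf_update(fn, lines, 'LCONF_VERSION', 7)
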
34# Functions added to this variable MUST throw a NotImplementedError exception unless
35# they successfully changed the config version in the config file. Exceptions
36# are used since exec_func doesn't handle return values.
37BBLAYERS_CONF_UPDATE_FUNCS += " \
38 conf/bblayers.conf:LCONF_VERSION:LAYER_CONF_VERSION:oecore_update_bblayers \
39 conf/local.conf:CONF_VERSION:LOCALCONF_VERSION:oecore_update_localconf \
40 conf/site.conf:SCONF_VERSION:SITE_CONF_VERSION:oecore_update_siteconf \
41"
42
43SANITY_DIFF_TOOL ?= "diff -u"
44
45SANITY_LOCALCONF_SAMPLE ?= "${COREBASE}/meta*/conf/templates/default/local.conf.sample"
46python oecore_update_localconf() {
47 # Check we are using a valid local.conf
48 current_conf = d.getVar('CONF_VERSION')
49 conf_version = d.getVar('LOCALCONF_VERSION')
50
51 failmsg = """Your version of local.conf was generated from an older/newer version of
52local.conf.sample and there have been updates made to this file. Please compare the two
53files and merge any changes before continuing.
54
55Matching the version numbers will remove this message.
56
57\"${SANITY_DIFF_TOOL} conf/local.conf ${SANITY_LOCALCONF_SAMPLE}\"
58
59is a good way to visualise the changes."""
60 failmsg = d.expand(failmsg)
61
62 raise NotImplementedError(failmsg)
63}
64
65SANITY_SITECONF_SAMPLE ?= "${COREBASE}/meta*/conf/templates/default/site.conf.sample"
66python oecore_update_siteconf() {
67 # If we have a site.conf, check it's valid
68 current_sconf = d.getVar('SCONF_VERSION')
69 sconf_version = d.getVar('SITE_CONF_VERSION')
70
71 failmsg = """Your version of site.conf was generated from an older version of
72site.conf.sample and there have been updates made to this file. Please compare the two
73files and merge any changes before continuing.
74
75Matching the version numbers will remove this message.
76
77\"${SANITY_DIFF_TOOL} conf/site.conf ${SANITY_SITECONF_SAMPLE}\"
78
79is a good way to visualise the changes."""
80 failmsg = d.expand(failmsg)
81
82 raise NotImplementedError(failmsg)
83}
84
85SANITY_BBLAYERCONF_SAMPLE ?= "${COREBASE}/meta*/conf/templates/default/bblayers.conf.sample"
86python oecore_update_bblayers() {
87 # bblayers.conf is out of date, so see if we can resolve that
88
89 current_lconf = int(d.getVar('LCONF_VERSION'))
90 lconf_version = int(d.getVar('LAYER_CONF_VERSION'))
91
92 failmsg = """Your version of bblayers.conf has the wrong LCONF_VERSION (has ${LCONF_VERSION}, expecting ${LAYER_CONF_VERSION}).
93Please compare your file against bblayers.conf.sample and merge any changes before continuing.
94"${SANITY_DIFF_TOOL} conf/bblayers.conf ${SANITY_BBLAYERCONF_SAMPLE}"
95
96is a good way to visualise the changes."""
97 failmsg = d.expand(failmsg)
98
99 if not current_lconf:
100 raise NotImplementedError(failmsg)
101
102 lines = []
103
104 if current_lconf < 4:
105 raise NotImplementedError(failmsg)
106
107 bblayers_fn = bblayers_conf_file(d)
108 lines = sanity_conf_read(bblayers_fn)
109
110 if current_lconf == 4 and lconf_version > 4:
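# LCONF_VERSION 4 -> 5: TOPDIR needs to be on BBPATH, so prepend it to any existing
# BBPATH assignment, or insert a new BBPATH line just before BBFILES.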
111 topdir_var = '$' + '{TOPDIR}'
112 index, bbpath_line = sanity_conf_find_line('BBPATH', lines)
113 if bbpath_line:
114 start = bbpath_line.find('"')
115 if start != -1 and (len(bbpath_line) != (start + 1)):
116 if bbpath_line[start + 1] == '"':
117 lines[index] = (bbpath_line[:start + 1] +
118 topdir_var + bbpath_line[start + 1:])
119 else:
120 if not topdir_var in bbpath_line:
121 lines[index] = (bbpath_line[:start + 1] +
122 topdir_var + ':' + bbpath_line[start + 1:])
123 else:
124 raise NotImplementedError(failmsg)
125 else:
126 index, bbfiles_line = sanity_conf_find_line('BBFILES', lines)
127 if bbfiles_line:
128 lines.insert(index, 'BBPATH = "' + topdir_var + '"\n')
129 else:
130 raise NotImplementedError(failmsg)
131
132 current_lconf += 1
133 sanity_conf_update(bblayers_fn, lines, 'LCONF_VERSION', current_lconf)
134 bb.note("Your conf/bblayers.conf has been automatically updated.")
135 return
136
137 elif current_lconf == 5 and lconf_version > 5:
138 # Null update, to avoid issues with people switching between poky and other distros
139 current_lconf = 6
140 sanity_conf_update(bblayers_fn, lines, 'LCONF_VERSION', current_lconf)
141 bb.note("Your conf/bblayers.conf has been automatically updated.")
142 return
143
145
146 elif current_lconf == 6 and lconf_version > 6:
147 # Handle rename of meta-yocto -> meta-poky
148 # This marks the start of separate version numbers but code is needed in OE-Core
149 # for the migration, one last time.
150 layers = d.getVar('BBLAYERS').split()
151 layers = [ os.path.basename(path) for path in layers ]
152 if 'meta-yocto' in layers:
153 found = False
154 while True:
155 index, meta_yocto_line = sanity_conf_find_line(r'.*meta-yocto[\'"\s\n]', lines)
156 if meta_yocto_line:
157 lines[index] = meta_yocto_line.replace('meta-yocto', 'meta-poky')
158 found = True
159 else:
160 break
161 if not found:
162 raise NotImplementedError(failmsg)
163 index, meta_yocto_line = sanity_conf_find_line('LCONF_VERSION.*\n', lines)
164 if meta_yocto_line:
165 lines[index] = 'POKY_BBLAYERS_CONF_VERSION = "1"\n'
166 else:
167 raise NotImplementedError(failmsg)
168 with open(bblayers_fn, "w") as f:
169 f.write(''.join(lines))
170 bb.note("Your conf/bblayers.conf has been automatically updated.")
171 return
172 current_lconf += 1
173 sanity_conf_update(bblayers_fn, lines, 'LCONF_VERSION', current_lconf)
174 bb.note("Your conf/bblayers.conf has been automatically updated.")
175 return
176
177 raise NotImplementedError(failmsg)
178}
179
180def raise_sanity_error(msg, d, network_error=False):
181 if d.getVar("SANITY_USE_EVENTS") == "1":
182 try:
183 bb.event.fire(bb.event.SanityCheckFailed(msg, network_error), d)
184 except TypeError:
185 bb.event.fire(bb.event.SanityCheckFailed(msg), d)
186 return
187
188 bb.fatal(""" OE-core's config sanity checker detected a potential misconfiguration.
189 Either fix the cause of this error or at your own risk disable the checker (see sanity.conf).
190 Following is the list of potential problems / advisories:
191
192 %s""" % msg)
193
194# Check a single tune for validity.
195def check_toolchain_tune(data, tune, multilib):
196 tune_errors = []
197 if not tune:
198 return "No tuning found for %s multilib." % multilib
199 localdata = bb.data.createCopy(data)
200 if multilib != "default":
201 # Apply the overrides so we can look at the details.
202 overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + multilib
203 localdata.setVar("OVERRIDES", overrides)
204 bb.debug(2, "Sanity-checking tuning '%s' (%s) features:" % (tune, multilib))
205 features = (localdata.getVar("TUNE_FEATURES:tune-%s" % tune) or "").split()
206 if not features:
207 return "Tuning '%s' has no defined features, and cannot be used." % tune
208 valid_tunes = localdata.getVarFlags('TUNEVALID') or {}
209 conflicts = localdata.getVarFlags('TUNECONFLICTS') or {}
210 # [doc] is the documentation for the variable, not a real feature
211 if 'doc' in valid_tunes:
212 del valid_tunes['doc']
213 if 'doc' in conflicts:
214 del conflicts['doc']
215 for feature in features:
216 if feature in conflicts:
217 for conflict in conflicts[feature].split():
218 if conflict in features:
219 tune_errors.append("Feature '%s' conflicts with '%s'." %
220 (feature, conflict))
221 if feature in valid_tunes:
222 bb.debug(2, " %s: %s" % (feature, valid_tunes[feature]))
223 else:
224 tune_errors.append("Feature '%s' is not defined." % feature)
225 if tune_errors:
226 return "Tuning '%s' has the following errors:\n" % tune + '\n'.join(tune_errors)
227
228def check_toolchain(data):
229 tune_error_set = []
230 deftune = data.getVar("DEFAULTTUNE")
231 tune_errors = check_toolchain_tune(data, deftune, 'default')
232 if tune_errors:
233 tune_error_set.append(tune_errors)
234
235 multilibs = (data.getVar("MULTILIB_VARIANTS") or "").split()
236 global_multilibs = (data.getVar("MULTILIB_GLOBAL_VARIANTS") or "").split()
237
238 if multilibs:
239 seen_libs = []
240 seen_tunes = []
241 for lib in multilibs:
242 if lib in seen_libs:
243 tune_error_set.append("The multilib '%s' appears more than once." % lib)
244 else:
245 seen_libs.append(lib)
246 if not lib in global_multilibs:
247 tune_error_set.append("Multilib %s is not present in MULTILIB_GLOBAL_VARIANTS" % lib)
248 tune = data.getVar("DEFAULTTUNE:virtclass-multilib-%s" % lib)
249 if tune in seen_tunes:
250 tune_error_set.append("The tuning '%s' appears in more than one multilib." % tune)
251 else:
252 seen_tunes.append(tune)
253 if tune == deftune:
254 tune_error_set.append("Multilib '%s' (%s) is also the default tuning." % (lib, deftune))
255 else:
256 tune_errors = check_toolchain_tune(data, tune, lib)
257 if tune_errors:
258 tune_error_set.append(tune_errors)
259 if tune_error_set:
260 return "Toolchain tunings invalid:\n" + '\n'.join(tune_error_set) + "\n"
261
262 return ""
263
264def check_conf_exists(fn, data):
265 bbpath = []
266 fn = data.expand(fn)
267 vbbpath = data.getVar("BBPATH", False)
268 if vbbpath:
269 bbpath += vbbpath.split(":")
270 for p in bbpath:
271 currname = os.path.join(data.expand(p), fn)
272 if os.access(currname, os.R_OK):
273 return True
274 return False
275
276def check_create_long_filename(filepath, pathname):
277 import string, random
278 testfile = os.path.join(filepath, ''.join(random.choice(string.ascii_letters) for x in range(200)))
279 try:
280 if not os.path.exists(filepath):
281 bb.utils.mkdirhier(filepath)
282 f = open(testfile, "w")
283 f.close()
284 os.remove(testfile)
285 except IOError as e:
286 import errno
287 err, strerror = e.args
288 if err == errno.ENAMETOOLONG:
289 return "Failed to create a file with a long name in %s. Please use a filesystem that does not unreasonably limit filename length.\n" % pathname
290 else:
291 return "Failed to create a file in %s: %s.\n" % (pathname, strerror)
292 except OSError as e:
293 errno, strerror = e.args
294 return "Failed to create %s directory in which to run long name sanity check: %s.\n" % (pathname, strerror)
295 return ""
296
297def check_path_length(filepath, pathname, limit):
298 if len(filepath) > limit:
299 return "The length of %s is longer than %s, this would cause unexpected errors, please use a shorter path.\n" % (pathname, limit)
300 return ""
301
302def check_non_ascii(filepath, pathname):
303 if not filepath.isascii():
304 return "Non-ASCII character(s) in %s path (\"%s\") detected. This would cause build failures as we build software that doesn't support this.\n" % (pathname, filepath)
305 return ""
306
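# 'stat -f -c %t' prints the filesystem type magic number in hex (NFS reports 6969).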
307def get_filesystem_id(path):
308 import subprocess
309 try:
310 return subprocess.check_output(["stat", "-f", "-c", "%t", path]).decode('utf-8').strip()
311 except subprocess.CalledProcessError:
312 bb.warn("Can't get filesystem id of: %s" % path)
313 return None
314
315# Check that the path isn't located on nfs.
316def check_not_nfs(path, name):
317 # The NFS filesystem id is 6969
318 if get_filesystem_id(path) == "6969":
319 return "The %s: %s can't be located on nfs.\n" % (name, path)
320 return ""
321
322# Check that the path is on a case-sensitive file system
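# Create a temporary file with an upper-case 'TmP' prefix and test whether the all-lower-case
# name also appears to exist; on a case-insensitive filesystem it will.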
323def check_case_sensitive(path, name):
324 import tempfile
325 with tempfile.NamedTemporaryFile(prefix='TmP', dir=path) as tmp_file:
326 if os.path.exists(tmp_file.name.lower()):
327 return "The %s (%s) can't be on a case-insensitive file system.\n" % (name, path)
328 return ""
329
330# Check that path isn't a broken symlink
331def check_symlink(lnk, data):
332 if os.path.islink(lnk) and not os.path.exists(lnk):
333 raise_sanity_error("%s is a broken symlink." % lnk, data)
334
335def check_connectivity(d):
336 # URI's to check can be set in the CONNECTIVITY_CHECK_URIS variable
337 # using the same syntax as for SRC_URI. If the variable is not set
338 # the check is skipped
339 test_uris = (d.getVar('CONNECTIVITY_CHECK_URIS') or "").split()
340 retval = ""
341
342 bbn = d.getVar('BB_NO_NETWORK')
343 if bbn not in (None, '0', '1'):
344 return 'BB_NO_NETWORK should be "0" or "1", but it is "%s"' % bbn
345
346 # Only check connectivity if network enabled and the
347 # CONNECTIVITY_CHECK_URIS are set
348 network_enabled = not (bbn == '1')
349 check_enabled = len(test_uris)
350 if check_enabled and network_enabled:
351 # Take a copy of the data store and unset MIRRORS and PREMIRRORS
352 data = bb.data.createCopy(d)
353 data.delVar('PREMIRRORS')
354 data.delVar('MIRRORS')
355 try:
356 fetcher = bb.fetch2.Fetch(test_uris, data)
357 fetcher.checkstatus()
358 except Exception as err:
359 # Allow the message to be configured so that users can be
360 # pointed to a support mechanism.
361 msg = data.getVar('CONNECTIVITY_CHECK_MSG') or ""
362 if len(msg) == 0:
363 msg = "%s.\n" % err
364 msg += " Please ensure your host's network is configured correctly.\n"
365 msg += " Please ensure CONNECTIVITY_CHECK_URIS is correct and specified URIs are available.\n"
366 msg += " If your ISP or network is blocking the above URL,\n"
367 msg += " try with another domain name, for example by setting:\n"
368 msg += " CONNECTIVITY_CHECK_URIS = \"https://www.example.com/\""
369 msg += " You could also set BB_NO_NETWORK = \"1\" to disable network\n"
370 msg += " access if all required sources are on local disk.\n"
371 retval = msg
372
373 return retval
374
375def check_supported_distro(sanity_data):
376 from fnmatch import fnmatch
377
378 tested_distros = sanity_data.getVar('SANITY_TESTED_DISTROS')
379 if not tested_distros:
380 return
381
382 try:
383 distro = oe.lsb.distro_identifier()
384 except Exception:
385 distro = None
386
387 if not distro:
388 bb.warn('Host distribution could not be determined; you may experience unexpected failures. It is recommended that you use a tested distribution.')
return
389
390 for supported in [x.strip() for x in tested_distros.split('\\n')]:
391 if fnmatch(distro, supported):
392 return
393
394 bb.warn('Host distribution "%s" has not been validated with this version of the build system; you may experience unexpected failures. It is recommended that you use a tested distribution.' % distro)
395
396# Checks we should only make if MACHINE is set correctly
397def check_sanity_validmachine(sanity_data):
398 messages = ""
399
400 # Check TUNE_ARCH is set
401 if sanity_data.getVar('TUNE_ARCH') == 'INVALID':
402 messages = messages + 'TUNE_ARCH is unset. Please ensure your MACHINE configuration includes a valid tune configuration file which will set this correctly.\n'
403
404 # Check TARGET_OS is set
405 if sanity_data.getVar('TARGET_OS') == 'INVALID':
406 messages = messages + 'Please set TARGET_OS directly, or choose a MACHINE or DISTRO that does so.\n'
407
408 # Check that we don't have duplicate entries in PACKAGE_ARCHS & that TUNE_PKGARCH is in PACKAGE_ARCHS
409 pkgarchs = sanity_data.getVar('PACKAGE_ARCHS')
410 tunepkg = sanity_data.getVar('TUNE_PKGARCH')
411 defaulttune = sanity_data.getVar('DEFAULTTUNE')
412 tunefound = False
413 seen = {}
414 dups = []
415
416 for pa in pkgarchs.split():
417 if seen.get(pa, 0) == 1:
418 dups.append(pa)
419 else:
420 seen[pa] = 1
421 if pa == tunepkg:
422 tunefound = True
423
424 if len(dups):
425 messages = messages + "Error, the PACKAGE_ARCHS variable contains duplicates. The following archs are listed more than once: %s" % " ".join(dups)
426
427 if not tunefound:
428 messages = messages + "Error, the PACKAGE_ARCHS variable (%s) for DEFAULTTUNE (%s) does not contain TUNE_PKGARCH (%s)." % (pkgarchs, defaulttune, tunepkg)
429
430 return messages
431
432# Patch before 2.7 can't handle all the features in git-style diffs. Some
433# patches may incorrectly apply, and others won't apply at all.
434def check_patch_version(sanity_data):
435 import re, subprocess
436
437 patch_minimum_version = "2.7"
438
439 try:
440 result = subprocess.check_output(["patch", "--version"], stderr=subprocess.STDOUT).decode('utf-8')
441 version = re.search(r"[0-9.]+", result.splitlines()[0]).group()
442 if bb.utils.vercmp_string_op(version, patch_minimum_version, "<"):
443 return ("Your version of patch is older than %s and has bugs which will break builds. "
444 "Please install a newer version of patch.\n" % patch_minimum_version)
445 else:
446 return None
447 except subprocess.CalledProcessError as e:
448 return "Unable to execute patch --version, exit code %d:\n%s\n" % (e.returncode, e.output)
449
450# Glibc needs make 4.0 or later, we may as well match at this point
451def check_make_version(sanity_data):
452 make_minimum_version = "4.0"
453 import subprocess
454
455 try:
456 result = subprocess.check_output(['make', '--version'], stderr=subprocess.STDOUT).decode('utf-8')
457 except subprocess.CalledProcessError as e:
458 return "Unable to execute make --version, exit code %d\n%s\n" % (e.returncode, e.output)
459 version = result.split()[2]
460 if bb.utils.vercmp_string_op(version, make_minimum_version, "<"):
461 return "Please install a make version of %s or later.\n" % make_minimum_version
462
463 if bb.utils.vercmp_string_op(version, "4.2.1", "=="):
464 distro = oe.lsb.distro_identifier()
465 if "ubuntu" in distro or "debian" in distro or "linuxmint" in distro:
466 return None
467 return "make version 4.2.1 is known to have issues on Centos/OpenSUSE and other non-Ubuntu systems. Please use a buildtools-make-tarball or a newer version of make.\n"
468 return None
469
470
471# Check if we're running on WSL (Windows Subsystem for Linux).
472 # WSLv1 is known not to work but WSLv2 should work properly as
473 # long as the VHDX file is optimized regularly; let the user know
474 # upfront.
475# More information on installing WSLv2 at:
476# https://docs.microsoft.com/en-us/windows/wsl/wsl2-install
477def check_wsl(d):
478 with open("/proc/version", "r") as f:
479 verdata = f.readlines()
480 for l in verdata:
481 if "Microsoft" in l:
482 return "OpenEmbedded doesn't work under WSLv1, please upgrade to WSLv2 if you want to run builds on Windows"
483 elif "microsoft" in l:
484 bb.warn("You are running bitbake under WSLv2, this works properly but you should optimize your VHDX file eventually to avoid running out of storage space")
485 return None
486
487def check_userns():
488 """
489 Check that user namespaces are functional, as they're used for network isolation.
490 """
491
492 # There is a known failure case with AppArmor where the unshare() call
493 # succeeds (at which point the uid is nobody) but writing to the uid_map
494 # fails (so the uid isn't reset back to the user's uid). We can detect this.
495 parentuid = os.getuid()
496 if not bb.utils.is_local_uid(parentuid):
497 return None
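# Fork a child that calls bb.utils.disable_network() (which unshares new user/network
# namespaces and writes the uid/gid maps); the child exits non-zero if its uid no longer
# matches the parent's, i.e. the uid mapping could not be written back.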
498 pid = os.fork()
499 if not pid:
500 try:
501 bb.utils.disable_network()
502 except:
503 pass
504 os._exit(parentuid != os.getuid())
505
506 ret = os.waitpid(pid, 0)[1]
507 if ret:
508 bb.fatal("User namespaces are not usable by BitBake, possibly due to AppArmor.\n"
509 "See https://discourse.ubuntu.com/t/ubuntu-24-04-lts-noble-numbat-release-notes/39890#unprivileged-user-namespace-restrictions for more information.")
510
511
512# Require at least gcc version 10.1
513#
514# A less invasive fix is with scripts/install-buildtools (or with user
515# built buildtools-extended-tarball)
516#
517def check_gcc_version(sanity_data):
518 gcc_minimum_version = "10.1"
519 version = oe.utils.get_host_gcc_version(sanity_data)
520 if bb.utils.vercmp_string_op(version, gcc_minimum_version, "<"):
521 return ("Your version of gcc is older than %s and will break builds. Please install a newer "
522 "version of gcc (you could use the project's buildtools-extended-tarball or use "
523 "scripts/install-buildtools).\n" % gcc_minimum_version)
524 return None
525
526# Tar version 1.24 and onwards handle overwriting symlinks correctly
527# but earlier versions do not; this needs to work properly for sstate
528# Version 1.28 is needed so opkg-build works correctly when reproducible builds are enabled
529 # GNU tar is assumed to be used as tar in poky
530def check_tar_version(sanity_data):
531 tar_minimum_version = "1.28"
532 import subprocess
533 try:
534 result = subprocess.check_output(["tar", "--version"], stderr=subprocess.STDOUT).decode('utf-8')
535 except subprocess.CalledProcessError as e:
536 return "Unable to execute tar --version, exit code %d\n%s\n" % (e.returncode, e.output)
537 if not "GNU" in result:
538 return "Your version of tar is not gtar. Please install gtar (you could use the project's buildtools-tarball from our last release or use scripts/install-buildtools).\n"
539 version = result.split()[3]
540 if bb.utils.vercmp_string_op(version, tar_minimum_version, "<"):
541 return ("Your version of tar is older than %s and does not have the support needed to enable reproducible "
542 "builds. Please install a newer version of tar (you could use the project's buildtools-tarball from "
543 "our last release or use scripts/install-buildtools).\n" % tar_minimum_version)
544
545 try:
546 result = subprocess.check_output(["tar", "--help"], stderr=subprocess.STDOUT).decode('utf-8')
547 if "--xattrs" not in result:
548 return "Your tar doesn't support --xattrs, please use GNU tar.\n"
549 except subprocess.CalledProcessError as e:
550 return "Unable to execute tar --help, exit code %d\n%s\n" % (e.returncode, e.output)
551
552 return None
553
554# We use git parameters and functionality only found in 1.7.8 or later
555# The kernel tools assume git >= 1.8.3.1 (verified needed > 1.7.9.5) see #6162
556# The git fetcher also had workarounds for git < 1.7.9.2 which we've dropped
557def check_git_version(sanity_data):
558 git_minimum_version = "1.8.3.1"
559 import subprocess
560 try:
561 result = subprocess.check_output(["git", "--version"], stderr=subprocess.DEVNULL).decode('utf-8')
562 except subprocess.CalledProcessError as e:
563 return "Unable to execute git --version, exit code %d\n%s\n" % (e.returncode, e.output)
564 version = result.split()[2]
565 if bb.utils.vercmp_string_op(version, git_minimum_version, "<"):
566 return ("Your version of git is older than %s and has bugs which will break builds. "
567 "Please install a newer version of git.\n" % git_minimum_version)
568 return None
569
570# Check the required perl modules which may not be installed by default
571def check_perl_modules(sanity_data):
572 import subprocess
573 ret = ""
574 modules = ( "Text::ParseWords", "Thread::Queue", "Data::Dumper", "File::Compare", "File::Copy", "open ':std'", "FindBin" )
575 errresult = ''
576 for m in modules:
577 try:
578 subprocess.check_output(["perl", "-e", "use %s" % m])
579 except subprocess.CalledProcessError as e:
580 errresult += bytes.decode(e.output)
581 ret += "%s " % m
582 if ret:
583 return "Required perl module(s) not found: %s\n\n%s\n" % (ret, errresult)
584 return None
585
586def sanity_check_conffiles(d):
587 funcs = d.getVar('BBLAYERS_CONF_UPDATE_FUNCS').split()
588 for func in funcs:
589 conffile, current_version, required_version, func = func.split(":")
590 if check_conf_exists(conffile, d) and d.getVar(current_version) is not None and \
591 d.getVar(current_version) != d.getVar(required_version):
592 try:
593 bb.build.exec_func(func, d)
594 except NotImplementedError as e:
595 bb.fatal(str(e))
596 d.setVar("BB_INVALIDCONF", True)
597
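# TMPDIR ABI 14 -> 15 migration helper: walk the per-BUILD_ARCH sstate manifest indexes
# and clean each recorded manifest, stamp and workdir so the affected cross builds are redone.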
598def drop_v14_cross_builds(d):
599 import glob
600 indexes = glob.glob(d.expand("${SSTATE_MANIFESTS}/index-${BUILD_ARCH}_*"))
601 for i in indexes:
602 with open(i, "r") as f:
603 lines = f.readlines()
604 for l in reversed(lines):
605 try:
606 (stamp, manifest, workdir) = l.split()
607 except ValueError:
608 bb.fatal("Invalid line '%s' in sstate manifest '%s'" % (l, i))
609 for m in glob.glob(manifest + ".*"):
610 if m.endswith(".postrm"):
611 continue
612 sstate_clean_manifest(m, d)
613 bb.utils.remove(stamp + "*")
614 bb.utils.remove(workdir, recurse = True)
615
616def check_cpp_toolchain_flag(d, flag, error_message=None):
617 """
618 Checks if the g++ compiler supports the given flag
619 """
620 import shlex
621 import subprocess
622
623 cpp_code = """
624 #include <iostream>
625 int main() {
626 std::cout << "Hello, World!" << std::endl;
627 return 0;
628 }
629 """
630
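# "-x c++ -" makes g++ read the test program from stdin; the output is discarded and
# only the exit status matters.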
631 cmd = ["g++", "-x", "c++", "-", "-o", "/dev/null", flag]
632 try:
633 subprocess.run(cmd, input=cpp_code, capture_output=True, text=True, check=True)
634 return None
635 except subprocess.CalledProcessError as e:
636 return error_message or f"An unexpected issue occurred during the C++ toolchain check: {str(e)}"
637
638def sanity_handle_abichanges(status, d):
639 #
640 # Check the 'ABI' of TMPDIR
641 #
642 import subprocess
643
644 current_abi = d.getVar('OELAYOUT_ABI')
645 abifile = d.getVar('SANITY_ABIFILE')
646 if os.path.exists(abifile):
647 with open(abifile, "r") as f:
648 abi = f.read().strip()
649 if not abi.isdigit():
650 with open(abifile, "w") as f:
651 f.write(current_abi)
652 elif int(abi) <= 11 and current_abi == "12":
653 status.addresult("The layout of TMPDIR changed for Recipe Specific Sysroots.\nConversion doesn't make sense and this change will rebuild everything so please delete TMPDIR (%s).\n" % d.getVar("TMPDIR"))
654 elif int(abi) <= 13 and current_abi == "14":
655 status.addresult("TMPDIR changed to include path filtering from the pseudo database.\nIt is recommended to use a clean TMPDIR with the new pseudo path filtering so TMPDIR (%s) would need to be removed to continue.\n" % d.getVar("TMPDIR"))
656 elif int(abi) == 14 and current_abi == "15":
657 drop_v14_cross_builds(d)
658 with open(abifile, "w") as f:
659 f.write(current_abi)
660 elif (abi != current_abi):
661 # Code to convert from one ABI to another could go here if possible.
662 status.addresult("Error, TMPDIR has changed its layout version number (%s to %s) and you need to either rebuild, revert or adjust it at your own risk.\n" % (abi, current_abi))
663 else:
664 with open(abifile, "w") as f:
665 f.write(current_abi)
666
667def check_sanity_sstate_dir_change(sstate_dir, data):
668 # Sanity checks to be done when the value of SSTATE_DIR changes
669
670 # Check that SSTATE_DIR isn't on a filesystem with limited filename length (eg. eCryptFS)
671 testmsg = ""
672 if sstate_dir != "":
673 testmsg = check_create_long_filename(sstate_dir, "SSTATE_DIR")
674 # If we don't have permissions to SSTATE_DIR, suggest the user set it as an SSTATE_MIRRORS
675 try:
676 err = testmsg.split(': ')[1].strip()
677 if err == "Permission denied.":
678 testmsg = testmsg + "You could try using %s in SSTATE_MIRRORS rather than as an SSTATE_CACHE.\n" % (sstate_dir)
679 except IndexError:
680 pass
681 return testmsg
682
683def check_sanity_version_change(status, d):
684 import glob
685
686 # Sanity checks to be done when SANITY_VERSION or NATIVELSBSTRING changes
687 # In other words, these tests run once in a given build directory and then
688 # never again until the sanity version or host distribution id/version changes.
689
690 # Check the python install is complete. Examples that are often removed in
691 # minimal installations: glib-2.0-native requires xml.parsers.expat
692 try:
693 import xml.parsers.expat
694 except ImportError as e:
695 status.addresult('Your Python 3 is not a full install. Please install the module %s (see the Getting Started guide for further information).\n' % e.name)
696
697 status.addresult(check_gcc_version(d))
698 status.addresult(check_make_version(d))
699 status.addresult(check_patch_version(d))
700 status.addresult(check_tar_version(d))
701 status.addresult(check_git_version(d))
702 status.addresult(check_perl_modules(d))
703 status.addresult(check_wsl(d))
704 status.addresult(check_userns())
705
706 missing = ""
707
708 if not check_app_exists("${MAKE}", d):
709 missing = missing + "GNU make,"
710
711 if not check_app_exists('gcc', d):
712 missing = missing + "C Compiler (gcc),"
713
714 if not check_app_exists('g++', d):
715 missing = missing + "C++ Compiler (g++),"
716
717 # installing emacs on Ubuntu 24.04 pulls in emacs-gtk -> libgcc-14-dev despite gcc being 13
718 # this breaks libcxx-native and compiler-rt-native builds so tell the user to fix things
719 if glob.glob("/usr/lib/gcc/*/14/libgcc_s.so") and not glob.glob("/usr/lib/gcc/*/14/libstdc++.so"):
720 status.addresult('libgcc-14-dev is installed and not libstdc++-14-dev which will break clang native compiles. Please remove one or install the other.')
721
722 required_utilities = d.getVar('SANITY_REQUIRED_UTILITIES')
723
724 for util in required_utilities.split():
725 if not check_app_exists(util, d):
726 missing = missing + "%s," % util
727
728 if missing:
729 missing = missing.rstrip(',')
730 status.addresult("Please install the following missing utilities: %s\n" % missing)
731
732 assume_provided = d.getVar('ASSUME_PROVIDED').split()
733 # Check user doesn't have ASSUME_PROVIDED = instead of += in local.conf
734 if "diffstat-native" not in assume_provided:
735 status.addresult('Please use ASSUME_PROVIDED +=, not ASSUME_PROVIDED = in your local.conf\n')
736
737 # Check that TMPDIR isn't on a filesystem with limited filename length (eg. eCryptFS)
738 import stat
739 tmpdir = d.getVar('TMPDIR')
740 topdir = d.getVar('TOPDIR')
741 status.addresult(check_create_long_filename(tmpdir, "TMPDIR"))
742 tmpdirmode = os.stat(tmpdir).st_mode
743 if (tmpdirmode & stat.S_ISGID):
744 status.addresult("TMPDIR is setgid, please don't build in a setgid directory")
745 if (tmpdirmode & stat.S_ISUID):
746 status.addresult("TMPDIR is setuid, please don't build in a setuid directory")
747
748 # Check that a user isn't building in a path in PSEUDO_IGNORE_PATHS
749 pseudoignorepaths = (d.getVar('PSEUDO_IGNORE_PATHS', expand=True) or "").split(",")
750 workdir = d.getVar('WORKDIR', expand=True)
751 for i in pseudoignorepaths:
752 if i and workdir.startswith(i):
753 status.addresult("You are building in a path included in PSEUDO_IGNORE_PATHS " + str(i) + " please locate the build outside this path.\n")
754
755 # Check if PSEUDO_IGNORE_PATHS and paths under pseudo control overlap
756 pseudoignorepaths = (d.getVar('PSEUDO_IGNORE_PATHS', expand=True) or "").split(",")
757 pseudo_control_dir = "${D},${PKGD},${PKGDEST},${IMAGEROOTFS},${SDK_OUTPUT}"
758 pseudocontroldir = d.expand(pseudo_control_dir).split(",")
759 for i in pseudoignorepaths:
760 for j in pseudocontroldir:
761 if i and j:
762 if j.startswith(i):
763 status.addresult("A path included in PSEUDO_IGNORE_PATHS " + str(i) + " and the path " + str(j) + " overlap and this will break pseudo permission and ownership tracking. Please set the path " + str(j) + " to a different directory which does not overlap with pseudo controlled directories. \n")
764
765 # Some third-party software apparently relies on chmod etc. being suid root (!!)
766 import stat
767 suid_check_bins = "chown chmod mknod".split()
768 for bin_cmd in suid_check_bins:
769 bin_path = bb.utils.which(os.environ["PATH"], bin_cmd)
770 if bin_path:
771 bin_stat = os.stat(bin_path)
772 if bin_stat.st_uid == 0 and bin_stat.st_mode & stat.S_ISUID:
773 status.addresult('%s has the setuid bit set. This interferes with pseudo and may cause other issues that break the build process.\n' % bin_path)
774
775 # Check that we can fetch from various network transports
776 netcheck = check_connectivity(d)
777 status.addresult(netcheck)
778 if netcheck:
779 status.network_error = True
780
781 nolibs = d.getVar('NO32LIBS')
782 if not nolibs:
783 lib32path = '/lib'
784 if os.path.exists('/lib64') and ( os.path.islink('/lib64') or os.path.islink('/lib') ):
785 lib32path = '/lib32'
786
787 if os.path.exists('%s/libc.so.6' % lib32path) and not os.path.exists('/usr/include/gnu/stubs-32.h'):
788 status.addresult("You have a 32-bit libc, but no 32-bit headers. You must install the 32-bit libc headers.\n")
789
790 bbpaths = d.getVar('BBPATH').split(":")
791 if ("." in bbpaths or "./" in bbpaths or "" in bbpaths):
792 status.addresult("BBPATH references the current directory, either through " \
793 "an empty entry, a './' or a '.'.\n\t This is unsafe and means your "\
794 "layer configuration is adding empty elements to BBPATH.\n\t "\
795 "Please check your layer.conf files and other BBPATH " \
796 "settings to remove the current working directory " \
797 "references.\n" \
798 "Parsed BBPATH is" + str(bbpaths));
799
800 oes_bb_conf = d.getVar('OES_BITBAKE_CONF')
801 if not oes_bb_conf:
802 status.addresult('You are not using the OpenEmbedded version of conf/bitbake.conf. This means your environment is misconfigured, in particular check BBPATH.\n')
803
804 # The length of the TMPDIR path must not exceed 400 characters
805 status.addresult(check_path_length(tmpdir, "TMPDIR", 400))
806
807 # Check that TOPDIR does not contain non-ASCII chars (perl_5.40.0, Perl-native and shadow-native build failures)
808 status.addresult(check_non_ascii(topdir, "TOPDIR"))
809
810 # Check that TMPDIR isn't located on nfs
811 status.addresult(check_not_nfs(tmpdir, "TMPDIR"))
812
813 # Check for case-insensitive file systems (such as Linux in Docker on
814 # macOS with default HFS+ file system)
815 status.addresult(check_case_sensitive(tmpdir, "TMPDIR"))
816
817 # Check if linking with lstdc++ is failing
818 status.addresult(check_cpp_toolchain_flag(d, "-lstdc++"))
819
820 # Check if the C++ toolchain supports the "--std=gnu++20" flag
821 status.addresult(check_cpp_toolchain_flag(d, "--std=gnu++20",
822 "An error occurred during checking the C++ toolchain for '--std=gnu++20' support. "
823 "Please use a g++ compiler that supports C++20 (e.g. g++ version 10 onwards)."))
824
825def sanity_check_locale(d):
826 """
827 Currently bitbake switches locale to en_US.UTF-8 so check that this locale actually exists.
828 """
829 import locale
830 try:
831 locale.setlocale(locale.LC_ALL, "en_US.UTF-8")
832 except locale.Error:
833 raise_sanity_error("Your system needs to support the en_US.UTF-8 locale.", d)
834
835def check_sanity_everybuild(status, d):
836 import os, stat
837 # Sanity tests which test the user's environment so need to run at each build (or are so cheap
838 # it makes sense to always run them).
839
840 if 0 == os.getuid():
841 raise_sanity_error("Do not use Bitbake as root.", d)
842
843 # Check the Python version, we now have a minimum of Python 3.9
844 import sys
845 if sys.hexversion < 0x030900F0:
846 status.addresult('The system requires at least Python 3.9 to run. Please update your Python interpreter.\n')
847
848 # Check the bitbake version meets minimum requirements
849 minversion = d.getVar('BB_MIN_VERSION')
850 if bb.utils.vercmp_string_op(bb.__version__, minversion, "<"):
851 status.addresult('Bitbake version %s is required and version %s was found\n' % (minversion, bb.__version__))
852
853 sanity_check_locale(d)
854
855 paths = d.getVar('PATH').split(":")
856 if "." in paths or "./" in paths or "" in paths:
857 status.addresult("PATH contains '.', './' or '' (empty element), which will break the build, please remove this.\nParsed PATH is " + str(paths) + "\n")
858
859 # Check if bitbake is present in the PATH environment variable
860 bb_check = bb.utils.which(d.getVar('PATH'), 'bitbake')
861 if not bb_check:
862 bb.warn("bitbake binary is not found in PATH, did you source the script?")
863
864 # Check whether the 'inherit' directive is used in a conf file;
865 # in configuration files the uppercase INHERIT variable should be used instead
866 inherit = d.getVar('inherit')
867 if inherit:
868 status.addresult("Please don't use inherit directive in your local.conf. The directive is supposed to be used in classes and recipes only to inherit of bbclasses. Here INHERIT should be used.\n")
869
870 # Check that the DISTRO is valid, if set
871 # taking into account that the DISTRO may have been renamed
872 distro = d.getVar('DISTRO')
873 if distro and distro != "nodistro":
874 if not ( check_conf_exists("conf/distro/${DISTRO}.conf", d) or check_conf_exists("conf/distro/include/${DISTRO}.inc", d) ):
875 status.addresult("DISTRO '%s' not found. Please set a valid DISTRO in your local.conf\n" % d.getVar("DISTRO"))
876
877 # Check that these variables don't use tilde-expansion as we don't do that
878 for v in ("TMPDIR", "DL_DIR", "SSTATE_DIR"):
879 if d.getVar(v).startswith("~"):
880 status.addresult("%s uses ~ but Bitbake will not expand this, use an absolute path or variables." % v)
881
882 # Check that DL_DIR is set, exists and is writable. In theory, we should never even hit the check if DL_DIR isn't
883 # set, since so much relies on it being set.
884 dldir = d.getVar('DL_DIR')
885 if not dldir:
886 status.addresult("DL_DIR is not set. Your environment is misconfigured, check that DL_DIR is set, and if the directory exists, that it is writable. \n")
887 if os.path.exists(dldir) and not os.access(dldir, os.W_OK):
888 status.addresult("DL_DIR: %s exists but you do not appear to have write access to it. \n" % dldir)
889 check_symlink(dldir, d)
890
891 # Check that the MACHINE is valid, if it is set
892 machinevalid = True
893 if d.getVar('MACHINE'):
894 if not check_conf_exists("conf/machine/${MACHINE}.conf", d):
895 status.addresult('MACHINE=%s is invalid. Please set a valid MACHINE in your local.conf, environment or other configuration file.\n' % (d.getVar('MACHINE')))
896 machinevalid = False
897 else:
898 status.addresult(check_sanity_validmachine(d))
899 else:
900 status.addresult('Please set a MACHINE in your local.conf or environment\n')
901 machinevalid = False
902 if machinevalid:
903 status.addresult(check_toolchain(d))
904
905 # Check that the SDKMACHINE is valid, if it is set
906 if d.getVar('SDKMACHINE'):
907 if not check_conf_exists("conf/machine-sdk/${SDKMACHINE}.conf", d):
908 status.addresult('Specified SDKMACHINE value is not valid\n')
909 elif d.getVar('SDK_ARCH', False) == "${BUILD_ARCH}":
910 status.addresult('SDKMACHINE is set, but SDK_ARCH has not been changed as a result - SDKMACHINE may have been set too late (e.g. in the distro configuration)\n')
911
912 # If SDK_VENDOR looks like "-my-sdk" then the triples are badly formed so fail early
913 sdkvendor = d.getVar("SDK_VENDOR")
914 if not (sdkvendor.startswith("-") and sdkvendor.count("-") == 1):
915 status.addresult("SDK_VENDOR should be of the form '-foosdk' with a single dash; found '%s'\n" % sdkvendor)
916
917 check_supported_distro(d)
918
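# os.umask() installs the new mask and returns the previous one; check the original
# mask and then restore it.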
919 omask = os.umask(0o022)
920 if omask & 0o755:
921 status.addresult("Please use a umask which allows a+rx and u+rwx\n")
922 os.umask(omask)
923
924 # Ensure /tmp is NOT mounted with noexec
925 if os.statvfs("/tmp").f_flag & os.ST_NOEXEC:
926 raise_sanity_error("/tmp shouldn't be mounted with noexec.", d)
927
928 if d.getVar('TARGET_ARCH') == "arm":
929 # This path is no longer user-readable in modern (very recent) Linux
930 try:
931 if os.path.exists("/proc/sys/vm/mmap_min_addr"):
932 f = open("/proc/sys/vm/mmap_min_addr", "r")
933 try:
934 if (int(f.read().strip()) > 65536):
935 status.addresult("/proc/sys/vm/mmap_min_addr is not <= 65536. This will cause problems with qemu so please fix the value (as root).\n\nTo fix this in later reboots, set vm.mmap_min_addr = 65536 in /etc/sysctl.conf.\n")
936 finally:
937 f.close()
938 except:
939 pass
940
941 for checkdir in ['COREBASE', 'TMPDIR']:
942 val = d.getVar(checkdir)
943 if val.find('..') != -1:
944 status.addresult("Error, you have '..' in your %s directory path. Please ensure the variable contains an absolute path as this can break some recipe builds in obtuse ways." % checkdir)
945 if val.find('+') != -1:
946 status.addresult("Error, you have an invalid character (+) in your %s directory path. Please move the installation to a directory which doesn't include any + characters." % checkdir)
947 if val.find('@') != -1:
948 status.addresult("Error, you have an invalid character (@) in your %s directory path. Please move the installation to a directory which doesn't include any @ characters." % checkdir)
949 if val.find(' ') != -1:
950 status.addresult("Error, you have a space in your %s directory path. Please move the installation to a directory which doesn't include a space since autotools doesn't support this." % checkdir)
951 if val.find('%') != -1:
952 status.addresult("Error, you have an invalid character (%) in your %s directory path which causes problems with python string formatting. Please move the installation to a directory which doesn't include any % characters." % checkdir)
953
954 # Check the format of MIRRORS, PREMIRRORS and SSTATE_MIRRORS
955 import re
956 mirror_vars = ['MIRRORS', 'PREMIRRORS', 'SSTATE_MIRRORS']
957 protocols = ['http', 'ftp', 'file', 'https', \
958 'git', 'gitsm', 'hg', 'osc', 'p4', 'svn', \
959 'bzr', 'cvs', 'npm', 'sftp', 'ssh', 's3', \
960 'az', 'ftps', 'crate', 'gs']
961 for mirror_var in mirror_vars:
962 mirrors = (d.getVar(mirror_var) or '').replace('\\n', ' ').split()
963
964 # Split into pairs
965 if len(mirrors) % 2 != 0:
966 bb.warn('Invalid mirror variable value for %s: %s, should contain paired members.' % (mirror_var, str(mirrors)))
967 continue
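# Group the flat list into (pattern, mirror) pairs: zip() over two references to the same
# iterator consumes two items at a time.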
968 mirrors = list(zip(*[iter(mirrors)]*2))
969
970 for mirror_entry in mirrors:
971 pattern, mirror = mirror_entry
972
973 decoded = bb.fetch2.decodeurl(pattern)
974 try:
975 pattern_scheme = re.compile(decoded[0])
976 except re.error as exc:
977 bb.warn('Invalid scheme regex (%s) in %s; %s' % (pattern, mirror_var, mirror_entry))
978 continue
979
980 if not any(pattern_scheme.match(protocol) for protocol in protocols):
981 bb.warn('Invalid protocol (%s) in %s: %s' % (decoded[0], mirror_var, mirror_entry))
982 continue
983
984 if not any(mirror.startswith(protocol + '://') for protocol in protocols):
985 bb.warn('Invalid protocol in %s: %s' % (mirror_var, mirror_entry))
986 continue
987
988 if mirror.startswith('file://'):
989 import urllib
990 check_symlink(urllib.parse.urlparse(mirror).path, d)
991 # SSTATE_MIRRORS entries end with a /PATH string
992 if mirror.endswith('/PATH'):
993 # remove /PATH$ from SSTATE_MIRROR to get a working
994 # base directory path
995 mirror_base = urllib.parse.urlparse(mirror[:-1*len('/PATH')]).path
996 check_symlink(mirror_base, d)
997
998 # Check sstate mirrors aren't being used with a local hash server and no remote
999 hashserv = d.getVar("BB_HASHSERVE")
1000 if d.getVar("SSTATE_MIRRORS") and hashserv and hashserv.startswith("unix://") and not d.getVar("BB_HASHSERVE_UPSTREAM"):
1001 bb.warn("You are using a local hash equivalence server but have configured an sstate mirror. This will likely mean no sstate will match from the mirror. You may wish to disable the hash equivalence use (BB_HASHSERVE), or use a hash equivalence server alongside the sstate mirror.")
1002
1003 # Check that TMPDIR hasn't changed location since the last time we were run
1004 tmpdir = d.getVar('TMPDIR')
1005 checkfile = os.path.join(tmpdir, "saved_tmpdir")
1006 if os.path.exists(checkfile):
1007 with open(checkfile, "r") as f:
1008 saved_tmpdir = f.read().strip()
1009 if (saved_tmpdir != tmpdir):
1010 status.addresult("Error, TMPDIR has changed location. You need to either move it back to %s or delete it and rebuild\n" % saved_tmpdir)
1011 else:
1012 bb.utils.mkdirhier(tmpdir)
1013 # Remove setuid, setgid and sticky bits from TMPDIR
1014 try:
1015 os.chmod(tmpdir, os.stat(tmpdir).st_mode & ~ stat.S_ISUID)
1016 os.chmod(tmpdir, os.stat(tmpdir).st_mode & ~ stat.S_ISGID)
1017 os.chmod(tmpdir, os.stat(tmpdir).st_mode & ~ stat.S_ISVTX)
1018 except OSError as exc:
1019 bb.warn("Unable to chmod TMPDIR: %s" % exc)
1020 with open(checkfile, "w") as f:
1021 f.write(tmpdir)
1022
1023 # If /bin/sh is a symlink, check that it points to dash or bash
1024 if os.path.islink('/bin/sh'):
1025 real_sh = os.path.realpath('/bin/sh')
1026 # Due to update-alternatives, the shell name may take various
1027 # forms, such as /bin/dash, bin/bash, /bin/bash.bash ...
1028 if '/dash' not in real_sh and '/bash' not in real_sh:
1029 status.addresult("Error, /bin/sh links to %s, must be dash or bash\n" % real_sh)
1030
1031def check_sanity(sanity_data):
1032 class SanityStatus(object):
1033 def __init__(self):
1034 self.messages = ""
1035 self.network_error = False
1036
1037 def addresult(self, message):
1038 if message:
1039 self.messages = self.messages + message
1040
1041 status = SanityStatus()
1042
1043 tmpdir = sanity_data.getVar('TMPDIR')
1044 sstate_dir = sanity_data.getVar('SSTATE_DIR')
1045
1046 check_symlink(sstate_dir, sanity_data)
1047
1048 # Check saved sanity info
1049 last_sanity_version = 0
1050 last_tmpdir = ""
1051 last_sstate_dir = ""
1052 last_nativelsbstr = ""
1053 sanityverfile = sanity_data.expand("${TOPDIR}/cache/sanity_info")
1054 if os.path.exists(sanityverfile):
1055 with open(sanityverfile, 'r') as f:
1056 for line in f:
1057 if line.startswith('SANITY_VERSION'):
1058 last_sanity_version = int(line.split()[1])
1059 if line.startswith('TMPDIR'):
1060 last_tmpdir = line.split()[1]
1061 if line.startswith('SSTATE_DIR'):
1062 last_sstate_dir = line.split()[1]
1063 if line.startswith('NATIVELSBSTRING'):
1064 last_nativelsbstr = line.split()[1]
1065
1066 check_sanity_everybuild(status, sanity_data)
1067
1068 sanity_version = int(sanity_data.getVar('SANITY_VERSION') or 1)
1069 network_error = False
1070 # NATIVELSBSTRING var may have been overridden with "universal", so
1071 # get actual host distribution id and version
1072 nativelsbstr = lsb_distro_identifier(sanity_data)
1073 if last_sanity_version < sanity_version or last_nativelsbstr != nativelsbstr:
1074 check_sanity_version_change(status, sanity_data)
1075 status.addresult(check_sanity_sstate_dir_change(sstate_dir, sanity_data))
1076 else:
1077 if last_sstate_dir != sstate_dir:
1078 status.addresult(check_sanity_sstate_dir_change(sstate_dir, sanity_data))
1079
1080 if os.path.exists(os.path.dirname(sanityverfile)) and not status.messages:
1081 with open(sanityverfile, 'w') as f:
1082 f.write("SANITY_VERSION %s\n" % sanity_version)
1083 f.write("TMPDIR %s\n" % tmpdir)
1084 f.write("SSTATE_DIR %s\n" % sstate_dir)
1085 f.write("NATIVELSBSTRING %s\n" % nativelsbstr)
1086
1087 sanity_handle_abichanges(status, sanity_data)
1088
1089 if status.messages != "":
1090 raise_sanity_error(sanity_data.expand(status.messages), sanity_data, status.network_error)
1091
1092addhandler config_reparse_eventhandler
1093config_reparse_eventhandler[eventmask] = "bb.event.ConfigParsed"
1094python config_reparse_eventhandler() {
1095 sanity_check_conffiles(e.data)
1096}
1097
1098addhandler check_sanity_eventhandler
1099check_sanity_eventhandler[eventmask] = "bb.event.SanityCheck bb.event.NetworkTest"
1100python check_sanity_eventhandler() {
1101 if bb.event.getName(e) == "SanityCheck":
1102 sanity_data = bb.data.createCopy(e.data)
1103 check_sanity(sanity_data)
1104 if e.generateevents:
1105 sanity_data.setVar("SANITY_USE_EVENTS", "1")
1106 bb.event.fire(bb.event.SanityCheckPassed(), e.data)
1107 elif bb.event.getName(e) == "NetworkTest":
1108 sanity_data = bb.data.createCopy(e.data)
1109 if e.generateevents:
1110 sanity_data.setVar("SANITY_USE_EVENTS", "1")
1111 bb.event.fire(bb.event.NetworkTestFailed() if check_connectivity(sanity_data) else bb.event.NetworkTestPassed(), e.data)
1112
1113 return
1114}