summaryrefslogtreecommitdiffstats
path: root/bitbake/lib/bb/utils.py
diff options
context:
space:
mode:
Diffstat (limited to 'bitbake/lib/bb/utils.py')
-rw-r--r--bitbake/lib/bb/utils.py1671
1 files changed, 0 insertions, 1671 deletions
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
deleted file mode 100644
index b282d09abf..0000000000
--- a/bitbake/lib/bb/utils.py
+++ /dev/null
@@ -1,1671 +0,0 @@
1"""
2BitBake Utility Functions
3"""
4
5# Copyright (C) 2004 Michael Lauer
6#
7# SPDX-License-Identifier: GPL-2.0-only
8#
9
10import re, fcntl, os, string, stat, shutil, time
11import sys
12import errno
13import logging
14import bb
15import bb.msg
16import multiprocessing
17import fcntl
18import importlib
19from importlib import machinery
20import itertools
21import subprocess
22import glob
23import fnmatch
24import traceback
25import errno
26import signal
27import collections
28import copy
29from subprocess import getstatusoutput
30from contextlib import contextmanager
31from ctypes import cdll
32
33logger = logging.getLogger("BitBake.Util")
34python_extensions = importlib.machinery.all_suffixes()
35
36
def clean_context():
    """Return a fresh, minimal globals namespace for executing metadata python.

    Only the modules metadata code is expected to need (os, bb, time) are
    exposed; everything else must be imported explicitly by the executed code.
    """
    exposed = (("os", os), ("bb", bb), ("time", time))
    return dict(exposed)
43
def get_context():
    """Return the shared globals dict used by better_exec()/better_eval()."""
    return _context
46
47
def set_context(ctx):
    """Replace the shared globals dict used by better_exec()/better_eval().

    Bug fix: the original assignment only bound a function-local name, so the
    module-level _context was never actually replaced and get_context() kept
    returning the old dict. Declaring it global makes the setter effective.
    """
    global _context
    _context = ctx
50
# Context used in better_exec, eval
# Module-level default; read via get_context() and replaced via set_context().
_context = clean_context()
53
class VersionStringException(Exception):
    """Exception raised when an invalid version specification is found,
    e.g. a dependency constraint with a missing/unknown comparison operator
    or an unsupported operator passed to vercmp_string_op()."""
56
def explode_version(s):
    """Break a version string into ordered (weight, value) components.

    Numeric runs become (0, int), alphabetic runs (1, str), a '~' becomes
    (-1, '~') so it sorts before anything else, and any other single
    character becomes (2, char). The weights make mixed-type piecewise
    comparison well defined in vercmp_part().
    """
    alpha_run = re.compile(r'^([a-zA-Z]+)(.*)$')
    digit_run = re.compile(r'^(\d+)(.*)$')
    parts = []
    while s:
        head = s[0]
        if head in string.digits:
            m = digit_run.match(s)
            parts.append((0, int(m.group(1))))
            s = m.group(2)
        elif head in string.ascii_letters:
            m = alpha_run.match(s)
            parts.append((1, m.group(1)))
            s = m.group(2)
        else:
            # '~' sorts before everything; any other separator after letters/digits
            parts.append((-1, head) if head == '~' else (2, head))
            s = s[1:]
    return parts
78
def split_version(s):
    """Split a version string into its constituent parts (PE, PV, PR).

    Leading/trailing comparison characters and spaces are stripped first;
    an epoch before ':' defaults to 0 and a revision after the last '-'
    defaults to "".
    """
    s = s.strip(" <>=")
    epoch = 0
    if ':' in s:
        fields = s.split(":")
        epoch = int(fields[0])
        s = fields[1]
    rev = ""
    if '-' in s:
        s, _, rev = s.rpartition("-")
    return (epoch, s, rev)
92
def vercmp_part(a, b):
    """Compare two version-part strings component-wise; return -1, 0 or 1.

    Each string is exploded into (weight, value) pairs; an exhausted side
    yields the (0, None) sentinel, which sorts below any real component of
    the same weight.
    """
    left = explode_version(a)
    right = explode_version(b)
    while True:
        oa, ca = left.pop(0) if left else (0, None)
        ob, cb = right.pop(0) if right else (0, None)
        if (oa, ca) == (0, None) and (ob, cb) == (0, None):
            # Both sides exhausted simultaneously: equal
            return 0
        if oa != ob:
            # Different component categories: the weight decides
            return -1 if oa < ob else 1
        if ca is None:
            return -1
        if cb is None:
            return 1
        # Same category, so ca/cb are directly comparable (both int or both str)
        if ca != cb:
            return -1 if ca < cb else 1
119
def vercmp(ta, tb):
    """Compare two (epoch, version, revision) tuples.

    Returns a negative value if ta < tb, zero if equal, positive if ta > tb.
    The epoch dominates; version and then revision break ties.
    """
    ea, va, ra = ta
    eb, vb, rb = tb

    epoch_delta = int(ea or 0) - int(eb or 0)
    if epoch_delta != 0:
        return epoch_delta
    for lhs, rhs in ((va, vb), (ra, rb)):
        outcome = vercmp_part(lhs, rhs)
        if outcome != 0:
            return outcome
    return 0
130
def vercmp_string(a, b):
    """Split two version strings into (PE, PV, PR) tuples and compare them."""
    return vercmp(split_version(a), split_version(b))
136
def vercmp_string_op(a, b, op):
    """
    Compare two versions and check if the specified comparison operator matches the result of the comparison.
    This function is fairly liberal about what operators it will accept since there are a variety of styles
    depending on the context.
    """
    res = vercmp_string(a, b)
    # Dispatch table covering deb/opkg/rpm-style operator spellings; each
    # predicate works on the sign of the comparison result (which is not
    # limited to -1/0/1 - the epoch delta may be any integer).
    handlers = {
        '=': lambda r: r == 0,
        '==': lambda r: r == 0,
        '<=': lambda r: r <= 0,
        '>=': lambda r: r >= 0,
        '>': lambda r: r > 0,
        '>>': lambda r: r > 0,
        '<': lambda r: r < 0,
        '<<': lambda r: r < 0,
        '!=': lambda r: r != 0,
    }
    if op not in handlers:
        raise VersionStringException('Unsupported comparison operator "%s"' % op)
    return handlers[op](res)
158
def explode_deps(s):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a list of dependencies.
    Version information is ignored.
    """
    deps = []
    in_version = False
    for token in s.split():
        if token.startswith('('):
            # Opening a version constraint; everything until ')' is skipped
            in_version = True
        if not in_version:
            deps.append(token)
        elif token.endswith(')'):
            in_version = False
    return deps
182
def explode_dep_versions2(s, *, sort=True):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a dictionary of dependencies and versions.

    Each value is a list of "<op> <version>" constraint strings (possibly
    empty). Raises VersionStringException when a '(' group does not start
    with a recognised comparison operator.
    """
    r = collections.OrderedDict()
    # Commas are treated as plain separators
    l = s.replace(",", "").split()
    lastdep = None
    lastcmp = ""
    lastver = ""
    incmp = False      # expecting a comparison operator next
    inversion = False  # collecting version text until the closing ')'
    for i in l:
        if i[0] == '(':
            incmp = True
            i = i[1:].strip()
            if not i:
                continue

        if incmp:
            incmp = False
            inversion = True
            # This list is based on behavior and supported comparisons from deb, opkg and rpm.
            #
            # Even though =<, <<, ==, !=, =>, and >> may not be supported,
            # we list each possibly valid item.
            # The build system is responsible for validation of what it supports.
            if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
                lastcmp = i[0:2]
                i = i[2:]
            elif i.startswith(('<', '>', '=')):
                lastcmp = i[0:1]
                i = i[1:]
            else:
                # This is an unsupported case!
                raise VersionStringException('Invalid version specification in "(%s" - invalid or missing operator' % i)
                # NOTE(review): the two lines below are unreachable after the
                # raise above; left untouched to match upstream history.
                lastcmp = (i or "")
                i = ""
            # NOTE(review): str.strip() returns a new string, so this call is a
            # no-op as written (result discarded); left untouched.
            i.strip()
            if not i:
                continue

        if inversion:
            if i.endswith(')'):
                i = i[:-1] or ""
                inversion = False
                if lastver and i:
                    lastver += " "
            if i:
                lastver += i
                if lastdep not in r:
                    r[lastdep] = []
                r[lastdep].append(lastcmp + " " + lastver)
            continue

        #if not inversion:
        lastdep = i
        lastver = ""
        lastcmp = ""
        # Only reset the entry if this dep has no recorded constraints yet
        if not (i in r and r[i]):
            r[lastdep] = []

    if sort:
        r = collections.OrderedDict(sorted(r.items(), key=lambda x: x[0]))
    return r
249
def explode_dep_versions(s):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    skip null value and items appeared in dependancy string multiple times
    and return a dictionary of dependencies and versions.
    """
    collapsed = explode_dep_versions2(s)
    for name in collapsed:
        constraints = collapsed[name]
        if not constraints:
            collapsed[name] = None
        else:
            if len(constraints) > 1:
                bb.warn("explode_dep_versions(): Item %s appeared in dependency string '%s' multiple times with different values. explode_dep_versions cannot cope with this." % (name, s))
            # Legacy API: a single constraint string (or None), not a list
            collapsed[name] = constraints[0]
    return collapsed
266
def join_deps(deps, commasep=True):
    """
    Take the result from explode_dep_versions and generate a dependency string.

    Values may be None/empty (bare dependency), a single constraint string,
    or a list of constraint strings (one "(...)" group emitted per entry).
    """
    pieces = []
    for name, constraint in deps.items():
        if not constraint:
            pieces.append(name)
            continue
        versions = constraint if isinstance(constraint, list) else [constraint]
        for ver in versions:
            pieces.append(name + " (" + ver + ")")
    separator = ", " if commasep else " "
    return separator.join(pieces)
285
286def _print_trace(body, line):
287 """
288 Print the Environment of a Text Body
289 """
290 error = []
291 # print the environment of the method
292 min_line = max(1, line-4)
293 max_line = min(line + 4, len(body))
294 for i in range(min_line, max_line + 1):
295 if line == i:
296 error.append(' *** %.4d:%s' % (i, body[i-1].rstrip()))
297 else:
298 error.append(' %.4d:%s' % (i, body[i-1].rstrip()))
299 return error
300
def better_compile(text, file, realfile, mode = "exec", lineno = 0):
    """
    A better compile method. This method
    will print the offending lines.

    Compiled code objects are cached via bb.methodpool keyed on the source
    text; `lineno` pads the source with blank lines so reported line numbers
    map back to `realfile`. On failure the error context is logged and a
    bb.BBHandledException is raised.
    """
    try:
        cache = bb.methodpool.compile_cache(text)
        if cache:
            return cache
        # We can't add to the linenumbers for compile, we can pad to the correct number of blank lines though
        text2 = "\n" * int(lineno) + text
        code = compile(text2, realfile, mode)
        bb.methodpool.compile_cache_add(text, code)
        return code
    except Exception as e:
        error = []
        # split the text into lines again
        body = text.split('\n')
        # Bug fix: not every exception from compile() carries a lineno
        # (e.g. ValueError for source containing null bytes); reading
        # e.lineno unconditionally raised AttributeError while reporting
        # the real failure. Use getattr with a placeholder instead.
        error.append("Error in compiling python function in %s, line %s:\n" % (realfile, getattr(e, "lineno", "?")))
        if hasattr(e, "lineno"):
            error.append("The code lines resulting in this error were:")
            # e.lineno: line's position in reaflile
            # lineno: function name's "position -1" in realfile
            # e.lineno - lineno: line's relative position in function
            error.extend(_print_trace(body, e.lineno - lineno))
        else:
            error.append("The function causing this error was:")
            for line in body:
                error.append(line)
        error.append("%s: %s" % (e.__class__.__name__, str(e)))

        logger.error("\n".join(error))

        e = bb.BBHandledException(e)
        raise e
336
def _print_exception(t, value, tb, realfile, text, context):
    """Log a detailed report for an exception raised inside better_exec().

    Walks the traceback, printing source context from *text* (the compiled
    function body) or from on-disk files where the frames point outside it.
    Always logs whatever was collected, even if report generation itself
    fails partway through (hence the try/finally).
    """
    error = []
    try:
        exception = traceback.format_exception_only(t, value)
        error.append('Error executing a python function in %s:\n' % realfile)

        # Strip 'us' from the stack (better_exec call) unless that was where the
        # error came from
        if tb.tb_next is not None:
            tb = tb.tb_next

        textarray = text.split('\n')

        linefailed = tb.tb_lineno

        tbextract = traceback.extract_tb(tb)
        tbformat = traceback.format_list(tbextract)
        error.append("The stack trace of python calls that resulted in this exception/failure was:")
        error.append("File: '%s', lineno: %s, function: %s" % (tbextract[0][0], tbextract[0][1], tbextract[0][2]))
        error.extend(_print_trace(textarray, linefailed))

        # See if this is a function we constructed and has calls back into other functions in
        # "text". If so, try and improve the context of the error by diving down the trace
        level = 0
        nexttb = tb.tb_next
        while nexttb is not None and (level+1) < len(tbextract):
            error.append("File: '%s', lineno: %s, function: %s" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2]))
            # NOTE(review): the second operand compares a function name
            # (index [2]) against a filename (index [0]) - looks like it was
            # meant to be tbextract[level][2]; left untouched, verify upstream.
            if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
                # The code was possibly in the string we compiled ourselves
                error.extend(_print_trace(textarray, tbextract[level+1][1]))
            elif tbextract[level+1][0].startswith("/"):
                # The code looks like it might be in a file, try and load it
                try:
                    with open(tbextract[level+1][0], "r") as f:
                        text = f.readlines()
                        error.extend(_print_trace(text, tbextract[level+1][1]))
                except:
                    error.append(tbformat[level+1])
            else:
                error.append(tbformat[level+1])
            # NOTE(review): nexttb is re-read from the same tb each pass; the
            # loop is bounded by the level counter, not by walking the chain.
            nexttb = tb.tb_next
            level = level + 1

        error.append("Exception: %s" % ''.join(exception))

        # If the exception is from spawning a task, let's be helpful and display
        # the output (which hopefully includes stderr).
        if isinstance(value, subprocess.CalledProcessError) and value.output:
            error.append("Subprocess output:")
            error.append(value.output.decode("utf-8", errors="ignore"))
    finally:
        logger.error("\n".join(error))
389
def better_exec(code, context, text = None, realfile = "<code>", pythonexception=False):
    """
    Similar to better_compile, better_exec will
    print the lines that are responsible for the
    error.

    code: source string or an already-compiled code object.
    context: dict used as the exec locals.
    text: original source for error reporting (defaults to code).
    pythonexception: when True, re-raise the original exception instead of
    wrapping it in bb.BBHandledException after logging.
    """
    import bb.parse
    if not text:
        text = code
    # Compile on demand; a code object already has co_filename
    if not hasattr(code, "co_filename"):
        code = better_compile(code, realfile, realfile)
    try:
        exec(code, get_context(), context)
    except (bb.BBHandledException, bb.parse.SkipRecipe, bb.data_smart.ExpansionError):
        # Error already shown so passthrough, no need for traceback
        raise
    except Exception as e:
        if pythonexception:
            raise
        (t, value, tb) = sys.exc_info()
        try:
            _print_exception(t, value, tb, realfile, text, context)
        except Exception as e2:
            # Never let the reporter mask the original failure
            logger.error("Exception handler error: %s" % str(e2))

        e = bb.BBHandledException(e)
        raise e
417
def simple_exec(code, context):
    """Execute *code* against the shared context globals and *context* locals,
    without the error reporting that better_exec() layers on top."""
    exec(code, get_context(), context)
420
def better_eval(source, locals, extraglobals = None):
    """Evaluate *source* with the shared context as globals; *extraglobals*
    entries are merged into a copy so the shared context is never mutated."""
    ctx = get_context()
    if extraglobals:
        merged = copy.copy(ctx)
        merged.update(extraglobals)
        ctx = merged
    return eval(source, ctx, locals)
428
@contextmanager
def fileslocked(files):
    """Context manager for locking and unlocking file locks.

    Acquires a lock for every path in *files* (in order) before yielding and
    releases all of them on exit, even if the body raises.
    """
    held = [bb.utils.lockfile(f) for f in files] if files else []
    try:
        yield
    finally:
        for lock in held:
            bb.utils.unlockfile(lock)
442
def lockfile(name, shared=False, retry=True, block=False):
    """
    Use the specified file as a lock file, return when the lock has
    been acquired. Returns a variable to pass to unlockfile().
    Parameters:
        retry: True to re-try locking if it fails, False otherwise
        block: True to block until the lock succeeds, False otherwise
    The retry and block parameters are kind of equivalent unless you
    consider the possibility of sending a signal to the process to break
    out - at which point you want block=True rather than retry=True.

    Returns the open file object holding the lock, or None when retry is
    False and the non-blocking attempt failed. Exits the process if the
    lock directory is not writable or access is denied.
    """
    dirname = os.path.dirname(name)
    mkdirhier(dirname)

    if not os.access(dirname, os.W_OK):
        logger.error("Unable to acquire lock '%s', directory is not writable",
                     name)
        sys.exit(1)

    op = fcntl.LOCK_EX
    if shared:
        op = fcntl.LOCK_SH
    if not retry and not block:
        op = op | fcntl.LOCK_NB

    while True:
        # If we leave the lockfiles lying around there is no problem
        # but we should clean up after ourselves. This gives potential
        # for races though. To work around this, when we acquire the lock
        # we check the file we locked was still the lock file on disk.
        # by comparing inode numbers. If they don't match or the lockfile
        # no longer exists, we start again.

        # This implementation is unfair since the last person to request the
        # lock is the most likely to win it.

        try:
            lf = open(name, 'a+')
            fileno = lf.fileno()
            fcntl.flock(fileno, op)
            statinfo = os.fstat(fileno)
            if os.path.exists(lf.name):
                statinfo2 = os.stat(lf.name)
                if statinfo.st_ino == statinfo2.st_ino:
                    return lf
            lf.close()
        except OSError as e:
            if e.errno == errno.EACCES:
                # Bug fix: arguments were swapped relative to the format
                # string, producing "lock 'Permission denied', /path/..."
                logger.error("Unable to acquire lock '%s', %s",
                             name, e.strerror)
                sys.exit(1)
            try:
                lf.close()
            except Exception:
                pass
            pass
        if not retry:
            return None
501
def unlockfile(lf):
    """
    Unlock a file locked using lockfile().

    Tries to delete the lock file from disk first (requires promoting a
    shared lock to exclusive); only then drops the flock and closes the
    descriptor. Deletion is best-effort - another holder makes it fail,
    which is fine.
    """
    try:
        # If we had a shared lock, we need to promote to exclusive before
        # removing the lockfile. Attempt this, ignore failures.
        fcntl.flock(lf.fileno(), fcntl.LOCK_EX|fcntl.LOCK_NB)
        os.unlink(lf.name)
    except (IOError, OSError):
        pass
    # Release the lock before closing so waiters wake deterministically
    fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
    lf.close()
515
516def _hasher(method, filename):
517 import mmap
518
519 with open(filename, "rb") as f:
520 try:
521 with mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) as mm:
522 for chunk in iter(lambda: mm.read(8192), b''):
523 method.update(chunk)
524 except ValueError:
525 # You can't mmap() an empty file so silence this exception
526 pass
527 return method.hexdigest()
528
529
def md5_file(filename):
    """
    Return the hex string representation of the MD5 checksum of filename.
    """
    from hashlib import md5
    return _hasher(md5(), filename)
536
def sha256_file(filename):
    """
    Return the hex string representation of the 256-bit SHA checksum of
    filename.
    """
    from hashlib import sha256
    return _hasher(sha256(), filename)
544
def sha1_file(filename):
    """
    Return the hex string representation of the SHA1 checksum of the filename
    """
    from hashlib import sha1
    return _hasher(sha1(), filename)
551
def sha384_file(filename):
    """
    Return the hex string representation of the SHA384 checksum of the filename
    """
    from hashlib import sha384
    return _hasher(sha384(), filename)
558
def sha512_file(filename):
    """
    Return the hex string representation of the SHA512 checksum of the filename
    """
    from hashlib import sha512
    return _hasher(sha512(), filename)
565
def preserved_envvars_exported():
    """Variables which are taken from the environment and placed in and exported
    from the metadata"""
    names = ("BB_TASKHASH HOME LOGNAME PATH PWD "
             "SHELL USER LC_ALL BBSERVER")
    return names.split()
580
def preserved_envvars():
    """Variables which are taken from the environment and placed in the metadata"""
    internal = [
        'BBPATH',
        'BB_PRESERVE_ENV',
        'BB_ENV_WHITELIST',
        'BB_ENV_EXTRAWHITE',
    ]
    return internal + preserved_envvars_exported()
590
def filter_environment(good_vars):
    """
    Create a pristine environment for bitbake. This will remove variables that
    are not known and may influence the build in a negative way.

    Returns a dict of the removed variable names and their former values.
    """

    dropped = {}
    for key in list(os.environ):
        if key in good_vars:
            continue
        dropped[key] = os.environ.pop(key)

    # If we spawn a python process, we need to have a UTF-8 locale, else python's file
    # access methods will use ascii. You can't change that mode once the interpreter is
    # started so we have to ensure a locale is set. Ideally we'd use C.UTF-8 but not all
    # distros support that and we need to set something.
    os.environ["LC_ALL"] = "en_US.UTF-8"

    if dropped:
        logger.debug("Removed the following variables from the environment: %s", ", ".join(dropped.keys()))

    return dropped
615
def approved_variables():
    """
    Determine and return the list of whitelisted variables which are approved
    to remain in the environment.
    """
    environ = os.environ
    if 'BB_PRESERVE_ENV' in environ:
        # Preserve everything as-is
        return environ.keys()
    if 'BB_ENV_WHITELIST' in environ:
        # Explicit whitelist replaces the defaults entirely
        allowed = environ['BB_ENV_WHITELIST'].split()
        allowed.append('BB_ENV_WHITELIST')
        return allowed
    allowed = preserved_envvars()
    if 'BB_ENV_EXTRAWHITE' in environ:
        allowed.extend(environ['BB_ENV_EXTRAWHITE'].split())
        if 'BB_ENV_EXTRAWHITE' not in allowed:
            allowed.append('BB_ENV_EXTRAWHITE')
    return allowed
634
def clean_environment():
    """
    Clean up any spurious environment variables. This will remove any
    variables the user hasn't chosen to preserve.

    Returns the removed variables as a dict (empty when BB_PRESERVE_ENV
    requests that the environment be kept untouched).
    """
    if 'BB_PRESERVE_ENV' in os.environ:
        return {}
    return filter_environment(approved_variables())
645
def empty_environment():
    """
    Remove all variables from the environment.

    Both os.unsetenv and the mapping delete are used so the change is seen
    by subprocesses as well as by os.environ itself.
    """
    for name in list(os.environ.keys()):
        os.unsetenv(name)
        del os.environ[name]
653
def build_environment(d):
    """
    Build an environment from all exported variables.

    Every datastore variable flagged "export" is copied into os.environ,
    with None values mapped to the empty string.
    """
    import bb.data
    for name in bb.data.keys(d):
        if d.getVarFlag(name, "export", False):
            os.environ[name] = d.getVar(name) or ""
663
664def _check_unsafe_delete_path(path):
665 """
666 Basic safeguard against recursively deleting something we shouldn't. If it returns True,
667 the caller should raise an exception with an appropriate message.
668 NOTE: This is NOT meant to be a security mechanism - just a guard against silly mistakes
669 with potentially disastrous results.
670 """
671 extra = ''
672 # HOME might not be /home/something, so in case we can get it, check against it
673 homedir = os.environ.get('HOME', '')
674 if homedir:
675 extra = '|%s' % homedir
676 if re.match('(/|//|/home|/home/[^/]*%s)$' % extra, os.path.abspath(path)):
677 return True
678 return False
679
def remove(path, recurse=False, ionice=False):
    """Equivalent to rm -f or rm -rf.

    *path* is treated as a glob pattern. Non-recursive removal unlinks each
    match, ignoring files that have already vanished. Recursive removal
    shells out to rm -rf (faster than shutil.rmtree), refusing obviously
    dangerous paths such as / or a home directory.
    """
    if not path:
        return
    if recurse:
        for match in glob.glob(path):
            if _check_unsafe_delete_path(path):
                raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % path)
        # shutil.rmtree(name) would be ideal but its too slow
        cmd = ['ionice', '-c', '3'] if ionice else []
        subprocess.check_call(cmd + ['rm', '-rf'] + glob.glob(path))
        return
    for match in glob.glob(path):
        try:
            os.unlink(match)
        except OSError as exc:
            # Already gone is fine; anything else is a real problem
            if exc.errno != errno.ENOENT:
                raise
700
def prunedir(topdir, ionice=False):
    """ Delete everything reachable from the directory named in 'topdir'. """
    # CAUTION: This is dangerous!
    # Refuse obviously catastrophic targets (/, /home, $HOME, ...) before
    # delegating the recursive delete to remove().
    if _check_unsafe_delete_path(topdir):
        raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
    remove(topdir, recurse=True, ionice=ionice)
707
708#
709# Could also use return re.compile("(%s)" % "|".join(map(re.escape, suffixes))).sub(lambda mo: "", var)
# but that's possibly insane and suffixes is probably going to be small
711#
def prune_suffix(var, suffixes, d):
    """
    See if var ends with any of the suffixes listed and
    remove it if found

    Only the first matching suffix is stripped; empty suffixes are ignored.
    (The datastore argument *d* is unused but kept for API compatibility.)
    """
    matched = next((s for s in suffixes if s and var.endswith(s)), None)
    return var[:-len(matched)] if matched else var
721
def mkdirhier(directory):
    """Create a directory like 'mkdir -p', but does not complain if
    directory already exists like os.makedirs
    """

    try:
        os.makedirs(directory)
    except OSError as exc:
        # An existing *directory* is fine; a file in the way or a
        # permission problem is not.
        if exc.errno == errno.EEXIST and os.path.isdir(directory):
            return
        raise exc
732
def movefile(src, dest, newmtime = None, sstat = None):
    """Moves a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems. Returns true on success and false on failure. Move is
    atomic.

    On success returns the new mtime (or a stat result for symlinks);
    on failure prints a diagnostic and returns None. *sstat* may be a
    pre-fetched os.lstat(src) result to avoid a second stat.
    """

    #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        print("movefile: Stating source file failed...", e)
        return None

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        # dest missing: stat its parent so the device comparison below works
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            # Replace an existing symlink destination outright
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        # Source is a symlink: recreate the link at dest, then remove src
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            os.unlink(src)
            return os.lstat(dest)
        except Exception as e:
            print("movefile: failed to properly create symlink:", dest, "->", target, e)
            return None

    renamefailed = 1
    # os.rename needs to know the dest path ending with file name
    # so append the file name to a path only if it's a dir specified
    srcfname = os.path.basename(src)
    destpath = os.path.join(dest, srcfname) if os.path.isdir(dest) \
                else dest

    # Same device: a rename is atomic and cheap
    if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
        try:
            os.rename(src, destpath)
            renamefailed = 0
        except Exception as e:
            # NOTE(review): e.errno assumes an OSError-like exception;
            # a non-OSError here would raise AttributeError - confirm upstream.
            if e.errno != errno.EXDEV:
                # Some random error.
                print("movefile: Failed to move", src, "to", dest, e)
                return None
            # Invalid cross-device-link 'bind' mounted or actually Cross-Device

    if renamefailed:
        # Cross-device fallback: copy to a temp name, rename into place
        didcopy = 0
        if stat.S_ISREG(sstat[stat.ST_MODE]):
            try: # For safety copy then move it over.
                shutil.copyfile(src, destpath + "#new")
                os.rename(destpath + "#new", destpath)
                didcopy = 1
            except Exception as e:
                print('movefile: copy', src, '->', dest, 'failed.', e)
                return None
        else:
            #we don't yet handle special, so we need to fall back to /bin/mv
            a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
            if a[0] != 0:
                print("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a)
                return None # failure
        try:
            if didcopy:
                os.lchown(destpath, sstat[stat.ST_UID], sstat[stat.ST_GID])
                os.chmod(destpath, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
            os.unlink(src)
        except Exception as e:
            print("movefile: Failed to chown/chmod/unlink", dest, e)
            return None

    if newmtime:
        os.utime(destpath, (newmtime, newmtime))
    else:
        # Carry the source mtime over and report it to the caller
        os.utime(destpath, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime
825
def copyfile(src, dest, newmtime = None, sstat = None):
    """
    Copies a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems. Returns true on success and false on failure.

    On success returns the new mtime (or a stat result for symlinks);
    on failure logs a warning and returns False. *sstat* may be a
    pre-fetched os.lstat(src) result to avoid a second stat.
    """
    #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        logger.warning("copyfile: stat of %s failed (%s)" % (src, e))
        return False

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        # dest missing: stat its parent; only destexists matters afterwards
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            # Replace an existing symlink destination outright
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        # Source is a symlink: duplicate the link rather than its target
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            return os.lstat(dest)
        except Exception as e:
            logger.warning("copyfile: failed to create symlink %s to %s (%s)" % (dest, target, e))
            return False

    if stat.S_ISREG(sstat[stat.ST_MODE]):
        try:
            srcchown = False
            if not os.access(src, os.R_OK):
                # Make sure we can read it
                srcchown = True
                os.chmod(src, sstat[stat.ST_MODE] | stat.S_IRUSR)

            # For safety copy then move it over.
            shutil.copyfile(src, dest + "#new")
            os.rename(dest + "#new", dest)
        except Exception as e:
            logger.warning("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
            return False
        finally:
            # Restore the original mode/times if we had to chmod for reading
            if srcchown:
                os.chmod(src, sstat[stat.ST_MODE])
                os.utime(src, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))

    else:
        #we don't yet handle special, so we need to fall back to /bin/mv
        a = getstatusoutput("/bin/cp -f " + "'" + src + "' '" + dest + "'")
        if a[0] != 0:
            logger.warning("copyfile: failed to copy special file %s to %s (%s)" % (src, dest, a))
            return False # failure
    try:
        os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
        os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
    except Exception as e:
        logger.warning("copyfile: failed to chown/chmod %s (%s)" % (dest, e))
        return False

    if newmtime:
        os.utime(dest, (newmtime, newmtime))
    else:
        # Carry the source mtime over and report it to the caller
        os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime
905
def break_hardlinks(src, sstat = None):
    """
    Ensures src is the only hardlink to this file. Other hardlinks,
    if any, are not affected (other than in their st_nlink value, of
    course). Returns true on success and false on failure.

    """
    try:
        sstat = sstat or os.lstat(src)
    except Exception as e:
        logger.warning("break_hardlinks: stat of %s failed (%s)" % (src, e))
        return False
    if sstat[stat.ST_NLINK] == 1:
        # Already the sole link; nothing to do
        return True
    # Copying the file onto itself replaces it with a fresh inode
    return copyfile(src, src, sstat=sstat)
922
def which(path, item, direction = 0, history = False, executable=False):
    """
    Locate `item` in the list of paths `path` (colon separated string like $PATH).
    If `direction` is non-zero then the list is reversed.
    If `history` is True then the list of candidates also returned as result,history.
    If `executable` is True then the candidate has to be an executable file,
    otherwise the candidate simply has to exist.
    """

    def _acceptable(candidate):
        if executable:
            return os.path.isfile(candidate) and os.access(candidate, os.X_OK)
        return os.path.exists(candidate)

    entries = (path or "").split(':')
    if direction != 0:
        entries.reverse()

    attempted = []
    for entry in entries:
        candidate = os.path.join(entry, item)
        attempted.append(candidate)
        if _acceptable(candidate):
            if not os.path.isabs(candidate):
                candidate = os.path.abspath(candidate)
            return (candidate, attempted) if history else candidate

    return ("", attempted) if history else ""
955
@contextmanager
def umask(new_mask):
    """
    Context manager to set the umask to a specific mask, and restore it afterwards.

    Restoration happens even if the body raises.
    """
    saved_mask = os.umask(new_mask)
    try:
        yield
    finally:
        os.umask(saved_mask)
966
def to_boolean(string, default=None):
    """
    Check input string and return boolean value True/False/None
    depending upon the checks.

    Falsy input (None, "") returns *default*. Accepted true spellings are
    y/yes/1/true, false spellings n/no/0/false (case-insensitive); anything
    else raises ValueError.
    """
    if not string:
        return default

    # Robustness fix: callers sometimes pass an already-converted bool,
    # which used to crash on .lower(); accept it as-is.
    if isinstance(string, bool):
        return string

    normalized = string.lower()
    if normalized in ("y", "yes", "1", "true"):
        return True
    elif normalized in ("n", "no", "0", "false"):
        return False
    else:
        raise ValueError("Invalid value for to_boolean: %s" % string)
982
def contains(variable, checkvalues, truevalue, falsevalue, d):
    """Check if a variable contains all the values specified.

    Arguments:

    variable -- the variable name. This will be fetched and expanded (using
    d.getVar(variable)) and then split into a set().

    checkvalues -- if this is a string it is split on whitespace into a set(),
    otherwise coerced directly into a set().

    truevalue -- the value to return if checkvalues is a subset of variable.

    falsevalue -- the value to return if variable is empty or if checkvalues is
    not a subset of variable.

    d -- the data store.
    """

    val = d.getVar(variable)
    if not val:
        return falsevalue
    present = set(val.split())
    wanted = set(checkvalues.split()) if isinstance(checkvalues, str) else set(checkvalues)
    return truevalue if wanted.issubset(present) else falsevalue
1013
def contains_any(variable, checkvalues, truevalue, falsevalue, d):
    """Check if a variable contains any values specified.

    Arguments:

    variable -- the variable name. This will be fetched and expanded (using
    d.getVar(variable)) and then split into a set().

    checkvalues -- if this is a string it is split on whitespace into a set(),
    otherwise coerced directly into a set().

    truevalue -- the value to return if any of checkvalues is present.

    falsevalue -- the value to return if variable is empty or shares no
    element with checkvalues.

    d -- the data store.
    """
    val = d.getVar(variable)
    if not val:
        return falsevalue
    present = set(val.split())
    wanted = set(checkvalues.split()) if isinstance(checkvalues, str) else set(checkvalues)
    return truevalue if wanted & present else falsevalue
1043
def filter(variable, checkvalues, d):
    """Return all words in the variable that are present in the checkvalues.

    Arguments:

    variable -- the variable name. This will be fetched and expanded (using
    d.getVar(variable)) and then split into a set().

    checkvalues -- if this is a string it is split on whitespace into a set(),
    otherwise coerced directly into a set().

    d -- the data store.

    The intersection is returned as a single space-separated, sorted string.
    """

    val = d.getVar(variable)
    if not val:
        return ''
    present = set(val.split())
    wanted = set(checkvalues.split()) if isinstance(checkvalues, str) else set(checkvalues)
    return ' '.join(sorted(wanted & present))
1067
1068
def get_referenced_vars(start_expr, d):
    """
    :param start_expr: unexpanded expression string to analyse
    :param d: data store used to look up the referenced variables' values
    :return: names of vars referenced in start_expr (recursively), in quasi-BFS order (variables within the same level
    are ordered arbitrarily)
    """

    seen = set()
    ret = []

    # The first entry in the queue is the unexpanded start expression
    queue = collections.deque([start_expr])
    # Subsequent entries will be variable names, so we need to track whether or not entry requires getVar
    is_first = True

    empty_data = bb.data.init()
    while queue:
        entry = queue.popleft()
        if is_first:
            # Entry is the start expression - no expansion needed
            is_first = False
            expression = entry
        else:
            # This is a variable name - need to get the value
            expression = d.getVar(entry, False)
            ret.append(entry)

        # expandWithRefs is how we actually get the referenced variables in the expression. We call it using an empty
        # data store because we only want the variables directly used in the expression. It returns a set, which is what
        # dooms us to only ever be "quasi-BFS" rather than full BFS.
        new_vars = empty_data.expandWithRefs(expression, None).references - set(seen)

        queue.extend(new_vars)
        seen.update(new_vars)
    return ret
1103
1104
def cpu_count():
    """Return the number of CPUs in the system, as seen by multiprocessing."""
    return multiprocessing.cpu_count()
1107
def nonblockingfd(fd):
    """Switch the file descriptor fd into non-blocking mode (preserving other flags)."""
    current_flags = fcntl.fcntl(fd, fcntl.F_GETFL)
    fcntl.fcntl(fd, fcntl.F_SETFL, current_flags | os.O_NONBLOCK)
1110
def process_profilelog(fn, pout = None):
    """Convert raw cProfile dump(s) into a human-readable report file.

    Either call with a list of filenames and set pout, or with a single
    filename and optionally pout (defaults to fn + '.processed').
    """
    if not pout:
        pout = fn + '.processed'

    with open(pout, 'w') as report:
        import pstats
        filenames = fn if isinstance(fn, list) else [fn]
        stats = pstats.Stats(*filenames, stream=report)
        # Hottest functions by their own time, plus who called them
        stats.sort_stats('time')
        stats.print_stats()
        stats.print_callers()
        # Then by cumulative time to expose the expensive call trees
        stats.sort_stats('cumulative')
        stats.print_stats()

        report.flush()
1129
#
# Was present to work around multiprocessing pool bugs in python < 2.7.3
#
def multiprocessingpool(*args, **kwargs):
    """Create a multiprocessing.Pool whose result iterators stay interruptible.

    NOTE: this monkeypatches multiprocessing.pool.IMapIterator.next globally
    (affecting every pool in this process), so that waits always use a finite
    timeout and pending signals are delivered.
    """

    import multiprocessing.pool
    #import multiprocessing.util
    #multiprocessing.util.log_to_stderr(10)
    # Deal with a multiprocessing bug where signals to the processes would be delayed until the work
    # completes. Putting in a timeout means the signals (like SIGINT/SIGTERM) get processed.
    def wrapper(func):
        # Replace an indefinitely-blocking next() with a timed wait;
        # 1e100 seconds is effectively "forever" but still interruptible.
        def wrap(self, timeout=None):
            return func(self, timeout=timeout if timeout is not None else 1e100)
        return wrap
    multiprocessing.pool.IMapIterator.next = wrapper(multiprocessing.pool.IMapIterator.next)

    return multiprocessing.Pool(*args, **kwargs)
1147
def exec_flat_python_func(func, *args, **kwargs):
    """Execute a flat python function (defined with def funcname(args):...)

    Builds a snippet of the form 'retval = funcname(arg_1, ..., key=key)',
    runs it via better_compile/better_exec and returns the call's result.
    """
    # Positional values are bound to generated names (arg_1, arg_2, ...) in
    # the execution context; keyword values keep their own names.
    context = {}
    call_args = []
    for index, value in enumerate(args, start=1):
        argname = 'arg_%s' % index
        context[argname] = value
        call_args.append(argname)
    context.update(kwargs)
    call_args.extend('%s=%s' % (key, key) for key in kwargs)
    code = 'retval = %s(%s)' % (func, ', '.join(call_args))
    comp = bb.utils.better_compile(code, '<string>', '<string>')
    bb.utils.better_exec(comp, context, code, '<string>')
    return context['retval']
1170
def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
    """Edit lines from a recipe or config file and modify one or more
    specified variable values set in the file using a specified callback
    function. Lines are expected to have trailing newlines.
    Parameters:
        meta_lines: lines from the file; can be a list or an iterable
            (e.g. file pointer)
        variables: a list of variable names to look for. Functions
            may also be specified, but must be specified with '()' at
            the end of the name. Note that the function doesn't have
            any intrinsic understanding of _append, _prepend, _remove,
            or overrides, so these are considered as part of the name.
            These values go into a regular expression, so regular
            expression syntax is allowed.
        varfunc: callback function called for every variable matching
            one of the entries in the variables parameter. The function
            should take four arguments:
                varname: name of variable matched
                origvalue: current value in file
                op: the operator (e.g. '+=')
                newlines: list of lines up to this point. You can use
                    this to prepend lines before this variable setting
                    if you wish.
            and should return a four-element tuple:
                newvalue: new value to substitute in, or None to drop
                    the variable setting entirely. (If the removal
                    results in two consecutive blank lines, one of the
                    blank lines will also be dropped).
                newop: the operator to use - if you specify None here,
                    the original operation will be used.
                indent: number of spaces to indent multi-line entries,
                    or -1 to indent up to the level of the assignment
                    and opening quote, or a string to use as the indent.
                minbreak: True to allow the first element of a
                    multi-line value to continue on the same line as
                    the assignment, False to indent before the first
                    element.
            To clarify, if you wish not to change the value, then you
            would return like this: return origvalue, None, 0, True
        match_overrides: True to match items with _overrides on the end,
            False otherwise
    Returns a tuple:
        updated:
            True if changes were made, False otherwise.
        newlines:
            Lines after processing
    """

    # Build one compiled regex per requested name: a function-style match
    # ('name() {') or a variable assignment match capturing the quote char.
    # NOTE(review): '[ \\t]' inside these raw strings matches space, a literal
    # backslash or a literal 't' -- not a tab -- TODO confirm intent upstream.
    var_res = {}
    if match_overrides:
        override_re = r'(_[a-zA-Z0-9-_$(){}]+)?'
    else:
        override_re = ''
    for var in variables:
        if var.endswith('()'):
            var_res[var] = re.compile(r'^(%s%s)[ \\t]*\([ \\t]*\)[ \\t]*{' % (var[:-2].rstrip(), override_re))
        else:
            var_res[var] = re.compile(r'^(%s%s)[ \\t]*[?+:.]*=[+.]*[ \\t]*(["\'])' % (var, override_re))

    # Parser state shared with handle_var_end() below:
    #   in_var      - name of the variable/function currently being collected
    #   varset_start- the 'NAME op' prefix of the current assignment
    #   varlines    - original lines of the current assignment (for restore)
    #   full_value  - accumulated (unquoted, continuation-joined) value
    #   var_end     - terminator we are scanning for ('}' or the quote char)
    updated = False
    varset_start = ''
    varlines = []
    newlines = []
    in_var = None
    full_value = ''
    var_end = ''

    def handle_var_end():
        # Invoke the callback for the fully-collected value and append the
        # (possibly rewritten) assignment to newlines. Returns True when the
        # output differs from the input.
        prerun_newlines = newlines[:]
        op = varset_start[len(in_var):].strip()
        (newvalue, newop, indent, minbreak) = varfunc(in_var, full_value, op, newlines)
        changed = (prerun_newlines != newlines)

        if newvalue is None:
            # Drop the value
            return True
        elif newvalue != full_value or (newop not in [None, op]):
            if newop not in [None, op]:
                # Callback changed the operator
                varset_new = "%s %s" % (in_var, newop)
            else:
                varset_new = varset_start

            # Work out the indent string for multi-line values
            if isinstance(indent, int):
                if indent == -1:
                    indentspc = ' ' * (len(varset_new) + 2)
                else:
                    indentspc = ' ' * indent
            else:
                indentspc = indent
            if in_var.endswith('()'):
                # A function definition
                if isinstance(newvalue, list):
                    newlines.append('%s {\n%s%s\n}\n' % (varset_new, indentspc, ('\n%s' % indentspc).join(newvalue)))
                else:
                    if not newvalue.startswith('\n'):
                        newvalue = '\n' + newvalue
                    if not newvalue.endswith('\n'):
                        newvalue = newvalue + '\n'
                    newlines.append('%s {%s}\n' % (varset_new, newvalue))
            else:
                # Normal variable
                if isinstance(newvalue, list):
                    if not newvalue:
                        # Empty list -> empty string
                        newlines.append('%s ""\n' % varset_new)
                    elif minbreak:
                        # First item on first line
                        if len(newvalue) == 1:
                            newlines.append('%s "%s"\n' % (varset_new, newvalue[0]))
                        else:
                            newlines.append('%s "%s \\\n' % (varset_new, newvalue[0]))
                            for item in newvalue[1:]:
                                newlines.append('%s%s \\\n' % (indentspc, item))
                            newlines.append('%s"\n' % indentspc)
                    else:
                        # No item on first line
                        newlines.append('%s " \\\n' % varset_new)
                        for item in newvalue:
                            newlines.append('%s%s \\\n' % (indentspc, item))
                        newlines.append('%s"\n' % indentspc)
                else:
                    newlines.append('%s "%s"\n' % (varset_new, newvalue))
            return True
        else:
            # Put the old lines back where they were
            newlines.extend(varlines)
            # If newlines was touched by the function, we'll need to return True
            return changed

    checkspc = False

    for line in meta_lines:
        if in_var:
            # Continuation of a multi-line assignment or function body
            value = line.rstrip()
            varlines.append(line)
            if in_var.endswith('()'):
                full_value += '\n' + value
            else:
                # Strip the trailing '\' continuation character
                full_value += value[:-1]
            if value.endswith(var_end):
                if in_var.endswith('()'):
                    # Only a '}' that balances all opening braces ends the body
                    if full_value.count('{') - full_value.count('}') >= 0:
                        continue
                    full_value = full_value[:-1]
                if handle_var_end():
                    updated = True
                checkspc = True
                in_var = None
        else:
            skip = False
            for (varname, var_re) in var_res.items():
                res = var_re.match(line)
                if res:
                    isfunc = varname.endswith('()')
                    if isfunc:
                        splitvalue = line.split('{', 1)
                        var_end = '}'
                    else:
                        # Terminator is the same quote char that opened the value
                        var_end = res.groups()[-1]
                        splitvalue = line.split(var_end, 1)
                    varset_start = splitvalue[0].rstrip()
                    value = splitvalue[1].rstrip()
                    if not isfunc and value.endswith('\\'):
                        value = value[:-1]
                    full_value = value
                    varlines = [line]
                    in_var = res.group(1)
                    if isfunc:
                        in_var += '()'
                    if value.endswith(var_end):
                        # Single-line assignment: handle it immediately
                        full_value = full_value[:-1]
                        if handle_var_end():
                            updated = True
                        checkspc = True
                        in_var = None
                    skip = True
                    break
            if not skip:
                if checkspc:
                    checkspc = False
                    if newlines and newlines[-1] == '\n' and line == '\n':
                        # Squash blank line if there are two consecutive blanks after a removal
                        continue
                newlines.append(line)
    return (updated, newlines)
1357
1358
def edit_metadata_file(meta_file, variables, varfunc):
    """Edit a recipe or config file and modify one or more specified
    variable values set in the file using a specified callback function.
    The file is only written to if the value(s) actually change.
    This is basically the file version of edit_metadata(), see that
    function's description for parameter/usage information.
    Returns True if the file was written to, False otherwise.
    """
    with open(meta_file, 'r') as meta:
        updated, newlines = edit_metadata(meta, variables, varfunc)
    # Leave the file (and its mtime) untouched when nothing changed
    if not updated:
        return False
    with open(meta_file, 'w') as meta:
        meta.writelines(newlines)
    return True
1373
1374
def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None):
    """Edit bblayers.conf, adding and/or removing layers
    Parameters:
        bblayers_conf: path to bblayers.conf file to edit
        add: layer path (or list of layer paths) to add; None or empty
            list to add nothing
        remove: layer path (or list of layer paths) to remove; None or
            empty list to remove nothing
        edit_cb: optional callback function that will be called after
            processing adds/removes once per existing entry.
    Returns a tuple:
        notadded: list of layers specified to be added but weren't
            (because they were already in the list)
        notremoved: list of layers that were specified to be removed
            but weren't (because they weren't in the list)
    """

    import fnmatch

    def remove_trailing_sep(pth):
        # Normalise 'path/' -> 'path' so comparisons are consistent
        if pth and pth[-1] == os.sep:
            pth = pth[:-1]
        return pth

    approved = bb.utils.approved_variables()
    def canonicalise_path(pth):
        # Expand '~' only when HOME is an approved (whitelisted) variable
        pth = remove_trailing_sep(pth)
        if 'HOME' in approved and '~' in pth:
            pth = os.path.expanduser(pth)
        return pth

    def layerlist_param(value):
        # Accept None, a single path or a list of paths; always return a list
        if not value:
            return []
        elif isinstance(value, list):
            return [remove_trailing_sep(x) for x in value]
        else:
            return [remove_trailing_sep(value)]

    addlayers = layerlist_param(add)
    removelayers = layerlist_param(remove)

    # Need to use a list here because we can't set non-local variables from a callback in python 2.x
    bblayercalls = []
    removed = []
    plusequals = False
    orig_bblayers = []

    def handle_bblayers_firstpass(varname, origvalue, op, newlines):
        # First pass: record every operator used on BBLAYERS and the current
        # layer list, without modifying anything
        bblayercalls.append(op)
        if op == '=':
            del orig_bblayers[:]
        orig_bblayers.extend([canonicalise_path(x) for x in origvalue.split()])
        return (origvalue, None, 2, False)

    def handle_bblayers(varname, origvalue, op, newlines):
        # Second pass: apply removals, additions and the edit callback
        updated = False
        bblayers = [remove_trailing_sep(x) for x in origvalue.split()]
        if removelayers:
            for removelayer in removelayers:
                for layer in bblayers:
                    if fnmatch.fnmatch(canonicalise_path(layer), canonicalise_path(removelayer)):
                        updated = True
                        bblayers.remove(layer)
                        removed.append(removelayer)
                        break
        if addlayers and not plusequals:
            for addlayer in addlayers:
                if addlayer not in bblayers:
                    updated = True
                    bblayers.append(addlayer)
            del addlayers[:]

        if edit_cb:
            newlist = []
            for layer in bblayers:
                res = edit_cb(layer, canonicalise_path(layer))
                if res != layer:
                    newlist.append(res)
                    updated = True
                else:
                    newlist.append(layer)
            bblayers = newlist

        if updated:
            if op == '+=' and not bblayers:
                # Returning None drops the now-empty '+=' assignment entirely
                bblayers = None
            return (bblayers, None, 2, False)
        else:
            return (origvalue, None, 2, False)

    with open(bblayers_conf, 'r') as f:
        (_, newlines) = edit_metadata(f, ['BBLAYERS'], handle_bblayers_firstpass)

    if not bblayercalls:
        raise Exception('Unable to find BBLAYERS in %s' % bblayers_conf)

    # Try to do the "smart" thing depending on how the user has laid out
    # their bblayers.conf file
    if bblayercalls.count('+=') > 1:
        plusequals = True

    # Layers already present (and not about to be removed) are reported back
    # as "not added" and filtered out of the add list
    removelayers_canon = [canonicalise_path(layer) for layer in removelayers]
    notadded = []
    for layer in addlayers:
        layer_canon = canonicalise_path(layer)
        if layer_canon in orig_bblayers and not layer_canon in removelayers_canon:
            notadded.append(layer)
    notadded_canon = [canonicalise_path(layer) for layer in notadded]
    addlayers[:] = [layer for layer in addlayers if canonicalise_path(layer) not in notadded_canon]

    (updated, newlines) = edit_metadata(newlines, ['BBLAYERS'], handle_bblayers)
    if addlayers:
        # Still need to add these
        for addlayer in addlayers:
            newlines.append('BBLAYERS += "%s"\n' % addlayer)
        updated = True

    if updated:
        with open(bblayers_conf, 'w') as f:
            f.writelines(newlines)

    notremoved = list(set(removelayers) - set(removed))

    return (notadded, notremoved)
1500
def get_collection_res(d):
    """Return a dict mapping each BBFILE_COLLECTIONS entry to its
    BBFILE_PATTERN_<collection> regex (empty string when unset)."""
    collections = (d.getVar('BBFILE_COLLECTIONS') or '').split()
    return {collection: d.getVar('BBFILE_PATTERN_%s' % collection) or ''
            for collection in collections}
1508
1509
def get_file_layer(filename, d, collection_res=None):
    """Determine the collection (as defined by a layer's layer.conf file) containing the specified file.

    Arguments:
    filename -- path of the file to look up
    d -- the data store
    collection_res -- optional pre-computed mapping of collection name to
        BBFILE_PATTERN regex, as returned by get_collection_res(); computed
        from the data store when not supplied.

    Returns the matching collection name, or None if nothing matches.
    """
    # Default used to be a mutable dict ({}); use None to avoid the shared
    # mutable-default pitfall. Behaviour is unchanged: any falsy value means
    # "compute from the datastore".
    if not collection_res:
        collection_res = get_collection_res(d)

    def path_to_layer(path):
        # Use longest regex so we handle nested layers
        matchlen = 0
        match = None
        for collection, regex in collection_res.items():
            if len(regex) > matchlen and re.match(regex, path):
                matchlen = len(regex)
                match = collection
        return match

    result = None
    bbfiles = (d.getVar('BBFILES_PRIORITIZED') or '').split()
    bbfilesmatch = False
    for bbfilesentry in bbfiles:
        if fnmatch.fnmatchcase(filename, bbfilesentry):
            bbfilesmatch = True
            # Match the layer from the BBFILES glob, not the file itself
            result = path_to_layer(bbfilesentry)
            break

    if not bbfilesmatch:
        # Probably a bbclass
        result = path_to_layer(filename)

    return result
1539
1540
# Constant taken from http://linux.die.net/include/linux/prctl.h
PR_SET_PDEATHSIG = 1

class PrCtlError(Exception):
    """Raised when the libc prctl() call fails (see signal_on_parent_exit)."""
    pass
1546
def signal_on_parent_exit(signame):
    """
    Trigger signame to be sent when the parent process dies
    """
    # http://linux.die.net/man/2/prctl
    signum = getattr(signal, signame)
    rc = cdll['libc.so.6'].prctl(PR_SET_PDEATHSIG, signum)
    if rc:
        raise PrCtlError('prctl failed with error code %s' % rc)
1556
#
# Manually call the ioprio syscall. We could depend on other libs like psutil
# however this gets us enough of what we need to bitbake for now without the
# dependency
#
# Machine architecture string from uname, used to select the syscall number
_unamearch = os.uname()[4]
# Values from the Linux ioprio interface (include/linux/ioprio.h)
IOPRIO_WHO_PROCESS = 1
IOPRIO_CLASS_SHIFT = 13
1565
def ioprio_set(who, cls, value):
    """Set the IO priority of a process via the raw ioprio_set syscall.

    who -- process id (interpreted with IOPRIO_WHO_PROCESS semantics)
    cls -- IO scheduling class
    value -- priority level within the class
    Raises ValueError if the syscall fails; warns (without raising) when the
    syscall number for this architecture is unknown.
    """
    # Syscall numbers are architecture specific
    syscall_nrs = {"x86_64": 251, "aarch64": 30}
    NR_ioprio_set = syscall_nrs.get(_unamearch)
    if NR_ioprio_set is None and _unamearch[0] == "i" and _unamearch[2:3] == "86":
        NR_ioprio_set = 289

    if NR_ioprio_set:
        ioprio = value | (cls << IOPRIO_CLASS_SHIFT)
        rc = cdll['libc.so.6'].syscall(NR_ioprio_set, IOPRIO_WHO_PROCESS, who, ioprio)
        if rc != 0:
            raise ValueError("Unable to set ioprio, syscall returned %s" % rc)
    else:
        bb.warn("Unable to set IO Prio for arch %s" % _unamearch)
1582
def set_process_name(name):
    """Set this process's name (as shown by e.g. ps/top) via prctl(PR_SET_NAME).

    This is nice to have for debugging, not essential -- failures (missing
    libc, unsupported platform, etc.) are deliberately ignored.
    """
    from ctypes import cdll, byref, create_string_buffer
    try:
        libc = cdll.LoadLibrary('libc.so.6')
        buf = create_string_buffer(bytes(name, 'utf-8'))
        # 15 == PR_SET_NAME (linux/prctl.h)
        libc.prctl(15, byref(buf), 0, 0, 0)
    except Exception:
        # Best-effort only. Was a bare 'except:' which would also swallow
        # KeyboardInterrupt/SystemExit; narrowed to Exception.
        pass
1592
def export_proxies(d):
    """ export common proxies variables from datastore to environment """
    import os

    variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY',
                 'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY',
                 'GIT_PROXY_COMMAND']
    exported = False

    for name in variables:
        if name in os.environ:
            # Already present in the environment; nothing to copy
            exported = True
            continue
        value = d.getVar(name)
        if value is not None:
            os.environ[name] = value
            exported = True

    return exported
1612
1613
def load_plugins(logger, plugins, pluginpath):
    """Discover and load plugin modules from pluginpath.

    Arguments:
    logger -- logger used for debug messages
    plugins -- list the loaded plugins are appended to (modified in place).
        If a module defines plugin_init(plugins), its (non-None) return value
        is appended instead of the module itself.
    pluginpath -- directory to scan for python modules
    """
    import importlib.util

    def load_plugin(name):
        logger.debug('Loading plugin %s' % name)
        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] )
        if spec:
            # Loader.load_module() was deprecated since Python 3.4 and removed
            # in 3.12; use the module_from_spec()/exec_module() replacement
            mod = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(mod)
            return mod

    logger.debug('Loading plugins from %s...' % pluginpath)

    # Gather candidate module names for every recognised python suffix
    expanded = (glob.glob(os.path.join(pluginpath, '*' + ext))
                for ext in python_extensions)
    files = itertools.chain.from_iterable(expanded)
    names = set(os.path.splitext(os.path.basename(fn))[0] for fn in files)
    for name in names:
        if name != '__init__':
            plugin = load_plugin(name)
            if hasattr(plugin, 'plugin_init'):
                obj = plugin.plugin_init(plugins)
                plugins.append(obj or plugin)
            else:
                plugins.append(plugin)
1635
1636
class LogCatcher(logging.Handler):
    """Logging handler for collecting logged messages so you can check them later"""

    def __init__(self):
        # Collect anything at WARNING level or above
        self.messages = []
        super().__init__(logging.WARNING)

    def emit(self, record):
        # Store the fully formatted message text for later inspection
        self.messages.append(bb.build.logformatter.format(record))

    def contains(self, message):
        return message in self.messages
1646
def is_semver(version):
    """
    Is the version string following the semver semantic?

    https://semver.org/spec/v2.0.0.html
    """
    pattern = re.compile(
        r"""
        ^
        (0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)
        (?:-(
            (?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)
            (?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*
        ))?
        (?:\+(
            [0-9a-zA-Z-]+
            (?:\.[0-9a-zA-Z-]+)*
        ))?
        $
        """, re.VERBOSE)

    return pattern.match(version) is not None