path: root/bitbake/lib/bb/utils.py
Diffstat (limited to 'bitbake/lib/bb/utils.py')
-rw-r--r--  bitbake/lib/bb/utils.py  1051
1 file changed, 823 insertions, 228 deletions
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
index b282d09abf..366836bfc9 100644
--- a/bitbake/lib/bb/utils.py
+++ b/bitbake/lib/bb/utils.py
@@ -11,24 +11,30 @@ import re, fcntl, os, string, stat, shutil, time
11import sys 11import sys
12import errno 12import errno
13import logging 13import logging
14import bb 14import locale
15import bb.msg 15from bb import multiprocessing
16import multiprocessing
17import fcntl
18import importlib 16import importlib
19from importlib import machinery 17import importlib.machinery
18import importlib.util
20import itertools 19import itertools
21import subprocess 20import subprocess
22import glob 21import glob
23import fnmatch 22import fnmatch
24import traceback 23import traceback
25import errno
26import signal 24import signal
27import collections 25import collections
28import copy 26import copy
27import ctypes
28import random
29import socket
30import struct
31import tempfile
29from subprocess import getstatusoutput 32from subprocess import getstatusoutput
30from contextlib import contextmanager 33from contextlib import contextmanager
31from ctypes import cdll 34from ctypes import cdll
35import bb
36import bb.msg
37import bb.filter
32 38
33logger = logging.getLogger("BitBake.Util") 39logger = logging.getLogger("BitBake.Util")
34python_extensions = importlib.machinery.all_suffixes() 40python_extensions = importlib.machinery.all_suffixes()
@@ -43,7 +49,7 @@ def clean_context():
43 49
44def get_context(): 50def get_context():
45 return _context 51 return _context
46 52
47 53
48def set_context(ctx): 54def set_context(ctx):
49 _context = ctx 55 _context = ctx
@@ -77,7 +83,16 @@ def explode_version(s):
77 return r 83 return r
78 84
79def split_version(s): 85def split_version(s):
80 """Split a version string into its constituent parts (PE, PV, PR)""" 86 """Split a version string into its constituent parts (PE, PV, PR).
87
88 Arguments:
89
90 - ``s``: version string. The format of the input string should be::
91
92 ${PE}:${PV}-${PR}
93
94 Returns a tuple ``(pe, pv, pr)``.
95 """
81 s = s.strip(" <>=") 96 s = s.strip(" <>=")
82 e = 0 97 e = 0
83 if s.count(':'): 98 if s.count(':'):
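A minimal usage sketch for the ``bb.utils.split_version()`` API documented above (illustrative only, values made up, not part of the commit)::

    import bb.utils

    # "${PE}:${PV}-${PR}" -> (pe, pv, pr); PE/PR fall back to 0 / "" when absent
    pe, pv, pr = bb.utils.split_version("2:1.4.17-r0")
    # typically (2, '1.4.17', 'r0')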
@@ -129,16 +144,30 @@ def vercmp(ta, tb):
129 return r 144 return r
130 145
131def vercmp_string(a, b): 146def vercmp_string(a, b):
132 """ Split version strings and compare them """ 147 """ Split version strings using ``bb.utils.split_version()`` and compare
148 them with ``bb.utils.vercmp()``.
149
150 Arguments:
151
152 - ``a``: left version string operand.
153 - ``b``: right version string operand.
154
155 Returns what ``bb.utils.vercmp()`` returns."""
133 ta = split_version(a) 156 ta = split_version(a)
134 tb = split_version(b) 157 tb = split_version(b)
135 return vercmp(ta, tb) 158 return vercmp(ta, tb)
136 159
137def vercmp_string_op(a, b, op): 160def vercmp_string_op(a, b, op):
138 """ 161 """
139 Compare two versions and check if the specified comparison operator matches the result of the comparison. 162 Takes the return value of ``bb.utils.vercmp()`` and returns the operation
140 This function is fairly liberal about what operators it will accept since there are a variety of styles 163 defined by ``op`` between the return value and 0.
141 depending on the context. 164
165 Arguments:
166
167 - ``a``: left version string operand.
168 - ``b``: right version string operand.
169 - ``op``: operator string. Can be one of ``=``, ``==``, ``<=``, ``>=``,
170 ``>``, ``>>``, ``<``, ``<<`` or ``!=``.
142 """ 171 """
143 res = vercmp_string(a, b) 172 res = vercmp_string(a, b)
144 if op in ('=', '=='): 173 if op in ('=', '=='):
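A usage sketch for the comparison helpers documented above (illustrative, assuming a bitbake environment where ``bb.utils`` is importable)::

    import bb.utils

    # vercmp_string() returns a value below, equal to or above 0;
    # vercmp_string_op() turns that into a boolean for the given operator
    assert bb.utils.vercmp_string("1.0", "1.1") < 0
    assert bb.utils.vercmp_string_op("1.0", "1.1", "<=")
    assert not bb.utils.vercmp_string_op("1.0", "1.1", ">>")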
@@ -156,11 +185,19 @@ def vercmp_string_op(a, b, op):
156 else: 185 else:
157 raise VersionStringException('Unsupported comparison operator "%s"' % op) 186 raise VersionStringException('Unsupported comparison operator "%s"' % op)
158 187
188@bb.filter.filter_proc(name="bb.utils.explode_deps")
159def explode_deps(s): 189def explode_deps(s):
160 """ 190 """
161 Take an RDEPENDS style string of format: 191 Takes an RDEPENDS style string of format::
162 "DEPEND1 (optional version) DEPEND2 (optional version) ..." 192
163 and return a list of dependencies. 193 DEPEND1 (optional version) DEPEND2 (optional version) ...
194
195 Arguments:
196
197 - ``s``: input RDEPENDS style string
198
199 Returns a list of dependencies.
200
164 Version information is ignored. 201 Version information is ignored.
165 """ 202 """
166 r = [] 203 r = []
@@ -182,9 +219,17 @@ def explode_deps(s):
182 219
183def explode_dep_versions2(s, *, sort=True): 220def explode_dep_versions2(s, *, sort=True):
184 """ 221 """
185 Take an RDEPENDS style string of format: 222 Takes an RDEPENDS style string of format::
186 "DEPEND1 (optional version) DEPEND2 (optional version) ..." 223
187 and return a dictionary of dependencies and versions. 224 DEPEND1 (optional version) DEPEND2 (optional version) ...
225
226 Arguments:
227
228 - ``s``: input RDEPENDS style string
229 - ``*``: *Unused*.
230 - ``sort``: whether to sort the output or not.
231
232 Returns a dictionary of dependencies and versions.
188 """ 233 """
189 r = collections.OrderedDict() 234 r = collections.OrderedDict()
190 l = s.replace(",", "").split() 235 l = s.replace(",", "").split()
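A sketch of how ``bb.utils.explode_deps()`` and ``bb.utils.explode_dep_versions2()`` treat the same input (illustrative only; expected results shown approximately)::

    import bb.utils

    s = "foo (>= 1.2) bar foo"
    bb.utils.explode_deps(s)
    # -> ['foo', 'bar', 'foo']   (version constraints dropped, duplicates kept)
    bb.utils.explode_dep_versions2(s)
    # -> roughly OrderedDict([('bar', []), ('foo', ['>= 1.2'])]) with the default sort=True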
@@ -205,8 +250,8 @@ def explode_dep_versions2(s, *, sort=True):
205 inversion = True 250 inversion = True
206 # This list is based on behavior and supported comparisons from deb, opkg and rpm. 251 # This list is based on behavior and supported comparisons from deb, opkg and rpm.
207 # 252 #
208 # Even though =<, <<, ==, !=, =>, and >> may not be supported, 253 # Even though =<, <<, ==, !=, =>, and >> may not be supported,
209 # we list each possibly valid item. 254 # we list each possibly valid item.
210 # The build system is responsible for validation of what it supports. 255 # The build system is responsible for validation of what it supports.
211 if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')): 256 if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
212 lastcmp = i[0:2] 257 lastcmp = i[0:2]
@@ -249,10 +294,17 @@ def explode_dep_versions2(s, *, sort=True):
249 294
250def explode_dep_versions(s): 295def explode_dep_versions(s):
251 """ 296 """
252 Take an RDEPENDS style string of format: 297 Take an RDEPENDS style string of format::
253 "DEPEND1 (optional version) DEPEND2 (optional version) ..." 298
254 skip null value and items appeared in dependancy string multiple times 299 DEPEND1 (optional version) DEPEND2 (optional version) ...
255 and return a dictionary of dependencies and versions. 300
301 Skips null values and items that appear in the dependency string multiple times.
302
303 Arguments:
304
305 - ``s``: input RDEPENDS style string
306
307 Returns a dictionary of dependencies and versions.
256 """ 308 """
257 r = explode_dep_versions2(s) 309 r = explode_dep_versions2(s)
258 for d in r: 310 for d in r:
@@ -266,7 +318,17 @@ def explode_dep_versions(s):
266 318
267def join_deps(deps, commasep=True): 319def join_deps(deps, commasep=True):
268 """ 320 """
269 Take the result from explode_dep_versions and generate a dependency string 321 Take a result from ``bb.utils.explode_dep_versions()`` and generate a
322 dependency string.
323
324 Arguments:
325
326 - ``deps``: dictionary of dependencies and versions.
327 - ``commasep``: makes the return value separated by commas if ``True``,
328 separated by spaces otherwise.
329
330 Returns a comma-separated (space-separated if ``commasep`` is ``False``)
331 string of dependencies and versions.
270 """ 332 """
271 result = [] 333 result = []
272 for dep in deps: 334 for dep in deps:
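A round-trip sketch with ``bb.utils.join_deps()`` (illustrative only, not part of the commit)::

    import bb.utils

    deps = bb.utils.explode_dep_versions2("foo (>= 1.2) bar", sort=False)
    bb.utils.join_deps(deps)                  # roughly 'foo (>= 1.2), bar'
    bb.utils.join_deps(deps, commasep=False)  # roughly 'foo (>= 1.2) bar'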
@@ -340,7 +402,7 @@ def _print_exception(t, value, tb, realfile, text, context):
340 exception = traceback.format_exception_only(t, value) 402 exception = traceback.format_exception_only(t, value)
341 error.append('Error executing a python function in %s:\n' % realfile) 403 error.append('Error executing a python function in %s:\n' % realfile)
342 404
343 # Strip 'us' from the stack (better_exec call) unless that was where the 405 # Strip 'us' from the stack (better_exec call) unless that was where the
344 # error came from 406 # error came from
345 if tb.tb_next is not None: 407 if tb.tb_next is not None:
346 tb = tb.tb_next 408 tb = tb.tb_next
@@ -379,7 +441,7 @@ def _print_exception(t, value, tb, realfile, text, context):
379 441
380 error.append("Exception: %s" % ''.join(exception)) 442 error.append("Exception: %s" % ''.join(exception))
381 443
382 # If the exception is from spwaning a task, let's be helpful and display 444 # If the exception is from spawning a task, let's be helpful and display
383 # the output (which hopefully includes stderr). 445 # the output (which hopefully includes stderr).
384 if isinstance(value, subprocess.CalledProcessError) and value.output: 446 if isinstance(value, subprocess.CalledProcessError) and value.output:
385 error.append("Subprocess output:") 447 error.append("Subprocess output:")
@@ -400,7 +462,7 @@ def better_exec(code, context, text = None, realfile = "<code>", pythonexception
400 code = better_compile(code, realfile, realfile) 462 code = better_compile(code, realfile, realfile)
401 try: 463 try:
402 exec(code, get_context(), context) 464 exec(code, get_context(), context)
403 except (bb.BBHandledException, bb.parse.SkipRecipe, bb.data_smart.ExpansionError): 465 except (bb.BBHandledException, bb.parse.SkipRecipe, bb.data_smart.ExpansionError, bb.process.ExecutionError):
404 # Error already shown so passthrough, no need for traceback 466 # Error already shown so passthrough, no need for traceback
405 raise 467 raise
406 except Exception as e: 468 except Exception as e:
@@ -427,33 +489,56 @@ def better_eval(source, locals, extraglobals = None):
427 return eval(source, ctx, locals) 489 return eval(source, ctx, locals)
428 490
429@contextmanager 491@contextmanager
430def fileslocked(files): 492def fileslocked(files, *args, **kwargs):
431 """Context manager for locking and unlocking file locks.""" 493 """Context manager for locking and unlocking file locks. Uses
494 ``bb.utils.lockfile()`` and ``bb.utils.unlockfile()`` to lock and unlock
495 files.
496
497 No return value."""
432 locks = [] 498 locks = []
433 if files: 499 if files:
434 for lockfile in files: 500 for lockfile in files:
435 locks.append(bb.utils.lockfile(lockfile)) 501 l = bb.utils.lockfile(lockfile, *args, **kwargs)
502 if l is not None:
503 locks.append(l)
436 504
437 try: 505 try:
438 yield 506 yield
439 finally: 507 finally:
508 locks.reverse()
440 for lock in locks: 509 for lock in locks:
441 bb.utils.unlockfile(lock) 510 bb.utils.unlockfile(lock)
442 511
443def lockfile(name, shared=False, retry=True, block=False): 512def lockfile(name, shared=False, retry=True, block=False):
444 """ 513 """
445 Use the specified file as a lock file, return when the lock has 514 Use the specified file (with filename ``name``) as a lock file, return when
446 been acquired. Returns a variable to pass to unlockfile(). 515 the lock has been acquired. Returns a variable to pass to unlockfile().
447 Parameters: 516
448 retry: True to re-try locking if it fails, False otherwise 517 Arguments:
449 block: True to block until the lock succeeds, False otherwise 518
519 - ``shared``: sets the lock as a shared lock instead of an
520 exclusive lock.
521 - ``retry``: ``True`` to re-try locking if it fails, ``False``
522 otherwise.
523 - ``block``: ``True`` to block until the lock succeeds,
524 ``False`` otherwise.
525
450 The retry and block parameters are kind of equivalent unless you 526 The retry and block parameters are kind of equivalent unless you
451 consider the possibility of sending a signal to the process to break 527 consider the possibility of sending a signal to the process to break
452 out - at which point you want block=True rather than retry=True. 528 out - at which point you want block=True rather than retry=True.
529
530 Returns the locked file descriptor in case of success, ``None`` otherwise.
453 """ 531 """
532 basename = os.path.basename(name)
533 if len(basename) > 255:
534 root, ext = os.path.splitext(basename)
535 basename = root[:255 - len(ext)] + ext
536
454 dirname = os.path.dirname(name) 537 dirname = os.path.dirname(name)
455 mkdirhier(dirname) 538 mkdirhier(dirname)
456 539
540 name = os.path.join(dirname, basename)
541
457 if not os.access(dirname, os.W_OK): 542 if not os.access(dirname, os.W_OK):
458 logger.error("Unable to acquire lock '%s', directory is not writable", 543 logger.error("Unable to acquire lock '%s', directory is not writable",
459 name) 544 name)
@@ -487,7 +572,7 @@ def lockfile(name, shared=False, retry=True, block=False):
487 return lf 572 return lf
488 lf.close() 573 lf.close()
489 except OSError as e: 574 except OSError as e:
490 if e.errno == errno.EACCES: 575 if e.errno == errno.EACCES or e.errno == errno.ENAMETOOLONG:
491 logger.error("Unable to acquire lock '%s', %s", 576 logger.error("Unable to acquire lock '%s', %s",
492 e.strerror, name) 577 e.strerror, name)
493 sys.exit(1) 578 sys.exit(1)
@@ -501,7 +586,13 @@ def lockfile(name, shared=False, retry=True, block=False):
501 586
502def unlockfile(lf): 587def unlockfile(lf):
503 """ 588 """
504 Unlock a file locked using lockfile() 589 Unlock a file locked using ``bb.utils.lockfile()``.
590
591 Arguments:
592
593 - ``lf``: the locked file descriptor.
594
595 No return value.
505 """ 596 """
506 try: 597 try:
507 # If we had a shared lock, we need to promote to exclusive before 598 # If we had a shared lock, we need to promote to exclusive before
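A locking sketch combining the APIs above; the lock file path is hypothetical and not part of the commit::

    import bb.utils

    # Explicit lock/unlock around a critical section ...
    lf = bb.utils.lockfile("/tmp/example.lock")   # retries/blocks until acquired
    try:
        pass  # exclusive work here
    finally:
        bb.utils.unlockfile(lf)

    # ... or the context-manager form, which now forwards extra lockfile() arguments
    with bb.utils.fileslocked(["/tmp/example.lock"], shared=True):
        pass  # shared (read) access here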
@@ -529,43 +620,97 @@ def _hasher(method, filename):
529 620
530def md5_file(filename): 621def md5_file(filename):
531 """ 622 """
532 Return the hex string representation of the MD5 checksum of filename. 623 Arguments:
624
625 - ``filename``: path to the input file.
626
627 Returns the hexadecimal string representation of the MD5 checksum of filename.
533 """ 628 """
534 import hashlib 629 import hashlib
535 return _hasher(hashlib.md5(), filename) 630 try:
631 sig = hashlib.new('MD5', usedforsecurity=False)
632 except TypeError:
633 # Some configurations don't appear to support two arguments
634 sig = hashlib.new('MD5')
635 return _hasher(sig, filename)
536 636
537def sha256_file(filename): 637def sha256_file(filename):
538 """ 638 """
539 Return the hex string representation of the 256-bit SHA checksum of 639 Returns the hexadecimal representation of the 256-bit SHA checksum of
540 filename. 640 filename.
641
642 Arguments:
643
644 - ``filename``: path to the file.
541 """ 645 """
542 import hashlib 646 import hashlib
543 return _hasher(hashlib.sha256(), filename) 647 return _hasher(hashlib.sha256(), filename)
544 648
545def sha1_file(filename): 649def sha1_file(filename):
546 """ 650 """
547 Return the hex string representation of the SHA1 checksum of the filename 651 Returns the hexadecimal representation of the SHA1 checksum of the filename
652
653 Arguments:
654
655 - ``filename``: path to the file.
548 """ 656 """
549 import hashlib 657 import hashlib
550 return _hasher(hashlib.sha1(), filename) 658 return _hasher(hashlib.sha1(), filename)
551 659
552def sha384_file(filename): 660def sha384_file(filename):
553 """ 661 """
554 Return the hex string representation of the SHA384 checksum of the filename 662 Returns the hexadecimal representation of the SHA384 checksum of the filename
663
664 Arguments:
665
666 - ``filename``: path to the file.
555 """ 667 """
556 import hashlib 668 import hashlib
557 return _hasher(hashlib.sha384(), filename) 669 return _hasher(hashlib.sha384(), filename)
558 670
559def sha512_file(filename): 671def sha512_file(filename):
560 """ 672 """
561 Return the hex string representation of the SHA512 checksum of the filename 673 Returns the hexadecimal representation of the SHA512 checksum of the filename
674
675 Arguments:
676
677 - ``filename``: path to the file.
562 """ 678 """
563 import hashlib 679 import hashlib
564 return _hasher(hashlib.sha512(), filename) 680 return _hasher(hashlib.sha512(), filename)
565 681
682def goh1_file(filename):
683 """
684 Returns the hexadecimal string representation of the Go mod h1 checksum of the
685 filename. The Go mod h1 checksum uses the Go dirhash package. The package
686 defines hashes over directory trees and is used by go mod for mod files and
687 zip archives.
688
689 Arguments:
690
691 - ``filename``: path to the file.
692 """
693 import hashlib
694 import zipfile
695
696 lines = []
697 if zipfile.is_zipfile(filename):
698 with zipfile.ZipFile(filename) as archive:
699 for fn in sorted(archive.namelist()):
700 method = hashlib.sha256()
701 method.update(archive.read(fn))
702 hash = method.hexdigest()
703 lines.append("%s %s\n" % (hash, fn))
704 else:
705 hash = _hasher(hashlib.sha256(), filename)
706 lines.append("%s go.mod\n" % hash)
707 method = hashlib.sha256()
708 method.update("".join(lines).encode('utf-8'))
709 return method.hexdigest()
710
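A sketch of the checksum helpers, including the new ``goh1_file()`` (file names are hypothetical)::

    import bb.utils

    # every *_file() helper returns the hex digest of the file contents
    digest = bb.utils.sha256_file("downloads/archive.tar.gz")

    # goh1_file() hashes a Go module zip entry by entry (or a bare go.mod file)
    # following the Go dirhash layout described in the docstring above
    h1 = bb.utils.goh1_file("downloads/module.zip")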
566def preserved_envvars_exported(): 711def preserved_envvars_exported():
567 """Variables which are taken from the environment and placed in and exported 712 """Returns the list of variables which are taken from the environment and
568 from the metadata""" 713 placed in and exported from the metadata."""
569 return [ 714 return [
570 'BB_TASKHASH', 715 'BB_TASKHASH',
571 'HOME', 716 'HOME',
@@ -579,19 +724,42 @@ def preserved_envvars_exported():
579 ] 724 ]
580 725
581def preserved_envvars(): 726def preserved_envvars():
582 """Variables which are taken from the environment and placed in the metadata""" 727 """Returns the list of variables which are taken from the environment and
728 placed in the metadata."""
583 v = [ 729 v = [
584 'BBPATH', 730 'BBPATH',
585 'BB_PRESERVE_ENV', 731 'BB_PRESERVE_ENV',
586 'BB_ENV_WHITELIST', 732 'BB_ENV_PASSTHROUGH_ADDITIONS',
587 'BB_ENV_EXTRAWHITE',
588 ] 733 ]
589 return v + preserved_envvars_exported() 734 return v + preserved_envvars_exported()
590 735
736def check_system_locale():
737 """Make sure the required system locale are available and configured.
738
739 No return value."""
740 default_locale = locale.getlocale(locale.LC_CTYPE)
741
742 try:
743 locale.setlocale(locale.LC_CTYPE, ("en_US", "UTF-8"))
744 except:
745 sys.exit("Please make sure locale 'en_US.UTF-8' is available on your system")
746 else:
747 locale.setlocale(locale.LC_CTYPE, default_locale)
748
749 if sys.getfilesystemencoding() != "utf-8":
750 sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\n"
751 "Python can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.")
752
591def filter_environment(good_vars): 753def filter_environment(good_vars):
592 """ 754 """
593 Create a pristine environment for bitbake. This will remove variables that 755 Create a pristine environment for bitbake. This will remove variables that
594 are not known and may influence the build in a negative way. 756 are not known and may influence the build in a negative way.
757
758 Arguments:
759
760 - ``good_vars``: list of variables to exclude from the filtering.
761
762 No return value.
595 """ 763 """
596 764
597 removed_vars = {} 765 removed_vars = {}
@@ -615,27 +783,29 @@ def filter_environment(good_vars):
615 783
616def approved_variables(): 784def approved_variables():
617 """ 785 """
618 Determine and return the list of whitelisted variables which are approved 786 Determine and return the list of variables which are approved
619 to remain in the environment. 787 to remain in the environment.
620 """ 788 """
621 if 'BB_PRESERVE_ENV' in os.environ: 789 if 'BB_PRESERVE_ENV' in os.environ:
622 return os.environ.keys() 790 return os.environ.keys()
623 approved = [] 791 approved = []
624 if 'BB_ENV_WHITELIST' in os.environ: 792 if 'BB_ENV_PASSTHROUGH' in os.environ:
625 approved = os.environ['BB_ENV_WHITELIST'].split() 793 approved = os.environ['BB_ENV_PASSTHROUGH'].split()
626 approved.extend(['BB_ENV_WHITELIST']) 794 approved.extend(['BB_ENV_PASSTHROUGH'])
627 else: 795 else:
628 approved = preserved_envvars() 796 approved = preserved_envvars()
629 if 'BB_ENV_EXTRAWHITE' in os.environ: 797 if 'BB_ENV_PASSTHROUGH_ADDITIONS' in os.environ:
630 approved.extend(os.environ['BB_ENV_EXTRAWHITE'].split()) 798 approved.extend(os.environ['BB_ENV_PASSTHROUGH_ADDITIONS'].split())
631 if 'BB_ENV_EXTRAWHITE' not in approved: 799 if 'BB_ENV_PASSTHROUGH_ADDITIONS' not in approved:
632 approved.extend(['BB_ENV_EXTRAWHITE']) 800 approved.extend(['BB_ENV_PASSTHROUGH_ADDITIONS'])
633 return approved 801 return approved
634 802
635def clean_environment(): 803def clean_environment():
636 """ 804 """
637 Clean up any spurious environment variables. This will remove any 805 Clean up any spurious environment variables. This will remove any
638 variables the user hasn't chosen to preserve. 806 variables the user hasn't chosen to preserve.
807
808 No return value.
639 """ 809 """
640 if 'BB_PRESERVE_ENV' not in os.environ: 810 if 'BB_PRESERVE_ENV' not in os.environ:
641 good_vars = approved_variables() 811 good_vars = approved_variables()
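A sketch of how the renamed passthrough variables documented above are consumed (the variable names in the example value are hypothetical)::

    import os
    import bb.utils

    # extra environment variables the user wants to keep, on top of preserved_envvars()
    os.environ["BB_ENV_PASSTHROUGH_ADDITIONS"] = "SSH_AUTH_SOCK MY_OPTION"

    good_vars = bb.utils.approved_variables()   # preserved vars plus the additions
    bb.utils.filter_environment(good_vars)      # everything else leaves os.environ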
@@ -646,6 +816,8 @@ def clean_environment():
646def empty_environment(): 816def empty_environment():
647 """ 817 """
648 Remove all variables from the environment. 818 Remove all variables from the environment.
819
820 No return value.
649 """ 821 """
650 for s in list(os.environ.keys()): 822 for s in list(os.environ.keys()):
651 os.unsetenv(s) 823 os.unsetenv(s)
@@ -654,6 +826,12 @@ def empty_environment():
654def build_environment(d): 826def build_environment(d):
655 """ 827 """
656 Build an environment from all exported variables. 828 Build an environment from all exported variables.
829
830 Arguments:
831
832 - ``d``: the data store.
833
834 No return value.
657 """ 835 """
658 import bb.data 836 import bb.data
659 for var in bb.data.keys(d): 837 for var in bb.data.keys(d):
@@ -678,13 +856,23 @@ def _check_unsafe_delete_path(path):
678 return False 856 return False
679 857
680def remove(path, recurse=False, ionice=False): 858def remove(path, recurse=False, ionice=False):
681 """Equivalent to rm -f or rm -rf""" 859 """Equivalent to rm -f or rm -rf.
860
861 Arguments:
862
863 - ``path``: path to file/directory to remove.
864 - ``recurse``: deletes recursively if ``True``.
865 - ``ionice``: prepends ``ionice -c 3`` to the ``rm`` command. See ``man
866 ionice``.
867
868 No return value.
869 """
682 if not path: 870 if not path:
683 return 871 return
684 if recurse: 872 if recurse:
685 for name in glob.glob(path): 873 for name in glob.glob(path):
686 if _check_unsafe_delete_path(path): 874 if _check_unsafe_delete_path(name):
687 raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % path) 875 raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % name)
688 # shutil.rmtree(name) would be ideal but its too slow 876 # shutil.rmtree(name) would be ideal but its too slow
689 cmd = [] 877 cmd = []
690 if ionice: 878 if ionice:
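A sketch of ``bb.utils.remove()`` (paths are hypothetical)::

    import bb.utils

    bb.utils.remove("tmp/work/old.log")                       # like rm -f
    bb.utils.remove("tmp/work/image-*", recurse=True)         # like rm -rf, glob expanded
    bb.utils.remove("tmp/deploy", recurse=True, ionice=True)  # rm prefixed with ionice -c 3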
@@ -699,7 +887,17 @@ def remove(path, recurse=False, ionice=False):
699 raise 887 raise
700 888
701def prunedir(topdir, ionice=False): 889def prunedir(topdir, ionice=False):
702 """ Delete everything reachable from the directory named in 'topdir'. """ 890 """
891 Delete everything reachable from the directory named in ``topdir``.
892
893 Arguments:
894
895 - ``topdir``: directory path.
896 - ``ionice``: prepends ``ionice -c 3`` to the ``rm`` command. See ``man
897 ionice``.
898
899 No return value.
900 """
703 # CAUTION: This is dangerous! 901 # CAUTION: This is dangerous!
704 if _check_unsafe_delete_path(topdir): 902 if _check_unsafe_delete_path(topdir):
705 raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir) 903 raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
@@ -710,9 +908,16 @@ def prunedir(topdir, ionice=False):
710# but thats possibly insane and suffixes is probably going to be small 908# but thats possibly insane and suffixes is probably going to be small
711# 909#
712def prune_suffix(var, suffixes, d): 910def prune_suffix(var, suffixes, d):
713 """ 911 """
714 See if var ends with any of the suffixes listed and 912 Check if ``var`` ends with any of the suffixes listed in ``suffixes`` and
715 remove it if found 913 remove it if found.
914
915 Arguments:
916
917 - ``var``: string to check for suffixes.
918 - ``suffixes``: list of strings representing suffixes to check for.
919
920 Returns the string ``var`` without the suffix.
716 """ 921 """
717 for suffix in suffixes: 922 for suffix in suffixes:
718 if suffix and var.endswith(suffix): 923 if suffix and var.endswith(suffix):
@@ -721,9 +926,16 @@ def prune_suffix(var, suffixes, d):
721 926
722def mkdirhier(directory): 927def mkdirhier(directory):
723 """Create a directory like 'mkdir -p', but does not complain if 928 """Create a directory like 'mkdir -p', but does not complain if
724 directory already exists like os.makedirs 929 directory already exists like ``os.makedirs()``.
725 """
726 930
931 Arguments:
932
933 - ``directory``: path to the directory.
934
935 No return value.
936 """
937 if '${' in str(directory):
938 bb.fatal("Directory name {} contains an unexpanded bitbake variable. This may cause build failures and WORKDIR pollution.".format(directory))
727 try: 939 try:
728 os.makedirs(directory) 940 os.makedirs(directory)
729 except OSError as e: 941 except OSError as e:
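A sketch of ``bb.utils.mkdirhier()`` and ``bb.utils.prune_suffix()`` (paths and names are hypothetical; ``d`` is not used by the current implementation, so ``None`` is passed here)::

    import bb.utils

    # "mkdir -p": an already existing directory is not an error
    bb.utils.mkdirhier("tmp/deploy/images")

    # strips the first matching suffix and returns the rest
    bb.utils.prune_suffix("linux-yocto-dev", ["-dev", "-rt"], None)   # -> 'linux-yocto'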
@@ -731,10 +943,24 @@ def mkdirhier(directory):
731 raise e 943 raise e
732 944
733def movefile(src, dest, newmtime = None, sstat = None): 945def movefile(src, dest, newmtime = None, sstat = None):
734 """Moves a file from src to dest, preserving all permissions and 946 """Moves a file from ``src`` to ``dest``, preserving all permissions and
735 attributes; mtime will be preserved even when moving across 947 attributes; mtime will be preserved even when moving across
736 filesystems. Returns true on success and false on failure. Move is 948 filesystems. Returns ``True`` on success and ``False`` on failure. Move is
737 atomic. 949 atomic.
950
951 Arguments:
952
953 - ``src``: Source file.
954 - ``dest``: Destination file.
955 - ``newmtime``: new mtime to be passed as float seconds since the epoch.
956 - ``sstat``: os.stat_result to use for the destination file.
957
958 Returns an ``os.stat_result`` of the destination file if the
959 source file is a symbolic link or the ``sstat`` argument represents a
960 symbolic link - in which case the destination file will also be created as
961 a symbolic link.
962
963 Otherwise, returns ``newmtime`` on success and ``False`` on failure.
738 """ 964 """
739 965
740 #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")" 966 #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
@@ -742,7 +968,7 @@ def movefile(src, dest, newmtime = None, sstat = None):
742 if not sstat: 968 if not sstat:
743 sstat = os.lstat(src) 969 sstat = os.lstat(src)
744 except Exception as e: 970 except Exception as e:
745 print("movefile: Stating source file failed...", e) 971 logger.warning("movefile: Stating source file failed...", e)
746 return None 972 return None
747 973
748 destexists = 1 974 destexists = 1
@@ -770,7 +996,7 @@ def movefile(src, dest, newmtime = None, sstat = None):
770 os.unlink(src) 996 os.unlink(src)
771 return os.lstat(dest) 997 return os.lstat(dest)
772 except Exception as e: 998 except Exception as e:
773 print("movefile: failed to properly create symlink:", dest, "->", target, e) 999 logger.warning("movefile: failed to properly create symlink:", dest, "->", target, e)
774 return None 1000 return None
775 1001
776 renamefailed = 1 1002 renamefailed = 1
@@ -782,12 +1008,12 @@ def movefile(src, dest, newmtime = None, sstat = None):
782 1008
783 if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]: 1009 if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
784 try: 1010 try:
785 os.rename(src, destpath) 1011 bb.utils.rename(src, destpath)
786 renamefailed = 0 1012 renamefailed = 0
787 except Exception as e: 1013 except Exception as e:
788 if e.errno != errno.EXDEV: 1014 if e.errno != errno.EXDEV:
789 # Some random error. 1015 # Some random error.
790 print("movefile: Failed to move", src, "to", dest, e) 1016 logger.warning("movefile: Failed to move", src, "to", dest, e)
791 return None 1017 return None
792 # Invalid cross-device-link 'bind' mounted or actually Cross-Device 1018 # Invalid cross-device-link 'bind' mounted or actually Cross-Device
793 1019
@@ -796,16 +1022,16 @@ def movefile(src, dest, newmtime = None, sstat = None):
796 if stat.S_ISREG(sstat[stat.ST_MODE]): 1022 if stat.S_ISREG(sstat[stat.ST_MODE]):
797 try: # For safety copy then move it over. 1023 try: # For safety copy then move it over.
798 shutil.copyfile(src, destpath + "#new") 1024 shutil.copyfile(src, destpath + "#new")
799 os.rename(destpath + "#new", destpath) 1025 bb.utils.rename(destpath + "#new", destpath)
800 didcopy = 1 1026 didcopy = 1
801 except Exception as e: 1027 except Exception as e:
802 print('movefile: copy', src, '->', dest, 'failed.', e) 1028 logger.warning('movefile: copy %s -> %s failed (%s)', src, dest, e)
803 return None 1029 return None
804 else: 1030 else:
805 #we don't yet handle special, so we need to fall back to /bin/mv 1031 #we don't yet handle special, so we need to fall back to /bin/mv
806 a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'") 1032 a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
807 if a[0] != 0: 1033 if a[0] != 0:
808 print("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a) 1034 logger.warning("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a)
809 return None # failure 1035 return None # failure
810 try: 1036 try:
811 if didcopy: 1037 if didcopy:
@@ -813,7 +1039,7 @@ def movefile(src, dest, newmtime = None, sstat = None):
813 os.chmod(destpath, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown 1039 os.chmod(destpath, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
814 os.unlink(src) 1040 os.unlink(src)
815 except Exception as e: 1041 except Exception as e:
816 print("movefile: Failed to chown/chmod/unlink", dest, e) 1042 logger.warning("movefile: Failed to chown/chmod/unlink", dest, e)
817 return None 1043 return None
818 1044
819 if newmtime: 1045 if newmtime:
@@ -825,9 +1051,24 @@ def movefile(src, dest, newmtime = None, sstat = None):
825 1051
826def copyfile(src, dest, newmtime = None, sstat = None): 1052def copyfile(src, dest, newmtime = None, sstat = None):
827 """ 1053 """
828 Copies a file from src to dest, preserving all permissions and 1054 Copies a file from ``src`` to ``dest``, preserving all permissions and
829 attributes; mtime will be preserved even when moving across 1055 attributes; mtime will be preserved even when moving across
830 filesystems. Returns true on success and false on failure. 1056 filesystems.
1057
1058 Arguments:
1059
1060 - ``src``: Source file.
1061 - ``dest``: Destination file.
1062 - ``newmtime``: new mtime to be passed as float seconds since the epoch.
1063 - ``sstat``: os.stat_result to use for the destination file.
1064
1065 Returns an ``os.stat_result`` of the destination file if the
1066 source file is a symbolic link or the ``sstat`` argument represents a
1067 symbolic link - in which case the destination file will also be created as
1068 a symbolic link.
1069
1070 Otherwise, returns ``newmtime`` on success and ``False`` on failure.
1071
831 """ 1072 """
832 #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")" 1073 #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
833 try: 1074 try:
@@ -874,7 +1115,7 @@ def copyfile(src, dest, newmtime = None, sstat = None):
874 1115
875 # For safety copy then move it over. 1116 # For safety copy then move it over.
876 shutil.copyfile(src, dest + "#new") 1117 shutil.copyfile(src, dest + "#new")
877 os.rename(dest + "#new", dest) 1118 bb.utils.rename(dest + "#new", dest)
878 except Exception as e: 1119 except Exception as e:
879 logger.warning("copyfile: copy %s to %s failed (%s)" % (src, dest, e)) 1120 logger.warning("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
880 return False 1121 return False
@@ -905,10 +1146,16 @@ def copyfile(src, dest, newmtime = None, sstat = None):
905 1146
906def break_hardlinks(src, sstat = None): 1147def break_hardlinks(src, sstat = None):
907 """ 1148 """
908 Ensures src is the only hardlink to this file. Other hardlinks, 1149 Ensures ``src`` is the only hardlink to this file. Other hardlinks,
909 if any, are not affected (other than in their st_nlink value, of 1150 if any, are not affected (other than in their st_nlink value, of
910 course). Returns true on success and false on failure. 1151 course).
1152
1153 Arguments:
1154
1155 - ``src``: source file path.
1156 - ``sstat``: os.stat_result to use when checking if the file is a link.
911 1157
1158 Returns ``True`` on success and ``False`` on failure.
912 """ 1159 """
913 try: 1160 try:
914 if not sstat: 1161 if not sstat:
@@ -922,11 +1169,24 @@ def break_hardlinks(src, sstat = None):
922 1169
923def which(path, item, direction = 0, history = False, executable=False): 1170def which(path, item, direction = 0, history = False, executable=False):
924 """ 1171 """
925 Locate `item` in the list of paths `path` (colon separated string like $PATH). 1172 Locate ``item`` in the list of paths ``path`` (colon separated string like
926 If `direction` is non-zero then the list is reversed. 1173 ``$PATH``).
927 If `history` is True then the list of candidates also returned as result,history. 1174
928 If `executable` is True then the candidate has to be an executable file, 1175 Arguments:
929 otherwise the candidate simply has to exist. 1176
1177 - ``path``: list of colon-separated paths.
1178 - ``item``: string to search for.
1179 - ``direction``: if non-zero then the list is reversed.
1180 - ``history``: if ``True`` then the list of candidates also returned as
1181 ``result,history`` where ``history`` is the list of previous paths
1182 checked.
1183 - ``executable``: if ``True`` then the candidate defined by ``path`` has
1184 to be an executable file, otherwise if ``False`` the candidate simply
1185 has to exist.
1186
1187 Returns the item if found in the list of paths, otherwise an empty string.
1188 If ``history`` is ``True``, return the list of previous paths checked in a
1189 tuple with the found (or not found) item as ``(item, history)``.
930 """ 1190 """
931 1191
932 if executable: 1192 if executable:
@@ -953,10 +1213,29 @@ def which(path, item, direction = 0, history = False, executable=False):
953 return "", hist 1213 return "", hist
954 return "" 1214 return ""
955 1215
1216def to_filemode(input):
1217 """
1218 Take a bitbake variable's contents defining a file mode and return
1219 the proper python representation of the number
1220
1221 Arguments:
1222
1223 - ``input``: a string or number to convert, e.g. a bitbake variable
1224 string, assumed to be an octal representation
1225
1226 Returns the python file mode as a number
1227 """
1228 # umask might come in as a number or text string..
1229 if type(input) is int:
1230 return input
1231 return int(input, 8)
1232
956@contextmanager 1233@contextmanager
957def umask(new_mask): 1234def umask(new_mask):
958 """ 1235 """
959 Context manager to set the umask to a specific mask, and restore it afterwards. 1236 Context manager to set the umask to a specific mask, and restore it afterwards.
1237
1238 No return value.
960 """ 1239 """
961 current_mask = os.umask(new_mask) 1240 current_mask = os.umask(new_mask)
962 try: 1241 try:
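A sketch of ``which()``, the new ``to_filemode()`` and the ``umask()`` context manager (illustrative only)::

    import os
    import bb.utils

    # which() walks a $PATH-style string; history=True also returns the paths tried
    bb.utils.which(os.environ["PATH"], "bitbake", executable=True)
    found, tried = bb.utils.which(os.environ["PATH"], "bitbake", history=True)

    # to_filemode() accepts the usual octal-string form of a bitbake variable
    mode = bb.utils.to_filemode("0755")     # 0o755 as an int

    # umask() restores the previous mask when the block exits
    with bb.utils.umask(0o022):
        os.makedirs("tmp/newdir", exist_ok=True)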
@@ -965,13 +1244,26 @@ def umask(new_mask):
965 os.umask(current_mask) 1244 os.umask(current_mask)
966 1245
967def to_boolean(string, default=None): 1246def to_boolean(string, default=None):
968 """ 1247 """
969 Check input string and return boolean value True/False/None 1248 Check input string and return boolean value True/False/None
970 depending upon the checks 1249 depending upon the checks.
1250
1251 Arguments:
1252
1253 - ``string``: input string.
1254 - ``default``: default return value if the input ``string`` is ``None``,
1255 ``0``, ``False`` or an empty string.
1256
1257 Returns ``True`` if the string is one of "y", "yes", "1", "true", ``False``
1258 if the string is one of "n", "no", "0", or "false". Return ``default`` if
1259 the input ``string`` is ``None``, ``0``, ``False`` or an empty string.
971 """ 1260 """
972 if not string: 1261 if not string:
973 return default 1262 return default
974 1263
1264 if isinstance(string, int):
1265 return string != 0
1266
975 normalized = string.lower() 1267 normalized = string.lower()
976 if normalized in ("y", "yes", "1", "true"): 1268 if normalized in ("y", "yes", "1", "true"):
977 return True 1269 return True
@@ -985,18 +1277,17 @@ def contains(variable, checkvalues, truevalue, falsevalue, d):
985 1277
986 Arguments: 1278 Arguments:
987 1279
988 variable -- the variable name. This will be fetched and expanded (using 1280 - ``variable``: the variable name. This will be fetched and expanded (using
989 d.getVar(variable)) and then split into a set(). 1281 d.getVar(variable)) and then split into a set().
990 1282 - ``checkvalues``: if this is a string it is split on whitespace into a set(),
991 checkvalues -- if this is a string it is split on whitespace into a set(), 1283 otherwise coerced directly into a set().
992 otherwise coerced directly into a set(). 1284 - ``truevalue``: the value to return if checkvalues is a subset of variable.
993 1285 - ``falsevalue``: the value to return if variable is empty or if checkvalues is
994 truevalue -- the value to return if checkvalues is a subset of variable. 1286 not a subset of variable.
995 1287 - ``d``: the data store.
996 falsevalue -- the value to return if variable is empty or if checkvalues is
997 not a subset of variable.
998 1288
999 d -- the data store. 1289 Returns ``truevalue`` if the variable contains the values specified,
1290 ``falsevalue`` otherwise.
1000 """ 1291 """
1001 1292
1002 val = d.getVar(variable) 1293 val = d.getVar(variable)
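A sketch of ``to_boolean()`` and ``contains()`` against a freshly initialised datastore (``bb.data.init()`` is assumed to be available, as in a normal bitbake process; values are made up)::

    import bb.data
    import bb.utils

    bb.utils.to_boolean("yes")         # -> True
    bb.utils.to_boolean("0")           # -> False
    bb.utils.to_boolean(None, True)    # -> True (the supplied default)

    d = bb.data.init()
    d.setVar("DISTRO_FEATURES", "acl ipv6 systemd")
    bb.utils.contains("DISTRO_FEATURES", "systemd", "true", "false", d)   # -> 'true'
    bb.utils.contains("DISTRO_FEATURES", "systemd x11", "yes", "no", d)   # -> 'no'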
@@ -1016,18 +1307,17 @@ def contains_any(variable, checkvalues, truevalue, falsevalue, d):
1016 1307
1017 Arguments: 1308 Arguments:
1018 1309
1019 variable -- the variable name. This will be fetched and expanded (using 1310 - ``variable``: the variable name. This will be fetched and expanded (using
1020 d.getVar(variable)) and then split into a set(). 1311 d.getVar(variable)) and then split into a set().
1312 - ``checkvalues``: if this is a string it is split on whitespace into a set(),
1313 otherwise coerced directly into a set().
1314 - ``truevalue``: the value to return if checkvalues is a subset of variable.
1315 - ``falsevalue``: the value to return if variable is empty or if checkvalues is
1316 not a subset of variable.
1317 - ``d``: the data store.
1021 1318
1022 checkvalues -- if this is a string it is split on whitespace into a set(), 1319 Returns ``truevalue`` if the variable contains any of the values specified,
1023 otherwise coerced directly into a set(). 1320 ``falsevalue`` otherwise.
1024
1025 truevalue -- the value to return if checkvalues is a subset of variable.
1026
1027 falsevalue -- the value to return if variable is empty or if checkvalues is
1028 not a subset of variable.
1029
1030 d -- the data store.
1031 """ 1321 """
1032 val = d.getVar(variable) 1322 val = d.getVar(variable)
1033 if not val: 1323 if not val:
@@ -1042,17 +1332,17 @@ def contains_any(variable, checkvalues, truevalue, falsevalue, d):
1042 return falsevalue 1332 return falsevalue
1043 1333
1044def filter(variable, checkvalues, d): 1334def filter(variable, checkvalues, d):
1045 """Return all words in the variable that are present in the checkvalues. 1335 """Return all words in the variable that are present in the ``checkvalues``.
1046 1336
1047 Arguments: 1337 Arguments:
1048 1338
1049 variable -- the variable name. This will be fetched and expanded (using 1339 - ``variable``: the variable name. This will be fetched and expanded (using
1050 d.getVar(variable)) and then split into a set(). 1340 d.getVar(variable)) and then split into a set().
1341 - ``checkvalues``: if this is a string it is split on whitespace into a set(),
1342 otherwise coerced directly into a set().
1343 - ``d``: the data store.
1051 1344
1052 checkvalues -- if this is a string it is split on whitespace into a set(), 1345 Returns a space-separated string of the words found.
1053 otherwise coerced directly into a set().
1054
1055 d -- the data store.
1056 """ 1346 """
1057 1347
1058 val = d.getVar(variable) 1348 val = d.getVar(variable)
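A companion sketch for ``contains_any()`` and ``filter()`` using the same kind of datastore as above (``bb.data.init()`` assumed available)::

    import bb.data
    import bb.utils

    d = bb.data.init()
    d.setVar("DISTRO_FEATURES", "acl ipv6 systemd")
    bb.utils.contains_any("DISTRO_FEATURES", "wayland x11", "yes", "no", d)   # -> 'no'
    bb.utils.contains_any("DISTRO_FEATURES", "x11 systemd", "yes", "no", d)   # -> 'yes'
    bb.utils.filter("DISTRO_FEATURES", "systemd x11 ipv6", d)                 # -> 'ipv6 systemd'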
@@ -1068,8 +1358,27 @@ def filter(variable, checkvalues, d):
1068 1358
1069def get_referenced_vars(start_expr, d): 1359def get_referenced_vars(start_expr, d):
1070 """ 1360 """
1071 :return: names of vars referenced in start_expr (recursively), in quasi-BFS order (variables within the same level 1361 Get the names of the variables referenced in a given expression.
1072 are ordered arbitrarily) 1362
1363 Arguments:
1364
1365 - ``start_expr``: the expression in which to look for variable references.
1366
1367 For example::
1368
1369 ${VAR_A} string ${VAR_B}
1370
1371 Or::
1372
1373 ${@d.getVar('VAR')}
1374
1375 If a variable makes references to other variables, the latter are also
1376 returned recursively.
1377
1378 - ``d``: the data store.
1379
1380 Returns the names of vars referenced in ``start_expr`` (recursively), in
1381 quasi-BFS order (variables within the same level are ordered arbitrarily).
1073 """ 1382 """
1074 1383
1075 seen = set() 1384 seen = set()
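A sketch of ``get_referenced_vars()`` against a small datastore (``bb.data.init()`` assumed available; variable names made up)::

    import bb.data
    import bb.utils

    d = bb.data.init()
    d.setVar("BAR", "${TMPDIR}/work")
    d.setVar("FOO", "${BAR}/name")
    bb.utils.get_referenced_vars("${FOO} extra", d)
    # -> roughly ['FOO', 'BAR', 'TMPDIR'], outer references first (quasi-BFS)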
@@ -1103,50 +1412,79 @@ def get_referenced_vars(start_expr, d):
1103 1412
1104 1413
1105def cpu_count(): 1414def cpu_count():
1106 return multiprocessing.cpu_count() 1415 try:
1416 return len(os.sched_getaffinity(0))
1417 except OSError:
1418 return multiprocessing.cpu_count()
1107 1419
1108def nonblockingfd(fd): 1420def nonblockingfd(fd):
1109 fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK) 1421 fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
1110 1422
1111def process_profilelog(fn, pout = None): 1423def profile_function(profile, function, output_fn, process=True):
1424 """Common function to profile a code block and optionally process the
1425 output using a profile-processing function.
1426
1427 Arguments:
1428
1429 - ``profile``: a boolean saying whether to enable profiling or not
1430 - ``function``: the function call to profile/run
1431 - ``output_fn``: where to write the profiling data
1432 - ``process``: whether to process the profiling data and write a report
1433
1434 Returns the wrapped function return value
1435 """
1436 if profile:
1437 try:
1438 import cProfile as profile
1439 except:
1440 import profile
1441 prof = profile.Profile()
1442 ret = profile.Profile.runcall(prof, function)
1443 prof.dump_stats(output_fn)
1444 if process:
1445 process_profilelog(output_fn)
1446 serverlog("Raw profiling information saved to %s and processed statistics to %s.report*" % (output_fn, output_fn))
1447 return ret
1448 else:
1449 return function()
1450
1451def process_profilelog(fn, fn_out = None):
1112 # Either call with a list of filenames and set pout or a filename and optionally pout. 1452 # Either call with a list of filenames and set pout or a filename and optionally pout.
1113 if not pout: 1453 import pstats
1114 pout = fn + '.processed'
1115 1454
1116 with open(pout, 'w') as pout: 1455 if not fn_out:
1117 import pstats 1456 fn_out = fn + '.report'
1457
1458 def pstatopen():
1118 if isinstance(fn, list): 1459 if isinstance(fn, list):
1119 p = pstats.Stats(*fn, stream=pout) 1460 return pstats.Stats(*fn, stream=pout)
1120 else: 1461 return pstats.Stats(fn, stream=pout)
1121 p = pstats.Stats(fn, stream=pout) 1462
1463 with open(fn_out + '.time', 'w') as pout:
1464 p = pstatopen()
1122 p.sort_stats('time') 1465 p.sort_stats('time')
1123 p.print_stats() 1466 p.print_stats()
1467
1468 with open(fn_out + '.time-callers', 'w') as pout:
1469 p = pstatopen()
1470 p.sort_stats('time')
1124 p.print_callers() 1471 p.print_callers()
1472
1473 with open(fn_out + '.cumulative', 'w') as pout:
1474 p = pstatopen()
1125 p.sort_stats('cumulative') 1475 p.sort_stats('cumulative')
1126 p.print_stats() 1476 p.print_stats()
1127 1477
1128 pout.flush() 1478 with open(fn_out + '.cumulative-callers', 'w') as pout:
1129 1479 p = pstatopen()
1130# 1480 p.sort_stats('cumulative')
1131# Was present to work around multiprocessing pool bugs in python < 2.7.3 1481 p.print_callers()
1132#
1133def multiprocessingpool(*args, **kwargs):
1134
1135 import multiprocessing.pool
1136 #import multiprocessing.util
1137 #multiprocessing.util.log_to_stderr(10)
1138 # Deal with a multiprocessing bug where signals to the processes would be delayed until the work
1139 # completes. Putting in a timeout means the signals (like SIGINT/SIGTERM) get processed.
1140 def wrapper(func):
1141 def wrap(self, timeout=None):
1142 return func(self, timeout=timeout if timeout is not None else 1e100)
1143 return wrap
1144 multiprocessing.pool.IMapIterator.next = wrapper(multiprocessing.pool.IMapIterator.next)
1145 1482
1146 return multiprocessing.Pool(*args, **kwargs)
1147 1483
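A sketch of the new ``profile_function()`` wrapper (file name hypothetical; with the default ``process=True`` the ``process_profilelog()`` reports shown above are written alongside the raw stats, assuming a full bitbake environment)::

    import bb.utils

    def expensive():
        return sum(i * i for i in range(100000))

    # run under cProfile, dump raw stats to profile.log and write the processed reports
    result = bb.utils.profile_function(True, expensive, "profile.log")
    # profile=False simply calls the function directly
    result = bb.utils.profile_function(False, expensive, "profile.log")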
1148def exec_flat_python_func(func, *args, **kwargs): 1484def exec_flat_python_func(func, *args, **kwargs):
1149 """Execute a flat python function (defined with def funcname(args):...)""" 1485 """Execute a flat python function (defined with ``def funcname(args): ...``)
1486
1487 Returns the return value of the function."""
1150 # Prepare a small piece of python code which calls the requested function 1488 # Prepare a small piece of python code which calls the requested function
1151 # To do this we need to prepare two things - a set of variables we can use to pass 1489 # To do this we need to prepare two things - a set of variables we can use to pass
1152 # the values of arguments into the calling function, and the list of arguments for 1490 # the values of arguments into the calling function, and the list of arguments for
@@ -1172,48 +1510,57 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
1172 """Edit lines from a recipe or config file and modify one or more 1510 """Edit lines from a recipe or config file and modify one or more
1173 specified variable values set in the file using a specified callback 1511 specified variable values set in the file using a specified callback
1174 function. Lines are expected to have trailing newlines. 1512 function. Lines are expected to have trailing newlines.
1175 Parameters: 1513
1176 meta_lines: lines from the file; can be a list or an iterable 1514 Arguments:
1177 (e.g. file pointer) 1515
1178 variables: a list of variable names to look for. Functions 1516 - ``meta_lines``: lines from the file; can be a list or an iterable
1179 may also be specified, but must be specified with '()' at 1517 (e.g. file pointer)
1180 the end of the name. Note that the function doesn't have 1518 - ``variables``: a list of variable names to look for. Functions
1181 any intrinsic understanding of _append, _prepend, _remove, 1519 may also be specified, but must be specified with ``()`` at
1182 or overrides, so these are considered as part of the name. 1520 the end of the name. Note that the function doesn't have
1183 These values go into a regular expression, so regular 1521 any intrinsic understanding of ``:append``, ``:prepend``, ``:remove``,
1184 expression syntax is allowed. 1522 or overrides, so these are considered as part of the name.
1185 varfunc: callback function called for every variable matching 1523 These values go into a regular expression, so regular
1186 one of the entries in the variables parameter. The function 1524 expression syntax is allowed.
1187 should take four arguments: 1525 - ``varfunc``: callback function called for every variable matching
1188 varname: name of variable matched 1526 one of the entries in the variables parameter.
1189 origvalue: current value in file 1527
1190 op: the operator (e.g. '+=') 1528 The function should take four arguments:
1191 newlines: list of lines up to this point. You can use 1529
1192 this to prepend lines before this variable setting 1530 - ``varname``: name of variable matched
1193 if you wish. 1531 - ``origvalue``: current value in file
1194 and should return a four-element tuple: 1532 - ``op``: the operator (e.g. ``+=``)
1195 newvalue: new value to substitute in, or None to drop 1533 - ``newlines``: list of lines up to this point. You can use
1196 the variable setting entirely. (If the removal 1534 this to prepend lines before this variable setting
1197 results in two consecutive blank lines, one of the 1535 if you wish.
1198 blank lines will also be dropped). 1536
1199 newop: the operator to use - if you specify None here, 1537 And should return a four-element tuple:
1200 the original operation will be used. 1538
1201 indent: number of spaces to indent multi-line entries, 1539 - ``newvalue``: new value to substitute in, or ``None`` to drop
1202 or -1 to indent up to the level of the assignment 1540 the variable setting entirely. (If the removal
1203 and opening quote, or a string to use as the indent. 1541 results in two consecutive blank lines, one of the
1204 minbreak: True to allow the first element of a 1542 blank lines will also be dropped).
1205 multi-line value to continue on the same line as 1543 - ``newop``: the operator to use - if you specify ``None`` here,
1206 the assignment, False to indent before the first 1544 the original operation will be used.
1207 element. 1545 - ``indent``: number of spaces to indent multi-line entries,
1208 To clarify, if you wish not to change the value, then you 1546 or ``-1`` to indent up to the level of the assignment
1209 would return like this: return origvalue, None, 0, True 1547 and opening quote, or a string to use as the indent.
1210 match_overrides: True to match items with _overrides on the end, 1548 - ``minbreak``: ``True`` to allow the first element of a
1211 False otherwise 1549 multi-line value to continue on the same line as
1550 the assignment, ``False`` to indent before the first
1551 element.
1552
1553 To clarify, if you wish not to change the value, then you
1554 would return like this::
1555
1556 return origvalue, None, 0, True
1557 - ``match_overrides``: True to match items with _overrides on the end,
1558 False otherwise
1559
1212 Returns a tuple: 1560 Returns a tuple:
1213 updated: 1561
1214 True if changes were made, False otherwise. 1562 - ``updated``: ``True`` if changes were made, ``False`` otherwise.
1215 newlines: 1563 - ``newlines``: Lines after processing.
1216 Lines after processing
1217 """ 1564 """
1218 1565
1219 var_res = {} 1566 var_res = {}
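A callback sketch for ``edit_metadata()`` (variable name and values are made up; the exact output formatting follows the function's own rules)::

    import bb.utils

    lines = ['DESCRIPTION = "old text"\n', 'LICENSE = "MIT"\n']

    def change_description(varname, origvalue, op, newlines):
        # keep the operator and indentation, just swap in a new value
        return "new text", None, 0, True

    updated, newlines = bb.utils.edit_metadata(lines, ["DESCRIPTION"], change_description)
    # updated -> True; newlines now carries DESCRIPTION = "new text"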
@@ -1357,12 +1704,13 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
1357 1704
1358 1705
1359def edit_metadata_file(meta_file, variables, varfunc): 1706def edit_metadata_file(meta_file, variables, varfunc):
1360 """Edit a recipe or config file and modify one or more specified 1707 """Edit a recipe or configuration file and modify one or more specified
1361 variable values set in the file using a specified callback function. 1708 variable values set in the file using a specified callback function.
1362 The file is only written to if the value(s) actually change. 1709 The file is only written to if the value(s) actually change.
1363 This is basically the file version of edit_metadata(), see that 1710 This is basically the file version of ``bb.utils.edit_metadata()``, see that
1364 function's description for parameter/usage information. 1711 function's description for parameter/usage information.
1365 Returns True if the file was written to, False otherwise. 1712
1713 Returns ``True`` if the file was written to, ``False`` otherwise.
1366 """ 1714 """
1367 with open(meta_file, 'r') as f: 1715 with open(meta_file, 'r') as f:
1368 (updated, newlines) = edit_metadata(f, variables, varfunc) 1716 (updated, newlines) = edit_metadata(f, variables, varfunc)
@@ -1373,23 +1721,25 @@ def edit_metadata_file(meta_file, variables, varfunc):
1373 1721
1374 1722
1375def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None): 1723def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None):
1376 """Edit bblayers.conf, adding and/or removing layers 1724 """Edit ``bblayers.conf``, adding and/or removing layers.
1377 Parameters: 1725
1378 bblayers_conf: path to bblayers.conf file to edit 1726 Arguments:
1379 add: layer path (or list of layer paths) to add; None or empty 1727
1380 list to add nothing 1728 - ``bblayers_conf``: path to ``bblayers.conf`` file to edit
1381 remove: layer path (or list of layer paths) to remove; None or 1729 - ``add``: layer path (or list of layer paths) to add; ``None`` or empty
1382 empty list to remove nothing 1730 list to add nothing
1383 edit_cb: optional callback function that will be called after 1731 - ``remove``: layer path (or list of layer paths) to remove; ``None`` or
1384 processing adds/removes once per existing entry. 1732 empty list to remove nothing
1733 - ``edit_cb``: optional callback function that will be called
1734 after processing adds/removes once per existing entry.
1735
1385 Returns a tuple: 1736 Returns a tuple:
1386 notadded: list of layers specified to be added but weren't
1387 (because they were already in the list)
1388 notremoved: list of layers that were specified to be removed
1389 but weren't (because they weren't in the list)
1390 """
1391 1737
1392 import fnmatch 1738 - ``notadded``: list of layers specified to be added but weren't
1739 (because they were already in the list)
1740 - ``notremoved``: list of layers that were specified to be removed
1741 but weren't (because they weren't in the list)
1742 """
1393 1743
1394 def remove_trailing_sep(pth): 1744 def remove_trailing_sep(pth):
1395 if pth and pth[-1] == os.sep: 1745 if pth and pth[-1] == os.sep:
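A sketch of ``edit_bblayers_conf()`` (paths are hypothetical)::

    import bb.utils

    notadded, notremoved = bb.utils.edit_bblayers_conf(
        "build/conf/bblayers.conf",
        add="/home/user/poky/meta-example",   # layer path(s) to add, or None
        remove=None)                          # layer path(s) to drop, or None
    # both lists flag requests that were no-ops (already present / already absent)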
@@ -1508,7 +1858,22 @@ def get_collection_res(d):
1508 1858
1509 1859
1510def get_file_layer(filename, d, collection_res={}): 1860def get_file_layer(filename, d, collection_res={}):
1511 """Determine the collection (as defined by a layer's layer.conf file) containing the specified file""" 1861 """Determine the collection (or layer name, as defined by a layer's
1862 ``layer.conf`` file) containing the specified file.
1863
1864 Arguments:
1865
1866 - ``filename``: the filename to look for.
1867 - ``d``: the data store.
1868 - ``collection_res``: dictionary with the layer names as keys and file
1869 patterns to match as values, as defined by the BBFILE_COLLECTIONS and
1870 BBFILE_PATTERN variables respectively. The return value of
1871 ``bb.utils.get_collection_res()`` is the default if this variable is
1872 not specified.
1873
1874 Returns the layer name containing the file. If multiple layers contain the
1875 file, the last matching layer name from collection_res is returned.
1876 """
1512 if not collection_res: 1877 if not collection_res:
1513 collection_res = get_collection_res(d) 1878 collection_res = get_collection_res(d)
1514 1879
@@ -1546,7 +1911,13 @@ class PrCtlError(Exception):
1546 1911
1547def signal_on_parent_exit(signame): 1912def signal_on_parent_exit(signame):
1548 """ 1913 """
1549 Trigger signame to be sent when the parent process dies 1914 Trigger ``signame`` to be sent when the parent process dies.
1915
1916 Arguments:
1917
1918 - ``signame``: name of the signal. See ``man signal``.
1919
1920 No return value.
1550 """ 1921 """
1551 signum = getattr(signal, signame) 1922 signum = getattr(signal, signame)
1552 # http://linux.die.net/man/2/prctl 1923 # http://linux.die.net/man/2/prctl
@@ -1581,7 +1952,7 @@ def ioprio_set(who, cls, value):
1581 bb.warn("Unable to set IO Prio for arch %s" % _unamearch) 1952 bb.warn("Unable to set IO Prio for arch %s" % _unamearch)
1582 1953
1583def set_process_name(name): 1954def set_process_name(name):
1584 from ctypes import cdll, byref, create_string_buffer 1955 from ctypes import byref, create_string_buffer
1585 # This is nice to have for debugging, not essential 1956 # This is nice to have for debugging, not essential
1586 try: 1957 try:
1587 libc = cdll.LoadLibrary('libc.so.6') 1958 libc = cdll.LoadLibrary('libc.so.6')
@@ -1590,33 +1961,96 @@ def set_process_name(name):
1590 except: 1961 except:
1591 pass 1962 pass
1592 1963
1593def export_proxies(d): 1964def enable_loopback_networking():
1594 """ export common proxies variables from datastore to environment """ 1965 # From bits/ioctls.h
1595 import os 1966 SIOCGIFFLAGS = 0x8913
1967 SIOCSIFFLAGS = 0x8914
1968 SIOCSIFADDR = 0x8916
1969 SIOCSIFNETMASK = 0x891C
1596 1970
1597 variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY', 1971 # if.h
1598 'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY', 1972 IFF_UP = 0x1
1599 'GIT_PROXY_COMMAND'] 1973 IFF_RUNNING = 0x40
1600 exported = False
1601 1974
1602 for v in variables: 1975 # bits/socket.h
1603 if v in os.environ.keys(): 1976 AF_INET = 2
1604 exported = True 1977
1605 else: 1978 # char ifr_name[IFNAMSIZ=16]
1606 v_proxy = d.getVar(v) 1979 ifr_name = struct.pack("@16s", b"lo")
1607 if v_proxy is not None: 1980 def netdev_req(fd, req, data = b""):
1608 os.environ[v] = v_proxy 1981 # Pad and add interface name
1609 exported = True 1982 data = ifr_name + data + (b'\x00' * (16 - len(data)))
1983 # Return all data after interface name
1984 return fcntl.ioctl(fd, req, data)[16:]
1985
1986 with socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_IP) as sock:
1987 fd = sock.fileno()
1610 1988
1611 return exported 1989 # struct sockaddr_in ifr_addr { unsigned short family; uint16_t sin_port ; uint32_t in_addr; }
1990 req = struct.pack("@H", AF_INET) + struct.pack("=H4B", 0, 127, 0, 0, 1)
1991 netdev_req(fd, SIOCSIFADDR, req)
1612 1992
1993 # short ifr_flags
1994 flags = struct.unpack_from('@h', netdev_req(fd, SIOCGIFFLAGS))[0]
1995 flags |= IFF_UP | IFF_RUNNING
1996 netdev_req(fd, SIOCSIFFLAGS, struct.pack('@h', flags))
1997
1998 # struct sockaddr_in ifr_netmask
1999 req = struct.pack("@H", AF_INET) + struct.pack("=H4B", 0, 255, 0, 0, 0)
2000 netdev_req(fd, SIOCSIFNETMASK, req)
2001
2002def disable_network(uid=None, gid=None):
2003 """
2004 Disable networking in the current process if the kernel supports it, else
2005 just return after logging a debug message. To do this we need to create a new user
2006 namespace, then map back to the original uid/gid.
2007
2008 Arguments:
2009
2010 - ``uid``: original user id.
2011 - ``gid``: original user group id.
2012
2013 No return value.
2014 """
2015 libc = ctypes.CDLL('libc.so.6')
2016
2017 # From sched.h
2018 # New user namespace
2019 CLONE_NEWUSER = 0x10000000
2020 # New network namespace
2021 CLONE_NEWNET = 0x40000000
2022
2023 if uid is None:
2024 uid = os.getuid()
2025 if gid is None:
2026 gid = os.getgid()
2027
2028 ret = libc.unshare(CLONE_NEWNET | CLONE_NEWUSER)
2029 if ret != 0:
2030 logger.debug("System doesn't support disabling network without admin privs")
2031 return
2032 with open("/proc/self/uid_map", "w") as f:
2033 f.write("%s %s 1" % (uid, uid))
2034 with open("/proc/self/setgroups", "w") as f:
2035 f.write("deny")
2036 with open("/proc/self/gid_map", "w") as f:
2037 f.write("%s %s 1" % (gid, gid))
2038
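As a hedged sketch of the intended use of ``bb.utils.disable_network()`` (in a forked child that should lose external network access; loopback is then re-enabled with the helper above so purely local sockets keep working)::

    import os
    import bb.utils

    pid = os.fork()
    if pid == 0:
        bb.utils.disable_network(os.getuid(), os.getgid())
        # If the unshare succeeded, only namespace-local interfaces exist
        # now and they are down; bring loopback back up.
        bb.utils.enable_loopback_networking()
        # ... run the network-isolated work here ...
        os._exit(0)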
2039def export_proxies(d):
2040 """ Export common proxy variables from the datastore to the environment. """
2041 from bb.fetch2 import get_fetcher_environment
2042 newenv = get_fetcher_environment(d)
2043 for v in newenv:
2044 os.environ[v] = newenv[v]
1613 2045
1614def load_plugins(logger, plugins, pluginpath): 2046def load_plugins(logger, plugins, pluginpath):
1615 def load_plugin(name): 2047 def load_plugin(name):
1616 logger.debug('Loading plugin %s' % name) 2048 logger.debug('Loading plugin %s' % name)
1617 spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] ) 2049 spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] )
1618 if spec: 2050 if spec:
1619 return spec.loader.load_module() 2051 mod = importlib.util.module_from_spec(spec)
2052 spec.loader.exec_module(mod)
2053 return mod
1620 2054
1621 logger.debug('Loading plugins from %s...' % pluginpath) 2055 logger.debug('Loading plugins from %s...' % pluginpath)
1622 2056
@@ -1646,9 +2080,14 @@ class LogCatcher(logging.Handler):
1646 2080
1647def is_semver(version): 2081def is_semver(version):
1648 """ 2082 """
1649 Is the version string following the semver semantic? 2083 Arguments:
2084
2085 - ``version``: the version string.
2086
2087 Returns ``True`` if the version string follows semantic versioning, ``False``
2088 otherwise.
1650 2089
1651 https://semver.org/spec/v2.0.0.html 2090 See https://semver.org/spec/v2.0.0.html.
1652 """ 2091 """
1653 regex = re.compile( 2092 regex = re.compile(
1654 r""" 2093 r"""
@@ -1669,3 +2108,159 @@ def is_semver(version):
1669 return False 2108 return False
1670 2109
1671 return True 2110 return True
2111
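A few hedged examples of what the semver 2.0.0 grammar used by ``is_semver()`` accepts and rejects::

    import bb.utils

    bb.utils.is_semver("1.2.3")                # True
    bb.utils.is_semver("1.2.3-rc.1+build.5")   # True: pre-release and build metadata
    bb.utils.is_semver("1.2")                  # False: the patch component is mandatory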
2112# Wrapper around os.rename which can handle cross device problems
2113# e.g. from container filesystems
2114def rename(src, dst):
2115 try:
2116 os.rename(src, dst)
2117 except OSError as err:
2118 if err.errno == errno.EXDEV:
2119 # Invalid cross-device link error
2120 shutil.move(src, dst)
2121 else:
2122 raise err
2123
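A brief illustration of the fallback (paths are hypothetical): when ``src`` and ``dst`` are on different filesystems, ``os.rename()`` fails with ``EXDEV`` and the helper copies and deletes instead::

    import bb.utils

    # /tmp and an NFS-backed download directory are typically different mounts
    bb.utils.rename("/tmp/fetch.tmp", "/srv/downloads/fetch.done")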
2124@contextmanager
2125def environment(**envvars):
2126 """
2127 Context manager to selectively update the environment with the specified mapping.
2128
2129 No return value.
2130 """
2131 backup = dict(os.environ)
2132 try:
2133 os.environ.update(envvars)
2134 yield
2135 finally:
2136 for var in envvars:
2137 if var in backup:
2138 os.environ[var] = backup[var]
2139 elif var in os.environ:
2140 del os.environ[var]
2141
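A small usage sketch of the ``environment()`` context manager (the variables and values are arbitrary)::

    import subprocess
    import bb.utils

    with bb.utils.environment(LC_ALL="C", NO_COLOR="1"):
        # The updated values are visible here and in child processes
        subprocess.run(["ls", "-l"], check=True)
    # On exit the previous values are restored (or the variables removed
    # again if they were not set before)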
2142def is_local_uid(uid=''):
2143 """
2144 Check whether ``uid`` is a local one or not.
2145 The ``pwd`` module can't be used since it returns all known UIDs, not only the local ones.
2146
2147 Arguments:
2148
2149 - ``uid``: user id. If not specified, the user id is determined from
2150 ``os.getuid()``.
2151
2152 Returns ``True`` if the user id is local, ``False`` otherwise.
2153 """
2154 if not uid:
2155 uid = os.getuid()
2156 with open('/etc/passwd', 'r') as f:
2157 for line in f:
2158 line_split = line.split(':')
2159 if len(line_split) < 3:
2160 continue
2161 if str(uid) == line_split[2]:
2162 return True
2163 return False
2164
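For illustration, a sanity-style check a setup script might perform with ``is_local_uid()`` (the warning text is only an example)::

    import bb
    import bb.utils

    if not bb.utils.is_local_uid():
        bb.warn("Build user is not listed in /etc/passwd (e.g. an LDAP/NIS account)")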
2165def mkstemp(suffix=None, prefix=None, dir=None, text=False):
2166 """
2167 Generates a unique temporary file, independent of time.
2168
2169 mkstemp() in glibc (at least) generates unique file names based on the
2170 current system time. When combined with highly parallel builds, and
2171 operating over NFS (e.g. shared sstate/downloads) this can result in
2172 conflicts and race conditions.
2173
2174 This function adds additional entropy to the file name so that a collision
2175 is independent of time and thus extremely unlikely.
2176
2177 Arguments:
2178
2179 - ``suffix``: filename suffix.
2180 - ``prefix``: filename prefix.
2181 - ``dir``: directory where the file will be created.
2182 - ``text``: if ``True``, the file is opened in text mode.
2183
2184 Returns a tuple containing:
2185
2186 - the file descriptor for the created file
2187 - the name of the file.
2188 """
2189 entropy = "".join(random.choices("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890", k=20))
2190 if prefix:
2191 prefix = prefix + entropy
2192 else:
2193 prefix = tempfile.gettempprefix() + entropy
2194 return tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=dir, text=text)
2195
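Usage of this ``mkstemp()`` mirrors ``tempfile.mkstemp()``; a hedged sketch with a hypothetical shared directory::

    import os
    import bb.utils

    fd, tmpname = bb.utils.mkstemp(suffix=".part", dir="/srv/nfs/downloads")
    with os.fdopen(fd, "wb") as f:
        f.write(b"payload")
    bb.utils.rename(tmpname, "/srv/nfs/downloads/artifact.bin")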
2196def path_is_descendant(descendant, ancestor):
2197 """
2198 Returns ``True`` if the path ``descendant`` is a descendant of ``ancestor``
2199 (including being equivalent to ``ancestor`` itself). Otherwise returns
2200 ``False``.
2201
2202 Correctly accounts for symlinks, bind mounts, etc. by using
2203 ``os.path.samestat()`` to compare paths.
2204
2205 May raise any exception that ``os.stat()`` raises.
2206
2207 Arguments:
2208
2209 - ``descendant``: path to check for being a descendant of ``ancestor``.
2210 - ``ancestor``: path to the ancestor ``descendant`` will be checked
2211 against.
2212 """
2213
2214 ancestor_stat = os.stat(ancestor)
2215
2216 # Recurse up each directory component of the descendant to see if it is
2217 # equivalent to the ancestor
2218 check_dir = os.path.abspath(descendant).rstrip("/")
2219 while check_dir:
2220 check_stat = os.stat(check_dir)
2221 if os.path.samestat(check_stat, ancestor_stat):
2222 return True
2223 check_dir = os.path.dirname(check_dir).rstrip("/")
2224
2225 return False
2226
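Hedged examples for ``path_is_descendant()`` (both paths must exist because ``os.stat()`` is called on them)::

    import bb.utils

    bb.utils.path_is_descendant("/usr/bin", "/usr")   # True
    bb.utils.path_is_descendant("/usr", "/usr")       # True: a path is its own descendant
    bb.utils.path_is_descendant("/var", "/usr")       # False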
2227# Recomputing the sets in signal.py is expensive (bitbake -pP idle)
2228# so try to use _signal directly to avoid it
2229valid_signals = signal.valid_signals()
2230try:
2231 import _signal
2232 sigmask = _signal.pthread_sigmask
2233except ImportError:
2234 sigmask = signal.pthread_sigmask
2235
2236# If we don't have a timeout of some kind and a process/thread exits badly (for example
2237# OOM killed) while holding a lock, we'd just hang on the lock futex forever. It is better
2238# to exit at some point than to hang. 5 minutes with no progress means we're probably deadlocked.
2239# This function can still deadlock Python since it can't signal the other threads to exit
2240# (signals are handled in the main thread) and even os._exit() will wait on non-daemon threads
2241# to exit.
2242@contextmanager
2243def lock_timeout(lock):
2244 try:
2245 s = sigmask(signal.SIG_BLOCK, valid_signals)
2246 held = lock.acquire(timeout=5*60)
2247 if not held:
2248 bb.server.process.serverlog("Couldn't get the lock for 5 mins, timed out, exiting.\n%s" % traceback.format_stack())
2249 os._exit(1)
2250 yield held
2251 finally:
2252 lock.release()
2253 sigmask(signal.SIG_SETMASK, s)
2254
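A short sketch of guarding a shared resource with ``lock_timeout()`` (the lock and the work are placeholders)::

    import threading
    import bb.utils

    cache_lock = threading.Lock()

    def update_cache():
        with bb.utils.lock_timeout(cache_lock):
            # Critical section: if the lock cannot be acquired within five
            # minutes, a traceback is logged and the process exits rather
            # than hanging forever.
            pass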
2255# A version of lock_timeout without the check that the lock was acquired, and with a shorter timeout
2256@contextmanager
2257def lock_timeout_nocheck(lock):
2258 l = False
2259 try:
2260 s = sigmask(signal.SIG_BLOCK, valid_signals)
2261 l = lock.acquire(timeout=10)
2262 yield l
2263 finally:
2264 if l:
2265 lock.release()
2266 sigmask(signal.SIG_SETMASK, s)