Diffstat (limited to 'bitbake/lib/bb/utils.py')
-rw-r--r-- | bitbake/lib/bb/utils.py | 962 |
1 files changed, 761 insertions, 201 deletions
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
index b282d09abf..1cc74ed546 100644
--- a/bitbake/lib/bb/utils.py
+++ b/bitbake/lib/bb/utils.py
@@ -11,24 +11,29 @@ import re, fcntl, os, string, stat, shutil, time | |||
11 | import sys | 11 | import sys |
12 | import errno | 12 | import errno |
13 | import logging | 13 | import logging |
14 | import bb | 14 | import locale |
15 | import bb.msg | ||
16 | import multiprocessing | 15 | import multiprocessing |
17 | import fcntl | ||
18 | import importlib | 16 | import importlib |
19 | from importlib import machinery | 17 | import importlib.machinery |
18 | import importlib.util | ||
20 | import itertools | 19 | import itertools |
21 | import subprocess | 20 | import subprocess |
22 | import glob | 21 | import glob |
23 | import fnmatch | 22 | import fnmatch |
24 | import traceback | 23 | import traceback |
25 | import errno | ||
26 | import signal | 24 | import signal |
27 | import collections | 25 | import collections |
28 | import copy | 26 | import copy |
27 | import ctypes | ||
28 | import random | ||
29 | import socket | ||
30 | import struct | ||
31 | import tempfile | ||
29 | from subprocess import getstatusoutput | 32 | from subprocess import getstatusoutput |
30 | from contextlib import contextmanager | 33 | from contextlib import contextmanager |
31 | from ctypes import cdll | 34 | from ctypes import cdll |
35 | import bb | ||
36 | import bb.msg | ||
32 | 37 | ||
33 | logger = logging.getLogger("BitBake.Util") | 38 | logger = logging.getLogger("BitBake.Util") |
34 | python_extensions = importlib.machinery.all_suffixes() | 39 | python_extensions = importlib.machinery.all_suffixes() |
@@ -43,7 +48,7 @@ def clean_context(): | |||
43 | 48 | ||
44 | def get_context(): | 49 | def get_context(): |
45 | return _context | 50 | return _context |
46 | 51 | ||
47 | 52 | ||
48 | def set_context(ctx): | 53 | def set_context(ctx): |
49 | _context = ctx | 54 | _context = ctx |
@@ -77,7 +82,16 @@ def explode_version(s): | |||
77 | return r | 82 | return r |
78 | 83 | ||
79 | def split_version(s): | 84 | def split_version(s): |
80 | """Split a version string into its constituent parts (PE, PV, PR)""" | 85 | """Split a version string into its constituent parts (PE, PV, PR). |
86 | |||
87 | Arguments: | ||
88 | |||
89 | - ``s``: version string. The format of the input string should be:: | ||
90 | |||
91 | ${PE}:${PV}-${PR} | ||
92 | |||
93 | Returns a tuple ``(pe, pv, pr)``. | ||
94 | """ | ||
81 | s = s.strip(" <>=") | 95 | s = s.strip(" <>=") |
82 | e = 0 | 96 | e = 0 |
83 | if s.count(':'): | 97 | if s.count(':'): |
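
A short usage sketch of the documented split_version() behaviour (illustrative only, not part of the patch; the version strings are example values)::

    import bb.utils

    # "${PE}:${PV}-${PR}" style input, per the docstring above
    pe, pv, pr = bb.utils.split_version("2:1.2.3-r0")
    # pe == 2, pv == "1.2.3", pr == "r0"
    pe, pv, pr = bb.utils.split_version("1.0")
    # pe == 0, pv == "1.0", pr == ""
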
@@ -129,16 +143,30 @@ def vercmp(ta, tb): | |||
129 | return r | 143 | return r |
130 | 144 | ||
131 | def vercmp_string(a, b): | 145 | def vercmp_string(a, b): |
132 | """ Split version strings and compare them """ | 146 | """ Split version strings using ``bb.utils.split_version()`` and compare |
147 | them with ``bb.utils.vercmp().`` | ||
148 | |||
149 | Arguments: | ||
150 | |||
151 | - ``a``: left version string operand. | ||
152 | - ``b``: right version string operand. | ||
153 | |||
154 | Returns what ``bb.utils.vercmp()`` returns.""" | ||
133 | ta = split_version(a) | 155 | ta = split_version(a) |
134 | tb = split_version(b) | 156 | tb = split_version(b) |
135 | return vercmp(ta, tb) | 157 | return vercmp(ta, tb) |
136 | 158 | ||
137 | def vercmp_string_op(a, b, op): | 159 | def vercmp_string_op(a, b, op): |
138 | """ | 160 | """ |
139 | Compare two versions and check if the specified comparison operator matches the result of the comparison. | 161 | Takes the return value ``bb.utils.vercmp()`` and returns the operation |
140 | This function is fairly liberal about what operators it will accept since there are a variety of styles | 162 | defined by ``op`` between the return value and 0. |
141 | depending on the context. | 163 | |
164 | Arguments: | ||
165 | |||
166 | - ``a``: left version string operand. | ||
167 | - ``b``: right version string operand. | ||
168 | - ``op``: operator string. Can be one of ``=``, ``==``, ``<=``, ``>=``, | ||
169 | ``>``, ``>>``, ``<``, ``<<`` or ``!=``. | ||
142 | """ | 170 | """ |
143 | res = vercmp_string(a, b) | 171 | res = vercmp_string(a, b) |
144 | if op in ('=', '=='): | 172 | if op in ('=', '=='): |
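
For reference, a minimal sketch of how the comparison helpers documented above are expected to behave (example values, not part of the patch)::

    import bb.utils

    bb.utils.vercmp_string("1.2", "1.10")                    # negative: 1.2 < 1.10
    bb.utils.vercmp_string_op("1.0", "1.1", "<")             # True
    bb.utils.vercmp_string_op("2:1.0-r1", "2:1.0-r1", "=")   # True
    bb.utils.vercmp_string_op("1.2", "1.10", ">=")           # False
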
@@ -158,9 +186,16 @@ def vercmp_string_op(a, b, op): | |||
158 | 186 | ||
159 | def explode_deps(s): | 187 | def explode_deps(s): |
160 | """ | 188 | """ |
161 | Take an RDEPENDS style string of format: | 189 | Takes an RDEPENDS style string of format:: |
162 | "DEPEND1 (optional version) DEPEND2 (optional version) ..." | 190 | |
163 | and return a list of dependencies. | 191 | DEPEND1 (optional version) DEPEND2 (optional version) ... |
192 | |||
193 | Arguments: | ||
194 | |||
195 | - ``s``: input RDEPENDS style string | ||
196 | |||
197 | Returns a list of dependencies. | ||
198 | |||
164 | Version information is ignored. | 199 | Version information is ignored. |
165 | """ | 200 | """ |
166 | r = [] | 201 | r = [] |
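
An illustrative call (not from the patch) showing the documented result::

    import bb.utils

    bb.utils.explode_deps("foo (>= 1.2) bar update-alternatives")
    # -> ['foo', 'bar', 'update-alternatives']  (version constraints dropped)
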
@@ -182,9 +217,17 @@ def explode_deps(s): | |||
182 | 217 | ||
183 | def explode_dep_versions2(s, *, sort=True): | 218 | def explode_dep_versions2(s, *, sort=True): |
184 | """ | 219 | """ |
185 | Take an RDEPENDS style string of format: | 220 | Takes an RDEPENDS style string of format:: |
186 | "DEPEND1 (optional version) DEPEND2 (optional version) ..." | 221 | |
187 | and return a dictionary of dependencies and versions. | 222 | DEPEND1 (optional version) DEPEND2 (optional version) ... |
223 | |||
224 | Arguments: | ||
225 | |||
226 | - ``s``: input RDEPENDS style string | ||
227 | - ``*``: *Unused*. | ||
228 | - ``sort``: whether to sort the output or not. | ||
229 | |||
230 | Returns a dictionary of dependencies and versions. | ||
188 | """ | 231 | """ |
189 | r = collections.OrderedDict() | 232 | r = collections.OrderedDict() |
190 | l = s.replace(",", "").split() | 233 | l = s.replace(",", "").split() |
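
A sketch of the expected mapping (illustrative input; exact ordering depends on ``sort``)::

    import bb.utils

    bb.utils.explode_dep_versions2("foo (>= 1.2), bar")
    # -> OrderedDict([('bar', []), ('foo', ['>= 1.2'])])  with the default sort=True
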
@@ -205,8 +248,8 @@ def explode_dep_versions2(s, *, sort=True): | |||
205 | inversion = True | 248 | inversion = True |
206 | # This list is based on behavior and supported comparisons from deb, opkg and rpm. | 249 | # This list is based on behavior and supported comparisons from deb, opkg and rpm. |
207 | # | 250 | # |
208 | # Even though =<, <<, ==, !=, =>, and >> may not be supported, | 251 | # Even though =<, <<, ==, !=, =>, and >> may not be supported, |
209 | # we list each possibly valid item. | 252 | # we list each possibly valid item. |
210 | # The build system is responsible for validation of what it supports. | 253 | # The build system is responsible for validation of what it supports. |
211 | if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')): | 254 | if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')): |
212 | lastcmp = i[0:2] | 255 | lastcmp = i[0:2] |
@@ -249,10 +292,17 @@ def explode_dep_versions2(s, *, sort=True): | |||
249 | 292 | ||
250 | def explode_dep_versions(s): | 293 | def explode_dep_versions(s): |
251 | """ | 294 | """ |
252 | Take an RDEPENDS style string of format: | 295 | Take an RDEPENDS style string of format:: |
253 | "DEPEND1 (optional version) DEPEND2 (optional version) ..." | 296 | |
254 | skip null value and items appeared in dependancy string multiple times | 297 | DEPEND1 (optional version) DEPEND2 (optional version) ... |
255 | and return a dictionary of dependencies and versions. | 298 | |
299 | Skips null values and items appeared in dependency string multiple times. | ||
300 | |||
301 | Arguments: | ||
302 | |||
303 | - ``s``: input RDEPENDS style string | ||
304 | |||
305 | Returns a dictionary of dependencies and versions. | ||
256 | """ | 306 | """ |
257 | r = explode_dep_versions2(s) | 307 | r = explode_dep_versions2(s) |
258 | for d in r: | 308 | for d in r: |
@@ -266,7 +316,17 @@ def explode_dep_versions(s): | |||
266 | 316 | ||
267 | def join_deps(deps, commasep=True): | 317 | def join_deps(deps, commasep=True): |
268 | """ | 318 | """ |
269 | Take the result from explode_dep_versions and generate a dependency string | 319 | Take a result from ``bb.utils.explode_dep_versions()`` and generate a |
320 | dependency string. | ||
321 | |||
322 | Arguments: | ||
323 | |||
324 | - ``deps``: dictionary of dependencies and versions. | ||
325 | - ``commasep``: makes the return value separated by commas if ``True``, | ||
326 | separated by spaces otherwise. | ||
327 | |||
328 | Returns a comma-separated (space-separated if ``comma-sep`` is ``False``) | ||
329 | string of dependencies and versions. | ||
270 | """ | 330 | """ |
271 | result = [] | 331 | result = [] |
272 | for dep in deps: | 332 | for dep in deps: |
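
Together with ``explode_dep_versions2()``, a round-trip sketch (illustrative values)::

    import bb.utils

    deps = bb.utils.explode_dep_versions2("foo (>= 1.2) bar", sort=False)
    bb.utils.join_deps(deps)                  # -> 'foo (>= 1.2), bar'
    bb.utils.join_deps(deps, commasep=False)  # -> 'foo (>= 1.2) bar'
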
@@ -340,7 +400,7 @@ def _print_exception(t, value, tb, realfile, text, context): | |||
340 | exception = traceback.format_exception_only(t, value) | 400 | exception = traceback.format_exception_only(t, value) |
341 | error.append('Error executing a python function in %s:\n' % realfile) | 401 | error.append('Error executing a python function in %s:\n' % realfile) |
342 | 402 | ||
343 | # Strip 'us' from the stack (better_exec call) unless that was where the | 403 | # Strip 'us' from the stack (better_exec call) unless that was where the |
344 | # error came from | 404 | # error came from |
345 | if tb.tb_next is not None: | 405 | if tb.tb_next is not None: |
346 | tb = tb.tb_next | 406 | tb = tb.tb_next |
@@ -379,7 +439,7 @@ def _print_exception(t, value, tb, realfile, text, context): | |||
379 | 439 | ||
380 | error.append("Exception: %s" % ''.join(exception)) | 440 | error.append("Exception: %s" % ''.join(exception)) |
381 | 441 | ||
382 | # If the exception is from spwaning a task, let's be helpful and display | 442 | # If the exception is from spawning a task, let's be helpful and display |
383 | # the output (which hopefully includes stderr). | 443 | # the output (which hopefully includes stderr). |
384 | if isinstance(value, subprocess.CalledProcessError) and value.output: | 444 | if isinstance(value, subprocess.CalledProcessError) and value.output: |
385 | error.append("Subprocess output:") | 445 | error.append("Subprocess output:") |
@@ -400,7 +460,7 @@ def better_exec(code, context, text = None, realfile = "<code>", pythonexception | |||
400 | code = better_compile(code, realfile, realfile) | 460 | code = better_compile(code, realfile, realfile) |
401 | try: | 461 | try: |
402 | exec(code, get_context(), context) | 462 | exec(code, get_context(), context) |
403 | except (bb.BBHandledException, bb.parse.SkipRecipe, bb.data_smart.ExpansionError): | 463 | except (bb.BBHandledException, bb.parse.SkipRecipe, bb.data_smart.ExpansionError, bb.process.ExecutionError): |
404 | # Error already shown so passthrough, no need for traceback | 464 | # Error already shown so passthrough, no need for traceback |
405 | raise | 465 | raise |
406 | except Exception as e: | 466 | except Exception as e: |
@@ -427,33 +487,56 @@ def better_eval(source, locals, extraglobals = None): | |||
427 | return eval(source, ctx, locals) | 487 | return eval(source, ctx, locals) |
428 | 488 | ||
429 | @contextmanager | 489 | @contextmanager |
430 | def fileslocked(files): | 490 | def fileslocked(files, *args, **kwargs): |
431 | """Context manager for locking and unlocking file locks.""" | 491 | """Context manager for locking and unlocking file locks. Uses |
492 | ``bb.utils.lockfile()`` and ``bb.utils.unlockfile()`` to lock and unlock | ||
493 | files. | ||
494 | |||
495 | No return value.""" | ||
432 | locks = [] | 496 | locks = [] |
433 | if files: | 497 | if files: |
434 | for lockfile in files: | 498 | for lockfile in files: |
435 | locks.append(bb.utils.lockfile(lockfile)) | 499 | l = bb.utils.lockfile(lockfile, *args, **kwargs) |
500 | if l is not None: | ||
501 | locks.append(l) | ||
436 | 502 | ||
437 | try: | 503 | try: |
438 | yield | 504 | yield |
439 | finally: | 505 | finally: |
506 | locks.reverse() | ||
440 | for lock in locks: | 507 | for lock in locks: |
441 | bb.utils.unlockfile(lock) | 508 | bb.utils.unlockfile(lock) |
442 | 509 | ||
443 | def lockfile(name, shared=False, retry=True, block=False): | 510 | def lockfile(name, shared=False, retry=True, block=False): |
444 | """ | 511 | """ |
445 | Use the specified file as a lock file, return when the lock has | 512 | Use the specified file (with filename ``name``) as a lock file, return when |
446 | been acquired. Returns a variable to pass to unlockfile(). | 513 | the lock has been acquired. Returns a variable to pass to unlockfile(). |
447 | Parameters: | 514 | |
448 | retry: True to re-try locking if it fails, False otherwise | 515 | Arguments: |
449 | block: True to block until the lock succeeds, False otherwise | 516 | |
517 | - ``shared``: sets the lock as a shared lock instead of an | ||
518 | exclusive lock. | ||
519 | - ``retry``: ``True`` to re-try locking if it fails, ``False`` | ||
520 | otherwise. | ||
521 | - ``block``: ``True`` to block until the lock succeeds, | ||
522 | ``False`` otherwise. | ||
523 | |||
450 | The retry and block parameters are kind of equivalent unless you | 524 | The retry and block parameters are kind of equivalent unless you |
451 | consider the possibility of sending a signal to the process to break | 525 | consider the possibility of sending a signal to the process to break |
452 | out - at which point you want block=True rather than retry=True. | 526 | out - at which point you want block=True rather than retry=True. |
527 | |||
528 | Returns the locked file descriptor in case of success, ``None`` otherwise. | ||
453 | """ | 529 | """ |
530 | basename = os.path.basename(name) | ||
531 | if len(basename) > 255: | ||
532 | root, ext = os.path.splitext(basename) | ||
533 | basename = root[:255 - len(ext)] + ext | ||
534 | |||
454 | dirname = os.path.dirname(name) | 535 | dirname = os.path.dirname(name) |
455 | mkdirhier(dirname) | 536 | mkdirhier(dirname) |
456 | 537 | ||
538 | name = os.path.join(dirname, basename) | ||
539 | |||
457 | if not os.access(dirname, os.W_OK): | 540 | if not os.access(dirname, os.W_OK): |
458 | logger.error("Unable to acquire lock '%s', directory is not writable", | 541 | logger.error("Unable to acquire lock '%s', directory is not writable", |
459 | name) | 542 | name) |
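
A minimal sketch of the locking helpers in use (lock file paths are illustrative); extra positional/keyword arguments to ``fileslocked()`` are forwarded to ``bb.utils.lockfile()``::

    import bb.utils

    with bb.utils.fileslocked(["/tmp/demo-a.lock", "/tmp/demo-b.lock"]):
        pass  # both locks held here; released in reverse order on exit

    # Non-blocking variant: with retry/block disabled the lock may not be taken
    lock = bb.utils.lockfile("/tmp/demo-a.lock", retry=False, block=False)
    if lock:
        bb.utils.unlockfile(lock)
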
@@ -487,7 +570,7 @@ def lockfile(name, shared=False, retry=True, block=False): | |||
487 | return lf | 570 | return lf |
488 | lf.close() | 571 | lf.close() |
489 | except OSError as e: | 572 | except OSError as e: |
490 | if e.errno == errno.EACCES: | 573 | if e.errno == errno.EACCES or e.errno == errno.ENAMETOOLONG: |
491 | logger.error("Unable to acquire lock '%s', %s", | 574 | logger.error("Unable to acquire lock '%s', %s", |
492 | e.strerror, name) | 575 | e.strerror, name) |
493 | sys.exit(1) | 576 | sys.exit(1) |
@@ -501,7 +584,13 @@ def lockfile(name, shared=False, retry=True, block=False): | |||
501 | 584 | ||
502 | def unlockfile(lf): | 585 | def unlockfile(lf): |
503 | """ | 586 | """ |
504 | Unlock a file locked using lockfile() | 587 | Unlock a file locked using ``bb.utils.lockfile()``. |
588 | |||
589 | Arguments: | ||
590 | |||
591 | - ``lf``: the locked file descriptor. | ||
592 | |||
593 | No return value. | ||
505 | """ | 594 | """ |
506 | try: | 595 | try: |
507 | # If we had a shared lock, we need to promote to exclusive before | 596 | # If we had a shared lock, we need to promote to exclusive before |
@@ -529,43 +618,97 @@ def _hasher(method, filename): | |||
529 | 618 | ||
530 | def md5_file(filename): | 619 | def md5_file(filename): |
531 | """ | 620 | """ |
532 | Return the hex string representation of the MD5 checksum of filename. | 621 | Arguments: |
622 | |||
623 | - ``filename``: path to the input file. | ||
624 | |||
625 | Returns the hexadecimal string representation of the MD5 checksum of filename. | ||
533 | """ | 626 | """ |
534 | import hashlib | 627 | import hashlib |
535 | return _hasher(hashlib.md5(), filename) | 628 | try: |
629 | sig = hashlib.new('MD5', usedforsecurity=False) | ||
630 | except TypeError: | ||
631 | # Some configurations don't appear to support two arguments | ||
632 | sig = hashlib.new('MD5') | ||
633 | return _hasher(sig, filename) | ||
536 | 634 | ||
537 | def sha256_file(filename): | 635 | def sha256_file(filename): |
538 | """ | 636 | """ |
539 | Return the hex string representation of the 256-bit SHA checksum of | 637 | Returns the hexadecimal representation of the 256-bit SHA checksum of |
540 | filename. | 638 | filename. |
639 | |||
640 | Arguments: | ||
641 | |||
642 | - ``filename``: path to the file. | ||
541 | """ | 643 | """ |
542 | import hashlib | 644 | import hashlib |
543 | return _hasher(hashlib.sha256(), filename) | 645 | return _hasher(hashlib.sha256(), filename) |
544 | 646 | ||
545 | def sha1_file(filename): | 647 | def sha1_file(filename): |
546 | """ | 648 | """ |
547 | Return the hex string representation of the SHA1 checksum of the filename | 649 | Returns the hexadecimal representation of the SHA1 checksum of the filename |
650 | |||
651 | Arguments: | ||
652 | |||
653 | - ``filename``: path to the file. | ||
548 | """ | 654 | """ |
549 | import hashlib | 655 | import hashlib |
550 | return _hasher(hashlib.sha1(), filename) | 656 | return _hasher(hashlib.sha1(), filename) |
551 | 657 | ||
552 | def sha384_file(filename): | 658 | def sha384_file(filename): |
553 | """ | 659 | """ |
554 | Return the hex string representation of the SHA384 checksum of the filename | 660 | Returns the hexadecimal representation of the SHA384 checksum of the filename |
661 | |||
662 | Arguments: | ||
663 | |||
664 | - ``filename``: path to the file. | ||
555 | """ | 665 | """ |
556 | import hashlib | 666 | import hashlib |
557 | return _hasher(hashlib.sha384(), filename) | 667 | return _hasher(hashlib.sha384(), filename) |
558 | 668 | ||
559 | def sha512_file(filename): | 669 | def sha512_file(filename): |
560 | """ | 670 | """ |
561 | Return the hex string representation of the SHA512 checksum of the filename | 671 | Returns the hexadecimal representation of the SHA512 checksum of the filename |
672 | |||
673 | Arguments: | ||
674 | |||
675 | - ``filename``: path to the file. | ||
562 | """ | 676 | """ |
563 | import hashlib | 677 | import hashlib |
564 | return _hasher(hashlib.sha512(), filename) | 678 | return _hasher(hashlib.sha512(), filename) |
565 | 679 | ||
680 | def goh1_file(filename): | ||
681 | """ | ||
682 | Returns the hexadecimal string representation of the Go mod h1 checksum of the | ||
683 | filename. The Go mod h1 checksum uses the Go dirhash package. The package | ||
684 | defines hashes over directory trees and is used by go mod for mod files and | ||
685 | zip archives. | ||
686 | |||
687 | Arguments: | ||
688 | |||
689 | - ``filename``: path to the file. | ||
690 | """ | ||
691 | import hashlib | ||
692 | import zipfile | ||
693 | |||
694 | lines = [] | ||
695 | if zipfile.is_zipfile(filename): | ||
696 | with zipfile.ZipFile(filename) as archive: | ||
697 | for fn in sorted(archive.namelist()): | ||
698 | method = hashlib.sha256() | ||
699 | method.update(archive.read(fn)) | ||
700 | hash = method.hexdigest() | ||
701 | lines.append("%s %s\n" % (hash, fn)) | ||
702 | else: | ||
703 | hash = _hasher(hashlib.sha256(), filename) | ||
704 | lines.append("%s go.mod\n" % hash) | ||
705 | method = hashlib.sha256() | ||
706 | method.update("".join(lines).encode('utf-8')) | ||
707 | return method.hexdigest() | ||
708 | |||
566 | def preserved_envvars_exported(): | 709 | def preserved_envvars_exported(): |
567 | """Variables which are taken from the environment and placed in and exported | 710 | """Returns the list of variables which are taken from the environment and |
568 | from the metadata""" | 711 | placed in and exported from the metadata.""" |
569 | return [ | 712 | return [ |
570 | 'BB_TASKHASH', | 713 | 'BB_TASKHASH', |
571 | 'HOME', | 714 | 'HOME', |
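
The checksum helpers added/documented earlier in this hunk all take a path and return a hex digest string; an illustrative call (file names are examples)::

    import bb.utils

    bb.utils.sha256_file("downloads/example.tar.gz")   # -> hex digest string
    bb.utils.goh1_file("downloads/example-1.0.zip")    # Go dirhash-style digest
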
@@ -579,19 +722,42 @@ def preserved_envvars_exported(): | |||
579 | ] | 722 | ] |
580 | 723 | ||
581 | def preserved_envvars(): | 724 | def preserved_envvars(): |
582 | """Variables which are taken from the environment and placed in the metadata""" | 725 | """Returns the list of variables which are taken from the environment and |
726 | placed in the metadata.""" | ||
583 | v = [ | 727 | v = [ |
584 | 'BBPATH', | 728 | 'BBPATH', |
585 | 'BB_PRESERVE_ENV', | 729 | 'BB_PRESERVE_ENV', |
586 | 'BB_ENV_WHITELIST', | 730 | 'BB_ENV_PASSTHROUGH_ADDITIONS', |
587 | 'BB_ENV_EXTRAWHITE', | ||
588 | ] | 731 | ] |
589 | return v + preserved_envvars_exported() | 732 | return v + preserved_envvars_exported() |
590 | 733 | ||
734 | def check_system_locale(): | ||
735 | """Make sure the required system locale are available and configured. | ||
736 | |||
737 | No return value.""" | ||
738 | default_locale = locale.getlocale(locale.LC_CTYPE) | ||
739 | |||
740 | try: | ||
741 | locale.setlocale(locale.LC_CTYPE, ("en_US", "UTF-8")) | ||
742 | except: | ||
743 | sys.exit("Please make sure locale 'en_US.UTF-8' is available on your system") | ||
744 | else: | ||
745 | locale.setlocale(locale.LC_CTYPE, default_locale) | ||
746 | |||
747 | if sys.getfilesystemencoding() != "utf-8": | ||
748 | sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\n" | ||
749 | "Python can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.") | ||
750 | |||
591 | def filter_environment(good_vars): | 751 | def filter_environment(good_vars): |
592 | """ | 752 | """ |
593 | Create a pristine environment for bitbake. This will remove variables that | 753 | Create a pristine environment for bitbake. This will remove variables that |
594 | are not known and may influence the build in a negative way. | 754 | are not known and may influence the build in a negative way. |
755 | |||
756 | Arguments: | ||
757 | |||
758 | - ``good_vars``: list of variable to exclude from the filtering. | ||
759 | |||
760 | No return value. | ||
595 | """ | 761 | """ |
596 | 762 | ||
597 | removed_vars = {} | 763 | removed_vars = {} |
@@ -615,27 +781,29 @@ def filter_environment(good_vars): | |||
615 | 781 | ||
616 | def approved_variables(): | 782 | def approved_variables(): |
617 | """ | 783 | """ |
618 | Determine and return the list of whitelisted variables which are approved | 784 | Determine and return the list of variables which are approved |
619 | to remain in the environment. | 785 | to remain in the environment. |
620 | """ | 786 | """ |
621 | if 'BB_PRESERVE_ENV' in os.environ: | 787 | if 'BB_PRESERVE_ENV' in os.environ: |
622 | return os.environ.keys() | 788 | return os.environ.keys() |
623 | approved = [] | 789 | approved = [] |
624 | if 'BB_ENV_WHITELIST' in os.environ: | 790 | if 'BB_ENV_PASSTHROUGH' in os.environ: |
625 | approved = os.environ['BB_ENV_WHITELIST'].split() | 791 | approved = os.environ['BB_ENV_PASSTHROUGH'].split() |
626 | approved.extend(['BB_ENV_WHITELIST']) | 792 | approved.extend(['BB_ENV_PASSTHROUGH']) |
627 | else: | 793 | else: |
628 | approved = preserved_envvars() | 794 | approved = preserved_envvars() |
629 | if 'BB_ENV_EXTRAWHITE' in os.environ: | 795 | if 'BB_ENV_PASSTHROUGH_ADDITIONS' in os.environ: |
630 | approved.extend(os.environ['BB_ENV_EXTRAWHITE'].split()) | 796 | approved.extend(os.environ['BB_ENV_PASSTHROUGH_ADDITIONS'].split()) |
631 | if 'BB_ENV_EXTRAWHITE' not in approved: | 797 | if 'BB_ENV_PASSTHROUGH_ADDITIONS' not in approved: |
632 | approved.extend(['BB_ENV_EXTRAWHITE']) | 798 | approved.extend(['BB_ENV_PASSTHROUGH_ADDITIONS']) |
633 | return approved | 799 | return approved |
634 | 800 | ||
635 | def clean_environment(): | 801 | def clean_environment(): |
636 | """ | 802 | """ |
637 | Clean up any spurious environment variables. This will remove any | 803 | Clean up any spurious environment variables. This will remove any |
638 | variables the user hasn't chosen to preserve. | 804 | variables the user hasn't chosen to preserve. |
805 | |||
806 | No return value. | ||
639 | """ | 807 | """ |
640 | if 'BB_PRESERVE_ENV' not in os.environ: | 808 | if 'BB_PRESERVE_ENV' not in os.environ: |
641 | good_vars = approved_variables() | 809 | good_vars = approved_variables() |
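
A sketch of how the renamed passthrough variables fit together (this mirrors what ``clean_environment()`` does internally)::

    import bb.utils

    # Drop every environment variable that is not explicitly approved,
    # i.e. not in BB_ENV_PASSTHROUGH / BB_ENV_PASSTHROUGH_ADDITIONS or
    # the built-in preserved list.
    bb.utils.filter_environment(bb.utils.approved_variables())
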
@@ -646,6 +814,8 @@ def clean_environment(): | |||
646 | def empty_environment(): | 814 | def empty_environment(): |
647 | """ | 815 | """ |
648 | Remove all variables from the environment. | 816 | Remove all variables from the environment. |
817 | |||
818 | No return value. | ||
649 | """ | 819 | """ |
650 | for s in list(os.environ.keys()): | 820 | for s in list(os.environ.keys()): |
651 | os.unsetenv(s) | 821 | os.unsetenv(s) |
@@ -654,6 +824,12 @@ def empty_environment(): | |||
654 | def build_environment(d): | 824 | def build_environment(d): |
655 | """ | 825 | """ |
656 | Build an environment from all exported variables. | 826 | Build an environment from all exported variables. |
827 | |||
828 | Arguments: | ||
829 | |||
830 | - ``d``: the data store. | ||
831 | |||
832 | No return value. | ||
657 | """ | 833 | """ |
658 | import bb.data | 834 | import bb.data |
659 | for var in bb.data.keys(d): | 835 | for var in bb.data.keys(d): |
@@ -678,13 +854,23 @@ def _check_unsafe_delete_path(path): | |||
678 | return False | 854 | return False |
679 | 855 | ||
680 | def remove(path, recurse=False, ionice=False): | 856 | def remove(path, recurse=False, ionice=False): |
681 | """Equivalent to rm -f or rm -rf""" | 857 | """Equivalent to rm -f or rm -rf. |
858 | |||
859 | Arguments: | ||
860 | |||
861 | - ``path``: path to file/directory to remove. | ||
862 | - ``recurse``: deletes recursively if ``True``. | ||
863 | - ``ionice``: prepends ``ionice -c 3`` to the ``rm`` command. See ``man | ||
864 | ionice``. | ||
865 | |||
866 | No return value. | ||
867 | """ | ||
682 | if not path: | 868 | if not path: |
683 | return | 869 | return |
684 | if recurse: | 870 | if recurse: |
685 | for name in glob.glob(path): | 871 | for name in glob.glob(path): |
686 | if _check_unsafe_delete_path(path): | 872 | if _check_unsafe_delete_path(name): |
687 | raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % path) | 873 | raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % name) |
688 | # shutil.rmtree(name) would be ideal but its too slow | 874 | # shutil.rmtree(name) would be ideal but its too slow |
689 | cmd = [] | 875 | cmd = [] |
690 | if ionice: | 876 | if ionice: |
@@ -699,7 +885,17 @@ def remove(path, recurse=False, ionice=False): | |||
699 | raise | 885 | raise |
700 | 886 | ||
701 | def prunedir(topdir, ionice=False): | 887 | def prunedir(topdir, ionice=False): |
702 | """ Delete everything reachable from the directory named in 'topdir'. """ | 888 | """ |
889 | Delete everything reachable from the directory named in ``topdir``. | ||
890 | |||
891 | Arguments: | ||
892 | |||
893 | - ``topdir``: directory path. | ||
894 | - ``ionice``: prepends ``ionice -c 3`` to the ``rm`` command. See ``man | ||
895 | ionice``. | ||
896 | |||
897 | No return value. | ||
898 | """ | ||
703 | # CAUTION: This is dangerous! | 899 | # CAUTION: This is dangerous! |
704 | if _check_unsafe_delete_path(topdir): | 900 | if _check_unsafe_delete_path(topdir): |
705 | raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir) | 901 | raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir) |
@@ -710,9 +906,16 @@ def prunedir(topdir, ionice=False): | |||
710 | # but thats possibly insane and suffixes is probably going to be small | 906 | # but thats possibly insane and suffixes is probably going to be small |
711 | # | 907 | # |
712 | def prune_suffix(var, suffixes, d): | 908 | def prune_suffix(var, suffixes, d): |
713 | """ | 909 | """ |
714 | See if var ends with any of the suffixes listed and | 910 | Check if ``var`` ends with any of the suffixes listed in ``suffixes`` and |
715 | remove it if found | 911 | remove it if found. |
912 | |||
913 | Arguments: | ||
914 | |||
915 | - ``var``: string to check for suffixes. | ||
916 | - ``suffixes``: list of strings representing suffixes to check for. | ||
917 | |||
918 | Returns the string ``var`` without the suffix. | ||
716 | """ | 919 | """ |
717 | for suffix in suffixes: | 920 | for suffix in suffixes: |
718 | if suffix and var.endswith(suffix): | 921 | if suffix and var.endswith(suffix): |
@@ -721,9 +924,16 @@ def prune_suffix(var, suffixes, d): | |||
721 | 924 | ||
722 | def mkdirhier(directory): | 925 | def mkdirhier(directory): |
723 | """Create a directory like 'mkdir -p', but does not complain if | 926 | """Create a directory like 'mkdir -p', but does not complain if |
724 | directory already exists like os.makedirs | 927 | directory already exists like ``os.makedirs()``. |
725 | """ | ||
726 | 928 | ||
929 | Arguments: | ||
930 | |||
931 | - ``directory``: path to the directory. | ||
932 | |||
933 | No return value. | ||
934 | """ | ||
935 | if '${' in str(directory): | ||
936 | bb.fatal("Directory name {} contains unexpanded bitbake variable. This may cause build failures and WORKDIR polution.".format(directory)) | ||
727 | try: | 937 | try: |
728 | os.makedirs(directory) | 938 | os.makedirs(directory) |
729 | except OSError as e: | 939 | except OSError as e: |
@@ -731,10 +941,24 @@ def mkdirhier(directory): | |||
731 | raise e | 941 | raise e |
732 | 942 | ||
733 | def movefile(src, dest, newmtime = None, sstat = None): | 943 | def movefile(src, dest, newmtime = None, sstat = None): |
734 | """Moves a file from src to dest, preserving all permissions and | 944 | """Moves a file from ``src`` to ``dest``, preserving all permissions and |
735 | attributes; mtime will be preserved even when moving across | 945 | attributes; mtime will be preserved even when moving across |
736 | filesystems. Returns true on success and false on failure. Move is | 946 | filesystems. Returns ``True`` on success and ``False`` on failure. Move is |
737 | atomic. | 947 | atomic. |
948 | |||
949 | Arguments: | ||
950 | |||
951 | - ``src`` -- Source file. | ||
952 | - ``dest`` -- Destination file. | ||
953 | - ``newmtime`` -- new mtime to be passed as float seconds since the epoch. | ||
954 | - ``sstat`` -- os.stat_result to use for the destination file. | ||
955 | |||
956 | Returns an ``os.stat_result`` of the destination file if the | ||
957 | source file is a symbolic link or the ``sstat`` argument represents a | ||
958 | symbolic link - in which case the destination file will also be created as | ||
959 | a symbolic link. | ||
960 | |||
961 | Otherwise, returns ``newmtime`` on success and ``False`` on failure. | ||
738 | """ | 962 | """ |
739 | 963 | ||
740 | #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")" | 964 | #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")" |
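
An illustrative ``movefile()`` call (paths are examples); a false-y return indicates failure::

    import bb
    import bb.utils

    ret = bb.utils.movefile("/tmp/build/output.bin", "/tmp/deploy/output.bin")
    if not ret:
        bb.fatal("move failed")
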
@@ -742,7 +966,7 @@ def movefile(src, dest, newmtime = None, sstat = None): | |||
742 | if not sstat: | 966 | if not sstat: |
743 | sstat = os.lstat(src) | 967 | sstat = os.lstat(src) |
744 | except Exception as e: | 968 | except Exception as e: |
745 | print("movefile: Stating source file failed...", e) | 969 | logger.warning("movefile: Stating source file failed...", e) |
746 | return None | 970 | return None |
747 | 971 | ||
748 | destexists = 1 | 972 | destexists = 1 |
@@ -770,7 +994,7 @@ def movefile(src, dest, newmtime = None, sstat = None): | |||
770 | os.unlink(src) | 994 | os.unlink(src) |
771 | return os.lstat(dest) | 995 | return os.lstat(dest) |
772 | except Exception as e: | 996 | except Exception as e: |
773 | print("movefile: failed to properly create symlink:", dest, "->", target, e) | 997 | logger.warning("movefile: failed to properly create symlink:", dest, "->", target, e) |
774 | return None | 998 | return None |
775 | 999 | ||
776 | renamefailed = 1 | 1000 | renamefailed = 1 |
@@ -782,12 +1006,12 @@ def movefile(src, dest, newmtime = None, sstat = None): | |||
782 | 1006 | ||
783 | if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]: | 1007 | if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]: |
784 | try: | 1008 | try: |
785 | os.rename(src, destpath) | 1009 | bb.utils.rename(src, destpath) |
786 | renamefailed = 0 | 1010 | renamefailed = 0 |
787 | except Exception as e: | 1011 | except Exception as e: |
788 | if e.errno != errno.EXDEV: | 1012 | if e.errno != errno.EXDEV: |
789 | # Some random error. | 1013 | # Some random error. |
790 | print("movefile: Failed to move", src, "to", dest, e) | 1014 | logger.warning("movefile: Failed to move", src, "to", dest, e) |
791 | return None | 1015 | return None |
792 | # Invalid cross-device-link 'bind' mounted or actually Cross-Device | 1016 | # Invalid cross-device-link 'bind' mounted or actually Cross-Device |
793 | 1017 | ||
@@ -796,16 +1020,16 @@ def movefile(src, dest, newmtime = None, sstat = None): | |||
796 | if stat.S_ISREG(sstat[stat.ST_MODE]): | 1020 | if stat.S_ISREG(sstat[stat.ST_MODE]): |
797 | try: # For safety copy then move it over. | 1021 | try: # For safety copy then move it over. |
798 | shutil.copyfile(src, destpath + "#new") | 1022 | shutil.copyfile(src, destpath + "#new") |
799 | os.rename(destpath + "#new", destpath) | 1023 | bb.utils.rename(destpath + "#new", destpath) |
800 | didcopy = 1 | 1024 | didcopy = 1 |
801 | except Exception as e: | 1025 | except Exception as e: |
802 | print('movefile: copy', src, '->', dest, 'failed.', e) | 1026 | logger.warning('movefile: copy', src, '->', dest, 'failed.', e) |
803 | return None | 1027 | return None |
804 | else: | 1028 | else: |
805 | #we don't yet handle special, so we need to fall back to /bin/mv | 1029 | #we don't yet handle special, so we need to fall back to /bin/mv |
806 | a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'") | 1030 | a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'") |
807 | if a[0] != 0: | 1031 | if a[0] != 0: |
808 | print("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a) | 1032 | logger.warning("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a) |
809 | return None # failure | 1033 | return None # failure |
810 | try: | 1034 | try: |
811 | if didcopy: | 1035 | if didcopy: |
@@ -813,7 +1037,7 @@ def movefile(src, dest, newmtime = None, sstat = None): | |||
813 | os.chmod(destpath, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown | 1037 | os.chmod(destpath, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown |
814 | os.unlink(src) | 1038 | os.unlink(src) |
815 | except Exception as e: | 1039 | except Exception as e: |
816 | print("movefile: Failed to chown/chmod/unlink", dest, e) | 1040 | logger.warning("movefile: Failed to chown/chmod/unlink", dest, e) |
817 | return None | 1041 | return None |
818 | 1042 | ||
819 | if newmtime: | 1043 | if newmtime: |
@@ -825,9 +1049,24 @@ def movefile(src, dest, newmtime = None, sstat = None): | |||
825 | 1049 | ||
826 | def copyfile(src, dest, newmtime = None, sstat = None): | 1050 | def copyfile(src, dest, newmtime = None, sstat = None): |
827 | """ | 1051 | """ |
828 | Copies a file from src to dest, preserving all permissions and | 1052 | Copies a file from ``src`` to ``dest``, preserving all permissions and |
829 | attributes; mtime will be preserved even when moving across | 1053 | attributes; mtime will be preserved even when moving across |
830 | filesystems. Returns true on success and false on failure. | 1054 | filesystems. |
1055 | |||
1056 | Arguments: | ||
1057 | |||
1058 | - ``src``: Source file. | ||
1059 | - ``dest``: Destination file. | ||
1060 | - ``newmtime``: new mtime to be passed as float seconds since the epoch. | ||
1061 | - ``sstat``: os.stat_result to use for the destination file. | ||
1062 | |||
1063 | Returns an ``os.stat_result`` of the destination file if the | ||
1064 | source file is a symbolic link or the ``sstat`` argument represents a | ||
1065 | symbolic link - in which case the destination file will also be created as | ||
1066 | a symbolic link. | ||
1067 | |||
1068 | Otherwise, returns ``newmtime`` on success and ``False`` on failure. | ||
1069 | |||
831 | """ | 1070 | """ |
832 | #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")" | 1071 | #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")" |
833 | try: | 1072 | try: |
@@ -874,7 +1113,7 @@ def copyfile(src, dest, newmtime = None, sstat = None): | |||
874 | 1113 | ||
875 | # For safety copy then move it over. | 1114 | # For safety copy then move it over. |
876 | shutil.copyfile(src, dest + "#new") | 1115 | shutil.copyfile(src, dest + "#new") |
877 | os.rename(dest + "#new", dest) | 1116 | bb.utils.rename(dest + "#new", dest) |
878 | except Exception as e: | 1117 | except Exception as e: |
879 | logger.warning("copyfile: copy %s to %s failed (%s)" % (src, dest, e)) | 1118 | logger.warning("copyfile: copy %s to %s failed (%s)" % (src, dest, e)) |
880 | return False | 1119 | return False |
@@ -905,10 +1144,16 @@ def copyfile(src, dest, newmtime = None, sstat = None): | |||
905 | 1144 | ||
906 | def break_hardlinks(src, sstat = None): | 1145 | def break_hardlinks(src, sstat = None): |
907 | """ | 1146 | """ |
908 | Ensures src is the only hardlink to this file. Other hardlinks, | 1147 | Ensures ``src`` is the only hardlink to this file. Other hardlinks, |
909 | if any, are not affected (other than in their st_nlink value, of | 1148 | if any, are not affected (other than in their st_nlink value, of |
910 | course). Returns true on success and false on failure. | 1149 | course). |
1150 | |||
1151 | Arguments: | ||
1152 | |||
1153 | - ``src``: source file path. | ||
1154 | - ``sstat``: os.stat_result to use when checking if the file is a link. | ||
911 | 1155 | ||
1156 | Returns ``True`` on success and ``False`` on failure. | ||
912 | """ | 1157 | """ |
913 | try: | 1158 | try: |
914 | if not sstat: | 1159 | if not sstat: |
@@ -922,11 +1167,24 @@ def break_hardlinks(src, sstat = None): | |||
922 | 1167 | ||
923 | def which(path, item, direction = 0, history = False, executable=False): | 1168 | def which(path, item, direction = 0, history = False, executable=False): |
924 | """ | 1169 | """ |
925 | Locate `item` in the list of paths `path` (colon separated string like $PATH). | 1170 | Locate ``item`` in the list of paths ``path`` (colon separated string like |
926 | If `direction` is non-zero then the list is reversed. | 1171 | ``$PATH``). |
927 | If `history` is True then the list of candidates also returned as result,history. | 1172 | |
928 | If `executable` is True then the candidate has to be an executable file, | 1173 | Arguments: |
929 | otherwise the candidate simply has to exist. | 1174 | |
1175 | - ``path``: list of colon-separated paths. | ||
1176 | - ``item``: string to search for. | ||
1177 | - ``direction``: if non-zero then the list is reversed. | ||
1178 | - ``history``: if ``True`` then the list of candidates also returned as | ||
1179 | ``result,history`` where ``history`` is the list of previous path | ||
1180 | checked. | ||
1181 | - ``executable``: if ``True`` then the candidate defined by ``path`` has | ||
1182 | to be an executable file, otherwise if ``False`` the candidate simply | ||
1183 | has to exist. | ||
1184 | |||
1185 | Returns the item if found in the list of path, otherwise an empty string. | ||
1186 | If ``history`` is ``True``, return the list of previous path checked in a | ||
1187 | tuple with the found (or not found) item as ``(item, history)``. | ||
930 | """ | 1188 | """ |
931 | 1189 | ||
932 | if executable: | 1190 | if executable: |
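
Illustrative lookups using the documented modes of ``which()``::

    import os
    import bb.utils

    bb.utils.which(os.environ["PATH"], "sh", executable=True)
    # -> e.g. '/usr/bin/sh', or '' if not found

    found, hist = bb.utils.which(os.environ["PATH"], "sh", history=True)
    # hist lists every candidate path that was checked
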
@@ -953,10 +1211,29 @@ def which(path, item, direction = 0, history = False, executable=False): | |||
953 | return "", hist | 1211 | return "", hist |
954 | return "" | 1212 | return "" |
955 | 1213 | ||
1214 | def to_filemode(input): | ||
1215 | """ | ||
1216 | Take a bitbake variable contents defining a file mode and return | ||
1217 | the proper python representation of the number | ||
1218 | |||
1219 | Arguments: | ||
1220 | |||
1221 | - ``input``: a string or number to convert, e.g. a bitbake variable | ||
1222 | string, assumed to be an octal representation | ||
1223 | |||
1224 | Returns the python file mode as a number | ||
1225 | """ | ||
1226 | # umask might come in as a number or text string.. | ||
1227 | if type(input) is int: | ||
1228 | return input | ||
1229 | return int(input, 8) | ||
1230 | |||
956 | @contextmanager | 1231 | @contextmanager |
957 | def umask(new_mask): | 1232 | def umask(new_mask): |
958 | """ | 1233 | """ |
959 | Context manager to set the umask to a specific mask, and restore it afterwards. | 1234 | Context manager to set the umask to a specific mask, and restore it afterwards. |
1235 | |||
1236 | No return value. | ||
960 | """ | 1237 | """ |
961 | current_mask = os.umask(new_mask) | 1238 | current_mask = os.umask(new_mask) |
962 | try: | 1239 | try: |
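
A sketch of the new ``to_filemode()`` helper together with the ``umask()`` context manager (values illustrative)::

    import bb.utils

    bb.utils.to_filemode("0755")   # -> 493 (0o755)
    bb.utils.to_filemode(0o644)    # -> 420 (ints pass straight through)

    with bb.utils.umask(0o022):
        pass  # files created here use the temporary umask
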
@@ -965,13 +1242,26 @@ def umask(new_mask): | |||
965 | os.umask(current_mask) | 1242 | os.umask(current_mask) |
966 | 1243 | ||
967 | def to_boolean(string, default=None): | 1244 | def to_boolean(string, default=None): |
968 | """ | 1245 | """ |
969 | Check input string and return boolean value True/False/None | 1246 | Check input string and return boolean value True/False/None |
970 | depending upon the checks | 1247 | depending upon the checks. |
1248 | |||
1249 | Arguments: | ||
1250 | |||
1251 | - ``string``: input string. | ||
1252 | - ``default``: default return value if the input ``string`` is ``None``, | ||
1253 | ``0``, ``False`` or an empty string. | ||
1254 | |||
1255 | Returns ``True`` if the string is one of "y", "yes", "1", "true", ``False`` | ||
1256 | if the string is one of "n", "no", "0", or "false". Return ``default`` if | ||
1257 | the input ``string`` is ``None``, ``0``, ``False`` or an empty string. | ||
971 | """ | 1258 | """ |
972 | if not string: | 1259 | if not string: |
973 | return default | 1260 | return default |
974 | 1261 | ||
1262 | if isinstance(string, int): | ||
1263 | return string != 0 | ||
1264 | |||
975 | normalized = string.lower() | 1265 | normalized = string.lower() |
976 | if normalized in ("y", "yes", "1", "true"): | 1266 | if normalized in ("y", "yes", "1", "true"): |
977 | return True | 1267 | return True |
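
Expected results of ``to_boolean()`` for a few representative inputs (illustrative)::

    import bb.utils

    bb.utils.to_boolean("Yes")               # -> True
    bb.utils.to_boolean("0")                 # -> False
    bb.utils.to_boolean("", default=True)    # -> True (empty input falls back to default)
    bb.utils.to_boolean(None)                # -> None (no default given)
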
@@ -985,18 +1275,17 @@ def contains(variable, checkvalues, truevalue, falsevalue, d): | |||
985 | 1275 | ||
986 | Arguments: | 1276 | Arguments: |
987 | 1277 | ||
988 | variable -- the variable name. This will be fetched and expanded (using | 1278 | - ``variable``: the variable name. This will be fetched and expanded (using |
989 | d.getVar(variable)) and then split into a set(). | 1279 | d.getVar(variable)) and then split into a set(). |
990 | 1280 | - ``checkvalues``: if this is a string it is split on whitespace into a set(), | |
991 | checkvalues -- if this is a string it is split on whitespace into a set(), | 1281 | otherwise coerced directly into a set(). |
992 | otherwise coerced directly into a set(). | 1282 | - ``truevalue``: the value to return if checkvalues is a subset of variable. |
1283 | - ``falsevalue``: the value to return if variable is empty or if checkvalues is | ||
1284 | not a subset of variable. | ||
1285 | - ``d``: the data store. | ||
993 | 1286 | ||
994 | truevalue -- the value to return if checkvalues is a subset of variable. | 1287 | Returns ``True`` if the variable contains the values specified, ``False`` |
995 | 1288 | otherwise. | |
996 | falsevalue -- the value to return if variable is empty or if checkvalues is | ||
997 | not a subset of variable. | ||
998 | |||
999 | d -- the data store. | ||
1000 | """ | 1289 | """ |
1001 | 1290 | ||
1002 | val = d.getVar(variable) | 1291 | val = d.getVar(variable) |
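
A sketch of a typical ``contains()`` call from recipe python code, where ``bb`` and the data store ``d`` are in scope and the variable values are illustrative::

    # With DISTRO_FEATURES = "systemd usrmerge":
    bb.utils.contains("DISTRO_FEATURES", "systemd", "yes", "no", d)
    # -> "yes"
    bb.utils.contains("DISTRO_FEATURES", "systemd wayland", "yes", "no", d)
    # -> "no" (all checkvalues must be present)
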
@@ -1016,18 +1305,17 @@ def contains_any(variable, checkvalues, truevalue, falsevalue, d): | |||
1016 | 1305 | ||
1017 | Arguments: | 1306 | Arguments: |
1018 | 1307 | ||
1019 | variable -- the variable name. This will be fetched and expanded (using | 1308 | - ``variable``: the variable name. This will be fetched and expanded (using |
1020 | d.getVar(variable)) and then split into a set(). | 1309 | d.getVar(variable)) and then split into a set(). |
1021 | 1310 | - ``checkvalues``: if this is a string it is split on whitespace into a set(), | |
1022 | checkvalues -- if this is a string it is split on whitespace into a set(), | 1311 | otherwise coerced directly into a set(). |
1023 | otherwise coerced directly into a set(). | 1312 | - ``truevalue``: the value to return if checkvalues is a subset of variable. |
1024 | 1313 | - ``falsevalue``: the value to return if variable is empty or if checkvalues is | |
1025 | truevalue -- the value to return if checkvalues is a subset of variable. | 1314 | not a subset of variable. |
1315 | - ``d``: the data store. | ||
1026 | 1316 | ||
1027 | falsevalue -- the value to return if variable is empty or if checkvalues is | 1317 | Returns ``True`` if the variable contains any of the values specified, |
1028 | not a subset of variable. | 1318 | ``False`` otherwise. |
1029 | |||
1030 | d -- the data store. | ||
1031 | """ | 1319 | """ |
1032 | val = d.getVar(variable) | 1320 | val = d.getVar(variable) |
1033 | if not val: | 1321 | if not val: |
@@ -1042,17 +1330,17 @@ def contains_any(variable, checkvalues, truevalue, falsevalue, d): | |||
1042 | return falsevalue | 1330 | return falsevalue |
1043 | 1331 | ||
1044 | def filter(variable, checkvalues, d): | 1332 | def filter(variable, checkvalues, d): |
1045 | """Return all words in the variable that are present in the checkvalues. | 1333 | """Return all words in the variable that are present in the ``checkvalues``. |
1046 | 1334 | ||
1047 | Arguments: | 1335 | Arguments: |
1048 | 1336 | ||
1049 | variable -- the variable name. This will be fetched and expanded (using | 1337 | - ``variable``: the variable name. This will be fetched and expanded (using |
1050 | d.getVar(variable)) and then split into a set(). | 1338 | d.getVar(variable)) and then split into a set(). |
1051 | 1339 | - ``checkvalues``: if this is a string it is split on whitespace into a set(), | |
1052 | checkvalues -- if this is a string it is split on whitespace into a set(), | 1340 | otherwise coerced directly into a set(). |
1053 | otherwise coerced directly into a set(). | 1341 | - ``d``: the data store. |
1054 | 1342 | ||
1055 | d -- the data store. | 1343 | Returns a list of string. |
1056 | """ | 1344 | """ |
1057 | 1345 | ||
1058 | val = d.getVar(variable) | 1346 | val = d.getVar(variable) |
@@ -1068,8 +1356,27 @@ def filter(variable, checkvalues, d): | |||
1068 | 1356 | ||
1069 | def get_referenced_vars(start_expr, d): | 1357 | def get_referenced_vars(start_expr, d): |
1070 | """ | 1358 | """ |
1071 | :return: names of vars referenced in start_expr (recursively), in quasi-BFS order (variables within the same level | 1359 | Get the names of the variables referenced in a given expression. |
1072 | are ordered arbitrarily) | 1360 | |
1361 | Arguments: | ||
1362 | |||
1363 | - ``start_expr``: the expression where to look for variables references. | ||
1364 | |||
1365 | For example:: | ||
1366 | |||
1367 | ${VAR_A} string ${VAR_B} | ||
1368 | |||
1369 | Or:: | ||
1370 | |||
1371 | ${@d.getVar('VAR')} | ||
1372 | |||
1373 | If a variables makes references to other variables, the latter are also | ||
1374 | returned recursively. | ||
1375 | |||
1376 | - ``d``: the data store. | ||
1377 | |||
1378 | Returns the names of vars referenced in ``start_expr`` (recursively), in | ||
1379 | quasi-BFS order (variables within the same level are ordered arbitrarily). | ||
1073 | """ | 1380 | """ |
1074 | 1381 | ||
1075 | seen = set() | 1382 | seen = set() |
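
An illustrative call from code that has a data store ``d`` available; the expression is an example::

    refs = bb.utils.get_referenced_vars("${WORKDIR}/${BP}.patch", d)
    # -> e.g. ['WORKDIR', 'BP', ...] plus the variables those expansions
    #    reference in turn, level by level (quasi-BFS)
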
@@ -1103,7 +1410,10 @@ def get_referenced_vars(start_expr, d): | |||
1103 | 1410 | ||
1104 | 1411 | ||
1105 | def cpu_count(): | 1412 | def cpu_count(): |
1106 | return multiprocessing.cpu_count() | 1413 | try: |
1414 | return len(os.sched_getaffinity(0)) | ||
1415 | except OSError: | ||
1416 | return multiprocessing.cpu_count() | ||
1107 | 1417 | ||
1108 | def nonblockingfd(fd): | 1418 | def nonblockingfd(fd): |
1109 | fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK) | 1419 | fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK) |
@@ -1146,7 +1456,9 @@ def multiprocessingpool(*args, **kwargs): | |||
1146 | return multiprocessing.Pool(*args, **kwargs) | 1456 | return multiprocessing.Pool(*args, **kwargs) |
1147 | 1457 | ||
1148 | def exec_flat_python_func(func, *args, **kwargs): | 1458 | def exec_flat_python_func(func, *args, **kwargs): |
1149 | """Execute a flat python function (defined with def funcname(args):...)""" | 1459 | """Execute a flat python function (defined with ``def funcname(args): ...``) |
1460 | |||
1461 | Returns the return value of the function.""" | ||
1150 | # Prepare a small piece of python code which calls the requested function | 1462 | # Prepare a small piece of python code which calls the requested function |
1151 | # To do this we need to prepare two things - a set of variables we can use to pass | 1463 | # To do this we need to prepare two things - a set of variables we can use to pass |
1152 | # the values of arguments into the calling function, and the list of arguments for | 1464 | # the values of arguments into the calling function, and the list of arguments for |
@@ -1172,48 +1484,57 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False): | |||
1172 | """Edit lines from a recipe or config file and modify one or more | 1484 | """Edit lines from a recipe or config file and modify one or more |
1173 | specified variable values set in the file using a specified callback | 1485 | specified variable values set in the file using a specified callback |
1174 | function. Lines are expected to have trailing newlines. | 1486 | function. Lines are expected to have trailing newlines. |
1175 | Parameters: | 1487 | |
1176 | meta_lines: lines from the file; can be a list or an iterable | 1488 | Arguments: |
1177 | (e.g. file pointer) | 1489 | |
1178 | variables: a list of variable names to look for. Functions | 1490 | - ``meta_lines``: lines from the file; can be a list or an iterable |
1179 | may also be specified, but must be specified with '()' at | 1491 | (e.g. file pointer) |
1180 | the end of the name. Note that the function doesn't have | 1492 | - ``variables``: a list of variable names to look for. Functions |
1181 | any intrinsic understanding of _append, _prepend, _remove, | 1493 | may also be specified, but must be specified with ``()`` at |
1182 | or overrides, so these are considered as part of the name. | 1494 | the end of the name. Note that the function doesn't have |
1183 | These values go into a regular expression, so regular | 1495 | any intrinsic understanding of ``:append``, ``:prepend``, ``:remove``, |
1184 | expression syntax is allowed. | 1496 | or overrides, so these are considered as part of the name. |
1185 | varfunc: callback function called for every variable matching | 1497 | These values go into a regular expression, so regular |
1186 | one of the entries in the variables parameter. The function | 1498 | expression syntax is allowed. |
1187 | should take four arguments: | 1499 | - ``varfunc``: callback function called for every variable matching |
1188 | varname: name of variable matched | 1500 | one of the entries in the variables parameter. |
1189 | origvalue: current value in file | 1501 | |
1190 | op: the operator (e.g. '+=') | 1502 | The function should take four arguments: |
1191 | newlines: list of lines up to this point. You can use | 1503 | |
1192 | this to prepend lines before this variable setting | 1504 | - ``varname``: name of variable matched |
1193 | if you wish. | 1505 | - ``origvalue``: current value in file |
1194 | and should return a four-element tuple: | 1506 | - ``op``: the operator (e.g. ``+=``) |
1195 | newvalue: new value to substitute in, or None to drop | 1507 | - ``newlines``: list of lines up to this point. You can use |
1196 | the variable setting entirely. (If the removal | 1508 | this to prepend lines before this variable setting |
1197 | results in two consecutive blank lines, one of the | 1509 | if you wish. |
1198 | blank lines will also be dropped). | 1510 | |
1199 | newop: the operator to use - if you specify None here, | 1511 | And should return a four-element tuple: |
1200 | the original operation will be used. | 1512 | |
1201 | indent: number of spaces to indent multi-line entries, | 1513 | - ``newvalue``: new value to substitute in, or ``None`` to drop |
1202 | or -1 to indent up to the level of the assignment | 1514 | the variable setting entirely. (If the removal |
1203 | and opening quote, or a string to use as the indent. | 1515 | results in two consecutive blank lines, one of the |
1204 | minbreak: True to allow the first element of a | 1516 | blank lines will also be dropped). |
1205 | multi-line value to continue on the same line as | 1517 | - ``newop``: the operator to use - if you specify ``None`` here, |
1206 | the assignment, False to indent before the first | 1518 | the original operation will be used. |
1207 | element. | 1519 | - ``indent``: number of spaces to indent multi-line entries, |
1208 | To clarify, if you wish not to change the value, then you | 1520 | or ``-1`` to indent up to the level of the assignment |
1209 | would return like this: return origvalue, None, 0, True | 1521 | and opening quote, or a string to use as the indent. |
1210 | match_overrides: True to match items with _overrides on the end, | 1522 | - ``minbreak``: ``True`` to allow the first element of a |
1211 | False otherwise | 1523 | multi-line value to continue on the same line as |
1524 | the assignment, ``False`` to indent before the first | ||
1525 | element. | ||
1526 | |||
1527 | To clarify, if you wish not to change the value, then you | ||
1528 | would return like this:: | ||
1529 | |||
1530 | return origvalue, None, 0, True | ||
1531 | - ``match_overrides``: True to match items with _overrides on the end, | ||
1532 | False otherwise | ||
1533 | |||
1212 | Returns a tuple: | 1534 | Returns a tuple: |
1213 | updated: | 1535 | |
1214 | True if changes were made, False otherwise. | 1536 | - ``updated``: ``True`` if changes were made, ``False`` otherwise. |
1215 | newlines: | 1537 | - ``newlines``: Lines after processing. |
1216 | Lines after processing | ||
1217 | """ | 1538 | """ |
1218 | 1539 | ||
1219 | var_res = {} | 1540 | var_res = {} |
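
A minimal sketch of an ``edit_metadata()`` callback that rewrites one variable (names and values illustrative)::

    import bb.utils

    def bump_pr(varname, origvalue, op, newlines):
        # keep the original operator and indentation, just change the value
        return "r1", None, 0, True

    lines = ['PR = "r0"\n', 'SRC_URI = "file://a.patch"\n']
    updated, newlines = bb.utils.edit_metadata(lines, ["PR"], bump_pr)
    # updated -> True; newlines now carry the PR = "r1" assignment
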
@@ -1357,12 +1678,13 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False): | |||
1357 | 1678 | ||
1358 | 1679 | ||
1359 | def edit_metadata_file(meta_file, variables, varfunc): | 1680 | def edit_metadata_file(meta_file, variables, varfunc): |
1360 | """Edit a recipe or config file and modify one or more specified | 1681 | """Edit a recipe or configuration file and modify one or more specified |
1361 | variable values set in the file using a specified callback function. | 1682 | variable values set in the file using a specified callback function. |
1362 | The file is only written to if the value(s) actually change. | 1683 | The file is only written to if the value(s) actually change. |
1363 | This is basically the file version of edit_metadata(), see that | 1684 | This is basically the file version of ``bb.utils.edit_metadata()``, see that |
1364 | function's description for parameter/usage information. | 1685 | function's description for parameter/usage information. |
1365 | Returns True if the file was written to, False otherwise. | 1686 | |
1687 | Returns ``True`` if the file was written to, ``False`` otherwise. | ||
1366 | """ | 1688 | """ |
1367 | with open(meta_file, 'r') as f: | 1689 | with open(meta_file, 'r') as f: |
1368 | (updated, newlines) = edit_metadata(f, variables, varfunc) | 1690 | (updated, newlines) = edit_metadata(f, variables, varfunc) |
@@ -1373,23 +1695,25 @@ def edit_metadata_file(meta_file, variables, varfunc): | |||
1373 | 1695 | ||
1374 | 1696 | ||
1375 | def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None): | 1697 | def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None): |
1376 | """Edit bblayers.conf, adding and/or removing layers | 1698 | """Edit ``bblayers.conf``, adding and/or removing layers. |
1377 | Parameters: | 1699 | |
1378 | bblayers_conf: path to bblayers.conf file to edit | 1700 | Arguments: |
1379 | add: layer path (or list of layer paths) to add; None or empty | 1701 | |
1380 | list to add nothing | 1702 | - ``bblayers_conf``: path to ``bblayers.conf`` file to edit |
1381 | remove: layer path (or list of layer paths) to remove; None or | 1703 | - ``add``: layer path (or list of layer paths) to add; ``None`` or empty |
1382 | empty list to remove nothing | 1704 | list to add nothing |
1383 | edit_cb: optional callback function that will be called after | 1705 | - ``remove``: layer path (or list of layer paths) to remove; ``None`` or |
1384 | processing adds/removes once per existing entry. | 1706 | empty list to remove nothing |
1707 | - ``edit_cb``: optional callback function that will be called | ||
1708 | after processing adds/removes once per existing entry. | ||
1709 | |||
1385 | Returns a tuple: | 1710 | Returns a tuple: |
1386 | notadded: list of layers specified to be added but weren't | ||
1387 | (because they were already in the list) | ||
1388 | notremoved: list of layers that were specified to be removed | ||
1389 | but weren't (because they weren't in the list) | ||
1390 | """ | ||
1391 | 1711 | ||
1392 | import fnmatch | 1712 | - ``notadded``: list of layers specified to be added but weren't |
1713 | (because they were already in the list) | ||
1714 | - ``notremoved``: list of layers that were specified to be removed | ||
1715 | but weren't (because they weren't in the list) | ||
1716 | """ | ||
1393 | 1717 | ||
1394 | def remove_trailing_sep(pth): | 1718 | def remove_trailing_sep(pth): |
1395 | if pth and pth[-1] == os.sep: | 1719 | if pth and pth[-1] == os.sep: |
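A hedged usage sketch (the paths are hypothetical)::

    import bb.utils

    notadded, notremoved = bb.utils.edit_bblayers_conf(
        'conf/bblayers.conf',
        add='/srv/layers/meta-custom',
        remove=None)
    # notadded lists layers that were already present,
    # notremoved lists layers that were not in the file to begin with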
@@ -1508,7 +1832,22 @@ def get_collection_res(d): | |||
1508 | 1832 | ||
1509 | 1833 | ||
1510 | def get_file_layer(filename, d, collection_res={}): | 1834 | def get_file_layer(filename, d, collection_res={}): |
1511 | """Determine the collection (as defined by a layer's layer.conf file) containing the specified file""" | 1835 | """Determine the collection (or layer name, as defined by a layer's |
1836 | ``layer.conf`` file) containing the specified file. | ||
1837 | |||
1838 | Arguments: | ||
1839 | |||
1840 | - ``filename``: the filename to look for. | ||
1841 | - ``d``: the data store. | ||
1842 | - ``collection_res``: dictionary with layer names as keys and file patterns | ||
1843 | to match as values, as defined by the ``BBFILE_COLLECTIONS`` and | ||
1844 | ``BBFILE_PATTERN`` variables respectively. Defaults to the return value | ||
1845 | of ``bb.utils.get_collection_res()`` if this argument is not | ||
1846 | specified. | ||
1847 | |||
1848 | Returns the layer name containing the file. If multiple layers contain the | ||
1849 | file, the last matching layer name from ``collection_res`` is returned. | ||
1850 | """ | ||
1512 | if not collection_res: | 1851 | if not collection_res: |
1513 | collection_res = get_collection_res(d) | 1852 | collection_res = get_collection_res(d) |
1514 | 1853 | ||
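For illustration, a minimal call (the recipe path is hypothetical and ``d`` is assumed to be an existing datastore)::

    import bb.utils

    layername = bb.utils.get_file_layer(
        '/srv/layers/meta-custom/recipes-example/example/example_1.0.bb', d)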
@@ -1546,7 +1885,13 @@ class PrCtlError(Exception): | |||
1546 | 1885 | ||
1547 | def signal_on_parent_exit(signame): | 1886 | def signal_on_parent_exit(signame): |
1548 | """ | 1887 | """ |
1549 | Trigger signame to be sent when the parent process dies | 1888 | Trigger ``signame`` to be sent when the parent process dies. |
1889 | |||
1890 | Arguments: | ||
1891 | |||
1892 | - ``signame``: name of the signal. See ``man signal``. | ||
1893 | |||
1894 | No return value. | ||
1550 | """ | 1895 | """ |
1551 | signum = getattr(signal, signame) | 1896 | signum = getattr(signal, signame) |
1552 | # http://linux.die.net/man/2/prctl | 1897 | # http://linux.die.net/man/2/prctl |
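A sketch of the typical call site, assumed to run in a freshly forked child process::

    import bb.utils

    # ask the kernel to deliver SIGTERM to this process when its parent dies
    bb.utils.signal_on_parent_exit("SIGTERM")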
@@ -1581,7 +1926,7 @@ def ioprio_set(who, cls, value): | |||
1581 | bb.warn("Unable to set IO Prio for arch %s" % _unamearch) | 1926 | bb.warn("Unable to set IO Prio for arch %s" % _unamearch) |
1582 | 1927 | ||
1583 | def set_process_name(name): | 1928 | def set_process_name(name): |
1584 | from ctypes import cdll, byref, create_string_buffer | 1929 | from ctypes import byref, create_string_buffer |
1585 | # This is nice to have for debugging, not essential | 1930 | # This is nice to have for debugging, not essential |
1586 | try: | 1931 | try: |
1587 | libc = cdll.LoadLibrary('libc.so.6') | 1932 | libc = cdll.LoadLibrary('libc.so.6') |
@@ -1590,33 +1935,96 @@ def set_process_name(name): | |||
1590 | except: | 1935 | except: |
1591 | pass | 1936 | pass |
1592 | 1937 | ||
1593 | def export_proxies(d): | 1938 | def enable_loopback_networking(): |
1594 | """ export common proxies variables from datastore to environment """ | 1939 | # From bits/ioctls.h |
1595 | import os | 1940 | SIOCGIFFLAGS = 0x8913 |
1941 | SIOCSIFFLAGS = 0x8914 | ||
1942 | SIOCSIFADDR = 0x8916 | ||
1943 | SIOCSIFNETMASK = 0x891C | ||
1596 | 1944 | ||
1597 | variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY', | 1945 | # if.h |
1598 | 'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY', | 1946 | IFF_UP = 0x1 |
1599 | 'GIT_PROXY_COMMAND'] | 1947 | IFF_RUNNING = 0x40 |
1600 | exported = False | ||
1601 | 1948 | ||
1602 | for v in variables: | 1949 | # bits/socket.h |
1603 | if v in os.environ.keys(): | 1950 | AF_INET = 2 |
1604 | exported = True | 1951 | |
1605 | else: | 1952 | # char ifr_name[IFNAMSIZ=16] |
1606 | v_proxy = d.getVar(v) | 1953 | ifr_name = struct.pack("@16s", b"lo") |
1607 | if v_proxy is not None: | 1954 | def netdev_req(fd, req, data = b""): |
1608 | os.environ[v] = v_proxy | 1955 | # Pad and add interface name |
1609 | exported = True | 1956 | data = ifr_name + data + (b'\x00' * (16 - len(data))) |
1957 | # Return all data after interface name | ||
1958 | return fcntl.ioctl(fd, req, data)[16:] | ||
1959 | |||
1960 | with socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_IP) as sock: | ||
1961 | fd = sock.fileno() | ||
1962 | |||
1963 | # struct sockaddr_in ifr_addr { unsigned short family; uint16_t sin_port ; uint32_t in_addr; } | ||
1964 | req = struct.pack("@H", AF_INET) + struct.pack("=H4B", 0, 127, 0, 0, 1) | ||
1965 | netdev_req(fd, SIOCSIFADDR, req) | ||
1610 | 1966 | ||
1611 | return exported | 1967 | # short ifr_flags |
1968 | flags = struct.unpack_from('@h', netdev_req(fd, SIOCGIFFLAGS))[0] | ||
1969 | flags |= IFF_UP | IFF_RUNNING | ||
1970 | netdev_req(fd, SIOCSIFFLAGS, struct.pack('@h', flags)) | ||
1612 | 1971 | ||
1972 | # struct sockaddr_in ifr_netmask | ||
1973 | req = struct.pack("@H", AF_INET) + struct.pack("=H4B", 0, 255, 0, 0, 0) | ||
1974 | netdev_req(fd, SIOCSIFNETMASK, req) | ||
1975 | |||
1976 | def disable_network(uid=None, gid=None): | ||
1977 | """ | ||
1978 | Disable networking in the current process if the kernel supports it, else | ||
1979 | just return after logging a debug message. To do this we need to create a | ||
1980 | new user namespace, then map back to the original uid/gid. | ||
1981 | |||
1982 | Arguments: | ||
1983 | |||
1984 | - ``uid``: original user id. | ||
1985 | - ``gid``: original user group id. | ||
1986 | |||
1987 | No return value. | ||
1988 | """ | ||
1989 | libc = ctypes.CDLL('libc.so.6') | ||
1990 | |||
1991 | # From sched.h | ||
1992 | # New user namespace | ||
1993 | CLONE_NEWUSER = 0x10000000 | ||
1994 | # New network namespace | ||
1995 | CLONE_NEWNET = 0x40000000 | ||
1996 | |||
1997 | if uid is None: | ||
1998 | uid = os.getuid() | ||
1999 | if gid is None: | ||
2000 | gid = os.getgid() | ||
2001 | |||
2002 | ret = libc.unshare(CLONE_NEWNET | CLONE_NEWUSER) | ||
2003 | if ret != 0: | ||
2004 | logger.debug("System doesn't support disabling network without admin privs") | ||
2005 | return | ||
2006 | with open("/proc/self/uid_map", "w") as f: | ||
2007 | f.write("%s %s 1" % (uid, uid)) | ||
2008 | with open("/proc/self/setgroups", "w") as f: | ||
2009 | f.write("deny") | ||
2010 | with open("/proc/self/gid_map", "w") as f: | ||
2011 | f.write("%s %s 1" % (gid, gid)) | ||
2012 | |||
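A sketch of how the two new helpers are assumed to be combined in a worker process: drop networking into a fresh namespace, then bring loopback back up so localhost traffic keeps working::

    import os
    import bb.utils

    uid, gid = os.getuid(), os.getgid()
    bb.utils.disable_network(uid, gid)     # no-op with a debug log if unsupported
    # only meaningful inside the new namespace (or with CAP_NET_ADMIN):
    bb.utils.enable_loopback_networking()  # configure 127.0.0.1 on lo and bring it up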
2013 | def export_proxies(d): | ||
2014 | from bb.fetch2 import get_fetcher_environment | ||
2015 | """ export common proxies variables from datastore to environment """ | ||
2016 | newenv = get_fetcher_environment(d) | ||
2017 | for v in newenv: | ||
2018 | os.environ[v] = newenv[v] | ||
1613 | 2019 | ||
1614 | def load_plugins(logger, plugins, pluginpath): | 2020 | def load_plugins(logger, plugins, pluginpath): |
1615 | def load_plugin(name): | 2021 | def load_plugin(name): |
1616 | logger.debug('Loading plugin %s' % name) | 2022 | logger.debug('Loading plugin %s' % name) |
1617 | spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] ) | 2023 | spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] ) |
1618 | if spec: | 2024 | if spec: |
1619 | return spec.loader.load_module() | 2025 | mod = importlib.util.module_from_spec(spec) |
2026 | spec.loader.exec_module(mod) | ||
2027 | return mod | ||
1620 | 2028 | ||
1621 | logger.debug('Loading plugins from %s...' % pluginpath) | 2029 | logger.debug('Loading plugins from %s...' % pluginpath) |
1622 | 2030 | ||
@@ -1646,9 +2054,14 @@ class LogCatcher(logging.Handler): | |||
1646 | 2054 | ||
1647 | def is_semver(version): | 2055 | def is_semver(version): |
1648 | """ | 2056 | """ |
1649 | Is the version string following the semver semantic? | 2057 | Arguments: |
2058 | |||
2059 | - ``version``: the version string. | ||
2060 | |||
2061 | Returns ``True`` if the version string follows semantic versioning, ``False`` | ||
2062 | otherwise. | ||
1650 | 2063 | ||
1651 | https://semver.org/spec/v2.0.0.html | 2064 | See https://semver.org/spec/v2.0.0.html. |
1652 | """ | 2065 | """ |
1653 | regex = re.compile( | 2066 | regex = re.compile( |
1654 | r""" | 2067 | r""" |
@@ -1669,3 +2082,150 @@ def is_semver(version): | |||
1669 | return False | 2082 | return False |
1670 | 2083 | ||
1671 | return True | 2084 | return True |
2085 | |||
2086 | # Wrapper around os.rename which can handle cross device problems | ||
2087 | # e.g. from container filesystems | ||
2088 | def rename(src, dst): | ||
2089 | try: | ||
2090 | os.rename(src, dst) | ||
2091 | except OSError as err: | ||
2092 | if err.errno == 18: | ||
2093 | # Invalid cross-device link error | ||
2094 | shutil.move(src, dst) | ||
2095 | else: | ||
2096 | raise err | ||
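Errno 18 is ``EXDEV`` ("Invalid cross-device link"), raised when source and destination live on different filesystems; ``shutil.move()`` then falls back to copy-and-delete. A minimal usage sketch (the paths are hypothetical)::

    import bb.utils

    bb.utils.rename("tmp/deploy/images/image.ext4", "/mnt/nfs/images/image.ext4")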
2097 | |||
2098 | @contextmanager | ||
2099 | def environment(**envvars): | ||
2100 | """ | ||
2101 | Context manager to selectively update the environment with the specified mapping. | ||
2102 | |||
2103 | No return value. | ||
2104 | """ | ||
2105 | backup = dict(os.environ) | ||
2106 | try: | ||
2107 | os.environ.update(envvars) | ||
2108 | yield | ||
2109 | finally: | ||
2110 | for var in envvars: | ||
2111 | if var in backup: | ||
2112 | os.environ[var] = backup[var] | ||
2113 | elif var in os.environ: | ||
2114 | del os.environ[var] | ||
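A usage sketch (the variable name and value are illustrative)::

    import os
    import bb.utils

    with bb.utils.environment(HTTP_PROXY="http://proxy.example.com:8080"):
        # HTTP_PROXY is visible to this process and anything it spawns here
        print(os.environ["HTTP_PROXY"])
    # on exit the previous value is restored, or the variable is removed if it was unset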
2115 | |||
2116 | def is_local_uid(uid=''): | ||
2117 | """ | ||
2118 | Check whether ``uid`` is a local one or not. | ||
2119 | The ``pwd`` module can't be used since it returns all UIDs, not only local ones. | ||
2120 | |||
2121 | Arguments: | ||
2122 | |||
2123 | - ``uid``: user id. If not specified the user id is determined from | ||
2124 | ``os.getuid()``. | ||
2125 | |||
2126 | Returns ``True`` if the user id is local, ``False`` otherwise. | ||
2127 | """ | ||
2128 | if not uid: | ||
2129 | uid = os.getuid() | ||
2130 | with open('/etc/passwd', 'r') as f: | ||
2131 | for line in f: | ||
2132 | line_split = line.split(':') | ||
2133 | if len(line_split) < 3: | ||
2134 | continue | ||
2135 | if str(uid) == line_split[2]: | ||
2136 | return True | ||
2137 | return False | ||
2138 | |||
2139 | def mkstemp(suffix=None, prefix=None, dir=None, text=False): | ||
2140 | """ | ||
2141 | Generates a unique temporary file, independent of time. | ||
2142 | |||
2143 | mkstemp() in glibc (at least) generates unique file names based on the | ||
2144 | current system time. When combined with highly parallel builds, and | ||
2145 | operating over NFS (e.g. shared sstate/downloads) this can result in | ||
2146 | conflicts and race conditions. | ||
2147 | |||
2148 | This function adds additional entropy to the file name so that a collision | ||
2149 | is independent of time and thus extremely unlikely. | ||
2150 | |||
2151 | Arguments: | ||
2152 | |||
2153 | - ``suffix``: filename suffix. | ||
2154 | - ``prefix``: filename prefix. | ||
2155 | - ``dir``: directory where the file will be created. | ||
2156 | - ``text``: if ``True``, the file is opened in text mode. | ||
2157 | |||
2158 | Returns a tuple containing: | ||
2159 | |||
2160 | - the file descriptor for the created file | ||
2161 | - the name of the file. | ||
2162 | """ | ||
2163 | entropy = "".join(random.choices("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890", k=20)) | ||
2164 | if prefix: | ||
2165 | prefix = prefix + entropy | ||
2166 | else: | ||
2167 | prefix = tempfile.gettempprefix() + entropy | ||
2168 | return tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=dir, text=text) | ||
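Usage mirrors ``tempfile.mkstemp()`` (the suffix and directory are illustrative)::

    import os
    import bb.utils

    fd, path = bb.utils.mkstemp(suffix=".tmp", dir="/srv/sstate")
    try:
        os.write(fd, b"payload")
    finally:
        os.close(fd)
        os.unlink(path)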
2169 | |||
2170 | def path_is_descendant(descendant, ancestor): | ||
2171 | """ | ||
2172 | Returns ``True`` if the path ``descendant`` is a descendant of ``ancestor`` | ||
2173 | (including being equivalent to ``ancestor`` itself). Otherwise returns | ||
2174 | ``False``. | ||
2175 | |||
2176 | Correctly accounts for symlinks, bind mounts, etc. by using | ||
2177 | ``os.path.samestat()`` to compare paths. | ||
2178 | |||
2179 | May raise any exception that ``os.stat()`` raises. | ||
2180 | |||
2181 | Arguments: | ||
2182 | |||
2183 | - ``descendant``: path to check for being a descendant of ``ancestor``. | ||
2184 | - ``ancestor``: path to the ancestor ``descendant`` will be checked | ||
2185 | against. | ||
2186 | """ | ||
2187 | |||
2188 | ancestor_stat = os.stat(ancestor) | ||
2189 | |||
2190 | # Recurse up each directory component of the descendant to see if it is | ||
2191 | # equivalent to the ancestor | ||
2192 | check_dir = os.path.abspath(descendant).rstrip("/") | ||
2193 | while check_dir: | ||
2194 | check_stat = os.stat(check_dir) | ||
2195 | if os.path.samestat(check_stat, ancestor_stat): | ||
2196 | return True | ||
2197 | check_dir = os.path.dirname(check_dir).rstrip("/") | ||
2198 | |||
2199 | return False | ||
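Illustrative calls; note that both paths must exist, since ``os.stat()`` is used on them (the paths below are hypothetical)::

    import bb.utils

    bb.utils.path_is_descendant("/srv/build/tmp/work", "/srv/build")  # True
    bb.utils.path_is_descendant("/srv/build", "/srv/build")           # True, a path is its own descendant
    bb.utils.path_is_descendant("/srv/other", "/srv/build")           # False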
2200 | |||
2201 | # If we don't have a timeout of some kind and a process/thread exits badly (for example | ||
2202 | # OOM killed) while holding a lock, we'd just hang in the lock futex forever. It is better | ||
2203 | # to exit at some point than to hang. 5 minutes with no progress means we're probably deadlocked. | ||
2204 | # This function can still deadlock python since it can't signal the other threads to exit | ||
2205 | # (signals are handled in the main thread) and even os._exit() will wait on non-daemon threads | ||
2206 | # to exit. | ||
2207 | @contextmanager | ||
2208 | def lock_timeout(lock): | ||
2209 | try: | ||
2210 | s = signal.pthread_sigmask(signal.SIG_BLOCK, signal.valid_signals()) | ||
2211 | held = lock.acquire(timeout=5*60) | ||
2212 | if not held: | ||
2213 | bb.server.process.serverlog("Couldn't get the lock for 5 mins, timed out, exiting.\n%s" % traceback.format_stack()) | ||
2214 | os._exit(1) | ||
2215 | yield held | ||
2216 | finally: | ||
2217 | lock.release() | ||
2218 | signal.pthread_sigmask(signal.SIG_SETMASK, s) | ||
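A usage sketch with an ordinary ``threading.Lock``::

    import threading
    import bb.utils

    lock = threading.Lock()
    with bb.utils.lock_timeout(lock):
        ...  # critical section; the process exits if the lock is not acquired within 5 minutes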
2219 | |||
2220 | # A version of lock_timeout without the check that the lock was acquired, and with a shorter timeout | ||
2221 | @contextmanager | ||
2222 | def lock_timeout_nocheck(lock): | ||
2223 | l = False | ||
2224 | try: | ||
2225 | s = signal.pthread_sigmask(signal.SIG_BLOCK, signal.valid_signals()) | ||
2226 | l = lock.acquire(timeout=10) | ||
2227 | yield l | ||
2228 | finally: | ||
2229 | if l: | ||
2230 | lock.release() | ||
2231 | signal.pthread_sigmask(signal.SIG_SETMASK, s) | ||