Diffstat (limited to 'bitbake/lib/bb')
-rw-r--r--  bitbake/lib/bb/__init__.py                    |   9
-rw-r--r--  bitbake/lib/bb/build.py                       |  19
-rw-r--r--  bitbake/lib/bb/command.py                     |  28
-rw-r--r--  bitbake/lib/bb/compat.py                      |  10
-rw-r--r--  bitbake/lib/bb/cooker.py                      |  52
-rw-r--r--  bitbake/lib/bb/cookerdata.py                  |  14
-rw-r--r--  bitbake/lib/bb/data.py                        |   1
-rw-r--r--  bitbake/lib/bb/data_smart.py                  |  20
-rw-r--r--  bitbake/lib/bb/event.py                       |  16
-rw-r--r--  bitbake/lib/bb/fetch2/__init__.py             |   5
-rw-r--r--  bitbake/lib/bb/fetch2/git.py                  |  54
-rw-r--r--  bitbake/lib/bb/fetch2/wget.py                 |  24
-rw-r--r--  bitbake/lib/bb/monitordisk.py                 |   7
-rw-r--r--  bitbake/lib/bb/msg.py                         |   6
-rw-r--r--  bitbake/lib/bb/parse/ast.py                   |   2
-rw-r--r--  bitbake/lib/bb/parse/parse_py/BBHandler.py    |   2
-rw-r--r--  bitbake/lib/bb/parse/parse_py/ConfHandler.py  |   2
-rw-r--r--  bitbake/lib/bb/persist_data.py                |  13
-rw-r--r--  bitbake/lib/bb/process.py                     |   3
-rw-r--r--  bitbake/lib/bb/providers.py                   |   4
-rw-r--r--  bitbake/lib/bb/runqueue.py                    | 185
-rw-r--r--  bitbake/lib/bb/server/process.py              |  16
-rw-r--r--  bitbake/lib/bb/siggen.py                      |   3
-rw-r--r--  bitbake/lib/bb/tests/codeparser.py            |  30
-rw-r--r--  bitbake/lib/bb/tests/event.py                 |  17
-rw-r--r--  bitbake/lib/bb/tests/fetch.py                 | 117
-rw-r--r--  bitbake/lib/bb/tinfoil.py                     |  17
-rw-r--r--  bitbake/lib/bb/ui/knotty.py                   |  32
-rw-r--r--  bitbake/lib/bb/ui/taskexp.py                  |   5
-rw-r--r--  bitbake/lib/bb/utils.py                       |  52
30 files changed, 581 insertions(+), 184 deletions(-)
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index b96466e654..ba8039497f 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -15,6 +15,13 @@ import sys
 if sys.version_info < (3, 5, 0):
     raise RuntimeError("Sorry, python 3.5.0 or later is required for this version of bitbake")
 
+if sys.version_info < (3, 10, 0):
+    # With python 3.8 and 3.9, we see errors of "libgcc_s.so.1 must be installed for pthread_cancel to work"
+    # https://stackoverflow.com/questions/64797838/libgcc-s-so-1-must-be-installed-for-pthread-cancel-to-work
+    # https://bugs.ams1.psf.io/issue42888
+    # so ensure libgcc_s is loaded early on
+    import ctypes
+    libgcc_s = ctypes.CDLL('libgcc_s.so.1')
 
 class BBHandledException(Exception):
     """
@@ -47,7 +54,7 @@ class BBLogger(Logger):
         if not bb.event.worker_pid:
             if self.name in bb.msg.loggerDefaultDomains and loglevel > (bb.msg.loggerDefaultDomains[self.name]):
                 return
-            if loglevel > bb.msg.loggerDefaultLogLevel:
+            if loglevel < bb.msg.loggerDefaultLogLevel:
                 return
         return self.log(loglevel, msg, *args, **kwargs)
 
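
The preload above works because ctypes.CDLL() dlopen()s libgcc_s.so.1 into the main process before any thread is cancelled; per the linked CPython issue, glibc otherwise loads that library lazily from inside pthread_cancel(), which can fail in pared-down environments. The same workaround in isolation (a sketch, assuming a glibc system with libgcc_s.so.1 on the loader path):

    import ctypes

    # Force an early dlopen() so pthread_cancel() never has to load it lazily.
    ctypes.CDLL('libgcc_s.so.1')
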
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index 23b6ee455f..aaada8a18b 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -27,6 +27,9 @@ from bb import data, event, utils
 bblogger = logging.getLogger('BitBake')
 logger = logging.getLogger('BitBake.Build')
 
+verboseShellLogging = False
+verboseStdoutLogging = False
+
 __mtime_cache = {}
 
 def cached_mtime_noerror(f):
@@ -290,8 +293,8 @@ def exec_func_python(func, d, runfile, cwd=None):
         lineno = int(d.getVarFlag(func, "lineno", False))
         bb.methodpool.insert_method(func, text, fn, lineno - 1)
 
-        comp = utils.better_compile(code, func, "exec_python_func() autogenerated")
-        utils.better_exec(comp, {"d": d}, code, "exec_python_func() autogenerated")
+        comp = utils.better_compile(code, func, "exec_func_python() autogenerated")
+        utils.better_exec(comp, {"d": d}, code, "exec_func_python() autogenerated")
     finally:
         bb.debug(2, "Python function %s finished" % func)
 
@@ -371,7 +374,7 @@ def exec_func_shell(func, d, runfile, cwd=None):
 
     bb.data.emit_func(func, script, d)
 
-    if bb.msg.loggerVerboseLogs:
+    if verboseShellLogging or bb.utils.to_boolean(d.getVar("BB_VERBOSE_LOGS", False)):
         script.write("set -x\n")
     if cwd:
         script.write("cd '%s'\n" % cwd)
@@ -391,7 +394,7 @@ exit $ret
     if fakerootcmd:
         cmd = [fakerootcmd, runfile]
 
-    if bb.msg.loggerDefaultVerbose:
+    if verboseStdoutLogging:
         logfile = LogTee(logger, StdoutNoopContextManager())
     else:
         logfile = StdoutNoopContextManager()
@@ -587,11 +590,15 @@ def _exec_task(fn, task, d, quieterr):
     except bb.BBHandledException:
         event.fire(TaskFailed(task, fn, logfn, localdata, True), localdata)
         return 1
-    except Exception as exc:
+    except (Exception, SystemExit) as exc:
         if quieterr:
             event.fire(TaskFailedSilent(task, fn, logfn, localdata), localdata)
         else:
             errprinted = errchk.triggered
+            # If the output is already on stdout, we've printed the information in the
+            # logs once already so don't duplicate
+            if verboseStdoutLogging:
+                errprinted = True
             logger.error(str(exc))
             event.fire(TaskFailed(task, fn, logfn, localdata, errprinted), localdata)
         return 1
@@ -901,6 +908,8 @@ def tasksbetween(task_start, task_end, d):
     def follow_chain(task, endtask, chain=None):
         if not chain:
             chain = []
+        if task in chain:
+            bb.fatal("Circular task dependencies as %s depends on itself via the chain %s" % (task, " -> ".join(chain)))
         chain.append(task)
         for othertask in tasks:
             if othertask == task:
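
The new guard in follow_chain() turns what used to be unbounded recursion on a dependency cycle into an immediate diagnostic naming the cycle. The same pattern outside BitBake (a sketch; the task names and deps mapping are hypothetical, BitBake's version calls bb.fatal() as above):

    def follow_chain(task, deps, chain=None):
        # deps maps a task to the tasks it runs before (illustrative data)
        chain = chain or []
        if task in chain:
            raise RuntimeError("Circular task dependencies: %s -> %s" % (" -> ".join(chain), task))
        chain.append(task)
        for nxt in deps.get(task, []):
            follow_chain(nxt, deps, chain)
        chain.pop()

    follow_chain("do_a", {"do_a": ["do_b"], "do_b": ["do_a"]})
    # RuntimeError: Circular task dependencies: do_a -> do_b -> do_a
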
diff --git a/bitbake/lib/bb/command.py b/bitbake/lib/bb/command.py
index 6abf38668b..b8429b2773 100644
--- a/bitbake/lib/bb/command.py
+++ b/bitbake/lib/bb/command.py
@@ -20,6 +20,7 @@ Commands are queued in a CommandQueue
 
 from collections import OrderedDict, defaultdict
 
+import io
 import bb.event
 import bb.cooker
 import bb.remotedata
@@ -74,8 +75,12 @@ class Command:
             result = command_method(self, commandline)
         except CommandError as exc:
             return None, exc.args[0]
-        except (Exception, SystemExit):
+        except (Exception, SystemExit) as exc:
             import traceback
+            if isinstance(exc, bb.BBHandledException):
+                # We need to start returning real exceptions here. Until we do, we can't
+                # tell if an exception is an instance of bb.BBHandledException
+                return None, "bb.BBHandledException()\n" + traceback.format_exc()
             return None, traceback.format_exc()
         else:
             return result, None
@@ -474,6 +479,17 @@ class CommandsSync:
         d = command.remotedatastores[dsindex].varhistory
         return getattr(d, method)(*args, **kwargs)
 
+    def dataStoreConnectorVarHistCmdEmit(self, command, params):
+        dsindex = params[0]
+        var = params[1]
+        oval = params[2]
+        val = params[3]
+        d = command.remotedatastores[params[4]]
+
+        o = io.StringIO()
+        command.remotedatastores[dsindex].varhistory.emit(var, oval, val, o, d)
+        return o.getvalue()
+
     def dataStoreConnectorIncHistCmd(self, command, params):
         dsindex = params[0]
         method = params[1]
@@ -620,6 +636,16 @@ class CommandsAsync:
         command.finishAsyncCommand()
     findFilesMatchingInDir.needcache = False
 
+    def testCookerCommandEvent(self, command, params):
+        """
+        Dummy command used by OEQA selftest to test tinfoil without IO
+        """
+        pattern = params[0]
+
+        command.cooker.testCookerCommandEvent(pattern)
+        command.finishAsyncCommand()
+    testCookerCommandEvent.needcache = False
+
     def findConfigFilePath(self, command, params):
         """
         Find the path of the requested configuration file
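
Because command results cross the client/server boundary as strings, the real exception type is lost in transit; the "bb.BBHandledException()\n" prefix acts as an in-band marker, as the comment in the hunk above notes. A sketch of how a client might recognise it (an assumption for illustration, not the actual tinfoil code):

    result, error = server.runCommand(["someCommand"])  # hypothetical call site
    if error and error.startswith("bb.BBHandledException()"):
        # The error was already logged server-side; re-raise without re-printing it.
        raise bb.BBHandledException()
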
diff --git a/bitbake/lib/bb/compat.py b/bitbake/lib/bb/compat.py
deleted file mode 100644
index 49356681ab..0000000000
--- a/bitbake/lib/bb/compat.py
+++ /dev/null
@@ -1,10 +0,0 @@
-#
-# SPDX-License-Identifier: GPL-2.0-only
-#
-
-"""Code pulled from future python versions, here for compatibility"""
-
-from collections import MutableMapping, KeysView, ValuesView, ItemsView, OrderedDict
-from functools import total_ordering
-
-
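
The deleted module was a thin re-export layer; everything it provided maps one-to-one onto the standard library, which the rest of this series switches callers to (see the event.py, data_smart.py and persist_data.py hunks below):

    # stdlib equivalents for everything bb.compat used to re-export
    from functools import total_ordering
    from collections.abc import MutableMapping, KeysView, ValuesView, ItemsView
    from collections import OrderedDict
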
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index d90bd3945f..6743bce585 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -13,7 +13,6 @@ import sys, os, glob, os.path, re, time
 import itertools
 import logging
 import multiprocessing
-import sre_constants
 import threading
 from io import StringIO, UnsupportedOperation
 from contextlib import closing
@@ -411,10 +410,7 @@ class BBCooker:
         self.data.disableTracking()
 
     def parseConfiguration(self):
-        # Set log file verbosity
-        verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
-        if verboselogs:
-            bb.msg.loggerVerboseLogs = True
+        self.updateCacheSync()
 
         # Change nice level if we're asked to
         nice = self.data.getVar("BB_NICE_LEVEL")
@@ -1022,6 +1018,11 @@ class BBCooker:
         if matches:
             bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
 
+    def testCookerCommandEvent(self, filepattern):
+        # Dummy command used by OEQA selftest to test tinfoil without IO
+        matches = ["A", "B"]
+        bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
+
     def findProviders(self, mc=''):
         return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
 
@@ -1636,6 +1637,7 @@ class BBCooker:
         return
 
     def post_serve(self):
+        self.shutdown(force=True)
         prserv.serv.auto_shutdown()
         if self.hashserv:
             self.hashserv.process.terminate()
@@ -1650,6 +1652,7 @@ class BBCooker:
 
         if self.parser:
             self.parser.shutdown(clean=not force, force=force)
+            self.parser.final_cleanup()
 
     def finishcommand(self):
         self.state = state.initial
@@ -1791,7 +1794,7 @@ class CookerCollectFiles(object):
             try:
                 re.compile(mask)
                 bbmasks.append(mask)
-            except sre_constants.error:
+            except re.error:
                 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
 
         # Then validate the combined regular expressions. This should never
@@ -1799,7 +1802,7 @@ class CookerCollectFiles(object):
         bbmask = "|".join(bbmasks)
         try:
             bbmask_compiled = re.compile(bbmask)
-        except sre_constants.error:
+        except re.error:
             collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
             bbmask = None
 
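
sre_constants is a private implementation module (deprecated and later removed from CPython); re.error is the stable public name for the same exception, so the BBMASK handlers keep catching exactly what they did before. A quick standalone check of the behaviour being relied on:

    import re

    try:
        re.compile("meta-(")   # invalid mask: unterminated subpattern
    except re.error as e:
        print("invalid BBMASK entry ignored:", e)
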
@@ -1931,7 +1934,8 @@ class Parser(multiprocessing.Process):
             except queue.Empty:
                 pass
             else:
-                self.results.cancel_join_thread()
+                self.results.close()
+                self.results.join_thread()
                 break
 
             if pending:
@@ -1940,6 +1944,8 @@ class Parser(multiprocessing.Process):
                 try:
                     job = self.jobs.pop()
                 except IndexError:
+                    self.results.close()
+                    self.results.join_thread()
                     break
                 result = self.parse(*job)
                 # Clear the siggen cache after parsing to control memory usage, its huge
@@ -2015,6 +2021,7 @@ class CookerParser(object):
 
         self.start()
         self.haveshutdown = False
+        self.syncthread = None
 
     def start(self):
         self.results = self.load_cached()
@@ -2056,12 +2063,9 @@ class CookerParser(object):
                                             self.total)
 
             bb.event.fire(event, self.cfgdata)
-            for process in self.processes:
-                self.parser_quit.put(None)
-        else:
-            self.parser_quit.cancel_join_thread()
-            for process in self.processes:
-                self.parser_quit.put(None)
+
+        for process in self.processes:
+            self.parser_quit.put(None)
 
         # Cleanup the queue before call process.join(), otherwise there might be
         # deadlocks.
@@ -2078,9 +2082,13 @@ class CookerParser(object):
             else:
                 process.join()
 
+        self.parser_quit.close()
+        # Allow data left in the cancel queue to be discarded
+        self.parser_quit.cancel_join_thread()
+
         sync = threading.Thread(target=self.bb_cache.sync)
+        self.syncthread = sync
         sync.start()
-        multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
         bb.codeparser.parser_cache_savemerge()
         bb.fetch.fetcher_parse_done()
         if self.cooker.configuration.profile:
@@ -2094,6 +2102,10 @@ class CookerParser(object):
                 bb.utils.process_profilelog(profiles, pout = pout)
                 print("Processed parsing statistics saved to %s" % (pout))
 
+    def final_cleanup(self):
+        if self.syncthread:
+            self.syncthread.join()
+
     def load_cached(self):
         for filename, appends in self.fromcache:
             cached, infos = self.bb_cache.load(filename, appends)
@@ -2126,18 +2138,18 @@ class CookerParser(object):
         except bb.BBHandledException as exc:
             self.error += 1
             logger.error('Failed to parse recipe: %s' % exc.recipe)
-            self.shutdown(clean=False)
+            self.shutdown(clean=False, force=True)
             return False
         except ParsingFailure as exc:
             self.error += 1
             logger.error('Unable to parse %s: %s' %
                          (exc.recipe, bb.exceptions.to_string(exc.realexception)))
-            self.shutdown(clean=False)
+            self.shutdown(clean=False, force=True)
             return False
         except bb.parse.ParseError as exc:
             self.error += 1
             logger.error(str(exc))
-            self.shutdown(clean=False)
+            self.shutdown(clean=False, force=True)
             return False
         except bb.data_smart.ExpansionError as exc:
             self.error += 1
@@ -2146,7 +2158,7 @@ class CookerParser(object):
             tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
             logger.error('ExpansionError during parsing %s', value.recipe,
                          exc_info=(etype, value, tb))
-            self.shutdown(clean=False)
+            self.shutdown(clean=False, force=True)
             return False
         except Exception as exc:
             self.error += 1
@@ -2158,7 +2170,7 @@ class CookerParser(object):
             # Most likely, an exception occurred during raising an exception
             import traceback
             logger.error('Exception during parse: %s' % traceback.format_exc())
-            self.shutdown(clean=False)
+            self.shutdown(clean=False, force=True)
             return False
 
         self.current += 1
diff --git a/bitbake/lib/bb/cookerdata.py b/bitbake/lib/bb/cookerdata.py
index 472423fdc8..30727bf2ee 100644
--- a/bitbake/lib/bb/cookerdata.py
+++ b/bitbake/lib/bb/cookerdata.py
@@ -58,11 +58,14 @@ class ConfigParameters(object):
     def updateToServer(self, server, environment):
         options = {}
         for o in ["abort", "force", "invalidate_stamp",
-                  "verbose", "debug", "dry_run", "dump_signatures",
+                  "debug", "dry_run", "dump_signatures",
                   "debug_domains", "extra_assume_provided", "profile",
                   "prefile", "postfile", "server_timeout"]:
             options[o] = getattr(self.options, o)
 
+        options['build_verbose_shell'] = self.options.verbose
+        options['build_verbose_stdout'] = self.options.verbose
+
         ret, error = server.runCommand(["updateConfig", options, environment, sys.argv])
         if error:
             raise Exception("Unable to update the server configuration with local parameters: %s" % error)
@@ -125,6 +128,8 @@ class CookerConfiguration(object):
         self.skipsetscene = False
         self.invalidate_stamp = False
         self.dump_signatures = []
+        self.build_verbose_shell = False
+        self.build_verbose_stdout = False
         self.dry_run = False
         self.tracking = False
         self.xmlrpcinterface = []
@@ -297,6 +302,8 @@ class CookerDataBuilder(object):
 
             multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split()
             for config in multiconfig:
+                if config[0].isdigit():
+                    bb.fatal("Multiconfig name '%s' is invalid as multiconfigs cannot start with a digit" % config)
                 mcdata = self.parseConfigurationFiles(self.prefiles, self.postfiles, config)
                 bb.event.fire(bb.event.ConfigParsed(), mcdata)
                 self.mcdata[config] = mcdata
@@ -348,6 +355,9 @@ class CookerDataBuilder(object):
         layers = (data.getVar('BBLAYERS') or "").split()
         broken_layers = []
 
+        if not layers:
+            bb.fatal("The bblayers.conf file doesn't contain any BBLAYERS definition")
+
         data = bb.data.createCopy(data)
         approved = bb.utils.approved_variables()
 
@@ -399,6 +409,8 @@ class CookerDataBuilder(object):
             if c in collections_tmp:
                 bb.fatal("Found duplicated BBFILE_COLLECTIONS '%s', check bblayers.conf or layer.conf to fix it." % c)
             compat = set((data.getVar("LAYERSERIES_COMPAT_%s" % c) or "").split())
+            if compat and not layerseries:
+                bb.fatal("No core layer found to work with layer '%s'. Missing entry in bblayers.conf?" % c)
             if compat and not (compat & layerseries):
                 bb.fatal("Layer %s is not compatible with the core layer which only supports these series: %s (layer is compatible with %s)"
                       % (c, " ".join(layerseries), " ".join(compat)))
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py
index b0683c5180..1d21e00a1c 100644
--- a/bitbake/lib/bb/data.py
+++ b/bitbake/lib/bb/data.py
@@ -301,6 +301,7 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
                 value += "\n_remove of %s" % r
                 deps |= r2.references
                 deps = deps | (keys & r2.execs)
+                value = handle_contains(value, r2.contains, d)
         return value
 
     if "vardepvalue" in varflags:
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py
index 1d8774ee5e..c46d3f0a08 100644
--- a/bitbake/lib/bb/data_smart.py
+++ b/bitbake/lib/bb/data_smart.py
@@ -17,7 +17,7 @@ BitBake build tools.
 # Based on functions from the base bb module, Copyright 2003 Holger Schurig
 
 import copy, re, sys, traceback
-from collections import MutableMapping
+from collections.abc import MutableMapping
 import logging
 import hashlib
 import bb, bb.codeparser
@@ -28,7 +28,7 @@ logger = logging.getLogger("BitBake.Data")
 
 __setvar_keyword__ = ["_append", "_prepend", "_remove"]
 __setvar_regexp__ = re.compile(r'(?P<base>.*?)(?P<keyword>_append|_prepend|_remove)(_(?P<add>[^A-Z]*))?$')
-__expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~]+?}")
+__expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+?}")
 __expand_python_regexp__ = re.compile(r"\${@.+?}")
 __whitespace_split__ = re.compile(r'(\s)')
 __override_regexp__ = re.compile(r'[a-z0-9]+')
@@ -403,7 +403,7 @@ class DataSmart(MutableMapping):
                     s = __expand_python_regexp__.sub(varparse.python_sub, s)
                 except SyntaxError as e:
                     # Likely unmatched brackets, just don't expand the expression
-                    if e.msg != "EOL while scanning string literal":
+                    if e.msg != "EOL while scanning string literal" and not e.msg.startswith("unterminated string literal"):
                         raise
                 if s == olds:
                     break
@@ -411,6 +411,8 @@ class DataSmart(MutableMapping):
                 raise
             except bb.parse.SkipRecipe:
                 raise
+            except bb.BBHandledException:
+                raise
             except Exception as exc:
                 tb = sys.exc_info()[2]
                 raise ExpansionError(varname, s, exc).with_traceback(tb) from exc
@@ -481,6 +483,7 @@ class DataSmart(MutableMapping):
 
     def setVar(self, var, value, **loginfo):
         #print("var=" + str(var) + " val=" + str(value))
+        var = var.replace(":", "_")
         self.expand_cache = {}
         parsing=False
         if 'parsing' in loginfo:
@@ -589,6 +592,8 @@ class DataSmart(MutableMapping):
         """
         Rename the variable key to newkey
         """
+        key = key.replace(":", "_")
+        newkey = newkey.replace(":", "_")
         if key == newkey:
             bb.warn("Calling renameVar with equivalent keys (%s) is invalid" % key)
             return
@@ -637,6 +642,7 @@ class DataSmart(MutableMapping):
         self.setVar(var + "_prepend", value, ignore=True, parsing=True)
 
     def delVar(self, var, **loginfo):
+        var = var.replace(":", "_")
         self.expand_cache = {}
 
         loginfo['detail'] = ""
@@ -664,6 +670,7 @@ class DataSmart(MutableMapping):
             override = None
 
     def setVarFlag(self, var, flag, value, **loginfo):
+        var = var.replace(":", "_")
         self.expand_cache = {}
 
         if 'op' not in loginfo:
@@ -687,6 +694,7 @@ class DataSmart(MutableMapping):
             self.dict["__exportlist"]["_content"].add(var)
 
     def getVarFlag(self, var, flag, expand=True, noweakdefault=False, parsing=False, retparser=False):
+        var = var.replace(":", "_")
         if flag == "_content":
             cachename = var
         else:
@@ -814,6 +822,7 @@ class DataSmart(MutableMapping):
         return value
 
     def delVarFlag(self, var, flag, **loginfo):
+        var = var.replace(":", "_")
         self.expand_cache = {}
 
         local_var, _ = self._findVar(var)
@@ -831,6 +840,7 @@ class DataSmart(MutableMapping):
             del self.dict[var][flag]
 
     def appendVarFlag(self, var, flag, value, **loginfo):
+        var = var.replace(":", "_")
         loginfo['op'] = 'append'
         loginfo['flag'] = flag
         self.varhistory.record(**loginfo)
@@ -838,6 +848,7 @@ class DataSmart(MutableMapping):
         self.setVarFlag(var, flag, newvalue, ignore=True)
 
     def prependVarFlag(self, var, flag, value, **loginfo):
+        var = var.replace(":", "_")
        loginfo['op'] = 'prepend'
        loginfo['flag'] = flag
        self.varhistory.record(**loginfo)
@@ -845,6 +856,7 @@ class DataSmart(MutableMapping):
         self.setVarFlag(var, flag, newvalue, ignore=True)
 
     def setVarFlags(self, var, flags, **loginfo):
+        var = var.replace(":", "_")
         self.expand_cache = {}
         infer_caller_details(loginfo)
         if not var in self.dict:
@@ -859,6 +871,7 @@ class DataSmart(MutableMapping):
             self.dict[var][i] = flags[i]
 
     def getVarFlags(self, var, expand = False, internalflags=False):
+        var = var.replace(":", "_")
         local_var, _ = self._findVar(var)
         flags = {}
 
@@ -875,6 +888,7 @@ class DataSmart(MutableMapping):
 
 
     def delVarFlags(self, var, **loginfo):
+        var = var.replace(":", "_")
         self.expand_cache = {}
         if not var in self.dict:
             self._makeShadowCopy(var)
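
Every public DataSmart entry point now canonicalises ':' to '_' in variable names, so the colon-based override syntax (FOO:append, and the ':' additions to the expansion and parser regexes elsewhere in this series) resolves to the same underlying keys as the old underscore spelling. The observable behaviour, as a sketch assuming a BitBake environment where bb.data is importable:

    import bb.data

    d = bb.data.init()
    d.setVar("FOO:append", " bar")              # stored under the key FOO_append
    assert d.getVar("FOO_append", False) == " bar"
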
diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py
index d1359f0100..cb0b3b3345 100644
--- a/bitbake/lib/bb/event.py
+++ b/bitbake/lib/bb/event.py
@@ -10,17 +10,17 @@ BitBake build tools.
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
-import sys
-import pickle
-import logging
-import atexit
-import traceback
 import ast
+import atexit
+import collections
+import logging
+import pickle
+import sys
 import threading
+import traceback
 
-import bb.utils
-import bb.compat
 import bb.exceptions
+import bb.utils
 
 # This is the pid for which we should generate the event. This is set when
 # the runqueue forks off.
@@ -56,7 +56,7 @@ def set_class_handlers(h):
     _handlers = h
 
 def clean_class_handlers():
-    return bb.compat.OrderedDict()
+    return collections.OrderedDict()
 
 # Internal
 _handlers = clean_class_handlers()
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index dc99914cd9..3e6555bd67 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -562,6 +562,9 @@ def verify_checksum(ud, d, precomputed={}):
 
         checksum_expected = getattr(ud, "%s_expected" % checksum_id)
 
+        if checksum_expected == '':
+            checksum_expected = None
+
         return {
             "id": checksum_id,
             "name": checksum_name,
@@ -612,7 +615,7 @@ def verify_checksum(ud, d, precomputed={}):
 
     for ci in checksum_infos:
         if ci["expected"] and ci["expected"] != ci["data"]:
-            messages.append("File: '%s' has %s checksum %s when %s was " \
+            messages.append("File: '%s' has %s checksum '%s' when '%s' was " \
                             "expected" % (ud.localpath, ci["id"], ci["data"], ci["expected"]))
             bad_checksum = ci["data"]
 
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
index 8740e9c05f..cad1ae8207 100644
--- a/bitbake/lib/bb/fetch2/git.py
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -44,7 +44,8 @@ Supported SRC_URI options are:
 
 - nobranch
    Don't check the SHA validation for branch. set this option for the recipe
-   referring to commit which is valid in tag instead of branch.
+   referring to commit which is valid in any namespace (branch, tag, ...)
+   instead of branch.
    The default is "0", set nobranch=1 if needed.
 
 - usehead
@@ -63,10 +64,12 @@ import errno
 import fnmatch
 import os
 import re
+import shlex
 import subprocess
 import tempfile
 import bb
 import bb.progress
+from contextlib import contextmanager
 from bb.fetch2 import FetchMethod
 from bb.fetch2 import runfetchcmd
 from bb.fetch2 import logger
@@ -140,6 +143,10 @@ class Git(FetchMethod):
                 ud.proto = 'file'
             else:
                 ud.proto = "git"
+        if ud.host == "github.com" and ud.proto == "git":
+            # github stopped supporting git protocol
+            # https://github.blog/2021-09-01-improving-git-protocol-security-github/#no-more-unauthenticated-git
+            ud.proto = "https"
 
         if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
             raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)
@@ -219,7 +226,12 @@ class Git(FetchMethod):
             ud.shallow = False
 
         if ud.usehead:
-            ud.unresolvedrev['default'] = 'HEAD'
+            # When usehead is set let's associate 'HEAD' with the unresolved
+            # rev of this repository. This will get resolved into a revision
+            # later. If an actual revision happens to have also been provided
+            # then this setting will be overridden.
+            for name in ud.names:
+                ud.unresolvedrev[name] = 'HEAD'
 
         ud.basecmd = d.getVar("FETCHCMD_git") or "git -c core.fsyncobjectfiles=0"
 
@@ -342,7 +354,7 @@ class Git(FetchMethod):
         # We do this since git will use a "-l" option automatically for local urls where possible
         if repourl.startswith("file://"):
             repourl = repourl[7:]
-        clone_cmd = "LANG=C %s clone --bare --mirror \"%s\" %s --progress" % (ud.basecmd, repourl, ud.clonedir)
+        clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, shlex.quote(repourl), ud.clonedir)
         if ud.proto.lower() != 'file':
             bb.fetch2.check_network_access(d, clone_cmd, ud.url)
         progresshandler = GitProgressHandler(d)
@@ -354,8 +366,12 @@ class Git(FetchMethod):
             if "origin" in output:
                 runfetchcmd("%s remote rm origin" % ud.basecmd, d, workdir=ud.clonedir)
 
-            runfetchcmd("%s remote add --mirror=fetch origin \"%s\"" % (ud.basecmd, repourl), d, workdir=ud.clonedir)
-            fetch_cmd = "LANG=C %s fetch -f --progress \"%s\" refs/*:refs/*" % (ud.basecmd, repourl)
+            runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=ud.clonedir)
+
+            if ud.nobranch:
+                fetch_cmd = "LANG=C %s fetch -f --progress %s refs/*:refs/*" % (ud.basecmd, shlex.quote(repourl))
+            else:
+                fetch_cmd = "LANG=C %s fetch -f --progress %s refs/heads/*:refs/heads/* refs/tags/*:refs/tags/*" % (ud.basecmd, shlex.quote(repourl))
             if ud.proto.lower() != 'file':
                 bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
             progresshandler = GitProgressHandler(d)
@@ -388,7 +404,7 @@ class Git(FetchMethod):
             tmpdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
             try:
                 # Do the checkout. This implicitly involves a Git LFS fetch.
-                self.unpack(ud, tmpdir, d)
+                Git.unpack(self, ud, tmpdir, d)
 
                 # Scoop up a copy of any stuff that Git LFS downloaded. Merge them into
                 # the bare clonedir.
@@ -408,6 +424,20 @@ class Git(FetchMethod):
             bb.utils.remove(tmpdir, recurse=True)
 
     def build_mirror_data(self, ud, d):
+
+        # Create as a temp file and move atomically into position to avoid races
+        @contextmanager
+        def create_atomic(filename):
+            fd, tfile = tempfile.mkstemp(dir=os.path.dirname(filename))
+            try:
+                yield tfile
+                umask = os.umask(0o666)
+                os.umask(umask)
+                os.chmod(tfile, (0o666 & ~umask))
+                os.rename(tfile, filename)
+            finally:
+                os.close(fd)
+
         if ud.shallow and ud.write_shallow_tarballs:
             if not os.path.exists(ud.fullshallow):
                 if os.path.islink(ud.fullshallow):
@@ -418,7 +448,8 @@ class Git(FetchMethod):
                     self.clone_shallow_local(ud, shallowclone, d)
 
                     logger.info("Creating tarball of git repository")
-                    runfetchcmd("tar -czf %s ." % ud.fullshallow, d, workdir=shallowclone)
+                    with create_atomic(ud.fullshallow) as tfile:
+                        runfetchcmd("tar -czf %s ." % tfile, d, workdir=shallowclone)
                     runfetchcmd("touch %s.done" % ud.fullshallow, d)
                 finally:
                     bb.utils.remove(tempdir, recurse=True)
@@ -427,7 +458,8 @@ class Git(FetchMethod):
                 os.unlink(ud.fullmirror)
 
             logger.info("Creating tarball of git repository")
-            runfetchcmd("tar -czf %s ." % ud.fullmirror, d, workdir=ud.clonedir)
+            with create_atomic(ud.fullmirror) as tfile:
+                runfetchcmd("tar -czf %s ." % tfile, d, workdir=ud.clonedir)
             runfetchcmd("touch %s.done" % ud.fullmirror, d)
 
     def clone_shallow_local(self, ud, dest, d):
@@ -533,7 +565,7 @@ class Git(FetchMethod):
             raise bb.fetch2.UnpackError("No up to date source found: " + "; ".join(source_error), ud.url)
 
         repourl = self._get_repo_url(ud)
-        runfetchcmd("%s remote set-url origin \"%s\"" % (ud.basecmd, repourl), d, workdir=destdir)
+        runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=destdir)
 
         if self._contains_lfs(ud, d, destdir):
             if need_lfs and not self._find_git_lfs(d):
@@ -661,8 +693,8 @@ class Git(FetchMethod):
         d.setVar('_BB_GIT_IN_LSREMOTE', '1')
         try:
             repourl = self._get_repo_url(ud)
-            cmd = "%s ls-remote \"%s\" %s" % \
-                (ud.basecmd, repourl, search)
+            cmd = "%s ls-remote %s %s" % \
+                (ud.basecmd, shlex.quote(repourl), search)
             if ud.proto.lower() != 'file':
                 bb.fetch2.check_network_access(d, cmd, repourl)
             output = runfetchcmd(cmd, d, True)
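
Two details of create_atomic() above are worth calling out: mkstemp() creates the file mode 0600, so the chmod widens it to the conventional 0666-minus-umask before publishing, and the paired os.umask() calls are the standard idiom for reading the current umask, since there is no getter (you set a throwaway value and immediately restore what came back). That idiom in isolation:

    import os

    def current_umask():
        umask = os.umask(0o666)   # os.umask() returns the previous mask
        os.umask(umask)           # restore it immediately
        return umask

    print(oct(0o666 & ~current_umask()))  # mode a freshly published file gets
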
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
index f7d1de26b7..368c644337 100644
--- a/bitbake/lib/bb/fetch2/wget.py
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -52,6 +52,12 @@ class WgetProgressHandler(bb.progress.LineFilterProgressHandler):
 
 
 class Wget(FetchMethod):
+
+    # CDNs like CloudFlare may do a 'browser integrity test' which can fail
+    # with the standard wget/urllib User-Agent, so pretend to be a modern
+    # browser.
+    user_agent = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:84.0) Gecko/20100101 Firefox/84.0"
+
     """Class to fetch urls via 'wget'"""
     def supports(self, ud, d):
         """
@@ -91,10 +97,9 @@ class Wget(FetchMethod):
 
         fetchcmd = self.basecmd
 
-        if 'downloadfilename' in ud.parm:
-            localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile)
-            bb.utils.mkdirhier(os.path.dirname(localpath))
-            fetchcmd += " -O %s" % shlex.quote(localpath)
+        localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile) + ".tmp"
+        bb.utils.mkdirhier(os.path.dirname(localpath))
+        fetchcmd += " -O %s" % shlex.quote(localpath)
 
         if ud.user and ud.pswd:
             fetchcmd += " --user=%s --password=%s --auth-no-challenge" % (ud.user, ud.pswd)
@@ -108,6 +113,10 @@ class Wget(FetchMethod):
 
         self._runwget(ud, d, fetchcmd, False)
 
+        # Remove the ".tmp" and move the file into position atomically
+        # Our lock prevents multiple writers but mirroring code may grab incomplete files
+        os.rename(localpath, localpath[:-4])
+
         # Sanity check since wget can pretend it succeed when it didn't
         # Also, this used to happen if sourceforge sent us to the mirror page
         if not os.path.exists(ud.localpath):
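
Downloading to "<localfile>.tmp" and renaming into place means concurrent readers (mirror handling, per the comment above) see either no file or a complete one: os.rename() is atomic within a filesystem, and localpath[:-4] simply strips the ".tmp" suffix. The pattern in isolation (a sketch; publish() is a hypothetical name):

    import os

    def publish(tmppath):
        # assumes tmppath ends in ".tmp" and the final path is on the same filesystem
        os.rename(tmppath, tmppath[:-4])
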
@@ -300,7 +309,7 @@ class Wget(FetchMethod):
             # Some servers (FusionForge, as used on Alioth) require that the
             # optional Accept header is set.
             r.add_header("Accept", "*/*")
-            r.add_header("User-Agent", "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12")
+            r.add_header("User-Agent", self.user_agent)
             def add_basic_auth(login_str, request):
                 '''Adds Basic auth to http request, pass in login:password as string'''
                 import base64
@@ -319,7 +328,7 @@ class Wget(FetchMethod):
             except (TypeError, ImportError, IOError, netrc.NetrcParseError):
                 pass
 
-            with opener.open(r) as response:
+            with opener.open(r, timeout=30) as response:
                 pass
         except urllib.error.URLError as e:
             if try_again:
@@ -404,9 +413,8 @@ class Wget(FetchMethod):
         """
         f = tempfile.NamedTemporaryFile()
         with tempfile.TemporaryDirectory(prefix="wget-index-") as workdir, tempfile.NamedTemporaryFile(dir=workdir, prefix="wget-listing-") as f:
-            agent = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12"
             fetchcmd = self.basecmd
-            fetchcmd += " -O " + f.name + " --user-agent='" + agent + "' '" + uri + "'"
+            fetchcmd += " -O " + f.name + " --user-agent='" + self.user_agent + "' '" + uri + "'"
             try:
                 self._runwget(ud, d, fetchcmd, True, workdir=workdir)
                 fetchresult = f.read()
diff --git a/bitbake/lib/bb/monitordisk.py b/bitbake/lib/bb/monitordisk.py
index e7c07264a8..4d243af30b 100644
--- a/bitbake/lib/bb/monitordisk.py
+++ b/bitbake/lib/bb/monitordisk.py
@@ -229,9 +229,10 @@ class diskMonitor:
                     freeInode = st.f_favail
 
                     if minInode and freeInode < minInode:
-                        # Some filesystems use dynamic inodes so can't run out
-                        # (e.g. btrfs). This is reported by the inode count being 0.
-                        if st.f_files == 0:
+                        # Some filesystems use dynamic inodes so can't run out.
+                        # This is reported by the inode count being 0 (btrfs) or the free
+                        # inode count being -1 (cephfs).
+                        if st.f_files == 0 or st.f_favail == -1:
                             self.devDict[k][2] = None
                             continue
                         # Always show warning, the self.checked would always be False if the action is WARN
diff --git a/bitbake/lib/bb/msg.py b/bitbake/lib/bb/msg.py
index 2d88c4e72d..1b1a23bb50 100644
--- a/bitbake/lib/bb/msg.py
+++ b/bitbake/lib/bb/msg.py
@@ -146,18 +146,12 @@ class LogFilterLTLevel(logging.Filter):
 #
 
 loggerDefaultLogLevel = BBLogFormatter.NOTE
-loggerDefaultVerbose = False
-loggerVerboseLogs = False
 loggerDefaultDomains = {}
 
 def init_msgconfig(verbose, debug, debug_domains=None):
     """
     Set default verbosity and debug levels config the logger
     """
-    bb.msg.loggerDefaultVerbose = verbose
-    if verbose:
-        bb.msg.loggerVerboseLogs = True
-
     if debug:
         bb.msg.loggerDefaultLogLevel = BBLogFormatter.DEBUG - debug + 1
     elif verbose:
diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py
index eb8cfa21b8..9f46f3f35a 100644
--- a/bitbake/lib/bb/parse/ast.py
+++ b/bitbake/lib/bb/parse/ast.py
@@ -97,6 +97,7 @@ class DataNode(AstNode):
     def eval(self, data):
         groupd = self.groupd
         key = groupd["var"]
+        key = key.replace(":", "_")
         loginfo = {
             'variable': key,
             'file': self.filename,
@@ -207,6 +208,7 @@ class ExportFuncsNode(AstNode):
     def eval(self, data):
 
         for func in self.n:
+            func = func.replace(":", "_")
            calledfunc = self.classname + "_" + func
 
            if data.getVar(func, False) and not data.getVarFlag(func, 'export_func', False):
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py
index 6e216effb8..8781129fc1 100644
--- a/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -22,7 +22,7 @@ from .ConfHandler import include, init
 # For compatibility
 bb.deprecate_import(__name__, "bb.parse", ["vars_from_file"])
 
-__func_start_regexp__ = re.compile(r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" )
+__func_start_regexp__ = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>fakeroot(?=\s)))\s*)*(?P<func>[\w\.\-\+\{\}\$:]+)?\s*\(\s*\)\s*{$" )
 __inherit_regexp__ = re.compile(r"inherit\s+(.+)" )
 __export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" )
 __addtask_regexp__ = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")
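
The added lookaheads make "python" and "fakeroot" act as keywords only when followed by whitespace (or, for python, an opening parenthesis), so a function whose name merely starts with one of those words is no longer misparsed, and ':' joins the characters allowed in function names. A quick demonstration of the new behaviour:

    import re
    func_re = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>fakeroot(?=\s)))\s*)*(?P<func>[\w\.\-\+\{\}\$:]+)?\s*\(\s*\)\s*{$")

    m = func_re.match("python do_foo() {")
    print(m.group("py"), m.group("func"))  # python do_foo
    m = func_re.match("pythonfoo() {")
    print(m.group("py"), m.group("func"))  # None pythonfoo (previously parsed as a python func named "foo")
    m = func_re.match("python() {")
    print(m.group("py"), m.group("func"))  # python None (anonymous python functions still work)
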
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
index af64d3446e..a7e81bd6ad 100644
--- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
@@ -20,7 +20,7 @@ from bb.parse import ParseError, resolve_file, ast, logger, handle
 __config_regexp__ = re.compile( r"""
     ^
     (?P<exp>export\s+)?
-    (?P<var>[a-zA-Z0-9\-_+.${}/~]+?)
+    (?P<var>[a-zA-Z0-9\-_+.${}/~:]+?)
     (\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?
 
     \s* (
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py
index 7357ab2d44..56c983f816 100644
--- a/bitbake/lib/bb/persist_data.py
+++ b/bitbake/lib/bb/persist_data.py
@@ -12,14 +12,15 @@ currently, providing a key/value store accessed by 'domain'.
 #
 
 import collections
+import collections.abc
+import contextlib
+import functools
 import logging
 import os.path
+import sqlite3
 import sys
 import warnings
-from bb.compat import total_ordering
-from collections import Mapping
-import sqlite3
-import contextlib
+from collections.abc import Mapping
 
 sqlversion = sqlite3.sqlite_version_info
 if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
@@ -28,8 +29,8 @@ if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
 
 logger = logging.getLogger("BitBake.PersistData")
 
-@total_ordering
-class SQLTable(collections.MutableMapping):
+@functools.total_ordering
+class SQLTable(collections.abc.MutableMapping):
     class _Decorators(object):
         @staticmethod
         def retry(*, reconnect=True):
diff --git a/bitbake/lib/bb/process.py b/bitbake/lib/bb/process.py
index 2dc472a86f..24c588e533 100644
--- a/bitbake/lib/bb/process.py
+++ b/bitbake/lib/bb/process.py
@@ -179,5 +179,8 @@ def run(cmd, input=None, log=None, extrafiles=None, **options):
         stderr = stderr.decode("utf-8")
 
     if pipe.returncode != 0:
+        if log:
+            # Don't duplicate the output in the exception if logging it
+            raise ExecutionError(cmd, pipe.returncode, None, None)
         raise ExecutionError(cmd, pipe.returncode, stdout, stderr)
     return stdout, stderr
diff --git a/bitbake/lib/bb/providers.py b/bitbake/lib/bb/providers.py
index 81459c36d5..484e1ea4f3 100644
--- a/bitbake/lib/bb/providers.py
+++ b/bitbake/lib/bb/providers.py
@@ -151,7 +151,7 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
         if item:
             itemstr = " (for item %s)" % item
         if preferred_file is None:
-            logger.info("preferred version %s of %s not available%s", pv_str, pn, itemstr)
+            logger.warning("preferred version %s of %s not available%s", pv_str, pn, itemstr)
             available_vers = []
             for file_set in pkg_pn:
                 for f in file_set:
@@ -163,7 +163,7 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
                     available_vers.append(ver_str)
             if available_vers:
                 available_vers.sort()
-                logger.info("versions of %s available: %s", pn, ' '.join(available_vers))
+                logger.warning("versions of %s available: %s", pn, ' '.join(available_vers))
         else:
             logger.debug(1, "selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr)
 
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index 30cab5379e..886eef1f27 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -24,6 +24,7 @@ import pickle
 from multiprocessing import Process
 import shlex
 import pprint
+import time
 
 bblogger = logging.getLogger("BitBake")
 logger = logging.getLogger("BitBake.RunQueue")
@@ -142,6 +143,55 @@ class RunQueueScheduler(object):
142 self.buildable.append(tid) 143 self.buildable.append(tid)
143 144
144 self.rev_prio_map = None 145 self.rev_prio_map = None
146 self.is_pressure_usable()
147
148 def is_pressure_usable(self):
149 """
150 If monitoring pressure, return True if pressure files can be open and read. For example
151 openSUSE /proc/pressure/* files have readable file permissions but when read the error EOPNOTSUPP (Operation not supported)
152 is returned.
153 """
154 if self.rq.max_cpu_pressure or self.rq.max_io_pressure or self.rq.max_memory_pressure:
155 try:
156 with open("/proc/pressure/cpu") as cpu_pressure_fds, \
157 open("/proc/pressure/io") as io_pressure_fds, \
158 open("/proc/pressure/memory") as memory_pressure_fds:
159
160 self.prev_cpu_pressure = cpu_pressure_fds.readline().split()[4].split("=")[1]
161 self.prev_io_pressure = io_pressure_fds.readline().split()[4].split("=")[1]
162 self.prev_memory_pressure = memory_pressure_fds.readline().split()[4].split("=")[1]
163 self.prev_pressure_time = time.time()
164 self.check_pressure = True
165 except:
166 bb.note("The /proc/pressure files can't be read. Continuing build without monitoring pressure")
167 self.check_pressure = False
168 else:
169 self.check_pressure = False
170
171 def exceeds_max_pressure(self):
172 """
 173 Monitor the difference in total pressure at least once per second; if
 174 BB_PRESSURE_MAX_{CPU|IO|MEMORY} is set, return True when the threshold is exceeded.
175 """
176 if self.check_pressure:
177 with open("/proc/pressure/cpu") as cpu_pressure_fds, \
178 open("/proc/pressure/io") as io_pressure_fds, \
179 open("/proc/pressure/memory") as memory_pressure_fds:
 180 # extract "total" from each of /proc/pressure/{cpu|io|memory}
181 curr_cpu_pressure = cpu_pressure_fds.readline().split()[4].split("=")[1]
182 curr_io_pressure = io_pressure_fds.readline().split()[4].split("=")[1]
183 curr_memory_pressure = memory_pressure_fds.readline().split()[4].split("=")[1]
184 exceeds_cpu_pressure = self.rq.max_cpu_pressure and (float(curr_cpu_pressure) - float(self.prev_cpu_pressure)) > self.rq.max_cpu_pressure
185 exceeds_io_pressure = self.rq.max_io_pressure and (float(curr_io_pressure) - float(self.prev_io_pressure)) > self.rq.max_io_pressure
186 exceeds_memory_pressure = self.rq.max_memory_pressure and (float(curr_memory_pressure) - float(self.prev_memory_pressure)) > self.rq.max_memory_pressure
187 now = time.time()
188 if now - self.prev_pressure_time > 1.0:
189 self.prev_cpu_pressure = curr_cpu_pressure
190 self.prev_io_pressure = curr_io_pressure
191 self.prev_memory_pressure = curr_memory_pressure
192 self.prev_pressure_time = now
193 return (exceeds_cpu_pressure or exceeds_io_pressure or exceeds_memory_pressure)
194 return False
145 195
146 def next_buildable_task(self): 196 def next_buildable_task(self):
147 """ 197 """
@@ -155,6 +205,12 @@ class RunQueueScheduler(object):
155 if not buildable: 205 if not buildable:
156 return None 206 return None
157 207
 208 # Bitbake requires that at least one task be active. Only check for pressure if
 209 # this is the case, otherwise the pressure limitation could result in no tasks
 210 # being active and no new tasks being started, which would at times break the scheduler.
211 if self.rq.stats.active and self.exceeds_max_pressure():
212 return None
213
158 # Filter out tasks that have a max number of threads that have been exceeded 214 # Filter out tasks that have a max number of threads that have been exceeded
159 skip_buildable = {} 215 skip_buildable = {}
160 for running in self.rq.runq_running.difference(self.rq.runq_complete): 216 for running in self.rq.runq_running.difference(self.rq.runq_complete):
@@ -1256,8 +1312,8 @@ class RunQueue:
1256 "fakerootnoenv" : self.rqdata.dataCaches[mc].fakerootnoenv, 1312 "fakerootnoenv" : self.rqdata.dataCaches[mc].fakerootnoenv,
1257 "sigdata" : bb.parse.siggen.get_taskdata(), 1313 "sigdata" : bb.parse.siggen.get_taskdata(),
1258 "logdefaultlevel" : bb.msg.loggerDefaultLogLevel, 1314 "logdefaultlevel" : bb.msg.loggerDefaultLogLevel,
1259 "logdefaultverbose" : bb.msg.loggerDefaultVerbose, 1315 "build_verbose_shell" : self.cooker.configuration.build_verbose_shell,
1260 "logdefaultverboselogs" : bb.msg.loggerVerboseLogs, 1316 "build_verbose_stdout" : self.cooker.configuration.build_verbose_stdout,
1261 "logdefaultdomain" : bb.msg.loggerDefaultDomains, 1317 "logdefaultdomain" : bb.msg.loggerDefaultDomains,
1262 "prhost" : self.cooker.prhost, 1318 "prhost" : self.cooker.prhost,
1263 "buildname" : self.cfgData.getVar("BUILDNAME"), 1319 "buildname" : self.cfgData.getVar("BUILDNAME"),
@@ -1700,6 +1756,9 @@ class RunQueueExecute:
1700 1756
1701 self.number_tasks = int(self.cfgData.getVar("BB_NUMBER_THREADS") or 1) 1757 self.number_tasks = int(self.cfgData.getVar("BB_NUMBER_THREADS") or 1)
1702 self.scheduler = self.cfgData.getVar("BB_SCHEDULER") or "speed" 1758 self.scheduler = self.cfgData.getVar("BB_SCHEDULER") or "speed"
1759 self.max_cpu_pressure = self.cfgData.getVar("BB_PRESSURE_MAX_CPU")
1760 self.max_io_pressure = self.cfgData.getVar("BB_PRESSURE_MAX_IO")
1761 self.max_memory_pressure = self.cfgData.getVar("BB_PRESSURE_MAX_MEMORY")
1703 1762
1704 self.sq_buildable = set() 1763 self.sq_buildable = set()
1705 self.sq_running = set() 1764 self.sq_running = set()
@@ -1735,6 +1794,29 @@ class RunQueueExecute:
1735 if self.number_tasks <= 0: 1794 if self.number_tasks <= 0:
1736 bb.fatal("Invalid BB_NUMBER_THREADS %s" % self.number_tasks) 1795 bb.fatal("Invalid BB_NUMBER_THREADS %s" % self.number_tasks)
1737 1796
1797 lower_limit = 1.0
1798 upper_limit = 1000000.0
1799 if self.max_cpu_pressure:
1800 self.max_cpu_pressure = float(self.max_cpu_pressure)
1801 if self.max_cpu_pressure < lower_limit:
1802 bb.fatal("Invalid BB_PRESSURE_MAX_CPU %s, minimum value is %s." % (self.max_cpu_pressure, lower_limit))
1803 if self.max_cpu_pressure > upper_limit:
1804 bb.warn("Your build will be largely unregulated since BB_PRESSURE_MAX_CPU is set to %s. It is very unlikely that such high pressure will be experienced." % (self.max_cpu_pressure))
1805
1806 if self.max_io_pressure:
1807 self.max_io_pressure = float(self.max_io_pressure)
1808 if self.max_io_pressure < lower_limit:
1809 bb.fatal("Invalid BB_PRESSURE_MAX_IO %s, minimum value is %s." % (self.max_io_pressure, lower_limit))
1810 if self.max_io_pressure > upper_limit:
1811 bb.warn("Your build will be largely unregulated since BB_PRESSURE_MAX_IO is set to %s. It is very unlikely that such high pressure will be experienced." % (self.max_io_pressure))
1812
1813 if self.max_memory_pressure:
1814 self.max_memory_pressure = float(self.max_memory_pressure)
1815 if self.max_memory_pressure < lower_limit:
1816 bb.fatal("Invalid BB_PRESSURE_MAX_MEMORY %s, minimum value is %s." % (self.max_memory_pressure, lower_limit))
1817 if self.max_memory_pressure > upper_limit:
 1818 bb.warn("Your build will be largely unregulated since BB_PRESSURE_MAX_MEMORY is set to %s. It is very unlikely that such high pressure will be experienced." % (self.max_memory_pressure))
1819
1738 # List of setscene tasks which we've covered 1820 # List of setscene tasks which we've covered
1739 self.scenequeue_covered = set() 1821 self.scenequeue_covered = set()
1740 # List of tasks which are covered (including setscene ones) 1822 # List of tasks which are covered (including setscene ones)
@@ -1893,6 +1975,20 @@ class RunQueueExecute:
1893 self.setbuildable(revdep) 1975 self.setbuildable(revdep)
1894 logger.debug(1, "Marking task %s as buildable", revdep) 1976 logger.debug(1, "Marking task %s as buildable", revdep)
1895 1977
1978 found = None
1979 for t in sorted(self.sq_deferred.copy()):
1980 if self.sq_deferred[t] == task:
1981 # Allow the next deferred task to run. Any other deferred tasks should be deferred after that task.
1982 # We shouldn't allow all to run at once as it is prone to races.
1983 if not found:
1984 bb.note("Deferred task %s now buildable" % t)
1985 del self.sq_deferred[t]
1986 update_scenequeue_data([t], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False)
1987 found = t
1988 else:
1989 bb.note("Deferring %s after %s" % (t, found))
1990 self.sq_deferred[t] = found
1991
1896 def task_complete(self, task): 1992 def task_complete(self, task):
1897 self.stats.taskCompleted() 1993 self.stats.taskCompleted()
1898 bb.event.fire(runQueueTaskCompleted(task, self.stats, self.rq), self.cfgData) 1994 bb.event.fire(runQueueTaskCompleted(task, self.stats, self.rq), self.cfgData)
@@ -1934,6 +2030,10 @@ class RunQueueExecute:
1934 logger.error("Scenequeue had holdoff tasks: %s" % pprint.pformat(self.holdoff_tasks)) 2030 logger.error("Scenequeue had holdoff tasks: %s" % pprint.pformat(self.holdoff_tasks))
1935 err = True 2031 err = True
1936 2032
2033 for tid in self.scenequeue_covered.intersection(self.scenequeue_notcovered):
 2034 # No task should end up in both covered and uncovered; that is a bug.
2035 logger.error("Setscene task %s in both covered and notcovered." % tid)
2036
1937 for tid in self.rqdata.runq_setscene_tids: 2037 for tid in self.rqdata.runq_setscene_tids:
1938 if tid not in self.scenequeue_covered and tid not in self.scenequeue_notcovered: 2038 if tid not in self.scenequeue_covered and tid not in self.scenequeue_notcovered:
1939 err = True 2039 err = True
@@ -1998,8 +2098,6 @@ class RunQueueExecute:
1998 logger.debug(1, "%s didn't become valid, skipping setscene" % nexttask) 2098 logger.debug(1, "%s didn't become valid, skipping setscene" % nexttask)
1999 self.sq_task_failoutright(nexttask) 2099 self.sq_task_failoutright(nexttask)
2000 return True 2100 return True
2001 else:
2002 self.sqdata.outrightfail.remove(nexttask)
2003 if nexttask in self.sqdata.outrightfail: 2101 if nexttask in self.sqdata.outrightfail:
2004 logger.debug(2, 'No package found, so skipping setscene task %s', nexttask) 2102 logger.debug(2, 'No package found, so skipping setscene task %s', nexttask)
2005 self.sq_task_failoutright(nexttask) 2103 self.sq_task_failoutright(nexttask)
@@ -2150,7 +2248,8 @@ class RunQueueExecute:
2150 if self.sq_deferred: 2248 if self.sq_deferred:
2151 tid = self.sq_deferred.pop(list(self.sq_deferred.keys())[0]) 2249 tid = self.sq_deferred.pop(list(self.sq_deferred.keys())[0])
 2152 logger.warning("Runqueue deadlocked on deferred tasks, forcing task %s" % tid) 2250 logger.warning("Runqueue deadlocked on deferred tasks, forcing task %s" % tid)
2153 self.sq_task_failoutright(tid) 2251 if tid not in self.runq_complete:
2252 self.sq_task_failoutright(tid)
2154 return True 2253 return True
2155 2254
2156 if len(self.failed_tids) != 0: 2255 if len(self.failed_tids) != 0:
@@ -2264,10 +2363,16 @@ class RunQueueExecute:
2264 self.updated_taskhash_queue.remove((tid, unihash)) 2363 self.updated_taskhash_queue.remove((tid, unihash))
2265 2364
2266 if unihash != self.rqdata.runtaskentries[tid].unihash: 2365 if unihash != self.rqdata.runtaskentries[tid].unihash:
2267 hashequiv_logger.verbose("Task %s unihash changed to %s" % (tid, unihash)) 2366 # Make sure we rehash any other tasks with the same task hash that we're deferred against.
2268 self.rqdata.runtaskentries[tid].unihash = unihash 2367 torehash = [tid]
2269 bb.parse.siggen.set_unihash(tid, unihash) 2368 for deftid in self.sq_deferred:
2270 toprocess.add(tid) 2369 if self.sq_deferred[deftid] == tid:
2370 torehash.append(deftid)
2371 for hashtid in torehash:
2372 hashequiv_logger.verbose("Task %s unihash changed to %s" % (hashtid, unihash))
2373 self.rqdata.runtaskentries[hashtid].unihash = unihash
2374 bb.parse.siggen.set_unihash(hashtid, unihash)
2375 toprocess.add(hashtid)
2271 2376
2272 # Work out all tasks which depend upon these 2377 # Work out all tasks which depend upon these
2273 total = set() 2378 total = set()
@@ -2406,6 +2511,14 @@ class RunQueueExecute:
2406 2511
2407 if update_tasks: 2512 if update_tasks:
2408 self.sqdone = False 2513 self.sqdone = False
2514 for mc in sorted(self.sqdata.multiconfigs):
2515 for tid in sorted([t[0] for t in update_tasks]):
2516 if mc_from_tid(tid) != mc:
2517 continue
2518 h = pending_hash_index(tid, self.rqdata)
2519 if h in self.sqdata.hashes and tid != self.sqdata.hashes[h]:
2520 self.sq_deferred[tid] = self.sqdata.hashes[h]
2521 bb.note("Deferring %s after %s" % (tid, self.sqdata.hashes[h]))
2409 update_scenequeue_data([t[0] for t in update_tasks], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False) 2522 update_scenequeue_data([t[0] for t in update_tasks], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False)
2410 2523
2411 for (tid, harddepfail, origvalid) in update_tasks: 2524 for (tid, harddepfail, origvalid) in update_tasks:
@@ -2421,6 +2534,9 @@ class RunQueueExecute:
2421 2534
2422 for dep in sorted(self.sqdata.sq_deps[task]): 2535 for dep in sorted(self.sqdata.sq_deps[task]):
2423 if fail and task in self.sqdata.sq_harddeps and dep in self.sqdata.sq_harddeps[task]: 2536 if fail and task in self.sqdata.sq_harddeps and dep in self.sqdata.sq_harddeps[task]:
2537 if dep in self.scenequeue_covered or dep in self.scenequeue_notcovered:
 2538 # the dependency may already have been processed, e.g. by a noexec setscene task
2539 continue
2424 logger.debug(2, "%s was unavailable and is a hard dependency of %s so skipping" % (task, dep)) 2540 logger.debug(2, "%s was unavailable and is a hard dependency of %s so skipping" % (task, dep))
2425 self.sq_task_failoutright(dep) 2541 self.sq_task_failoutright(dep)
2426 continue 2542 continue
@@ -2743,6 +2859,19 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
2743 sqdata.stamppresent = set() 2859 sqdata.stamppresent = set()
2744 sqdata.valid = set() 2860 sqdata.valid = set()
2745 2861
2862 sqdata.hashes = {}
2863 sqrq.sq_deferred = {}
2864 for mc in sorted(sqdata.multiconfigs):
2865 for tid in sorted(sqdata.sq_revdeps):
2866 if mc_from_tid(tid) != mc:
2867 continue
2868 h = pending_hash_index(tid, rqdata)
2869 if h not in sqdata.hashes:
2870 sqdata.hashes[h] = tid
2871 else:
2872 sqrq.sq_deferred[tid] = sqdata.hashes[h]
2873 bb.note("Deferring %s after %s" % (tid, sqdata.hashes[h]))
2874
2746 update_scenequeue_data(sqdata.sq_revdeps, sqdata, rqdata, rq, cooker, stampcache, sqrq, summary=True) 2875 update_scenequeue_data(sqdata.sq_revdeps, sqdata, rqdata, rq, cooker, stampcache, sqrq, summary=True)
2747 2876
2748def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, summary=True): 2877def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, summary=True):
@@ -2754,6 +2883,8 @@ def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, s
2754 sqdata.stamppresent.remove(tid) 2883 sqdata.stamppresent.remove(tid)
2755 if tid in sqdata.valid: 2884 if tid in sqdata.valid:
2756 sqdata.valid.remove(tid) 2885 sqdata.valid.remove(tid)
2886 if tid in sqdata.outrightfail:
2887 sqdata.outrightfail.remove(tid)
2757 2888
2758 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) 2889 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
2759 2890
@@ -2781,28 +2912,20 @@ def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, s
2781 2912
2782 sqdata.valid |= rq.validate_hashes(tocheck, cooker.data, len(sqdata.stamppresent), False, summary=summary) 2913 sqdata.valid |= rq.validate_hashes(tocheck, cooker.data, len(sqdata.stamppresent), False, summary=summary)
2783 2914
2784 sqdata.hashes = {} 2915 for tid in tids:
2785 for mc in sorted(sqdata.multiconfigs): 2916 if tid in sqdata.stamppresent:
2786 for tid in sorted(sqdata.sq_revdeps): 2917 continue
2787 if mc_from_tid(tid) != mc: 2918 if tid in sqdata.valid:
2788 continue 2919 continue
2789 if tid in sqdata.stamppresent: 2920 if tid in sqdata.noexec:
2790 continue 2921 continue
2791 if tid in sqdata.valid: 2922 if tid in sqrq.scenequeue_covered:
2792 continue 2923 continue
2793 if tid in sqdata.noexec: 2924 if tid in sqrq.scenequeue_notcovered:
2794 continue 2925 continue
2795 if tid in sqrq.scenequeue_notcovered: 2926 if tid in sqrq.sq_deferred:
2796 continue 2927 continue
2797 sqdata.outrightfail.add(tid) 2928 sqdata.outrightfail.add(tid)
2798
2799 h = pending_hash_index(tid, rqdata)
2800 if h not in sqdata.hashes:
2801 sqdata.hashes[h] = tid
2802 else:
2803 sqrq.sq_deferred[tid] = sqdata.hashes[h]
2804 bb.note("Deferring %s after %s" % (tid, sqdata.hashes[h]))
2805
2806 2929
2807class TaskFailure(Exception): 2930class TaskFailure(Exception):
2808 """ 2931 """
diff --git a/bitbake/lib/bb/server/process.py b/bitbake/lib/bb/server/process.py
index b66fbe0acd..4bdb84ae37 100644
--- a/bitbake/lib/bb/server/process.py
+++ b/bitbake/lib/bb/server/process.py
@@ -25,6 +25,7 @@ import subprocess
25import errno 25import errno
26import re 26import re
27import datetime 27import datetime
28import gc
28import bb.server.xmlrpcserver 29import bb.server.xmlrpcserver
29from bb import daemonize 30from bb import daemonize
30from multiprocessing import queues 31from multiprocessing import queues
@@ -152,7 +153,8 @@ class ProcessServer(multiprocessing.Process):
152 conn = newconnections.pop(-1) 153 conn = newconnections.pop(-1)
153 fds.append(conn) 154 fds.append(conn)
154 self.controllersock = conn 155 self.controllersock = conn
155 elif self.timeout is None and not ready: 156
157 elif not self.timeout and not ready:
156 print("No timeout, exiting.") 158 print("No timeout, exiting.")
157 self.quit = True 159 self.quit = True
158 160
@@ -220,6 +222,7 @@ class ProcessServer(multiprocessing.Process):
220 try: 222 try:
221 print("Running command %s" % command) 223 print("Running command %s" % command)
222 self.command_channel_reply.send(self.cooker.command.runCommand(command)) 224 self.command_channel_reply.send(self.cooker.command.runCommand(command))
225 print("Command Completed")
223 except Exception as e: 226 except Exception as e:
224 logger.exception('Exception in server main event loop running command %s (%s)' % (command, str(e))) 227 logger.exception('Exception in server main event loop running command %s (%s)' % (command, str(e)))
225 228
@@ -347,7 +350,12 @@ class ServerCommunicator():
347 logger.info("No reply from server in 30s") 350 logger.info("No reply from server in 30s")
348 if not self.recv.poll(30): 351 if not self.recv.poll(30):
349 raise ProcessTimeout("Timeout while waiting for a reply from the bitbake server (60s)") 352 raise ProcessTimeout("Timeout while waiting for a reply from the bitbake server (60s)")
350 return self.recv.get() 353 ret, exc = self.recv.get()
 354 # Should probably turn all exception strings in exc back into real exceptions?
355 # For now, at least handle BBHandledException
356 if exc and "BBHandledException" in exc:
357 raise bb.BBHandledException()
358 return ret, exc
351 359
352 def updateFeatureSet(self, featureset): 360 def updateFeatureSet(self, featureset):
353 _, error = self.runCommand(["setFeatures", featureset]) 361 _, error = self.runCommand(["setFeatures", featureset])
@@ -586,7 +594,7 @@ class BBUIEventQueue:
586 self.reader = ConnectionReader(readfd) 594 self.reader = ConnectionReader(readfd)
587 595
588 self.t = threading.Thread() 596 self.t = threading.Thread()
589 self.t.setDaemon(True) 597 self.t.daemon = True
590 self.t.run = self.startCallbackHandler 598 self.t.run = self.startCallbackHandler
591 self.t.start() 599 self.t.start()
592 600
@@ -664,8 +672,10 @@ class ConnectionWriter(object):
664 672
665 def send(self, obj): 673 def send(self, obj):
666 obj = multiprocessing.reduction.ForkingPickler.dumps(obj) 674 obj = multiprocessing.reduction.ForkingPickler.dumps(obj)
675 gc.disable()
667 with self.wlock: 676 with self.wlock:
668 self.writer.send_bytes(obj) 677 self.writer.send_bytes(obj)
678 gc.enable()
669 679
670 def fileno(self): 680 def fileno(self):
671 return self.writer.fileno() 681 return self.writer.fileno()
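
The gc.disable()/gc.enable() pair keeps a garbage-collection pass from triggering while the pickled payload is sent under the write lock. As committed, an exception in send_bytes() would leave collection switched off; a more defensive form of the same pattern (a sketch, not the committed code) restores it with try/finally:

    import gc
    import multiprocessing.reduction

    def send(self, obj):
        # Sketch of a ConnectionWriter.send() that always re-enables the collector
        obj = multiprocessing.reduction.ForkingPickler.dumps(obj)
        gc.disable()
        try:
            with self.wlock:
                self.writer.send_bytes(obj)
        finally:
            gc.enable()
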
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py
index 26fa7f05ce..9d4f67aa90 100644
--- a/bitbake/lib/bb/siggen.py
+++ b/bitbake/lib/bb/siggen.py
@@ -318,7 +318,8 @@ class SignatureGeneratorBasic(SignatureGenerator):
318 else: 318 else:
319 sigfile = stampbase + "." + task + ".sigbasedata" + "." + self.basehash[tid] 319 sigfile = stampbase + "." + task + ".sigbasedata" + "." + self.basehash[tid]
320 320
321 bb.utils.mkdirhier(os.path.dirname(sigfile)) 321 with bb.utils.umask(0o002):
322 bb.utils.mkdirhier(os.path.dirname(sigfile))
322 323
323 data = {} 324 data = {}
324 data['task'] = task 325 data['task'] = task
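
bb.utils.umask() (added in the utils.py hunk below) temporarily switches the process umask and restores it on exit, so the signature directories here are created group-writable: a requested mode of 0o777 masked by 0o002 yields 0o775. A quick illustration of the effect using plain os calls:

    import os, tempfile

    with tempfile.TemporaryDirectory() as tmp:
        path = os.path.join(tmp, "sigdir")
        old = os.umask(0o002)          # what bb.utils.umask(0o002) does on entry
        try:
            os.mkdir(path, 0o777)      # 0o777 & ~0o002 -> 0o775, group-writable
        finally:
            os.umask(old)              # restored on exit, like the context manager
        print(oct(os.stat(path).st_mode & 0o777))   # 0o775
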
diff --git a/bitbake/lib/bb/tests/codeparser.py b/bitbake/lib/bb/tests/codeparser.py
index 826a2d2f6d..f1c4f618d8 100644
--- a/bitbake/lib/bb/tests/codeparser.py
+++ b/bitbake/lib/bb/tests/codeparser.py
@@ -111,9 +111,9 @@ ${D}${libdir}/pkgconfig/*.pc
111 self.assertExecs(set(["sed"])) 111 self.assertExecs(set(["sed"]))
112 112
113 def test_parameter_expansion_modifiers(self): 113 def test_parameter_expansion_modifiers(self):
 114 # - and + are also valid modifiers for parameter expansion, but are 114 # -, + and : are also valid modifiers for parameter expansion, but are
115 # valid characters in bitbake variable names, so are not included here 115 # valid characters in bitbake variable names, so are not included here
116 for i in ('=', ':-', ':=', '?', ':?', ':+', '#', '%', '##', '%%'): 116 for i in ('=', '?', '#', '%', '##', '%%'):
117 name = "foo%sbar" % i 117 name = "foo%sbar" % i
118 self.parseExpression("${%s}" % name) 118 self.parseExpression("${%s}" % name)
119 self.assertNotIn(name, self.references) 119 self.assertNotIn(name, self.references)
@@ -412,6 +412,32 @@ esac
412 # Check final value 412 # Check final value
413 self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['anothervalue', 'yetanothervalue', 'lastone']) 413 self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['anothervalue', 'yetanothervalue', 'lastone'])
414 414
415 def test_contains_vardeps_override_operators(self):
416 # Check override operators handle dependencies correctly with the contains functionality
417 expr_plain = 'testval'
418 expr_prepend = '${@bb.utils.filter("TESTVAR1", "testval1", d)} '
419 expr_append = ' ${@bb.utils.filter("TESTVAR2", "testval2", d)}'
420 expr_remove = '${@bb.utils.contains("TESTVAR3", "no-testval", "testval", "", d)}'
421 # Check dependencies
422 self.d.setVar('ANOTHERVAR', expr_plain)
423 self.d.prependVar('ANOTHERVAR', expr_prepend)
424 self.d.appendVar('ANOTHERVAR', expr_append)
425 self.d.setVar('ANOTHERVAR:remove', expr_remove)
426 self.d.setVar('TESTVAR1', 'blah')
427 self.d.setVar('TESTVAR2', 'testval2')
428 self.d.setVar('TESTVAR3', 'no-testval')
429 deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), self.d)
430 self.assertEqual(sorted(values.splitlines()),
431 sorted([
432 expr_prepend + expr_plain + expr_append,
433 '_remove of ' + expr_remove,
434 'TESTVAR1{testval1} = Unset',
435 'TESTVAR2{testval2} = Set',
436 'TESTVAR3{no-testval} = Set',
437 ]))
438 # Check final value
439 self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['testval2'])
440
415 #Currently no wildcard support 441 #Currently no wildcard support
416 #def test_vardeps_wildcards(self): 442 #def test_vardeps_wildcards(self):
417 # self.d.setVar("oe_libinstall", "echo test") 443 # self.d.setVar("oe_libinstall", "echo test")
diff --git a/bitbake/lib/bb/tests/event.py b/bitbake/lib/bb/tests/event.py
index 9229b63d47..9ca7e9bc8e 100644
--- a/bitbake/lib/bb/tests/event.py
+++ b/bitbake/lib/bb/tests/event.py
@@ -6,17 +6,18 @@
6# SPDX-License-Identifier: GPL-2.0-only 6# SPDX-License-Identifier: GPL-2.0-only
7# 7#
8 8
9import unittest 9import collections
10import bb
11import logging
12import bb.compat
13import bb.event
14import importlib 10import importlib
11import logging
12import pickle
15import threading 13import threading
16import time 14import time
17import pickle 15import unittest
18from unittest.mock import Mock 16from unittest.mock import Mock
19from unittest.mock import call 17from unittest.mock import call
18
19import bb
20import bb.event
20from bb.msg import BBLogFormatter 21from bb.msg import BBLogFormatter
21 22
22 23
@@ -75,7 +76,7 @@ class EventHandlingTest(unittest.TestCase):
75 76
76 def _create_test_handlers(self): 77 def _create_test_handlers(self):
77 """ Method used to create a test handler ordered dictionary """ 78 """ Method used to create a test handler ordered dictionary """
78 test_handlers = bb.compat.OrderedDict() 79 test_handlers = collections.OrderedDict()
79 test_handlers["handler1"] = self._test_process.handler1 80 test_handlers["handler1"] = self._test_process.handler1
80 test_handlers["handler2"] = self._test_process.handler2 81 test_handlers["handler2"] = self._test_process.handler2
81 return test_handlers 82 return test_handlers
@@ -96,7 +97,7 @@ class EventHandlingTest(unittest.TestCase):
96 97
97 def test_clean_class_handlers(self): 98 def test_clean_class_handlers(self):
98 """ Test clean_class_handlers method """ 99 """ Test clean_class_handlers method """
99 cleanDict = bb.compat.OrderedDict() 100 cleanDict = collections.OrderedDict()
100 self.assertEqual(cleanDict, 101 self.assertEqual(cleanDict,
101 bb.event.clean_class_handlers()) 102 bb.event.clean_class_handlers())
102 103
diff --git a/bitbake/lib/bb/tests/fetch.py b/bitbake/lib/bb/tests/fetch.py
index 9453c90d2b..61dd5cccaf 100644
--- a/bitbake/lib/bb/tests/fetch.py
+++ b/bitbake/lib/bb/tests/fetch.py
@@ -371,6 +371,7 @@ class FetcherTest(unittest.TestCase):
371 if os.environ.get("BB_TMPDIR_NOCLEAN") == "yes": 371 if os.environ.get("BB_TMPDIR_NOCLEAN") == "yes":
372 print("Not cleaning up %s. Please remove manually." % self.tempdir) 372 print("Not cleaning up %s. Please remove manually." % self.tempdir)
373 else: 373 else:
374 bb.process.run('chmod u+rw -R %s' % self.tempdir)
374 bb.utils.prunedir(self.tempdir) 375 bb.utils.prunedir(self.tempdir)
375 376
376class MirrorUriTest(FetcherTest): 377class MirrorUriTest(FetcherTest):
@@ -471,7 +472,7 @@ class GitDownloadDirectoryNamingTest(FetcherTest):
471 super(GitDownloadDirectoryNamingTest, self).setUp() 472 super(GitDownloadDirectoryNamingTest, self).setUp()
472 self.recipe_url = "git://git.openembedded.org/bitbake" 473 self.recipe_url = "git://git.openembedded.org/bitbake"
473 self.recipe_dir = "git.openembedded.org.bitbake" 474 self.recipe_dir = "git.openembedded.org.bitbake"
474 self.mirror_url = "git://github.com/openembedded/bitbake.git" 475 self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https"
475 self.mirror_dir = "github.com.openembedded.bitbake.git" 476 self.mirror_dir = "github.com.openembedded.bitbake.git"
476 477
477 self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40') 478 self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
@@ -519,7 +520,7 @@ class TarballNamingTest(FetcherTest):
519 super(TarballNamingTest, self).setUp() 520 super(TarballNamingTest, self).setUp()
520 self.recipe_url = "git://git.openembedded.org/bitbake" 521 self.recipe_url = "git://git.openembedded.org/bitbake"
521 self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz" 522 self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz"
522 self.mirror_url = "git://github.com/openembedded/bitbake.git" 523 self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https"
523 self.mirror_tarball = "git2_github.com.openembedded.bitbake.git.tar.gz" 524 self.mirror_tarball = "git2_github.com.openembedded.bitbake.git.tar.gz"
524 525
525 self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1') 526 self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1')
@@ -553,7 +554,7 @@ class GitShallowTarballNamingTest(FetcherTest):
553 super(GitShallowTarballNamingTest, self).setUp() 554 super(GitShallowTarballNamingTest, self).setUp()
554 self.recipe_url = "git://git.openembedded.org/bitbake" 555 self.recipe_url = "git://git.openembedded.org/bitbake"
555 self.recipe_tarball = "gitshallow_git.openembedded.org.bitbake_82ea737-1_master.tar.gz" 556 self.recipe_tarball = "gitshallow_git.openembedded.org.bitbake_82ea737-1_master.tar.gz"
556 self.mirror_url = "git://github.com/openembedded/bitbake.git" 557 self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https"
557 self.mirror_tarball = "gitshallow_github.com.openembedded.bitbake.git_82ea737-1_master.tar.gz" 558 self.mirror_tarball = "gitshallow_github.com.openembedded.bitbake.git_82ea737-1_master.tar.gz"
558 559
559 self.d.setVar('BB_GIT_SHALLOW', '1') 560 self.d.setVar('BB_GIT_SHALLOW', '1')
@@ -649,6 +650,58 @@ class FetcherLocalTest(FetcherTest):
649 with self.assertRaises(bb.fetch2.UnpackError): 650 with self.assertRaises(bb.fetch2.UnpackError):
650 self.fetchUnpack(['file://a;subdir=/bin/sh']) 651 self.fetchUnpack(['file://a;subdir=/bin/sh'])
651 652
653 def test_local_gitfetch_usehead(self):
654 # Create dummy local Git repo
655 src_dir = tempfile.mkdtemp(dir=self.tempdir,
656 prefix='gitfetch_localusehead_')
657 src_dir = os.path.abspath(src_dir)
658 bb.process.run("git init", cwd=src_dir)
659 bb.process.run("git commit --allow-empty -m'Dummy commit'",
660 cwd=src_dir)
661 # Use other branch than master
662 bb.process.run("git checkout -b my-devel", cwd=src_dir)
663 bb.process.run("git commit --allow-empty -m'Dummy commit 2'",
664 cwd=src_dir)
665 stdout = bb.process.run("git rev-parse HEAD", cwd=src_dir)
666 orig_rev = stdout[0].strip()
667
668 # Fetch and check revision
669 self.d.setVar("SRCREV", "AUTOINC")
670 url = "git://" + src_dir + ";protocol=file;usehead=1"
671 fetcher = bb.fetch.Fetch([url], self.d)
672 fetcher.download()
673 fetcher.unpack(self.unpackdir)
674 stdout = bb.process.run("git rev-parse HEAD",
675 cwd=os.path.join(self.unpackdir, 'git'))
676 unpack_rev = stdout[0].strip()
677 self.assertEqual(orig_rev, unpack_rev)
678
679 def test_local_gitfetch_usehead_withname(self):
680 # Create dummy local Git repo
681 src_dir = tempfile.mkdtemp(dir=self.tempdir,
682 prefix='gitfetch_localusehead_')
683 src_dir = os.path.abspath(src_dir)
684 bb.process.run("git init", cwd=src_dir)
685 bb.process.run("git commit --allow-empty -m'Dummy commit'",
686 cwd=src_dir)
687 # Use other branch than master
688 bb.process.run("git checkout -b my-devel", cwd=src_dir)
689 bb.process.run("git commit --allow-empty -m'Dummy commit 2'",
690 cwd=src_dir)
691 stdout = bb.process.run("git rev-parse HEAD", cwd=src_dir)
692 orig_rev = stdout[0].strip()
693
694 # Fetch and check revision
695 self.d.setVar("SRCREV", "AUTOINC")
696 url = "git://" + src_dir + ";protocol=file;usehead=1;name=newName"
697 fetcher = bb.fetch.Fetch([url], self.d)
698 fetcher.download()
699 fetcher.unpack(self.unpackdir)
700 stdout = bb.process.run("git rev-parse HEAD",
701 cwd=os.path.join(self.unpackdir, 'git'))
702 unpack_rev = stdout[0].strip()
703 self.assertEqual(orig_rev, unpack_rev)
704
652class FetcherNoNetworkTest(FetcherTest): 705class FetcherNoNetworkTest(FetcherTest):
653 def setUp(self): 706 def setUp(self):
654 super().setUp() 707 super().setUp()
@@ -845,6 +898,8 @@ class FetcherNetworkTest(FetcherTest):
845 prefix='gitfetch_localusehead_') 898 prefix='gitfetch_localusehead_')
846 src_dir = os.path.abspath(src_dir) 899 src_dir = os.path.abspath(src_dir)
847 bb.process.run("git init", cwd=src_dir) 900 bb.process.run("git init", cwd=src_dir)
901 bb.process.run("git config user.email 'you@example.com'", cwd=src_dir)
902 bb.process.run("git config user.name 'Your Name'", cwd=src_dir)
848 bb.process.run("git commit --allow-empty -m'Dummy commit'", 903 bb.process.run("git commit --allow-empty -m'Dummy commit'",
849 cwd=src_dir) 904 cwd=src_dir)
850 # Use other branch than master 905 # Use other branch than master
@@ -918,7 +973,7 @@ class FetcherNetworkTest(FetcherTest):
918 def test_git_submodule_dbus_broker(self): 973 def test_git_submodule_dbus_broker(self):
 919 # The following external repositories have shown failures in fetch and unpack operations 974 # The following external repositories have shown failures in fetch and unpack operations
920 # We want to avoid regressions! 975 # We want to avoid regressions!
921 url = "gitsm://github.com/bus1/dbus-broker;protocol=git;rev=fc874afa0992d0c75ec25acb43d344679f0ee7d2;branch=main" 976 url = "gitsm://github.com/bus1/dbus-broker;protocol=https;rev=fc874afa0992d0c75ec25acb43d344679f0ee7d2;branch=main"
922 fetcher = bb.fetch.Fetch([url], self.d) 977 fetcher = bb.fetch.Fetch([url], self.d)
923 fetcher.download() 978 fetcher.download()
924 # Previous cwd has been deleted 979 # Previous cwd has been deleted
@@ -934,7 +989,7 @@ class FetcherNetworkTest(FetcherTest):
934 989
935 @skipIfNoNetwork() 990 @skipIfNoNetwork()
936 def test_git_submodule_CLI11(self): 991 def test_git_submodule_CLI11(self):
937 url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf" 992 url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf;branch=main"
938 fetcher = bb.fetch.Fetch([url], self.d) 993 fetcher = bb.fetch.Fetch([url], self.d)
939 fetcher.download() 994 fetcher.download()
940 # Previous cwd has been deleted 995 # Previous cwd has been deleted
@@ -949,12 +1004,12 @@ class FetcherNetworkTest(FetcherTest):
949 @skipIfNoNetwork() 1004 @skipIfNoNetwork()
950 def test_git_submodule_update_CLI11(self): 1005 def test_git_submodule_update_CLI11(self):
951 """ Prevent regression on update detection not finding missing submodule, or modules without needed commits """ 1006 """ Prevent regression on update detection not finding missing submodule, or modules without needed commits """
952 url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714" 1007 url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714;branch=main"
953 fetcher = bb.fetch.Fetch([url], self.d) 1008 fetcher = bb.fetch.Fetch([url], self.d)
954 fetcher.download() 1009 fetcher.download()
955 1010
956 # CLI11 that pulls in a newer nlohmann-json 1011 # CLI11 that pulls in a newer nlohmann-json
957 url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca" 1012 url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca;branch=main"
958 fetcher = bb.fetch.Fetch([url], self.d) 1013 fetcher = bb.fetch.Fetch([url], self.d)
959 fetcher.download() 1014 fetcher.download()
960 # Previous cwd has been deleted 1015 # Previous cwd has been deleted
@@ -968,7 +1023,7 @@ class FetcherNetworkTest(FetcherTest):
968 1023
969 @skipIfNoNetwork() 1024 @skipIfNoNetwork()
970 def test_git_submodule_aktualizr(self): 1025 def test_git_submodule_aktualizr(self):
971 url = "gitsm://github.com/advancedtelematic/aktualizr;branch=master;protocol=git;rev=d00d1a04cc2366d1a5f143b84b9f507f8bd32c44" 1026 url = "gitsm://github.com/advancedtelematic/aktualizr;branch=master;protocol=https;rev=d00d1a04cc2366d1a5f143b84b9f507f8bd32c44"
972 fetcher = bb.fetch.Fetch([url], self.d) 1027 fetcher = bb.fetch.Fetch([url], self.d)
973 fetcher.download() 1028 fetcher.download()
974 # Previous cwd has been deleted 1029 # Previous cwd has been deleted
@@ -988,7 +1043,7 @@ class FetcherNetworkTest(FetcherTest):
988 """ Prevent regression on deeply nested submodules not being checked out properly, even though they were fetched. """ 1043 """ Prevent regression on deeply nested submodules not being checked out properly, even though they were fetched. """
989 1044
990 # This repository also has submodules where the module (name), path and url do not align 1045 # This repository also has submodules where the module (name), path and url do not align
991 url = "gitsm://github.com/azure/iotedge.git;protocol=git;rev=d76e0316c6f324345d77c48a83ce836d09392699" 1046 url = "gitsm://github.com/azure/iotedge.git;protocol=https;rev=d76e0316c6f324345d77c48a83ce836d09392699;branch=main"
992 fetcher = bb.fetch.Fetch([url], self.d) 1047 fetcher = bb.fetch.Fetch([url], self.d)
993 fetcher.download() 1048 fetcher.download()
994 # Previous cwd has been deleted 1049 # Previous cwd has been deleted
@@ -1046,7 +1101,7 @@ class SVNTest(FetcherTest):
1046 1101
1047 bb.process.run("svn co %s svnfetch_co" % self.repo_url, cwd=self.tempdir) 1102 bb.process.run("svn co %s svnfetch_co" % self.repo_url, cwd=self.tempdir)
 1048 # Github will emulate SVN. Use this to check if we're downloading... 1103 # Github will emulate SVN. Use this to check if we're downloading...
1049 bb.process.run("svn propset svn:externals 'bitbake svn://vcs.pcre.org/pcre2/code' .", 1104 bb.process.run("svn propset svn:externals 'bitbake https://github.com/PhilipHazel/pcre2.git' .",
1050 cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk')) 1105 cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
1051 bb.process.run("svn commit --non-interactive -m 'Add external'", 1106 bb.process.run("svn commit --non-interactive -m 'Add external'",
1052 cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk')) 1107 cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
@@ -1164,7 +1219,7 @@ class FetchLatestVersionTest(FetcherTest):
1164 1219
1165 test_git_uris = { 1220 test_git_uris = {
1166 # version pattern "X.Y.Z" 1221 # version pattern "X.Y.Z"
1167 ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4", "9b1db6b8060bd00b121a692f942404a24ae2960f", "") 1222 ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4;protocol=https", "9b1db6b8060bd00b121a692f942404a24ae2960f", "")
1168 : "1.99.4", 1223 : "1.99.4",
1169 # version pattern "vX.Y" 1224 # version pattern "vX.Y"
1170 # mirror of git.infradead.org since network issues interfered with testing 1225 # mirror of git.infradead.org since network issues interfered with testing
@@ -1175,7 +1230,7 @@ class FetchLatestVersionTest(FetcherTest):
1175 ("presentproto", "git://git.yoctoproject.org/bbfetchtests-presentproto", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "") 1230 ("presentproto", "git://git.yoctoproject.org/bbfetchtests-presentproto", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "")
1176 : "1.0", 1231 : "1.0",
1177 # version pattern "pkg_name-vX.Y.Z" 1232 # version pattern "pkg_name-vX.Y.Z"
1178 ("dtc", "git://git.qemu.org/dtc.git", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "") 1233 ("dtc", "git://git.yoctoproject.org/bbfetchtests-dtc.git", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "")
1179 : "1.4.0", 1234 : "1.4.0",
1180 # combination version pattern 1235 # combination version pattern
1181 ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https", "cd44ee6644c3641507fb53b8a2a69137f2971219", "") 1236 ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https", "cd44ee6644c3641507fb53b8a2a69137f2971219", "")
@@ -1187,13 +1242,13 @@ class FetchLatestVersionTest(FetcherTest):
1187 : "20120614", 1242 : "20120614",
1188 # packages with a valid UPSTREAM_CHECK_GITTAGREGEX 1243 # packages with a valid UPSTREAM_CHECK_GITTAGREGEX
1189 # mirror of git://anongit.freedesktop.org/xorg/driver/xf86-video-omap since network issues interfered with testing 1244 # mirror of git://anongit.freedesktop.org/xorg/driver/xf86-video-omap since network issues interfered with testing
1190 ("xf86-video-omap", "git://git.yoctoproject.org/bbfetchtests-xf86-video-omap", "ae0394e687f1a77e966cf72f895da91840dffb8f", "(?P<pver>(\d+\.(\d\.?)*))") 1245 ("xf86-video-omap", "git://git.yoctoproject.org/bbfetchtests-xf86-video-omap", "ae0394e687f1a77e966cf72f895da91840dffb8f", r"(?P<pver>(\d+\.(\d\.?)*))")
1191 : "0.4.3", 1246 : "0.4.3",
1192 ("build-appliance-image", "git://git.yoctoproject.org/poky", "b37dd451a52622d5b570183a81583cc34c2ff555", "(?P<pver>(([0-9][\.|_]?)+[0-9]))") 1247 ("build-appliance-image", "git://git.yoctoproject.org/poky", "b37dd451a52622d5b570183a81583cc34c2ff555", r"(?P<pver>(([0-9][\.|_]?)+[0-9]))")
1193 : "11.0.0", 1248 : "11.0.0",
1194 ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot", "cd437ecbd8986c894442f8fce1e0061e20f04dee", "chkconfig\-(?P<pver>((\d+[\.\-_]*)+))") 1249 ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot;protocol=https", "cd437ecbd8986c894442f8fce1e0061e20f04dee", r"chkconfig\-(?P<pver>((\d+[\.\-_]*)+))")
1195 : "1.3.59", 1250 : "1.3.59",
1196 ("remake", "git://github.com/rocky/remake.git", "f05508e521987c8494c92d9c2871aec46307d51d", "(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))") 1251 ("remake", "git://github.com/rocky/remake.git;protocol=https", "f05508e521987c8494c92d9c2871aec46307d51d", r"(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))")
1197 : "3.82+dbg0.9", 1252 : "3.82+dbg0.9",
1198 } 1253 }
1199 1254
@@ -1233,11 +1288,11 @@ class FetchLatestVersionTest(FetcherTest):
1233 # 1288 #
1234 # http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2 1289 # http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2
1235 # https://github.com/apple/cups/releases 1290 # https://github.com/apple/cups/releases
1236 ("cups", "/software/1.7.2/cups-1.7.2-source.tar.bz2", "/apple/cups/releases", "(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz") 1291 ("cups", "/software/1.7.2/cups-1.7.2-source.tar.bz2", "/apple/cups/releases", r"(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz")
1237 : "2.0.0", 1292 : "2.0.0",
1238 # http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz 1293 # http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz
1239 # http://ftp.debian.org/debian/pool/main/d/db5.3/ 1294 # http://ftp.debian.org/debian/pool/main/d/db5.3/
1240 ("db", "/berkeley-db/db-5.3.21.tar.gz", "/debian/pool/main/d/db5.3/", "(?P<name>db5\.3_)(?P<pver>\d+(\.\d+)+).+\.orig\.tar\.xz") 1295 ("db", "/berkeley-db/db-5.3.21.tar.gz", "/debian/pool/main/d/db5.3/", r"(?P<name>db5\.3_)(?P<pver>\d+(\.\d+)+).+\.orig\.tar\.xz")
1241 : "5.3.10", 1296 : "5.3.10",
1242 } 1297 }
1243 1298
@@ -1283,13 +1338,10 @@ class FetchCheckStatusTest(FetcherTest):
1283 "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.2.tar.gz", 1338 "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.2.tar.gz",
1284 "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.3.tar.gz", 1339 "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.3.tar.gz",
1285 "https://yoctoproject.org/", 1340 "https://yoctoproject.org/",
1286 "https://yoctoproject.org/documentation", 1341 "https://docs.yoctoproject.org/",
1287 "http://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz", 1342 "http://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz",
1288 "http://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz", 1343 "http://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz",
1289 "ftp://sourceware.org/pub/libffi/libffi-1.20.tar.gz", 1344 "ftp://sourceware.org/pub/libffi/libffi-1.20.tar.gz",
1290 "http://ftp.gnu.org/gnu/autoconf/autoconf-2.60.tar.gz",
1291 "https://ftp.gnu.org/gnu/chess/gnuchess-5.08.tar.gz",
1292 "https://ftp.gnu.org/gnu/gmp/gmp-4.0.tar.gz",
1293 # GitHub releases are hosted on Amazon S3, which doesn't support HEAD 1345 # GitHub releases are hosted on Amazon S3, which doesn't support HEAD
1294 "https://github.com/kergoth/tslib/releases/download/1.1/tslib-1.1.tar.xz" 1346 "https://github.com/kergoth/tslib/releases/download/1.1/tslib-1.1.tar.xz"
1295 ] 1347 ]
@@ -1328,6 +1380,8 @@ class GitMakeShallowTest(FetcherTest):
1328 self.gitdir = os.path.join(self.tempdir, 'gitshallow') 1380 self.gitdir = os.path.join(self.tempdir, 'gitshallow')
1329 bb.utils.mkdirhier(self.gitdir) 1381 bb.utils.mkdirhier(self.gitdir)
1330 bb.process.run('git init', cwd=self.gitdir) 1382 bb.process.run('git init', cwd=self.gitdir)
1383 bb.process.run('git config user.email "you@example.com"', cwd=self.gitdir)
1384 bb.process.run('git config user.name "Your Name"', cwd=self.gitdir)
1331 1385
1332 def assertRefs(self, expected_refs): 1386 def assertRefs(self, expected_refs):
1333 actual_refs = self.git(['for-each-ref', '--format=%(refname)']).splitlines() 1387 actual_refs = self.git(['for-each-ref', '--format=%(refname)']).splitlines()
@@ -1451,6 +1505,8 @@ class GitShallowTest(FetcherTest):
1451 1505
1452 bb.utils.mkdirhier(self.srcdir) 1506 bb.utils.mkdirhier(self.srcdir)
1453 self.git('init', cwd=self.srcdir) 1507 self.git('init', cwd=self.srcdir)
1508 self.git('config user.email "you@example.com"', cwd=self.srcdir)
1509 self.git('config user.name "Your Name"', cwd=self.srcdir)
1454 self.d.setVar('WORKDIR', self.tempdir) 1510 self.d.setVar('WORKDIR', self.tempdir)
1455 self.d.setVar('S', self.gitdir) 1511 self.d.setVar('S', self.gitdir)
1456 self.d.delVar('PREMIRRORS') 1512 self.d.delVar('PREMIRRORS')
@@ -1532,6 +1588,7 @@ class GitShallowTest(FetcherTest):
1532 1588
1533 # fetch and unpack, from the shallow tarball 1589 # fetch and unpack, from the shallow tarball
1534 bb.utils.remove(self.gitdir, recurse=True) 1590 bb.utils.remove(self.gitdir, recurse=True)
1591 bb.process.run('chmod u+w -R "%s"' % ud.clonedir)
1535 bb.utils.remove(ud.clonedir, recurse=True) 1592 bb.utils.remove(ud.clonedir, recurse=True)
1536 bb.utils.remove(ud.clonedir.replace('gitsource', 'gitsubmodule'), recurse=True) 1593 bb.utils.remove(ud.clonedir.replace('gitsource', 'gitsubmodule'), recurse=True)
1537 1594
@@ -1684,6 +1741,8 @@ class GitShallowTest(FetcherTest):
1684 smdir = os.path.join(self.tempdir, 'gitsubmodule') 1741 smdir = os.path.join(self.tempdir, 'gitsubmodule')
1685 bb.utils.mkdirhier(smdir) 1742 bb.utils.mkdirhier(smdir)
1686 self.git('init', cwd=smdir) 1743 self.git('init', cwd=smdir)
1744 self.git('config user.email "you@example.com"', cwd=smdir)
1745 self.git('config user.name "Your Name"', cwd=smdir)
1687 # Make this look like it was cloned from a remote... 1746 # Make this look like it was cloned from a remote...
1688 self.git('config --add remote.origin.url "%s"' % smdir, cwd=smdir) 1747 self.git('config --add remote.origin.url "%s"' % smdir, cwd=smdir)
1689 self.git('config --add remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"', cwd=smdir) 1748 self.git('config --add remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"', cwd=smdir)
@@ -1691,7 +1750,7 @@ class GitShallowTest(FetcherTest):
1691 self.add_empty_file('bsub', cwd=smdir) 1750 self.add_empty_file('bsub', cwd=smdir)
1692 1751
1693 self.git('submodule init', cwd=self.srcdir) 1752 self.git('submodule init', cwd=self.srcdir)
1694 self.git('submodule add file://%s' % smdir, cwd=self.srcdir) 1753 self.git('-c protocol.file.allow=always submodule add file://%s' % smdir, cwd=self.srcdir)
1695 self.git('submodule update', cwd=self.srcdir) 1754 self.git('submodule update', cwd=self.srcdir)
1696 self.git('commit -m submodule -a', cwd=self.srcdir) 1755 self.git('commit -m submodule -a', cwd=self.srcdir)
1697 1756
@@ -1714,6 +1773,8 @@ class GitShallowTest(FetcherTest):
1714 smdir = os.path.join(self.tempdir, 'gitsubmodule') 1773 smdir = os.path.join(self.tempdir, 'gitsubmodule')
1715 bb.utils.mkdirhier(smdir) 1774 bb.utils.mkdirhier(smdir)
1716 self.git('init', cwd=smdir) 1775 self.git('init', cwd=smdir)
1776 self.git('config user.email "you@example.com"', cwd=smdir)
1777 self.git('config user.name "Your Name"', cwd=smdir)
1717 # Make this look like it was cloned from a remote... 1778 # Make this look like it was cloned from a remote...
1718 self.git('config --add remote.origin.url "%s"' % smdir, cwd=smdir) 1779 self.git('config --add remote.origin.url "%s"' % smdir, cwd=smdir)
1719 self.git('config --add remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"', cwd=smdir) 1780 self.git('config --add remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"', cwd=smdir)
@@ -1721,7 +1782,7 @@ class GitShallowTest(FetcherTest):
1721 self.add_empty_file('bsub', cwd=smdir) 1782 self.add_empty_file('bsub', cwd=smdir)
1722 1783
1723 self.git('submodule init', cwd=self.srcdir) 1784 self.git('submodule init', cwd=self.srcdir)
1724 self.git('submodule add file://%s' % smdir, cwd=self.srcdir) 1785 self.git('-c protocol.file.allow=always submodule add file://%s' % smdir, cwd=self.srcdir)
1725 self.git('submodule update', cwd=self.srcdir) 1786 self.git('submodule update', cwd=self.srcdir)
1726 self.git('commit -m submodule -a', cwd=self.srcdir) 1787 self.git('commit -m submodule -a', cwd=self.srcdir)
1727 1788
@@ -1756,8 +1817,8 @@ class GitShallowTest(FetcherTest):
1756 self.git('annex init', cwd=self.srcdir) 1817 self.git('annex init', cwd=self.srcdir)
1757 open(os.path.join(self.srcdir, 'c'), 'w').close() 1818 open(os.path.join(self.srcdir, 'c'), 'w').close()
1758 self.git('annex add c', cwd=self.srcdir) 1819 self.git('annex add c', cwd=self.srcdir)
1759 self.git('commit -m annex-c -a', cwd=self.srcdir) 1820 self.git('commit --author "Foo Bar <foo@bar>" -m annex-c -a', cwd=self.srcdir)
1760 bb.process.run('chmod u+w -R %s' % os.path.join(self.srcdir, '.git', 'annex')) 1821 bb.process.run('chmod u+w -R %s' % self.srcdir)
1761 1822
1762 uri = 'gitannex://%s;protocol=file;subdir=${S}' % self.srcdir 1823 uri = 'gitannex://%s;protocol=file;subdir=${S}' % self.srcdir
1763 fetcher, ud = self.fetch_shallow(uri) 1824 fetcher, ud = self.fetch_shallow(uri)
@@ -1971,7 +2032,7 @@ class GitShallowTest(FetcherTest):
1971 2032
1972 @skipIfNoNetwork() 2033 @skipIfNoNetwork()
1973 def test_bitbake(self): 2034 def test_bitbake(self):
1974 self.git('remote add --mirror=fetch origin git://github.com/openembedded/bitbake', cwd=self.srcdir) 2035 self.git('remote add --mirror=fetch origin https://github.com/openembedded/bitbake', cwd=self.srcdir)
1975 self.git('config core.bare true', cwd=self.srcdir) 2036 self.git('config core.bare true', cwd=self.srcdir)
1976 self.git('fetch', cwd=self.srcdir) 2037 self.git('fetch', cwd=self.srcdir)
1977 2038
@@ -2032,6 +2093,8 @@ class GitLfsTest(FetcherTest):
2032 2093
2033 bb.utils.mkdirhier(self.srcdir) 2094 bb.utils.mkdirhier(self.srcdir)
2034 self.git('init', cwd=self.srcdir) 2095 self.git('init', cwd=self.srcdir)
2096 self.git('config user.email "you@example.com"', cwd=self.srcdir)
2097 self.git('config user.name "Your Name"', cwd=self.srcdir)
2035 with open(os.path.join(self.srcdir, '.gitattributes'), 'wt') as attrs: 2098 with open(os.path.join(self.srcdir, '.gitattributes'), 'wt') as attrs:
2036 attrs.write('*.mp3 filter=lfs -text') 2099 attrs.write('*.mp3 filter=lfs -text')
2037 self.git(['add', '.gitattributes'], cwd=self.srcdir) 2100 self.git(['add', '.gitattributes'], cwd=self.srcdir)
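
Most of these test fixes track changes in the surrounding ecosystem: commits in a freshly initialised repository need an explicit user.name/user.email, newer git releases refuse file:// submodule URLs unless protocol.file.allow is raised, and GitHub's anonymous git:// endpoints have been retired in favour of https. A repository set up the way the fixed tests do it (a sketch; paths and names are illustrative):

    import subprocess, tempfile

    def git(args, cwd):
        subprocess.run(["git"] + args, cwd=cwd, check=True)

    src = tempfile.mkdtemp(prefix="gitfetch_src_")
    sub = tempfile.mkdtemp(prefix="gitfetch_sub_")
    for repo in (src, sub):
        git(["init"], repo)
        git(["config", "user.email", "you@example.com"], repo)  # required before committing
        git(["config", "user.name", "Your Name"], repo)
        git(["commit", "--allow-empty", "-m", "Dummy commit"], repo)
    # Newer git blocks file:// submodules unless protocol.file.allow is raised:
    git(["-c", "protocol.file.allow=always", "submodule", "add", "file://" + sub], src)
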
diff --git a/bitbake/lib/bb/tinfoil.py b/bitbake/lib/bb/tinfoil.py
index 8c9b6b8ca5..8bec8cbaf6 100644
--- a/bitbake/lib/bb/tinfoil.py
+++ b/bitbake/lib/bb/tinfoil.py
@@ -53,6 +53,10 @@ class TinfoilDataStoreConnectorVarHistory:
53 def remoteCommand(self, cmd, *args, **kwargs): 53 def remoteCommand(self, cmd, *args, **kwargs):
54 return self.tinfoil.run_command('dataStoreConnectorVarHistCmd', self.dsindex, cmd, args, kwargs) 54 return self.tinfoil.run_command('dataStoreConnectorVarHistCmd', self.dsindex, cmd, args, kwargs)
55 55
56 def emit(self, var, oval, val, o, d):
57 ret = self.tinfoil.run_command('dataStoreConnectorVarHistCmdEmit', self.dsindex, var, oval, val, d.dsindex)
58 o.write(ret)
59
56 def __getattr__(self, name): 60 def __getattr__(self, name):
57 if not hasattr(bb.data_smart.VariableHistory, name): 61 if not hasattr(bb.data_smart.VariableHistory, name):
58 raise AttributeError("VariableHistory has no such method %s" % name) 62 raise AttributeError("VariableHistory has no such method %s" % name)
@@ -448,7 +452,7 @@ class Tinfoil:
448 self.run_actions(config_params) 452 self.run_actions(config_params)
449 self.recipes_parsed = True 453 self.recipes_parsed = True
450 454
451 def run_command(self, command, *params): 455 def run_command(self, command, *params, handle_events=True):
452 """ 456 """
453 Run a command on the server (as implemented in bb.command). 457 Run a command on the server (as implemented in bb.command).
454 Note that there are two types of command - synchronous and 458 Note that there are two types of command - synchronous and
@@ -465,7 +469,16 @@ class Tinfoil:
465 commandline = [command] 469 commandline = [command]
466 if params: 470 if params:
467 commandline.extend(params) 471 commandline.extend(params)
468 result = self.server_connection.connection.runCommand(commandline) 472 try:
473 result = self.server_connection.connection.runCommand(commandline)
474 finally:
475 while handle_events:
476 event = self.wait_event()
477 if not event:
478 break
479 if isinstance(event, logging.LogRecord):
480 if event.taskpid == 0 or event.levelno > logging.INFO:
481 self.logger.handle(event)
469 if result[1]: 482 if result[1]:
470 raise TinfoilCommandFailed(result[1]) 483 raise TinfoilCommandFailed(result[1])
471 return result[0] 484 return result[0]
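
With run_command() now draining the event queue, log records the server emits while a command executes are replayed through the tinfoil logger instead of accumulating unseen (pass handle_events=False to opt out). Typical client usage is unchanged; a sketch against the public tinfoil API:

    import bb.tinfoil

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=True)
        # Server-side log records emitted during this call are now
        # handled by tinfoil's logger rather than left on the queue.
        print(tinfoil.run_command("getVariable", "BB_VERSION"))
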
diff --git a/bitbake/lib/bb/ui/knotty.py b/bitbake/lib/bb/ui/knotty.py
index 87e873d644..d1f74389db 100644
--- a/bitbake/lib/bb/ui/knotty.py
+++ b/bitbake/lib/bb/ui/knotty.py
@@ -227,7 +227,9 @@ class TerminalFilter(object):
227 227
228 def keepAlive(self, t): 228 def keepAlive(self, t):
229 if not self.cuu: 229 if not self.cuu:
230 print("Bitbake still alive (%ds)" % t) 230 print("Bitbake still alive (no events for %ds). Active tasks:" % t)
 231 for task in self.helper.running_tasks:
 232 print(task)
231 sys.stdout.flush() 233 sys.stdout.flush()
232 234
233 def updateFooter(self): 235 def updateFooter(self):
@@ -380,14 +382,27 @@ _evt_list = [ "bb.runqueue.runQueueExitWait", "bb.event.LogExecTTY", "logging.Lo
380 "bb.event.BuildBase", "bb.build.TaskStarted", "bb.build.TaskSucceeded", "bb.build.TaskFailedSilent", 382 "bb.event.BuildBase", "bb.build.TaskStarted", "bb.build.TaskSucceeded", "bb.build.TaskFailedSilent",
381 "bb.build.TaskProgress", "bb.event.ProcessStarted", "bb.event.ProcessProgress", "bb.event.ProcessFinished"] 383 "bb.build.TaskProgress", "bb.event.ProcessStarted", "bb.event.ProcessProgress", "bb.event.ProcessFinished"]
382 384
385def drain_events_errorhandling(eventHandler):
 386 # We don't have logging set up yet, but we do need to show any events we see before exiting
387 event = True
388 logger = bb.msg.logger_create('bitbake', sys.stdout)
389 while event:
390 event = eventHandler.waitEvent(0)
391 if isinstance(event, logging.LogRecord):
392 logger.handle(event)
393
383def main(server, eventHandler, params, tf = TerminalFilter): 394def main(server, eventHandler, params, tf = TerminalFilter):
384 395
385 if not params.observe_only: 396 try:
386 params.updateToServer(server, os.environ.copy()) 397 if not params.observe_only:
398 params.updateToServer(server, os.environ.copy())
387 399
388 includelogs, loglines, consolelogfile, logconfigfile = _log_settings_from_server(server, params.observe_only) 400 includelogs, loglines, consolelogfile, logconfigfile = _log_settings_from_server(server, params.observe_only)
389 401
390 loglevel, _ = bb.msg.constructLogOptions() 402 loglevel, _ = bb.msg.constructLogOptions()
403 except bb.BBHandledException:
404 drain_events_errorhandling(eventHandler)
405 return 1
391 406
392 if params.options.quiet == 0: 407 if params.options.quiet == 0:
393 console_loglevel = loglevel 408 console_loglevel = loglevel
@@ -584,7 +599,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
584 warnings = 0 599 warnings = 0
585 taskfailures = [] 600 taskfailures = []
586 601
587 printinterval = 5000 602 printintervaldelta = 10 * 60 # 10 minutes
603 printinterval = printintervaldelta
588 lastprint = time.time() 604 lastprint = time.time()
589 605
590 termfilter = tf(main, helper, console_handlers, params.options.quiet) 606 termfilter = tf(main, helper, console_handlers, params.options.quiet)
@@ -594,7 +610,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
594 try: 610 try:
595 if (lastprint + printinterval) <= time.time(): 611 if (lastprint + printinterval) <= time.time():
596 termfilter.keepAlive(printinterval) 612 termfilter.keepAlive(printinterval)
597 printinterval += 5000 613 printinterval += printintervaldelta
598 event = eventHandler.waitEvent(0) 614 event = eventHandler.waitEvent(0)
599 if event is None: 615 if event is None:
600 if main.shutdown > 1: 616 if main.shutdown > 1:
@@ -625,7 +641,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
625 641
626 if isinstance(event, logging.LogRecord): 642 if isinstance(event, logging.LogRecord):
627 lastprint = time.time() 643 lastprint = time.time()
628 printinterval = 5000 644 printinterval = printintervaldelta
629 if event.levelno >= bb.msg.BBLogFormatter.ERROR: 645 if event.levelno >= bb.msg.BBLogFormatter.ERROR:
630 errors = errors + 1 646 errors = errors + 1
631 return_value = 1 647 return_value = 1
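
The keep-alive notice now backs off sensibly: with no events at all it fires every 10 minutes of continued silence, reporting the cumulative quiet time and the still-running tasks, and the counter resets whenever a log record arrives. The interval bookkeeping in isolation (a sketch of the logic above):

    import time

    printintervaldelta = 10 * 60          # 10 minutes
    printinterval = printintervaldelta
    lastprint = time.time()

    def poll(now):
        global printinterval
        if lastprint + printinterval <= now:
            print("Bitbake still alive (no events for %ds)" % printinterval)
            printinterval += printintervaldelta   # next notice another 10 minutes on
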
diff --git a/bitbake/lib/bb/ui/taskexp.py b/bitbake/lib/bb/ui/taskexp.py
index 2b246710ca..c00eaf6638 100644
--- a/bitbake/lib/bb/ui/taskexp.py
+++ b/bitbake/lib/bb/ui/taskexp.py
@@ -8,6 +8,7 @@
8# 8#
9 9
10import sys 10import sys
11import traceback
11 12
12try: 13try:
13 import gi 14 import gi
@@ -196,6 +197,7 @@ def main(server, eventHandler, params):
196 gtkgui.start() 197 gtkgui.start()
197 198
198 try: 199 try:
200 params.updateToServer(server, os.environ.copy())
199 params.updateFromServer(server) 201 params.updateFromServer(server)
200 cmdline = params.parseActions() 202 cmdline = params.parseActions()
201 if not cmdline: 203 if not cmdline:
@@ -218,6 +220,9 @@ def main(server, eventHandler, params):
218 except client.Fault as x: 220 except client.Fault as x:
219 print("XMLRPC Fault getting commandline:\n %s" % x) 221 print("XMLRPC Fault getting commandline:\n %s" % x)
220 return 222 return
 223 except Exception: 225 except Exception:
224 print("Exception in startup:\n %s" % traceback.format_exc())
225 return
221 226
222 if gtkthread.quit.isSet(): 227 if gtkthread.quit.isSet():
223 return 228 return
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
index 5f5767c1da..34fa0b7a67 100644
--- a/bitbake/lib/bb/utils.py
+++ b/bitbake/lib/bb/utils.py
@@ -16,7 +16,8 @@ import bb.msg
16import multiprocessing 16import multiprocessing
17import fcntl 17import fcntl
18import importlib 18import importlib
19from importlib import machinery 19import importlib.machinery
20import importlib.util
20import itertools 21import itertools
21import subprocess 22import subprocess
22import glob 23import glob
@@ -420,12 +421,14 @@ def better_eval(source, locals, extraglobals = None):
420 return eval(source, ctx, locals) 421 return eval(source, ctx, locals)
421 422
422@contextmanager 423@contextmanager
423def fileslocked(files): 424def fileslocked(files, *args, **kwargs):
424 """Context manager for locking and unlocking file locks.""" 425 """Context manager for locking and unlocking file locks."""
425 locks = [] 426 locks = []
426 if files: 427 if files:
427 for lockfile in files: 428 for lockfile in files:
428 locks.append(bb.utils.lockfile(lockfile)) 429 l = bb.utils.lockfile(lockfile, *args, **kwargs)
430 if l is not None:
431 locks.append(l)
429 432
430 try: 433 try:
431 yield 434 yield
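[Note] fileslocked() now forwards extra arguments to bb.utils.lockfile() and skips entries for which no lock was obtained, since lockfile() can return None on a non-blocking, non-retrying attempt. Hypothetical usage, with made-up paths:

    import bb.utils

    # shared=True, retry=False are passed straight through to lockfile();
    # locks that could not be taken are skipped rather than held.
    with bb.utils.fileslocked(["/tmp/a.lock", "/tmp/b.lock"], shared=True, retry=False):
        pass  # critical section goes here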
@@ -458,9 +461,16 @@ def lockfile(name, shared=False, retry=True, block=False):
458 consider the possibility of sending a signal to the process to break 461 consider the possibility of sending a signal to the process to break
459 out - at which point you want block=True rather than retry=True. 462 out - at which point you want block=True rather than retry=True.
460 """ 463 """
464 basename = os.path.basename(name)
465 if len(basename) > 255:
466 root, ext = os.path.splitext(basename)
467 basename = root[:255 - len(ext)] + ext
468
461 dirname = os.path.dirname(name) 469 dirname = os.path.dirname(name)
462 mkdirhier(dirname) 470 mkdirhier(dirname)
463 471
472 name = os.path.join(dirname, basename)
473
464 if not os.access(dirname, os.W_OK): 474 if not os.access(dirname, os.W_OK):
465 logger.error("Unable to acquire lock '%s', directory is not writable", 475 logger.error("Unable to acquire lock '%s', directory is not writable",
466 name) 476 name)
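[Note] The new preamble clamps the lock file's basename to 255 characters, the usual filesystem NAME_MAX, while preserving the extension. The same logic as a standalone helper (the function name is illustrative, not from the diff):

    import os

    def clamp_basename(path, limit=255):
        dirname, basename = os.path.split(path)
        if len(basename) > limit:
            # Trim the stem, keep the extension, exactly as above.
            root, ext = os.path.splitext(basename)
            basename = root[:limit - len(ext)] + ext
        return os.path.join(dirname, basename)

    assert len(os.path.basename(clamp_basename("/tmp/" + "x" * 300 + ".lock"))) == 255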
@@ -494,7 +504,7 @@ def lockfile(name, shared=False, retry=True, block=False):
494 return lf 504 return lf
495 lf.close() 505 lf.close()
496 except OSError as e: 506 except OSError as e:
497 if e.errno == errno.EACCES: 507 if e.errno == errno.EACCES or e.errno == errno.ENAMETOOLONG:
498 logger.error("Unable to acquire lock '%s', %s", 508 logger.error("Unable to acquire lock '%s', %s",
499 name, e.strerror) 509 name, e.strerror)
500 sys.exit(1) 510 sys.exit(1)
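[Note] ENAMETOOLONG is now fatal alongside EACCES: clamping the basename cannot help when a directory component is itself over-long or the whole path exceeds PATH_MAX. (The arguments to logger.error() are also put in the order its format string expects: lock name first, then the error string.) A minimal sketch of the errno check, with a hypothetical path:

    import errno

    lock_path = "/tmp/" + "y" * 300 + "/example.lock"  # hypothetical over-long directory
    try:
        lf = open(lock_path, 'a+')
    except OSError as e:
        if e.errno in (errno.EACCES, errno.ENAMETOOLONG):
            raise SystemExit(1)  # unrecoverable, as in the hunk above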
@@ -959,6 +969,17 @@ def which(path, item, direction = 0, history = False, executable=False):
959 return "", hist 969 return "", hist
960 return "" 970 return ""
961 971
972@contextmanager
973def umask(new_mask):
974 """
975 Context manager to set the umask to a specific mask, and restore it afterwards.
976 """
977 current_mask = os.umask(new_mask)
978 try:
979 yield
980 finally:
981 os.umask(current_mask)
982
962def to_boolean(string, default=None): 983def to_boolean(string, default=None):
963 if not string: 984 if not string:
964 return default 985 return default
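[Note] The new umask() context manager saves the process umask on entry and restores it on exit, including when the block raises. Hypothetical usage:

    import bb.utils

    # Files created inside the block keep group-write permission;
    # the caller's previous umask is restored afterwards.
    with bb.utils.umask(0o002):
        open("/tmp/group-writable.log", "w").close()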
@@ -1560,21 +1581,22 @@ def set_process_name(name):
1560 1581
1561# export common proxies variables from datastore to environment 1582# export common proxies variables from datastore to environment
1562def export_proxies(d): 1583def export_proxies(d):
1563 import os 1584 """ export common proxies variables from datastore to environment """
1564 1585
1565 variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY', 1586 variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY',
1566 'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY', 1587 'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY',
1567 'GIT_PROXY_COMMAND'] 1588 'GIT_PROXY_COMMAND', 'SSL_CERT_FILE', 'SSL_CERT_DIR']
1568 exported = False 1589 exported = False
1569 1590
1570 for v in variables: 1591 origenv = d.getVar("BB_ORIGENV")
1571 if v in os.environ.keys(): 1592
1593 for name in variables:
1594 value = d.getVar(name)
1595 if not value and origenv:
1596 value = origenv.getVar(name)
1597 if value:
1598 os.environ[name] = value
1572 exported = True 1599 exported = True
1573 else:
1574 v_proxy = d.getVar(v)
1575 if v_proxy is not None:
1576 os.environ[v] = v_proxy
1577 exported = True
1578 1600
1579 return exported 1601 return exported
1580 1602
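[Note] export_proxies() now reads each variable from the datastore first and falls back to BB_ORIGENV, a snapshot of the environment BitBake was originally launched from, and the variable list gains SSL_CERT_FILE and SSL_CERT_DIR. The lookup order for a single variable, unrolled (d is the datastore passed in):

    import os

    value = d.getVar("https_proxy")
    if not value:
        origenv = d.getVar("BB_ORIGENV")
        if origenv:
            # BB_ORIGENV is itself a datastore wrapping the
            # original environment.
            value = origenv.getVar("https_proxy")
    if value:
        os.environ["https_proxy"] = value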
@@ -1584,7 +1606,9 @@ def load_plugins(logger, plugins, pluginpath):
1584 logger.debug(1, 'Loading plugin %s' % name) 1606 logger.debug(1, 'Loading plugin %s' % name)
1585 spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] ) 1607 spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] )
1586 if spec: 1608 if spec:
1587 return spec.loader.load_module() 1609 mod = importlib.util.module_from_spec(spec)
1610 spec.loader.exec_module(mod)
1611 return mod
1588 1612
1589 logger.debug(1, 'Loading plugins from %s...' % pluginpath) 1613 logger.debug(1, 'Loading plugins from %s...' % pluginpath)
1590 1614
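[Note] spec.loader.load_module() has been deprecated since Python 3.4; the hunk above switches to the supported module_from_spec()/exec_module() pair, which the new importlib.machinery and importlib.util imports in utils.py serve. The same pattern as a self-contained helper (the function name is illustrative):

    import importlib.machinery
    import importlib.util

    def load_plugin(name, pluginpath):
        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath])
        if spec:
            mod = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(mod)  # replaces deprecated load_module()
            return mod
        return None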