Diffstat (limited to 'bitbake/lib')
-rw-r--r--  bitbake/lib/bb/asyncrpc/client.py | 6
-rw-r--r--  bitbake/lib/bb/command.py | 21
-rw-r--r--  bitbake/lib/bb/cooker.py | 43
-rw-r--r--  bitbake/lib/bb/data_smart.py | 8
-rw-r--r--  bitbake/lib/bb/event.py | 19
-rw-r--r--  bitbake/lib/bb/exceptions.py | 96
-rw-r--r--  bitbake/lib/bb/fetch2/__init__.py | 64
-rw-r--r--  bitbake/lib/bb/fetch2/gcp.py | 14
-rw-r--r--  bitbake/lib/bb/fetch2/git.py | 3
-rw-r--r--  bitbake/lib/bb/fetch2/gitsm.py | 44
-rw-r--r--  bitbake/lib/bb/fetch2/wget.py | 4
-rw-r--r--  bitbake/lib/bb/msg.py | 4
-rw-r--r--  bitbake/lib/bb/parse/ast.py | 20
-rw-r--r--  bitbake/lib/bb/persist_data.py | 1
-rw-r--r--  bitbake/lib/bb/runqueue.py | 22
-rw-r--r--  bitbake/lib/bb/tests/fetch.py | 6
-rw-r--r--  bitbake/lib/bb/tinfoil.py | 16
-rw-r--r--  bitbake/lib/bb/ui/knotty.py | 20
-rw-r--r--  bitbake/lib/bb/ui/teamcity.py | 5
-rw-r--r--  bitbake/lib/bb/utils.py | 20
-rw-r--r--  bitbake/lib/bblayers/query.py | 15
-rw-r--r--  bitbake/lib/toaster/tests/builds/buildtest.py | 2
22 files changed, 214 insertions, 239 deletions
diff --git a/bitbake/lib/bb/asyncrpc/client.py b/bitbake/lib/bb/asyncrpc/client.py
index a350b4fb12..6fa2839f48 100644
--- a/bitbake/lib/bb/asyncrpc/client.py
+++ b/bitbake/lib/bb/asyncrpc/client.py
@@ -87,7 +87,11 @@ class AsyncClient(object):
         import websockets
 
         async def connect_sock():
-            websocket = await websockets.connect(uri, ping_interval=None)
+            websocket = await websockets.connect(
+                uri,
+                ping_interval=None,
+                open_timeout=self.timeout,
+            )
             return WebsocketConnection(websocket, self.timeout)
 
         self._connect_sock = connect_sock
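
Note: open_timeout bounds the websocket handshake itself; previously only the established connection honoured self.timeout. A minimal sketch of the same pattern (the URI and payload are placeholders):

    import asyncio
    import websockets

    async def connect_with_timeout(uri, timeout=30):
        # Bound connection setup (open_timeout) as well as later operations.
        websocket = await websockets.connect(
            uri,
            ping_interval=None,
            open_timeout=timeout,
        )
        try:
            await asyncio.wait_for(websocket.send("ping"), timeout)
        finally:
            await websocket.close()

    # asyncio.run(connect_with_timeout("ws://localhost:8765"))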
diff --git a/bitbake/lib/bb/command.py b/bitbake/lib/bb/command.py
index 1fcb9bf14c..5e166fe45c 100644
--- a/bitbake/lib/bb/command.py
+++ b/bitbake/lib/bb/command.py
@@ -420,15 +420,30 @@ class CommandsSync:
         return command.cooker.recipecaches[mc].pkg_dp
     getDefaultPreference.readonly = True
 
+
     def getSkippedRecipes(self, command, params):
+        """
+        Get the map of skipped recipes for the specified multiconfig/mc name (`params[0]`).
+
+        Invoked by `bb.tinfoil.Tinfoil.get_skipped_recipes`
+
+        :param command: Internally used parameter.
+        :param params: Parameter array. params[0] is multiconfig/mc name. If not given, then default mc '' is assumed.
+        :return: Dict whose keys are virtualfns and values are `bb.cooker.SkippedPackage`
+        """
+        try:
+            mc = params[0]
+        except IndexError:
+            mc = ''
+
         # Return list sorted by reverse priority order
         import bb.cache
         def sortkey(x):
             vfn, _ = x
-            realfn, _, mc = bb.cache.virtualfn2realfn(vfn)
-            return (-command.cooker.collections[mc].calc_bbfile_priority(realfn)[0], vfn)
+            realfn, _, item_mc = bb.cache.virtualfn2realfn(vfn)
+            return -command.cooker.collections[item_mc].calc_bbfile_priority(realfn)[0], vfn
 
-        skipdict = OrderedDict(sorted(command.cooker.skiplist.items(), key=sortkey))
+        skipdict = OrderedDict(sorted(command.cooker.skiplist_by_mc[mc].items(), key=sortkey))
         return list(skipdict.items())
     getSkippedRecipes.readonly = True
 
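
For context, a hedged illustration of how the new mc parameter reaches this command from client code through tinfoil (requires a configured BitBake environment; the "mymc" multiconfig name is a placeholder):

    import bb.tinfoil

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare()
        # The default multiconfig is the empty string.
        for mc in ('', 'mymc'):
            skipped = tinfoil.get_skipped_recipes(mc)
            for virtualfn, skipdata in skipped.items():
                print(mc or 'default', virtualfn, skipdata.skipreason)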
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index 6318ef4a8f..6fce19b464 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -17,7 +17,7 @@ import threading
 from io import StringIO, UnsupportedOperation
 from contextlib import closing
 from collections import defaultdict, namedtuple
-import bb, bb.exceptions, bb.command
+import bb, bb.command
 from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
 import queue
 import signal
@@ -134,7 +134,8 @@ class BBCooker:
         self.baseconfig_valid = False
         self.parsecache_valid = False
         self.eventlog = None
-        self.skiplist = {}
+        # The skiplists, one per multiconfig
+        self.skiplist_by_mc = defaultdict(dict)
         self.featureset = CookerFeatures()
         if featureSet:
             for f in featureSet:
@@ -612,8 +613,8 @@ class BBCooker:
         localdata = {}
 
         for mc in self.multiconfigs:
-            taskdata[mc] = bb.taskdata.TaskData(halt, skiplist=self.skiplist, allowincomplete=allowincomplete)
-            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
+            taskdata[mc] = bb.taskdata.TaskData(halt, skiplist=self.skiplist_by_mc[mc], allowincomplete=allowincomplete)
+            localdata[mc] = bb.data.createCopy(self.databuilder.mcdata[mc])
             bb.data.expandKeys(localdata[mc])
 
         current = 0
@@ -933,7 +934,7 @@ class BBCooker:
         for mc in self.multiconfigs:
             # First get list of recipes, including skipped
             recipefns = list(self.recipecaches[mc].pkg_fn.keys())
-            recipefns.extend(self.skiplist.keys())
+            recipefns.extend(self.skiplist_by_mc[mc].keys())
 
             # Work out list of bbappends that have been applied
             applied_appends = []
@@ -2097,7 +2098,6 @@ class Parser(multiprocessing.Process):
         except Exception as exc:
             tb = sys.exc_info()[2]
             exc.recipe = filename
-            exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
             return True, None, exc
         # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
         # and for example a worker thread doesn't just exit on its own in response to
@@ -2298,8 +2298,12 @@ class CookerParser(object):
             return False
         except ParsingFailure as exc:
             self.error += 1
-            logger.error('Unable to parse %s: %s' %
-                       (exc.recipe, bb.exceptions.to_string(exc.realexception)))
+
+            exc_desc = str(exc)
+            if isinstance(exc, SystemExit) and not isinstance(exc.code, str):
+                exc_desc = 'Exited with "%d"' % exc.code
+
+            logger.error('Unable to parse %s: %s' % (exc.recipe, exc_desc))
             self.shutdown(clean=False)
             return False
         except bb.parse.ParseError as exc:
@@ -2308,20 +2312,33 @@ class CookerParser(object):
             self.shutdown(clean=False, eventmsg=str(exc))
             return False
         except bb.data_smart.ExpansionError as exc:
+            def skip_frames(f, fn_prefix):
+                while f and f.tb_frame.f_code.co_filename.startswith(fn_prefix):
+                    f = f.tb_next
+                return f
+
             self.error += 1
             bbdir = os.path.dirname(__file__) + os.sep
-            etype, value, _ = sys.exc_info()
-            tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
+            etype, value, tb = sys.exc_info()
+
+            # Remove any frames where the code comes from bitbake. This
+            # prevents deep (and pretty useless) backtraces for expansion error
+            tb = skip_frames(tb, bbdir)
+            cur = tb
+            while cur:
+                cur.tb_next = skip_frames(cur.tb_next, bbdir)
+                cur = cur.tb_next
+
             logger.error('ExpansionError during parsing %s', value.recipe,
                          exc_info=(etype, value, tb))
             self.shutdown(clean=False)
             return False
         except Exception as exc:
             self.error += 1
-            etype, value, tb = sys.exc_info()
+            _, value, _ = sys.exc_info()
             if hasattr(value, "recipe"):
                 logger.error('Unable to parse %s' % value.recipe,
-                            exc_info=(etype, value, exc.traceback))
+                            exc_info=sys.exc_info())
             else:
                 # Most likely, an exception occurred during raising an exception
                 import traceback
@@ -2342,7 +2359,7 @@ class CookerParser(object):
         for virtualfn, info_array in result:
             if info_array[0].skipped:
                 self.skipped += 1
-                self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
+                self.cooker.skiplist_by_mc[mc][virtualfn] = SkippedPackage(info_array[0])
             self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
                                         parsed=parsed, watcher = self.cooker.add_filewatch)
         return True
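
The ExpansionError handling above trims traceback frames in place instead of relying on the removed bb.exceptions helpers. A standalone sketch of the same frame-dropping idea, assuming only the stdlib (the prefix argument is illustrative):

    import sys
    import traceback

    def drop_frames_with_prefix(tb, prefix):
        # Skip leading frames whose source file lives under 'prefix'.
        while tb and tb.tb_frame.f_code.co_filename.startswith(prefix):
            tb = tb.tb_next
        return tb

    def format_trimmed_exception(prefix):
        etype, value, tb = sys.exc_info()
        tb = drop_frames_with_prefix(tb, prefix)
        cur = tb
        while cur:
            # tb_next is writable since Python 3.7, so later frames from
            # 'prefix' can be unlinked as well.
            cur.tb_next = drop_frames_with_prefix(cur.tb_next, prefix)
            cur = cur.tb_next
        return "".join(traceback.format_exception(etype, value, tb))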
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py
index c6049d578e..7b67127c06 100644
--- a/bitbake/lib/bb/data_smart.py
+++ b/bitbake/lib/bb/data_smart.py
@@ -31,7 +31,7 @@ logger = logging.getLogger("BitBake.Data")
 
 __setvar_keyword__ = [":append", ":prepend", ":remove"]
 __setvar_regexp__ = re.compile(r'(?P<base>.*?)(?P<keyword>:append|:prepend|:remove)(:(?P<add>[^A-Z]*))?$')
-__expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+?}")
+__expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+}")
 __expand_python_regexp__ = re.compile(r"\${@(?:{.*?}|.)+?}")
 __whitespace_split__ = re.compile(r'(\s)')
 __override_regexp__ = re.compile(r'[a-z0-9]+')
@@ -580,12 +580,9 @@ class DataSmart(MutableMapping):
         else:
             loginfo['op'] = keyword
             self.varhistory.record(**loginfo)
-            # todo make sure keyword is not __doc__ or __module__
-            # pay the cookie monster
 
         # more cookies for the cookie monster
-        if ':' in var:
-            self._setvar_update_overrides(base, **loginfo)
+        self._setvar_update_overrides(base, **loginfo)
 
         if base in self.overridevars:
             self._setvar_update_overridevars(var, value)
@@ -638,6 +635,7 @@
                 nextnew.update(vardata.contains.keys())
             new = nextnew
         self.overrides = None
+        self.expand_cache = {}
 
     def _setvar_update_overrides(self, var, **loginfo):
         # aka pay the cookie monster
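
The __expand_var_regexp__ change drops the lazy quantifier; since '}' cannot appear inside the bracketed character class, the greedy form matches the same references with less backtracking. A quick illustrative check (variable names are examples):

    import re

    lazy = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+?}")
    greedy = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+}")

    s = "CFLAGS = ${TARGET_CFLAGS} -I${STAGING_INCDIR}/glib-2.0"
    # Both patterns find the same variable references; '}' terminates the
    # match either way because it is not in the character class.
    assert lazy.findall(s) == greedy.findall(s) == ["${TARGET_CFLAGS}", "${STAGING_INCDIR}"]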
diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py
index 4761c86880..a12adbc937 100644
--- a/bitbake/lib/bb/event.py
+++ b/bitbake/lib/bb/event.py
@@ -19,7 +19,6 @@ import sys
 import threading
 import traceback
 
-import bb.exceptions
 import bb.utils
 
 # This is the pid for which we should generate the event. This is set when
@@ -195,7 +194,12 @@ def fire_ui_handlers(event, d):
         ui_queue.append(event)
         return
 
-    with bb.utils.lock_timeout(_thread_lock):
+    with bb.utils.lock_timeout_nocheck(_thread_lock) as lock:
+        if not lock:
+            # If we can't get the lock, we may be recursively called, queue and return
+            ui_queue.append(event)
+            return
+
         errors = []
         for h in _ui_handlers:
             #print "Sending event %s" % event
@@ -214,6 +218,9 @@
         for h in errors:
             del _ui_handlers[h]
 
+    while ui_queue:
+        fire_ui_handlers(ui_queue.pop(), d)
+
 def fire(event, d):
     """Fire off an Event"""
 
@@ -759,13 +766,7 @@
 
     def emit(self, record):
         if record.exc_info:
-            etype, value, tb = record.exc_info
-            if hasattr(tb, 'tb_next'):
-                tb = list(bb.exceptions.extract_traceback(tb, context=3))
-            # Need to turn the value into something the logging system can pickle
-            record.bb_exc_info = (etype, value, tb)
-            record.bb_exc_formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
-            value = str(value)
+            record.bb_exc_formatted = traceback.format_exception(*record.exc_info)
             record.exc_info = None
         fire(record, None)
 
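
With bb.exceptions gone, the log handler leans on the standard library: traceback.format_exception already returns a list of strings that can be pickled and joined later. A minimal sketch of the idea (the record class here is a stand-in, not the logging API):

    import sys
    import traceback

    class PicklableRecord:
        """Stand-in for a log record that must cross process boundaries."""
        def __init__(self, exc_info):
            # Store preformatted text instead of the traceback object,
            # which cannot be pickled.
            self.bb_exc_formatted = traceback.format_exception(*exc_info)
            self.exc_info = None

    try:
        raise ValueError("boom")
    except ValueError:
        record = PicklableRecord(sys.exc_info())
        print(''.join(record.bb_exc_formatted))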
diff --git a/bitbake/lib/bb/exceptions.py b/bitbake/lib/bb/exceptions.py
deleted file mode 100644
index 801db9c82f..0000000000
--- a/bitbake/lib/bb/exceptions.py
+++ /dev/null
@@ -1,96 +0,0 @@
-#
-# Copyright BitBake Contributors
-#
-# SPDX-License-Identifier: GPL-2.0-only
-#
-
-import inspect
-import traceback
-import bb.namedtuple_with_abc
-from collections import namedtuple
-
-
-class TracebackEntry(namedtuple.abc):
-    """Pickleable representation of a traceback entry"""
-    _fields = 'filename lineno function args code_context index'
-    _header = '  File "{0.filename}", line {0.lineno}, in {0.function}{0.args}'
-
-    def format(self, formatter=None):
-        if not self.code_context:
-            return self._header.format(self) + '\n'
-
-        formatted = [self._header.format(self) + ':\n']
-
-        for lineindex, line in enumerate(self.code_context):
-            if formatter:
-                line = formatter(line)
-
-            if lineindex == self.index:
-                formatted.append('    >%s' % line)
-            else:
-                formatted.append('     %s' % line)
-        return formatted
-
-    def __str__(self):
-        return ''.join(self.format())
-
-def _get_frame_args(frame):
-    """Get the formatted arguments and class (if available) for a frame"""
-    arginfo = inspect.getargvalues(frame)
-
-    try:
-        if not arginfo.args:
-            return '', None
-    # There have been reports from the field of python 2.6 which doesn't
-    # return a namedtuple here but simply a tuple so fallback gracefully if
-    # args isn't present.
-    except AttributeError:
-        return '', None
-
-    firstarg = arginfo.args[0]
-    if firstarg == 'self':
-        self = arginfo.locals['self']
-        cls = self.__class__.__name__
-
-        arginfo.args.pop(0)
-        del arginfo.locals['self']
-    else:
-        cls = None
-
-    formatted = inspect.formatargvalues(*arginfo)
-    return formatted, cls
-
-def extract_traceback(tb, context=1):
-    frames = inspect.getinnerframes(tb, context)
-    for frame, filename, lineno, function, code_context, index in frames:
-        formatted_args, cls = _get_frame_args(frame)
-        if cls:
-            function = '%s.%s' % (cls, function)
-        yield TracebackEntry(filename, lineno, function, formatted_args,
-                             code_context, index)
-
-def format_extracted(extracted, formatter=None, limit=None):
-    if limit:
-        extracted = extracted[-limit:]
-
-    formatted = []
-    for tracebackinfo in extracted:
-        formatted.extend(tracebackinfo.format(formatter))
-    return formatted
-
-
-def format_exception(etype, value, tb, context=1, limit=None, formatter=None):
-    formatted = ['Traceback (most recent call last):\n']
-
-    if hasattr(tb, 'tb_next'):
-        tb = extract_traceback(tb, context)
-
-    formatted.extend(format_extracted(tb, formatter, limit))
-    formatted.extend(traceback.format_exception_only(etype, value))
-    return formatted
-
-def to_string(exc):
-    if isinstance(exc, SystemExit):
-        if not isinstance(exc.code, str):
-            return 'Exited with "%d"' % exc.code
-    return str(exc)
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index 5bf2c4b8cf..1a6ff25d4d 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -237,7 +237,7 @@ class URI(object):
         # to RFC compliant URL format. E.g.:
         #   file://foo.diff -> file:foo.diff
         if urlp.scheme in self._netloc_forbidden:
-            uri = re.sub("(?<=:)//(?!/)", "", uri, 1)
+            uri = re.sub(r"(?<=:)//(?!/)", "", uri, count=1)
             reparse = 1
 
         if reparse:
@@ -499,30 +499,30 @@ def fetcher_init(d):
     Calls before this must not hit the cache.
     """
 
-    revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
-    try:
-        # fetcher_init is called multiple times, so make sure we only save the
-        # revs the first time it is called.
-        if not bb.fetch2.saved_headrevs:
-            bb.fetch2.saved_headrevs = dict(revs)
-    except:
-        pass
-
-    # When to drop SCM head revisions controlled by user policy
-    srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
-    if srcrev_policy == "cache":
-        logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
-    elif srcrev_policy == "clear":
-        logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
-        revs.clear()
-    else:
-        raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
-
-    _checksum_cache.init_cache(d.getVar("BB_CACHEDIR"))
+    with bb.persist_data.persist('BB_URI_HEADREVS', d) as revs:
+        try:
+            # fetcher_init is called multiple times, so make sure we only save the
+            # revs the first time it is called.
+            if not bb.fetch2.saved_headrevs:
+                bb.fetch2.saved_headrevs = dict(revs)
+        except:
+            pass
+
+        # When to drop SCM head revisions controlled by user policy
+        srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
+        if srcrev_policy == "cache":
+            logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
+        elif srcrev_policy == "clear":
+            logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
+            revs.clear()
+        else:
+            raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
+
+    _checksum_cache.init_cache(d.getVar("BB_CACHEDIR"))
 
     for m in methods:
         if hasattr(m, "init"):
             m.init(d)
 
 def fetcher_parse_save():
     _checksum_cache.save_extras()
@@ -536,8 +536,8 @@ def fetcher_compare_revisions(d):
     when bitbake was started and return true if they have changed.
     """
 
-    headrevs = dict(bb.persist_data.persist('BB_URI_HEADREVS', d))
-    return headrevs != bb.fetch2.saved_headrevs
+    with dict(bb.persist_data.persist('BB_URI_HEADREVS', d)) as headrevs:
+        return headrevs != bb.fetch2.saved_headrevs
 
 def mirror_from_string(data):
     mirrors = (data or "").replace('\\n',' ').split()
@@ -1662,13 +1662,13 @@ class FetchMethod(object):
         if not hasattr(self, "_latest_revision"):
             raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)
 
-        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
-        key = self.generate_revision_key(ud, d, name)
-        try:
-            return revs[key]
-        except KeyError:
-            revs[key] = rev = self._latest_revision(ud, d, name)
-            return rev
+        with bb.persist_data.persist('BB_URI_HEADREVS', d) as revs:
+            key = self.generate_revision_key(ud, d, name)
+            try:
+                return revs[key]
+            except KeyError:
+                revs[key] = rev = self._latest_revision(ud, d, name)
+                return rev
 
     def sortable_revision(self, ud, d, name):
         latest_rev = self._build_revision(ud, d, name)
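
These hunks switch the persist_data tables over to with-statement use so the underlying database connection is released when the block exits. A hedged sketch of the intended access pattern (the key name is an example):

    import bb.persist_data

    def lookup_headrev(d, key):
        # Assumed usage: the table behaves like a dict and closes its
        # connection when the with-block exits.
        with bb.persist_data.persist('BB_URI_HEADREVS', d) as revs:
            try:
                return revs[key]
            except KeyError:
                return None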
diff --git a/bitbake/lib/bb/fetch2/gcp.py b/bitbake/lib/bb/fetch2/gcp.py
index eb3e0c6a6b..2ee9ed2194 100644
--- a/bitbake/lib/bb/fetch2/gcp.py
+++ b/bitbake/lib/bb/fetch2/gcp.py
@@ -23,7 +23,6 @@ import urllib.parse, urllib.error
 from bb.fetch2 import FetchMethod
 from bb.fetch2 import FetchError
 from bb.fetch2 import logger
-from bb.fetch2 import runfetchcmd
 
 class GCP(FetchMethod):
     """
@@ -48,7 +47,6 @@ class GCP(FetchMethod):
         ud.basename = os.path.basename(ud.path)
 
         ud.localfile = d.expand(urllib.parse.unquote(ud.basename))
-        ud.basecmd = "gsutil stat"
 
     def get_gcp_client(self):
         from google.cloud import storage
@@ -59,17 +57,20 @@
         Fetch urls using the GCP API.
         Assumes localpath was called first.
         """
+        from google.api_core.exceptions import NotFound
         logger.debug2(f"Trying to download gs://{ud.host}{ud.path} to {ud.localpath}")
         if self.gcp_client is None:
             self.get_gcp_client()
 
-        bb.fetch2.check_network_access(d, ud.basecmd, f"gs://{ud.host}{ud.path}")
-        runfetchcmd("%s %s" % (ud.basecmd, f"gs://{ud.host}{ud.path}"), d)
+        bb.fetch2.check_network_access(d, "blob.download_to_filename", f"gs://{ud.host}{ud.path}")
 
         # Path sometimes has leading slash, so strip it
         path = ud.path.lstrip("/")
         blob = self.gcp_client.bucket(ud.host).blob(path)
-        blob.download_to_filename(ud.localpath)
+        try:
+            blob.download_to_filename(ud.localpath)
+        except NotFound:
+            raise FetchError("The GCP API threw a NotFound exception")
 
         # Additional sanity checks copied from the wget class (although there
         # are no known issues which mean these are required, treat the GCP API
@@ -91,8 +92,7 @@
         if self.gcp_client is None:
             self.get_gcp_client()
 
-        bb.fetch2.check_network_access(d, ud.basecmd, f"gs://{ud.host}{ud.path}")
-        runfetchcmd("%s %s" % (ud.basecmd, f"gs://{ud.host}{ud.path}"), d)
+        bb.fetch2.check_network_access(d, "gcp_client.bucket(ud.host).blob(path).exists()", f"gs://{ud.host}{ud.path}")
 
         # Path sometimes has leading slash, so strip it
         path = ud.path.lstrip("/")
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
index c7ff769fdf..6029144601 100644
--- a/bitbake/lib/bb/fetch2/git.py
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -926,9 +926,8 @@ class Git(FetchMethod):
             commits = None
         else:
             if not os.path.exists(rev_file) or not os.path.getsize(rev_file):
-                from pipes import quote
                 commits = bb.fetch2.runfetchcmd(
-                        "git rev-list %s -- | wc -l" % quote(rev),
+                        "git rev-list %s -- | wc -l" % shlex.quote(rev),
                         d, quiet=True).strip().lstrip('0')
             if commits:
                 open(rev_file, "w").write("%d\n" % int(commits))
diff --git a/bitbake/lib/bb/fetch2/gitsm.py b/bitbake/lib/bb/fetch2/gitsm.py
index f7f3af7212..fab4b1164c 100644
--- a/bitbake/lib/bb/fetch2/gitsm.py
+++ b/bitbake/lib/bb/fetch2/gitsm.py
@@ -147,6 +147,19 @@ class GitSM(Git):
 
         return submodules != []
 
+    def call_process_submodules(self, ud, d, extra_check, subfunc):
+        # If we're using a shallow mirror tarball it needs to be
+        # unpacked temporarily so that we can examine the .gitmodules file
+        if ud.shallow and os.path.exists(ud.fullshallow) and extra_check:
+            tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
+            try:
+                runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
+                self.process_submodules(ud, tmpdir, subfunc, d)
+            finally:
+                shutil.rmtree(tmpdir)
+        else:
+            self.process_submodules(ud, ud.clonedir, subfunc, d)
+
     def need_update(self, ud, d):
         if Git.need_update(self, ud, d):
             return True
@@ -164,15 +177,7 @@
             logger.error('gitsm: submodule update check failed: %s %s' % (type(e).__name__, str(e)))
             need_update_result = True
 
-        # If we're using a shallow mirror tarball it needs to be unpacked
-        # temporarily so that we can examine the .gitmodules file
-        if ud.shallow and os.path.exists(ud.fullshallow) and not os.path.exists(ud.clonedir):
-            tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
-            runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
-            self.process_submodules(ud, tmpdir, need_update_submodule, d)
-            shutil.rmtree(tmpdir)
-        else:
-            self.process_submodules(ud, ud.clonedir, need_update_submodule, d)
+        self.call_process_submodules(ud, d, not os.path.exists(ud.clonedir), need_update_submodule)
 
         if need_update_list:
             logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list)))
@@ -195,16 +200,7 @@
                 raise
 
         Git.download(self, ud, d)
-
-        # If we're using a shallow mirror tarball it needs to be unpacked
-        # temporarily so that we can examine the .gitmodules file
-        if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(ud, d):
-            tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
-            runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
-            self.process_submodules(ud, tmpdir, download_submodule, d)
-            shutil.rmtree(tmpdir)
-        else:
-            self.process_submodules(ud, ud.clonedir, download_submodule, d)
+        self.call_process_submodules(ud, d, self.need_update(ud, d), download_submodule)
 
     def unpack(self, ud, destdir, d):
         def unpack_submodules(ud, url, module, modpath, workdir, d):
@@ -263,14 +259,6 @@
             newfetch = Fetch([url], d, cache=False)
             urldata.extend(newfetch.expanded_urldata())
 
-        # If we're using a shallow mirror tarball it needs to be unpacked
-        # temporarily so that we can examine the .gitmodules file
-        if ud.shallow and os.path.exists(ud.fullshallow) and ud.method.need_update(ud, d):
-            tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
-            subprocess.check_call("tar -xzf %s" % ud.fullshallow, cwd=tmpdir, shell=True)
-            self.process_submodules(ud, tmpdir, add_submodule, d)
-            shutil.rmtree(tmpdir)
-        else:
-            self.process_submodules(ud, ud.clonedir, add_submodule, d)
+        self.call_process_submodules(ud, d, ud.method.need_update(ud, d), add_submodule)
 
         return urldata
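
The three copies of the shallow-tarball dance collapse into call_process_submodules, which takes the per-caller check and callback as parameters. The general shape of that refactor, independent of the fetcher internals (names here are illustrative, not BitBake API):

    import os
    import shutil
    import subprocess
    import tempfile

    def with_unpacked_tarball(tarball, workdir_parent, checkout_dir, callback):
        """Run callback against a temporary unpack of 'tarball' if it exists,
        otherwise against the existing checkout directory."""
        if tarball and os.path.exists(tarball):
            tmpdir = tempfile.mkdtemp(dir=workdir_parent)
            try:
                subprocess.check_call(["tar", "-xzf", tarball], cwd=tmpdir)
                return callback(tmpdir)
            finally:
                # Always clean up, mirroring the try/finally added upstream.
                shutil.rmtree(tmpdir)
        return callback(checkout_dir)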
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
index 2e92117634..5bb3b2f361 100644
--- a/bitbake/lib/bb/fetch2/wget.py
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -87,7 +87,7 @@ class Wget(FetchMethod):
         if not ud.localfile:
             ud.localfile = d.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."))
 
-        self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30"
+        self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 100"
 
         if ud.type == 'ftp' or ud.type == 'ftps':
             self.basecmd += " --passive-ftp"
@@ -371,7 +371,7 @@
                 except (FileNotFoundError, netrc.NetrcParseError):
                     pass
 
-                with opener.open(r, timeout=30) as response:
+                with opener.open(r, timeout=100) as response:
                     pass
             except (urllib.error.URLError, ConnectionResetError, TimeoutError) as e:
                 if try_again:
diff --git a/bitbake/lib/bb/msg.py b/bitbake/lib/bb/msg.py
index 3e18596faa..4f616ff42e 100644
--- a/bitbake/lib/bb/msg.py
+++ b/bitbake/lib/bb/msg.py
@@ -89,10 +89,6 @@ class BBLogFormatter(logging.Formatter):
         msg = logging.Formatter.format(self, record)
         if hasattr(record, 'bb_exc_formatted'):
             msg += '\n' + ''.join(record.bb_exc_formatted)
-        elif hasattr(record, 'bb_exc_info'):
-            etype, value, tb = record.bb_exc_info
-            formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
-            msg += '\n' + ''.join(formatted)
         return msg
 
     def colorize(self, record):
diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py
index 7581d003fd..327e45c8ac 100644
--- a/bitbake/lib/bb/parse/ast.py
+++ b/bitbake/lib/bb/parse/ast.py
@@ -391,6 +391,14 @@ def finalize(fn, d, variant = None):
     if d.getVar("_FAILPARSINGERRORHANDLED", False) == True:
         raise bb.BBHandledException()
 
+    while True:
+        inherits = d.getVar('__BBDEFINHERITS', False) or []
+        if not inherits:
+            break
+        inherit, filename, lineno = inherits.pop(0)
+        d.setVar('__BBDEFINHERITS', inherits)
+        bb.parse.BBHandler.inherit(inherit, filename, lineno, d, deferred=True)
+
     for var in d.getVar('__BBHANDLERS', False) or []:
         # try to add the handler
         handlerfn = d.getVarFlag(var, "filename", False)
@@ -444,14 +452,6 @@ def multi_finalize(fn, d):
         logger.debug("Appending .bbappend file %s to %s", append, fn)
         bb.parse.BBHandler.handle(append, d, True)
 
-    while True:
-        inherits = d.getVar('__BBDEFINHERITS', False) or []
-        if not inherits:
-            break
-        inherit, filename, lineno = inherits.pop(0)
-        d.setVar('__BBDEFINHERITS', inherits)
-        bb.parse.BBHandler.inherit(inherit, filename, lineno, d, deferred=True)
-
     onlyfinalise = d.getVar("__ONLYFINALISE", False)
 
     safe_d = d
@@ -487,7 +487,9 @@
                 d.setVar("BBEXTENDVARIANT", variantmap[name])
             else:
                 d.setVar("PN", "%s-%s" % (pn, name))
-            bb.parse.BBHandler.inherit(extendedmap[name], fn, 0, d)
+            inherits = d.getVar('__BBDEFINHERITS', False) or []
+            inherits.append((extendedmap[name], fn, 0))
+            d.setVar('__BBDEFINHERITS', inherits)
 
         safe_d.setVar("BBCLASSEXTEND", extended)
         _create_variants(datastores, extendedmap.keys(), extendfunc, onlyfinalise)
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py
index bcca791edf..c4454b153a 100644
--- a/bitbake/lib/bb/persist_data.py
+++ b/bitbake/lib/bb/persist_data.py
@@ -154,6 +154,7 @@ class SQLTable(collections.abc.MutableMapping):
 
     def __exit__(self, *excinfo):
         self.connection.__exit__(*excinfo)
+        self.connection.close()
 
     @_Decorators.retry()
     @_Decorators.transaction
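
Closing the sqlite connection in __exit__ matters because each with-block now opens and releases its own handle. A minimal context manager with the same shape, assuming plain sqlite3 (table and class names are illustrative):

    import sqlite3

    class KeyValueTable:
        def __init__(self, path):
            self.path = path
            self.connection = None

        def __enter__(self):
            self.connection = sqlite3.connect(self.path)
            self.connection.execute(
                "CREATE TABLE IF NOT EXISTS kv (key TEXT PRIMARY KEY, value TEXT)")
            return self

        def __exit__(self, *excinfo):
            # Commit or roll back via the connection's own context protocol...
            self.connection.__exit__(*excinfo)
            # ...then actually release the file handle, as the patch does.
            self.connection.close()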
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index 93079a9776..439da2bb44 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -14,6 +14,7 @@ import os
 import sys
 import stat
 import errno
+import itertools
 import logging
 import re
 import bb
@@ -2189,12 +2190,20 @@ class RunQueueExecute:
         if not hasattr(self, "sorted_setscene_tids"):
             # Don't want to sort this set every execution
             self.sorted_setscene_tids = sorted(self.rqdata.runq_setscene_tids)
+            # Resume looping where we left off when we returned to feed the mainloop
+            self.setscene_tids_generator = itertools.cycle(self.rqdata.runq_setscene_tids)
 
         task = None
         if not self.sqdone and self.can_start_task():
-            # Find the next setscene to run
-            for nexttask in self.sorted_setscene_tids:
+            loopcount = 0
+            # Find the next setscene to run, exit the loop when we've processed all tids or found something to execute
+            while loopcount < len(self.rqdata.runq_setscene_tids):
+                loopcount += 1
+                nexttask = next(self.setscene_tids_generator)
                 if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values() and nexttask not in self.sq_harddep_deferred:
+                    if nexttask in self.sq_deferred and self.sq_deferred[nexttask] not in self.runq_complete:
+                        # Skip deferred tasks quickly before the 'expensive' tests below - this is key to performant multiconfig builds
+                        continue
                     if nexttask not in self.sqdata.unskippable and self.sqdata.sq_revdeps[nexttask] and \
                             nexttask not in self.sq_needed_harddeps and \
                             self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and \
@@ -2224,8 +2233,7 @@
                             if t in self.runq_running and t not in self.runq_complete:
                                 continue
                     if nexttask in self.sq_deferred:
-                        if self.sq_deferred[nexttask] not in self.runq_complete:
-                            continue
+                        # Deferred tasks that were still deferred were skipped above so we now need to process
                         logger.debug("Task %s no longer deferred" % nexttask)
                         del self.sq_deferred[nexttask]
                         valid = self.rq.validate_hashes(set([nexttask]), self.cooker.data, 0, False, summary=False)
@@ -2751,8 +2759,12 @@
                 logger.debug2("%s was unavailable and is a hard dependency of %s so skipping" % (task, dep))
                 self.sq_task_failoutright(dep)
                 continue
+
+        # For performance, only compute allcovered once if needed
+        if self.sqdata.sq_deps[task]:
+            allcovered = self.scenequeue_covered | self.scenequeue_notcovered
         for dep in sorted(self.sqdata.sq_deps[task]):
-            if self.sqdata.sq_revdeps[dep].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
+            if self.sqdata.sq_revdeps[dep].issubset(allcovered):
                 if dep not in self.sq_buildable:
                     self.sq_buildable.add(dep)
 
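
itertools.cycle keeps its position between calls, so the scheduler resumes scanning setscene tids where it left off rather than rescanning the list from the start each time. A toy illustration of that resume behaviour (the tid strings are made up):

    import itertools

    tids = ["mc1:a:do_populate_sysroot", "mc1:b:do_package", "mc2:c:do_fetch"]
    scanner = itertools.cycle(tids)

    def next_candidate(predicate):
        # Look at each tid at most once per call, starting after the
        # previous call's position.
        for _ in range(len(tids)):
            tid = next(scanner)
            if predicate(tid):
                return tid
        return None

    print(next_candidate(lambda t: t.endswith("do_package")))  # mc1:b:do_package
    print(next_candidate(lambda t: t.startswith("mc2")))       # mc2:c:do_fetch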
diff --git a/bitbake/lib/bb/tests/fetch.py b/bitbake/lib/bb/tests/fetch.py
index 33cc9bcac6..afabaeba18 100644
--- a/bitbake/lib/bb/tests/fetch.py
+++ b/bitbake/lib/bb/tests/fetch.py
@@ -1419,12 +1419,12 @@ class FetchLatestVersionTest(FetcherTest):
         ("dtc", "git://git.yoctoproject.org/bbfetchtests-dtc.git;branch=master;protocol=https", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "", "")
             : "1.4.0",
         # combination version pattern
-        ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https;branch=master", "cd44ee6644c3641507fb53b8a2a69137f2971219", "", "")
+        ("sysprof", "git://git.yoctoproject.org/sysprof.git;protocol=https;branch=master", "cd44ee6644c3641507fb53b8a2a69137f2971219", "", "")
             : "1.2.0",
         ("u-boot-mkimage", "git://source.denx.de/u-boot/u-boot.git;branch=master;protocol=https", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "", "")
             : "2014.01",
         # version pattern "yyyymmdd"
-        ("mobile-broadband-provider-info", "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https;branch=master", "4ed19e11c2975105b71b956440acdb25d46a347d", "", "")
+        ("mobile-broadband-provider-info", "git://git.yoctoproject.org/mobile-broadband-provider-info.git;protocol=https;branch=master", "4ed19e11c2975105b71b956440acdb25d46a347d", "", "")
             : "20120614",
         # packages with a valid UPSTREAM_CHECK_GITTAGREGEX
         # mirror of git://anongit.freedesktop.org/xorg/driver/xf86-video-omap since network issues interfered with testing
@@ -1715,6 +1715,8 @@ class GitShallowTest(FetcherTest):
         if cwd is None:
             cwd = self.gitdir
         actual_refs = self.git(['for-each-ref', '--format=%(refname)'], cwd=cwd).splitlines()
+        # Resolve references into the same format as the comparision (needed by git 2.48 onwards)
+        actual_refs = self.git(['rev-parse', '--symbolic-full-name'] + actual_refs, cwd=cwd).splitlines()
         full_expected = self.git(['rev-parse', '--symbolic-full-name'] + expected_refs, cwd=cwd).splitlines()
         self.assertEqual(sorted(set(full_expected)), sorted(set(actual_refs)))
 
diff --git a/bitbake/lib/bb/tinfoil.py b/bitbake/lib/bb/tinfoil.py
index dcd3910cc4..4dc4590c31 100644
--- a/bitbake/lib/bb/tinfoil.py
+++ b/bitbake/lib/bb/tinfoil.py
@@ -188,11 +188,19 @@ class TinfoilCookerAdapter:
             self._cache[name] = attrvalue
             return attrvalue
 
+    class TinfoilSkiplistByMcAdapter:
+        def __init__(self, tinfoil):
+            self.tinfoil = tinfoil
+
+        def __getitem__(self, mc):
+            return self.tinfoil.get_skipped_recipes(mc)
+
     def __init__(self, tinfoil):
         self.tinfoil = tinfoil
         self.multiconfigs = [''] + (tinfoil.config_data.getVar('BBMULTICONFIG') or '').split()
         self.collections = {}
         self.recipecaches = {}
+        self.skiplist_by_mc = self.TinfoilSkiplistByMcAdapter(tinfoil)
         for mc in self.multiconfigs:
             self.collections[mc] = self.TinfoilCookerCollectionAdapter(tinfoil, mc)
             self.recipecaches[mc] = self.TinfoilRecipeCacheAdapter(tinfoil, mc)
@@ -201,8 +209,6 @@
         # Grab these only when they are requested since they aren't always used
         if name in self._cache:
             return self._cache[name]
-        elif name == 'skiplist':
-            attrvalue = self.tinfoil.get_skipped_recipes()
         elif name == 'bbfile_config_priorities':
             ret = self.tinfoil.run_command('getLayerPriorities')
             bbfile_config_priorities = []
@@ -514,12 +520,12 @@ class Tinfoil:
         """
         return defaultdict(list, self.run_command('getOverlayedRecipes', mc))
 
-    def get_skipped_recipes(self):
+    def get_skipped_recipes(self, mc=''):
         """
         Find recipes which were skipped (i.e. SkipRecipe was raised
         during parsing).
         """
-        return OrderedDict(self.run_command('getSkippedRecipes'))
+        return OrderedDict(self.run_command('getSkippedRecipes', mc))
 
     def get_all_providers(self, mc=''):
         return defaultdict(list, self.run_command('allProviders', mc))
@@ -533,6 +539,7 @@
     def get_runtime_providers(self, rdep):
         return self.run_command('getRuntimeProviders', rdep)
 
+    # TODO: teach this method about mc
    def get_recipe_file(self, pn):
         """
         Get the file name for the specified recipe/target. Raises
@@ -541,6 +548,7 @@
         """
         best = self.find_best_provider(pn)
         if not best or (len(best) > 3 and not best[3]):
+            # TODO: pass down mc
             skiplist = self.get_skipped_recipes()
             taskdata = bb.taskdata.TaskData(None, skiplist=skiplist)
             skipreasons = taskdata.get_reasons(pn)
diff --git a/bitbake/lib/bb/ui/knotty.py b/bitbake/lib/bb/ui/knotty.py
index f86999bb09..3784c93ad8 100644
--- a/bitbake/lib/bb/ui/knotty.py
+++ b/bitbake/lib/bb/ui/knotty.py
@@ -577,6 +577,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
     else:
         log_exec_tty = False
 
+    should_print_hyperlinks = sys.stdout.isatty() and os.environ.get('NO_COLOR', '') == ''
+
     helper = uihelper.BBUIHelper()
 
     # Look for the specially designated handlers which need to be passed to the
@@ -640,7 +642,7 @@
     return_value = 0
     errors = 0
     warnings = 0
-    taskfailures = []
+    taskfailures = {}
 
     printintervaldelta = 10 * 60 # 10 minutes
     printinterval = printintervaldelta
@@ -726,6 +728,8 @@
             if isinstance(event, bb.build.TaskFailed):
                 return_value = 1
                 print_event_log(event, includelogs, loglines, termfilter)
+                k = "{}:{}".format(event._fn, event._task)
+                taskfailures[k] = event.logfile
             if isinstance(event, bb.build.TaskBase):
                 logger.info(event._message)
                 continue
@@ -821,7 +825,7 @@
 
             if isinstance(event, bb.runqueue.runQueueTaskFailed):
                 return_value = 1
-                taskfailures.append(event.taskstring)
+                taskfailures.setdefault(event.taskstring)
                 logger.error(str(event))
                 continue
 
@@ -942,11 +946,21 @@
     try:
         termfilter.clearFooter()
         summary = ""
+        def format_hyperlink(url, link_text):
+            if should_print_hyperlinks:
+                start = f'\033]8;;{url}\033\\'
+                end = '\033]8;;\033\\'
+                return f'{start}{link_text}{end}'
+            return link_text
+
         if taskfailures:
             summary += pluralise("\nSummary: %s task failed:",
                                  "\nSummary: %s tasks failed:", len(taskfailures))
-            for failure in taskfailures:
+            for (failure, log_file) in taskfailures.items():
                 summary += "\n %s" % failure
+                if log_file:
+                    hyperlink = format_hyperlink(f"file://{log_file}", log_file)
+                    summary += "\n log: {}".format(hyperlink)
         if warnings:
             summary += pluralise("\nSummary: There was %s WARNING message.",
                                  "\nSummary: There were %s WARNING messages.", warnings)
diff --git a/bitbake/lib/bb/ui/teamcity.py b/bitbake/lib/bb/ui/teamcity.py
index fca46c2874..7eeaab8d63 100644
--- a/bitbake/lib/bb/ui/teamcity.py
+++ b/bitbake/lib/bb/ui/teamcity.py
@@ -30,7 +30,6 @@ import bb.build
 import bb.command
 import bb.cooker
 import bb.event
-import bb.exceptions
 import bb.runqueue
 from bb.ui import uihelper
 
@@ -102,10 +101,6 @@ class TeamcityLogFormatter(logging.Formatter):
         details = ""
         if hasattr(record, 'bb_exc_formatted'):
             details = ''.join(record.bb_exc_formatted)
-        elif hasattr(record, 'bb_exc_info'):
-            etype, value, tb = record.bb_exc_info
-            formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
-            details = ''.join(formatted)
 
         if record.levelno in [bb.msg.BBLogFormatter.ERROR, bb.msg.BBLogFormatter.CRITICAL]:
             # ERROR gets a separate errorDetails field
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
index ebee65d3dd..67e22f4389 100644
--- a/bitbake/lib/bb/utils.py
+++ b/bitbake/lib/bb/utils.py
@@ -1857,12 +1857,30 @@ def path_is_descendant(descendant, ancestor):
 # If we don't have a timeout of some kind and a process/thread exits badly (for example
 # OOM killed) and held a lock, we'd just hang in the lock futex forever. It is better
 # we exit at some point than hang. 5 minutes with no progress means we're probably deadlocked.
+# This function can still deadlock python since it can't signal the other threads to exit
+# (signals are handled in the main thread) and even os._exit() will wait on non-daemon threads
+# to exit.
 @contextmanager
 def lock_timeout(lock):
-    held = lock.acquire(timeout=5*60)
     try:
+        s = signal.pthread_sigmask(signal.SIG_BLOCK, signal.valid_signals())
+        held = lock.acquire(timeout=5*60)
         if not held:
+            bb.server.process.serverlog("Couldn't get the lock for 5 mins, timed out, exiting.\n%s" % traceback.format_stack())
             os._exit(1)
         yield held
     finally:
         lock.release()
+        signal.pthread_sigmask(signal.SIG_SETMASK, s)
+
+# A version of lock_timeout without the check that the lock was locked and a shorter timeout
+@contextmanager
+def lock_timeout_nocheck(lock):
+    try:
+        s = signal.pthread_sigmask(signal.SIG_BLOCK, signal.valid_signals())
+        l = lock.acquire(timeout=10)
+        yield l
+    finally:
+        if l:
+            lock.release()
+        signal.pthread_sigmask(signal.SIG_SETMASK, s)
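
lock_timeout_nocheck yields whatever acquire() returned instead of exiting, so callers must check the value before assuming they hold the lock, as fire_ui_handlers now does. A sketch of the caller side (the shared state here is a placeholder):

    import threading
    import bb.utils

    _lock = threading.Lock()
    _pending = []

    def deliver(item):
        with bb.utils.lock_timeout_nocheck(_lock) as held:
            if not held:
                # Could not get the lock (possibly re-entered); queue and retry later.
                _pending.append(item)
                return
            # Lock held: safe to touch shared state here.
            print("delivered", item)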
diff --git a/bitbake/lib/bblayers/query.py b/bitbake/lib/bblayers/query.py
index bfc18a7593..9b2e081cfd 100644
--- a/bitbake/lib/bblayers/query.py
+++ b/bitbake/lib/bblayers/query.py
@@ -142,10 +142,11 @@ skipped recipes will also be listed, with a " (skipped)" suffix.
         # Ensure we list skipped recipes
         # We are largely guessing about PN, PV and the preferred version here,
         # but we have no choice since skipped recipes are not fully parsed
-        skiplist = list(self.tinfoil.cooker.skiplist.keys())
-        mcspec = 'mc:%s:' % mc
+        skiplist = list(self.tinfoil.cooker.skiplist_by_mc[mc].keys())
+
         if mc:
-            skiplist = [s[len(mcspec):] for s in skiplist if s.startswith(mcspec)]
+            mcspec = f'mc:{mc}:'
+            skiplist = [s[len(mcspec):] if s.startswith(mcspec) else s for s in skiplist]
 
         for fn in skiplist:
             recipe_parts = os.path.splitext(os.path.basename(fn))[0].split('_')
@@ -162,7 +163,7 @@
         def print_item(f, pn, ver, layer, ispref):
             if not selected_layer or layer == selected_layer:
                 if not bare and f in skiplist:
-                    skipped = ' (skipped: %s)' % self.tinfoil.cooker.skiplist[f].skipreason
+                    skipped = ' (skipped: %s)' % self.tinfoil.cooker.skiplist_by_mc[mc][f].skipreason
                 else:
                     skipped = ''
                 if show_filenames:
@@ -301,7 +302,7 @@ Lists recipes with the bbappends that apply to them as subitems.
                 if self.show_appends_for_pn(pn, cooker_data, args.mc):
                     appends = True
 
-        if not args.pnspec and self.show_appends_for_skipped():
+        if not args.pnspec and self.show_appends_for_skipped(args.mc):
             appends = True
 
         if not appends:
@@ -317,9 +318,9 @@
 
         return self.show_appends_output(filenames, best_filename)
 
-    def show_appends_for_skipped(self):
+    def show_appends_for_skipped(self, mc):
         filenames = [os.path.basename(f)
-                    for f in self.tinfoil.cooker.skiplist.keys()]
+                    for f in self.tinfoil.cooker.skiplist_by_mc[mc].keys()]
         return self.show_appends_output(filenames, None, " (skipped)")
 
     def show_appends_output(self, filenames, best_filename, name_suffix = ''):
diff --git a/bitbake/lib/toaster/tests/builds/buildtest.py b/bitbake/lib/toaster/tests/builds/buildtest.py
index cacfccd4d3..e54d561334 100644
--- a/bitbake/lib/toaster/tests/builds/buildtest.py
+++ b/bitbake/lib/toaster/tests/builds/buildtest.py
@@ -128,7 +128,7 @@ class BuildTest(unittest.TestCase):
         if os.environ.get("TOASTER_TEST_USE_SSTATE_MIRROR"):
             ProjectVariable.objects.get_or_create(
                 name="SSTATE_MIRRORS",
-                value="file://.* http://cdn.jsdelivr.net/yocto/sstate/all/PATH;downloadfilename=PATH",
+                value="file://.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH",
                 project=project)
 
         ProjectTarget.objects.create(project=project,