path: root/bitbake/lib/bb/cooker.py
Diffstat (limited to 'bitbake/lib/bb/cooker.py')
-rw-r--r--    bitbake/lib/bb/cooker.py    164
1 file changed, 92 insertions, 72 deletions
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index 939a999974..1810bcc604 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -8,7 +8,7 @@
 #
 # SPDX-License-Identifier: GPL-2.0-only
 #
-
+import enum
 import sys, os, glob, os.path, re, time
 import itertools
 import logging
@@ -17,7 +17,7 @@ import threading
 from io import StringIO, UnsupportedOperation
 from contextlib import closing
 from collections import defaultdict, namedtuple
-import bb, bb.exceptions, bb.command
+import bb, bb.command
 from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
 import queue
 import signal
@@ -48,16 +48,15 @@ class CollectionError(bb.BBHandledException):
     Exception raised when layer configuration is incorrect
     """
 
-class state:
-    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))
 
-    @classmethod
-    def get_name(cls, code):
-        for name in dir(cls):
-            value = getattr(cls, name)
-            if type(value) == type(cls.initial) and value == code:
-                return name
-        raise ValueError("Invalid status code: %s" % code)
+class State(enum.Enum):
+    INITIAL = 0,
+    PARSING = 1,
+    RUNNING = 2,
+    SHUTDOWN = 3,
+    FORCE_SHUTDOWN = 4,
+    STOPPED = 5,
+    ERROR = 6
 
 
 class SkippedPackage:
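A side note on the replacement enum: each member is assigned with a trailing comma (INITIAL = 0,), so the values end up as one-element tuples rather than plain ints. That is harmless here because the rest of the patch only ever compares enum members, and the old state.get_name() helper is superseded by the members' built-in .name. A small sketch of the pattern, separate from the patch itself:

    import enum

    class State(enum.Enum):
        INITIAL = 0,
        RUNNING = 2,

    current = State.INITIAL
    print(current == State.RUNNING)   # False - comparison is by member, the value is never inspected
    print(current.name)               # "INITIAL" - replaces the old state.get_name() lookup
    print(State.INITIAL.value)        # (0,) - a tuple, caused by the trailing comma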
@@ -134,7 +133,8 @@ class BBCooker:
         self.baseconfig_valid = False
         self.parsecache_valid = False
         self.eventlog = None
-        self.skiplist = {}
+        # The skiplists, one per multiconfig
+        self.skiplist_by_mc = defaultdict(dict)
         self.featureset = CookerFeatures()
         if featureSet:
             for f in featureSet:
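The flat self.skiplist becomes one skip list per multiconfig. Using defaultdict(dict) keeps the call sites simple: indexing with a multiconfig name that has not been seen yet lazily creates an empty dict for it. A minimal illustration (the recipe path and value are invented for the example):

    from collections import defaultdict

    skiplist_by_mc = defaultdict(dict)
    skiplist_by_mc["mc1"]["/srv/meta-example/example_1.0.bb"] = "skipped: incompatible machine"

    print(skiplist_by_mc["mc1"])      # {'/srv/meta-example/example_1.0.bb': 'skipped: incompatible machine'}
    print(skiplist_by_mc["default"])  # {} - created on first access, no KeyError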
@@ -180,7 +180,7 @@ class BBCooker:
             pass
 
         self.command = bb.command.Command(self, self.process_server)
-        self.state = state.initial
+        self.state = State.INITIAL
 
         self.parser = None
 
@@ -226,23 +226,22 @@ class BBCooker:
             bb.warn("Cooker received SIGTERM, shutting down...")
         elif signum == signal.SIGHUP:
             bb.warn("Cooker received SIGHUP, shutting down...")
-        self.state = state.forceshutdown
+        self.state = State.FORCE_SHUTDOWN
         bb.event._should_exit.set()
 
     def setFeatures(self, features):
         # we only accept a new feature set if we're in state initial, so we can reset without problems
-        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
+        if not self.state in [State.INITIAL, State.SHUTDOWN, State.FORCE_SHUTDOWN, State.STOPPED, State.ERROR]:
             raise Exception("Illegal state for feature set change")
         original_featureset = list(self.featureset)
         for feature in features:
             self.featureset.setFeature(feature)
         bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
-        if (original_featureset != list(self.featureset)) and self.state != state.error and hasattr(self, "data"):
+        if (original_featureset != list(self.featureset)) and self.state != State.ERROR and hasattr(self, "data"):
             self.reset()
 
     def initConfigurationData(self):
-
-        self.state = state.initial
+        self.state = State.INITIAL
         self.caches_array = []
 
         sys.path = self.orig_syspath.copy()
@@ -281,7 +280,6 @@ class BBCooker:
         self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
         self.databuilder.parseBaseConfiguration()
         self.data = self.databuilder.data
-        self.data_hash = self.databuilder.data_hash
         self.extraconfigdata = {}
 
         eventlog = self.data.getVar("BB_DEFAULT_EVENTLOG")
@@ -318,8 +316,14 @@ class BBCooker:
             try:
                 with hashserv.create_client(upstream) as client:
                     client.ping()
-            except (ConnectionError, ImportError) as e:
-                bb.warn("BB_HASHSERVE_UPSTREAM is not valid, unable to connect hash equivalence server at '%s': %s"
+            except ImportError as e:
+                bb.fatal(""""Unable to use hash equivalence server at '%s' due to missing or incorrect python module:
+%s
+Please install the needed module on the build host, or use an environment containing it (e.g a pip venv or OpenEmbedded's buildtools tarball).
+You can also remove the BB_HASHSERVE_UPSTREAM setting, but this may result in significantly longer build times as bitbake will be unable to reuse prebuilt sstate artefacts."""
+                    % (upstream, repr(e)))
+            except ConnectionError as e:
+                bb.warn("Unable to connect to hash equivalence server at '%s', please correct or remove BB_HASHSERVE_UPSTREAM:\n%s"
                         % (upstream, repr(e)))
                 upstream = None
 
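The old combined handler treated a missing python module and an unreachable server the same way; now a missing module aborts the build while a connection failure only warns and drops the upstream. For reference, the same reachability check can be run outside the cooker; a rough standalone sketch, assuming a bitbake checkout on sys.path and a placeholder server address:

    import sys
    sys.path.insert(0, "/path/to/bitbake/lib")    # placeholder: point at a bitbake checkout

    upstream = "hashserv.example.com:8686"        # placeholder: your BB_HASHSERVE_UPSTREAM value

    try:
        import hashserv
        with hashserv.create_client(upstream) as client:
            client.ping()
        print("hash equivalence server is reachable")
    except ImportError as e:
        raise SystemExit("missing python module needed for hash equivalence: %r" % e)
    except ConnectionError as e:
        print("server unreachable; prebuilt sstate reuse will suffer: %r" % e)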
@@ -370,6 +374,11 @@ class BBCooker:
         if not clean:
             bb.parse.BBHandler.cached_statements = {}
 
+        # If writes were made to any of the data stores, we need to recalculate the data
+        # store cache
+        if hasattr(self, "databuilder"):
+            self.databuilder.calc_datastore_hashes()
+
     def parseConfiguration(self):
         self.updateCacheSync()
 
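This hunk, together with the removal of self.data_hash above and the cooker.databuilder.data_hash hunks below, moves ownership of the configuration hash onto the databuilder, which recalculates it whenever the data stores may have been written to. Conceptually the recalculation is just "re-hash the configuration after any writes"; a generic illustration of that idea (not bitbake's implementation):

    import hashlib

    def calc_config_hash(config_vars):
        # hash a stable rendering of the key/value pairs
        rendered = "\n".join("%s=%s" % (k, config_vars[k]) for k in sorted(config_vars))
        return hashlib.sha256(rendered.encode("utf-8")).hexdigest()

    cfg = {"BB_NUMBER_THREADS": "8", "MACHINE": "qemux86-64"}
    before = calc_config_hash(cfg)
    cfg["MACHINE"] = "qemuarm64"              # a write invalidates the previous hash
    assert calc_config_hash(cfg) != before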
@@ -612,8 +621,8 @@ class BBCooker:
         localdata = {}
 
         for mc in self.multiconfigs:
-            taskdata[mc] = bb.taskdata.TaskData(halt, skiplist=self.skiplist, allowincomplete=allowincomplete)
-            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
+            taskdata[mc] = bb.taskdata.TaskData(halt, skiplist=self.skiplist_by_mc[mc], allowincomplete=allowincomplete)
+            localdata[mc] = bb.data.createCopy(self.databuilder.mcdata[mc])
             bb.data.expandKeys(localdata[mc])
 
         current = 0
@@ -680,14 +689,14 @@ class BBCooker:
         bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
         return taskdata, runlist
 
-    def prepareTreeData(self, pkgs_to_build, task):
+    def prepareTreeData(self, pkgs_to_build, task, halt=False):
         """
         Prepare a runqueue and taskdata object for iteration over pkgs_to_build
         """
 
         # We set halt to False here to prevent unbuildable targets raising
         # an exception when we're just generating data
-        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)
+        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, halt, allowincomplete=True)
 
         return runlist, taskdata
 
@@ -701,7 +710,7 @@ class BBCooker:
         if not task.startswith("do_"):
             task = "do_%s" % task
 
-        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
+        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task, halt=True)
         rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
         rq.rqdata.prepare()
         return self.buildDependTree(rq, taskdata)
@@ -896,10 +905,11 @@ class BBCooker:
 
         depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
 
-        with open('pn-buildlist', 'w') as f:
-            for pn in depgraph["pn"]:
-                f.write(pn + "\n")
-        logger.info("PN build list saved to 'pn-buildlist'")
+        pns = depgraph["pn"].keys()
+        if pns:
+            with open('pn-buildlist', 'w') as f:
+                f.write("%s\n" % "\n".join(sorted(pns)))
+            logger.info("PN build list saved to 'pn-buildlist'")
 
         # Remove old format output files to ensure no confusion with stale data
         try:
@@ -933,7 +943,7 @@ class BBCooker:
         for mc in self.multiconfigs:
             # First get list of recipes, including skipped
             recipefns = list(self.recipecaches[mc].pkg_fn.keys())
-            recipefns.extend(self.skiplist.keys())
+            recipefns.extend(self.skiplist_by_mc[mc].keys())
 
             # Work out list of bbappends that have been applied
             applied_appends = []
@@ -952,13 +962,7 @@ class BBCooker:
                                    '\n '.join(appends_without_recipes[mc])))
 
         if msgs:
-            msg = "\n".join(msgs)
-            warn_only = self.databuilder.mcdata[mc].getVar("BB_DANGLINGAPPENDS_WARNONLY", \
-                False) or "no"
-            if warn_only.lower() in ("1", "yes", "true"):
-                bb.warn(msg)
-            else:
-                bb.fatal(msg)
+            bb.fatal("\n".join(msgs))
 
     def handlePrefProviders(self):
 
@@ -1338,7 +1342,7 @@ class BBCooker:
         self.buildSetVars()
         self.reset_mtime_caches()
 
-        bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
+        bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.databuilder.data_hash, self.caches_array)
 
         layername = self.collections[mc].calc_bbfile_priority(fn)[2]
         infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn), layername)
@@ -1399,11 +1403,11 @@ class BBCooker:
 
             msg = None
             interrupted = 0
-            if halt or self.state == state.forceshutdown:
+            if halt or self.state == State.FORCE_SHUTDOWN:
                 rq.finish_runqueue(True)
                 msg = "Forced shutdown"
                 interrupted = 2
-            elif self.state == state.shutdown:
+            elif self.state == State.SHUTDOWN:
                 rq.finish_runqueue(False)
                 msg = "Stopped build"
                 interrupted = 1
@@ -1459,7 +1463,6 @@ class BBCooker:
 
                 if t in task or getAllTaskSignatures:
                     try:
-                        rq.rqdata.prepare_task_hash(tid)
                         sig.append([pn, t, rq.rqdata.get_task_unihash(tid)])
                     except KeyError:
                         sig.append(self.getTaskSignatures(target, [t])[0])
@@ -1474,12 +1477,12 @@ class BBCooker:
         def buildTargetsIdle(server, rq, halt):
             msg = None
             interrupted = 0
-            if halt or self.state == state.forceshutdown:
+            if halt or self.state == State.FORCE_SHUTDOWN:
                 bb.event._should_exit.set()
                 rq.finish_runqueue(True)
                 msg = "Forced shutdown"
                 interrupted = 2
-            elif self.state == state.shutdown:
+            elif self.state == State.SHUTDOWN:
                 rq.finish_runqueue(False)
                 msg = "Stopped build"
                 interrupted = 1
@@ -1574,7 +1577,7 @@ class BBCooker:
 
 
     def updateCacheSync(self):
-        if self.state == state.running:
+        if self.state == State.RUNNING:
             return
 
         if not self.baseconfig_valid:
@@ -1584,19 +1587,19 @@ class BBCooker:
 
     # This is called for all async commands when self.state != running
     def updateCache(self):
-        if self.state == state.running:
+        if self.state == State.RUNNING:
             return
 
-        if self.state in (state.shutdown, state.forceshutdown, state.error):
+        if self.state in (State.SHUTDOWN, State.FORCE_SHUTDOWN, State.ERROR):
             if hasattr(self.parser, 'shutdown'):
                 self.parser.shutdown(clean=False)
                 self.parser.final_cleanup()
             raise bb.BBHandledException()
 
-        if self.state != state.parsing:
+        if self.state != State.PARSING:
             self.updateCacheSync()
 
-        if self.state != state.parsing and not self.parsecache_valid:
+        if self.state != State.PARSING and not self.parsecache_valid:
             bb.server.process.serverlog("Parsing started")
             self.parsewatched = {}
 
@@ -1630,9 +1633,10 @@ class BBCooker:
             self.parser = CookerParser(self, mcfilelist, total_masked)
             self._parsecache_set(True)
 
-        self.state = state.parsing
+        self.state = State.PARSING
 
         if not self.parser.parse_next():
+            bb.server.process.serverlog("Parsing completed")
             collectlog.debug("parsing complete")
             if self.parser.error:
                 raise bb.BBHandledException()
@@ -1640,7 +1644,7 @@ class BBCooker:
             self.handlePrefProviders()
             for mc in self.multiconfigs:
                 self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data)
-            self.state = state.running
+            self.state = State.RUNNING
 
             # Send an event listing all stamps reachable after parsing
             # which the metadata may use to clean up stale data
@@ -1713,10 +1717,10 @@ class BBCooker:
 
     def shutdown(self, force=False):
         if force:
-            self.state = state.forceshutdown
+            self.state = State.FORCE_SHUTDOWN
             bb.event._should_exit.set()
         else:
-            self.state = state.shutdown
+            self.state = State.SHUTDOWN
 
         if self.parser:
             self.parser.shutdown(clean=False)
@@ -1726,7 +1730,7 @@ class BBCooker:
         if hasattr(self.parser, 'shutdown'):
             self.parser.shutdown(clean=False)
             self.parser.final_cleanup()
-        self.state = state.initial
+        self.state = State.INITIAL
         bb.event._should_exit.clear()
 
     def reset(self):
@@ -1813,8 +1817,8 @@ class CookerCollectFiles(object):
             bb.event.fire(CookerExit(), eventdata)
 
         # We need to track where we look so that we can know when the cache is invalid. There
-        # is no nice way to do this, this is horrid. We intercept the os.listdir()
-        # (or os.scandir() for python 3.6+) calls while we run glob().
+        # is no nice way to do this, this is horrid. We intercept the os.listdir() and os.scandir()
+        # calls while we run glob().
         origlistdir = os.listdir
         if hasattr(os, 'scandir'):
             origscandir = os.scandir
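For context, the "horrid" tracking the comment describes works by temporarily swapping out os.listdir and os.scandir so that every directory glob() touches gets recorded, which is what lets the cooker notice later when a watched directory changes. A simplified standalone sketch of the idea (not the cooker's actual bookkeeping):

    import glob, os

    searched = set()
    origlistdir = os.listdir
    origscandir = os.scandir

    def ourlistdir(d):
        searched.add(d)
        return origlistdir(d)

    def ourscandir(d):
        searched.add(d)
        return origscandir(d)

    os.listdir = ourlistdir
    os.scandir = ourscandir
    try:
        conf_files = glob.glob("/etc/*.conf")
    finally:
        os.listdir = origlistdir
        os.scandir = origscandir

    print(sorted(searched))    # the directories glob() had to inspect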
@@ -2098,7 +2102,6 @@ class Parser(multiprocessing.Process):
         except Exception as exc:
             tb = sys.exc_info()[2]
             exc.recipe = filename
-            exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
             return True, None, exc
         # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
         # and for example a worker thread doesn't just exit on its own in response to
@@ -2113,7 +2116,7 @@ class CookerParser(object):
         self.mcfilelist = mcfilelist
         self.cooker = cooker
         self.cfgdata = cooker.data
-        self.cfghash = cooker.data_hash
+        self.cfghash = cooker.databuilder.data_hash
         self.cfgbuilder = cooker.databuilder
 
         # Accounting statistics
@@ -2225,9 +2228,8 @@ class CookerParser(object):
 
         for process in self.processes:
             process.join()
-            # Added in 3.7, cleans up zombies
-            if hasattr(process, "close"):
-                process.close()
+            # clean up zombies
+            process.close()
 
         bb.codeparser.parser_cache_save()
         bb.codeparser.parser_cache_savemerge()
@@ -2237,12 +2239,13 @@ class CookerParser(object):
             profiles = []
             for i in self.process_names:
                 logfile = "profile-parse-%s.log" % i
-                if os.path.exists(logfile):
+                if os.path.exists(logfile) and os.path.getsize(logfile):
                     profiles.append(logfile)
 
-            pout = "profile-parse.log.processed"
-            bb.utils.process_profilelog(profiles, pout = pout)
-            print("Processed parsing statistics saved to %s" % (pout))
+            if profiles:
+                pout = "profile-parse.log.processed"
+                bb.utils.process_profilelog(profiles, pout = pout)
+                print("Processed parsing statistics saved to %s" % (pout))
 
     def final_cleanup(self):
         if self.syncthread:
@@ -2299,8 +2302,12 @@ class CookerParser(object):
             return False
         except ParsingFailure as exc:
             self.error += 1
-            logger.error('Unable to parse %s: %s' %
-                    (exc.recipe, bb.exceptions.to_string(exc.realexception)))
+
+            exc_desc = str(exc)
+            if isinstance(exc, SystemExit) and not isinstance(exc.code, str):
+                exc_desc = 'Exited with "%d"' % exc.code
+
+            logger.error('Unable to parse %s: %s' % (exc.recipe, exc_desc))
             self.shutdown(clean=False)
             return False
         except bb.parse.ParseError as exc:
@@ -2309,20 +2316,33 @@ class CookerParser(object):
             self.shutdown(clean=False, eventmsg=str(exc))
             return False
         except bb.data_smart.ExpansionError as exc:
+            def skip_frames(f, fn_prefix):
+                while f and f.tb_frame.f_code.co_filename.startswith(fn_prefix):
+                    f = f.tb_next
+                return f
+
             self.error += 1
             bbdir = os.path.dirname(__file__) + os.sep
-            etype, value, _ = sys.exc_info()
-            tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
+            etype, value, tb = sys.exc_info()
+
+            # Remove any frames where the code comes from bitbake. This
+            # prevents deep (and pretty useless) backtraces for expansion error
+            tb = skip_frames(tb, bbdir)
+            cur = tb
+            while cur:
+                cur.tb_next = skip_frames(cur.tb_next, bbdir)
+                cur = cur.tb_next
+
             logger.error('ExpansionError during parsing %s', value.recipe,
                          exc_info=(etype, value, tb))
             self.shutdown(clean=False)
             return False
         except Exception as exc:
             self.error += 1
-            etype, value, tb = sys.exc_info()
+            _, value, _ = sys.exc_info()
             if hasattr(value, "recipe"):
                 logger.error('Unable to parse %s' % value.recipe,
-                             exc_info=(etype, value, exc.traceback))
+                             exc_info=sys.exc_info())
             else:
                 # Most likely, an exception occurred during raising an exception
                 import traceback
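The new skip_frames() walks the raw traceback objects and drops frames whose code lives under the bitbake directory, relinking tb_next so the user mostly sees their own metadata in the backtrace (tb_next has been writable since Python 3.7, which is what makes the in-place relinking possible). A small self-contained demonstration of the pruning idea, using the outermost frame's own file as the prefix:

    import sys, traceback

    def skip_frames(f, fn_prefix):
        # drop leading traceback frames whose code lives under fn_prefix
        while f and f.tb_frame.f_code.co_filename.startswith(fn_prefix):
            f = f.tb_next
        return f

    def boom():
        raise ValueError("example")

    try:
        boom()
    except ValueError:
        etype, value, tb = sys.exc_info()
        # in this toy case every frame is local, so the whole traceback is pruned
        tb = skip_frames(tb, tb.tb_frame.f_code.co_filename)
        traceback.print_exception(etype, value, tb)   # prints just "ValueError: example"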
@@ -2343,7 +2363,7 @@ class CookerParser(object):
         for virtualfn, info_array in result:
             if info_array[0].skipped:
                 self.skipped += 1
-                self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
+                self.cooker.skiplist_by_mc[mc][virtualfn] = SkippedPackage(info_array[0])
             self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
                                         parsed=parsed, watcher = self.cooker.add_filewatch)
         return True