Diffstat (limited to 'bitbake/lib/bb/cooker.py')
-rw-r--r--  bitbake/lib/bb/cooker.py  971
1 file changed, 540 insertions(+), 431 deletions(-)
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index f4ab797edf..03f262ac16 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -8,26 +8,25 @@
 #
 # SPDX-License-Identifier: GPL-2.0-only
 #
-
+import enum
 import sys, os, glob, os.path, re, time
 import itertools
 import logging
-import multiprocessing
-import sre_constants
+from bb import multiprocessing
 import threading
 from io import StringIO, UnsupportedOperation
 from contextlib import closing
 from collections import defaultdict, namedtuple
-import bb, bb.exceptions, bb.command
+import bb, bb.command
 from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
 import queue
 import signal
 import prserv.serv
-import pyinotify
 import json
 import pickle
 import codecs
 import hashserv
+import ctypes
 
 logger = logging.getLogger("BitBake")
 collectlog = logging.getLogger("BitBake.Collection")
@@ -50,16 +49,15 @@ class CollectionError(bb.BBHandledException):
     Exception raised when layer configuration is incorrect
     """
 
-class state:
-    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))
 
-    @classmethod
-    def get_name(cls, code):
-        for name in dir(cls):
-            value = getattr(cls, name)
-            if type(value) == type(cls.initial) and value == code:
-                return name
-        raise ValueError("Invalid status code: %s" % code)
+class State(enum.Enum):
+    INITIAL = 0,
+    PARSING = 1,
+    RUNNING = 2,
+    SHUTDOWN = 3,
+    FORCE_SHUTDOWN = 4,
+    STOPPED = 5,
+    ERROR = 6
 
 
 class SkippedPackage:
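Note: as an illustration (not part of the patch), the new enum keeps the integer ordering of the old class but replaces the reflection-based get_name() helper with the enum's built-in .name. The trailing commas in the patch make most member values one-element tuples such as (0,), which is harmless since call sites only ever compare members by identity:

    import enum

    class State(enum.Enum):
        INITIAL = 0,      # value is the tuple (0,) because of the trailing comma
        RUNNING = 2,

    s = State.INITIAL
    if s in (State.INITIAL, State.RUNNING):   # identity comparison, values unused
        print(s.name)                         # "INITIAL" replaces state.get_name(code)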
@@ -81,7 +79,7 @@ class SkippedPackage:
 
 
 class CookerFeatures(object):
-    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))
+    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS, RECIPE_SIGGEN_INFO] = list(range(4))
 
     def __init__(self):
         self._features=set()
@@ -104,12 +102,15 @@ class CookerFeatures(object):
 
 class EventWriter:
     def __init__(self, cooker, eventfile):
-        self.file_inited = None
         self.cooker = cooker
         self.eventfile = eventfile
         self.event_queue = []
 
-    def write_event(self, event):
+    def write_variables(self):
+        with open(self.eventfile, "a") as f:
+            f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
+
+    def send(self, event):
         with open(self.eventfile, "a") as f:
             try:
                 str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
@@ -119,28 +120,6 @@ class EventWriter:
             import traceback
             print(err, traceback.format_exc())
 
-    def send(self, event):
-        if self.file_inited:
-            # we have the file, just write the event
-            self.write_event(event)
-        else:
-            # init on bb.event.BuildStarted
-            name = "%s.%s" % (event.__module__, event.__class__.__name__)
-            if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
-                with open(self.eventfile, "w") as f:
-                    f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
-
-                self.file_inited = True
-
-                # write pending events
-                for evt in self.event_queue:
-                    self.write_event(evt)
-
-                # also write the current event
-                self.write_event(event)
-            else:
-                # queue all events until the file is inited
-                self.event_queue.append(event)
 
 #============================================================================#
 # BBCooker
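Note: a minimal sketch (not part of the patch) of the simplified EventWriter contract. The lazy file_inited/queueing logic is gone; the cooker now appends every event unconditionally via send() and explicitly dumps the variable snapshot with write_variables() when a build starts (see the self.eventlog[2].write_variables() calls further down this diff):

    # Hypothetical driver code, mirroring how BBCooker uses the new API:
    writer = EventWriter(cooker, "/tmp/eventlog.json")  # path is illustrative
    writer.write_variables()          # one-off dump of all variables and flags
    writer.send(build_started_event)  # each event is appended as a base64 pickle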
@@ -150,43 +129,34 @@ class BBCooker:
     Manages one bitbake build run
     """
 
-    def __init__(self, featureSet=None, idleCallBackRegister=None):
+    def __init__(self, featureSet=None, server=None):
         self.recipecaches = None
+        self.baseconfig_valid = False
+        self.parsecache_valid = False
         self.eventlog = None
-        self.skiplist = {}
+        # The skiplists, one per multiconfig
+        self.skiplist_by_mc = defaultdict(dict)
         self.featureset = CookerFeatures()
         if featureSet:
             for f in featureSet:
                 self.featureset.setFeature(f)
 
+        self.orig_syspath = sys.path.copy()
+        self.orig_sysmodules = [*sys.modules]
+
         self.configuration = bb.cookerdata.CookerConfiguration()
 
-        self.idleCallBackRegister = idleCallBackRegister
+        self.process_server = server
+        self.idleCallBackRegister = None
+        self.waitIdle = None
+        if server:
+            self.idleCallBackRegister = server.register_idle_function
+            self.waitIdle = server.wait_for_idle
 
         bb.debug(1, "BBCooker starting %s" % time.time())
-        sys.stdout.flush()
-
-        self.configwatcher = pyinotify.WatchManager()
-        bb.debug(1, "BBCooker pyinotify1 %s" % time.time())
-        sys.stdout.flush()
-
-        self.configwatcher.bbseen = set()
-        self.configwatcher.bbwatchedfiles = set()
-        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
-        bb.debug(1, "BBCooker pyinotify2 %s" % time.time())
-        sys.stdout.flush()
-        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
-                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
-                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
-        self.watcher = pyinotify.WatchManager()
-        bb.debug(1, "BBCooker pyinotify3 %s" % time.time())
-        sys.stdout.flush()
-        self.watcher.bbseen = set()
-        self.watcher.bbwatchedfiles = set()
-        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)
-
-        bb.debug(1, "BBCooker pyinotify complete %s" % time.time())
-        sys.stdout.flush()
+
+        self.configwatched = {}
+        self.parsewatched = {}
 
         # If being called by something like tinfoil, we need to clean cached data
         # which may now be invalid
@@ -197,14 +167,6 @@ class BBCooker:
         self.hashserv = None
         self.hashservaddr = None
 
-        self.inotify_modified_files = []
-
-        def _process_inotify_updates(server, cooker, abort):
-            cooker.process_inotify_updates()
-            return 1.0
-
-        self.idleCallBackRegister(_process_inotify_updates, self)
-
         # TOSTOP must not be set or our children will hang when they output
         try:
             fd = sys.stdout.fileno()
@@ -218,8 +180,8 @@ class BBCooker:
         except UnsupportedOperation:
             pass
 
-        self.command = bb.command.Command(self)
-        self.state = state.initial
+        self.command = bb.command.Command(self, self.process_server)
+        self.state = State.INITIAL
 
         self.parser = None
 
@@ -228,108 +190,68 @@ class BBCooker:
         signal.signal(signal.SIGHUP, self.sigterm_exception)
 
         bb.debug(1, "BBCooker startup complete %s" % time.time())
-        sys.stdout.flush()
 
     def init_configdata(self):
         if not hasattr(self, "data"):
             self.initConfigurationData()
             bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
-            sys.stdout.flush()
             self.handlePRServ()
 
-    def process_inotify_updates(self):
-        for n in [self.confignotifier, self.notifier]:
-            if n.check_events(timeout=0):
-                # read notified events and enqeue them
-                n.read_events()
-                n.process_events()
+    def _baseconfig_set(self, value):
+        if value and not self.baseconfig_valid:
+            bb.server.process.serverlog("Base config valid")
+        elif not value and self.baseconfig_valid:
+            bb.server.process.serverlog("Base config invalidated")
+        self.baseconfig_valid = value
 
-    def config_notifications(self, event):
-        if event.maskname == "IN_Q_OVERFLOW":
-            bb.warn("inotify event queue overflowed, invalidating caches.")
-            self.parsecache_valid = False
-            self.baseconfig_valid = False
-            bb.parse.clear_cache()
-            return
-        if not event.pathname in self.configwatcher.bbwatchedfiles:
-            return
-        if not event.pathname in self.inotify_modified_files:
-            self.inotify_modified_files.append(event.pathname)
-        self.baseconfig_valid = False
-
-    def notifications(self, event):
-        if event.maskname == "IN_Q_OVERFLOW":
-            bb.warn("inotify event queue overflowed, invalidating caches.")
-            self.parsecache_valid = False
-            bb.parse.clear_cache()
-            return
-        if event.pathname.endswith("bitbake-cookerdaemon.log") \
-                or event.pathname.endswith("bitbake.lock"):
-            return
-        if not event.pathname in self.inotify_modified_files:
-            self.inotify_modified_files.append(event.pathname)
-        self.parsecache_valid = False
+    def _parsecache_set(self, value):
+        if value and not self.parsecache_valid:
+            bb.server.process.serverlog("Parse cache valid")
+        elif not value and self.parsecache_valid:
+            bb.server.process.serverlog("Parse cache invalidated")
+        self.parsecache_valid = value
+
+    def add_filewatch(self, deps, configwatcher=False):
+        if configwatcher:
+            watcher = self.configwatched
+        else:
+            watcher = self.parsewatched
 
-    def add_filewatch(self, deps, watcher=None, dirs=False):
-        if not watcher:
-            watcher = self.watcher
         for i in deps:
-            watcher.bbwatchedfiles.add(i[0])
-            if dirs:
-                f = i[0]
-            else:
-                f = os.path.dirname(i[0])
-            if f in watcher.bbseen:
-                continue
-            watcher.bbseen.add(f)
-            watchtarget = None
-            while True:
-                # We try and add watches for files that don't exist but if they did, would influence
-                # the parser. The parent directory of these files may not exist, in which case we need
-                # to watch any parent that does exist for changes.
-                try:
-                    watcher.add_watch(f, self.watchmask, quiet=False)
-                    if watchtarget:
-                        watcher.bbwatchedfiles.add(watchtarget)
-                    break
-                except pyinotify.WatchManagerError as e:
-                    if 'ENOENT' in str(e):
-                        watchtarget = f
-                        f = os.path.dirname(f)
-                        if f in watcher.bbseen:
-                            break
-                        watcher.bbseen.add(f)
-                        continue
-                    if 'ENOSPC' in str(e):
-                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
-                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
-                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
-                        providerlog.error("Root privilege is required to modify max_user_watches.")
-                    raise
+            f = i[0]
+            mtime = i[1]
+            watcher[f] = mtime
 
     def sigterm_exception(self, signum, stackframe):
         if signum == signal.SIGTERM:
             bb.warn("Cooker received SIGTERM, shutting down...")
         elif signum == signal.SIGHUP:
             bb.warn("Cooker received SIGHUP, shutting down...")
-        self.state = state.forceshutdown
+        self.state = State.FORCE_SHUTDOWN
+        bb.event._should_exit.set()
 
     def setFeatures(self, features):
         # we only accept a new feature set if we're in state initial, so we can reset without problems
-        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
+        if not self.state in [State.INITIAL, State.SHUTDOWN, State.FORCE_SHUTDOWN, State.STOPPED, State.ERROR]:
             raise Exception("Illegal state for feature set change")
         original_featureset = list(self.featureset)
         for feature in features:
            self.featureset.setFeature(feature)
         bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
-        if (original_featureset != list(self.featureset)) and self.state != state.error and hasattr(self, "data"):
+        if (original_featureset != list(self.featureset)) and self.state != State.ERROR and hasattr(self, "data"):
            self.reset()
 
     def initConfigurationData(self):
-
-        self.state = state.initial
+        self.state = State.INITIAL
         self.caches_array = []
 
+        sys.path = self.orig_syspath.copy()
+        for mod in [*sys.modules]:
+            if mod not in self.orig_sysmodules:
+                del sys.modules[mod]
+
+        self.configwatched = {}
+
         # Need to preserve BB_CONSOLELOG over resets
         consolelog = None
         if hasattr(self, "data"):
@@ -338,12 +260,12 @@ class BBCooker:
         if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
             self.enableDataTracking()
 
-        all_extra_cache_names = []
+        caches_name_array = ['bb.cache:CoreRecipeInfo']
         # We hardcode all known cache types in a single place, here.
         if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
-            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")
-
-        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names
+            caches_name_array.append("bb.cache_extra:HobRecipeInfo")
+        if CookerFeatures.RECIPE_SIGGEN_INFO in self.featureset:
+            caches_name_array.append("bb.cache:SiggenRecipeInfo")
 
         # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
         # This is the entry point, no further check needed!
@@ -359,9 +281,12 @@ class BBCooker:
         self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
         self.databuilder.parseBaseConfiguration()
         self.data = self.databuilder.data
-        self.data_hash = self.databuilder.data_hash
         self.extraconfigdata = {}
 
+        eventlog = self.data.getVar("BB_DEFAULT_EVENTLOG")
+        if not self.configuration.writeeventlog and eventlog:
+            self.setupEventLog(eventlog)
+
         if consolelog:
             self.data.setVar("BB_CONSOLELOG", consolelog)
 
@@ -371,31 +296,51 @@ class BBCooker:
             self.disableDataTracking()
 
         for mc in self.databuilder.mcdata.values():
-            mc.renameVar("__depends", "__base_depends")
-            self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)
+            self.add_filewatch(mc.getVar("__base_depends", False), configwatcher=True)
 
-        self.baseconfig_valid = True
-        self.parsecache_valid = False
+        self._baseconfig_set(True)
+        self._parsecache_set(False)
 
     def handlePRServ(self):
         # Setup a PR Server based on the new configuration
         try:
             self.prhost = prserv.serv.auto_start(self.data)
         except prserv.serv.PRServiceConfigError as e:
-            bb.fatal("Unable to start PR Server, exitting")
+            bb.fatal("Unable to start PR Server, exiting, check the bitbake-cookerdaemon.log")
 
         if self.data.getVar("BB_HASHSERVE") == "auto":
             # Create a new hash server bound to a unix domain socket
             if not self.hashserv:
                 dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
+                upstream = self.data.getVar("BB_HASHSERVE_UPSTREAM") or None
+                if upstream:
+                    try:
+                        with hashserv.create_client(upstream) as client:
+                            client.ping()
+                    except ImportError as e:
+                        bb.fatal(""""Unable to use hash equivalence server at '%s' due to missing or incorrect python module:
+%s
+Please install the needed module on the build host, or use an environment containing it:
+ - if you are using bitbake-setup, run 'bitbake-setup install-buildtools'
+ - openembedded-core layer contains 'scripts/install-buildtools' that can also be used
+ - or set up pip venv
+You can also remove the BB_HASHSERVE_UPSTREAM setting, but this may result in significantly longer build times as bitbake will be unable to reuse prebuilt sstate artefacts."""
+                            % (upstream, repr(e)))
+                    except ConnectionError as e:
+                        bb.warn("Unable to connect to hash equivalence server at '%s', please correct or remove BB_HASHSERVE_UPSTREAM:\n%s"
+                                % (upstream, repr(e)))
+                        upstream = None
+
                 self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
-                self.hashserv = hashserv.create_server(self.hashservaddr, dbfile, sync=False)
-                self.hashserv.process = multiprocessing.Process(target=self.hashserv.serve_forever)
-                self.hashserv.process.start()
-                self.data.setVar("BB_HASHSERVE", self.hashservaddr)
-                self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservaddr)
-                self.databuilder.data.setVar("BB_HASHSERVE", self.hashservaddr)
+                self.hashserv = hashserv.create_server(
+                    self.hashservaddr,
+                    dbfile,
+                    sync=False,
+                    upstream=upstream,
+                )
+                self.hashserv.serve_as_process(log_level=logging.WARNING)
             for mc in self.databuilder.mcdata:
+                self.databuilder.mcorigdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)
                 self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)
 
         bb.parse.init_parser(self.data)
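Note: for illustration only, the upstream availability probe added above boils down to the following, where the address is whatever BB_HASHSERVE_UPSTREAM carries (host:port or a unix:// path; the value below is made up):

    import hashserv

    upstream = "hashserv.example.com:8686"   # illustrative BB_HASHSERVE_UPSTREAM
    try:
        # The same create_client()/ping() pair the patch uses to verify the
        # server is reachable before wiring it up as the local server's upstream.
        with hashserv.create_client(upstream) as client:
            client.ping()
    except ConnectionError:
        upstream = None                      # warn and fall back to local-only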
@@ -410,6 +355,34 @@ class BBCooker:
         if hasattr(self, "data"):
             self.data.disableTracking()
 
+    def revalidateCaches(self):
+        bb.parse.clear_cache()
+
+        clean = True
+        for f in self.configwatched:
+            if not bb.parse.check_mtime(f, self.configwatched[f]):
+                bb.server.process.serverlog("Found %s changed, invalid cache" % f)
+                self._baseconfig_set(False)
+                self._parsecache_set(False)
+                clean = False
+                break
+
+        if clean:
+            for f in self.parsewatched:
+                if not bb.parse.check_mtime(f, self.parsewatched[f]):
+                    bb.server.process.serverlog("Found %s changed, invalid cache" % f)
+                    self._parsecache_set(False)
+                    clean = False
+                    break
+
+        if not clean:
+            bb.parse.BBHandler.cached_statements = {}
+
+        # If writes were made to any of the data stores, we need to recalculate the data
+        # store cache
+        if hasattr(self, "databuilder"):
+            self.databuilder.calc_datastore_hashes()
+
     def parseConfiguration(self):
         self.updateCacheSync()
 
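Note: a minimal sketch (assumptions: plain os.stat, standalone dicts) of the invalidation model that replaces pyinotify here. Rather than kernel watches, the cooker records one mtime per dependency file at parse time and re-stats everything in revalidateCaches():

    import os

    watched = {}   # path -> mtime recorded when the file was last parsed

    def add_filewatch(deps):
        for path, mtime in deps:     # deps are (path, mtime) tuples
            watched[path] = mtime

    def caches_still_valid():
        for path, old_mtime in watched.items():
            try:
                current = os.stat(path).st_mtime
            except OSError:
                current = 0          # vanished files count as changed
            if current != old_mtime:
                return False
        return True

This trades instant notifications for a cheap re-check at the points where cache validity actually matters, and removes the inotify watch-limit failure mode the old code had to explain to users.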
@@ -428,8 +401,24 @@ class BBCooker:
             self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)
 
         self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))
-
-        self.parsecache_valid = False
+        self.collections = {}
+        for mc in self.multiconfigs:
+            self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)
+
+        self._parsecache_set(False)
+
+    def setupEventLog(self, eventlog):
+        if self.eventlog and self.eventlog[0] != eventlog:
+            bb.event.unregister_UIHhandler(self.eventlog[1])
+            self.eventlog = None
+        if not self.eventlog or self.eventlog[0] != eventlog:
+            # we log all events to a file if so directed
+            # register the log file writer as UI Handler
+            if not os.path.exists(os.path.dirname(eventlog)):
+                bb.utils.mkdirhier(os.path.dirname(eventlog))
+            writer = EventWriter(self, eventlog)
+            EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
+            self.eventlog = (eventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)), writer)
 
     def updateConfigOpts(self, options, environment, cmdline):
         self.ui_cmdline = cmdline
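Note: as an aside (not in the patch), the namedtuple in setupEventLog() works because a registered UI handler is only ever accessed as handler.event.send(event) inside bb.event; any object with an event attribute exposing send() will do, e.g.:

    from collections import namedtuple

    class PrintWriter:
        def send(self, event):
            print("event:", type(event).__name__)   # stand-in for EventWriter.send

    Handler = namedtuple('Handler', ['event'])
    handler = Handler(PrintWriter())
    handler.event.send(object())                    # what bb.event does internally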
@@ -450,14 +439,7 @@ class BBCooker:
                 setattr(self.configuration, o, options[o])
 
         if self.configuration.writeeventlog:
-            if self.eventlog and self.eventlog[0] != self.configuration.writeeventlog:
-                bb.event.unregister_UIHhandler(self.eventlog[1])
-            if not self.eventlog or self.eventlog[0] != self.configuration.writeeventlog:
-                # we log all events to a file if so directed
-                # register the log file writer as UI Handler
-                writer = EventWriter(self, self.configuration.writeeventlog)
-                EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
-                self.eventlog = (self.configuration.writeeventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)))
+            self.setupEventLog(self.configuration.writeeventlog)
 
         bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel
         bb.msg.loggerDefaultDomains = self.configuration.debug_domains
@@ -487,37 +469,37 @@ class BBCooker:
         # Now update all the variables not in the datastore to match
         self.configuration.env = environment
 
+        self.revalidateCaches()
         if not clean:
             logger.debug("Base environment change, triggering reparse")
             self.reset()
 
-    def runCommands(self, server, data, abort):
-        """
-        Run any queued asynchronous command
-        This is done by the idle handler so it runs in true context rather than
-        tied to any UI.
-        """
-
-        return self.command.runAsyncCommand()
-
     def showVersions(self):
 
-        (latest_versions, preferred_versions) = self.findProviders()
+        (latest_versions, preferred_versions, required) = self.findProviders()
 
-        logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
-        logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")
+        logger.plain("%-35s %25s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version", "Required Version")
+        logger.plain("%-35s %25s %25s %25s\n", "===========", "==============", "=================", "================")
 
         for p in sorted(self.recipecaches[''].pkg_pn):
-            pref = preferred_versions[p]
+            preferred = preferred_versions[p]
            latest = latest_versions[p]
+            requiredstr = ""
+            preferredstr = ""
+            if required[p]:
+                if preferred[0] is not None:
+                    requiredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]
+                else:
+                    bb.fatal("REQUIRED_VERSION of package %s not available" % p)
+            else:
+                preferredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]
 
-            prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
             lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]
 
-            if pref == latest:
-                prefstr = ""
+            if preferred == latest:
+                preferredstr = ""
 
-            logger.plain("%-35s %25s %25s", p, lateststr, prefstr)
+            logger.plain("%-35s %25s %25s %25s", p, lateststr, preferredstr, requiredstr)
 
     def showEnvironment(self, buildfile=None, pkgs_to_build=None):
         """
@@ -533,6 +515,8 @@ class BBCooker:
         if not orig_tracking:
             self.enableDataTracking()
             self.reset()
+            # reset() resets to the UI requested value so we have to redo this
+            self.enableDataTracking()
 
         def mc_base(p):
             if p.startswith('mc:'):
@@ -556,21 +540,21 @@ class BBCooker:
         if pkgs_to_build[0] in set(ignore.split()):
             bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])
 
-        taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)
+        taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.halt, allowincomplete=True)
 
         mc = runlist[0][0]
         fn = runlist[0][3]
 
         if fn:
             try:
-                bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
-                envdata = bb_caches[mc].loadDataFull(fn, self.collections[mc].get_file_appends(fn))
+                layername = self.collections[mc].calc_bbfile_priority(fn)[2]
+                envdata = self.databuilder.parseRecipe(fn, self.collections[mc].get_file_appends(fn), layername)
             except Exception as e:
                 parselog.exception("Unable to read %s", fn)
                 raise
         else:
             if not mc in self.databuilder.mcdata:
-                bb.fatal('Not multiconfig named "%s" found' % mc)
+                bb.fatal('No multiconfig named "%s" found' % mc)
             envdata = self.databuilder.mcdata[mc]
         data.expandKeys(envdata)
         parse.ast.runAnonFuncs(envdata)
@@ -585,7 +569,7 @@ class BBCooker:
             data.emit_env(env, envdata, True)
             logger.plain(env.getvalue())
 
-        # emit the metadata which isnt valid shell
+        # emit the metadata which isn't valid shell
         for e in sorted(envdata.keys()):
             if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                 logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))
@@ -594,7 +578,7 @@ class BBCooker:
             self.disableDataTracking()
             self.reset()
 
-    def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
+    def buildTaskData(self, pkgs_to_build, task, halt, allowincomplete=False):
         """
         Prepare a runqueue and taskdata object for iteration over pkgs_to_build
         """
@@ -641,8 +625,8 @@ class BBCooker:
         localdata = {}
 
         for mc in self.multiconfigs:
-            taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
-            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
+            taskdata[mc] = bb.taskdata.TaskData(halt, skiplist=self.skiplist_by_mc[mc], allowincomplete=allowincomplete)
+            localdata[mc] = bb.data.createCopy(self.databuilder.mcdata[mc])
             bb.data.expandKeys(localdata[mc])
 
         current = 0
@@ -690,19 +674,18 @@ class BBCooker:
                 taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
                 mcdeps |= set(taskdata[mc].get_mcdepends())
             new = False
-            for mc in self.multiconfigs:
-                for k in mcdeps:
-                    if k in seen:
-                        continue
-                    l = k.split(':')
-                    depmc = l[2]
-                    if depmc not in self.multiconfigs:
-                        bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named configuration %s" % (k,depmc))
-                    else:
-                        logger.debug("Adding providers for multiconfig dependency %s" % l[3])
-                        taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
-                        seen.add(k)
-                        new = True
+            for k in mcdeps:
+                if k in seen:
+                    continue
+                l = k.split(':')
+                depmc = l[2]
+                if depmc not in self.multiconfigs:
+                    bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named configuration %s" % (k,depmc))
+                else:
+                    logger.debug("Adding providers for multiconfig dependency %s" % l[3])
+                    taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
+                    seen.add(k)
+                    new = True
 
         for mc in self.multiconfigs:
             taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
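Note: for reference (not in the patch), the tokens this loop slices apart follow the multiconfig dependency convention mc:&lt;from&gt;:&lt;to&gt;:&lt;recipe&gt;:&lt;task&gt;, so l[2] is the multiconfig expected to provide the target and l[3] is the provider added to its taskdata. An illustrative entry:

    k = "mc:qemux86:qemuarm:core-image-minimal:do_build"   # made-up example
    l = k.split(':')
    depmc = l[2]      # "qemuarm" - the multiconfig that must provide it
    provider = l[3]   # "core-image-minimal" - passed to taskdata[depmc].add_provider()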
@@ -710,14 +693,14 @@ class BBCooker:
         bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
         return taskdata, runlist
 
-    def prepareTreeData(self, pkgs_to_build, task):
+    def prepareTreeData(self, pkgs_to_build, task, halt=False):
         """
         Prepare a runqueue and taskdata object for iteration over pkgs_to_build
         """
 
-        # We set abort to False here to prevent unbuildable targets raising
+        # We set halt to False here to prevent unbuildable targets raising
         # an exception when we're just generating data
-        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)
+        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, halt, allowincomplete=True)
 
         return runlist, taskdata
 
@@ -731,7 +714,7 @@ class BBCooker:
         if not task.startswith("do_"):
             task = "do_%s" % task
 
-        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
+        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task, halt=True)
         rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
         rq.rqdata.prepare()
         return self.buildDependTree(rq, taskdata)
@@ -792,7 +775,9 @@ class BBCooker:
             for dep in rq.rqdata.runtaskentries[tid].depends:
                 (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
                 deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
-                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
+                if depmc:
+                    depmc = "mc:" + depmc + ":"
+                depend_tree["tdepends"][dotname].append("%s%s.%s" % (depmc, deppn, bb.runqueue.taskname_from_tid(dep)))
             if taskfn not in seen_fns:
                 seen_fns.append(taskfn)
                 packages = []
@@ -924,10 +909,11 @@ class BBCooker:
 
         depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
 
-        with open('pn-buildlist', 'w') as f:
-            for pn in depgraph["pn"]:
-                f.write(pn + "\n")
-        logger.info("PN build list saved to 'pn-buildlist'")
+        pns = depgraph["pn"].keys()
+        if pns:
+            with open('pn-buildlist', 'w') as f:
+                f.write("%s\n" % "\n".join(sorted(pns)))
+            logger.info("PN build list saved to 'pn-buildlist'")
 
         # Remove old format output files to ensure no confusion with stale data
         try:
@@ -961,7 +947,7 @@ class BBCooker:
         for mc in self.multiconfigs:
             # First get list of recipes, including skipped
             recipefns = list(self.recipecaches[mc].pkg_fn.keys())
-            recipefns.extend(self.skiplist.keys())
+            recipefns.extend(self.skiplist_by_mc[mc].keys())
 
             # Work out list of bbappends that have been applied
             applied_appends = []
@@ -980,13 +966,7 @@ class BBCooker:
                                 '\n '.join(appends_without_recipes[mc])))
 
         if msgs:
-            msg = "\n".join(msgs)
-            warn_only = self.databuilder.mcdata[mc].getVar("BB_DANGLINGAPPENDS_WARNONLY", \
-                 False) or "no"
-            if warn_only.lower() in ("1", "yes", "true"):
-                bb.warn(msg)
-            else:
-                bb.fatal(msg)
+            bb.fatal("\n".join(msgs))
 
     def handlePrefProviders(self):
 
@@ -1056,6 +1036,11 @@ class BBCooker:
         if matches:
             bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
 
+    def testCookerCommandEvent(self, filepattern):
+        # Dummy command used by OEQA selftest to test tinfoil without IO
+        matches = ["A", "B"]
+        bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
+
     def findProviders(self, mc=''):
         return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
 
@@ -1063,10 +1048,16 @@ class BBCooker:
         if pn in self.recipecaches[mc].providers:
             filenames = self.recipecaches[mc].providers[pn]
             eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc])
-            filename = eligible[0]
+            if eligible is not None:
+                filename = eligible[0]
+            else:
+                filename = None
             return None, None, None, filename
         elif pn in self.recipecaches[mc].pkg_pn:
-            return bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
+            (latest, latest_f, preferred_ver, preferred_file, required) = bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
+            if required and preferred_file is None:
+                return None, None, None, None
+            return (latest, latest_f, preferred_ver, preferred_file)
         else:
             return None, None, None, None
 
@@ -1211,15 +1202,15 @@ class BBCooker:
                             except bb.utils.VersionStringException as vse:
                                 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                             if not res:
-                                parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
+                                parselog.debug3("Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
                                 continue
                         else:
-                            parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
+                            parselog.debug3("Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
                             continue
-                    parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
+                    parselog.debug3("Layer '%s' recommends layer '%s', so we are adding it", c, rec)
                     collection_depends[c].append(rec)
                 else:
-                    parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)
+                    parselog.debug3("Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)
 
         # Recursively work out collection priorities based on dependencies
         def calc_layer_priority(collection):
@@ -1231,7 +1222,7 @@ class BBCooker:
                 if depprio > max_depprio:
                     max_depprio = depprio
             max_depprio += 1
-            parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
+            parselog.debug("Calculated priority of layer %s as %d", collection, max_depprio)
             collection_priorities[collection] = max_depprio
 
         # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
@@ -1243,7 +1234,7 @@ class BBCooker:
                 errors = True
                 continue
             elif regex == "":
-                parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
+                parselog.debug("BBFILE_PATTERN_%s is empty" % c)
                 cre = re.compile('^NULL$')
                 errors = False
             else:
@@ -1290,8 +1281,8 @@ class BBCooker:
         if bf.startswith("/") or bf.startswith("../"):
             bf = os.path.abspath(bf)
 
-        self.collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)}
-        filelist, masked, searchdirs = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
+        collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)}
+        filelist, masked, searchdirs = collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
         try:
             os.stat(bf)
             bf = os.path.abspath(bf)
@@ -1355,9 +1346,10 @@ class BBCooker:
         self.buildSetVars()
         self.reset_mtime_caches()
 
-        bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
+        bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.databuilder.data_hash, self.caches_array)
 
-        infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn))
+        layername = self.collections[mc].calc_bbfile_priority(fn)[2]
+        infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn), layername)
         infos = dict(infos)
 
         fn = bb.cache.realfn2virtual(fn, cls, mc)
@@ -1383,14 +1375,16 @@ class BBCooker:
         self.recipecaches[mc].rundeps[fn] = defaultdict(list)
         self.recipecaches[mc].runrecs[fn] = defaultdict(list)
 
+        bb.parse.siggen.setup_datacache(self.recipecaches)
+
         # Invalidate task for target if force mode active
         if self.configuration.force:
             logger.verbose("Invalidate task %s, %s", task, fn)
-            bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)
+            bb.parse.siggen.invalidate_task(task, fn)
 
         # Setup taskdata structure
         taskdata = {}
-        taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
+        taskdata[mc] = bb.taskdata.TaskData(self.configuration.halt)
         taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)
 
         if quietlog:
@@ -1400,21 +1394,24 @@ class BBCooker:
         buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
         if fireevents:
             bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
+            if self.eventlog:
+                self.eventlog[2].write_variables()
+            bb.event.enable_heartbeat()
 
         # Execute the runqueue
         runlist = [[mc, item, task, fn]]
 
         rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
 
-        def buildFileIdle(server, rq, abort):
+        def buildFileIdle(server, rq, halt):
 
             msg = None
             interrupted = 0
-            if abort or self.state == state.forceshutdown:
+            if halt or self.state == State.FORCE_SHUTDOWN:
                 rq.finish_runqueue(True)
                 msg = "Forced shutdown"
                 interrupted = 2
-            elif self.state == state.shutdown:
+            elif self.state == State.SHUTDOWN:
                 rq.finish_runqueue(False)
                 msg = "Stopped build"
                 interrupted = 1
@@ -1425,41 +1422,70 @@ class BBCooker:
                 failures += len(exc.args)
                 retval = False
             except SystemExit as exc:
-                self.command.finishAsyncCommand(str(exc))
                 if quietlog:
                     bb.runqueue.logger.setLevel(rqloglevel)
-                return False
+                return bb.server.process.idleFinish(str(exc))
 
             if not retval:
                 if fireevents:
                     bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
-                self.command.finishAsyncCommand(msg)
+                    bb.event.disable_heartbeat()
                 # We trashed self.recipecaches above
-                self.parsecache_valid = False
+                self._parsecache_set(False)
                 self.configuration.limited_deps = False
                 bb.parse.siggen.reset(self.data)
                 if quietlog:
                     bb.runqueue.logger.setLevel(rqloglevel)
-                return False
-            if retval is True:
-                return True
+                return bb.server.process.idleFinish(msg)
+
             return retval
 
         self.idleCallBackRegister(buildFileIdle, rq)
 
+    def getTaskSignatures(self, target, tasks):
+        sig = []
+        getAllTaskSignatures = False
+
+        if not tasks:
+            tasks = ["do_build"]
+            getAllTaskSignatures = True
+
+        for task in tasks:
+            taskdata, runlist = self.buildTaskData(target, task, self.configuration.halt)
+            rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
+            rq.rqdata.prepare()
+
+            for l in runlist:
+                mc, pn, taskname, fn = l
+
+                taskdep = rq.rqdata.dataCaches[mc].task_deps[fn]
+                for t in taskdep['tasks']:
+                    if t in taskdep['nostamp'] or "setscene" in t:
+                        continue
+                    tid = bb.runqueue.build_tid(mc, fn, t)
+
+                    if t in task or getAllTaskSignatures:
+                        try:
+                            sig.append([pn, t, rq.rqdata.get_task_unihash(tid)])
+                        except KeyError:
+                            sig.append(self.getTaskSignatures(target, [t])[0])
+
+        return sig
+
     def buildTargets(self, targets, task):
         """
         Attempt to build the targets specified
         """
 
-        def buildTargetsIdle(server, rq, abort):
+        def buildTargetsIdle(server, rq, halt):
             msg = None
             interrupted = 0
-            if abort or self.state == state.forceshutdown:
+            if halt or self.state == State.FORCE_SHUTDOWN:
+                bb.event._should_exit.set()
                 rq.finish_runqueue(True)
                 msg = "Forced shutdown"
                 interrupted = 2
-            elif self.state == state.shutdown:
+            elif self.state == State.SHUTDOWN:
                 rq.finish_runqueue(False)
                 msg = "Stopped build"
                 interrupted = 1
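Note: a hedged usage sketch for the new getTaskSignatures() command; the target and task names below are illustrative, and calling the cooker method directly like this assumes code running inside the server process:

    # Returns [[pn, taskname, unihash], ...]; passing no tasks walks every
    # stamped, non-setscene task reachable from do_build.
    sigs = cooker.getTaskSignatures(["quilt-native"], ["do_fetch"])
    for pn, taskname, unihash in sigs:
        print(pn, taskname, unihash)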
@@ -1470,18 +1496,16 @@ class BBCooker:
                 failures += len(exc.args)
                 retval = False
             except SystemExit as exc:
-                self.command.finishAsyncCommand(str(exc))
-                return False
+                return bb.server.process.idleFinish(str(exc))
 
             if not retval:
                 try:
                     for mc in self.multiconfigs:
                         bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
                 finally:
-                    self.command.finishAsyncCommand(msg)
-                return False
-            if retval is True:
-                return True
+                    bb.event.disable_heartbeat()
+                return bb.server.process.idleFinish(msg)
+
             return retval
 
         self.reset_mtime_caches()
@@ -1498,7 +1522,7 @@ class BBCooker:
 
         bb.event.fire(bb.event.BuildInit(packages), self.data)
 
-        taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)
+        taskdata, runlist = self.buildTaskData(targets, task, self.configuration.halt)
 
         buildname = self.data.getVar("BUILDNAME", False)
 
@@ -1511,6 +1535,9 @@ class BBCooker:
 
         for mc in self.multiconfigs:
             bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
+            if self.eventlog:
+                self.eventlog[2].write_variables()
+            bb.event.enable_heartbeat()
 
         rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
         if 'universe' in targets:
@@ -1520,7 +1547,13 @@ class BBCooker:
 
 
     def getAllKeysWithFlags(self, flaglist):
+        def dummy_autorev(d):
+            return
+
         dump = {}
+        # Horrible but for now we need to avoid any sideeffects of autorev being called
+        saved = bb.fetch2.get_autorev
+        bb.fetch2.get_autorev = dummy_autorev
         for k in self.data.keys():
             try:
                 expand = True
@@ -1540,20 +1573,14 @@ class BBCooker:
                            dump[k][d] = None
             except Exception as e:
                 print(e)
+        bb.fetch2.get_autorev = saved
         return dump
 
 
     def updateCacheSync(self):
-        if self.state == state.running:
+        if self.state == State.RUNNING:
             return
 
-        # reload files for which we got notifications
-        for p in self.inotify_modified_files:
-            bb.parse.update_cache(p)
-            if p in bb.parse.BBHandler.cached_statements:
-                del bb.parse.BBHandler.cached_statements[p]
-        self.inotify_modified_files = []
-
         if not self.baseconfig_valid:
             logger.debug("Reloading base configuration data")
             self.initConfigurationData()
@@ -1561,19 +1588,22 @@ class BBCooker:
 
     # This is called for all async commands when self.state != running
     def updateCache(self):
-        if self.state == state.running:
+        if self.state == State.RUNNING:
             return
 
-        if self.state in (state.shutdown, state.forceshutdown, state.error):
+        if self.state in (State.SHUTDOWN, State.FORCE_SHUTDOWN, State.ERROR):
             if hasattr(self.parser, 'shutdown'):
-                self.parser.shutdown(clean=False, force = True)
+                self.parser.shutdown(clean=False)
                 self.parser.final_cleanup()
             raise bb.BBHandledException()
 
-        if self.state != state.parsing:
+        if self.state != State.PARSING:
             self.updateCacheSync()
 
-        if self.state != state.parsing and not self.parsecache_valid:
+        if self.state != State.PARSING and not self.parsecache_valid:
+            bb.server.process.serverlog("Parsing started")
+            self.parsewatched = {}
+
             bb.parse.siggen.reset(self.data)
             self.parseConfiguration ()
             if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
@@ -1587,37 +1617,35 @@ class BBCooker:
                 for dep in self.configuration.extra_assume_provided:
                     self.recipecaches[mc].ignored_dependencies.add(dep)
 
-            self.collections = {}
-
             mcfilelist = {}
             total_masked = 0
             searchdirs = set()
             for mc in self.multiconfigs:
-                self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)
                 (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
 
                 mcfilelist[mc] = filelist
                 total_masked += masked
                 searchdirs |= set(search)
 
-            # Add inotify watches for directories searched for bb/bbappend files
+            # Add mtimes for directories searched for bb/bbappend files
             for dirent in searchdirs:
-                self.add_filewatch([[dirent]], dirs=True)
+                self.add_filewatch([(dirent, bb.parse.cached_mtime_noerror(dirent))])
 
             self.parser = CookerParser(self, mcfilelist, total_masked)
-            self.parsecache_valid = True
+            self._parsecache_set(True)
 
-        self.state = state.parsing
+        self.state = State.PARSING
 
         if not self.parser.parse_next():
-            collectlog.debug(1, "parsing complete")
+            bb.server.process.serverlog("Parsing completed")
+            collectlog.debug("parsing complete")
             if self.parser.error:
                 raise bb.BBHandledException()
             self.show_appends_with_no_recipes()
            self.handlePrefProviders()
             for mc in self.multiconfigs:
                 self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data)
-            self.state = state.running
+            self.state = State.RUNNING
 
             # Send an event listing all stamps reachable after parsing
             # which the metadata may use to clean up stale data
@@ -1633,7 +1661,7 @@ class BBCooker:
1633 # Return a copy, don't modify the original 1661 # Return a copy, don't modify the original
1634 pkgs_to_build = pkgs_to_build[:] 1662 pkgs_to_build = pkgs_to_build[:]
1635 1663
1636 if len(pkgs_to_build) == 0: 1664 if not pkgs_to_build:
1637 raise NothingToBuild 1665 raise NothingToBuild
1638 1666
1639 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split() 1667 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
@@ -1655,7 +1683,7 @@ class BBCooker:
1655 1683
1656 if 'universe' in pkgs_to_build: 1684 if 'universe' in pkgs_to_build:
1657 parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.") 1685 parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
1658 parselog.debug(1, "collating packages for \"universe\"") 1686 parselog.debug("collating packages for \"universe\"")
1659 pkgs_to_build.remove('universe') 1687 pkgs_to_build.remove('universe')
1660 for mc in self.multiconfigs: 1688 for mc in self.multiconfigs:
1661 for t in self.recipecaches[mc].universe_target: 1689 for t in self.recipecaches[mc].universe_target:
@@ -1680,26 +1708,36 @@ class BBCooker:
1680 def post_serve(self): 1708 def post_serve(self):
1681 self.shutdown(force=True) 1709 self.shutdown(force=True)
1682 prserv.serv.auto_shutdown() 1710 prserv.serv.auto_shutdown()
1711 if hasattr(bb.parse, "siggen"):
1712 bb.parse.siggen.exit()
1683 if self.hashserv: 1713 if self.hashserv:
1684 self.hashserv.process.terminate() 1714 self.hashserv.process.terminate()
1685 self.hashserv.process.join() 1715 self.hashserv.process.join()
1686 if hasattr(self, "data"): 1716 if hasattr(self, "data"):
1687 bb.event.fire(CookerExit(), self.data) 1717 bb.event.fire(CookerExit(), self.data)
1688 1718
1689 def shutdown(self, force = False): 1719 def shutdown(self, force=False):
1690 if force: 1720 if force:
1691 self.state = state.forceshutdown 1721 self.state = State.FORCE_SHUTDOWN
1722 bb.event._should_exit.set()
1692 else: 1723 else:
1693 self.state = state.shutdown 1724 self.state = State.SHUTDOWN
1694 1725
1695 if self.parser: 1726 if self.parser:
1696 self.parser.shutdown(clean=not force, force=force) 1727 self.parser.shutdown(clean=False)
1697 self.parser.final_cleanup() 1728 self.parser.final_cleanup()
1698 1729
1699 def finishcommand(self): 1730 def finishcommand(self):
1700 self.state = state.initial 1731 if hasattr(self.parser, 'shutdown'):
1732 self.parser.shutdown(clean=False)
1733 self.parser.final_cleanup()
1734 self.state = State.INITIAL
1735 bb.event._should_exit.clear()
1701 1736
1702 def reset(self): 1737 def reset(self):
1738 if hasattr(bb.parse, "siggen"):
1739 bb.parse.siggen.exit()
1740 self.finishcommand()
1703 self.initConfigurationData() 1741 self.initConfigurationData()
1704 self.handlePRServ() 1742 self.handlePRServ()
1705 1743
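shutdown() now sets a global exit flag that finishcommand() clears. A sketch of that pairing with a hypothetical module-level threading.Event (the real flag lives in bb.event):

import threading

_should_exit = threading.Event()   # hypothetical module-level flag

def shutdown(force=False):
    if force:
        # Long-running loops elsewhere poll the flag and bail out early.
        _should_exit.set()

def finishcommand():
    # Clear the flag so the next command starts from a clean slate.
    _should_exit.clear()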
@@ -1711,9 +1749,9 @@ class BBCooker:
1711 if hasattr(self, "data"): 1749 if hasattr(self, "data"):
1712 self.databuilder.reset() 1750 self.databuilder.reset()
1713 self.data = self.databuilder.data 1751 self.data = self.databuilder.data
1714 self.parsecache_valid = False 1752 # In theory tinfoil could have modified the base data before parsing,
1715 self.baseconfig_valid = False 1753 # ideally need to track if anything did modify the datastore
1716 1754 self._parsecache_set(False)
1717 1755
1718class CookerExit(bb.event.Event): 1756class CookerExit(bb.event.Event):
1719 """ 1757 """
@@ -1728,16 +1766,16 @@ class CookerCollectFiles(object):
1728 def __init__(self, priorities, mc=''): 1766 def __init__(self, priorities, mc=''):
1729 self.mc = mc 1767 self.mc = mc
1730 self.bbappends = [] 1768 self.bbappends = []
1731 # Priorities is a list of tupples, with the second element as the pattern. 1769 # Priorities is a list of tuples, with the second element as the pattern.
1732 # We need to sort the list with the longest pattern first, and so on to 1770 # We need to sort the list with the longest pattern first, and so on to
1733 # the shortest. This allows nested layers to be properly evaluated. 1771 # the shortest. This allows nested layers to be properly evaluated.
1734 self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True) 1772 self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
1735 1773
1736 def calc_bbfile_priority(self, filename): 1774 def calc_bbfile_priority(self, filename):
1737 for _, _, regex, pri in self.bbfile_config_priorities: 1775 for layername, _, regex, pri in self.bbfile_config_priorities:
1738 if regex.match(filename): 1776 if regex.match(filename):
1739 return pri, regex 1777 return pri, regex, layername
1740 return 0, None 1778 return 0, None, None
1741 1779
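calc_bbfile_priority() above now also returns the layer name; the first (most specific) matching pattern wins because the table was sorted longest-pattern-first. A self-contained sketch with a hypothetical two-layer table:

import re

# Hypothetical priority table entries: (layername, pattern, priority).
raw = [("meta", "^/layers/meta/", 5),
       ("meta-custom", "^/layers/meta/meta-custom/", 10)]
# Sort with the longer (nested) pattern first, as the comment above describes.
prios = sorted(((name, pat, re.compile(pat), pri) for name, pat, pri in raw),
               key=lambda t: t[1], reverse=True)

def calc_priority(filename):
    # First match wins; fall through to a zero priority like the code above.
    for layername, _, regex, pri in prios:
        if regex.match(filename):
            return pri, regex, layername
    return 0, None, None

print(calc_priority("/layers/meta/meta-custom/foo.bb")[0])   # 10
print(calc_priority("/layers/meta/other.bb")[0])             # 5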
1742 def get_bbfiles(self): 1780 def get_bbfiles(self):
1743 """Get list of default .bb files by reading out the current directory""" 1781 """Get list of default .bb files by reading out the current directory"""
@@ -1756,7 +1794,7 @@ class CookerCollectFiles(object):
1756 for ignored in ('SCCS', 'CVS', '.svn'): 1794 for ignored in ('SCCS', 'CVS', '.svn'):
1757 if ignored in dirs: 1795 if ignored in dirs:
1758 dirs.remove(ignored) 1796 dirs.remove(ignored)
1759 found += [os.path.join(dir, f) for f in files if (f.endswith(['.bb', '.bbappend']))] 1797 found += [os.path.join(dir, f) for f in files if (f.endswith(('.bb', '.bbappend')))]
1760 1798
1761 return found 1799 return found
1762 1800
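The endswith() change above is a real bug fix, not a style tweak: str.endswith() accepts a str or a tuple of suffixes, and a list raises TypeError at runtime. A quick demonstration:

# str.endswith() takes a tuple of suffixes; a list raises TypeError.
print("x.bbappend".endswith(('.bb', '.bbappend')))   # True
try:
    "x.bb".endswith(['.bb', '.bbappend'])
except TypeError as exc:
    print("list rejected:", exc)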
@@ -1764,7 +1802,7 @@ class CookerCollectFiles(object):
1764 """Collect all available .bb build files""" 1802 """Collect all available .bb build files"""
1765 masked = 0 1803 masked = 0
1766 1804
1767 collectlog.debug(1, "collecting .bb files") 1805 collectlog.debug("collecting .bb files")
1768 1806
1769 files = (config.getVar( "BBFILES") or "").split() 1807 files = (config.getVar( "BBFILES") or "").split()
1770 1808
@@ -1772,16 +1810,16 @@ class CookerCollectFiles(object):
1772 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0] ) 1810 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0] )
1773 config.setVar("BBFILES_PRIORITIZED", " ".join(files)) 1811 config.setVar("BBFILES_PRIORITIZED", " ".join(files))
1774 1812
1775 if not len(files): 1813 if not files:
1776 files = self.get_bbfiles() 1814 files = self.get_bbfiles()
1777 1815
1778 if not len(files): 1816 if not files:
1779 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?") 1817 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
1780 bb.event.fire(CookerExit(), eventdata) 1818 bb.event.fire(CookerExit(), eventdata)
1781 1819
1782 # We need to track where we look so that we can add inotify watches. There 1820 # We need to track where we look so that we can know when the cache is invalid. There
1783 # is no nice way to do this; it is horrid. We intercept the os.listdir() 1821 # is no nice way to do this; it is horrid. We intercept the os.listdir() and os.scandir()
1784 # (or os.scandir() for python 3.6+) calls while we run glob(). 1822 # calls while we run glob().
1785 origlistdir = os.listdir 1823 origlistdir = os.listdir
1786 if hasattr(os, 'scandir'): 1824 if hasattr(os, 'scandir'):
1787 origscandir = os.scandir 1825 origscandir = os.scandir
@@ -1835,7 +1873,7 @@ class CookerCollectFiles(object):
1835 try: 1873 try:
1836 re.compile(mask) 1874 re.compile(mask)
1837 bbmasks.append(mask) 1875 bbmasks.append(mask)
1838 except sre_constants.error: 1876 except re.error:
1839 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask) 1877 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
1840 1878
1841 # Then validate the combined regular expressions. This should never 1879 # Then validate the combined regular expressions. This should never
@@ -1843,7 +1881,7 @@ class CookerCollectFiles(object):
1843 bbmask = "|".join(bbmasks) 1881 bbmask = "|".join(bbmasks)
1844 try: 1882 try:
1845 bbmask_compiled = re.compile(bbmask) 1883 bbmask_compiled = re.compile(bbmask)
1846 except sre_constants.error: 1884 except re.error:
1847 collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask) 1885 collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
1848 bbmask = None 1886 bbmask = None
1849 1887
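The two hunks above validate each BBMASK entry on its own and then validate the OR-joined combination. A condensed, standalone sketch of that double validation (not the collector's API):

import re

def compile_masks(masks):
    good = []
    for mask in masks:
        try:
            re.compile(mask)               # validate each mask individually
            good.append(mask)
        except re.error:
            print("ignoring invalid mask: %s" % mask)
    if not good:
        return None
    try:
        return re.compile("|".join(good))  # then validate the combination
    except re.error:
        return None

matcher = compile_masks([r"meta-skip/.*\.bb$", r"*broken"])
print(bool(matcher and matcher.search("meta-skip/x.bb")))   # True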
@@ -1851,7 +1889,7 @@ class CookerCollectFiles(object):
1851 bbappend = [] 1889 bbappend = []
1852 for f in newfiles: 1890 for f in newfiles:
1853 if bbmask and bbmask_compiled.search(f): 1891 if bbmask and bbmask_compiled.search(f):
1854 collectlog.debug(1, "skipping masked file %s", f) 1892 collectlog.debug("skipping masked file %s", f)
1855 masked += 1 1893 masked += 1
1856 continue 1894 continue
1857 if f.endswith('.bb'): 1895 if f.endswith('.bb'):
@@ -1859,7 +1897,7 @@ class CookerCollectFiles(object):
1859 elif f.endswith('.bbappend'): 1897 elif f.endswith('.bbappend'):
1860 bbappend.append(f) 1898 bbappend.append(f)
1861 else: 1899 else:
1862 collectlog.debug(1, "skipping %s: unknown file extension", f) 1900 collectlog.debug("skipping %s: unknown file extension", f)
1863 1901
1864 # Build a list of .bbappend files for each .bb file 1902 # Build a list of .bbappend files for each .bb file
1865 for f in bbappend: 1903 for f in bbappend:
@@ -1910,7 +1948,7 @@ class CookerCollectFiles(object):
1910 # Calculate priorities for each file 1948 # Calculate priorities for each file
1911 for p in pkgfns: 1949 for p in pkgfns:
1912 realfn, cls, mc = bb.cache.virtualfn2realfn(p) 1950 realfn, cls, mc = bb.cache.virtualfn2realfn(p)
1913 priorities[p], regex = self.calc_bbfile_priority(realfn) 1951 priorities[p], regex, _ = self.calc_bbfile_priority(realfn)
1914 if regex in unmatched_regex: 1952 if regex in unmatched_regex:
1915 matched_regex.add(regex) 1953 matched_regex.add(regex)
1916 unmatched_regex.remove(regex) 1954 unmatched_regex.remove(regex)
@@ -1961,66 +1999,85 @@ class ParsingFailure(Exception):
1961 Exception.__init__(self, realexception, recipe) 1999 Exception.__init__(self, realexception, recipe)
1962 2000
1963class Parser(multiprocessing.Process): 2001class Parser(multiprocessing.Process):
1964 def __init__(self, jobs, results, quit, init, profile): 2002 def __init__(self, jobs, next_job_id, results, quit, profile):
1965 self.jobs = jobs 2003 self.jobs = jobs
2004 self.next_job_id = next_job_id
1966 self.results = results 2005 self.results = results
1967 self.quit = quit 2006 self.quit = quit
1968 self.init = init
1969 multiprocessing.Process.__init__(self) 2007 multiprocessing.Process.__init__(self)
1970 self.context = bb.utils.get_context().copy() 2008 self.context = bb.utils.get_context().copy()
1971 self.handlers = bb.event.get_class_handlers().copy() 2009 self.handlers = bb.event.get_class_handlers().copy()
1972 self.profile = profile 2010 self.profile = profile
2011 self.queue_signals = False
2012 self.signal_received = []
2013 self.signal_threadlock = threading.Lock()
2014 self.exit = False
2015
2016 def catch_sig(self, signum, frame):
2017 if self.queue_signals:
2018 self.signal_received.append(signum)
2019 else:
2020 self.handle_sig(signum, frame)
2021
2022 def handle_sig(self, signum, frame):
2023 if signum == signal.SIGTERM:
2024 signal.signal(signal.SIGTERM, signal.SIG_DFL)
2025 os.kill(os.getpid(), signal.SIGTERM)
2026 elif signum == signal.SIGINT:
2027 self.exit = True
1973 2028
1974 def run(self): 2029 def run(self):
2030 bb.utils.profile_function("parsing" in self.profile, self.realrun, "profile-parse-%s.log" % multiprocessing.current_process().name, process=False)
1975 2031
1976 if not self.profile: 2032 def realrun(self):
1977 self.realrun() 2033 # Signal handling here is hard. We must not terminate any process or thread holding the write
1978 return 2034 # lock for the event stream as it will not be released, ever, and things will hang.
2035 # Python handles signals in the main thread/process but they can be raised from any thread and
2036 # we want to defer processing of any SIGTERM/SIGINT signal until we're outside the critical section
2037 # and don't hold the lock (see server/process.py). We therefore always catch the signals (so any
2038 # new thread should also do so) and we defer handling but we handle with the local thread lock
2039 # held (a threading lock, not a multiprocessing one) so that no other thread in the process
2040 # can be in the critical section.
2041 signal.signal(signal.SIGTERM, self.catch_sig)
2042 signal.signal(signal.SIGHUP, signal.SIG_DFL)
2043 signal.signal(signal.SIGINT, self.catch_sig)
2044 bb.utils.set_process_name(multiprocessing.current_process().name)
2045 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2046 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
1979 2047
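A minimal sketch of the defer-and-replay pattern the comment block above describes: while a critical section holds a (threading) lock, signals are only queued; they are replayed with the lock still held so no other thread can be inside the section. Names here are illustrative, not the Parser's:

import signal
import threading

class SigDefer:
    def __init__(self):
        self.queue_signals = False
        self.pending = []
        self.lock = threading.Lock()

    def catch(self, signum, frame):
        # Installed as the handler; record or act depending on the mode.
        if self.queue_signals:
            self.pending.append(signum)
        else:
            self.handle(signum)

    def handle(self, signum):
        print("handling signal %d" % signum)

    def critical_section(self, func):
        with self.lock:
            self.queue_signals = True
            try:
                return func()
            finally:
                # Replay deferred signals with the lock still held.
                self.queue_signals = False
                while self.pending:
                    self.handle(self.pending.pop(0))

sd = SigDefer()
signal.signal(signal.SIGINT, sd.catch)
sd.critical_section(lambda: "work done inside the critical section")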
2048 pending = []
2049 havejobs = True
1980 try: 2050 try:
1981 import cProfile as profile 2051 while (havejobs or pending) and not self.exit:
1982 except: 2052 if self.quit.is_set():
1983 import profile 2053 break
1984 prof = profile.Profile()
1985 try:
1986 profile.Profile.runcall(prof, self.realrun)
1987 finally:
1988 logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
1989 prof.dump_stats(logfile)
1990 2054
1991 def realrun(self): 2055 job = None
1992 if self.init: 2056 if havejobs:
1993 self.init() 2057 with self.next_job_id.get_lock():
2058 if self.next_job_id.value < len(self.jobs):
2059 job = self.jobs[self.next_job_id.value]
2060 self.next_job_id.value += 1
2061 else:
2062 havejobs = False
1994 2063
1995 pending = [] 2064 if job:
1996 while True: 2065 result = self.parse(*job)
1997 try: 2066 # Clear the siggen cache after parsing to control memory usage, its huge
1998 self.quit.get_nowait() 2067 # Clear the siggen cache after parsing to control memory usage, it's huge
1999 except queue.Empty: 2068 pending.append(result)
2000 pass
2001 else:
2002 self.results.close()
2003 self.results.join_thread()
2004 break
2005 2069
2006 if pending: 2070 if pending:
2007 result = pending.pop() 2071 try:
2008 else: 2072 result = pending.pop()
2009 try: 2073 self.results.put(result, timeout=0.05)
2010 job = self.jobs.pop() 2074 except queue.Full:
2011 except IndexError: 2075 pending.append(result)
2012 self.results.close() 2076 finally:
2013 self.results.join_thread() 2077 self.results.close()
2014 break 2078 self.results.join_thread()
2015 result = self.parse(*job)
2016 # Clear the siggen cache after parsing to control memory usage, it's huge
2017 bb.parse.siggen.postparsing_clean_cache()
2018 try:
2019 self.results.put(result, timeout=0.25)
2020 except queue.Full:
2021 pending.append(result)
2022 2079
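Instead of pre-chunked per-process job lists fed through a queue, every Parser now inherits the full willparse list at fork time and claims work by bumping a shared multiprocessing.Value under its lock. A self-contained sketch of that scheme:

import ctypes
import multiprocessing

def worker(jobs, next_job_id):
    # Claim the next index under the Value's lock; the job list itself is
    # available in every child, so nothing unpicklable crosses a queue.
    while True:
        with next_job_id.get_lock():
            if next_job_id.value >= len(jobs):
                return
            job = jobs[next_job_id.value]
            next_job_id.value += 1
        print("%s -> %s" % (multiprocessing.current_process().name, job))

if __name__ == "__main__":
    jobs = ["a.bb", "b.bb", "c.bb", "d.bb"]
    counter = multiprocessing.Value(ctypes.c_int, 0)
    procs = [multiprocessing.Process(target=worker, args=(jobs, counter))
             for _ in range(2)]
    for p in procs:
        p.start()
    for p in procs:
        p.join()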
2023 def parse(self, mc, cache, filename, appends): 2080 def parse(self, mc, cache, filename, appends, layername):
2024 try: 2081 try:
2025 origfilter = bb.event.LogHandler.filter 2082 origfilter = bb.event.LogHandler.filter
2026 # Record the filename we're parsing into any events generated 2083 # Record the filename we're parsing into any events generated
@@ -2034,17 +2091,16 @@ class Parser(multiprocessing.Process):
2034 bb.event.set_class_handlers(self.handlers.copy()) 2091 bb.event.set_class_handlers(self.handlers.copy())
2035 bb.event.LogHandler.filter = parse_filter 2092 bb.event.LogHandler.filter = parse_filter
2036 2093
2037 return True, mc, cache.parse(filename, appends) 2094 return True, mc, cache.parse(filename, appends, layername)
2038 except Exception as exc: 2095 except Exception as exc:
2039 tb = sys.exc_info()[2] 2096 tb = sys.exc_info()[2]
2040 exc.recipe = filename 2097 exc.recipe = filename
2041 exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3)) 2098 return True, None, exc
2042 return True, exc
2043 # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown 2099 # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
2044 # and for example a worker thread doesn't just exit on its own in response to 2100 # and for example a worker thread doesn't just exit on its own in response to
2045 # a SystemExit event for example. 2101 # a SystemExit event for example.
2046 except BaseException as exc: 2102 except BaseException as exc:
2047 return True, ParsingFailure(exc, filename) 2103 return True, None, ParsingFailure(exc, filename)
2048 finally: 2104 finally:
2049 bb.event.LogHandler.filter = origfilter 2105 bb.event.LogHandler.filter = origfilter
2050 2106
@@ -2053,7 +2109,7 @@ class CookerParser(object):
2053 self.mcfilelist = mcfilelist 2109 self.mcfilelist = mcfilelist
2054 self.cooker = cooker 2110 self.cooker = cooker
2055 self.cfgdata = cooker.data 2111 self.cfgdata = cooker.data
2056 self.cfghash = cooker.data_hash 2112 self.cfghash = cooker.databuilder.data_hash
2057 self.cfgbuilder = cooker.databuilder 2113 self.cfgbuilder = cooker.databuilder
2058 2114
2059 # Accounting statistics 2115 # Accounting statistics
@@ -2070,14 +2126,15 @@ class CookerParser(object):
2070 2126
2071 self.bb_caches = bb.cache.MulticonfigCache(self.cfgbuilder, self.cfghash, cooker.caches_array) 2127 self.bb_caches = bb.cache.MulticonfigCache(self.cfgbuilder, self.cfghash, cooker.caches_array)
2072 self.fromcache = set() 2128 self.fromcache = set()
2073 self.willparse = set() 2129 self.willparse = []
2074 for mc in self.cooker.multiconfigs: 2130 for mc in self.cooker.multiconfigs:
2075 for filename in self.mcfilelist[mc]: 2131 for filename in self.mcfilelist[mc]:
2076 appends = self.cooker.collections[mc].get_file_appends(filename) 2132 appends = self.cooker.collections[mc].get_file_appends(filename)
2133 layername = self.cooker.collections[mc].calc_bbfile_priority(filename)[2]
2077 if not self.bb_caches[mc].cacheValid(filename, appends): 2134 if not self.bb_caches[mc].cacheValid(filename, appends):
2078 self.willparse.add((mc, self.bb_caches[mc], filename, appends)) 2135 self.willparse.append((mc, self.bb_caches[mc], filename, appends, layername))
2079 else: 2136 else:
2080 self.fromcache.add((mc, self.bb_caches[mc], filename, appends)) 2137 self.fromcache.add((mc, self.bb_caches[mc], filename, appends, layername))
2081 2138
2082 self.total = len(self.fromcache) + len(self.willparse) 2139 self.total = len(self.fromcache) + len(self.willparse)
2083 self.toparse = len(self.willparse) 2140 self.toparse = len(self.willparse)
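The constructor above partitions files by cache validity: still-valid entries go to the fromcache set, the rest into the now-ordered willparse list (ordered so the shared job counter can index it). A sketch with stand-in data and predicate:

def cache_valid(filename):
    # Stand-in for the bb_caches[mc].cacheValid(filename, appends) check.
    return filename.endswith("-cached.bb")

mcfilelist = {"": ["a.bb", "b-cached.bb"]}     # hypothetical input
willparse, fromcache = [], set()
for mc, files in mcfilelist.items():
    for filename in files:
        if cache_valid(filename):
            fromcache.add((mc, filename))
        else:
            willparse.append((mc, filename))

total = len(fromcache) + len(willparse)
print(total, len(willparse))   # 2 1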
@@ -2086,6 +2143,7 @@ class CookerParser(object):
2086 self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or 2143 self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
2087 multiprocessing.cpu_count()), self.toparse) 2144 multiprocessing.cpu_count()), self.toparse)
2088 2145
2146 bb.cache.SiggenRecipeInfo.reset()
2089 self.start() 2147 self.start()
2090 self.haveshutdown = False 2148 self.haveshutdown = False
2091 self.syncthread = None 2149 self.syncthread = None
@@ -2093,32 +2151,25 @@ class CookerParser(object):
2093 def start(self): 2151 def start(self):
2094 self.results = self.load_cached() 2152 self.results = self.load_cached()
2095 self.processes = [] 2153 self.processes = []
2154
2096 if self.toparse: 2155 if self.toparse:
2097 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata) 2156 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
2098 def init():
2099 signal.signal(signal.SIGTERM, signal.SIG_DFL)
2100 signal.signal(signal.SIGHUP, signal.SIG_DFL)
2101 signal.signal(signal.SIGINT, signal.SIG_IGN)
2102 bb.utils.set_process_name(multiprocessing.current_process().name)
2103 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2104 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
2105
2106 self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
2107 self.result_queue = multiprocessing.Queue()
2108 2157
2109 def chunkify(lst,n): 2158 next_job_id = multiprocessing.Value(ctypes.c_int, 0)
2110 return [lst[i::n] for i in range(n)] 2159 self.parser_quit = multiprocessing.Event()
2111 self.jobs = chunkify(list(self.willparse), self.num_processes) 2160 self.result_queue = multiprocessing.Queue()
2112 2161
2162 # Have to pass in willparse at fork time so all parsing processes have the unpickleable data,
2163 # then access it by index from the parse queue.
2113 for i in range(0, self.num_processes): 2164 for i in range(0, self.num_processes):
2114 parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, init, self.cooker.configuration.profile) 2165 parser = Parser(self.willparse, next_job_id, self.result_queue, self.parser_quit, self.cooker.configuration.profile)
2115 parser.start() 2166 parser.start()
2116 self.process_names.append(parser.name) 2167 self.process_names.append(parser.name)
2117 self.processes.append(parser) 2168 self.processes.append(parser)
2118 2169
2119 self.results = itertools.chain(self.results, self.parse_generator()) 2170 self.results = itertools.chain(self.results, self.parse_generator())
2120 2171
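start() chains the cached results in front of the live parser output so consumers iterate a single stream; a sketch of the itertools.chain pattern with dummy generators:

import itertools

def load_cached():
    # Results replayed from the parse cache.
    yield from ((False, "mc", "cached-%d" % i) for i in range(2))

def parse_generator():
    # Results arriving from live parser processes.
    yield from ((True, "mc", "parsed-%d" % i) for i in range(2))

# One uniform stream; callers never care where a result came from.
for parsed, mc, info in itertools.chain(load_cached(), parse_generator()):
    print(parsed, info)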
2121 def shutdown(self, clean=True, force=False): 2172 def shutdown(self, clean=True, eventmsg="Parsing halted due to errors"):
2122 if not self.toparse: 2173 if not self.toparse:
2123 return 2174 return
2124 if self.haveshutdown: 2175 if self.haveshutdown:
@@ -2132,118 +2183,174 @@ class CookerParser(object):
2132 self.total) 2183 self.total)
2133 2184
2134 bb.event.fire(event, self.cfgdata) 2185 bb.event.fire(event, self.cfgdata)
2135 2186 else:
2136 for process in self.processes: 2187 bb.event.fire(bb.event.ParseError(eventmsg), self.cfgdata)
2137 self.parser_quit.put(None) 2188 bb.error("Parsing halted due to errors, see error messages above")
2138 2189
2139 # Cleanup the queue before call process.join(), otherwise there might be 2190 # Cleanup the queue before call process.join(), otherwise there might be
2140 # deadlocks. 2191 # deadlocks.
2141 while True: 2192 def read_results():
2142 try: 2193 while True:
2143 self.result_queue.get(timeout=0.25) 2194 try:
2144 except queue.Empty: 2195 self.result_queue.get(timeout=0.25)
2145 break 2196 except queue.Empty:
2146 2197 break
2147 for process in self.processes: 2198 except KeyError:
2148 if force: 2199 # Restoring state from SiggenRecipeInfo in cache.py can
2149 process.join(.1) 2200 # fail here if this is an unclean shutdown since the state may have been
2150 process.terminate() 2201 # reset. Ignore key errors for that reason; we don't care.
2151 else: 2202 pass
2152 process.join()
2153
2154 self.parser_quit.close()
2155 # Allow data left in the cancel queue to be discarded
2156 self.parser_quit.cancel_join_thread()
2157 2203
2158 def sync_caches(): 2204 def sync_caches():
2159 for c in self.bb_caches.values(): 2205 for c in self.bb_caches.values():
2206 bb.cache.SiggenRecipeInfo.reset()
2160 c.sync() 2207 c.sync()
2161 2208
2162 sync = threading.Thread(target=sync_caches, name="SyncThread") 2209 self.syncthread = threading.Thread(target=sync_caches, name="SyncThread")
2163 self.syncthread = sync 2210 self.syncthread.start()
2164 sync.start() 2211
2212 self.parser_quit.set()
2213
2214 read_results()
2215
2216 for process in self.processes:
2217 process.join(2)
2218
2219 for process in self.processes:
2220 if process.exitcode is None:
2221 os.kill(process.pid, signal.SIGINT)
2222
2223 read_results()
2224
2225 for process in self.processes:
2226 process.join(2)
2227
2228 for process in self.processes:
2229 if process.exitcode is None:
2230 process.terminate()
2231
2232 for process in self.processes:
2233 process.join()
2234 # clean up zombies
2235 process.close()
2236
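The replacement shutdown sequence above escalates in stages; condensed into one sketch (the drain callback stands in for read_results()):

import os
import signal

def stop_workers(processes, drain):
    # Escalate gently: drain results, brief join, SIGINT any stragglers,
    # drain and join again, then terminate whatever is still alive.
    drain()
    for p in processes:
        p.join(2)
    for p in processes:
        if p.exitcode is None:
            os.kill(p.pid, signal.SIGINT)
    drain()
    for p in processes:
        p.join(2)
    for p in processes:
        if p.exitcode is None:
            p.terminate()
    for p in processes:
        p.join()
        p.close()   # release the Process object and reap the zombie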
2237 bb.codeparser.parser_cache_save()
2165 bb.codeparser.parser_cache_savemerge() 2238 bb.codeparser.parser_cache_savemerge()
2239 bb.cache.SiggenRecipeInfo.reset()
2166 bb.fetch.fetcher_parse_done() 2240 bb.fetch.fetcher_parse_done()
2167 if self.cooker.configuration.profile: 2241 if self.cooker.configuration.profile:
2168 profiles = [] 2242 profiles = []
2169 for i in self.process_names: 2243 for i in self.process_names:
2170 logfile = "profile-parse-%s.log" % i 2244 logfile = "profile-parse-%s.log" % i
2171 if os.path.exists(logfile): 2245 if os.path.exists(logfile) and os.path.getsize(logfile):
2172 profiles.append(logfile) 2246 profiles.append(logfile)
2173 2247
2174 pout = "profile-parse.log.processed" 2248 if profiles:
2175 bb.utils.process_profilelog(profiles, pout = pout) 2249 fn_out = "profile-parse.log.report"
2176 print("Processed parsing statistics saved to %s" % (pout)) 2250 bb.utils.process_profilelog(profiles, fn_out=fn_out)
2251 print("Processed parsing statistics saved to %s" % (fn_out))
2177 2252
2178 def final_cleanup(self): 2253 def final_cleanup(self):
2179 if self.syncthread: 2254 if self.syncthread:
2180 self.syncthread.join() 2255 self.syncthread.join()
2181 2256
2182 def load_cached(self): 2257 def load_cached(self):
2183 for mc, cache, filename, appends in self.fromcache: 2258 for mc, cache, filename, appends, layername in self.fromcache:
2184 cached, infos = cache.load(filename, appends) 2259 infos = cache.loadCached(filename, appends)
2185 yield not cached, mc, infos 2260 yield False, mc, infos
2186 2261
2187 def parse_generator(self): 2262 def parse_generator(self):
2188 while True: 2263 empty = False
2264 while self.processes or not empty:
2265 for process in self.processes.copy():
2266 if not process.is_alive():
2267 process.join()
2268 self.processes.remove(process)
2269
2189 if self.parsed >= self.toparse: 2270 if self.parsed >= self.toparse:
2190 break 2271 break
2191 2272
2192 try: 2273 try:
2193 result = self.result_queue.get(timeout=0.25) 2274 result = self.result_queue.get(timeout=0.25)
2194 except queue.Empty: 2275 except queue.Empty:
2195 pass 2276 empty = True
2277 yield None, None, None
2196 else: 2278 else:
2197 value = result[1] 2279 empty = False
2198 if isinstance(value, BaseException): 2280 yield result
2199 raise value 2281
2200 else: 2282 if not (self.parsed >= self.toparse):
2201 yield result 2283 raise bb.parse.ParseError("Not all recipes parsed, parser thread killed/died? (%s %s of %s) Exiting." % (len(self.processes), self.parsed, self.toparse), None)
2284
2202 2285
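parse_generator() now reaps dead parser processes and yields a (None, None, None) sentinel on queue timeouts so parse_next() can return to the server's main loop instead of blocking. A sketch of the polling shape:

import queue

def results_generator(result_queue, expected):
    received = 0
    while received < expected:
        try:
            item = result_queue.get(timeout=0.25)
        except queue.Empty:
            # Timeout: hand control back to the caller with a sentinel.
            yield None
        else:
            received += 1
            yield item

q = queue.Queue()
q.put("result")
for r in results_generator(q, expected=1):
    print(r)   # "result" (plus None sentinels whenever the queue is empty)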
2203 def parse_next(self): 2286 def parse_next(self):
2204 result = [] 2287 result = []
2205 parsed = None 2288 parsed = None
2206 try: 2289 try:
2207 parsed, mc, result = next(self.results) 2290 parsed, mc, result = next(self.results)
2291 if isinstance(result, BaseException):
2292 # Turn exceptions passed back as results into raised exceptions
2293 raise result
2294 if parsed is None:
2295 # Timeout, loop back through the main loop
2296 return True
2297
2208 except StopIteration: 2298 except StopIteration:
2209 self.shutdown() 2299 self.shutdown()
2210 return False 2300 return False
2211 except bb.BBHandledException as exc: 2301 except bb.BBHandledException as exc:
2212 self.error += 1 2302 self.error += 1
2213 logger.error('Failed to parse recipe: %s' % exc.recipe) 2303 logger.debug('Failed to parse recipe: %s' % exc.recipe)
2214 self.shutdown(clean=False, force=True) 2304 self.shutdown(clean=False)
2215 return False 2305 return False
2216 except ParsingFailure as exc: 2306 except ParsingFailure as exc:
2217 self.error += 1 2307 self.error += 1
2218 logger.error('Unable to parse %s: %s' % 2308
2219 (exc.recipe, bb.exceptions.to_string(exc.realexception))) 2309 exc_desc = str(exc)
2220 self.shutdown(clean=False, force=True) 2310 if isinstance(exc, SystemExit) and not isinstance(exc.code, str):
2311 exc_desc = 'Exited with "%d"' % exc.code
2312
2313 logger.error('Unable to parse %s: %s' % (exc.recipe, exc_desc))
2314 self.shutdown(clean=False)
2221 return False 2315 return False
2222 except bb.parse.ParseError as exc: 2316 except bb.parse.ParseError as exc:
2223 self.error += 1 2317 self.error += 1
2224 logger.error(str(exc)) 2318 logger.error(str(exc))
2225 self.shutdown(clean=False, force=True) 2319 self.shutdown(clean=False, eventmsg=str(exc))
2226 return False 2320 return False
2227 except bb.data_smart.ExpansionError as exc: 2321 except bb.data_smart.ExpansionError as exc:
2322 def skip_frames(f, fn_prefix):
2323 while f and f.tb_frame.f_code.co_filename.startswith(fn_prefix):
2324 f = f.tb_next
2325 return f
2326
2228 self.error += 1 2327 self.error += 1
2229 bbdir = os.path.dirname(__file__) + os.sep 2328 bbdir = os.path.dirname(__file__) + os.sep
2230 etype, value, _ = sys.exc_info() 2329 etype, value, tb = sys.exc_info()
2231 tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback)) 2330
2331 # Remove any frames where the code comes from bitbake. This
2332 # prevents deep (and pretty useless) backtraces for expansion error
2333 tb = skip_frames(tb, bbdir)
2334 cur = tb
2335 while cur:
2336 cur.tb_next = skip_frames(cur.tb_next, bbdir)
2337 cur = cur.tb_next
2338
2232 logger.error('ExpansionError during parsing %s', value.recipe, 2339 logger.error('ExpansionError during parsing %s', value.recipe,
2233 exc_info=(etype, value, tb)) 2340 exc_info=(etype, value, tb))
2234 self.shutdown(clean=False, force=True) 2341 self.shutdown(clean=False)
2235 return False 2342 return False
2236 except Exception as exc: 2343 except Exception as exc:
2237 self.error += 1 2344 self.error += 1
2238 etype, value, tb = sys.exc_info() 2345 _, value, _ = sys.exc_info()
2239 if hasattr(value, "recipe"): 2346 if hasattr(value, "recipe"):
2240 logger.error('Unable to parse %s' % value.recipe, 2347 logger.error('Unable to parse %s' % value.recipe,
2241 exc_info=(etype, value, exc.traceback)) 2348 exc_info=sys.exc_info())
2242 else: 2349 else:
2243 # Most likely, an exception occurred during raising an exception 2350 # Most likely, an exception occurred during raising an exception
2244 import traceback 2351 import traceback
2245 logger.error('Exception during parse: %s' % traceback.format_exc()) 2352 logger.error('Exception during parse: %s' % traceback.format_exc())
2246 self.shutdown(clean=False, force=True) 2353 self.shutdown(clean=False)
2247 return False 2354 return False
2248 2355
2249 self.current += 1 2356 self.current += 1
@@ -2259,17 +2366,19 @@ class CookerParser(object):
2259 for virtualfn, info_array in result: 2366 for virtualfn, info_array in result:
2260 if info_array[0].skipped: 2367 if info_array[0].skipped:
2261 self.skipped += 1 2368 self.skipped += 1
2262 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0]) 2369 self.cooker.skiplist_by_mc[mc][virtualfn] = SkippedPackage(info_array[0])
2263 self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc], 2370 self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
2264 parsed=parsed, watcher = self.cooker.add_filewatch) 2371 parsed=parsed, watcher = self.cooker.add_filewatch)
2265 return True 2372 return True
2266 2373
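The ExpansionError handler above trims bitbake-internal frames from the traceback; this relies on tb_next being writable, which Python has allowed since 3.7. A standalone sketch of the leading-frame skip:

import sys

def skip_frames(tb, fn_prefix):
    # Drop leading traceback frames whose source file lives under fn_prefix;
    # because tb_next is assignable, the same helper can also splice frames
    # out of the middle of the chain, as the handler above does.
    while tb and tb.tb_frame.f_code.co_filename.startswith(fn_prefix):
        tb = tb.tb_next
    return tb

try:
    raise ValueError("boom")
except ValueError:
    etype, value, tb = sys.exc_info()
    trimmed = skip_frames(tb, "/hypothetical/prefix")
    print(trimmed is tb)   # True here: nothing matched, nothing skipped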
2267 def reparse(self, filename): 2374 def reparse(self, filename):
2375 bb.cache.SiggenRecipeInfo.reset()
2268 to_reparse = set() 2376 to_reparse = set()
2269 for mc in self.cooker.multiconfigs: 2377 for mc in self.cooker.multiconfigs:
2270 to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename))) 2378 layername = self.cooker.collections[mc].calc_bbfile_priority(filename)[2]
2379 to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename), layername))
2271 2380
2272 for mc, filename, appends in to_reparse: 2381 for mc, filename, appends, layername in to_reparse:
2273 infos = self.bb_caches[mc].parse(filename, appends) 2382 infos = self.bb_caches[mc].parse(filename, appends, layername)
2274 for vfn, info_array in infos: 2383 for vfn, info_array in infos:
2275 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array) 2384 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)